Remove more instances of unwrap()

ConnorSkees 2020-01-18 19:00:49 -05:00
parent d2ae3a4a0b
commit 622ca8ceef
6 changed files with 74 additions and 36 deletions
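
The pattern throughout is the same: avoid bare unwrap() and raw slice indexing, and either attach a message with expect()/get_mut() or propagate the failure with ?. A minimal standalone sketch of the indexing half of that idea (the vector and message below are illustrative, not taken from this codebase):

    fn main() {
        let mut blocks: Vec<Vec<&str>> = vec![vec!["color: red"]];
        let idx = 1;

        // `blocks[idx - 1]` would panic with only "index out of bounds";
        // `get_mut` + `expect` panics with a message naming the broken invariant.
        let block = blocks
            .get_mut(idx - 1)
            .expect("expected block to exist at root");
        block.push("background: blue");

        println!("{:?}", block);
    }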

View File

@@ -156,9 +156,9 @@ pub enum Color {
 }
 
 impl fmt::UpperHex for Color {
+    #[allow(clippy::match_same_arms, clippy::many_single_char_names)]
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         // I want them all to be on separate lines so doing things with regex or multiple cursors is easier
-        #[allow(clippy::match_same_arms)]
         match self {
             Self::AliceBlue => write!(f, "#F0F8FF"),
             Self::AntiqueWhite => write!(f, "#FAEBD7"),

View File

@@ -70,16 +70,25 @@ impl Css {
         match stmt {
             Stmt::Style(s) => {
                 if self.at_root {
-                    self.blocks[self.idx - 1].push_style(s)
+                    self.blocks
+                        .get_mut(self.idx - 1)
+                        .expect("expected block to exist at root")
+                        .push_style(s)
                 } else {
-                    self.blocks[self.idx + self.inner_rulesets - 1].push_style(s)
+                    self.blocks
+                        .get_mut(self.idx + self.inner_rulesets - 1)
+                        .expect("expected block to exist")
+                        .push_style(s)
                 }
             }
             Stmt::MultilineComment(s) => {
                 if self.idx == 0 {
                     self.blocks.push(Toplevel::MultilineComment(s));
                 } else {
-                    self.blocks[self.idx + self.inner_rulesets - 1].push_comment(s)
+                    self.blocks
+                        .get_mut(self.idx + self.inner_rulesets - 1)
+                        .expect("expected block to exist")
+                        .push_comment(s)
                 }
             }
             Stmt::RuleSet(RuleSet {

View File

@@ -116,10 +116,7 @@ pub fn eat_call_args<I: Iterator<Item = Token>>(toks: &mut Peekable<I>) -> CallA
                 // }
             }
             TokenKind::Symbol(Symbol::CloseParen) => {
-                args.push(CallArg {
-                    name: name,
-                    val: val,
-                });
+                args.push(CallArg { name, val });
                 break;
             }
             TokenKind::Symbol(Symbol::Comma) => {
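
The CallArg change above is just Rust's struct field init shorthand: when a local binding shares the field's name, name: name collapses to name. A quick standalone illustration (this CallArg is a stand-in, not the crate's real definition):

    #[derive(Debug)]
    struct CallArg {
        name: Option<String>,
        val: String,
    }

    fn main() {
        let name = Some(String::from("color"));
        let val = String::from("red");

        // Equivalent to `CallArg { name: name, val: val }`.
        let arg = CallArg { name, val };
        println!("{:?}", arg);
    }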

View File

@@ -1,34 +1,41 @@
 use crate::common::Scope;
-use crate::{Stmt, StyleSheet};
+use crate::{SassResult, Stmt, StyleSheet};
 use std::ffi::OsStr;
 use std::path::Path;
 
-pub fn import<P: AsRef<Path>>(name: P) -> (Vec<Stmt>, Scope) {
+pub fn import<P: AsRef<Path>>(path: P) -> SassResult<(Vec<Stmt>, Scope)> {
     let mut rules: Vec<Stmt> = Vec::new();
     let mut scope = Scope::new();
-    let path = name.as_ref().to_path_buf();
-    let name = path.file_name().unwrap();
-    if path.extension() == Some(OsStr::new(".css")) {
+    let path_buf = path.as_ref().to_path_buf();
+    let name = path_buf.file_name().expect("todo! path ended in `..`");
+    if path_buf.extension() == Some(OsStr::new(".css")) {
         // || name.starts_with("http://") || name.starts_with("https://") {
         todo!("handle css imports")
     }
-    let mut p1 = path.clone();
-    p1.push("/index.scss");
-    let mut p2 = path.clone();
-    p2.push("/_index.scss");
+    let mut p1 = path_buf.clone();
+    p1.push("index.scss");
+    let mut p2 = path_buf.clone();
+    p2.push("_index.scss");
     let paths = [
-        path.with_file_name(format!("{}.scss", name.to_str().unwrap())),
-        path.with_file_name(format!("_{}.scss", name.to_str().unwrap())),
-        path,
+        path_buf.with_file_name(format!(
+            "{}.scss",
+            name.to_str().expect("path should be UTF-8")
+        )),
+        path_buf.with_file_name(format!(
+            "_{}.scss",
+            name.to_str().expect("path should be UTF-8")
+        )),
+        path_buf,
         p1,
         p2,
     ];
     for name in &paths {
         if name.is_file() {
-            let (rules2, scope2) = StyleSheet::export_from_path(name.to_str().unwrap()).unwrap();
+            let (rules2, scope2) =
+                StyleSheet::export_from_path(name.to_str().expect("path should be UTF-8"))?;
             rules.extend(rules2);
             scope.merge(scope2);
         }
     }
-    (rules, scope)
+    Ok((rules, scope))
 }
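
With import() now returning SassResult<(Vec<Stmt>, Scope)>, callers bubble failures up with ? instead of panicking inside the importer. A minimal sketch of that shape, assuming SassResult is an ordinary Result alias (the SassError type here is made up for illustration):

    use std::fs;

    // Assumed shape only; the crate's real error type may differ.
    #[derive(Debug)]
    struct SassError(String);

    type SassResult<T> = Result<T, SassError>;

    fn import(path: &str) -> SassResult<(usize, String)> {
        // `?` propagates the failure to the caller instead of unwrapping here.
        let contents = fs::read_to_string(path).map_err(|e| SassError(e.to_string()))?;
        Ok((contents.len(), contents))
    }

    fn main() {
        match import("missing.scss") {
            Ok((len, _)) => println!("imported {} bytes", len),
            Err(e) => eprintln!("import failed: {:?}", e),
        }
    }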

View File

@@ -3,7 +3,7 @@
     clippy::restriction,
     clippy::pedantic,
     clippy::nursery,
-    // clippy::cargo
+    clippy::cargo
 )]
 #![deny(missing_debug_implementations)]
 #![allow(
@@ -28,7 +28,7 @@
 // todo! handle erroring on styles at the toplevel
 use std::fmt::{self, Display};
 use std::fs;
-use std::io::{self, Write, BufWriter, stdout};
+use std::io::{self, stdout, BufWriter, Write};
 use std::iter::{Iterator, Peekable};
 use std::path::Path;
@@ -289,10 +289,18 @@ impl<'a> StyleSheetParser<'a> {
                     rules.push(Stmt::MultilineComment(comment));
                 }
                 TokenKind::AtRule(AtRule::Import) => {
-                    self.lexer.next();
+                    let Token { pos, .. } = self
+                        .lexer
+                        .next()
+                        .expect("this must exist because we have already peeked");
                     devour_whitespace(&mut self.lexer);
                     let mut file_name = String::new();
-                    match self.lexer.next().unwrap().kind {
+                    match self
+                        .lexer
+                        .next()
+                        .unwrap_or_else(|| self.error(pos, "expected value after @import"))
+                        .kind
+                    {
                         TokenKind::Symbol(Symbol::DoubleQuote) => {
                             while let Some(tok) = self.lexer.next() {
                                 if tok.kind == TokenKind::Symbol(Symbol::DoubleQuote) {
@@ -311,12 +319,15 @@ impl<'a> StyleSheetParser<'a> {
                         }
                         _ => todo!("expected ' or \" after @import"),
                     }
-                    let Token { kind, pos } = self.lexer.next().unwrap();
+                    let Token { kind, pos } = self
+                        .lexer
+                        .next()
+                        .expect("this must exist because we have already peeked");
                     if kind != TokenKind::Symbol(Symbol::SemiColon) {
                         self.error(pos, "expected `;` after @import declaration");
                     }
-                    let (new_rules, new_scope) = import(file_name);
+                    let (new_rules, new_scope) = import(file_name)?;
                     rules.extend(new_rules);
                     self.global_scope.merge(new_scope);
                 }
@@ -399,7 +410,9 @@ fn eat_include<I: Iterator<Item = Token>>(
 ) -> Result<Vec<Stmt>, (Pos, &'static str)> {
     toks.next();
     devour_whitespace(toks);
-    let Token { kind, pos } = toks.next().unwrap();
+    let Token { kind, pos } = toks
+        .next()
+        .expect("this must exist because we have already peeked");
     let name = if let TokenKind::Ident(s) = kind {
         s
     } else {
@@ -444,7 +457,9 @@ fn parse_mixin<I: Iterator<Item = Token>>(
     toks: &mut Peekable<I>,
     scope: Scope,
 ) -> Result<(String, Mixin), Printer> {
-    let Token { pos, .. } = toks.next().unwrap();
+    let Token { pos, .. } = toks
+        .next()
+        .expect("this must exist because we have already peeked");
     devour_whitespace(toks);
     let name = if let Some(Token {
         kind: TokenKind::Ident(s),
@@ -561,7 +576,9 @@ pub(crate) fn eat_expr<I: Iterator<Item = Token>>(
             ))));
         }
         TokenKind::Variable(_) => {
-            let tok = toks.next().unwrap();
+            let tok = toks
+                .next()
+                .expect("this must exist because we have already peeked");
             let name = if let TokenKind::Variable(n) = tok.kind {
                 n
             } else {
@@ -584,7 +601,9 @@ pub(crate) fn eat_expr<I: Iterator<Item = Token>>(
             }
         }
         TokenKind::MultilineComment(_) => {
-            let tok = toks.next().unwrap();
+            let tok = toks
+                .next()
+                .expect("this must exist because we have already peeked");
             let s = if let TokenKind::MultilineComment(s) = &tok.kind {
                 s
             } else {
@@ -1205,8 +1224,8 @@ mod test_mixins {
 #[cfg(test)]
 mod test_imports {
     use super::*;
-    use Write;
     use tempfile::Builder;
+    use Write;
 
     macro_rules! test_import {
         ($func:ident, $input:literal => $output:literal | $( $name:literal($content:literal) ),*) => {
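
The @import hunks above also replace a bare unwrap() with unwrap_or_else(|| self.error(pos, ...)), a pattern that works when the error helper diverges: its ! return coerces to whatever value the caller expects. A small standalone sketch of that pattern with hypothetical Token/Parser types:

    #[derive(Debug)]
    struct Token {
        kind: char,
        pos: usize,
    }

    struct Parser {
        tokens: Vec<Token>,
    }

    impl Parser {
        // Diverges, so a call to it can stand in anywhere a `Token` is expected.
        fn error(&self, pos: usize, message: &str) -> ! {
            eprintln!("Error: {} at position {}", message, pos);
            std::process::exit(1)
        }

        fn expect_next(&mut self, pos: usize) -> Token {
            self.tokens
                .pop()
                .unwrap_or_else(|| self.error(pos, "expected value after @import"))
        }
    }

    fn main() {
        let mut parser = Parser {
            tokens: vec![Token { kind: '"', pos: 0 }],
        };
        println!("{:?}", parser.expect_next(0));
    }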

View File

@@ -30,9 +30,15 @@ impl Mixin {
             if arg.is_named() {
                 todo!("keyword args")
             } else {
-                self.scope
-                    .vars
-                    .insert(self.args.0[idx].name.clone(), arg.val.clone());
+                self.scope.vars.insert(
+                    self.args
+                        .0
+                        .get(idx)
+                        .expect("too many args passed to mixin")
+                        .name
+                        .clone(),
+                    arg.val.clone(),
+                );
             }
         }
         self