From 41bfea3cea972fb2681d15fc4ebe6ca48ce7745e Mon Sep 17 00:00:00 2001
From: ConnorSkees <39542938+ConnorSkees@users.noreply.github.com>
Date: Fri, 26 Jun 2020 08:03:43 -0400
Subject: [PATCH] update dependencies

---
 Cargo.toml                      |   6 +-
 src/lib.rs                      |   2 +-
 src/parse/media.rs              |   4 +-
 src/parse/value.rs              | 116 +++++++++++--------------------
 src/utils/comment_whitespace.rs |   9 +--
 5 files changed, 49 insertions(+), 88 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index a5c72b7..838d9d3 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -9,7 +9,7 @@ keywords = ["scss", "sass", "css", "web"]
 repository = "https://github.com/connorskees/grass"
 authors = ["ConnorSkees <39542938+ConnorSkees@users.noreply.github.com>"]
 edition = "2018"
-exclude = ["*.scss", "sass-spec", "tests", "Cargo.lock"]
+include = ["src", "Cargo.toml", "README.md", "CHANGELOG.md"]
 default-run = "grass"
 
 [[bin]]
@@ -52,7 +52,7 @@ num-traits = "0.2.12"
 once_cell = "1.4.0"
 rand = { version = "0.7.3", optional = true }
 codemap = "0.1.3"
-peekmore = "0.5.1"
+peekmore = "0.5.2"
 wasm-bindgen = { version = "0.2.63", optional = true }
 beef = "0.4.4"
 # criterion is not a dev-dependency because it makes tests take too
@@ -77,7 +77,7 @@ bench = ["criterion"]
 
 [dev-dependencies]
 tempfile = "3.1.0"
-paste = "0.1.17"
+paste = "0.1.18"
 
 [profile.release]
 debug = true
diff --git a/src/lib.rs b/src/lib.rs
index c8e7ed0..46fde45 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -9,7 +9,7 @@ Spec progress as of 2020-06-22:
 
 ## Use as library
 ```
-fn main() -> Result<(), grass::Error> {
+fn main() -> Result<(), Box<grass::Error>> {
     let sass = grass::from_string("a { b { color: &; } }".to_string())?;
     assert_eq!(sass, "a b {\n  color: a b;\n}\n");
     Ok(())
diff --git a/src/parse/media.rs b/src/parse/media.rs
index 2a5e761..bb28c67 100644
--- a/src/parse/media.rs
+++ b/src/parse/media.rs
@@ -14,8 +14,8 @@ impl<'a> Parser<'a> {
             Err(..) => return Ok(false),
         };
         if peeked_identifier == ident {
-            self.toks.take(ident.chars().count()).for_each(drop);
-            self.toks.reset_cursor();
+            self.toks.truncate_iterator_to_cursor();
+            self.toks.next();
             return Ok(true);
         }
         self.toks.reset_cursor();
diff --git a/src/parse/value.rs b/src/parse/value.rs
index ee1da60..7f35c04 100644
--- a/src/parse/value.rs
+++ b/src/parse/value.rs
@@ -186,8 +186,9 @@ impl<'a> Parser<'a> {
 
             if lower == "min" {
                 match self.try_parse_min_max("min", true)? {
-                    Some((val, len)) => {
-                        self.toks.take(len).for_each(drop);
+                    Some(val) => {
+                        self.toks.truncate_iterator_to_cursor();
+                        self.toks.next();
                         return Ok(
                             IntermediateValue::Value(Value::String(val, QuoteKind::None))
                                 .span(span),
@@ -199,8 +200,9 @@ impl<'a> Parser<'a> {
                 }
             } else if lower == "max" {
                 match self.try_parse_min_max("max", true)? {
-                    Some((val, len)) => {
-                        self.toks.take(len).for_each(drop);
+                    Some(val) => {
+                        self.toks.truncate_iterator_to_cursor();
+                        self.toks.next();
                         return Ok(
                             IntermediateValue::Value(Value::String(val, QuoteKind::None))
                                 .span(span),
@@ -691,12 +693,10 @@ impl<'a> Parser<'a> {
 
     fn try_eat_url(&mut self) -> SassResult<Option<String>> {
         let mut buf = String::from("url(");
-        let mut peek_counter = 0;
-        peek_counter += peek_whitespace(self.toks);
+        peek_whitespace(self.toks);
         while let Some(tok) = self.toks.peek() {
             let kind = tok.kind;
             self.toks.advance_cursor();
-            peek_counter += 1;
             if kind == '!'
                 || kind == '%'
                 || kind == '&'
@@ -709,9 +709,7 @@ impl<'a> Parser<'a> {
             } else if kind == '#' {
                 if let Some(Token { kind: '{', .. }) = self.toks.peek() {
                     self.toks.advance_cursor();
-                    peek_counter += 1;
-                    let (interpolation, count) = self.peek_interpolation()?;
-                    peek_counter += count;
+                    let interpolation = self.peek_interpolation()?;
                     match interpolation.node {
                         Value::String(ref s, ..) => buf.push_str(s),
                         v => buf.push_str(v.to_css_string(interpolation.span)?.borrow()),
@@ -721,17 +719,19 @@ impl<'a> Parser<'a> {
                 }
             } else if kind == ')' {
                 buf.push(')');
-                self.toks.take(peek_counter).for_each(drop);
+                self.toks.truncate_iterator_to_cursor();
+                self.toks.next();
                 return Ok(Some(buf));
             } else if kind.is_whitespace() {
-                peek_counter += peek_whitespace(self.toks);
+                peek_whitespace(self.toks);
                 let next = match self.toks.peek() {
                     Some(v) => v,
                     None => break,
                 };
                 if next.kind == ')' {
                     buf.push(')');
-                    self.toks.take(peek_counter + 1).for_each(drop);
+                    self.toks.truncate_iterator_to_cursor();
+                    self.toks.next();
                     return Ok(Some(buf));
                 } else {
                     break;
@@ -744,23 +744,20 @@ impl<'a> Parser<'a> {
         Ok(None)
     }
 
-    fn peek_number(&mut self) -> SassResult<Option<(String, usize)>> {
+    fn peek_number(&mut self) -> SassResult<Option<String>> {
         let mut buf = String::new();
-        let mut peek_counter = 0;
 
-        let (num, count) = self.peek_whole_number();
-        peek_counter += count;
+        let num = self.peek_whole_number();
         buf.push_str(&num);
 
         self.toks.advance_cursor();
 
         if let Some(Token { kind: '.', .. }) = self.toks.peek() {
             self.toks.advance_cursor();
-            let (num, count) = self.peek_whole_number();
-            if count == 0 {
+            let num = self.peek_whole_number();
+            if num.is_empty() {
                 return Ok(None);
             }
-            peek_counter += count;
             buf.push_str(&num);
         } else {
             self.toks.move_cursor_back().unwrap();
@@ -768,7 +765,7 @@ impl<'a> Parser<'a> {
 
         let next = match self.toks.peek() {
             Some(tok) => tok,
-            None => return Ok(Some((buf, peek_counter))),
+            None => return Ok(Some(buf)),
         };
 
         match next.kind {
@@ -776,56 +773,49 @@ impl<'a> Parser<'a> {
                 let unit = peek_ident_no_interpolation(self.toks, true, self.span_before)?.node;
 
                 buf.push_str(&unit);
-                peek_counter += unit.chars().count();
             }
             '%' => {
                 self.toks.advance_cursor();
-                peek_counter += 1;
                 buf.push('%');
             }
             _ => {}
         }
 
-        Ok(Some((buf, peek_counter)))
+        Ok(Some(buf))
     }
 
-    fn peek_whole_number(&mut self) -> (String, usize) {
+    fn peek_whole_number(&mut self) -> String {
        let mut buf = String::new();
-        let mut peek_counter = 0;
        while let Some(tok) = self.toks.peek() {
            if tok.kind.is_ascii_digit() {
                buf.push(tok.kind);
-                peek_counter += 1;
                self.toks.advance_cursor();
            } else {
-                return (buf, peek_counter);
+                return buf;
            }
        }
-        (buf, peek_counter)
+        buf
    }

    fn try_parse_min_max(
        &mut self,
        fn_name: &str,
        allow_comma: bool,
-    ) -> SassResult<Option<(String, usize)>> {
+    ) -> SassResult<Option<String>> {
        let mut buf = if allow_comma {
            format!("{}(", fn_name)
        } else {
            String::new()
        };
-        let mut peek_counter = 0;
-        peek_counter += peek_whitespace(self.toks);
+        peek_whitespace(self.toks);
        while let Some(tok) = self.toks.peek() {
            let kind = tok.kind;
-            peek_counter += 1;
            match kind {
                '+' | '-' | '0'..='9' => {
                    self.toks.advance_cursor();
-                    if let Some((number, count)) = self.peek_number()? {
+                    if let Some(number) = self.peek_number()? {
                        buf.push(kind);
                        buf.push_str(&number);
-                        peek_counter += count;
                    } else {
                        return Ok(None);
                    }
@@ -834,9 +824,7 @@ impl<'a> Parser<'a> {
                    self.toks.advance_cursor();
                    if let Some(Token { kind: '{', .. }) = self.toks.peek() {
                        self.toks.advance_cursor();
-                        peek_counter += 1;
-                        let (interpolation, count) = self.peek_interpolation()?;
-                        peek_counter += count;
+                        let interpolation = self.peek_interpolation()?;
                        match interpolation.node {
                            Value::String(ref s, ..) => buf.push_str(s),
                            v => buf.push_str(v.to_css_string(interpolation.span)?.borrow()),
@@ -846,30 +834,21 @@
                        }
                    }
                }
                'c' | 'C' => {
-                    if let Some((name, additional_peek_count)) =
-                        self.try_parse_min_max_function("calc")?
-                    {
-                        peek_counter += additional_peek_count;
+                    if let Some(name) = self.try_parse_min_max_function("calc")? {
                        buf.push_str(&name);
                    } else {
                        return Ok(None);
                    }
                }
                'e' | 'E' => {
-                    if let Some((name, additional_peek_count)) =
-                        self.try_parse_min_max_function("env")?
-                    {
-                        peek_counter += additional_peek_count;
+                    if let Some(name) = self.try_parse_min_max_function("env")? {
                        buf.push_str(&name);
                    } else {
                        return Ok(None);
                    }
                }
                'v' | 'V' => {
-                    if let Some((name, additional_peek_count)) =
-                        self.try_parse_min_max_function("var")?
-                    {
-                        peek_counter += additional_peek_count;
+                    if let Some(name) = self.try_parse_min_max_function("var")? {
                        buf.push_str(&name);
                    } else {
                        return Ok(None);
@@ -878,9 +857,8 @@
                '(' => {
                    self.toks.advance_cursor();
                    buf.push('(');
-                    if let Some((val, len)) = self.try_parse_min_max(fn_name, false)? {
+                    if let Some(val) = self.try_parse_min_max(fn_name, false)? {
                        buf.push_str(&val);
-                        peek_counter += len;
                    } else {
                        return Ok(None);
                    }
@@ -912,11 +890,9 @@
                    if !matches!(self.toks.peek(), Some(Token { kind: '(', .. })) {
                        return Ok(None);
                    }
-                    peek_counter += 1;

-                    if let Some((val, len)) = self.try_parse_min_max(fn_name, false)? {
+                    if let Some(val) = self.try_parse_min_max(fn_name, false)? {
                        buf.push_str(&val);
-                        peek_counter += len;
                    } else {
                        return Ok(None);
                    }
@@ -924,7 +900,7 @@
                _ => return Ok(None),
            }

-            peek_counter += peek_whitespace(self.toks);
+            peek_whitespace(self.toks);

            let next = match self.toks.peek() {
                Some(tok) => tok,
@@ -933,10 +909,9 @@

            match next.kind {
                ')' => {
-                    peek_counter += 1;
                    self.toks.advance_cursor();
                    buf.push(')');
-                    return Ok(Some((buf, peek_counter)));
+                    return Ok(Some(buf));
                }
                '+' | '-' | '*' | '/' => {
                    buf.push(' ');
@@ -955,19 +930,15 @@
                _ => return Ok(None),
            }

-            peek_counter += peek_whitespace(self.toks);
+            peek_whitespace(self.toks);
        }

-        Ok(Some((buf, peek_counter)))
+        Ok(Some(buf))
    }

    #[allow(dead_code, unused_mut, unused_variables, unused_assignments)]
-    fn try_parse_min_max_function(
-        &mut self,
-        fn_name: &'static str,
-    ) -> SassResult<Option<(String, usize)>> {
+    fn try_parse_min_max_function(&mut self, fn_name: &'static str) -> SassResult<Option<String>> {
        let mut ident = peek_ident_no_interpolation(self.toks, false, self.span_before)?.node;
-        let mut peek_counter = ident.chars().count();
        ident.make_ascii_lowercase();
        if ident != fn_name {
            return Ok(None);
@@ -977,22 +948,17 @@
        }
        self.toks.advance_cursor();
        ident.push('(');
-        peek_counter += 1;
        todo!("special functions inside `min()` or `max()`")
    }

-    fn peek_interpolation(&mut self) -> SassResult<(Spanned<Value>, usize)> {
+    fn peek_interpolation(&mut self) -> SassResult<Spanned<Value>> {
        let vec = peek_until_closing_curly_brace(self.toks)?;
-        let peek_counter = vec.len();
        self.toks.advance_cursor();
        let val = self.parse_value_from_vec(vec)?;
-        Ok((
-            Spanned {
-                node: val.node.eval(val.span)?.node.unquote(),
-                span: val.span,
-            },
-            peek_counter,
-        ))
+        Ok(Spanned {
+            node: val.node.eval(val.span)?.node.unquote(),
+            span: val.span,
+        })
    }

    fn peek_escape(&mut self) -> SassResult<String> {
diff --git a/src/utils/comment_whitespace.rs b/src/utils/comment_whitespace.rs
index 51dc749..4f95c46 100644
--- a/src/utils/comment_whitespace.rs
+++ b/src/utils/comment_whitespace.rs
@@ -26,18 +26,13 @@ pub(crate) fn devour_whitespace<I: Iterator<Item = W>, W: IsWhitespace>(
     found_whitespace
 }
 
-pub(crate) fn peek_whitespace<I: Iterator<Item = W>, W: IsWhitespace>(
-    s: &mut PeekMoreIterator<I>,
-) -> usize {
-    let mut peek_counter = 0;
+pub(crate) fn peek_whitespace<I: Iterator<Item = W>, W: IsWhitespace>(s: &mut PeekMoreIterator<I>) {
     while let Some(w) = s.peek() {
         if !w.is_whitespace() {
             break;
         }
-        peek_counter += 1;
-        s.peek_forward(1);
+        s.advance_cursor();
     }
-    peek_counter
 }
 
 /// Eat tokens until a newline
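
Note (not part of the patch itself): beyond the version bumps, this change drops the hand-maintained `peek_counter`s and relies entirely on peekmore's cursor: `advance_cursor()` for speculative lookahead, `reset_cursor()` to abandon a speculative parse, and `truncate_iterator_to_cursor()` followed by `next()` to consume the peeked tokens once a parse succeeds — which appears to be the reason peekmore is bumped from 0.5.1 to 0.5.2. A minimal, self-contained sketch of the lookahead/rollback half of that pattern, written against the peekmore crate directly rather than taken from grass (the `toks` variable and the literal input are illustrative only):

```rust
use peekmore::PeekMore;

fn main() {
    let mut toks = "abc".chars().peekmore();

    // Speculative lookahead: only the cursor moves, nothing is consumed.
    assert_eq!(toks.peek(), Some(&'a'));
    toks.advance_cursor();
    assert_eq!(toks.peek(), Some(&'b'));

    // Abandon the speculative parse: the cursor snaps back to the start...
    toks.reset_cursor();
    assert_eq!(toks.peek(), Some(&'a'));

    // ...and the underlying iterator was never advanced.
    assert_eq!(toks.next(), Some('a'));
}
```

On the success path, the hunks above commit the peeked run with `self.toks.truncate_iterator_to_cursor()` plus `self.toks.next()` in place of the old `self.toks.take(peek_counter).for_each(drop)`, so the parser no longer needs to thread a peek count through every helper's return type.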