diff --git a/Cargo.toml b/Cargo.toml
index a223167..b4dfce5 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -57,7 +57,6 @@ num-traits = "0.2.14"
 once_cell = "1.5.2"
 rand = { version = "0.8", optional = true }
 codemap = "0.1.3"
-peekmore = "1.0"
 wasm-bindgen = { version = "0.2.68", optional = true }
 beef = "0.5"
 phf = { version = "0.9", features = ["macros"] }
diff --git a/src/lexer.rs b/src/lexer.rs
index 8947847..3af8eed 100644
--- a/src/lexer.rs
+++ b/src/lexer.rs
@@ -7,20 +7,87 @@ use crate::Token;
 const FORM_FEED: char = '\x0C';

 #[derive(Debug, Clone)]
-pub(crate) struct Lexer<'a> {
-    buf: Peekable<Chars<'a>>,
-    pos: usize,
-    file: &'a Arc<File>,
+pub(crate) struct Lexer {
+    buf: Vec<Token>,
+    cursor: usize,
+    amt_peeked: usize,
 }

-impl<'a> Iterator for Lexer<'a> {
+impl Lexer {
+    fn peek_cursor(&self) -> usize {
+        self.cursor + self.amt_peeked
+    }
+
+    pub fn peek(&self) -> Option<Token> {
+        self.buf.get(self.peek_cursor()).copied()
+    }
+
+    pub fn reset_cursor(&mut self) {
+        self.amt_peeked = 0;
+    }
+
+    pub fn advance_cursor(&mut self) {
+        self.amt_peeked += 1;
+    }
+
+    pub fn move_cursor_back(&mut self) {
+        self.amt_peeked = self.amt_peeked.saturating_sub(1);
+    }
+
+    pub fn peek_next(&mut self) -> Option<Token> {
+        self.amt_peeked += 1;
+
+        self.peek()
+    }
+
+    pub fn peek_previous(&mut self) -> Option<Token> {
+        self.buf.get(self.peek_cursor() - 1).copied()
+    }
+
+    pub fn peek_forward(&mut self, n: usize) -> Option<Token> {
+        self.amt_peeked += n;
+
+        self.peek()
+    }
+
+    pub fn peek_backward(&mut self, n: usize) -> Option<Token> {
+        self.amt_peeked = self.amt_peeked.checked_sub(n)?;
+
+        self.peek()
+    }
+
+    pub fn truncate_iterator_to_cursor(&mut self) {
+        self.cursor += self.amt_peeked;
+        self.amt_peeked = 0;
+    }
+}
+
+impl Iterator for Lexer {
+    type Item = Token;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.buf.get(self.cursor).copied().map(|tok| {
+            self.cursor += 1;
+            self.amt_peeked = self.amt_peeked.saturating_sub(1);
+            tok
+        })
+    }
+}
+
+struct TokenLexer<'a> {
+    buf: Peekable<Chars<'a>>,
+    cursor: usize,
+    file: Arc<File>,
+}
+
+impl<'a> Iterator for TokenLexer<'a> {
     type Item = Token;

     fn next(&mut self) -> Option<Self::Item> {
         let kind = match self.buf.next()? {
             FORM_FEED => '\n',
             '\r' => {
                 if self.buf.peek() == Some(&'\n') {
-                    self.pos += 1;
+                    self.cursor += 1;
                     self.buf.next();
                 }
                 '\n'
@@ -31,18 +98,29 @@ impl<'a> Iterator for Lexer<'a> {
         let pos = self
             .file
             .span
-            .subspan(self.pos as u64, (self.pos + len) as u64);
-        self.pos += len;
+            .subspan(self.cursor as u64, (self.cursor + len) as u64);
+        self.cursor += len;
         Some(Token { pos, kind })
     }
 }

-impl<'a> Lexer<'a> {
-    pub fn new(file: &'a Arc<File>) -> Lexer<'a> {
-        Lexer {
+impl Lexer {
+    pub fn new_from_file(file: &Arc<File>) -> Self {
+        let buf = TokenLexer {
+            file: Arc::clone(file),
             buf: file.source().chars().peekable(),
-            pos: 0,
-            file,
+            cursor: 0,
+        }
+        .collect();
+
+        Self::new(buf)
+    }
+
+    pub fn new(buf: Vec<Token>) -> Lexer {
+        Lexer {
+            buf,
+            cursor: 0,
+            amt_peeked: 0,
+        }
     }
 }
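The core of the change is the cursor bookkeeping in the new `Lexer` above: `cursor` marks how far `next()` has consumed the token buffer, while `amt_peeked` records how far the `peek*` methods have speculatively run ahead, so a caller can either `reset_cursor()` to abandon a look-ahead or `truncate_iterator_to_cursor()` to commit it. The sketch below is a minimal, standalone model of just that bookkeeping — it peeks over plain `char`s rather than the crate's `Token`, and the `PeekBuffer` name and the `main` driver are illustrative, not part of the diff; only the method semantics mirror the new `Lexer`.

```rust
// Minimal model of the peek-cursor bookkeeping introduced in src/lexer.rs.
#[derive(Debug, Clone)]
struct PeekBuffer {
    buf: Vec<char>,
    cursor: usize,     // index of the next item `next()` will yield
    amt_peeked: usize, // how far the peek cursor has run ahead of `cursor`
}

impl PeekBuffer {
    fn new(buf: Vec<char>) -> Self {
        Self { buf, cursor: 0, amt_peeked: 0 }
    }

    fn peek_cursor(&self) -> usize {
        self.cursor + self.amt_peeked
    }

    /// Look at the item under the peek cursor without consuming anything.
    fn peek(&self) -> Option<char> {
        self.buf.get(self.peek_cursor()).copied()
    }

    /// Move the peek cursor one item ahead; `next()` is unaffected.
    fn advance_cursor(&mut self) {
        self.amt_peeked += 1;
    }

    /// Abandon the look-ahead: point the peek cursor back at `cursor`.
    fn reset_cursor(&mut self) {
        self.amt_peeked = 0;
    }

    /// Commit the look-ahead, as if everything peeked had been consumed.
    fn truncate_iterator_to_cursor(&mut self) {
        self.cursor += self.amt_peeked;
        self.amt_peeked = 0;
    }
}

impl Iterator for PeekBuffer {
    type Item = char;

    fn next(&mut self) -> Option<Self::Item> {
        self.buf.get(self.cursor).copied().map(|c| {
            self.cursor += 1;
            // Consuming one item uses up one unit of look-ahead, so the peek
            // cursor keeps pointing at the same absolute element.
            self.amt_peeked = self.amt_peeked.saturating_sub(1);
            c
        })
    }
}

fn main() {
    let mut toks = PeekBuffer::new("abcd".chars().collect());

    assert_eq!(toks.peek(), Some('a')); // peeking never consumes
    toks.advance_cursor();
    assert_eq!(toks.peek(), Some('b')); // the peek cursor ran ahead...
    assert_eq!(toks.next(), Some('a')); // ...but `next()` still yields 'a'
    assert_eq!(toks.peek(), Some('b')); // and the peek cursor stays on 'b'

    toks.advance_cursor();
    toks.advance_cursor();
    assert_eq!(toks.peek(), Some('d'));
    toks.reset_cursor(); // abandon the speculative look-ahead
    assert_eq!(toks.peek(), Some('b'));

    toks.advance_cursor();
    toks.advance_cursor();
    toks.truncate_iterator_to_cursor(); // commit 'b' and 'c' as consumed
    assert_eq!(toks.next(), Some('d'));
    assert_eq!(toks.next(), None);
}
```

Because `next()` shrinks `amt_peeked` as it consumes, interleaving consumption with speculative look-ahead keeps the peek position stable, which is what the `peek_forward`/`reset_cursor` call sites later in the diff rely on.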
diff --git a/src/lib.rs b/src/lib.rs
index 19d247d..0cab96c 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -101,8 +101,6 @@ pub(crate) use beef::lean::Cow;

 use codemap::CodeMap;

-use peekmore::PeekMore;
-
 pub use crate::error::{SassError as Error, SassResult as Result};
 pub(crate) use crate::token::Token;
 use crate::{
@@ -288,10 +286,7 @@ pub fn from_path(p: &str, options: &Options) -> Result<String> {
     let empty_span = file.span.subspan(0, 0);

     let stmts = Parser {
-        toks: &mut Lexer::new(&file)
-            .collect::<Vec<Token>>()
-            .into_iter()
-            .peekmore(),
+        toks: &mut Lexer::new_from_file(&file),
         map: &mut map,
         path: p.as_ref(),
         scopes: &mut Scopes::new(),
@@ -334,10 +329,7 @@ pub fn from_string(p: String, options: &Options) -> Result<String> {
     let file = map.add_file("stdin".into(), p);
     let empty_span = file.span.subspan(0, 0);
     let stmts = Parser {
-        toks: &mut Lexer::new(&file)
-            .collect::<Vec<Token>>()
-            .into_iter()
-            .peekmore(),
+        toks: &mut Lexer::new_from_file(&file),
         map: &mut map,
         path: Path::new(""),
         scopes: &mut Scopes::new(),
@@ -371,10 +363,7 @@ pub fn from_string(p: String) -> std::result::Result<String, JsValue> {
     let empty_span = file.span.subspan(0, 0);

     let stmts = Parser {
-        toks: &mut Lexer::new(&file)
-            .collect::<Vec<Token>>()
-            .into_iter()
-            .peekmore(),
+        toks: &mut Lexer::new_from_file(&file),
         map: &mut map,
         path: Path::new(""),
         scopes: &mut Scopes::new(),
diff --git a/src/output.rs b/src/output.rs
index 793eb9e..a737b85 100644
--- a/src/output.rs
+++ b/src/output.rs
@@ -410,7 +410,7 @@ impl CompressedFormatter {
     fn write_block_entry(&self, buf: &mut Vec<u8>, styles: &[BlockEntry]) -> SassResult<()> {
         let mut styles = styles.iter();

-        while let Some(style) = styles.next() {
+        for style in &mut styles {
             match style {
                 BlockEntry::Style(s) => {
                     let value = s.value.node.to_css_string(s.value.span)?;
diff --git a/src/parse/args.rs b/src/parse/args.rs
index 8df0436..277d70e 100644
--- a/src/parse/args.rs
+++ b/src/parse/args.rs
@@ -21,7 +21,7 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_func_args(&mut self) -> SassResult<FuncArgs> {
         let mut args: Vec<FuncArg> = Vec::new();
         let mut close_paren_span: Span = match self.toks.peek() {
-            Some(Token { pos, .. }) => *pos,
+            Some(Token { pos, ..
}) => pos, None => return Err(("expected \")\".", self.span_before).into()), }; @@ -164,7 +164,7 @@ impl<'a> Parser<'a> { } if let Some(Token { kind: '$', pos }) = self.toks.peek() { - span = span.merge(*pos); + span = span.merge(pos); self.toks.advance_cursor(); let v = peek_ident_no_interpolation(self.toks, false, self.span_before)?; @@ -229,10 +229,9 @@ impl<'a> Parser<'a> { continue; } Some(Token { kind: '.', pos }) => { - let pos = *pos; self.toks.next(); - if let Some(Token { kind: '.', pos }) = self.toks.peek().copied() { + if let Some(Token { kind: '.', pos }) = self.toks.peek() { if !name.is_empty() { return Err(("expected \")\".", pos).into()); } @@ -324,7 +323,7 @@ impl<'a> Parser<'a> { return Ok(CallArgs(args, span)); } Some(Token { kind: ',', pos }) => { - span = span.merge(*pos); + span = span.merge(pos); self.toks.next(); self.whitespace_or_comment(); continue; @@ -341,14 +340,14 @@ impl<'a> Parser<'a> { self.expect_char('.')?; } Some(Token { pos, .. }) => { - return Err(("expected \")\".", *pos).into()); + return Err(("expected \")\".", pos).into()); } None => return Err(("expected \")\".", span).into()), } } Some(Token { pos, .. }) => { value?; - return Err(("expected \")\".", *pos).into()); + return Err(("expected \")\".", pos).into()); } None => return Err(("expected \")\".", span).into()), } diff --git a/src/parse/control_flow.rs b/src/parse/control_flow.rs index 238bc35..748b78c 100644 --- a/src/parse/control_flow.rs +++ b/src/parse/control_flow.rs @@ -1,10 +1,10 @@ use codemap::Spanned; use num_traits::cast::ToPrimitive; -use peekmore::PeekMore; use crate::{ common::Identifier, error::SassResult, + lexer::Lexer, parse::{ContextFlags, Parser, Stmt}, unit::Unit, utils::{ @@ -60,7 +60,7 @@ impl<'a> Parser<'a> { loop { self.whitespace_or_comment(); - if let Some(Token { kind: '@', pos }) = self.toks.peek().copied() { + if let Some(Token { kind: '@', pos }) = self.toks.peek() { self.toks.peek_forward(1); let ident = peek_ident_no_interpolation(self.toks, false, pos)?; if ident.as_str() != "else" { @@ -72,7 +72,7 @@ impl<'a> Parser<'a> { break; } self.whitespace_or_comment(); - if let Some(tok) = self.toks.peek().copied() { + if let Some(tok) = self.toks.peek() { match tok.kind { 'i' if matches!( self.toks.peek_forward(1), @@ -182,7 +182,7 @@ impl<'a> Parser<'a> { Some(Token { kind: 't', pos }) | Some(Token { kind: 'T', pos }) | Some(Token { kind: '\\', pos }) => { - let span = *pos; + let span = pos; let mut ident = match peek_ident_no_interpolation(toks, false, span) { Ok(s) => s, Err(..) 
=> return false, @@ -266,7 +266,7 @@ impl<'a> Parser<'a> { ); if self.flags.in_function() { let these_stmts = Parser { - toks: &mut body.clone().into_iter().peekmore(), + toks: &mut Lexer::new(body.clone()), map: self.map, path: self.path, scopes: self.scopes, @@ -290,7 +290,7 @@ impl<'a> Parser<'a> { } else { stmts.append( &mut Parser { - toks: &mut body.clone().into_iter().peekmore(), + toks: &mut Lexer::new(body.clone()), map: self.map, path: self.path, scopes: self.scopes, @@ -343,7 +343,7 @@ impl<'a> Parser<'a> { while val.node.is_true() { if self.flags.in_function() { let these_stmts = Parser { - toks: &mut body.clone().into_iter().peekmore(), + toks: &mut Lexer::new(body.clone()), map: self.map, path: self.path, scopes: self.scopes, @@ -367,7 +367,7 @@ impl<'a> Parser<'a> { } else { stmts.append( &mut Parser { - toks: &mut body.clone().into_iter().peekmore(), + toks: &mut Lexer::new(body.clone()), map: self.map, path: self.path, scopes: self.scopes, @@ -455,7 +455,7 @@ impl<'a> Parser<'a> { if self.flags.in_function() { let these_stmts = Parser { - toks: &mut body.clone().into_iter().peekmore(), + toks: &mut Lexer::new(body.clone()), map: self.map, path: self.path, scopes: self.scopes, @@ -479,7 +479,7 @@ impl<'a> Parser<'a> { } else { stmts.append( &mut Parser { - toks: &mut body.clone().into_iter().peekmore(), + toks: &mut Lexer::new(body.clone()), map: self.map, path: self.path, scopes: self.scopes, diff --git a/src/parse/function.rs b/src/parse/function.rs index 5b02227..44aeac4 100644 --- a/src/parse/function.rs +++ b/src/parse/function.rs @@ -1,11 +1,11 @@ use codemap::Spanned; -use peekmore::PeekMore; use crate::{ args::CallArgs, atrule::Function, common::{unvendor, Identifier}, error::SassResult, + lexer::Lexer, scope::Scopes, utils::{read_until_closing_curly_brace, read_until_semicolon_or_closing_curly_brace}, value::{SassFunction, Value}, @@ -100,7 +100,7 @@ impl<'a> Parser<'a> { }; let mut return_value = Parser { - toks: &mut body.into_iter().peekmore(), + toks: &mut Lexer::new(body), map: self.map, path: self.path, scopes: if declared_at_root { diff --git a/src/parse/ident.rs b/src/parse/ident.rs index 28f8fc1..dcf74e0 100644 --- a/src/parse/ident.rs +++ b/src/parse/ident.rs @@ -20,7 +20,7 @@ impl<'a> Parser<'a> { if unit && tok.kind == '-' { // Disallow `-` followed by a dot or a digit digit in units. let second = match self.toks.peek_forward(1) { - Some(v) => *v, + Some(v) => v, None => break, }; @@ -59,7 +59,7 @@ impl<'a> Parser<'a> { buf.push_str(&self.escape(false)?); } '#' => { - if let Some(Token { kind: '{', .. }) = self.toks.peek_forward(1).copied() { + if let Some(Token { kind: '{', .. 
}) = self.toks.peek_forward(1) { self.toks.next(); self.toks.next(); // TODO: if ident, interpolate literally @@ -136,7 +136,6 @@ impl<'a> Parser<'a> { let Token { kind, pos } = self .toks .peek() - .copied() .ok_or(("Expected identifier.", self.span_before))?; let mut text = String::new(); if kind == '-' { @@ -163,7 +162,7 @@ impl<'a> Parser<'a> { } let Token { kind: first, pos } = match self.toks.peek() { - Some(v) => *v, + Some(v) => v, None => return Err(("Expected identifier.", self.span_before).into()), }; @@ -205,7 +204,7 @@ impl<'a> Parser<'a> { .peek() .ok_or(("Expected identifier.", self.span_before))?; let mut text = String::new(); - if kind == &'-' { + if kind == '-' { self.toks.next(); text.push('-'); @@ -264,7 +263,7 @@ impl<'a> Parser<'a> { } '#' => { if let Some(Token { kind: '{', pos }) = self.toks.peek() { - self.span_before = self.span_before.merge(*pos); + self.span_before = self.span_before.merge(pos); self.toks.next(); let interpolation = self.parse_interpolation()?; match interpolation.node { diff --git a/src/parse/import.rs b/src/parse/import.rs index a55c26d..e0c08b6 100644 --- a/src/parse/import.rs +++ b/src/parse/import.rs @@ -1,7 +1,6 @@ use std::{ffi::OsStr, fs, path::Path, path::PathBuf}; use codemap::{Span, Spanned}; -use peekmore::PeekMore; use crate::{ common::{ListSeparator::Comma, QuoteKind}, @@ -104,10 +103,7 @@ impl<'a> Parser<'a> { String::from_utf8(fs::read(&name)?)?, ); return Parser { - toks: &mut Lexer::new(&file) - .collect::>() - .into_iter() - .peekmore(), + toks: &mut Lexer::new_from_file(&file), map: self.map, path: &name, scopes: self.scopes, @@ -141,7 +137,7 @@ impl<'a> Parser<'a> { Some(Token { kind: '\'', .. }) | Some(Token { kind: '"', .. }) | Some(Token { kind: 'u', .. }) => {} - Some(Token { pos, .. }) => return Err(("Expected string.", *pos).into()), + Some(Token { pos, .. }) => return Err(("Expected string.", pos).into()), None => return Err(("expected more input.", self.span_before).into()), }; let Spanned { diff --git a/src/parse/keyframes.rs b/src/parse/keyframes.rs index 443db16..a09c9fa 100644 --- a/src/parse/keyframes.rs +++ b/src/parse/keyframes.rs @@ -1,10 +1,9 @@ use std::fmt; -use peekmore::PeekMore; - use crate::{ atrule::keyframes::{Keyframes, KeyframesSelector}, error::SassResult, + lexer::Lexer, parse::Stmt, utils::eat_whole_number, Token, @@ -34,7 +33,7 @@ impl<'a, 'b> KeyframesSelectorParser<'a, 'b> { fn parse_keyframes_selector(&mut self) -> SassResult> { let mut selectors = Vec::new(); self.parser.whitespace_or_comment(); - while let Some(tok) = self.parser.toks.peek().copied() { + while let Some(tok) = self.parser.toks.peek() { match tok.kind { 't' | 'T' => { let mut ident = self.parser.parse_identifier()?; @@ -128,7 +127,7 @@ impl<'a> Parser<'a> { span = span.merge(tok.pos()); match tok.kind { '#' => { - if let Some(Token { kind: '{', .. }) = self.toks.peek().copied() { + if let Some(Token { kind: '{', .. 
}) = self.toks.peek() { self.toks.next(); string.push_str(&self.parse_interpolation()?.to_css_string(span)?); } else { @@ -154,13 +153,11 @@ impl<'a> Parser<'a> { string.push(' '); } '{' => { - // we must collect here because the parser is not generic over iterator - #[allow(clippy::needless_collect)] let sel_toks: Vec = string.chars().map(|x| Token::new(span, x)).collect(); let selector = KeyframesSelectorParser::new(&mut Parser { - toks: &mut sel_toks.into_iter().peekmore(), + toks: &mut Lexer::new(sel_toks), map: self.map, path: self.path, scopes: self.scopes, diff --git a/src/parse/media.rs b/src/parse/media.rs index 253d6de..4cd27a7 100644 --- a/src/parse/media.rs +++ b/src/parse/media.rs @@ -98,7 +98,7 @@ impl<'a> Parser<'a> { return Ok(buf); } - let next_tok = self.toks.peek().copied(); + let next_tok = self.toks.peek(); let is_angle = next_tok.map_or(false, |t| t.kind == '<' || t.kind == '>'); if is_angle || matches!(next_tok, Some(Token { kind: '=', .. })) { buf.push(' '); diff --git a/src/parse/mixin.rs b/src/parse/mixin.rs index b6ebf40..7fa248b 100644 --- a/src/parse/mixin.rs +++ b/src/parse/mixin.rs @@ -2,12 +2,11 @@ use std::mem; use codemap::Spanned; -use peekmore::PeekMore; - use crate::{ args::{CallArgs, FuncArgs}, atrule::mixin::{Content, Mixin, UserDefinedMixin}, error::SassResult, + lexer::Lexer, scope::Scopes, utils::read_until_closing_curly_brace, Token, @@ -124,7 +123,7 @@ impl<'a> Parser<'a> { let mut toks = read_until_closing_curly_brace(self.toks)?; if let Some(tok) = self.toks.peek() { - toks.push(*tok); + toks.push(tok); self.toks.next(); } Some(toks) @@ -167,7 +166,7 @@ impl<'a> Parser<'a> { }); let body = Parser { - toks: &mut body.into_iter().peekmore(), + toks: &mut Lexer::new(body), map: self.map, path: self.path, scopes: self.scopes, @@ -234,7 +233,7 @@ impl<'a> Parser<'a> { let stmts = if let Some(body) = content.content.clone() { Parser { - toks: &mut body.into_iter().peekmore(), + toks: &mut Lexer::new(body), map: self.map, path: self.path, scopes: &mut scope_at_decl, diff --git a/src/parse/mod.rs b/src/parse/mod.rs index a00af5c..617e7ad 100644 --- a/src/parse/mod.rs +++ b/src/parse/mod.rs @@ -1,7 +1,6 @@ -use std::{convert::TryFrom, path::Path, vec::IntoIter}; +use std::{convert::TryFrom, path::Path}; use codemap::{CodeMap, Span, Spanned}; -use peekmore::{PeekMore, PeekMoreIterator}; use crate::{ atrule::{ @@ -12,6 +11,7 @@ use crate::{ }, builtin::modules::{ModuleConfig, Modules}, error::SassResult, + lexer::Lexer, scope::{Scope, Scopes}, selector::{ ComplexSelectorComponent, ExtendRule, ExtendedSelector, Extender, Selector, SelectorParser, @@ -68,7 +68,7 @@ pub(crate) enum Stmt { /// hit (if there is one) is not important for now. // todo: merge at_root and at_root_has_selector into an enum pub(crate) struct Parser<'a> { - pub toks: &'a mut PeekMoreIterator>, + pub toks: &'a mut Lexer, pub map: &'a mut CodeMap, pub path: &'a Path, pub global_scope: &'a mut Scope, @@ -114,19 +114,19 @@ impl<'a> Parser<'a> { pub fn expect_char(&mut self, c: char) -> SassResult<()> { match self.toks.peek() { - Some(Token { kind, pos }) if *kind == c => { - self.span_before = *pos; + Some(Token { kind, pos }) if kind == c => { + self.span_before = pos; self.toks.next(); Ok(()) } - Some(Token { pos, .. }) => Err((format!("expected \"{}\".", c), *pos).into()), + Some(Token { pos, .. 
}) => Err((format!("expected \"{}\".", c), pos).into()), None => Err((format!("expected \"{}\".", c), self.span_before).into()), } } pub fn consume_char_if_exists(&mut self, c: char) -> bool { if let Some(Token { kind, .. }) = self.toks.peek() { - if *kind == c { + if kind == c { self.toks.next(); return true; } @@ -150,7 +150,7 @@ impl<'a> Parser<'a> { if self.flags.in_function() && !stmts.is_empty() { return Ok(stmts); } - self.span_before = *pos; + self.span_before = pos; match kind { '@' => { self.toks.next(); @@ -207,7 +207,7 @@ impl<'a> Parser<'a> { } = self.parse_value(false, &|_| false)?; span.merge(kind_string.span); if let Some(Token { kind: ';', pos }) = self.toks.peek() { - kind_string.span.merge(*pos); + kind_string.span.merge(pos); self.toks.next(); } self.warn(&Spanned { @@ -222,7 +222,7 @@ impl<'a> Parser<'a> { } = self.parse_value(false, &|_| false)?; span.merge(kind_string.span); if let Some(Token { kind: ';', pos }) = self.toks.peek() { - kind_string.span.merge(*pos); + kind_string.span.merge(pos); self.toks.next(); } self.debug(&Spanned { @@ -287,14 +287,14 @@ impl<'a> Parser<'a> { } } '\u{0}'..='\u{8}' | '\u{b}'..='\u{1f}' => { - return Err(("expected selector.", *pos).into()) + return Err(("expected selector.", pos).into()) } '}' => { self.toks.next(); break; } // dart-sass seems to special-case the error message here? - '!' | '{' => return Err(("expected \"}\".", *pos).into()), + '!' | '{' => return Err(("expected \"}\".", pos).into()), _ => { if self.flags.in_function() { return Err(( @@ -403,7 +403,7 @@ impl<'a> Parser<'a> { span = span.merge(pos); match kind { '#' => { - if let Some(Token { kind: '{', .. }) = self.toks.peek().copied() { + if let Some(Token { kind: '{', .. }) = self.toks.peek() { self.toks.next(); string.push_str(&self.parse_interpolation()?.to_css_string(span)?); } else { @@ -447,15 +447,13 @@ impl<'a> Parser<'a> { return Err(("expected \"{\".", span).into()); } - // we must collect here because the parser isn't generic over the iterator - #[allow(clippy::needless_collect)] let sel_toks: Vec = string.chars().map(|x| Token::new(span, x)).collect(); - let mut iter = sel_toks.into_iter().peekmore(); + let mut lexer = Lexer::new(sel_toks); let selector = SelectorParser::new( &mut Parser { - toks: &mut iter, + toks: &mut lexer, map: self.map, path: self.path, scopes: self.scopes, @@ -635,7 +633,7 @@ impl<'a> Parser<'a> { '{' => break, '#' => { if let Some(Token { kind: '{', pos }) = self.toks.peek() { - self.span_before = self.span_before.merge(*pos); + self.span_before = self.span_before.merge(pos); self.toks.next(); params.push_str(&self.parse_interpolation_as_string()?); } else { @@ -780,9 +778,7 @@ impl<'a> Parser<'a> { // return Err(("@extend may only be used within style rules.", self.span_before).into()); // } let (value, is_optional) = Parser { - toks: &mut read_until_semicolon_or_closing_curly_brace(self.toks)? 
- .into_iter() - .peekmore(), + toks: &mut Lexer::new(read_until_semicolon_or_closing_curly_brace(self.toks)?), map: self.map, path: self.path, scopes: self.scopes, @@ -888,7 +884,7 @@ impl<'a> Parser<'a> { match tok.kind { '{' => break, '#' => { - if let Some(Token { kind: '{', pos }) = self.toks.peek().copied() { + if let Some(Token { kind: '{', pos }) = self.toks.peek() { self.toks.next(); self.span_before = pos; let interpolation = self.parse_interpolation()?; diff --git a/src/parse/module.rs b/src/parse/module.rs index 4c149b4..638ace6 100644 --- a/src/parse/module.rs +++ b/src/parse/module.rs @@ -1,7 +1,6 @@ use std::{convert::TryFrom, fs}; use codemap::Spanned; -use peekmore::PeekMore; use crate::{ atrule::AtRuleKind, @@ -116,10 +115,7 @@ impl<'a> Parser<'a> { .add_file(name.to_owned(), String::from_utf8(fs::read(&import)?)?); let stmts = Parser { - toks: &mut Lexer::new(&file) - .collect::>() - .into_iter() - .peekmore(), + toks: &mut Lexer::new_from_file(&file), map: self.map, path: &import, scopes: self.scopes, diff --git a/src/parse/style.rs b/src/parse/style.rs index dcc6a45..65beda7 100644 --- a/src/parse/style.rs +++ b/src/parse/style.rs @@ -27,36 +27,36 @@ impl<'a> Parser<'a> { return None; } '(' => { - toks.push(*tok); + toks.push(tok); self.toks.peek_forward(1); let mut scope = 0; while let Some(tok) = self.toks.peek() { match tok.kind { ')' => { if scope == 0 { - toks.push(*tok); + toks.push(tok); self.toks.peek_forward(1); break; } scope -= 1; - toks.push(*tok); + toks.push(tok); self.toks.peek_forward(1); } '(' => { - toks.push(*tok); + toks.push(tok); self.toks.peek_forward(1); scope += 1; } _ => { - toks.push(*tok); + toks.push(tok); self.toks.peek_forward(1); } } } } _ => { - toks.push(*tok); + toks.push(tok); self.toks.peek_forward(1); } } @@ -123,7 +123,7 @@ impl<'a> Parser<'a> { property.push(':'); SelectorOrStyle::Selector(property) } - c if is_name(*c) => { + c if is_name(c) => { if let Some(toks) = self.parse_style_value_when_no_space_after_semicolon() { @@ -201,7 +201,7 @@ impl<'a> Parser<'a> { ) -> SassResult> { let mut styles = Vec::new(); self.whitespace(); - while let Some(tok) = self.toks.peek().copied() { + while let Some(tok) = self.toks.peek() { match tok.kind { '{' => { self.toks.next(); diff --git a/src/parse/value/css_function.rs b/src/parse/value/css_function.rs index 0115b91..7c5b432 100644 --- a/src/parse/value/css_function.rs +++ b/src/parse/value/css_function.rs @@ -27,7 +27,7 @@ impl<'a> Parser<'a> { } '#' => { if let Some(Token { kind: '{', pos }) = self.toks.peek() { - self.span_before = *pos; + self.span_before = pos; self.toks.next(); let interpolation = self.parse_interpolation()?; buf.push_str(&interpolation.node.to_css_string(interpolation.span)?); @@ -315,7 +315,7 @@ impl<'a> Parser<'a> { } buf.push_str(&num); } else { - self.toks.move_cursor_back().unwrap(); + self.toks.move_cursor_back(); } let next = match self.toks.peek() { @@ -365,7 +365,7 @@ impl<'a> Parser<'a> { fn peek_escape(&mut self) -> SassResult { let mut value = 0; let first = match self.toks.peek() { - Some(t) => *t, + Some(t) => t, None => return Ok(String::new()), }; let mut span = first.pos; diff --git a/src/parse/value/parse.rs b/src/parse/value/parse.rs index 210e59a..3509a7f 100644 --- a/src/parse/value/parse.rs +++ b/src/parse/value/parse.rs @@ -1,4 +1,4 @@ -use std::{iter::Iterator, mem, vec::IntoIter}; +use std::{iter::Iterator, mem}; use num_bigint::BigInt; use num_rational::{BigRational, Rational64}; @@ -6,13 +6,12 @@ use num_traits::{pow, One, ToPrimitive}; 
use codemap::{Span, Spanned}; -use peekmore::{PeekMore, PeekMoreIterator}; - use crate::{ builtin::GLOBAL_FUNCTIONS, color::{Color, NAMED_COLORS}, common::{unvendor, Brackets, Identifier, ListSeparator, Op, QuoteKind}, error::SassResult, + lexer::Lexer, unit::Unit, utils::{eat_whole_number, is_name, IsWhitespace, ParsedNumber}, value::{Number, SassFunction, SassMap, Value}, @@ -53,7 +52,7 @@ impl<'a> Parser<'a> { pub(crate) fn parse_value( &mut self, in_paren: bool, - predicate: &dyn Fn(&mut PeekMoreIterator>) -> bool, + predicate: &dyn Fn(&mut Lexer) -> bool, ) -> SassResult> { self.whitespace(); @@ -62,7 +61,7 @@ impl<'a> Parser<'a> { | Some(Token { kind: ';', .. }) | Some(Token { kind: '{', .. }) | None => return Err(("Expected expression.", self.span_before).into()), - Some(Token { pos, .. }) => *pos, + Some(Token { pos, .. }) => pos, }; if predicate(self.toks) { @@ -173,7 +172,7 @@ impl<'a> Parser<'a> { in_paren: bool, ) -> SassResult> { Parser { - toks: &mut toks.into_iter().peekmore(), + toks: &mut Lexer::new(toks), map: self.map, path: self.path, scopes: self.scopes, @@ -291,7 +290,7 @@ impl<'a> Parser<'a> { fn parse_ident_value( &mut self, - predicate: &dyn Fn(&mut PeekMoreIterator>) -> bool, + predicate: &dyn Fn(&mut Lexer) -> bool, ) -> SassResult> { let Spanned { node: mut s, span } = self.parse_identifier()?; @@ -363,7 +362,7 @@ impl<'a> Parser<'a> { fn parse_number( &mut self, - predicate: &dyn Fn(&mut PeekMoreIterator>) -> bool, + predicate: &dyn Fn(&mut Lexer) -> bool, ) -> SassResult> { let mut span = self.toks.peek().unwrap().pos; let mut whole = eat_whole_number(self.toks); @@ -375,7 +374,7 @@ impl<'a> Parser<'a> { }); } - let next_tok = *self.toks.peek().unwrap(); + let next_tok = self.toks.peek().unwrap(); let dec_len = if next_tok.kind == '.' { self.toks.next(); @@ -395,7 +394,7 @@ impl<'a> Parser<'a> { let mut times_ten = String::new(); let mut times_ten_is_postive = true; if let Some(Token { kind: 'e', .. }) | Some(Token { kind: 'E', .. }) = self.toks.peek() { - if let Some(&tok) = self.toks.peek_next() { + if let Some(tok) = self.toks.peek_next() { if tok.kind == '-' { self.toks.next(); times_ten_is_postive = false; @@ -404,13 +403,11 @@ impl<'a> Parser<'a> { times_ten = eat_whole_number(self.toks); if times_ten.is_empty() { - return Err( - ("Expected digit.", self.toks.peek().unwrap_or(&tok).pos).into() - ); + return Err(("Expected digit.", self.toks.peek().unwrap_or(tok).pos).into()); } else if times_ten.len() > 2 { return Err(( "Exponent too negative.", - self.toks.peek().unwrap_or(&tok).pos, + self.toks.peek().unwrap_or(tok).pos, ) .into()); } @@ -420,15 +417,15 @@ impl<'a> Parser<'a> { if times_ten.len() > 2 { return Err( - ("Exponent too large.", self.toks.peek().unwrap_or(&tok).pos).into(), + ("Exponent too large.", self.toks.peek().unwrap_or(tok).pos).into() ); } } } } - if let Ok(Some(Token { pos, .. })) = self.toks.peek_previous() { - span = span.merge(*pos); + if let Some(Token { pos, .. 
}) = self.toks.peek_previous() { + span = span.merge(pos); } self.toks.reset_cursor(); @@ -445,7 +442,7 @@ impl<'a> Parser<'a> { self.whitespace_or_comment(); Ok(if let Some(Token { kind: ']', pos }) = self.toks.peek() { - span = span.merge(*pos); + span = span.merge(pos); self.toks.next(); IntermediateValue::Value(HigherIntermediateValue::Literal(Value::List( Vec::new(), @@ -473,14 +470,14 @@ impl<'a> Parser<'a> { fn parse_dimension( &mut self, - predicate: &dyn Fn(&mut PeekMoreIterator>) -> bool, + predicate: &dyn Fn(&mut Lexer) -> bool, ) -> SassResult> { let Spanned { node: val, mut span, } = self.parse_number(predicate)?; let unit = if let Some(tok) = self.toks.peek() { - let Token { kind, .. } = *tok; + let Token { kind, .. } = tok; match kind { 'a'..='z' | 'A'..='Z' | '_' | '\\' | '\u{7f}'..=std::char::MAX => { let u = self.parse_identifier_no_interpolation(true)?; @@ -488,7 +485,7 @@ impl<'a> Parser<'a> { Unit::from(u.node) } '-' => { - if let Some(Token { kind, .. }) = self.toks.peek_next().copied() { + if let Some(Token { kind, .. }) = self.toks.peek_next() { self.toks.reset_cursor(); if matches!(kind, 'a'..='z' | 'A'..='Z' | '_' | '\\' | '\u{7f}'..=std::char::MAX) { @@ -679,7 +676,7 @@ impl<'a> Parser<'a> { fn in_interpolated_identifier_body(&mut self) -> bool { match self.toks.peek() { Some(Token { kind: '\\', .. }) => true, - Some(Token { kind, .. }) if is_name(*kind) => true, + Some(Token { kind, .. }) if is_name(kind) => true, Some(Token { kind: '#', .. }) => { let next_is_curly = matches!(self.toks.peek_next(), Some(Token { kind: '{', .. })); self.toks.reset_cursor(); @@ -701,9 +698,9 @@ impl<'a> Parser<'a> { for _ in 0..6 { if let Some(Token { kind, pos }) = self.toks.peek() { if kind.is_ascii_hexdigit() { - span = span.merge(*pos); - self.span_before = *pos; - buf.push(*kind); + span = span.merge(pos); + self.span_before = pos; + buf.push(kind); self.toks.next(); } else { break; @@ -715,8 +712,8 @@ impl<'a> Parser<'a> { buf.push('?'); for _ in 0..(8_usize.saturating_sub(buf.len())) { if let Some(Token { kind: '?', pos }) = self.toks.peek() { - span = span.merge(*pos); - self.span_before = *pos; + span = span.merge(pos); + self.span_before = pos; buf.push('?'); self.toks.next(); } else { @@ -743,9 +740,9 @@ impl<'a> Parser<'a> { found_hex_digit = true; if let Some(Token { kind, pos }) = self.toks.peek() { if kind.is_ascii_hexdigit() { - span = span.merge(*pos); - self.span_before = *pos; - buf.push(*kind); + span = span.merge(pos); + self.span_before = pos; + buf.push(kind); self.toks.next(); } else { break; @@ -773,7 +770,7 @@ impl<'a> Parser<'a> { fn parse_intermediate_value( &mut self, - predicate: &dyn Fn(&mut PeekMoreIterator>) -> bool, + predicate: &dyn Fn(&mut Lexer) -> bool, ) -> Option>> { if predicate(self.toks) { return None; @@ -832,7 +829,7 @@ impl<'a> Parser<'a> { } '#' => { if let Some(Token { kind: '{', pos }) = self.toks.peek_forward(1) { - self.span_before = *pos; + self.span_before = pos; self.toks.reset_cursor(); return Some(self.parse_ident_value(predicate)); } @@ -1048,7 +1045,7 @@ impl<'a> Parser<'a> { struct IntermediateValueIterator<'a, 'b: 'a> { parser: &'a mut Parser<'b>, peek: Option>>, - predicate: &'a dyn Fn(&mut PeekMoreIterator>) -> bool, + predicate: &'a dyn Fn(&mut Lexer) -> bool, } impl<'a, 'b: 'a> Iterator for IntermediateValueIterator<'a, 'b> { @@ -1063,10 +1060,7 @@ impl<'a, 'b: 'a> Iterator for IntermediateValueIterator<'a, 'b> { } impl<'a, 'b: 'a> IntermediateValueIterator<'a, 'b> { - pub fn new( - parser: &'a mut Parser<'b>, - 
predicate: &'a dyn Fn(&mut PeekMoreIterator>) -> bool, - ) -> Self { + pub fn new(parser: &'a mut Parser<'b>, predicate: &'a dyn Fn(&mut Lexer) -> bool) -> Self { Self { parser, peek: None, diff --git a/src/selector/attribute.rs b/src/selector/attribute.rs index bc7cde9..294e986 100644 --- a/src/selector/attribute.rs +++ b/src/selector/attribute.rs @@ -141,7 +141,7 @@ impl Attribute { }; parser.whitespace(); - let modifier = match parser.toks.peek().copied() { + let modifier = match parser.toks.peek() { Some(Token { kind: c @ 'a'..='z', .. diff --git a/src/selector/parse.rs b/src/selector/parse.rs index 93d5d2e..01b37b0 100644 --- a/src/selector/parse.rs +++ b/src/selector/parse.rs @@ -198,7 +198,7 @@ impl<'a, 'b> SelectorParser<'a, 'b> { let mut components = vec![self.parse_simple_selector(None)?]; while let Some(Token { kind, .. }) = self.parser.toks.peek() { - if !is_simple_selector_start(*kind) { + if !is_simple_selector_start(kind) { break; } @@ -219,13 +219,13 @@ impl<'a, 'b> SelectorParser<'a, 'b> { /// [the CSS algorithm]: https://drafts.csswg.org/css-syntax-3/#would-start-an-identifier fn looking_at_identifier(&mut self) -> bool { match self.parser.toks.peek() { - Some(Token { kind, .. }) if is_name_start(*kind) || kind == &'\\' => return true, + Some(Token { kind, .. }) if is_name_start(kind) || kind == '\\' => return true, Some(Token { kind: '-', .. }) => {} Some(..) | None => return false, } match self.parser.toks.peek_forward(1) { - Some(Token { kind, .. }) if is_name_start(*kind) || kind == &'-' || kind == &'\\' => { + Some(Token { kind, .. }) if is_name_start(kind) || kind == '-' || kind == '\\' => { self.parser.toks.reset_cursor(); true } @@ -391,7 +391,7 @@ impl<'a, 'b> SelectorParser<'a, 'b> { match self.parser.toks.peek() { Some(Token { kind: '*', pos }) => { - self.parser.span_before = self.parser.span_before.merge(*pos); + self.parser.span_before = self.parser.span_before.merge(pos); self.parser.toks.next(); if let Some(Token { kind: '|', .. }) = self.parser.toks.peek() { self.parser.toks.next(); @@ -409,7 +409,7 @@ impl<'a, 'b> SelectorParser<'a, 'b> { return Ok(SimpleSelector::Universal(Namespace::None)); } Some(Token { kind: '|', pos }) => { - self.parser.span_before = self.parser.span_before.merge(*pos); + self.parser.span_before = self.parser.span_before.merge(pos); self.parser.toks.next(); match self.parser.toks.peek() { Some(Token { kind: '*', .. 
}) => { diff --git a/src/utils/comment_whitespace.rs b/src/utils/comment_whitespace.rs index 386ecd0..7bfbb26 100644 --- a/src/utils/comment_whitespace.rs +++ b/src/utils/comment_whitespace.rs @@ -1,8 +1,4 @@ -use std::vec::IntoIter; - -use peekmore::PeekMoreIterator; - -use crate::Token; +use crate::{lexer::Lexer, Token}; use super::peek_until_newline; @@ -16,9 +12,7 @@ impl IsWhitespace for char { } } -pub(crate) fn devour_whitespace, W: IsWhitespace>( - s: &mut PeekMoreIterator, -) -> bool { +pub(crate) fn devour_whitespace(s: &mut Lexer) -> bool { let mut found_whitespace = false; while let Some(w) = s.peek() { if !w.is_whitespace() { @@ -30,7 +24,7 @@ pub(crate) fn devour_whitespace, W: IsWhitespace>( found_whitespace } -pub(crate) fn peek_whitespace(s: &mut PeekMoreIterator>) -> bool { +pub(crate) fn peek_whitespace(s: &mut Lexer) -> bool { let mut found_whitespace = false; while let Some(w) = s.peek() { if !w.is_whitespace() { @@ -42,7 +36,7 @@ pub(crate) fn peek_whitespace(s: &mut PeekMoreIterator>) -> bool found_whitespace } -pub(crate) fn peek_whitespace_or_comment(s: &mut PeekMoreIterator>) -> bool { +pub(crate) fn peek_whitespace_or_comment(s: &mut Lexer) -> bool { let mut found_whitespace = false; while let Some(w) = s.peek() { match w.kind { @@ -83,7 +77,7 @@ pub(crate) fn peek_whitespace_or_comment(s: &mut PeekMoreIterator>(toks: &mut PeekMoreIterator) { +pub(crate) fn read_until_newline(toks: &mut Lexer) { for tok in toks { if tok.kind == '\n' { return; diff --git a/src/utils/number.rs b/src/utils/number.rs index 1332f37..aecb6be 100644 --- a/src/utils/number.rs +++ b/src/utils/number.rs @@ -1,8 +1,4 @@ -use std::vec::IntoIter; - -use peekmore::PeekMoreIterator; - -use crate::Token; +use crate::lexer::Lexer; #[derive(Debug)] pub(crate) struct ParsedNumber { @@ -46,7 +42,7 @@ impl ParsedNumber { } } -pub(crate) fn eat_whole_number(toks: &mut PeekMoreIterator>) -> String { +pub(crate) fn eat_whole_number(toks: &mut Lexer) -> String { let mut buf = String::new(); while let Some(c) = toks.peek() { if !c.kind.is_ascii_digit() { diff --git a/src/utils/peek_until.rs b/src/utils/peek_until.rs index 265934b..4659802 100644 --- a/src/utils/peek_until.rs +++ b/src/utils/peek_until.rs @@ -1,19 +1,13 @@ -use std::vec::IntoIter; - use codemap::{Span, Spanned}; -use peekmore::PeekMoreIterator; - -use crate::{error::SassResult, Token}; +use crate::{error::SassResult, lexer::Lexer, Token}; use super::{as_hex, hex_char_for, is_name, is_name_start, peek_whitespace}; -pub(crate) fn peek_until_closing_curly_brace( - toks: &mut PeekMoreIterator>, -) -> SassResult> { +pub(crate) fn peek_until_closing_curly_brace(toks: &mut Lexer) -> SassResult> { let mut t = Vec::new(); let mut nesting = 0; - while let Some(tok) = toks.peek().copied() { + while let Some(tok) = toks.peek() { match tok.kind { q @ '"' | q @ '\'' => { t.push(tok); @@ -35,7 +29,7 @@ pub(crate) fn peek_until_closing_curly_brace( toks.advance_cursor(); } '/' => { - let next = *toks + let next = toks .peek_forward(1) .ok_or(("Expected expression.", tok.pos))?; match toks.peek() { @@ -54,12 +48,9 @@ pub(crate) fn peek_until_closing_curly_brace( Ok(t) } -fn peek_until_closing_quote( - toks: &mut PeekMoreIterator>, - q: char, -) -> SassResult> { +fn peek_until_closing_quote(toks: &mut Lexer, q: char) -> SassResult> { let mut t = Vec::new(); - while let Some(tok) = toks.peek().copied() { + while let Some(tok) = toks.peek() { match tok.kind { '"' if q == '"' => { t.push(tok); @@ -74,7 +65,7 @@ fn peek_until_closing_quote( '\\' => { 
t.push(tok); t.push(match toks.peek_forward(1) { - Some(tok) => *tok, + Some(tok) => tok, None => return Err((format!("Expected {}.", q), tok.pos).into()), }); } @@ -85,7 +76,7 @@ fn peek_until_closing_quote( None => return Err((format!("Expected {}.", q), tok.pos).into()), }; if next.kind == '{' { - t.push(*next); + t.push(next); toks.peek_forward(1); t.append(&mut peek_until_closing_curly_brace(toks)?); } @@ -97,7 +88,7 @@ fn peek_until_closing_quote( Ok(t) } -pub(crate) fn peek_until_newline(toks: &mut PeekMoreIterator>) { +pub(crate) fn peek_until_newline(toks: &mut Lexer) { while let Some(tok) = toks.peek() { if tok.kind == '\n' { break; @@ -106,10 +97,10 @@ pub(crate) fn peek_until_newline(toks: &mut PeekMoreIterator>) { } } -pub(crate) fn peek_escape(toks: &mut PeekMoreIterator>) -> SassResult { +pub(crate) fn peek_escape(toks: &mut Lexer) -> SassResult { let mut value = 0; let first = match toks.peek() { - Some(t) => *t, + Some(t) => t, None => return Ok(String::new()), }; let mut span = first.pos; @@ -155,7 +146,7 @@ pub(crate) fn peek_escape(toks: &mut PeekMoreIterator>) -> SassR } pub(crate) fn peek_ident_no_interpolation( - toks: &mut PeekMoreIterator>, + toks: &mut Lexer, unit: bool, span_before: Span, ) -> SassResult> { @@ -200,7 +191,7 @@ pub(crate) fn peek_ident_no_interpolation( } fn peek_ident_body_no_interpolation( - toks: &mut PeekMoreIterator>, + toks: &mut Lexer, unit: bool, mut span: Span, ) -> SassResult> { @@ -210,7 +201,7 @@ fn peek_ident_body_no_interpolation( if unit && tok.kind == '-' { // Disallow `-` followed by a dot or a digit digit in units. let second = match toks.peek_forward(1) { - Some(v) => *v, + Some(v) => v, None => break, }; diff --git a/src/utils/read_until.rs b/src/utils/read_until.rs index cdc7aaa..8e665f1 100644 --- a/src/utils/read_until.rs +++ b/src/utils/read_until.rs @@ -1,17 +1,11 @@ -use std::vec::IntoIter; - -use peekmore::PeekMoreIterator; - -use crate::{error::SassResult, Token}; +use crate::{error::SassResult, lexer::Lexer, Token}; use super::{devour_whitespace, read_until_newline}; // Eat tokens until an open curly brace // // Does not consume the open curly brace -pub(crate) fn read_until_open_curly_brace( - toks: &mut PeekMoreIterator>, -) -> SassResult> { +pub(crate) fn read_until_open_curly_brace(toks: &mut Lexer) -> SassResult> { let mut t = Vec::new(); let mut n = 0; while let Some(tok) = toks.peek() { @@ -49,9 +43,7 @@ pub(crate) fn read_until_open_curly_brace( Ok(t) } -pub(crate) fn read_until_closing_curly_brace( - toks: &mut PeekMoreIterator>, -) -> SassResult> { +pub(crate) fn read_until_closing_curly_brace(toks: &mut Lexer) -> SassResult> { let mut buf = Vec::new(); let mut nesting = 0; while let Some(tok) = toks.peek() { @@ -104,10 +96,7 @@ pub(crate) fn read_until_closing_curly_brace( /// Read tokens into a vector until a matching closing quote is found /// /// The closing quote is included in the output -pub(crate) fn read_until_closing_quote( - toks: &mut PeekMoreIterator>, - q: char, -) -> SassResult> { +pub(crate) fn read_until_closing_quote(toks: &mut Lexer, q: char) -> SassResult> { let mut t = Vec::new(); while let Some(tok) = toks.next() { match tok.kind { @@ -130,7 +119,7 @@ pub(crate) fn read_until_closing_quote( t.push(tok); match toks.peek() { Some(tok @ Token { kind: '{', .. 
}) => { - t.push(*tok); + t.push(tok); toks.next(); t.append(&mut read_until_closing_curly_brace(toks)?); } @@ -151,7 +140,7 @@ pub(crate) fn read_until_closing_quote( } pub(crate) fn read_until_semicolon_or_closing_curly_brace( - toks: &mut PeekMoreIterator>, + toks: &mut Lexer, ) -> SassResult> { let mut t = Vec::new(); let mut nesting = 0; @@ -202,9 +191,7 @@ pub(crate) fn read_until_semicolon_or_closing_curly_brace( Ok(t) } -pub(crate) fn read_until_closing_paren( - toks: &mut PeekMoreIterator>, -) -> SassResult> { +pub(crate) fn read_until_closing_paren(toks: &mut Lexer) -> SassResult> { let mut t = Vec::new(); let mut scope = 0; while let Some(tok) = toks.next() { diff --git a/src/value/mod.rs b/src/value/mod.rs index 374a57d..ef9d68d 100644 --- a/src/value/mod.rs +++ b/src/value/mod.rs @@ -1,13 +1,12 @@ use std::cmp::Ordering; -use peekmore::PeekMore; - use codemap::{Span, Spanned}; use crate::{ color::Color, common::{Brackets, ListSeparator, Op, QuoteKind}, error::SassResult, + lexer::Lexer, parse::Parser, selector::Selector, unit::Unit, @@ -511,12 +510,12 @@ impl Value { None => return Err((format!("${}: {} is not a valid selector: it must be a string, a list of strings, or a list of lists of strings.", name, self.inspect(parser.span_before)?), parser.span_before).into()), }; Ok(Parser { - toks: &mut string - .chars() - .map(|c| Token::new(parser.span_before, c)) - .collect::>() - .into_iter() - .peekmore(), + toks: &mut Lexer::new( + string + .chars() + .map(|c| Token::new(parser.span_before, c)) + .collect::>(), + ), map: parser.map, path: parser.path, scopes: parser.scopes,
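One more note on the constructor side of the diff: `Lexer::new_from_file` now does all character-level work up front. `TokenLexer` walks the file's source once, normalizing CRLF and form feeds to `'\n'` and attaching a span to each character, and the resulting `Vec<Token>` is what the parsers above index into. The sketch below is a rough, runnable approximation of that pass, assuming a local `Token` stand-in that records byte offsets instead of `codemap` subspans; the exact span arithmetic in the real code differs slightly.

```rust
const FORM_FEED: char = '\x0C';

// Local stand-in for the crate's span-carrying `Token`.
#[derive(Debug, Clone, Copy)]
struct Token {
    start: usize, // byte offset into the source
    len: usize,   // bytes consumed from the source
    kind: char,   // normalized character
}

// One token per source character, with "\r\n" and form feed folded into '\n'.
fn tokenize(source: &str) -> Vec<Token> {
    let mut toks = Vec::new();
    let mut chars = source.chars().peekable();
    let mut cursor = 0;

    while let Some(c) = chars.next() {
        let mut len = c.len_utf8();
        let kind = match c {
            FORM_FEED => '\n',
            '\r' => {
                // Collapse "\r\n" into a single '\n' token covering both bytes.
                if chars.peek() == Some(&'\n') {
                    chars.next();
                    len += 1;
                }
                '\n'
            }
            _ => c,
        };
        toks.push(Token { start: cursor, len, kind });
        cursor += len;
    }

    toks
}

fn main() {
    let toks = tokenize("a\r\nb");
    let kinds: String = toks.iter().map(|t| t.kind).collect();
    assert_eq!(kinds, "a\nb");
    assert_eq!(toks[1].len, 2); // the '\n' token covers both '\r' and '\n'
    assert_eq!(toks[2].start, 3); // 'b' starts after the two-byte line ending
    println!("{:?}", toks);
}
```

Doing this conversion eagerly is what lets the rest of the diff drop `peekmore`: once the whole file is a `Vec<Token>`, arbitrary look-ahead is just index arithmetic, so helpers like `read_until_*` and `peek_until_*` can take `&mut Lexer` directly.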