remove peekmore dependency

This commit is contained in:
Connor Skees 2021-07-11 15:12:46 -04:00
parent 7ad2da040f
commit adfecfa83e
24 changed files with 231 additions and 222 deletions

View File

@ -57,7 +57,6 @@ num-traits = "0.2.14"
once_cell = "1.5.2" once_cell = "1.5.2"
rand = { version = "0.8", optional = true } rand = { version = "0.8", optional = true }
codemap = "0.1.3" codemap = "0.1.3"
peekmore = "1.0"
wasm-bindgen = { version = "0.2.68", optional = true } wasm-bindgen = { version = "0.2.68", optional = true }
beef = "0.5" beef = "0.5"
phf = { version = "0.9", features = ["macros"] } phf = { version = "0.9", features = ["macros"] }

View File

@ -7,20 +7,87 @@ use crate::Token;
const FORM_FEED: char = '\x0C'; const FORM_FEED: char = '\x0C';
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub(crate) struct Lexer<'a> { pub(crate) struct Lexer {
buf: Peekable<Chars<'a>>, buf: Vec<Token>,
pos: usize, cursor: usize,
file: &'a Arc<File>, amt_peeked: usize,
} }
impl<'a> Iterator for Lexer<'a> { impl Lexer {
/// Absolute index of the current peek position: the committed iterator
/// position plus however far the caller has peeked ahead.
fn peek_cursor(&self) -> usize {
    self.amt_peeked + self.cursor
}
/// Returns the token at the peek cursor without consuming anything.
/// `None` once the peek cursor has run past the end of the buffer.
pub fn peek(&self) -> Option<Token> {
    self.buf.get(self.peek_cursor()).map(|tok| *tok)
}
/// Rewinds the peek cursor back to the committed iterator position,
/// discarding any lookahead.
pub fn reset_cursor(&mut self) {
    self.amt_peeked = 0;
}
/// Moves the peek cursor one token forward without consuming input.
pub fn advance_cursor(&mut self) {
    self.amt_peeked += 1;
}
/// Moves the peek cursor one token back, saturating at the committed
/// iterator position (it never rewinds before `self.cursor`).
pub fn move_cursor_back(&mut self) {
    self.amt_peeked = self.amt_peeked.saturating_sub(1);
}
/// Advances the peek cursor one token and returns the token now under it.
pub fn peek_next(&mut self) -> Option<Token> {
    self.advance_cursor();
    self.peek()
}
/// Returns the token immediately before the peek cursor, without
/// moving it.
///
/// Returns `None` when the peek cursor sits at the very start of the
/// buffer — the previous `self.peek_cursor() - 1` underflowed `usize`
/// there (a panic in debug builds). `checked_sub` matches the guard
/// already used by `peek_backward`.
pub fn peek_previous(&mut self) -> Option<Token> {
    self.buf.get(self.peek_cursor().checked_sub(1)?).copied()
}
/// Jumps the peek cursor `n` tokens forward and returns the token at
/// the new position.
pub fn peek_forward(&mut self, n: usize) -> Option<Token> {
    self.amt_peeked += n;
    self.buf.get(self.peek_cursor()).copied()
}
/// Moves the peek cursor `n` tokens back and returns the token there.
/// Returns `None` (leaving the cursor untouched) if that would rewind
/// before the committed iterator position.
pub fn peek_backward(&mut self, n: usize) -> Option<Token> {
    let rewound = self.amt_peeked.checked_sub(n)?;
    self.amt_peeked = rewound;
    self.peek()
}
/// Commits all peeked-past tokens: the iterator position catches up to
/// the peek cursor and the peek offset resets to zero.
pub fn truncate_iterator_to_cursor(&mut self) {
    self.cursor += std::mem::take(&mut self.amt_peeked);
}
}
impl Iterator for Lexer {
    type Item = Token;

    /// Consumes and returns the next token. The peek offset shrinks by
    /// one (saturating at zero) so the peek cursor keeps pointing at
    /// the same absolute buffer position where possible.
    fn next(&mut self) -> Option<Self::Item> {
        let tok = *self.buf.get(self.cursor)?;
        self.cursor += 1;
        self.amt_peeked = self.amt_peeked.saturating_sub(1);
        Some(tok)
    }
}
/// One-shot lexer that turns a source file's character stream into
/// `Token`s; its output is collected into a `Vec` to build `Lexer`.
struct TokenLexer<'a> {
    // Peekable character stream over the file's source text.
    buf: Peekable<Chars<'a>>,
    // Running position used to build each token's subspan of the file
    // (assumes codemap subspans are offset-based — TODO confirm units).
    cursor: usize,
    file: Arc<File>,
}
impl<'a> Iterator for TokenLexer<'a> {
type Item = Token; type Item = Token;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
let kind = match self.buf.next()? { let kind = match self.buf.next()? {
FORM_FEED => '\n', FORM_FEED => '\n',
'\r' => { '\r' => {
if self.buf.peek() == Some(&'\n') { if self.buf.peek() == Some(&'\n') {
self.pos += 1; self.cursor += 1;
self.buf.next(); self.buf.next();
} }
'\n' '\n'
@ -31,18 +98,29 @@ impl<'a> Iterator for Lexer<'a> {
let pos = self let pos = self
.file .file
.span .span
.subspan(self.pos as u64, (self.pos + len) as u64); .subspan(self.cursor as u64, (self.cursor + len) as u64);
self.pos += len; self.cursor += len;
Some(Token { pos, kind }) Some(Token { pos, kind })
} }
} }
impl<'a> Lexer<'a> { impl Lexer {
pub fn new(file: &'a Arc<File>) -> Lexer<'a> { pub fn new_from_file(file: &Arc<File>) -> Self {
Lexer { let buf = TokenLexer {
file: Arc::clone(file),
buf: file.source().chars().peekable(), buf: file.source().chars().peekable(),
pos: 0, cursor: 0,
file, }
.collect();
Self::new(buf)
}
pub fn new(buf: Vec<Token>) -> Lexer {
Lexer {
buf,
cursor: 0,
amt_peeked: 0,
} }
} }
} }

View File

@ -101,8 +101,6 @@ pub(crate) use beef::lean::Cow;
use codemap::CodeMap; use codemap::CodeMap;
use peekmore::PeekMore;
pub use crate::error::{SassError as Error, SassResult as Result}; pub use crate::error::{SassError as Error, SassResult as Result};
pub(crate) use crate::token::Token; pub(crate) use crate::token::Token;
use crate::{ use crate::{
@ -288,10 +286,7 @@ pub fn from_path(p: &str, options: &Options) -> Result<String> {
let empty_span = file.span.subspan(0, 0); let empty_span = file.span.subspan(0, 0);
let stmts = Parser { let stmts = Parser {
toks: &mut Lexer::new(&file) toks: &mut Lexer::new_from_file(&file),
.collect::<Vec<Token>>()
.into_iter()
.peekmore(),
map: &mut map, map: &mut map,
path: p.as_ref(), path: p.as_ref(),
scopes: &mut Scopes::new(), scopes: &mut Scopes::new(),
@ -334,10 +329,7 @@ pub fn from_string(p: String, options: &Options) -> Result<String> {
let file = map.add_file("stdin".into(), p); let file = map.add_file("stdin".into(), p);
let empty_span = file.span.subspan(0, 0); let empty_span = file.span.subspan(0, 0);
let stmts = Parser { let stmts = Parser {
toks: &mut Lexer::new(&file) toks: &mut Lexer::new_from_file(&file),
.collect::<Vec<Token>>()
.into_iter()
.peekmore(),
map: &mut map, map: &mut map,
path: Path::new(""), path: Path::new(""),
scopes: &mut Scopes::new(), scopes: &mut Scopes::new(),
@ -371,10 +363,7 @@ pub fn from_string(p: String) -> std::result::Result<String, JsValue> {
let empty_span = file.span.subspan(0, 0); let empty_span = file.span.subspan(0, 0);
let stmts = Parser { let stmts = Parser {
toks: &mut Lexer::new(&file) toks: &mut Lexer::new_from_file(&file),
.collect::<Vec<Token>>()
.into_iter()
.peekmore(),
map: &mut map, map: &mut map,
path: Path::new(""), path: Path::new(""),
scopes: &mut Scopes::new(), scopes: &mut Scopes::new(),

View File

@ -410,7 +410,7 @@ impl CompressedFormatter {
fn write_block_entry(&self, buf: &mut Vec<u8>, styles: &[BlockEntry]) -> SassResult<()> { fn write_block_entry(&self, buf: &mut Vec<u8>, styles: &[BlockEntry]) -> SassResult<()> {
let mut styles = styles.iter(); let mut styles = styles.iter();
while let Some(style) = styles.next() { for style in &mut styles {
match style { match style {
BlockEntry::Style(s) => { BlockEntry::Style(s) => {
let value = s.value.node.to_css_string(s.value.span)?; let value = s.value.node.to_css_string(s.value.span)?;

View File

@ -21,7 +21,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_func_args(&mut self) -> SassResult<FuncArgs> { pub(super) fn parse_func_args(&mut self) -> SassResult<FuncArgs> {
let mut args: Vec<FuncArg> = Vec::new(); let mut args: Vec<FuncArg> = Vec::new();
let mut close_paren_span: Span = match self.toks.peek() { let mut close_paren_span: Span = match self.toks.peek() {
Some(Token { pos, .. }) => *pos, Some(Token { pos, .. }) => pos,
None => return Err(("expected \")\".", self.span_before).into()), None => return Err(("expected \")\".", self.span_before).into()),
}; };
@ -164,7 +164,7 @@ impl<'a> Parser<'a> {
} }
if let Some(Token { kind: '$', pos }) = self.toks.peek() { if let Some(Token { kind: '$', pos }) = self.toks.peek() {
span = span.merge(*pos); span = span.merge(pos);
self.toks.advance_cursor(); self.toks.advance_cursor();
let v = peek_ident_no_interpolation(self.toks, false, self.span_before)?; let v = peek_ident_no_interpolation(self.toks, false, self.span_before)?;
@ -229,10 +229,9 @@ impl<'a> Parser<'a> {
continue; continue;
} }
Some(Token { kind: '.', pos }) => { Some(Token { kind: '.', pos }) => {
let pos = *pos;
self.toks.next(); self.toks.next();
if let Some(Token { kind: '.', pos }) = self.toks.peek().copied() { if let Some(Token { kind: '.', pos }) = self.toks.peek() {
if !name.is_empty() { if !name.is_empty() {
return Err(("expected \")\".", pos).into()); return Err(("expected \")\".", pos).into());
} }
@ -324,7 +323,7 @@ impl<'a> Parser<'a> {
return Ok(CallArgs(args, span)); return Ok(CallArgs(args, span));
} }
Some(Token { kind: ',', pos }) => { Some(Token { kind: ',', pos }) => {
span = span.merge(*pos); span = span.merge(pos);
self.toks.next(); self.toks.next();
self.whitespace_or_comment(); self.whitespace_or_comment();
continue; continue;
@ -341,14 +340,14 @@ impl<'a> Parser<'a> {
self.expect_char('.')?; self.expect_char('.')?;
} }
Some(Token { pos, .. }) => { Some(Token { pos, .. }) => {
return Err(("expected \")\".", *pos).into()); return Err(("expected \")\".", pos).into());
} }
None => return Err(("expected \")\".", span).into()), None => return Err(("expected \")\".", span).into()),
} }
} }
Some(Token { pos, .. }) => { Some(Token { pos, .. }) => {
value?; value?;
return Err(("expected \")\".", *pos).into()); return Err(("expected \")\".", pos).into());
} }
None => return Err(("expected \")\".", span).into()), None => return Err(("expected \")\".", span).into()),
} }

View File

@ -1,10 +1,10 @@
use codemap::Spanned; use codemap::Spanned;
use num_traits::cast::ToPrimitive; use num_traits::cast::ToPrimitive;
use peekmore::PeekMore;
use crate::{ use crate::{
common::Identifier, common::Identifier,
error::SassResult, error::SassResult,
lexer::Lexer,
parse::{ContextFlags, Parser, Stmt}, parse::{ContextFlags, Parser, Stmt},
unit::Unit, unit::Unit,
utils::{ utils::{
@ -60,7 +60,7 @@ impl<'a> Parser<'a> {
loop { loop {
self.whitespace_or_comment(); self.whitespace_or_comment();
if let Some(Token { kind: '@', pos }) = self.toks.peek().copied() { if let Some(Token { kind: '@', pos }) = self.toks.peek() {
self.toks.peek_forward(1); self.toks.peek_forward(1);
let ident = peek_ident_no_interpolation(self.toks, false, pos)?; let ident = peek_ident_no_interpolation(self.toks, false, pos)?;
if ident.as_str() != "else" { if ident.as_str() != "else" {
@ -72,7 +72,7 @@ impl<'a> Parser<'a> {
break; break;
} }
self.whitespace_or_comment(); self.whitespace_or_comment();
if let Some(tok) = self.toks.peek().copied() { if let Some(tok) = self.toks.peek() {
match tok.kind { match tok.kind {
'i' if matches!( 'i' if matches!(
self.toks.peek_forward(1), self.toks.peek_forward(1),
@ -182,7 +182,7 @@ impl<'a> Parser<'a> {
Some(Token { kind: 't', pos }) Some(Token { kind: 't', pos })
| Some(Token { kind: 'T', pos }) | Some(Token { kind: 'T', pos })
| Some(Token { kind: '\\', pos }) => { | Some(Token { kind: '\\', pos }) => {
let span = *pos; let span = pos;
let mut ident = match peek_ident_no_interpolation(toks, false, span) { let mut ident = match peek_ident_no_interpolation(toks, false, span) {
Ok(s) => s, Ok(s) => s,
Err(..) => return false, Err(..) => return false,
@ -266,7 +266,7 @@ impl<'a> Parser<'a> {
); );
if self.flags.in_function() { if self.flags.in_function() {
let these_stmts = Parser { let these_stmts = Parser {
toks: &mut body.clone().into_iter().peekmore(), toks: &mut Lexer::new(body.clone()),
map: self.map, map: self.map,
path: self.path, path: self.path,
scopes: self.scopes, scopes: self.scopes,
@ -290,7 +290,7 @@ impl<'a> Parser<'a> {
} else { } else {
stmts.append( stmts.append(
&mut Parser { &mut Parser {
toks: &mut body.clone().into_iter().peekmore(), toks: &mut Lexer::new(body.clone()),
map: self.map, map: self.map,
path: self.path, path: self.path,
scopes: self.scopes, scopes: self.scopes,
@ -343,7 +343,7 @@ impl<'a> Parser<'a> {
while val.node.is_true() { while val.node.is_true() {
if self.flags.in_function() { if self.flags.in_function() {
let these_stmts = Parser { let these_stmts = Parser {
toks: &mut body.clone().into_iter().peekmore(), toks: &mut Lexer::new(body.clone()),
map: self.map, map: self.map,
path: self.path, path: self.path,
scopes: self.scopes, scopes: self.scopes,
@ -367,7 +367,7 @@ impl<'a> Parser<'a> {
} else { } else {
stmts.append( stmts.append(
&mut Parser { &mut Parser {
toks: &mut body.clone().into_iter().peekmore(), toks: &mut Lexer::new(body.clone()),
map: self.map, map: self.map,
path: self.path, path: self.path,
scopes: self.scopes, scopes: self.scopes,
@ -455,7 +455,7 @@ impl<'a> Parser<'a> {
if self.flags.in_function() { if self.flags.in_function() {
let these_stmts = Parser { let these_stmts = Parser {
toks: &mut body.clone().into_iter().peekmore(), toks: &mut Lexer::new(body.clone()),
map: self.map, map: self.map,
path: self.path, path: self.path,
scopes: self.scopes, scopes: self.scopes,
@ -479,7 +479,7 @@ impl<'a> Parser<'a> {
} else { } else {
stmts.append( stmts.append(
&mut Parser { &mut Parser {
toks: &mut body.clone().into_iter().peekmore(), toks: &mut Lexer::new(body.clone()),
map: self.map, map: self.map,
path: self.path, path: self.path,
scopes: self.scopes, scopes: self.scopes,

View File

@ -1,11 +1,11 @@
use codemap::Spanned; use codemap::Spanned;
use peekmore::PeekMore;
use crate::{ use crate::{
args::CallArgs, args::CallArgs,
atrule::Function, atrule::Function,
common::{unvendor, Identifier}, common::{unvendor, Identifier},
error::SassResult, error::SassResult,
lexer::Lexer,
scope::Scopes, scope::Scopes,
utils::{read_until_closing_curly_brace, read_until_semicolon_or_closing_curly_brace}, utils::{read_until_closing_curly_brace, read_until_semicolon_or_closing_curly_brace},
value::{SassFunction, Value}, value::{SassFunction, Value},
@ -100,7 +100,7 @@ impl<'a> Parser<'a> {
}; };
let mut return_value = Parser { let mut return_value = Parser {
toks: &mut body.into_iter().peekmore(), toks: &mut Lexer::new(body),
map: self.map, map: self.map,
path: self.path, path: self.path,
scopes: if declared_at_root { scopes: if declared_at_root {

View File

@ -20,7 +20,7 @@ impl<'a> Parser<'a> {
if unit && tok.kind == '-' { if unit && tok.kind == '-' {
// Disallow `-` followed by a dot or a digit in units. // Disallow `-` followed by a dot or a digit in units.
let second = match self.toks.peek_forward(1) { let second = match self.toks.peek_forward(1) {
Some(v) => *v, Some(v) => v,
None => break, None => break,
}; };
@ -59,7 +59,7 @@ impl<'a> Parser<'a> {
buf.push_str(&self.escape(false)?); buf.push_str(&self.escape(false)?);
} }
'#' => { '#' => {
if let Some(Token { kind: '{', .. }) = self.toks.peek_forward(1).copied() { if let Some(Token { kind: '{', .. }) = self.toks.peek_forward(1) {
self.toks.next(); self.toks.next();
self.toks.next(); self.toks.next();
// TODO: if ident, interpolate literally // TODO: if ident, interpolate literally
@ -136,7 +136,6 @@ impl<'a> Parser<'a> {
let Token { kind, pos } = self let Token { kind, pos } = self
.toks .toks
.peek() .peek()
.copied()
.ok_or(("Expected identifier.", self.span_before))?; .ok_or(("Expected identifier.", self.span_before))?;
let mut text = String::new(); let mut text = String::new();
if kind == '-' { if kind == '-' {
@ -163,7 +162,7 @@ impl<'a> Parser<'a> {
} }
let Token { kind: first, pos } = match self.toks.peek() { let Token { kind: first, pos } = match self.toks.peek() {
Some(v) => *v, Some(v) => v,
None => return Err(("Expected identifier.", self.span_before).into()), None => return Err(("Expected identifier.", self.span_before).into()),
}; };
@ -205,7 +204,7 @@ impl<'a> Parser<'a> {
.peek() .peek()
.ok_or(("Expected identifier.", self.span_before))?; .ok_or(("Expected identifier.", self.span_before))?;
let mut text = String::new(); let mut text = String::new();
if kind == &'-' { if kind == '-' {
self.toks.next(); self.toks.next();
text.push('-'); text.push('-');
@ -264,7 +263,7 @@ impl<'a> Parser<'a> {
} }
'#' => { '#' => {
if let Some(Token { kind: '{', pos }) = self.toks.peek() { if let Some(Token { kind: '{', pos }) = self.toks.peek() {
self.span_before = self.span_before.merge(*pos); self.span_before = self.span_before.merge(pos);
self.toks.next(); self.toks.next();
let interpolation = self.parse_interpolation()?; let interpolation = self.parse_interpolation()?;
match interpolation.node { match interpolation.node {

View File

@ -1,7 +1,6 @@
use std::{ffi::OsStr, fs, path::Path, path::PathBuf}; use std::{ffi::OsStr, fs, path::Path, path::PathBuf};
use codemap::{Span, Spanned}; use codemap::{Span, Spanned};
use peekmore::PeekMore;
use crate::{ use crate::{
common::{ListSeparator::Comma, QuoteKind}, common::{ListSeparator::Comma, QuoteKind},
@ -104,10 +103,7 @@ impl<'a> Parser<'a> {
String::from_utf8(fs::read(&name)?)?, String::from_utf8(fs::read(&name)?)?,
); );
return Parser { return Parser {
toks: &mut Lexer::new(&file) toks: &mut Lexer::new_from_file(&file),
.collect::<Vec<Token>>()
.into_iter()
.peekmore(),
map: self.map, map: self.map,
path: &name, path: &name,
scopes: self.scopes, scopes: self.scopes,
@ -141,7 +137,7 @@ impl<'a> Parser<'a> {
Some(Token { kind: '\'', .. }) Some(Token { kind: '\'', .. })
| Some(Token { kind: '"', .. }) | Some(Token { kind: '"', .. })
| Some(Token { kind: 'u', .. }) => {} | Some(Token { kind: 'u', .. }) => {}
Some(Token { pos, .. }) => return Err(("Expected string.", *pos).into()), Some(Token { pos, .. }) => return Err(("Expected string.", pos).into()),
None => return Err(("expected more input.", self.span_before).into()), None => return Err(("expected more input.", self.span_before).into()),
}; };
let Spanned { let Spanned {

View File

@ -1,10 +1,9 @@
use std::fmt; use std::fmt;
use peekmore::PeekMore;
use crate::{ use crate::{
atrule::keyframes::{Keyframes, KeyframesSelector}, atrule::keyframes::{Keyframes, KeyframesSelector},
error::SassResult, error::SassResult,
lexer::Lexer,
parse::Stmt, parse::Stmt,
utils::eat_whole_number, utils::eat_whole_number,
Token, Token,
@ -34,7 +33,7 @@ impl<'a, 'b> KeyframesSelectorParser<'a, 'b> {
fn parse_keyframes_selector(&mut self) -> SassResult<Vec<KeyframesSelector>> { fn parse_keyframes_selector(&mut self) -> SassResult<Vec<KeyframesSelector>> {
let mut selectors = Vec::new(); let mut selectors = Vec::new();
self.parser.whitespace_or_comment(); self.parser.whitespace_or_comment();
while let Some(tok) = self.parser.toks.peek().copied() { while let Some(tok) = self.parser.toks.peek() {
match tok.kind { match tok.kind {
't' | 'T' => { 't' | 'T' => {
let mut ident = self.parser.parse_identifier()?; let mut ident = self.parser.parse_identifier()?;
@ -128,7 +127,7 @@ impl<'a> Parser<'a> {
span = span.merge(tok.pos()); span = span.merge(tok.pos());
match tok.kind { match tok.kind {
'#' => { '#' => {
if let Some(Token { kind: '{', .. }) = self.toks.peek().copied() { if let Some(Token { kind: '{', .. }) = self.toks.peek() {
self.toks.next(); self.toks.next();
string.push_str(&self.parse_interpolation()?.to_css_string(span)?); string.push_str(&self.parse_interpolation()?.to_css_string(span)?);
} else { } else {
@ -154,13 +153,11 @@ impl<'a> Parser<'a> {
string.push(' '); string.push(' ');
} }
'{' => { '{' => {
// we must collect here because the parser is not generic over iterator
#[allow(clippy::needless_collect)]
let sel_toks: Vec<Token> = let sel_toks: Vec<Token> =
string.chars().map(|x| Token::new(span, x)).collect(); string.chars().map(|x| Token::new(span, x)).collect();
let selector = KeyframesSelectorParser::new(&mut Parser { let selector = KeyframesSelectorParser::new(&mut Parser {
toks: &mut sel_toks.into_iter().peekmore(), toks: &mut Lexer::new(sel_toks),
map: self.map, map: self.map,
path: self.path, path: self.path,
scopes: self.scopes, scopes: self.scopes,

View File

@ -98,7 +98,7 @@ impl<'a> Parser<'a> {
return Ok(buf); return Ok(buf);
} }
let next_tok = self.toks.peek().copied(); let next_tok = self.toks.peek();
let is_angle = next_tok.map_or(false, |t| t.kind == '<' || t.kind == '>'); let is_angle = next_tok.map_or(false, |t| t.kind == '<' || t.kind == '>');
if is_angle || matches!(next_tok, Some(Token { kind: '=', .. })) { if is_angle || matches!(next_tok, Some(Token { kind: '=', .. })) {
buf.push(' '); buf.push(' ');

View File

@ -2,12 +2,11 @@ use std::mem;
use codemap::Spanned; use codemap::Spanned;
use peekmore::PeekMore;
use crate::{ use crate::{
args::{CallArgs, FuncArgs}, args::{CallArgs, FuncArgs},
atrule::mixin::{Content, Mixin, UserDefinedMixin}, atrule::mixin::{Content, Mixin, UserDefinedMixin},
error::SassResult, error::SassResult,
lexer::Lexer,
scope::Scopes, scope::Scopes,
utils::read_until_closing_curly_brace, utils::read_until_closing_curly_brace,
Token, Token,
@ -124,7 +123,7 @@ impl<'a> Parser<'a> {
let mut toks = read_until_closing_curly_brace(self.toks)?; let mut toks = read_until_closing_curly_brace(self.toks)?;
if let Some(tok) = self.toks.peek() { if let Some(tok) = self.toks.peek() {
toks.push(*tok); toks.push(tok);
self.toks.next(); self.toks.next();
} }
Some(toks) Some(toks)
@ -167,7 +166,7 @@ impl<'a> Parser<'a> {
}); });
let body = Parser { let body = Parser {
toks: &mut body.into_iter().peekmore(), toks: &mut Lexer::new(body),
map: self.map, map: self.map,
path: self.path, path: self.path,
scopes: self.scopes, scopes: self.scopes,
@ -234,7 +233,7 @@ impl<'a> Parser<'a> {
let stmts = if let Some(body) = content.content.clone() { let stmts = if let Some(body) = content.content.clone() {
Parser { Parser {
toks: &mut body.into_iter().peekmore(), toks: &mut Lexer::new(body),
map: self.map, map: self.map,
path: self.path, path: self.path,
scopes: &mut scope_at_decl, scopes: &mut scope_at_decl,

View File

@ -1,7 +1,6 @@
use std::{convert::TryFrom, path::Path, vec::IntoIter}; use std::{convert::TryFrom, path::Path};
use codemap::{CodeMap, Span, Spanned}; use codemap::{CodeMap, Span, Spanned};
use peekmore::{PeekMore, PeekMoreIterator};
use crate::{ use crate::{
atrule::{ atrule::{
@ -12,6 +11,7 @@ use crate::{
}, },
builtin::modules::{ModuleConfig, Modules}, builtin::modules::{ModuleConfig, Modules},
error::SassResult, error::SassResult,
lexer::Lexer,
scope::{Scope, Scopes}, scope::{Scope, Scopes},
selector::{ selector::{
ComplexSelectorComponent, ExtendRule, ExtendedSelector, Extender, Selector, SelectorParser, ComplexSelectorComponent, ExtendRule, ExtendedSelector, Extender, Selector, SelectorParser,
@ -68,7 +68,7 @@ pub(crate) enum Stmt {
/// hit (if there is one) is not important for now. /// hit (if there is one) is not important for now.
// todo: merge at_root and at_root_has_selector into an enum // todo: merge at_root and at_root_has_selector into an enum
pub(crate) struct Parser<'a> { pub(crate) struct Parser<'a> {
pub toks: &'a mut PeekMoreIterator<IntoIter<Token>>, pub toks: &'a mut Lexer,
pub map: &'a mut CodeMap, pub map: &'a mut CodeMap,
pub path: &'a Path, pub path: &'a Path,
pub global_scope: &'a mut Scope, pub global_scope: &'a mut Scope,
@ -114,19 +114,19 @@ impl<'a> Parser<'a> {
pub fn expect_char(&mut self, c: char) -> SassResult<()> { pub fn expect_char(&mut self, c: char) -> SassResult<()> {
match self.toks.peek() { match self.toks.peek() {
Some(Token { kind, pos }) if *kind == c => { Some(Token { kind, pos }) if kind == c => {
self.span_before = *pos; self.span_before = pos;
self.toks.next(); self.toks.next();
Ok(()) Ok(())
} }
Some(Token { pos, .. }) => Err((format!("expected \"{}\".", c), *pos).into()), Some(Token { pos, .. }) => Err((format!("expected \"{}\".", c), pos).into()),
None => Err((format!("expected \"{}\".", c), self.span_before).into()), None => Err((format!("expected \"{}\".", c), self.span_before).into()),
} }
} }
pub fn consume_char_if_exists(&mut self, c: char) -> bool { pub fn consume_char_if_exists(&mut self, c: char) -> bool {
if let Some(Token { kind, .. }) = self.toks.peek() { if let Some(Token { kind, .. }) = self.toks.peek() {
if *kind == c { if kind == c {
self.toks.next(); self.toks.next();
return true; return true;
} }
@ -150,7 +150,7 @@ impl<'a> Parser<'a> {
if self.flags.in_function() && !stmts.is_empty() { if self.flags.in_function() && !stmts.is_empty() {
return Ok(stmts); return Ok(stmts);
} }
self.span_before = *pos; self.span_before = pos;
match kind { match kind {
'@' => { '@' => {
self.toks.next(); self.toks.next();
@ -207,7 +207,7 @@ impl<'a> Parser<'a> {
} = self.parse_value(false, &|_| false)?; } = self.parse_value(false, &|_| false)?;
span.merge(kind_string.span); span.merge(kind_string.span);
if let Some(Token { kind: ';', pos }) = self.toks.peek() { if let Some(Token { kind: ';', pos }) = self.toks.peek() {
kind_string.span.merge(*pos); kind_string.span.merge(pos);
self.toks.next(); self.toks.next();
} }
self.warn(&Spanned { self.warn(&Spanned {
@ -222,7 +222,7 @@ impl<'a> Parser<'a> {
} = self.parse_value(false, &|_| false)?; } = self.parse_value(false, &|_| false)?;
span.merge(kind_string.span); span.merge(kind_string.span);
if let Some(Token { kind: ';', pos }) = self.toks.peek() { if let Some(Token { kind: ';', pos }) = self.toks.peek() {
kind_string.span.merge(*pos); kind_string.span.merge(pos);
self.toks.next(); self.toks.next();
} }
self.debug(&Spanned { self.debug(&Spanned {
@ -287,14 +287,14 @@ impl<'a> Parser<'a> {
} }
} }
'\u{0}'..='\u{8}' | '\u{b}'..='\u{1f}' => { '\u{0}'..='\u{8}' | '\u{b}'..='\u{1f}' => {
return Err(("expected selector.", *pos).into()) return Err(("expected selector.", pos).into())
} }
'}' => { '}' => {
self.toks.next(); self.toks.next();
break; break;
} }
// dart-sass seems to special-case the error message here? // dart-sass seems to special-case the error message here?
'!' | '{' => return Err(("expected \"}\".", *pos).into()), '!' | '{' => return Err(("expected \"}\".", pos).into()),
_ => { _ => {
if self.flags.in_function() { if self.flags.in_function() {
return Err(( return Err((
@ -403,7 +403,7 @@ impl<'a> Parser<'a> {
span = span.merge(pos); span = span.merge(pos);
match kind { match kind {
'#' => { '#' => {
if let Some(Token { kind: '{', .. }) = self.toks.peek().copied() { if let Some(Token { kind: '{', .. }) = self.toks.peek() {
self.toks.next(); self.toks.next();
string.push_str(&self.parse_interpolation()?.to_css_string(span)?); string.push_str(&self.parse_interpolation()?.to_css_string(span)?);
} else { } else {
@ -447,15 +447,13 @@ impl<'a> Parser<'a> {
return Err(("expected \"{\".", span).into()); return Err(("expected \"{\".", span).into());
} }
// we must collect here because the parser isn't generic over the iterator
#[allow(clippy::needless_collect)]
let sel_toks: Vec<Token> = string.chars().map(|x| Token::new(span, x)).collect(); let sel_toks: Vec<Token> = string.chars().map(|x| Token::new(span, x)).collect();
let mut iter = sel_toks.into_iter().peekmore(); let mut lexer = Lexer::new(sel_toks);
let selector = SelectorParser::new( let selector = SelectorParser::new(
&mut Parser { &mut Parser {
toks: &mut iter, toks: &mut lexer,
map: self.map, map: self.map,
path: self.path, path: self.path,
scopes: self.scopes, scopes: self.scopes,
@ -635,7 +633,7 @@ impl<'a> Parser<'a> {
'{' => break, '{' => break,
'#' => { '#' => {
if let Some(Token { kind: '{', pos }) = self.toks.peek() { if let Some(Token { kind: '{', pos }) = self.toks.peek() {
self.span_before = self.span_before.merge(*pos); self.span_before = self.span_before.merge(pos);
self.toks.next(); self.toks.next();
params.push_str(&self.parse_interpolation_as_string()?); params.push_str(&self.parse_interpolation_as_string()?);
} else { } else {
@ -780,9 +778,7 @@ impl<'a> Parser<'a> {
// return Err(("@extend may only be used within style rules.", self.span_before).into()); // return Err(("@extend may only be used within style rules.", self.span_before).into());
// } // }
let (value, is_optional) = Parser { let (value, is_optional) = Parser {
toks: &mut read_until_semicolon_or_closing_curly_brace(self.toks)? toks: &mut Lexer::new(read_until_semicolon_or_closing_curly_brace(self.toks)?),
.into_iter()
.peekmore(),
map: self.map, map: self.map,
path: self.path, path: self.path,
scopes: self.scopes, scopes: self.scopes,
@ -888,7 +884,7 @@ impl<'a> Parser<'a> {
match tok.kind { match tok.kind {
'{' => break, '{' => break,
'#' => { '#' => {
if let Some(Token { kind: '{', pos }) = self.toks.peek().copied() { if let Some(Token { kind: '{', pos }) = self.toks.peek() {
self.toks.next(); self.toks.next();
self.span_before = pos; self.span_before = pos;
let interpolation = self.parse_interpolation()?; let interpolation = self.parse_interpolation()?;

View File

@ -1,7 +1,6 @@
use std::{convert::TryFrom, fs}; use std::{convert::TryFrom, fs};
use codemap::Spanned; use codemap::Spanned;
use peekmore::PeekMore;
use crate::{ use crate::{
atrule::AtRuleKind, atrule::AtRuleKind,
@ -116,10 +115,7 @@ impl<'a> Parser<'a> {
.add_file(name.to_owned(), String::from_utf8(fs::read(&import)?)?); .add_file(name.to_owned(), String::from_utf8(fs::read(&import)?)?);
let stmts = Parser { let stmts = Parser {
toks: &mut Lexer::new(&file) toks: &mut Lexer::new_from_file(&file),
.collect::<Vec<Token>>()
.into_iter()
.peekmore(),
map: self.map, map: self.map,
path: &import, path: &import,
scopes: self.scopes, scopes: self.scopes,

View File

@ -27,36 +27,36 @@ impl<'a> Parser<'a> {
return None; return None;
} }
'(' => { '(' => {
toks.push(*tok); toks.push(tok);
self.toks.peek_forward(1); self.toks.peek_forward(1);
let mut scope = 0; let mut scope = 0;
while let Some(tok) = self.toks.peek() { while let Some(tok) = self.toks.peek() {
match tok.kind { match tok.kind {
')' => { ')' => {
if scope == 0 { if scope == 0 {
toks.push(*tok); toks.push(tok);
self.toks.peek_forward(1); self.toks.peek_forward(1);
break; break;
} }
scope -= 1; scope -= 1;
toks.push(*tok); toks.push(tok);
self.toks.peek_forward(1); self.toks.peek_forward(1);
} }
'(' => { '(' => {
toks.push(*tok); toks.push(tok);
self.toks.peek_forward(1); self.toks.peek_forward(1);
scope += 1; scope += 1;
} }
_ => { _ => {
toks.push(*tok); toks.push(tok);
self.toks.peek_forward(1); self.toks.peek_forward(1);
} }
} }
} }
} }
_ => { _ => {
toks.push(*tok); toks.push(tok);
self.toks.peek_forward(1); self.toks.peek_forward(1);
} }
} }
@ -123,7 +123,7 @@ impl<'a> Parser<'a> {
property.push(':'); property.push(':');
SelectorOrStyle::Selector(property) SelectorOrStyle::Selector(property)
} }
c if is_name(*c) => { c if is_name(c) => {
if let Some(toks) = if let Some(toks) =
self.parse_style_value_when_no_space_after_semicolon() self.parse_style_value_when_no_space_after_semicolon()
{ {
@ -201,7 +201,7 @@ impl<'a> Parser<'a> {
) -> SassResult<Vec<Style>> { ) -> SassResult<Vec<Style>> {
let mut styles = Vec::new(); let mut styles = Vec::new();
self.whitespace(); self.whitespace();
while let Some(tok) = self.toks.peek().copied() { while let Some(tok) = self.toks.peek() {
match tok.kind { match tok.kind {
'{' => { '{' => {
self.toks.next(); self.toks.next();

View File

@ -27,7 +27,7 @@ impl<'a> Parser<'a> {
} }
'#' => { '#' => {
if let Some(Token { kind: '{', pos }) = self.toks.peek() { if let Some(Token { kind: '{', pos }) = self.toks.peek() {
self.span_before = *pos; self.span_before = pos;
self.toks.next(); self.toks.next();
let interpolation = self.parse_interpolation()?; let interpolation = self.parse_interpolation()?;
buf.push_str(&interpolation.node.to_css_string(interpolation.span)?); buf.push_str(&interpolation.node.to_css_string(interpolation.span)?);
@ -315,7 +315,7 @@ impl<'a> Parser<'a> {
} }
buf.push_str(&num); buf.push_str(&num);
} else { } else {
self.toks.move_cursor_back().unwrap(); self.toks.move_cursor_back();
} }
let next = match self.toks.peek() { let next = match self.toks.peek() {
@ -365,7 +365,7 @@ impl<'a> Parser<'a> {
fn peek_escape(&mut self) -> SassResult<String> { fn peek_escape(&mut self) -> SassResult<String> {
let mut value = 0; let mut value = 0;
let first = match self.toks.peek() { let first = match self.toks.peek() {
Some(t) => *t, Some(t) => t,
None => return Ok(String::new()), None => return Ok(String::new()),
}; };
let mut span = first.pos; let mut span = first.pos;

View File

@ -1,4 +1,4 @@
use std::{iter::Iterator, mem, vec::IntoIter}; use std::{iter::Iterator, mem};
use num_bigint::BigInt; use num_bigint::BigInt;
use num_rational::{BigRational, Rational64}; use num_rational::{BigRational, Rational64};
@ -6,13 +6,12 @@ use num_traits::{pow, One, ToPrimitive};
use codemap::{Span, Spanned}; use codemap::{Span, Spanned};
use peekmore::{PeekMore, PeekMoreIterator};
use crate::{ use crate::{
builtin::GLOBAL_FUNCTIONS, builtin::GLOBAL_FUNCTIONS,
color::{Color, NAMED_COLORS}, color::{Color, NAMED_COLORS},
common::{unvendor, Brackets, Identifier, ListSeparator, Op, QuoteKind}, common::{unvendor, Brackets, Identifier, ListSeparator, Op, QuoteKind},
error::SassResult, error::SassResult,
lexer::Lexer,
unit::Unit, unit::Unit,
utils::{eat_whole_number, is_name, IsWhitespace, ParsedNumber}, utils::{eat_whole_number, is_name, IsWhitespace, ParsedNumber},
value::{Number, SassFunction, SassMap, Value}, value::{Number, SassFunction, SassMap, Value},
@ -53,7 +52,7 @@ impl<'a> Parser<'a> {
pub(crate) fn parse_value( pub(crate) fn parse_value(
&mut self, &mut self,
in_paren: bool, in_paren: bool,
predicate: &dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool, predicate: &dyn Fn(&mut Lexer) -> bool,
) -> SassResult<Spanned<Value>> { ) -> SassResult<Spanned<Value>> {
self.whitespace(); self.whitespace();
@ -62,7 +61,7 @@ impl<'a> Parser<'a> {
| Some(Token { kind: ';', .. }) | Some(Token { kind: ';', .. })
| Some(Token { kind: '{', .. }) | Some(Token { kind: '{', .. })
| None => return Err(("Expected expression.", self.span_before).into()), | None => return Err(("Expected expression.", self.span_before).into()),
Some(Token { pos, .. }) => *pos, Some(Token { pos, .. }) => pos,
}; };
if predicate(self.toks) { if predicate(self.toks) {
@ -173,7 +172,7 @@ impl<'a> Parser<'a> {
in_paren: bool, in_paren: bool,
) -> SassResult<Spanned<Value>> { ) -> SassResult<Spanned<Value>> {
Parser { Parser {
toks: &mut toks.into_iter().peekmore(), toks: &mut Lexer::new(toks),
map: self.map, map: self.map,
path: self.path, path: self.path,
scopes: self.scopes, scopes: self.scopes,
@ -291,7 +290,7 @@ impl<'a> Parser<'a> {
fn parse_ident_value( fn parse_ident_value(
&mut self, &mut self,
predicate: &dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool, predicate: &dyn Fn(&mut Lexer) -> bool,
) -> SassResult<Spanned<IntermediateValue>> { ) -> SassResult<Spanned<IntermediateValue>> {
let Spanned { node: mut s, span } = self.parse_identifier()?; let Spanned { node: mut s, span } = self.parse_identifier()?;
@ -363,7 +362,7 @@ impl<'a> Parser<'a> {
fn parse_number( fn parse_number(
&mut self, &mut self,
predicate: &dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool, predicate: &dyn Fn(&mut Lexer) -> bool,
) -> SassResult<Spanned<ParsedNumber>> { ) -> SassResult<Spanned<ParsedNumber>> {
let mut span = self.toks.peek().unwrap().pos; let mut span = self.toks.peek().unwrap().pos;
let mut whole = eat_whole_number(self.toks); let mut whole = eat_whole_number(self.toks);
@ -375,7 +374,7 @@ impl<'a> Parser<'a> {
}); });
} }
let next_tok = *self.toks.peek().unwrap(); let next_tok = self.toks.peek().unwrap();
let dec_len = if next_tok.kind == '.' { let dec_len = if next_tok.kind == '.' {
self.toks.next(); self.toks.next();
@ -395,7 +394,7 @@ impl<'a> Parser<'a> {
let mut times_ten = String::new(); let mut times_ten = String::new();
let mut times_ten_is_postive = true; let mut times_ten_is_postive = true;
if let Some(Token { kind: 'e', .. }) | Some(Token { kind: 'E', .. }) = self.toks.peek() { if let Some(Token { kind: 'e', .. }) | Some(Token { kind: 'E', .. }) = self.toks.peek() {
if let Some(&tok) = self.toks.peek_next() { if let Some(tok) = self.toks.peek_next() {
if tok.kind == '-' { if tok.kind == '-' {
self.toks.next(); self.toks.next();
times_ten_is_postive = false; times_ten_is_postive = false;
@ -404,13 +403,11 @@ impl<'a> Parser<'a> {
times_ten = eat_whole_number(self.toks); times_ten = eat_whole_number(self.toks);
if times_ten.is_empty() { if times_ten.is_empty() {
return Err( return Err(("Expected digit.", self.toks.peek().unwrap_or(tok).pos).into());
("Expected digit.", self.toks.peek().unwrap_or(&tok).pos).into()
);
} else if times_ten.len() > 2 { } else if times_ten.len() > 2 {
return Err(( return Err((
"Exponent too negative.", "Exponent too negative.",
self.toks.peek().unwrap_or(&tok).pos, self.toks.peek().unwrap_or(tok).pos,
) )
.into()); .into());
} }
@ -420,15 +417,15 @@ impl<'a> Parser<'a> {
if times_ten.len() > 2 { if times_ten.len() > 2 {
return Err( return Err(
("Exponent too large.", self.toks.peek().unwrap_or(&tok).pos).into(), ("Exponent too large.", self.toks.peek().unwrap_or(tok).pos).into()
); );
} }
} }
} }
} }
if let Ok(Some(Token { pos, .. })) = self.toks.peek_previous() { if let Some(Token { pos, .. }) = self.toks.peek_previous() {
span = span.merge(*pos); span = span.merge(pos);
} }
self.toks.reset_cursor(); self.toks.reset_cursor();
@ -445,7 +442,7 @@ impl<'a> Parser<'a> {
self.whitespace_or_comment(); self.whitespace_or_comment();
Ok(if let Some(Token { kind: ']', pos }) = self.toks.peek() { Ok(if let Some(Token { kind: ']', pos }) = self.toks.peek() {
span = span.merge(*pos); span = span.merge(pos);
self.toks.next(); self.toks.next();
IntermediateValue::Value(HigherIntermediateValue::Literal(Value::List( IntermediateValue::Value(HigherIntermediateValue::Literal(Value::List(
Vec::new(), Vec::new(),
@ -473,14 +470,14 @@ impl<'a> Parser<'a> {
fn parse_dimension( fn parse_dimension(
&mut self, &mut self,
predicate: &dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool, predicate: &dyn Fn(&mut Lexer) -> bool,
) -> SassResult<Spanned<IntermediateValue>> { ) -> SassResult<Spanned<IntermediateValue>> {
let Spanned { let Spanned {
node: val, node: val,
mut span, mut span,
} = self.parse_number(predicate)?; } = self.parse_number(predicate)?;
let unit = if let Some(tok) = self.toks.peek() { let unit = if let Some(tok) = self.toks.peek() {
let Token { kind, .. } = *tok; let Token { kind, .. } = tok;
match kind { match kind {
'a'..='z' | 'A'..='Z' | '_' | '\\' | '\u{7f}'..=std::char::MAX => { 'a'..='z' | 'A'..='Z' | '_' | '\\' | '\u{7f}'..=std::char::MAX => {
let u = self.parse_identifier_no_interpolation(true)?; let u = self.parse_identifier_no_interpolation(true)?;
@ -488,7 +485,7 @@ impl<'a> Parser<'a> {
Unit::from(u.node) Unit::from(u.node)
} }
'-' => { '-' => {
if let Some(Token { kind, .. }) = self.toks.peek_next().copied() { if let Some(Token { kind, .. }) = self.toks.peek_next() {
self.toks.reset_cursor(); self.toks.reset_cursor();
if matches!(kind, 'a'..='z' | 'A'..='Z' | '_' | '\\' | '\u{7f}'..=std::char::MAX) if matches!(kind, 'a'..='z' | 'A'..='Z' | '_' | '\\' | '\u{7f}'..=std::char::MAX)
{ {
@ -679,7 +676,7 @@ impl<'a> Parser<'a> {
fn in_interpolated_identifier_body(&mut self) -> bool { fn in_interpolated_identifier_body(&mut self) -> bool {
match self.toks.peek() { match self.toks.peek() {
Some(Token { kind: '\\', .. }) => true, Some(Token { kind: '\\', .. }) => true,
Some(Token { kind, .. }) if is_name(*kind) => true, Some(Token { kind, .. }) if is_name(kind) => true,
Some(Token { kind: '#', .. }) => { Some(Token { kind: '#', .. }) => {
let next_is_curly = matches!(self.toks.peek_next(), Some(Token { kind: '{', .. })); let next_is_curly = matches!(self.toks.peek_next(), Some(Token { kind: '{', .. }));
self.toks.reset_cursor(); self.toks.reset_cursor();
@ -701,9 +698,9 @@ impl<'a> Parser<'a> {
for _ in 0..6 { for _ in 0..6 {
if let Some(Token { kind, pos }) = self.toks.peek() { if let Some(Token { kind, pos }) = self.toks.peek() {
if kind.is_ascii_hexdigit() { if kind.is_ascii_hexdigit() {
span = span.merge(*pos); span = span.merge(pos);
self.span_before = *pos; self.span_before = pos;
buf.push(*kind); buf.push(kind);
self.toks.next(); self.toks.next();
} else { } else {
break; break;
@ -715,8 +712,8 @@ impl<'a> Parser<'a> {
buf.push('?'); buf.push('?');
for _ in 0..(8_usize.saturating_sub(buf.len())) { for _ in 0..(8_usize.saturating_sub(buf.len())) {
if let Some(Token { kind: '?', pos }) = self.toks.peek() { if let Some(Token { kind: '?', pos }) = self.toks.peek() {
span = span.merge(*pos); span = span.merge(pos);
self.span_before = *pos; self.span_before = pos;
buf.push('?'); buf.push('?');
self.toks.next(); self.toks.next();
} else { } else {
@ -743,9 +740,9 @@ impl<'a> Parser<'a> {
found_hex_digit = true; found_hex_digit = true;
if let Some(Token { kind, pos }) = self.toks.peek() { if let Some(Token { kind, pos }) = self.toks.peek() {
if kind.is_ascii_hexdigit() { if kind.is_ascii_hexdigit() {
span = span.merge(*pos); span = span.merge(pos);
self.span_before = *pos; self.span_before = pos;
buf.push(*kind); buf.push(kind);
self.toks.next(); self.toks.next();
} else { } else {
break; break;
@ -773,7 +770,7 @@ impl<'a> Parser<'a> {
fn parse_intermediate_value( fn parse_intermediate_value(
&mut self, &mut self,
predicate: &dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool, predicate: &dyn Fn(&mut Lexer) -> bool,
) -> Option<SassResult<Spanned<IntermediateValue>>> { ) -> Option<SassResult<Spanned<IntermediateValue>>> {
if predicate(self.toks) { if predicate(self.toks) {
return None; return None;
@ -832,7 +829,7 @@ impl<'a> Parser<'a> {
} }
'#' => { '#' => {
if let Some(Token { kind: '{', pos }) = self.toks.peek_forward(1) { if let Some(Token { kind: '{', pos }) = self.toks.peek_forward(1) {
self.span_before = *pos; self.span_before = pos;
self.toks.reset_cursor(); self.toks.reset_cursor();
return Some(self.parse_ident_value(predicate)); return Some(self.parse_ident_value(predicate));
} }
@ -1048,7 +1045,7 @@ impl<'a> Parser<'a> {
struct IntermediateValueIterator<'a, 'b: 'a> { struct IntermediateValueIterator<'a, 'b: 'a> {
parser: &'a mut Parser<'b>, parser: &'a mut Parser<'b>,
peek: Option<SassResult<Spanned<IntermediateValue>>>, peek: Option<SassResult<Spanned<IntermediateValue>>>,
predicate: &'a dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool, predicate: &'a dyn Fn(&mut Lexer) -> bool,
} }
impl<'a, 'b: 'a> Iterator for IntermediateValueIterator<'a, 'b> { impl<'a, 'b: 'a> Iterator for IntermediateValueIterator<'a, 'b> {
@ -1063,10 +1060,7 @@ impl<'a, 'b: 'a> Iterator for IntermediateValueIterator<'a, 'b> {
} }
impl<'a, 'b: 'a> IntermediateValueIterator<'a, 'b> { impl<'a, 'b: 'a> IntermediateValueIterator<'a, 'b> {
pub fn new( pub fn new(parser: &'a mut Parser<'b>, predicate: &'a dyn Fn(&mut Lexer) -> bool) -> Self {
parser: &'a mut Parser<'b>,
predicate: &'a dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool,
) -> Self {
Self { Self {
parser, parser,
peek: None, peek: None,

View File

@ -141,7 +141,7 @@ impl Attribute {
}; };
parser.whitespace(); parser.whitespace();
let modifier = match parser.toks.peek().copied() { let modifier = match parser.toks.peek() {
Some(Token { Some(Token {
kind: c @ 'a'..='z', kind: c @ 'a'..='z',
.. ..

View File

@ -198,7 +198,7 @@ impl<'a, 'b> SelectorParser<'a, 'b> {
let mut components = vec![self.parse_simple_selector(None)?]; let mut components = vec![self.parse_simple_selector(None)?];
while let Some(Token { kind, .. }) = self.parser.toks.peek() { while let Some(Token { kind, .. }) = self.parser.toks.peek() {
if !is_simple_selector_start(*kind) { if !is_simple_selector_start(kind) {
break; break;
} }
@ -219,13 +219,13 @@ impl<'a, 'b> SelectorParser<'a, 'b> {
/// [the CSS algorithm]: https://drafts.csswg.org/css-syntax-3/#would-start-an-identifier /// [the CSS algorithm]: https://drafts.csswg.org/css-syntax-3/#would-start-an-identifier
fn looking_at_identifier(&mut self) -> bool { fn looking_at_identifier(&mut self) -> bool {
match self.parser.toks.peek() { match self.parser.toks.peek() {
Some(Token { kind, .. }) if is_name_start(*kind) || kind == &'\\' => return true, Some(Token { kind, .. }) if is_name_start(kind) || kind == '\\' => return true,
Some(Token { kind: '-', .. }) => {} Some(Token { kind: '-', .. }) => {}
Some(..) | None => return false, Some(..) | None => return false,
} }
match self.parser.toks.peek_forward(1) { match self.parser.toks.peek_forward(1) {
Some(Token { kind, .. }) if is_name_start(*kind) || kind == &'-' || kind == &'\\' => { Some(Token { kind, .. }) if is_name_start(kind) || kind == '-' || kind == '\\' => {
self.parser.toks.reset_cursor(); self.parser.toks.reset_cursor();
true true
} }
@ -391,7 +391,7 @@ impl<'a, 'b> SelectorParser<'a, 'b> {
match self.parser.toks.peek() { match self.parser.toks.peek() {
Some(Token { kind: '*', pos }) => { Some(Token { kind: '*', pos }) => {
self.parser.span_before = self.parser.span_before.merge(*pos); self.parser.span_before = self.parser.span_before.merge(pos);
self.parser.toks.next(); self.parser.toks.next();
if let Some(Token { kind: '|', .. }) = self.parser.toks.peek() { if let Some(Token { kind: '|', .. }) = self.parser.toks.peek() {
self.parser.toks.next(); self.parser.toks.next();
@ -409,7 +409,7 @@ impl<'a, 'b> SelectorParser<'a, 'b> {
return Ok(SimpleSelector::Universal(Namespace::None)); return Ok(SimpleSelector::Universal(Namespace::None));
} }
Some(Token { kind: '|', pos }) => { Some(Token { kind: '|', pos }) => {
self.parser.span_before = self.parser.span_before.merge(*pos); self.parser.span_before = self.parser.span_before.merge(pos);
self.parser.toks.next(); self.parser.toks.next();
match self.parser.toks.peek() { match self.parser.toks.peek() {
Some(Token { kind: '*', .. }) => { Some(Token { kind: '*', .. }) => {

View File

@ -1,8 +1,4 @@
use std::vec::IntoIter; use crate::{lexer::Lexer, Token};
use peekmore::PeekMoreIterator;
use crate::Token;
use super::peek_until_newline; use super::peek_until_newline;
@ -16,9 +12,7 @@ impl IsWhitespace for char {
} }
} }
pub(crate) fn devour_whitespace<I: Iterator<Item = W>, W: IsWhitespace>( pub(crate) fn devour_whitespace(s: &mut Lexer) -> bool {
s: &mut PeekMoreIterator<I>,
) -> bool {
let mut found_whitespace = false; let mut found_whitespace = false;
while let Some(w) = s.peek() { while let Some(w) = s.peek() {
if !w.is_whitespace() { if !w.is_whitespace() {
@ -30,7 +24,7 @@ pub(crate) fn devour_whitespace<I: Iterator<Item = W>, W: IsWhitespace>(
found_whitespace found_whitespace
} }
pub(crate) fn peek_whitespace(s: &mut PeekMoreIterator<IntoIter<Token>>) -> bool { pub(crate) fn peek_whitespace(s: &mut Lexer) -> bool {
let mut found_whitespace = false; let mut found_whitespace = false;
while let Some(w) = s.peek() { while let Some(w) = s.peek() {
if !w.is_whitespace() { if !w.is_whitespace() {
@ -42,7 +36,7 @@ pub(crate) fn peek_whitespace(s: &mut PeekMoreIterator<IntoIter<Token>>) -> bool
found_whitespace found_whitespace
} }
pub(crate) fn peek_whitespace_or_comment(s: &mut PeekMoreIterator<IntoIter<Token>>) -> bool { pub(crate) fn peek_whitespace_or_comment(s: &mut Lexer) -> bool {
let mut found_whitespace = false; let mut found_whitespace = false;
while let Some(w) = s.peek() { while let Some(w) = s.peek() {
match w.kind { match w.kind {
@ -83,7 +77,7 @@ pub(crate) fn peek_whitespace_or_comment(s: &mut PeekMoreIterator<IntoIter<Token
/// We only have to check for \n as the lexing step normalizes all newline characters /// We only have to check for \n as the lexing step normalizes all newline characters
/// ///
/// The newline is consumed /// The newline is consumed
pub(crate) fn read_until_newline<I: Iterator<Item = Token>>(toks: &mut PeekMoreIterator<I>) { pub(crate) fn read_until_newline(toks: &mut Lexer) {
for tok in toks { for tok in toks {
if tok.kind == '\n' { if tok.kind == '\n' {
return; return;

View File

@ -1,8 +1,4 @@
use std::vec::IntoIter; use crate::lexer::Lexer;
use peekmore::PeekMoreIterator;
use crate::Token;
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct ParsedNumber { pub(crate) struct ParsedNumber {
@ -46,7 +42,7 @@ impl ParsedNumber {
} }
} }
pub(crate) fn eat_whole_number(toks: &mut PeekMoreIterator<IntoIter<Token>>) -> String { pub(crate) fn eat_whole_number(toks: &mut Lexer) -> String {
let mut buf = String::new(); let mut buf = String::new();
while let Some(c) = toks.peek() { while let Some(c) = toks.peek() {
if !c.kind.is_ascii_digit() { if !c.kind.is_ascii_digit() {

View File

@ -1,19 +1,13 @@
use std::vec::IntoIter;
use codemap::{Span, Spanned}; use codemap::{Span, Spanned};
use peekmore::PeekMoreIterator; use crate::{error::SassResult, lexer::Lexer, Token};
use crate::{error::SassResult, Token};
use super::{as_hex, hex_char_for, is_name, is_name_start, peek_whitespace}; use super::{as_hex, hex_char_for, is_name, is_name_start, peek_whitespace};
pub(crate) fn peek_until_closing_curly_brace( pub(crate) fn peek_until_closing_curly_brace(toks: &mut Lexer) -> SassResult<Vec<Token>> {
toks: &mut PeekMoreIterator<IntoIter<Token>>,
) -> SassResult<Vec<Token>> {
let mut t = Vec::new(); let mut t = Vec::new();
let mut nesting = 0; let mut nesting = 0;
while let Some(tok) = toks.peek().copied() { while let Some(tok) = toks.peek() {
match tok.kind { match tok.kind {
q @ '"' | q @ '\'' => { q @ '"' | q @ '\'' => {
t.push(tok); t.push(tok);
@ -35,7 +29,7 @@ pub(crate) fn peek_until_closing_curly_brace(
toks.advance_cursor(); toks.advance_cursor();
} }
'/' => { '/' => {
let next = *toks let next = toks
.peek_forward(1) .peek_forward(1)
.ok_or(("Expected expression.", tok.pos))?; .ok_or(("Expected expression.", tok.pos))?;
match toks.peek() { match toks.peek() {
@ -54,12 +48,9 @@ pub(crate) fn peek_until_closing_curly_brace(
Ok(t) Ok(t)
} }
fn peek_until_closing_quote( fn peek_until_closing_quote(toks: &mut Lexer, q: char) -> SassResult<Vec<Token>> {
toks: &mut PeekMoreIterator<IntoIter<Token>>,
q: char,
) -> SassResult<Vec<Token>> {
let mut t = Vec::new(); let mut t = Vec::new();
while let Some(tok) = toks.peek().copied() { while let Some(tok) = toks.peek() {
match tok.kind { match tok.kind {
'"' if q == '"' => { '"' if q == '"' => {
t.push(tok); t.push(tok);
@ -74,7 +65,7 @@ fn peek_until_closing_quote(
'\\' => { '\\' => {
t.push(tok); t.push(tok);
t.push(match toks.peek_forward(1) { t.push(match toks.peek_forward(1) {
Some(tok) => *tok, Some(tok) => tok,
None => return Err((format!("Expected {}.", q), tok.pos).into()), None => return Err((format!("Expected {}.", q), tok.pos).into()),
}); });
} }
@ -85,7 +76,7 @@ fn peek_until_closing_quote(
None => return Err((format!("Expected {}.", q), tok.pos).into()), None => return Err((format!("Expected {}.", q), tok.pos).into()),
}; };
if next.kind == '{' { if next.kind == '{' {
t.push(*next); t.push(next);
toks.peek_forward(1); toks.peek_forward(1);
t.append(&mut peek_until_closing_curly_brace(toks)?); t.append(&mut peek_until_closing_curly_brace(toks)?);
} }
@ -97,7 +88,7 @@ fn peek_until_closing_quote(
Ok(t) Ok(t)
} }
pub(crate) fn peek_until_newline(toks: &mut PeekMoreIterator<IntoIter<Token>>) { pub(crate) fn peek_until_newline(toks: &mut Lexer) {
while let Some(tok) = toks.peek() { while let Some(tok) = toks.peek() {
if tok.kind == '\n' { if tok.kind == '\n' {
break; break;
@ -106,10 +97,10 @@ pub(crate) fn peek_until_newline(toks: &mut PeekMoreIterator<IntoIter<Token>>) {
} }
} }
pub(crate) fn peek_escape(toks: &mut PeekMoreIterator<IntoIter<Token>>) -> SassResult<String> { pub(crate) fn peek_escape(toks: &mut Lexer) -> SassResult<String> {
let mut value = 0; let mut value = 0;
let first = match toks.peek() { let first = match toks.peek() {
Some(t) => *t, Some(t) => t,
None => return Ok(String::new()), None => return Ok(String::new()),
}; };
let mut span = first.pos; let mut span = first.pos;
@ -155,7 +146,7 @@ pub(crate) fn peek_escape(toks: &mut PeekMoreIterator<IntoIter<Token>>) -> SassR
} }
pub(crate) fn peek_ident_no_interpolation( pub(crate) fn peek_ident_no_interpolation(
toks: &mut PeekMoreIterator<IntoIter<Token>>, toks: &mut Lexer,
unit: bool, unit: bool,
span_before: Span, span_before: Span,
) -> SassResult<Spanned<String>> { ) -> SassResult<Spanned<String>> {
@ -200,7 +191,7 @@ pub(crate) fn peek_ident_no_interpolation(
} }
fn peek_ident_body_no_interpolation( fn peek_ident_body_no_interpolation(
toks: &mut PeekMoreIterator<IntoIter<Token>>, toks: &mut Lexer,
unit: bool, unit: bool,
mut span: Span, mut span: Span,
) -> SassResult<Spanned<String>> { ) -> SassResult<Spanned<String>> {
@ -210,7 +201,7 @@ fn peek_ident_body_no_interpolation(
if unit && tok.kind == '-' { if unit && tok.kind == '-' {
// Disallow `-` followed by a dot or a digit digit in units. // Disallow `-` followed by a dot or a digit digit in units.
let second = match toks.peek_forward(1) { let second = match toks.peek_forward(1) {
Some(v) => *v, Some(v) => v,
None => break, None => break,
}; };

View File

@ -1,17 +1,11 @@
use std::vec::IntoIter; use crate::{error::SassResult, lexer::Lexer, Token};
use peekmore::PeekMoreIterator;
use crate::{error::SassResult, Token};
use super::{devour_whitespace, read_until_newline}; use super::{devour_whitespace, read_until_newline};
// Eat tokens until an open curly brace // Eat tokens until an open curly brace
// //
// Does not consume the open curly brace // Does not consume the open curly brace
pub(crate) fn read_until_open_curly_brace( pub(crate) fn read_until_open_curly_brace(toks: &mut Lexer) -> SassResult<Vec<Token>> {
toks: &mut PeekMoreIterator<IntoIter<Token>>,
) -> SassResult<Vec<Token>> {
let mut t = Vec::new(); let mut t = Vec::new();
let mut n = 0; let mut n = 0;
while let Some(tok) = toks.peek() { while let Some(tok) = toks.peek() {
@ -49,9 +43,7 @@ pub(crate) fn read_until_open_curly_brace(
Ok(t) Ok(t)
} }
pub(crate) fn read_until_closing_curly_brace( pub(crate) fn read_until_closing_curly_brace(toks: &mut Lexer) -> SassResult<Vec<Token>> {
toks: &mut PeekMoreIterator<IntoIter<Token>>,
) -> SassResult<Vec<Token>> {
let mut buf = Vec::new(); let mut buf = Vec::new();
let mut nesting = 0; let mut nesting = 0;
while let Some(tok) = toks.peek() { while let Some(tok) = toks.peek() {
@ -104,10 +96,7 @@ pub(crate) fn read_until_closing_curly_brace(
/// Read tokens into a vector until a matching closing quote is found /// Read tokens into a vector until a matching closing quote is found
/// ///
/// The closing quote is included in the output /// The closing quote is included in the output
pub(crate) fn read_until_closing_quote( pub(crate) fn read_until_closing_quote(toks: &mut Lexer, q: char) -> SassResult<Vec<Token>> {
toks: &mut PeekMoreIterator<IntoIter<Token>>,
q: char,
) -> SassResult<Vec<Token>> {
let mut t = Vec::new(); let mut t = Vec::new();
while let Some(tok) = toks.next() { while let Some(tok) = toks.next() {
match tok.kind { match tok.kind {
@ -130,7 +119,7 @@ pub(crate) fn read_until_closing_quote(
t.push(tok); t.push(tok);
match toks.peek() { match toks.peek() {
Some(tok @ Token { kind: '{', .. }) => { Some(tok @ Token { kind: '{', .. }) => {
t.push(*tok); t.push(tok);
toks.next(); toks.next();
t.append(&mut read_until_closing_curly_brace(toks)?); t.append(&mut read_until_closing_curly_brace(toks)?);
} }
@ -151,7 +140,7 @@ pub(crate) fn read_until_closing_quote(
} }
pub(crate) fn read_until_semicolon_or_closing_curly_brace( pub(crate) fn read_until_semicolon_or_closing_curly_brace(
toks: &mut PeekMoreIterator<IntoIter<Token>>, toks: &mut Lexer,
) -> SassResult<Vec<Token>> { ) -> SassResult<Vec<Token>> {
let mut t = Vec::new(); let mut t = Vec::new();
let mut nesting = 0; let mut nesting = 0;
@ -202,9 +191,7 @@ pub(crate) fn read_until_semicolon_or_closing_curly_brace(
Ok(t) Ok(t)
} }
pub(crate) fn read_until_closing_paren( pub(crate) fn read_until_closing_paren(toks: &mut Lexer) -> SassResult<Vec<Token>> {
toks: &mut PeekMoreIterator<IntoIter<Token>>,
) -> SassResult<Vec<Token>> {
let mut t = Vec::new(); let mut t = Vec::new();
let mut scope = 0; let mut scope = 0;
while let Some(tok) = toks.next() { while let Some(tok) = toks.next() {

View File

@ -1,13 +1,12 @@
use std::cmp::Ordering; use std::cmp::Ordering;
use peekmore::PeekMore;
use codemap::{Span, Spanned}; use codemap::{Span, Spanned};
use crate::{ use crate::{
color::Color, color::Color,
common::{Brackets, ListSeparator, Op, QuoteKind}, common::{Brackets, ListSeparator, Op, QuoteKind},
error::SassResult, error::SassResult,
lexer::Lexer,
parse::Parser, parse::Parser,
selector::Selector, selector::Selector,
unit::Unit, unit::Unit,
@ -511,12 +510,12 @@ impl Value {
None => return Err((format!("${}: {} is not a valid selector: it must be a string, a list of strings, or a list of lists of strings.", name, self.inspect(parser.span_before)?), parser.span_before).into()), None => return Err((format!("${}: {} is not a valid selector: it must be a string, a list of strings, or a list of lists of strings.", name, self.inspect(parser.span_before)?), parser.span_before).into()),
}; };
Ok(Parser { Ok(Parser {
toks: &mut string toks: &mut Lexer::new(
string
.chars() .chars()
.map(|c| Token::new(parser.span_before, c)) .map(|c| Token::new(parser.span_before, c))
.collect::<Vec<Token>>() .collect::<Vec<Token>>(),
.into_iter() ),
.peekmore(),
map: parser.map, map: parser.map,
path: parser.path, path: parser.path,
scopes: parser.scopes, scopes: parser.scopes,