remove peekmore dependency

Connor Skees 2021-07-11 15:12:46 -04:00
parent 7ad2da040f
commit adfecfa83e
24 changed files with 231 additions and 222 deletions

View File

@ -57,7 +57,6 @@ num-traits = "0.2.14"
once_cell = "1.5.2"
rand = { version = "0.8", optional = true }
codemap = "0.1.3"
peekmore = "1.0"
wasm-bindgen = { version = "0.2.68", optional = true }
beef = "0.5"
phf = { version = "0.9", features = ["macros"] }

View File

@ -7,20 +7,87 @@ use crate::Token;
const FORM_FEED: char = '\x0C';
#[derive(Debug, Clone)]
pub(crate) struct Lexer<'a> {
buf: Peekable<Chars<'a>>,
pos: usize,
file: &'a Arc<File>,
pub(crate) struct Lexer {
buf: Vec<Token>,
cursor: usize,
amt_peeked: usize,
}
impl<'a> Iterator for Lexer<'a> {
impl Lexer {
fn peek_cursor(&self) -> usize {
self.cursor + self.amt_peeked
}
pub fn peek(&self) -> Option<Token> {
self.buf.get(self.peek_cursor()).copied()
}
pub fn reset_cursor(&mut self) {
self.amt_peeked = 0;
}
pub fn advance_cursor(&mut self) {
self.amt_peeked += 1;
}
pub fn move_cursor_back(&mut self) {
self.amt_peeked = self.amt_peeked.saturating_sub(1);
}
pub fn peek_next(&mut self) -> Option<Token> {
self.amt_peeked += 1;
self.peek()
}
pub fn peek_previous(&mut self) -> Option<Token> {
self.buf.get(self.peek_cursor() - 1).copied()
}
pub fn peek_forward(&mut self, n: usize) -> Option<Token> {
self.amt_peeked += n;
self.peek()
}
pub fn peek_backward(&mut self, n: usize) -> Option<Token> {
self.amt_peeked = self.amt_peeked.checked_sub(n)?;
self.peek()
}
pub fn truncate_iterator_to_cursor(&mut self) {
self.cursor += self.amt_peeked;
self.amt_peeked = 0;
}
}
impl Iterator for Lexer {
type Item = Token;
fn next(&mut self) -> Option<Self::Item> {
self.buf.get(self.cursor).copied().map(|tok| {
self.cursor += 1;
self.amt_peeked = self.amt_peeked.saturating_sub(1);
tok
})
}
}
struct TokenLexer<'a> {
buf: Peekable<Chars<'a>>,
cursor: usize,
file: Arc<File>,
}
impl<'a> Iterator for TokenLexer<'a> {
type Item = Token;
fn next(&mut self) -> Option<Self::Item> {
let kind = match self.buf.next()? {
FORM_FEED => '\n',
'\r' => {
if self.buf.peek() == Some(&'\n') {
self.pos += 1;
self.cursor += 1;
self.buf.next();
}
'\n'
@ -31,18 +98,29 @@ impl<'a> Iterator for Lexer<'a> {
let pos = self
.file
.span
.subspan(self.pos as u64, (self.pos + len) as u64);
self.pos += len;
.subspan(self.cursor as u64, (self.cursor + len) as u64);
self.cursor += len;
Some(Token { pos, kind })
}
}
impl<'a> Lexer<'a> {
pub fn new(file: &'a Arc<File>) -> Lexer<'a> {
Lexer {
impl Lexer {
pub fn new_from_file(file: &Arc<File>) -> Self {
let buf = TokenLexer {
file: Arc::clone(file),
buf: file.source().chars().peekable(),
pos: 0,
file,
cursor: 0,
}
.collect();
Self::new(buf)
}
pub fn new(buf: Vec<Token>) -> Lexer {
Lexer {
buf,
cursor: 0,
amt_peeked: 0,
}
}
}
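
For reference, a minimal sketch (not part of the commit) of how the cursor-based lookahead above is meant to be used. It assumes crate-internal scope where Lexer is visible (for example a test inside the lexer module); the file name, function name, and input string are illustrative only:

use std::sync::Arc;
use codemap::{CodeMap, File};

fn peek_cursor_demo() {
    // Build a codemap File the same way lib.rs does, then tokenize it up front.
    let mut map = CodeMap::new();
    let file: Arc<File> = map.add_file("demo.scss".into(), "a {}".into());
    let mut lexer = Lexer::new_from_file(&file);

    // `peek` is non-consuming and reads at `cursor + amt_peeked`.
    assert_eq!(lexer.peek().map(|t| t.kind), Some('a'));

    // `advance_cursor` only moves the peek cursor; nothing is consumed yet.
    lexer.advance_cursor();
    assert_eq!(lexer.peek().map(|t| t.kind), Some(' '));

    // `reset_cursor` rewinds the lookahead back to the real position...
    lexer.reset_cursor();
    assert_eq!(lexer.peek().map(|t| t.kind), Some('a'));

    // ...while `truncate_iterator_to_cursor` commits everything peeked so far,
    // so the next `next()` call starts after the peeked-over tokens.
    lexer.advance_cursor();
    lexer.truncate_iterator_to_cursor();
    assert_eq!(lexer.next().map(|t| t.kind), Some(' '));
}

The design point is that lookahead becomes plain arithmetic on amt_peeked over a pre-tokenized Vec<Token>, so arbitrary peeking and rewinding need no extra buffering of the kind peekmore previously provided.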

View File

@ -101,8 +101,6 @@ pub(crate) use beef::lean::Cow;
use codemap::CodeMap;
use peekmore::PeekMore;
pub use crate::error::{SassError as Error, SassResult as Result};
pub(crate) use crate::token::Token;
use crate::{
@ -288,10 +286,7 @@ pub fn from_path(p: &str, options: &Options) -> Result<String> {
let empty_span = file.span.subspan(0, 0);
let stmts = Parser {
toks: &mut Lexer::new(&file)
.collect::<Vec<Token>>()
.into_iter()
.peekmore(),
toks: &mut Lexer::new_from_file(&file),
map: &mut map,
path: p.as_ref(),
scopes: &mut Scopes::new(),
@ -334,10 +329,7 @@ pub fn from_string(p: String, options: &Options) -> Result<String> {
let file = map.add_file("stdin".into(), p);
let empty_span = file.span.subspan(0, 0);
let stmts = Parser {
toks: &mut Lexer::new(&file)
.collect::<Vec<Token>>()
.into_iter()
.peekmore(),
toks: &mut Lexer::new_from_file(&file),
map: &mut map,
path: Path::new(""),
scopes: &mut Scopes::new(),
@ -371,10 +363,7 @@ pub fn from_string(p: String) -> std::result::Result<String, JsValue> {
let empty_span = file.span.subspan(0, 0);
let stmts = Parser {
toks: &mut Lexer::new(&file)
.collect::<Vec<Token>>()
.into_iter()
.peekmore(),
toks: &mut Lexer::new_from_file(&file),
map: &mut map,
path: Path::new(""),
scopes: &mut Scopes::new(),
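
The public entry points keep their signatures; only the internal token source changes to Lexer::new_from_file. A hedged usage sketch from the caller's side (assumes Options implements Default as in the published crate; the input string is illustrative):

fn main() {
    // Compile a small SCSS snippet; internally this now goes through Lexer::new_from_file.
    let css = grass::from_string(
        "a { color: red; }".to_string(),
        &grass::Options::default(),
    )
    .expect("valid SCSS");
    println!("{}", css);
}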

View File

@ -410,7 +410,7 @@ impl CompressedFormatter {
fn write_block_entry(&self, buf: &mut Vec<u8>, styles: &[BlockEntry]) -> SassResult<()> {
let mut styles = styles.iter();
while let Some(style) = styles.next() {
for style in &mut styles {
match style {
BlockEntry::Style(s) => {
let value = s.value.node.to_css_string(s.value.span)?;

View File

@ -21,7 +21,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_func_args(&mut self) -> SassResult<FuncArgs> {
let mut args: Vec<FuncArg> = Vec::new();
let mut close_paren_span: Span = match self.toks.peek() {
Some(Token { pos, .. }) => *pos,
Some(Token { pos, .. }) => pos,
None => return Err(("expected \")\".", self.span_before).into()),
};
@ -164,7 +164,7 @@ impl<'a> Parser<'a> {
}
if let Some(Token { kind: '$', pos }) = self.toks.peek() {
span = span.merge(*pos);
span = span.merge(pos);
self.toks.advance_cursor();
let v = peek_ident_no_interpolation(self.toks, false, self.span_before)?;
@ -229,10 +229,9 @@ impl<'a> Parser<'a> {
continue;
}
Some(Token { kind: '.', pos }) => {
let pos = *pos;
self.toks.next();
if let Some(Token { kind: '.', pos }) = self.toks.peek().copied() {
if let Some(Token { kind: '.', pos }) = self.toks.peek() {
if !name.is_empty() {
return Err(("expected \")\".", pos).into());
}
@ -324,7 +323,7 @@ impl<'a> Parser<'a> {
return Ok(CallArgs(args, span));
}
Some(Token { kind: ',', pos }) => {
span = span.merge(*pos);
span = span.merge(pos);
self.toks.next();
self.whitespace_or_comment();
continue;
@ -341,14 +340,14 @@ impl<'a> Parser<'a> {
self.expect_char('.')?;
}
Some(Token { pos, .. }) => {
return Err(("expected \")\".", *pos).into());
return Err(("expected \")\".", pos).into());
}
None => return Err(("expected \")\".", span).into()),
}
}
Some(Token { pos, .. }) => {
value?;
return Err(("expected \")\".", *pos).into());
return Err(("expected \")\".", pos).into());
}
None => return Err(("expected \")\".", span).into()),
}

View File

@ -1,10 +1,10 @@
use codemap::Spanned;
use num_traits::cast::ToPrimitive;
use peekmore::PeekMore;
use crate::{
common::Identifier,
error::SassResult,
lexer::Lexer,
parse::{ContextFlags, Parser, Stmt},
unit::Unit,
utils::{
@ -60,7 +60,7 @@ impl<'a> Parser<'a> {
loop {
self.whitespace_or_comment();
if let Some(Token { kind: '@', pos }) = self.toks.peek().copied() {
if let Some(Token { kind: '@', pos }) = self.toks.peek() {
self.toks.peek_forward(1);
let ident = peek_ident_no_interpolation(self.toks, false, pos)?;
if ident.as_str() != "else" {
@ -72,7 +72,7 @@ impl<'a> Parser<'a> {
break;
}
self.whitespace_or_comment();
if let Some(tok) = self.toks.peek().copied() {
if let Some(tok) = self.toks.peek() {
match tok.kind {
'i' if matches!(
self.toks.peek_forward(1),
@ -182,7 +182,7 @@ impl<'a> Parser<'a> {
Some(Token { kind: 't', pos })
| Some(Token { kind: 'T', pos })
| Some(Token { kind: '\\', pos }) => {
let span = *pos;
let span = pos;
let mut ident = match peek_ident_no_interpolation(toks, false, span) {
Ok(s) => s,
Err(..) => return false,
@ -266,7 +266,7 @@ impl<'a> Parser<'a> {
);
if self.flags.in_function() {
let these_stmts = Parser {
toks: &mut body.clone().into_iter().peekmore(),
toks: &mut Lexer::new(body.clone()),
map: self.map,
path: self.path,
scopes: self.scopes,
@ -290,7 +290,7 @@ impl<'a> Parser<'a> {
} else {
stmts.append(
&mut Parser {
toks: &mut body.clone().into_iter().peekmore(),
toks: &mut Lexer::new(body.clone()),
map: self.map,
path: self.path,
scopes: self.scopes,
@ -343,7 +343,7 @@ impl<'a> Parser<'a> {
while val.node.is_true() {
if self.flags.in_function() {
let these_stmts = Parser {
toks: &mut body.clone().into_iter().peekmore(),
toks: &mut Lexer::new(body.clone()),
map: self.map,
path: self.path,
scopes: self.scopes,
@ -367,7 +367,7 @@ impl<'a> Parser<'a> {
} else {
stmts.append(
&mut Parser {
toks: &mut body.clone().into_iter().peekmore(),
toks: &mut Lexer::new(body.clone()),
map: self.map,
path: self.path,
scopes: self.scopes,
@ -455,7 +455,7 @@ impl<'a> Parser<'a> {
if self.flags.in_function() {
let these_stmts = Parser {
toks: &mut body.clone().into_iter().peekmore(),
toks: &mut Lexer::new(body.clone()),
map: self.map,
path: self.path,
scopes: self.scopes,
@ -479,7 +479,7 @@ impl<'a> Parser<'a> {
} else {
stmts.append(
&mut Parser {
toks: &mut body.clone().into_iter().peekmore(),
toks: &mut Lexer::new(body.clone()),
map: self.map,
path: self.path,
scopes: self.scopes,

View File

@ -1,11 +1,11 @@
use codemap::Spanned;
use peekmore::PeekMore;
use crate::{
args::CallArgs,
atrule::Function,
common::{unvendor, Identifier},
error::SassResult,
lexer::Lexer,
scope::Scopes,
utils::{read_until_closing_curly_brace, read_until_semicolon_or_closing_curly_brace},
value::{SassFunction, Value},
@ -100,7 +100,7 @@ impl<'a> Parser<'a> {
};
let mut return_value = Parser {
toks: &mut body.into_iter().peekmore(),
toks: &mut Lexer::new(body),
map: self.map,
path: self.path,
scopes: if declared_at_root {

View File

@ -20,7 +20,7 @@ impl<'a> Parser<'a> {
if unit && tok.kind == '-' {
// Disallow `-` followed by a dot or a digit in units.
let second = match self.toks.peek_forward(1) {
Some(v) => *v,
Some(v) => v,
None => break,
};
@ -59,7 +59,7 @@ impl<'a> Parser<'a> {
buf.push_str(&self.escape(false)?);
}
'#' => {
if let Some(Token { kind: '{', .. }) = self.toks.peek_forward(1).copied() {
if let Some(Token { kind: '{', .. }) = self.toks.peek_forward(1) {
self.toks.next();
self.toks.next();
// TODO: if ident, interpolate literally
@ -136,7 +136,6 @@ impl<'a> Parser<'a> {
let Token { kind, pos } = self
.toks
.peek()
.copied()
.ok_or(("Expected identifier.", self.span_before))?;
let mut text = String::new();
if kind == '-' {
@ -163,7 +162,7 @@ impl<'a> Parser<'a> {
}
let Token { kind: first, pos } = match self.toks.peek() {
Some(v) => *v,
Some(v) => v,
None => return Err(("Expected identifier.", self.span_before).into()),
};
@ -205,7 +204,7 @@ impl<'a> Parser<'a> {
.peek()
.ok_or(("Expected identifier.", self.span_before))?;
let mut text = String::new();
if kind == &'-' {
if kind == '-' {
self.toks.next();
text.push('-');
@ -264,7 +263,7 @@ impl<'a> Parser<'a> {
}
'#' => {
if let Some(Token { kind: '{', pos }) = self.toks.peek() {
self.span_before = self.span_before.merge(*pos);
self.span_before = self.span_before.merge(pos);
self.toks.next();
let interpolation = self.parse_interpolation()?;
match interpolation.node {

View File

@ -1,7 +1,6 @@
use std::{ffi::OsStr, fs, path::Path, path::PathBuf};
use codemap::{Span, Spanned};
use peekmore::PeekMore;
use crate::{
common::{ListSeparator::Comma, QuoteKind},
@ -104,10 +103,7 @@ impl<'a> Parser<'a> {
String::from_utf8(fs::read(&name)?)?,
);
return Parser {
toks: &mut Lexer::new(&file)
.collect::<Vec<Token>>()
.into_iter()
.peekmore(),
toks: &mut Lexer::new_from_file(&file),
map: self.map,
path: &name,
scopes: self.scopes,
@ -141,7 +137,7 @@ impl<'a> Parser<'a> {
Some(Token { kind: '\'', .. })
| Some(Token { kind: '"', .. })
| Some(Token { kind: 'u', .. }) => {}
Some(Token { pos, .. }) => return Err(("Expected string.", *pos).into()),
Some(Token { pos, .. }) => return Err(("Expected string.", pos).into()),
None => return Err(("expected more input.", self.span_before).into()),
};
let Spanned {

View File

@ -1,10 +1,9 @@
use std::fmt;
use peekmore::PeekMore;
use crate::{
atrule::keyframes::{Keyframes, KeyframesSelector},
error::SassResult,
lexer::Lexer,
parse::Stmt,
utils::eat_whole_number,
Token,
@ -34,7 +33,7 @@ impl<'a, 'b> KeyframesSelectorParser<'a, 'b> {
fn parse_keyframes_selector(&mut self) -> SassResult<Vec<KeyframesSelector>> {
let mut selectors = Vec::new();
self.parser.whitespace_or_comment();
while let Some(tok) = self.parser.toks.peek().copied() {
while let Some(tok) = self.parser.toks.peek() {
match tok.kind {
't' | 'T' => {
let mut ident = self.parser.parse_identifier()?;
@ -128,7 +127,7 @@ impl<'a> Parser<'a> {
span = span.merge(tok.pos());
match tok.kind {
'#' => {
if let Some(Token { kind: '{', .. }) = self.toks.peek().copied() {
if let Some(Token { kind: '{', .. }) = self.toks.peek() {
self.toks.next();
string.push_str(&self.parse_interpolation()?.to_css_string(span)?);
} else {
@ -154,13 +153,11 @@ impl<'a> Parser<'a> {
string.push(' ');
}
'{' => {
// we must collect here because the parser is not generic over iterator
#[allow(clippy::needless_collect)]
let sel_toks: Vec<Token> =
string.chars().map(|x| Token::new(span, x)).collect();
let selector = KeyframesSelectorParser::new(&mut Parser {
toks: &mut sel_toks.into_iter().peekmore(),
toks: &mut Lexer::new(sel_toks),
map: self.map,
path: self.path,
scopes: self.scopes,

View File

@ -98,7 +98,7 @@ impl<'a> Parser<'a> {
return Ok(buf);
}
let next_tok = self.toks.peek().copied();
let next_tok = self.toks.peek();
let is_angle = next_tok.map_or(false, |t| t.kind == '<' || t.kind == '>');
if is_angle || matches!(next_tok, Some(Token { kind: '=', .. })) {
buf.push(' ');

View File

@ -2,12 +2,11 @@ use std::mem;
use codemap::Spanned;
use peekmore::PeekMore;
use crate::{
args::{CallArgs, FuncArgs},
atrule::mixin::{Content, Mixin, UserDefinedMixin},
error::SassResult,
lexer::Lexer,
scope::Scopes,
utils::read_until_closing_curly_brace,
Token,
@ -124,7 +123,7 @@ impl<'a> Parser<'a> {
let mut toks = read_until_closing_curly_brace(self.toks)?;
if let Some(tok) = self.toks.peek() {
toks.push(*tok);
toks.push(tok);
self.toks.next();
}
Some(toks)
@ -167,7 +166,7 @@ impl<'a> Parser<'a> {
});
let body = Parser {
toks: &mut body.into_iter().peekmore(),
toks: &mut Lexer::new(body),
map: self.map,
path: self.path,
scopes: self.scopes,
@ -234,7 +233,7 @@ impl<'a> Parser<'a> {
let stmts = if let Some(body) = content.content.clone() {
Parser {
toks: &mut body.into_iter().peekmore(),
toks: &mut Lexer::new(body),
map: self.map,
path: self.path,
scopes: &mut scope_at_decl,

View File

@ -1,7 +1,6 @@
use std::{convert::TryFrom, path::Path, vec::IntoIter};
use std::{convert::TryFrom, path::Path};
use codemap::{CodeMap, Span, Spanned};
use peekmore::{PeekMore, PeekMoreIterator};
use crate::{
atrule::{
@ -12,6 +11,7 @@ use crate::{
},
builtin::modules::{ModuleConfig, Modules},
error::SassResult,
lexer::Lexer,
scope::{Scope, Scopes},
selector::{
ComplexSelectorComponent, ExtendRule, ExtendedSelector, Extender, Selector, SelectorParser,
@ -68,7 +68,7 @@ pub(crate) enum Stmt {
/// hit (if there is one) is not important for now.
// todo: merge at_root and at_root_has_selector into an enum
pub(crate) struct Parser<'a> {
pub toks: &'a mut PeekMoreIterator<IntoIter<Token>>,
pub toks: &'a mut Lexer,
pub map: &'a mut CodeMap,
pub path: &'a Path,
pub global_scope: &'a mut Scope,
@ -114,19 +114,19 @@ impl<'a> Parser<'a> {
pub fn expect_char(&mut self, c: char) -> SassResult<()> {
match self.toks.peek() {
Some(Token { kind, pos }) if *kind == c => {
self.span_before = *pos;
Some(Token { kind, pos }) if kind == c => {
self.span_before = pos;
self.toks.next();
Ok(())
}
Some(Token { pos, .. }) => Err((format!("expected \"{}\".", c), *pos).into()),
Some(Token { pos, .. }) => Err((format!("expected \"{}\".", c), pos).into()),
None => Err((format!("expected \"{}\".", c), self.span_before).into()),
}
}
pub fn consume_char_if_exists(&mut self, c: char) -> bool {
if let Some(Token { kind, .. }) = self.toks.peek() {
if *kind == c {
if kind == c {
self.toks.next();
return true;
}
@ -150,7 +150,7 @@ impl<'a> Parser<'a> {
if self.flags.in_function() && !stmts.is_empty() {
return Ok(stmts);
}
self.span_before = *pos;
self.span_before = pos;
match kind {
'@' => {
self.toks.next();
@ -207,7 +207,7 @@ impl<'a> Parser<'a> {
} = self.parse_value(false, &|_| false)?;
span.merge(kind_string.span);
if let Some(Token { kind: ';', pos }) = self.toks.peek() {
kind_string.span.merge(*pos);
kind_string.span.merge(pos);
self.toks.next();
}
self.warn(&Spanned {
@ -222,7 +222,7 @@ impl<'a> Parser<'a> {
} = self.parse_value(false, &|_| false)?;
span.merge(kind_string.span);
if let Some(Token { kind: ';', pos }) = self.toks.peek() {
kind_string.span.merge(*pos);
kind_string.span.merge(pos);
self.toks.next();
}
self.debug(&Spanned {
@ -287,14 +287,14 @@ impl<'a> Parser<'a> {
}
}
'\u{0}'..='\u{8}' | '\u{b}'..='\u{1f}' => {
return Err(("expected selector.", *pos).into())
return Err(("expected selector.", pos).into())
}
'}' => {
self.toks.next();
break;
}
// dart-sass seems to special-case the error message here?
'!' | '{' => return Err(("expected \"}\".", *pos).into()),
'!' | '{' => return Err(("expected \"}\".", pos).into()),
_ => {
if self.flags.in_function() {
return Err((
@ -403,7 +403,7 @@ impl<'a> Parser<'a> {
span = span.merge(pos);
match kind {
'#' => {
if let Some(Token { kind: '{', .. }) = self.toks.peek().copied() {
if let Some(Token { kind: '{', .. }) = self.toks.peek() {
self.toks.next();
string.push_str(&self.parse_interpolation()?.to_css_string(span)?);
} else {
@ -447,15 +447,13 @@ impl<'a> Parser<'a> {
return Err(("expected \"{\".", span).into());
}
// we must collect here because the parser isn't generic over the iterator
#[allow(clippy::needless_collect)]
let sel_toks: Vec<Token> = string.chars().map(|x| Token::new(span, x)).collect();
let mut iter = sel_toks.into_iter().peekmore();
let mut lexer = Lexer::new(sel_toks);
let selector = SelectorParser::new(
&mut Parser {
toks: &mut iter,
toks: &mut lexer,
map: self.map,
path: self.path,
scopes: self.scopes,
@ -635,7 +633,7 @@ impl<'a> Parser<'a> {
'{' => break,
'#' => {
if let Some(Token { kind: '{', pos }) = self.toks.peek() {
self.span_before = self.span_before.merge(*pos);
self.span_before = self.span_before.merge(pos);
self.toks.next();
params.push_str(&self.parse_interpolation_as_string()?);
} else {
@ -780,9 +778,7 @@ impl<'a> Parser<'a> {
// return Err(("@extend may only be used within style rules.", self.span_before).into());
// }
let (value, is_optional) = Parser {
toks: &mut read_until_semicolon_or_closing_curly_brace(self.toks)?
.into_iter()
.peekmore(),
toks: &mut Lexer::new(read_until_semicolon_or_closing_curly_brace(self.toks)?),
map: self.map,
path: self.path,
scopes: self.scopes,
@ -888,7 +884,7 @@ impl<'a> Parser<'a> {
match tok.kind {
'{' => break,
'#' => {
if let Some(Token { kind: '{', pos }) = self.toks.peek().copied() {
if let Some(Token { kind: '{', pos }) = self.toks.peek() {
self.toks.next();
self.span_before = pos;
let interpolation = self.parse_interpolation()?;

View File

@ -1,7 +1,6 @@
use std::{convert::TryFrom, fs};
use codemap::Spanned;
use peekmore::PeekMore;
use crate::{
atrule::AtRuleKind,
@ -116,10 +115,7 @@ impl<'a> Parser<'a> {
.add_file(name.to_owned(), String::from_utf8(fs::read(&import)?)?);
let stmts = Parser {
toks: &mut Lexer::new(&file)
.collect::<Vec<Token>>()
.into_iter()
.peekmore(),
toks: &mut Lexer::new_from_file(&file),
map: self.map,
path: &import,
scopes: self.scopes,

View File

@ -27,36 +27,36 @@ impl<'a> Parser<'a> {
return None;
}
'(' => {
toks.push(*tok);
toks.push(tok);
self.toks.peek_forward(1);
let mut scope = 0;
while let Some(tok) = self.toks.peek() {
match tok.kind {
')' => {
if scope == 0 {
toks.push(*tok);
toks.push(tok);
self.toks.peek_forward(1);
break;
}
scope -= 1;
toks.push(*tok);
toks.push(tok);
self.toks.peek_forward(1);
}
'(' => {
toks.push(*tok);
toks.push(tok);
self.toks.peek_forward(1);
scope += 1;
}
_ => {
toks.push(*tok);
toks.push(tok);
self.toks.peek_forward(1);
}
}
}
}
_ => {
toks.push(*tok);
toks.push(tok);
self.toks.peek_forward(1);
}
}
@ -123,7 +123,7 @@ impl<'a> Parser<'a> {
property.push(':');
SelectorOrStyle::Selector(property)
}
c if is_name(*c) => {
c if is_name(c) => {
if let Some(toks) =
self.parse_style_value_when_no_space_after_semicolon()
{
@ -201,7 +201,7 @@ impl<'a> Parser<'a> {
) -> SassResult<Vec<Style>> {
let mut styles = Vec::new();
self.whitespace();
while let Some(tok) = self.toks.peek().copied() {
while let Some(tok) = self.toks.peek() {
match tok.kind {
'{' => {
self.toks.next();

View File

@ -27,7 +27,7 @@ impl<'a> Parser<'a> {
}
'#' => {
if let Some(Token { kind: '{', pos }) = self.toks.peek() {
self.span_before = *pos;
self.span_before = pos;
self.toks.next();
let interpolation = self.parse_interpolation()?;
buf.push_str(&interpolation.node.to_css_string(interpolation.span)?);
@ -315,7 +315,7 @@ impl<'a> Parser<'a> {
}
buf.push_str(&num);
} else {
self.toks.move_cursor_back().unwrap();
self.toks.move_cursor_back();
}
let next = match self.toks.peek() {
@ -365,7 +365,7 @@ impl<'a> Parser<'a> {
fn peek_escape(&mut self) -> SassResult<String> {
let mut value = 0;
let first = match self.toks.peek() {
Some(t) => *t,
Some(t) => t,
None => return Ok(String::new()),
};
let mut span = first.pos;

View File

@ -1,4 +1,4 @@
use std::{iter::Iterator, mem, vec::IntoIter};
use std::{iter::Iterator, mem};
use num_bigint::BigInt;
use num_rational::{BigRational, Rational64};
@ -6,13 +6,12 @@ use num_traits::{pow, One, ToPrimitive};
use codemap::{Span, Spanned};
use peekmore::{PeekMore, PeekMoreIterator};
use crate::{
builtin::GLOBAL_FUNCTIONS,
color::{Color, NAMED_COLORS},
common::{unvendor, Brackets, Identifier, ListSeparator, Op, QuoteKind},
error::SassResult,
lexer::Lexer,
unit::Unit,
utils::{eat_whole_number, is_name, IsWhitespace, ParsedNumber},
value::{Number, SassFunction, SassMap, Value},
@ -53,7 +52,7 @@ impl<'a> Parser<'a> {
pub(crate) fn parse_value(
&mut self,
in_paren: bool,
predicate: &dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool,
predicate: &dyn Fn(&mut Lexer) -> bool,
) -> SassResult<Spanned<Value>> {
self.whitespace();
@ -62,7 +61,7 @@ impl<'a> Parser<'a> {
| Some(Token { kind: ';', .. })
| Some(Token { kind: '{', .. })
| None => return Err(("Expected expression.", self.span_before).into()),
Some(Token { pos, .. }) => *pos,
Some(Token { pos, .. }) => pos,
};
if predicate(self.toks) {
@ -173,7 +172,7 @@ impl<'a> Parser<'a> {
in_paren: bool,
) -> SassResult<Spanned<Value>> {
Parser {
toks: &mut toks.into_iter().peekmore(),
toks: &mut Lexer::new(toks),
map: self.map,
path: self.path,
scopes: self.scopes,
@ -291,7 +290,7 @@ impl<'a> Parser<'a> {
fn parse_ident_value(
&mut self,
predicate: &dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool,
predicate: &dyn Fn(&mut Lexer) -> bool,
) -> SassResult<Spanned<IntermediateValue>> {
let Spanned { node: mut s, span } = self.parse_identifier()?;
@ -363,7 +362,7 @@ impl<'a> Parser<'a> {
fn parse_number(
&mut self,
predicate: &dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool,
predicate: &dyn Fn(&mut Lexer) -> bool,
) -> SassResult<Spanned<ParsedNumber>> {
let mut span = self.toks.peek().unwrap().pos;
let mut whole = eat_whole_number(self.toks);
@ -375,7 +374,7 @@ impl<'a> Parser<'a> {
});
}
let next_tok = *self.toks.peek().unwrap();
let next_tok = self.toks.peek().unwrap();
let dec_len = if next_tok.kind == '.' {
self.toks.next();
@ -395,7 +394,7 @@ impl<'a> Parser<'a> {
let mut times_ten = String::new();
let mut times_ten_is_postive = true;
if let Some(Token { kind: 'e', .. }) | Some(Token { kind: 'E', .. }) = self.toks.peek() {
if let Some(&tok) = self.toks.peek_next() {
if let Some(tok) = self.toks.peek_next() {
if tok.kind == '-' {
self.toks.next();
times_ten_is_postive = false;
@ -404,13 +403,11 @@ impl<'a> Parser<'a> {
times_ten = eat_whole_number(self.toks);
if times_ten.is_empty() {
return Err(
("Expected digit.", self.toks.peek().unwrap_or(&tok).pos).into()
);
return Err(("Expected digit.", self.toks.peek().unwrap_or(tok).pos).into());
} else if times_ten.len() > 2 {
return Err((
"Exponent too negative.",
self.toks.peek().unwrap_or(&tok).pos,
self.toks.peek().unwrap_or(tok).pos,
)
.into());
}
@ -420,15 +417,15 @@ impl<'a> Parser<'a> {
if times_ten.len() > 2 {
return Err(
("Exponent too large.", self.toks.peek().unwrap_or(&tok).pos).into(),
("Exponent too large.", self.toks.peek().unwrap_or(tok).pos).into()
);
}
}
}
}
if let Ok(Some(Token { pos, .. })) = self.toks.peek_previous() {
span = span.merge(*pos);
if let Some(Token { pos, .. }) = self.toks.peek_previous() {
span = span.merge(pos);
}
self.toks.reset_cursor();
@ -445,7 +442,7 @@ impl<'a> Parser<'a> {
self.whitespace_or_comment();
Ok(if let Some(Token { kind: ']', pos }) = self.toks.peek() {
span = span.merge(*pos);
span = span.merge(pos);
self.toks.next();
IntermediateValue::Value(HigherIntermediateValue::Literal(Value::List(
Vec::new(),
@ -473,14 +470,14 @@ impl<'a> Parser<'a> {
fn parse_dimension(
&mut self,
predicate: &dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool,
predicate: &dyn Fn(&mut Lexer) -> bool,
) -> SassResult<Spanned<IntermediateValue>> {
let Spanned {
node: val,
mut span,
} = self.parse_number(predicate)?;
let unit = if let Some(tok) = self.toks.peek() {
let Token { kind, .. } = *tok;
let Token { kind, .. } = tok;
match kind {
'a'..='z' | 'A'..='Z' | '_' | '\\' | '\u{7f}'..=std::char::MAX => {
let u = self.parse_identifier_no_interpolation(true)?;
@ -488,7 +485,7 @@ impl<'a> Parser<'a> {
Unit::from(u.node)
}
'-' => {
if let Some(Token { kind, .. }) = self.toks.peek_next().copied() {
if let Some(Token { kind, .. }) = self.toks.peek_next() {
self.toks.reset_cursor();
if matches!(kind, 'a'..='z' | 'A'..='Z' | '_' | '\\' | '\u{7f}'..=std::char::MAX)
{
@ -679,7 +676,7 @@ impl<'a> Parser<'a> {
fn in_interpolated_identifier_body(&mut self) -> bool {
match self.toks.peek() {
Some(Token { kind: '\\', .. }) => true,
Some(Token { kind, .. }) if is_name(*kind) => true,
Some(Token { kind, .. }) if is_name(kind) => true,
Some(Token { kind: '#', .. }) => {
let next_is_curly = matches!(self.toks.peek_next(), Some(Token { kind: '{', .. }));
self.toks.reset_cursor();
@ -701,9 +698,9 @@ impl<'a> Parser<'a> {
for _ in 0..6 {
if let Some(Token { kind, pos }) = self.toks.peek() {
if kind.is_ascii_hexdigit() {
span = span.merge(*pos);
self.span_before = *pos;
buf.push(*kind);
span = span.merge(pos);
self.span_before = pos;
buf.push(kind);
self.toks.next();
} else {
break;
@ -715,8 +712,8 @@ impl<'a> Parser<'a> {
buf.push('?');
for _ in 0..(8_usize.saturating_sub(buf.len())) {
if let Some(Token { kind: '?', pos }) = self.toks.peek() {
span = span.merge(*pos);
self.span_before = *pos;
span = span.merge(pos);
self.span_before = pos;
buf.push('?');
self.toks.next();
} else {
@ -743,9 +740,9 @@ impl<'a> Parser<'a> {
found_hex_digit = true;
if let Some(Token { kind, pos }) = self.toks.peek() {
if kind.is_ascii_hexdigit() {
span = span.merge(*pos);
self.span_before = *pos;
buf.push(*kind);
span = span.merge(pos);
self.span_before = pos;
buf.push(kind);
self.toks.next();
} else {
break;
@ -773,7 +770,7 @@ impl<'a> Parser<'a> {
fn parse_intermediate_value(
&mut self,
predicate: &dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool,
predicate: &dyn Fn(&mut Lexer) -> bool,
) -> Option<SassResult<Spanned<IntermediateValue>>> {
if predicate(self.toks) {
return None;
@ -832,7 +829,7 @@ impl<'a> Parser<'a> {
}
'#' => {
if let Some(Token { kind: '{', pos }) = self.toks.peek_forward(1) {
self.span_before = *pos;
self.span_before = pos;
self.toks.reset_cursor();
return Some(self.parse_ident_value(predicate));
}
@ -1048,7 +1045,7 @@ impl<'a> Parser<'a> {
struct IntermediateValueIterator<'a, 'b: 'a> {
parser: &'a mut Parser<'b>,
peek: Option<SassResult<Spanned<IntermediateValue>>>,
predicate: &'a dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool,
predicate: &'a dyn Fn(&mut Lexer) -> bool,
}
impl<'a, 'b: 'a> Iterator for IntermediateValueIterator<'a, 'b> {
@ -1063,10 +1060,7 @@ impl<'a, 'b: 'a> Iterator for IntermediateValueIterator<'a, 'b> {
}
impl<'a, 'b: 'a> IntermediateValueIterator<'a, 'b> {
pub fn new(
parser: &'a mut Parser<'b>,
predicate: &'a dyn Fn(&mut PeekMoreIterator<IntoIter<Token>>) -> bool,
) -> Self {
pub fn new(parser: &'a mut Parser<'b>, predicate: &'a dyn Fn(&mut Lexer) -> bool) -> Self {
Self {
parser,
peek: None,
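
Value-parsing predicates now take &mut Lexer rather than PeekMoreIterator<IntoIter<Token>>. A small crate-internal sketch (the function name is hypothetical) of what such a predicate looks like against the new type:

// Stop value parsing at a semicolon; `Token` is `Copy`, so `peek` returns it by value.
fn stop_at_semicolon(toks: &mut Lexer) -> bool {
    matches!(toks.peek(), Some(Token { kind: ';', .. }))
}

// Inside a Parser<'_> method this coerces to `&dyn Fn(&mut Lexer) -> bool`:
// let value = self.parse_value(false, &stop_at_semicolon)?;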

View File

@ -141,7 +141,7 @@ impl Attribute {
};
parser.whitespace();
let modifier = match parser.toks.peek().copied() {
let modifier = match parser.toks.peek() {
Some(Token {
kind: c @ 'a'..='z',
..

View File

@ -198,7 +198,7 @@ impl<'a, 'b> SelectorParser<'a, 'b> {
let mut components = vec![self.parse_simple_selector(None)?];
while let Some(Token { kind, .. }) = self.parser.toks.peek() {
if !is_simple_selector_start(*kind) {
if !is_simple_selector_start(kind) {
break;
}
@ -219,13 +219,13 @@ impl<'a, 'b> SelectorParser<'a, 'b> {
/// [the CSS algorithm]: https://drafts.csswg.org/css-syntax-3/#would-start-an-identifier
fn looking_at_identifier(&mut self) -> bool {
match self.parser.toks.peek() {
Some(Token { kind, .. }) if is_name_start(*kind) || kind == &'\\' => return true,
Some(Token { kind, .. }) if is_name_start(kind) || kind == '\\' => return true,
Some(Token { kind: '-', .. }) => {}
Some(..) | None => return false,
}
match self.parser.toks.peek_forward(1) {
Some(Token { kind, .. }) if is_name_start(*kind) || kind == &'-' || kind == &'\\' => {
Some(Token { kind, .. }) if is_name_start(kind) || kind == '-' || kind == '\\' => {
self.parser.toks.reset_cursor();
true
}
@ -391,7 +391,7 @@ impl<'a, 'b> SelectorParser<'a, 'b> {
match self.parser.toks.peek() {
Some(Token { kind: '*', pos }) => {
self.parser.span_before = self.parser.span_before.merge(*pos);
self.parser.span_before = self.parser.span_before.merge(pos);
self.parser.toks.next();
if let Some(Token { kind: '|', .. }) = self.parser.toks.peek() {
self.parser.toks.next();
@ -409,7 +409,7 @@ impl<'a, 'b> SelectorParser<'a, 'b> {
return Ok(SimpleSelector::Universal(Namespace::None));
}
Some(Token { kind: '|', pos }) => {
self.parser.span_before = self.parser.span_before.merge(*pos);
self.parser.span_before = self.parser.span_before.merge(pos);
self.parser.toks.next();
match self.parser.toks.peek() {
Some(Token { kind: '*', .. }) => {

View File

@ -1,8 +1,4 @@
use std::vec::IntoIter;
use peekmore::PeekMoreIterator;
use crate::Token;
use crate::{lexer::Lexer, Token};
use super::peek_until_newline;
@ -16,9 +12,7 @@ impl IsWhitespace for char {
}
}
pub(crate) fn devour_whitespace<I: Iterator<Item = W>, W: IsWhitespace>(
s: &mut PeekMoreIterator<I>,
) -> bool {
pub(crate) fn devour_whitespace(s: &mut Lexer) -> bool {
let mut found_whitespace = false;
while let Some(w) = s.peek() {
if !w.is_whitespace() {
@ -30,7 +24,7 @@ pub(crate) fn devour_whitespace<I: Iterator<Item = W>, W: IsWhitespace>(
found_whitespace
}
pub(crate) fn peek_whitespace(s: &mut PeekMoreIterator<IntoIter<Token>>) -> bool {
pub(crate) fn peek_whitespace(s: &mut Lexer) -> bool {
let mut found_whitespace = false;
while let Some(w) = s.peek() {
if !w.is_whitespace() {
@ -42,7 +36,7 @@ pub(crate) fn peek_whitespace(s: &mut PeekMoreIterator<IntoIter<Token>>) -> bool
found_whitespace
}
pub(crate) fn peek_whitespace_or_comment(s: &mut PeekMoreIterator<IntoIter<Token>>) -> bool {
pub(crate) fn peek_whitespace_or_comment(s: &mut Lexer) -> bool {
let mut found_whitespace = false;
while let Some(w) = s.peek() {
match w.kind {
@ -83,7 +77,7 @@ pub(crate) fn peek_whitespace_or_comment(s: &mut PeekMoreIterator<IntoIter<Token
/// We only have to check for \n as the lexing step normalizes all newline characters
///
/// The newline is consumed
pub(crate) fn read_until_newline<I: Iterator<Item = Token>>(toks: &mut PeekMoreIterator<I>) {
pub(crate) fn read_until_newline(toks: &mut Lexer) {
for tok in toks {
if tok.kind == '\n' {
return;

View File

@ -1,8 +1,4 @@
use std::vec::IntoIter;
use peekmore::PeekMoreIterator;
use crate::Token;
use crate::lexer::Lexer;
#[derive(Debug)]
pub(crate) struct ParsedNumber {
@ -46,7 +42,7 @@ impl ParsedNumber {
}
}
pub(crate) fn eat_whole_number(toks: &mut PeekMoreIterator<IntoIter<Token>>) -> String {
pub(crate) fn eat_whole_number(toks: &mut Lexer) -> String {
let mut buf = String::new();
while let Some(c) = toks.peek() {
if !c.kind.is_ascii_digit() {

View File

@ -1,19 +1,13 @@
use std::vec::IntoIter;
use codemap::{Span, Spanned};
use peekmore::PeekMoreIterator;
use crate::{error::SassResult, Token};
use crate::{error::SassResult, lexer::Lexer, Token};
use super::{as_hex, hex_char_for, is_name, is_name_start, peek_whitespace};
pub(crate) fn peek_until_closing_curly_brace(
toks: &mut PeekMoreIterator<IntoIter<Token>>,
) -> SassResult<Vec<Token>> {
pub(crate) fn peek_until_closing_curly_brace(toks: &mut Lexer) -> SassResult<Vec<Token>> {
let mut t = Vec::new();
let mut nesting = 0;
while let Some(tok) = toks.peek().copied() {
while let Some(tok) = toks.peek() {
match tok.kind {
q @ '"' | q @ '\'' => {
t.push(tok);
@ -35,7 +29,7 @@ pub(crate) fn peek_until_closing_curly_brace(
toks.advance_cursor();
}
'/' => {
let next = *toks
let next = toks
.peek_forward(1)
.ok_or(("Expected expression.", tok.pos))?;
match toks.peek() {
@ -54,12 +48,9 @@ pub(crate) fn peek_until_closing_curly_brace(
Ok(t)
}
fn peek_until_closing_quote(
toks: &mut PeekMoreIterator<IntoIter<Token>>,
q: char,
) -> SassResult<Vec<Token>> {
fn peek_until_closing_quote(toks: &mut Lexer, q: char) -> SassResult<Vec<Token>> {
let mut t = Vec::new();
while let Some(tok) = toks.peek().copied() {
while let Some(tok) = toks.peek() {
match tok.kind {
'"' if q == '"' => {
t.push(tok);
@ -74,7 +65,7 @@ fn peek_until_closing_quote(
'\\' => {
t.push(tok);
t.push(match toks.peek_forward(1) {
Some(tok) => *tok,
Some(tok) => tok,
None => return Err((format!("Expected {}.", q), tok.pos).into()),
});
}
@ -85,7 +76,7 @@ fn peek_until_closing_quote(
None => return Err((format!("Expected {}.", q), tok.pos).into()),
};
if next.kind == '{' {
t.push(*next);
t.push(next);
toks.peek_forward(1);
t.append(&mut peek_until_closing_curly_brace(toks)?);
}
@ -97,7 +88,7 @@ fn peek_until_closing_quote(
Ok(t)
}
pub(crate) fn peek_until_newline(toks: &mut PeekMoreIterator<IntoIter<Token>>) {
pub(crate) fn peek_until_newline(toks: &mut Lexer) {
while let Some(tok) = toks.peek() {
if tok.kind == '\n' {
break;
@ -106,10 +97,10 @@ pub(crate) fn peek_until_newline(toks: &mut PeekMoreIterator<IntoIter<Token>>) {
}
}
pub(crate) fn peek_escape(toks: &mut PeekMoreIterator<IntoIter<Token>>) -> SassResult<String> {
pub(crate) fn peek_escape(toks: &mut Lexer) -> SassResult<String> {
let mut value = 0;
let first = match toks.peek() {
Some(t) => *t,
Some(t) => t,
None => return Ok(String::new()),
};
let mut span = first.pos;
@ -155,7 +146,7 @@ pub(crate) fn peek_escape(toks: &mut PeekMoreIterator<IntoIter<Token>>) -> SassR
}
pub(crate) fn peek_ident_no_interpolation(
toks: &mut PeekMoreIterator<IntoIter<Token>>,
toks: &mut Lexer,
unit: bool,
span_before: Span,
) -> SassResult<Spanned<String>> {
@ -200,7 +191,7 @@ pub(crate) fn peek_ident_no_interpolation(
}
fn peek_ident_body_no_interpolation(
toks: &mut PeekMoreIterator<IntoIter<Token>>,
toks: &mut Lexer,
unit: bool,
mut span: Span,
) -> SassResult<Spanned<String>> {
@ -210,7 +201,7 @@ fn peek_ident_body_no_interpolation(
if unit && tok.kind == '-' {
// Disallow `-` followed by a dot or a digit in units.
let second = match toks.peek_forward(1) {
Some(v) => *v,
Some(v) => v,
None => break,
};

View File

@ -1,17 +1,11 @@
use std::vec::IntoIter;
use peekmore::PeekMoreIterator;
use crate::{error::SassResult, Token};
use crate::{error::SassResult, lexer::Lexer, Token};
use super::{devour_whitespace, read_until_newline};
// Eat tokens until an open curly brace
//
// Does not consume the open curly brace
pub(crate) fn read_until_open_curly_brace(
toks: &mut PeekMoreIterator<IntoIter<Token>>,
) -> SassResult<Vec<Token>> {
pub(crate) fn read_until_open_curly_brace(toks: &mut Lexer) -> SassResult<Vec<Token>> {
let mut t = Vec::new();
let mut n = 0;
while let Some(tok) = toks.peek() {
@ -49,9 +43,7 @@ pub(crate) fn read_until_open_curly_brace(
Ok(t)
}
pub(crate) fn read_until_closing_curly_brace(
toks: &mut PeekMoreIterator<IntoIter<Token>>,
) -> SassResult<Vec<Token>> {
pub(crate) fn read_until_closing_curly_brace(toks: &mut Lexer) -> SassResult<Vec<Token>> {
let mut buf = Vec::new();
let mut nesting = 0;
while let Some(tok) = toks.peek() {
@ -104,10 +96,7 @@ pub(crate) fn read_until_closing_curly_brace(
/// Read tokens into a vector until a matching closing quote is found
///
/// The closing quote is included in the output
pub(crate) fn read_until_closing_quote(
toks: &mut PeekMoreIterator<IntoIter<Token>>,
q: char,
) -> SassResult<Vec<Token>> {
pub(crate) fn read_until_closing_quote(toks: &mut Lexer, q: char) -> SassResult<Vec<Token>> {
let mut t = Vec::new();
while let Some(tok) = toks.next() {
match tok.kind {
@ -130,7 +119,7 @@ pub(crate) fn read_until_closing_quote(
t.push(tok);
match toks.peek() {
Some(tok @ Token { kind: '{', .. }) => {
t.push(*tok);
t.push(tok);
toks.next();
t.append(&mut read_until_closing_curly_brace(toks)?);
}
@ -151,7 +140,7 @@ pub(crate) fn read_until_closing_quote(
}
pub(crate) fn read_until_semicolon_or_closing_curly_brace(
toks: &mut PeekMoreIterator<IntoIter<Token>>,
toks: &mut Lexer,
) -> SassResult<Vec<Token>> {
let mut t = Vec::new();
let mut nesting = 0;
@ -202,9 +191,7 @@ pub(crate) fn read_until_semicolon_or_closing_curly_brace(
Ok(t)
}
pub(crate) fn read_until_closing_paren(
toks: &mut PeekMoreIterator<IntoIter<Token>>,
) -> SassResult<Vec<Token>> {
pub(crate) fn read_until_closing_paren(toks: &mut Lexer) -> SassResult<Vec<Token>> {
let mut t = Vec::new();
let mut scope = 0;
while let Some(tok) = toks.next() {

View File

@ -1,13 +1,12 @@
use std::cmp::Ordering;
use peekmore::PeekMore;
use codemap::{Span, Spanned};
use crate::{
color::Color,
common::{Brackets, ListSeparator, Op, QuoteKind},
error::SassResult,
lexer::Lexer,
parse::Parser,
selector::Selector,
unit::Unit,
@ -511,12 +510,12 @@ impl Value {
None => return Err((format!("${}: {} is not a valid selector: it must be a string, a list of strings, or a list of lists of strings.", name, self.inspect(parser.span_before)?), parser.span_before).into()),
};
Ok(Parser {
toks: &mut string
.chars()
.map(|c| Token::new(parser.span_before, c))
.collect::<Vec<Token>>()
.into_iter()
.peekmore(),
toks: &mut Lexer::new(
string
.chars()
.map(|c| Token::new(parser.span_before, c))
.collect::<Vec<Token>>(),
),
map: parser.map,
path: parser.path,
scopes: parser.scopes,