use crate::common::{Keyword, QuoteKind, Symbol};
use crate::error::SassResult;
use crate::lexer::Lexer;
use crate::value::Value;
use crate::{Scope, Token, TokenKind};
use std::iter::{Iterator, Peekable};

pub(crate) trait IsWhitespace {
    fn is_whitespace(&self) -> bool;
}

/// Consume consecutive whitespace tokens, returning whether any were found.
pub(crate) fn devour_whitespace<I: Iterator<Item = W>, W: IsWhitespace>(
    s: &mut Peekable<I>,
) -> bool {
    let mut found_whitespace = false;
    while let Some(w) = s.peek() {
        if !w.is_whitespace() {
            break;
        }
        found_whitespace = true;
        s.next();
    }
    found_whitespace
}

pub(crate) trait IsComment {
    fn is_comment(&self) -> bool;
}

/// Like `devour_whitespace`, but also consumes comment tokens.
pub(crate) fn devour_whitespace_or_comment<I: Iterator<Item = W>, W: IsWhitespace + IsComment>(
    s: &mut Peekable<I>,
) -> bool {
    let mut found_whitespace = false;
    while let Some(w) = s.peek() {
        if !w.is_whitespace() && !w.is_comment() {
            break;
        }
        found_whitespace = true;
        s.next();
    }
    found_whitespace
}

/// Parse the body of an `#{...}` interpolation up to its closing brace,
/// resolving variables, evaluating the result as a `Value`, and re-lexing
/// its unquoted string representation into tokens.
pub(crate) fn parse_interpolation<I: Iterator<Item = Token>>(
    tokens: &mut I,
    scope: &Scope,
) -> SassResult<Vec<Token>> {
    let mut val = Vec::new();
    while let Some(tok) = tokens.next() {
        match tok.kind {
            TokenKind::Symbol(Symbol::CloseCurlyBrace) => break,
            TokenKind::Symbol(Symbol::OpenCurlyBrace) => {
                todo!("invalid character in interpolation")
            }
            TokenKind::Variable(ref v) => {
                val.extend(Lexer::new(&scope.get_var(v)?.to_string()).collect::<Vec<Token>>())
            }
            TokenKind::Interpolation => val.extend(parse_interpolation(tokens, scope)?),
            _ => val.push(tok),
        }
    }
    Ok(Lexer::new(
        &Value::from_tokens(&mut val.into_iter().peekable(), scope)
            .unwrap()
            .to_string()
            .replace("\"", "")
            .replace("'", ""),
    )
    .collect::<Vec<Token>>())
}

/// The value of a variable declaration, along with whether it was
/// marked `!default`.
pub(crate) struct VariableDecl {
    pub val: Value,
    pub default: bool,
}

impl VariableDecl {
    pub const fn new(val: Value, default: bool) -> VariableDecl {
        VariableDecl { val, default }
    }
}

/// Collect the tokens of a variable's value up to the terminating `;`,
/// tracking `#{`/`{`..`}` nesting and the `!default` flag, and evaluate
/// them into a `Value`.
pub(crate) fn eat_variable_value<I: Iterator<Item = Token>>(
    toks: &mut Peekable<I>,
    scope: &Scope,
) -> SassResult<VariableDecl> {
    devour_whitespace(toks);
    let mut default = false;
    let mut raw: Vec<Token> = Vec::new();
    let mut nesting = 0;
    while let Some(tok) = toks.peek() {
        match tok.kind {
            TokenKind::Symbol(Symbol::SemiColon) => {
                toks.next();
                break;
            }
            TokenKind::Keyword(Keyword::Default) => {
                toks.next();
                default = true
            }
            TokenKind::Interpolation | TokenKind::Symbol(Symbol::OpenCurlyBrace) => {
                nesting += 1;
                raw.push(toks.next().unwrap());
            }
            TokenKind::Symbol(Symbol::CloseCurlyBrace) => {
                if nesting == 0 {
                    break;
                } else {
                    nesting -= 1;
                    raw.push(toks.next().unwrap());
                }
            }
            _ => raw.push(toks.next().unwrap()),
        }
    }
    devour_whitespace(toks);
    let val = Value::from_tokens(&mut raw.into_iter().peekable(), scope).unwrap();
    Ok(VariableDecl::new(val, default))
}

/// Concatenate consecutive identifier, number, and interpolation tokens
/// into a single string.
pub(crate) fn flatten_ident<I: Iterator<Item = Token>>(
    toks: &mut Peekable<I>,
    scope: &Scope,
) -> SassResult<String> {
    let mut s = String::new();
    while let Some(tok) = toks.peek() {
        match tok.kind.clone() {
            TokenKind::Interpolation => {
                toks.next();
                s.push_str(
                    &parse_interpolation(toks, scope)?
                        .iter()
                        .map(|x| x.kind.to_string())
                        .collect::<String>(),
                )
            }
            TokenKind::Ident(ref i) => {
                toks.next();
                s.push_str(i)
            }
            TokenKind::Number(ref n) => {
                toks.next();
                s.push_str(n)
            }
            _ => break,
        }
    }
    Ok(s)
}

/// Parse a quoted string delimited by `q` (single or double quote),
/// handling backslash escapes and `#{...}` interpolation, and return it
/// wrapped in quotes of the matching kind.
pub(crate) fn parse_quoted_string<I: Iterator<Item = Token>>(
    toks: &mut Peekable<I>,
    scope: &Scope,
    q: TokenKind,
) -> SassResult<String> {
    let mut s = String::new();
    let mut is_escaped = false;
    while let Some(tok) = toks.next() {
        match tok.kind {
            TokenKind::Symbol(Symbol::DoubleQuote)
                if !is_escaped && q == TokenKind::Symbol(Symbol::DoubleQuote) =>
            {
                break
            }
            TokenKind::Symbol(Symbol::SingleQuote)
                if !is_escaped && q == TokenKind::Symbol(Symbol::SingleQuote) =>
            {
                break
            }
            TokenKind::Symbol(Symbol::BackSlash) if !is_escaped => is_escaped = true,
            TokenKind::Symbol(Symbol::BackSlash) => s.push('\\'),
            TokenKind::Interpolation => {
                s.push_str(
                    &parse_interpolation(toks, scope)?
                        .iter()
                        .map(|x| x.kind.to_string())
                        .collect::<String>(),
                );
                continue;
            }
            _ => {}
        }
        if is_escaped && tok.kind != TokenKind::Symbol(Symbol::BackSlash) {
            is_escaped = false;
        }
        s.push_str(&tok.kind.to_string());
    }
    let quotes = match q {
        TokenKind::Symbol(Symbol::DoubleQuote) => QuoteKind::Double,
        TokenKind::Symbol(Symbol::SingleQuote) => QuoteKind::Single,
        _ => unreachable!(),
    };
    Ok(format!("{}{}{}", quotes, s, quotes))
}
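
// A minimal usage sketch for the devouring helpers above, kept behind
// `#[cfg(test)]` so it does not affect the build. `Tok` is a hypothetical
// stand-in for the crate's real `Token` type, introduced only to keep the
// example self-contained; it is not part of this module's API.
#[cfg(test)]
mod tests {
    use super::{devour_whitespace, devour_whitespace_or_comment, IsComment, IsWhitespace};

    #[derive(Debug, PartialEq)]
    enum Tok {
        Space,
        Comment,
        Ident(char),
    }

    impl IsWhitespace for Tok {
        fn is_whitespace(&self) -> bool {
            matches!(self, Tok::Space)
        }
    }

    impl IsComment for Tok {
        fn is_comment(&self) -> bool {
            matches!(self, Tok::Comment)
        }
    }

    #[test]
    fn devours_leading_whitespace_only() {
        let mut toks = vec![Tok::Space, Tok::Space, Tok::Ident('a')]
            .into_iter()
            .peekable();
        // The two leading spaces are consumed and reported...
        assert!(devour_whitespace(&mut toks));
        // ...while the first non-whitespace token is left in place.
        assert_eq!(toks.next(), Some(Tok::Ident('a')));
    }

    #[test]
    fn devours_whitespace_and_comments() {
        let mut toks = vec![Tok::Comment, Tok::Space, Tok::Ident('b')]
            .into_iter()
            .peekable();
        // Comments and whitespace are both skipped by the combined helper.
        assert!(devour_whitespace_or_comment(&mut toks));
        assert_eq!(toks.next(), Some(Tok::Ident('b')));
    }
}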