diff --git a/src/parse/mod.rs b/src/parse/mod.rs
index 001183f..59f3cb3 100644
--- a/src/parse/mod.rs
+++ b/src/parse/mod.rs
@@ -459,7 +459,13 @@ impl<'a> Parser<'a> {
         found_whitespace
     }
 
-    fn read_until_newline(&mut self) {
+    /// Eat tokens until a newline
+    ///
+    /// This exists largely to eat silent comments, "//"
+    /// We only have to check for \n as the lexing step normalizes all newline characters
+    ///
+    /// The newline is consumed
+    pub fn read_until_newline(&mut self) {
         while let Some(tok) = self.toks.next() {
             if tok.kind == '\n' {
                 break;
diff --git a/src/parse/variable.rs b/src/parse/variable.rs
index 40428f4..3de8565 100644
--- a/src/parse/variable.rs
+++ b/src/parse/variable.rs
@@ -3,10 +3,7 @@ use codemap::Spanned;
 use crate::{
     common::Identifier,
     error::SassResult,
-    utils::{
-        peek_ident_no_interpolation, read_until_closing_paren, read_until_closing_quote,
-        read_until_newline,
-    },
+    utils::{peek_ident_no_interpolation, read_until_closing_paren, read_until_closing_quote},
     value::Value,
     Token,
 };
@@ -124,7 +121,7 @@ impl<'a> Parser<'a> {
            '/' => {
                let next = self.toks.next().unwrap();
                match self.toks.peek() {
-                    Some(Token { kind: '/', .. }) => read_until_newline(self.toks),
+                    Some(Token { kind: '/', .. }) => self.read_until_newline(),
                    Some(..) | None => val_toks.push(next),
                };
                continue;