Remove the `add_token` method from `Interpolation`

This commit is contained in:
Connor Skees 2022-12-28 10:37:22 -05:00
parent d63f0a54d2
commit 00845ad518
3 changed files with 31 insertions and 40 deletions

View File

@ -44,15 +44,6 @@ impl Interpolation {
} }
} }
pub fn add_token(&mut self, tok: Token) {
match self.contents.last_mut() {
Some(InterpolationPart::String(existing)) => existing.push(tok.kind),
_ => self
.contents
.push(InterpolationPart::String(tok.kind.to_string())),
}
}
pub fn add_char(&mut self, c: char) { pub fn add_char(&mut self, c: char) {
match self.contents.last_mut() { match self.contents.last_mut() {
Some(InterpolationPart::String(existing)) => existing.push(c), Some(InterpolationPart::String(existing)) => existing.push(c),

View File

@ -1025,11 +1025,11 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
buffer.add_interpolation(self.parse_single_interpolation()?); buffer.add_interpolation(self.parse_single_interpolation()?);
} else { } else {
self.toks_mut().next(); self.toks_mut().next();
buffer.add_token(next); buffer.add_char(next.kind);
} }
} }
_ => { _ => {
buffer.add_token(next); buffer.add_char(next.kind);
self.toks_mut().next(); self.toks_mut().next();
} }
} }
@ -1791,7 +1791,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
// Allow the "*prop: val", ":prop: val", "#prop: val", and ".prop: val" // Allow the "*prop: val", ":prop: val", "#prop: val", and ".prop: val"
// hacks. // hacks.
let mut name_buffer = Interpolation::new(); let mut name_buffer = Interpolation::new();
name_buffer.add_token(self.toks_mut().next().unwrap()); name_buffer.add_char(self.toks_mut().next().unwrap().kind);
name_buffer.add_string(self.raw_text(Self::whitespace)); name_buffer.add_string(self.raw_text(Self::whitespace));
name_buffer.add_interpolation(self.parse_interpolated_identifier()?); name_buffer.add_interpolation(self.parse_interpolated_identifier()?);
name_buffer name_buffer
@ -1913,7 +1913,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
while let Some(next) = self.toks().peek() { while let Some(next) = self.toks().peek() {
match next.kind { match next.kind {
'a'..='z' | 'A'..='Z' | '0'..='9' | '_' | '-' | '\u{80}'..=std::char::MAX => { 'a'..='z' | 'A'..='Z' | '0'..='9' | '_' | '-' | '\u{80}'..=std::char::MAX => {
buffer.add_token(next); buffer.add_char(next.kind);
self.toks_mut().next(); self.toks_mut().next();
} }
'\\' => { '\\' => {
@ -1944,7 +1944,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
match self.toks().peek() { match self.toks().peek() {
Some(tok) if is_name_start(tok.kind) => { Some(tok) if is_name_start(tok.kind) => {
buffer.add_token(tok); buffer.add_char(tok.kind);
self.toks_mut().next(); self.toks_mut().next();
} }
Some(Token { kind: '\\', .. }) => { Some(Token { kind: '\\', .. }) => {
@ -2006,12 +2006,12 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
buffer.add_interpolation(self.parse_single_interpolation()?); buffer.add_interpolation(self.parse_single_interpolation()?);
} else { } else {
self.toks_mut().next(); self.toks_mut().next();
buffer.add_token(tok); buffer.add_char(tok.kind);
} }
} }
'*' => { '*' => {
self.toks_mut().next(); self.toks_mut().next();
buffer.add_token(tok); buffer.add_char(tok.kind);
if self.scan_char('/') { if self.scan_char('/') {
buffer.add_char('/'); buffer.add_char('/');
@ -2030,7 +2030,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
} }
} }
_ => { _ => {
buffer.add_token(tok); buffer.add_char(tok.kind);
self.toks_mut().next(); self.toks_mut().next();
} }
} }
@ -2073,7 +2073,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
buffer.add_string(comment); buffer.add_string(comment);
} else { } else {
self.toks_mut().next(); self.toks_mut().next();
buffer.add_token(tok); buffer.add_char(tok.kind);
} }
wrote_newline = false; wrote_newline = false;
@ -2085,7 +2085,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
buffer.add_interpolation(self.parse_interpolated_identifier()?); buffer.add_interpolation(self.parse_interpolated_identifier()?);
} else { } else {
self.toks_mut().next(); self.toks_mut().next();
buffer.add_token(tok); buffer.add_char(tok.kind);
} }
wrote_newline = false; wrote_newline = false;
@ -2101,7 +2101,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
) )
{ {
self.toks_mut().next(); self.toks_mut().next();
buffer.add_token(tok); buffer.add_char(tok.kind);
} else { } else {
self.toks_mut().next(); self.toks_mut().next();
} }
@ -2124,7 +2124,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
} }
'(' | '{' | '[' => { '(' | '{' | '[' => {
self.toks_mut().next(); self.toks_mut().next();
buffer.add_token(tok); buffer.add_char(tok.kind);
brackets.push(opposite_bracket(tok.kind)); brackets.push(opposite_bracket(tok.kind));
wrote_newline = false; wrote_newline = false;
} }
@ -2132,7 +2132,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
if brackets.is_empty() { if brackets.is_empty() {
break; break;
} }
buffer.add_token(tok); buffer.add_char(tok.kind);
self.expect_char(brackets.pop().unwrap())?; self.expect_char(brackets.pop().unwrap())?;
wrote_newline = false; wrote_newline = false;
} }
@ -2140,7 +2140,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
if !allow_semicolon && brackets.is_empty() { if !allow_semicolon && brackets.is_empty() {
break; break;
} }
buffer.add_token(tok); buffer.add_char(tok.kind);
self.toks_mut().next(); self.toks_mut().next();
wrote_newline = false; wrote_newline = false;
} }
@ -2148,7 +2148,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
if !allow_colon && brackets.is_empty() { if !allow_colon && brackets.is_empty() {
break; break;
} }
buffer.add_token(tok); buffer.add_char(tok.kind);
self.toks_mut().next(); self.toks_mut().next();
wrote_newline = false; wrote_newline = false;
} }
@ -2156,7 +2156,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
let before_url = self.toks().cursor(); let before_url = self.toks().cursor();
if !self.scan_identifier("url", false)? { if !self.scan_identifier("url", false)? {
buffer.add_token(tok); buffer.add_char(tok.kind);
self.toks_mut().next(); self.toks_mut().next();
wrote_newline = false; wrote_newline = false;
continue; continue;
@ -2168,7 +2168,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
} }
None => { None => {
self.toks_mut().set_cursor(before_url); self.toks_mut().set_cursor(before_url);
buffer.add_token(tok); buffer.add_char(tok.kind);
self.toks_mut().next(); self.toks_mut().next();
} }
} }
@ -2179,7 +2179,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
if self.looking_at_identifier() { if self.looking_at_identifier() {
buffer.add_string(self.parse_identifier(false, false)?); buffer.add_string(self.parse_identifier(false, false)?);
} else { } else {
buffer.add_token(tok); buffer.add_char(tok.kind);
self.toks_mut().next(); self.toks_mut().next();
} }
wrote_newline = false; wrote_newline = false;
@ -2336,7 +2336,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
&& !matches!(self.toks().peek_n(1), Some(Token { kind: '{', .. }))) && !matches!(self.toks().peek_n(1), Some(Token { kind: '{', .. })))
{ {
starts_with_punctuation = true; starts_with_punctuation = true;
name_buffer.add_token(self.toks_mut().next().unwrap()); name_buffer.add_char(self.toks_mut().next().unwrap().kind);
name_buffer.add_string(self.raw_text(Self::whitespace)); name_buffer.add_string(self.raw_text(Self::whitespace));
} }
@ -2720,10 +2720,10 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
match tok.kind { match tok.kind {
'\\' => { '\\' => {
// Write a literal backslash because this text will be re-parsed. // Write a literal backslash because this text will be re-parsed.
buffer.add_token(tok); buffer.add_char(tok.kind);
self.toks_mut().next(); self.toks_mut().next();
match self.toks_mut().next() { match self.toks_mut().next() {
Some(tok) => buffer.add_token(tok), Some(tok) => buffer.add_char(tok.kind),
None => { None => {
return Err(("expected more input.", self.toks().current_span()).into()) return Err(("expected more input.", self.toks().current_span()).into())
} }
@ -2743,7 +2743,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
buffer.add_string(self.toks().raw_text(comment_start)); buffer.add_string(self.toks().raw_text(comment_start));
} }
} else { } else {
buffer.add_token(self.toks_mut().next().unwrap()); buffer.add_char(self.toks_mut().next().unwrap().kind);
} }
} }
'#' => { '#' => {
@ -2753,21 +2753,21 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
buffer.add_interpolation(self.parse_interpolated_identifier()?); buffer.add_interpolation(self.parse_interpolated_identifier()?);
} else { } else {
self.toks_mut().next(); self.toks_mut().next();
buffer.add_token(tok); buffer.add_char(tok.kind);
} }
} }
'\r' | '\n' => { '\r' | '\n' => {
if self.is_indented() { if self.is_indented() {
break; break;
} }
buffer.add_token(self.toks_mut().next().unwrap()); buffer.add_char(self.toks_mut().next().unwrap().kind);
} }
'!' | ';' | '{' | '}' => break, '!' | ';' | '{' | '}' => break,
'u' | 'U' => { 'u' | 'U' => {
let before_url = self.toks().cursor(); let before_url = self.toks().cursor();
if !self.scan_identifier("url", false)? { if !self.scan_identifier("url", false)? {
self.toks_mut().next(); self.toks_mut().next();
buffer.add_token(tok); buffer.add_char(tok.kind);
continue; continue;
} }
@ -2776,7 +2776,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
None => { None => {
self.toks_mut().set_cursor(before_url); self.toks_mut().set_cursor(before_url);
self.toks_mut().next(); self.toks_mut().next();
buffer.add_token(tok); buffer.add_char(tok.kind);
} }
} }
} }
@ -2784,7 +2784,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
if self.looking_at_identifier() { if self.looking_at_identifier() {
buffer.add_string(self.parse_identifier(false, false)?); buffer.add_string(self.parse_identifier(false, false)?);
} else { } else {
buffer.add_token(self.toks_mut().next().unwrap()); buffer.add_char(self.toks_mut().next().unwrap().kind);
} }
} }
} }
@ -2914,7 +2914,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
) { ) {
let next = next.unwrap().kind; let next = next.unwrap().kind;
buf.add_char(' '); buf.add_char(' ');
buf.add_token(self.toks_mut().next().unwrap()); buf.add_char(self.toks_mut().next().unwrap().kind);
if (next == '<' || next == '>') && self.scan_char('=') { if (next == '<' || next == '>') && self.scan_char('=') {
buf.add_char('='); buf.add_char('=');

View File

@ -1393,19 +1393,19 @@ impl<'a, 'c, P: StylesheetParser<'a>> ValueParser<'a, 'c, P> {
} }
'!' | '%' | '&' | '*'..='~' | '\u{80}'..=char::MAX => { '!' | '%' | '&' | '*'..='~' | '\u{80}'..=char::MAX => {
parser.toks_mut().next(); parser.toks_mut().next();
buffer.add_token(next); buffer.add_char(next.kind);
} }
'#' => { '#' => {
if matches!(parser.toks().peek_n(1), Some(Token { kind: '{', .. })) { if matches!(parser.toks().peek_n(1), Some(Token { kind: '{', .. })) {
buffer.add_interpolation(parser.parse_single_interpolation()?); buffer.add_interpolation(parser.parse_single_interpolation()?);
} else { } else {
parser.toks_mut().next(); parser.toks_mut().next();
buffer.add_token(next); buffer.add_char(next.kind);
} }
} }
')' => { ')' => {
parser.toks_mut().next(); parser.toks_mut().next();
buffer.add_token(next); buffer.add_char(next.kind);
return Ok(Some(buffer)); return Ok(Some(buffer));
} }