resolve edge case when interpolated parent selector expands to be larger than source span _and_ it contains an escaped character
parent 0ec8616e11
commit 3cb5e66fda
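
In short: when a parent selector is substituted by interpolation (`#{&}`), the
selector text that gets re-lexed can be longer than the source span it is
attributed to, so per-token span arithmetic can run past the end of the
original span. The diff below threads an `is_expanded` flag through `Lexer`:
`new_from_string` detects expansion with a length check, and `span_at_index`
falls back to the entire span when the flag is set. A minimal standalone
sketch of the failing shape, using the new test's input (illustrative Rust,
not grass's real API):

    // The child rule's selector is written as "#{&}" (4 bytes in the source),
    // but once `&` resolves to the parent selector `abcde \a`, the text the
    // lexer re-lexes is longer than the span it maps back to.
    fn main() {
        let source_text = "#{&}"; // what the span in the source file covers
        let resolved = r"abcde \a"; // what actually gets re-lexed
        let is_expanded = resolved.len() as u64 > source_text.len() as u64;
        assert!(is_expanded); // token offsets would overrun the original span
        println!(
            "span covers {} bytes, re-lexed text is {} bytes",
            source_text.len(),
            resolved.len()
        );
    }
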
@@ -2,12 +2,7 @@ use std::fmt::{self, Write};
 
 use codemap::Span;
 
-use crate::{
-    ast::CssStmt,
-    error::SassResult,
-    lexer::{Lexer, TokenLexer},
-    parse::MediaQueryParser,
-};
+use crate::{ast::CssStmt, error::SassResult, lexer::Lexer, parse::MediaQueryParser};
 
 #[derive(Debug, Clone)]
 pub(crate) struct MediaRule {
@@ -59,7 +54,7 @@ impl MediaQuery {
     }
 
     pub fn parse_list(list: &str, span: Span) -> SassResult<Vec<Self>> {
-        let toks = Lexer::new(TokenLexer::new(list.chars().peekable()).collect(), span);
+        let toks = Lexer::new_from_string(list, span);
 
         MediaQueryParser::new(toks).parse()
     }
@@ -25,7 +25,7 @@ use crate::{
     common::{unvendor, BinaryOp, Identifier, ListSeparator, QuoteKind, UnaryOp},
     error::{SassError, SassResult},
     interner::InternedString,
-    lexer::{Lexer, TokenLexer},
+    lexer::Lexer,
     parse::{
         AtRootQueryParser, CssParser, KeyframesSelectorParser, SassParser, ScssParser,
         StylesheetParser,
@@ -977,8 +977,7 @@ impl<'a> Visitor<'a> {
 
                 let span = query.span;
 
-                let query_toks =
-                    Lexer::new(TokenLexer::new(resolved.chars().peekable()).collect(), span);
+                let query_toks = Lexer::new_from_string(&resolved, span);
 
                 AtRootQueryParser::new(query_toks).parse()?
             }
@@ -1138,10 +1137,7 @@ impl<'a> Visitor<'a> {
         allows_placeholder: bool,
         span: Span,
     ) -> SassResult<SelectorList> {
-        let sel_toks = Lexer::new(
-            TokenLexer::new(selector_text.chars().peekable()).collect(),
-            span,
-        );
+        let sel_toks = Lexer::new_from_string(&selector_text, span);
 
         SelectorParser::new(sel_toks, allows_parent, allows_placeholder, span).parse()
     }
@@ -2783,10 +2779,7 @@ impl<'a> Visitor<'a> {
 
         if self.flags.in_keyframes() {
             let span = ruleset.selector_span;
-            let sel_toks = Lexer::new(
-                TokenLexer::new(selector_text.chars().peekable()).collect(),
-                span,
-            );
+            let sel_toks = Lexer::new_from_string(&selector_text, span);
             let parsed_selector =
                 KeyframesSelectorParser::new(sel_toks).parse_keyframes_selector()?;
 
@@ -16,6 +16,9 @@ pub(crate) struct Lexer<'a> {
     buf: Cow<'a, [Token]>,
     entire_span: Span,
     cursor: usize,
+    /// If the input this lexer is spanned over is larger than the original span.
+    /// This is possible due to interpolation.
+    is_expanded: bool,
 }
 
 impl<'a> Lexer<'a> {
@@ -34,6 +37,10 @@ impl<'a> Lexer<'a> {
     /// bounds, it returns the span of the last character. If the input is empty,
     /// it returns an empty span
    fn span_at_index(&self, idx: usize) -> Span {
+        if self.is_expanded {
+            return self.entire_span;
+        }
+
        let (start, len) = match self.buf.get(idx) {
            Some(tok) => (tok.pos, tok.kind.len_utf8()),
            None => match self.buf.last() {
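
Why the early return above: once `is_expanded` is set, per-token offsets
(`tok.pos` plus the token's UTF-8 length) no longer map into the original
span, and codemap rejects subspans that extend past the end of a file's span.
Falling back to `entire_span` trades precision for correctness: an error
inside expanded text points at the whole interpolated region instead of a
single character. A toy model of the fallback, with simplified stand-ins for
the real `Span` and token buffer (assumptions, not grass's types):

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span {
        lo: u64,
        hi: u64,
    }

    struct Lexer {
        entire_span: Span,
        token_offsets: Vec<u64>, // byte offset of each re-lexed token
        is_expanded: bool,
    }

    impl Lexer {
        fn span_at_index(&self, idx: usize) -> Span {
            // Offsets derived from re-lexed text are unreliable once that
            // text is longer than the span it came from.
            if self.is_expanded {
                return self.entire_span;
            }
            let lo = self.entire_span.lo + self.token_offsets[idx];
            Span { lo, hi: lo + 1 }
        }
    }

    fn main() {
        let lexer = Lexer {
            entire_span: Span { lo: 0, hi: 4 }, // covers "#{&}" in the source
            token_offsets: (0..17).collect(),   // 17 tokens of re-lexed text
            is_expanded: true,
        };
        // Without the early return, index 16 would yield 16..17, outside 0..4.
        assert_eq!(lexer.span_at_index(16), Span { lo: 0, hi: 4 });
    }
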
@@ -146,14 +153,22 @@ impl<'a> Iterator for TokenLexer<'a> {
 impl<'a> Lexer<'a> {
     pub fn new_from_file(file: &Arc<File>) -> Self {
         let buf = TokenLexer::new(file.source().chars().peekable()).collect();
-        Self::new(buf, file.span)
+        Self::new(buf, file.span, false)
     }
 
-    pub fn new(buf: Vec<Token>, entire_span: Span) -> Self {
+    pub fn new_from_string(s: &str, entire_span: Span) -> Self {
+        let is_expanded = s.len() as u64 > entire_span.len();
+        let buf = TokenLexer::new(s.chars().peekable()).collect();
+
+        Self::new(buf, entire_span, is_expanded)
+    }
+
+    fn new(buf: Vec<Token>, entire_span: Span, is_expanded: bool) -> Self {
         Lexer {
             buf: Cow::Owned(buf),
             cursor: 0,
             entire_span,
+            is_expanded,
         }
     }
 }
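
A design note on the constructors: `new` loses its `pub` and gains the
`is_expanded` parameter, so a `Lexer` is now built only through
`new_from_file` (never expanded) or `new_from_string` (expansion derived from
the length check). The call sites above that previously spelled out
`Lexer::new(TokenLexer::new(...).collect(), span)` shrink to one-liners, and
the expansion flag can no longer drift out of sync with the input it
describes.
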
@@ -247,7 +247,7 @@ pub(crate) trait BaseParser<'a> {
         }
 
         let c = std::char::from_u32(value)
-            .ok_or(("Invalid Unicode code point.", self.toks().span_from(start)))?;
+            .ok_or_else(|| ("Invalid Unicode code point.", self.toks().span_from(start)))?;
         if (identifier_start && is_name_start(c) && !c.is_ascii_digit())
             || (!identifier_start && is_name(c))
         {
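
The `ok_or` to `ok_or_else` changes in this commit (here and in the attribute
parser below) matter because `ok_or`'s argument is evaluated eagerly, even
when the `Option` is `Some`, so the span for the error tuple was being
computed on every success path too; `ok_or_else` defers that work to the
failure path. A quick generic illustration of the difference, unrelated to
grass's types:

    // ok_or builds its error value unconditionally; ok_or_else only runs
    // the closure when the Option is actually None.
    fn make_error() -> &'static str {
        println!("error value computed");
        "boom"
    }

    fn main() {
        let present: Option<i32> = Some(1);
        let _ = present.ok_or_else(make_error); // prints nothing
        let _ = present.ok_or(make_error()); // prints despite being Some
    }
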
@@ -13,7 +13,7 @@ use crate::{
     ast::*,
     common::{unvendor, Identifier, QuoteKind},
     error::SassResult,
-    lexer::{Lexer, TokenLexer},
+    lexer::Lexer,
     utils::{is_name, is_name_start, is_plain_css_import, opposite_bracket},
     ContextFlags, Options, Token,
 };
@@ -1534,10 +1534,7 @@ pub(crate) trait StylesheetParser<'a>: BaseParser<'a> + Sized {
             &base_name[start..end]
         };
 
-        let mut toks = Lexer::new(
-            TokenLexer::new(namespace.chars().peekable()).collect(),
-            url_span,
-        );
+        let mut toks = Lexer::new_from_string(&namespace, url_span);
 
         // if namespace is empty, avoid attempting to parse an identifier from
         // an empty string, as there will be no span to emit
@@ -45,7 +45,7 @@ fn attribute_name(parser: &mut SelectorParser) -> SassResult<QualifiedName> {
     let next = parser
         .toks
         .peek()
-        .ok_or(("Expected identifier.", parser.toks.current_span()))?;
+        .ok_or_else(|| ("Expected identifier.", parser.toks.current_span()))?;
     if next.kind == '*' {
         parser.toks.next();
         parser.expect_char('|')?;
@@ -110,7 +110,7 @@ impl Attribute {
         if parser
             .toks
             .peek()
-            .ok_or(("expected more input.", parser.toks.current_span()))?
+            .ok_or_else(|| ("expected more input.", parser.toks.current_span()))?
             .kind
             == ']'
         {
@@ -130,7 +130,7 @@ impl Attribute {
         let peek = parser
             .toks
             .peek()
-            .ok_or(("expected more input.", parser.toks.current_span()))?;
+            .ok_or_else(|| ("expected more input.", parser.toks.current_span()))?;
 
         let value = match peek.kind {
             '\'' | '"' => parser.parse_string()?,
@@ -913,6 +913,33 @@ test!(
     }"#,
     "::foo(/a/b/) {\n  color: ::foo(/a/b/);\n}\n"
 );
+test!(
+    interpolated_parent_selector_as_child_to_selector_with_escape_and_length_greater_than_child,
+    r#"abcde \a {
+        #{&} {
+            color: red;
+        }
+    }"#,
+    "abcde \\a abcde \\a {\n  color: red;\n}\n"
+);
+error!(
+    interpolated_parent_selector_as_child_to_selector_with_escape_and_invalid_escape_and_length_greater_than_child,
+    r#"abcde \a {
+        #{&} \1111111 {
+            color: red;
+        }
+    }"#,
+    "Error: Invalid Unicode code point."
+);
+test!(
+    interpolated_parent_selector_as_child_to_selector_with_attribute_selector_and_length_greater_than_child,
+    r#"abcde [a] {
+        #{&} {
+            color: red;
+        }
+    }"#,
+    "abcde [a] abcde [a] {\n  color: red;\n}\n"
+);
 error!(
     pseudo_element_interpolated_semicolon_no_brackets,
     r#"::foo(#{";"}) {
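
The three new tests pin the fix down from different angles: the first
exercises the escape-plus-expansion case end to end; the `error!` case checks
that an invalid escape inside an expanded selector still surfaces as a clean
"Error: Invalid Unicode code point."; and the third repeats the shape with an
attribute selector to confirm the behavior is not specific to escapes.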