upgrade dependencies
parent 1a5301d0fa
commit 47c4a421ac

Cargo.toml: 20 lines changed
@@ -20,7 +20,7 @@ required-features = ["commandline"]
 [lib]
 name = "grass"
 path = "src/lib.rs"
-crate-type = ["cdylib", "rlib"]
+# crate-type = ["cdylib", "rlib"]
 bench = false
 
 [[bench]]
@@ -45,15 +45,15 @@ harness = false
 
 
 [dependencies]
-clap = { version = "2.33.0", optional = true }
+clap = { version = "2.33.1", optional = true }
-num-rational = "0.2.3"
+num-rational = "0.3.0"
-num-bigint = "0.2.6"
+num-bigint = "0.3.0"
-num-traits = "0.2.11"
+num-traits = "0.2.12"
-once_cell = "1.3.1"
+once_cell = "1.4.0"
 rand = { version = "0.7.3", optional = true }
 codemap = "0.1.3"
-peekmore = "0.4.0"
+peekmore = "0.5.1"
-wasm-bindgen = { version = "0.2.60", optional = true }
+wasm-bindgen = { version = "0.2.63", optional = true }
 beef = "0.4.4"
 # criterion is not a dev-dependency because it makes tests take too
 # long to compile, and you cannot make dev-dependencies optional
@@ -76,8 +76,8 @@ profiling = []
 bench = ["criterion"]
 
 [dev-dependencies]
-tempfile = "3"
+tempfile = "3.1.0"
-paste = "0.1"
+paste = "0.1.17"
 
 [profile.release]
 debug = true
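The peekmore bump from 0.4.0 to 0.5.1 comes with renamed peek-cursor methods, which is what every source hunk below tracks: reset_view() becomes reset_cursor() and move_forward(1) becomes advance_cursor(). Below is a minimal sketch of the 0.5 cursor API as this diff uses it, assuming the crate's PeekMore extension trait and a plain char iterator rather than grass's Token stream:

// Minimal sketch of the peekmore 0.5 cursor API exercised by this commit.
// Assumption: the `PeekMore` extension trait provides `.peekmore()`; only
// `peek`, `advance_cursor`, and `reset_cursor` are shown in the hunks below.
use peekmore::PeekMore;

fn main() {
    let mut toks = "abc".chars().peekmore();

    // Look at the element under the peek cursor without consuming it.
    assert_eq!(toks.peek(), Some(&'a'));

    // 0.4 spelled this `move_forward(1)`; 0.5 advances the cursor by one.
    toks.advance_cursor();
    assert_eq!(toks.peek(), Some(&'b'));

    // 0.4 spelled this `reset_view()`; 0.5 rewinds the cursor to the front.
    toks.reset_cursor();
    assert_eq!(toks.peek(), Some(&'a'));

    // `next()` still consumes from the front, independent of peeking.
    assert_eq!(toks.next(), Some('a'));
}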
@@ -66,7 +66,7 @@ impl<'a> Parser<'a> {
 let interpolation = self.parse_interpolation()?;
 buf.push_str(&interpolation.node.to_css_string(interpolation.span)?);
 } else {
-self.toks.reset_view();
+self.toks.reset_cursor();
 break;
 }
 }
@@ -479,7 +479,7 @@ impl<'a> Parser<'a> {
 self.read_until_newline();
 }
 _ => {
-self.toks.reset_view();
+self.toks.reset_cursor();
 return found_whitespace;
 }
 },
@@ -520,7 +520,7 @@ impl<'a> Parser<'a> {
 self.toks.peek_forward(1);
 let ident = peek_ident_no_interpolation(self.toks, false, pos)?;
 if ident.as_str() != "else" {
-self.toks.reset_view();
+self.toks.reset_cursor();
 break;
 }
 self.toks.take(4).for_each(drop);
@@ -49,11 +49,11 @@ impl<'a> Parser<'a> {
 while let Some(tok) = self.toks.peek() {
 match tok.kind {
 ';' | '}' => {
-self.toks.reset_view();
+self.toks.reset_cursor();
 break;
 }
 '{' => {
-self.toks.reset_view();
+self.toks.reset_cursor();
 return None;
 }
 '(' => {
@@ -98,10 +98,10 @@ impl<'a> Parser<'a> {
 if let Some(first_char) = self.toks.peek() {
 if first_char.kind == '#' {
 if !matches!(self.toks.peek_forward(1), Some(Token { kind: '{', .. })) {
-self.toks.reset_view();
+self.toks.reset_cursor();
 return Ok(SelectorOrStyle::Selector(String::new()));
 }
-self.toks.reset_view();
+self.toks.reset_cursor();
 } else if !is_name_start(first_char.kind) && first_char.kind != '-' {
 return Ok(SelectorOrStyle::Selector(String::new()));
 }
@@ -382,10 +382,10 @@ impl<'a> Parser<'a> {
 '#' => {
 if let Some(Token { kind: '{', pos }) = self.toks.peek_forward(1) {
 self.span_before = *pos;
-self.toks.reset_view();
+self.toks.reset_cursor();
 return Some(self.parse_ident_value());
 }
-self.toks.reset_view();
+self.toks.reset_cursor();
 self.toks.next();
 let hex = match self.parse_hex() {
 Ok(v) => v,
@@ -660,7 +660,7 @@ impl<'a> Parser<'a> {
 peek_counter += peek_whitespace(self.toks);
 while let Some(tok) = self.toks.peek() {
 let kind = tok.kind;
-self.toks.move_forward(1);
+self.toks.advance_cursor();
 peek_counter += 1;
 if kind == '!'
 || kind == '%'
@@ -673,7 +673,7 @@ impl<'a> Parser<'a> {
 buf.push_str(&self.peek_escape()?);
 } else if kind == '#' {
 if let Some(Token { kind: '{', .. }) = self.toks.peek() {
-self.toks.move_forward(1);
+self.toks.advance_cursor();
 peek_counter += 1;
 let (interpolation, count) = self.peek_interpolation()?;
 peek_counter += count;
@@ -705,14 +705,14 @@ impl<'a> Parser<'a> {
 break;
 }
 }
-self.toks.reset_view();
+self.toks.reset_cursor();
 Ok(None)
 }
 
 fn peek_interpolation(&mut self) -> SassResult<(Spanned<Value>, usize)> {
 let vec = peek_until_closing_curly_brace(self.toks)?;
 let peek_counter = vec.len();
-self.toks.move_forward(1);
+self.toks.advance_cursor();
 let val = self.parse_value_from_vec(vec)?;
 Ok((
 Spanned {
@@ -171,7 +171,7 @@ impl<'a> Parser<'a> {
 default = true;
 }
 "important" => {
-self.toks.reset_view();
+self.toks.reset_cursor();
 val_toks.push(self.toks.next().unwrap());
 continue;
 }
@@ -46,14 +46,14 @@ fn attribute_name(parser: &mut Parser<'_>, start: Span) -> SassResult<QualifiedN
 }
 match parser.toks.peek_forward(1) {
 Some(v) if v.kind == '=' => {
-parser.toks.reset_view();
+parser.toks.reset_cursor();
 return Ok(QualifiedName {
 ident: name_or_namespace.node,
 namespace: Namespace::None,
 });
 }
 Some(..) => {
-parser.toks.reset_view();
+parser.toks.reset_cursor();
 }
 None => return Err(("expected more input.", name_or_namespace.span).into()),
 }
@@ -222,11 +222,11 @@ impl<'a, 'b> SelectorParser<'a, 'b> {
 
 match self.parser.toks.peek_forward(1) {
 Some(Token { kind, .. }) if is_name_start(*kind) || kind == &'-' || kind == &'\\' => {
-self.parser.toks.reset_view();
+self.parser.toks.reset_cursor();
 true
 }
 Some(..) | None => {
-self.parser.toks.reset_view();
+self.parser.toks.reset_cursor();
 false
 }
 }
@@ -106,7 +106,7 @@ pub(crate) fn eat_number<I: Iterator<Item = Token>>(
 break;
 }
 
-toks.reset_view();
+toks.reset_cursor();
 
 whole.push_str(&dec);
 
@@ -15,13 +15,13 @@ pub(crate) fn peek_until_closing_curly_brace<I: Iterator<Item = Token>>(
 match tok.kind {
 q @ '"' | q @ '\'' => {
 t.push(tok);
-toks.move_forward(1);
+toks.advance_cursor();
 t.extend(peek_until_closing_quote(toks, q)?);
 }
 '{' => {
 nesting += 1;
 t.push(tok);
-toks.move_forward(1);
+toks.advance_cursor();
 }
 '}' => {
 if nesting == 0 {
@@ -29,7 +29,7 @@ pub(crate) fn peek_until_closing_curly_brace<I: Iterator<Item = Token>>(
 } else {
 nesting -= 1;
 t.push(tok);
-toks.move_forward(1);
+toks.advance_cursor();
 }
 }
 '/' => {
@@ -44,7 +44,7 @@ pub(crate) fn peek_until_closing_curly_brace<I: Iterator<Item = Token>>(
 }
 _ => {
 t.push(tok);
-toks.move_forward(1);
+toks.advance_cursor();
 }
 }
 }
@@ -61,12 +61,12 @@ fn peek_until_closing_quote<I: Iterator<Item = Token>>(
 match tok.kind {
 '"' if q == '"' => {
 t.push(tok);
-toks.move_forward(1);
+toks.advance_cursor();
 break;
 }
 '\'' if q == '\'' => {
 t.push(tok);
-toks.move_forward(1);
+toks.advance_cursor();
 break;
 }
 '\\' => {
@@ -90,7 +90,7 @@ fn peek_until_closing_quote<I: Iterator<Item = Token>>(
 }
 _ => t.push(tok),
 }
-toks.move_forward(1);
+toks.advance_cursor();
 }
 Ok(t)
 }
@@ -100,7 +100,7 @@ fn peek_until_newline<I: Iterator<Item = Token>>(toks: &mut PeekMoreIterator<I>)
 if tok.kind == '\n' {
 break;
 }
-toks.move_forward(1);
+toks.advance_cursor();
 }
 }
 
@@ -111,7 +111,7 @@ fn peek_whitespace<I: Iterator<Item = W>, W: IsWhitespace>(s: &mut PeekMoreItera
 break;
 }
 found_whitespace = true;
-s.move_forward(1);
+s.advance_cursor();
 }
 found_whitespace
 }