Start work on heredocs

commit 6be8857926 (parent d2ac184c81)
binding.gyp

@@ -8,7 +8,8 @@
       ],
       "sources": [
         "src/parser.c",
-        "src/binding.cc"
+        "src/binding.cc",
+        "src/scanner.cc",
       ],
       "cflags_c": [
         "-std=c99",
(corpus tests)

@@ -101,6 +101,29 @@ cat a b 2> /dev/null
     (file_redirect (file_descriptor) (file_descriptor))
     (command_name))))
 
+===============================
+Heredoc redirects
+===============================
+
+node <<JS
+console.log("hi")
+JS
+
+bash -c <<JS
+echo hi
+JS
+
+---
+
+(program
+  (command (simple_command
+    (command_name)
+    (heredoc_redirect (heredoc))))
+  (command (simple_command
+    (command_name)
+    (argument)
+    (heredoc_redirect (heredoc)))))
+
 ===============================
 Variable expansions
 ===============================
grammar.js (16 lines changed)
@@ -3,6 +3,10 @@ module.exports = grammar({
 
   inline: $ => [$.control_operator],
 
+  externals: $ => [
+    $.heredoc
+  ],
+
   rules: {
     program: $ => repeat($.command),
 
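A note that is not part of the diff: tree-sitter identifies external tokens by their position in this externals list, so the TokenType enum in the new scanner (src/scanner.cc below) has to declare its values in the same order, and the valid_symbols array handed to scan appears to be indexed by those same values. A minimal sketch of the correspondence for the single external declared here:

    // src/scanner.cc (sketch of the correspondence; the real enum appears in the new file below)
    // externals: $ => [ $.heredoc ]  ->  index 0
    enum TokenType {
      HEREDOC  // valid_symbols[HEREDOC]: is a heredoc token expected at this point?
    };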
@@ -29,9 +33,10 @@ module.exports = grammar({
         $.operator_expansion
-      ))
-      repeat(
-        $.file_redirect
-      )
+      )),
+      repeat(choice(
+        $.file_redirect,
+        $.heredoc_redirect
+      ))
     ),
 
     pipeline: $ => prec.left(seq(
@@ -83,6 +88,11 @@ module.exports = grammar({
       )
     ),
 
+    heredoc_redirect: $ => seq(
+      choice('<<', '<<-'),
+      $.heredoc
+    ),
+
     file_descriptor: $ => token(prec(1, /\d+/)),
 
     leading_word: $ => /[^\s=|;:{}]+/,
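The rule accepts both << and <<-. In bash, <<- additionally strips leading tabs from the body lines and from the terminator line, which the work-in-progress scanner below does not yet handle. A sketch of the terminator comparison that <<- would eventually need, using a hypothetical helper that is not part of this commit:

    #include <string>

    // Hypothetical helper: when the operator was '<<-', ignore leading tabs
    // on the candidate terminator line before comparing it to the delimiter.
    static bool line_ends_heredoc(const std::wstring &line,
                                  const std::wstring &delimiter,
                                  bool dash_operator) {
      size_t start = 0;
      if (dash_operator)
        while (start < line.size() && line[start] == L'\t') start++;
      return line.compare(start, std::wstring::npos, delimiter) == 0;
    }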
src/grammar.json (generated from grammar.js)

@@ -105,8 +105,17 @@
         {
           "type": "REPEAT",
           "content": {
-            "type": "SYMBOL",
-            "name": "file_redirect"
+            "type": "CHOICE",
+            "members": [
+              {
+                "type": "SYMBOL",
+                "name": "file_redirect"
+              },
+              {
+                "type": "SYMBOL",
+                "name": "heredoc_redirect"
+              }
+            ]
           }
         }
       ]
@@ -349,6 +358,28 @@
         }
       ]
     },
+    "heredoc_redirect": {
+      "type": "SEQ",
+      "members": [
+        {
+          "type": "CHOICE",
+          "members": [
+            {
+              "type": "STRING",
+              "value": "<<"
+            },
+            {
+              "type": "STRING",
+              "value": "<<-"
+            }
+          ]
+        },
+        {
+          "type": "SYMBOL",
+          "name": "heredoc"
+        }
+      ]
+    },
     "file_descriptor": {
       "type": "TOKEN",
       "content": {
@@ -389,7 +420,12 @@
     }
   ],
   "conflicts": [],
-  "externals": [],
+  "externals": [
+    {
+      "type": "SYMBOL",
+      "name": "heredoc"
+    }
+  ],
   "inline": [
     "control_operator"
  ]
src/parser.c (2524 lines changed; regenerated parser, diff suppressed because it is too large)
src/scanner.cc (new file)

@@ -0,0 +1,90 @@
+#include <tree_sitter/parser.h>
+#include <string>
+#include <cwctype>
+
+namespace {
+
+using std::wstring;
+
+enum TokenType {
+  HEREDOC
+};
+
+struct Scanner {
+  void skip(TSLexer *lexer) {
+    lexer->advance(lexer, true);
+  }
+
+  void advance(TSLexer *lexer) {
+    lexer->advance(lexer, false);
+  }
+
+  void reset() {}
+
+  bool serialize(TSExternalTokenState state) { return true; }
+
+  void deserialize(TSExternalTokenState state) {}
+
+  bool scan(TSLexer *lexer, const bool *valid_symbols) {
+    wstring heredoc_content;
+
+    while (iswalpha(lexer->lookahead)) {
+      heredoc_content += lexer->lookahead;
+      advance(lexer);
+    }
+
+    if (lexer->lookahead != '\n') return false;
+
+    wstring leading_word;
+
+    for (;;) {
+      advance(lexer);
+
+      while (iswalpha(lexer->lookahead)) {
+        leading_word += lexer->lookahead;
+        advance(lexer);
+      }
+
+      if (leading_word == heredoc_content || lexer->lookahead == '\0') break;
+      if (lexer->lookahead == '\n') leading_word.clear();
+    }
+
+    return true;
+  }
+};
+
+}
+
+extern "C" {
+
+void *tree_sitter_bash_external_scanner_create() {
+  return new Scanner();
+}
+
+bool tree_sitter_bash_external_scanner_scan(void *payload, TSLexer *lexer,
+                                            const bool *valid_symbols) {
+  Scanner *scanner = static_cast<Scanner *>(payload);
+  return scanner->scan(lexer, valid_symbols);
+}
+
+void tree_sitter_bash_external_scanner_reset(void *payload) {
+  Scanner *scanner = static_cast<Scanner *>(payload);
+  scanner->reset();
+}
+
+bool tree_sitter_bash_external_scanner_serialize(void *payload, TSExternalTokenState state) {
+  Scanner *scanner = static_cast<Scanner *>(payload);
+  return scanner->serialize(state);
+}
+
+void tree_sitter_bash_external_scanner_deserialize(void *payload, TSExternalTokenState state) {
+  Scanner *scanner = static_cast<Scanner *>(payload);
+  scanner->deserialize(state);
+}
+
+void tree_sitter_bash_external_scanner_destroy(void *payload) {
+  Scanner *scanner = static_cast<Scanner *>(payload);
+  delete scanner;
+}
+
+}
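Reading the scan method above (a summary, not text from the commit): it reads the alphabetic word following << as the delimiter, requires a newline, then consumes whole lines until it reaches a line whose leading word equals the delimiter or until end of input, and reports that whole region as a single heredoc token; valid_symbols is not consulted yet, and serialize/deserialize are still stubs, so no state survives across parses. The same delimiter-matching loop, rewritten against a plain string so it can be exercised without a TSLexer (a standalone sketch with a hypothetical helper name, not part of the commit):

    #include <cstddef>
    #include <cwctype>
    #include <string>

    // Sketch: given text positioned just after '<<', return the length of the
    // heredoc (delimiter word, newline, body lines, terminator line), or
    // std::wstring::npos if a heredoc cannot start here.
    static size_t heredoc_length(const std::wstring &input) {
      size_t pos = 0;

      // Read the delimiter word, e.g. "JS" in "<<JS".
      std::wstring delimiter;
      while (pos < input.size() && iswalpha(input[pos])) delimiter += input[pos++];
      if (delimiter.empty() || pos >= input.size() || input[pos] != L'\n')
        return std::wstring::npos;

      // Consume lines until one whose leading word equals the delimiter.
      while (pos < input.size()) {
        pos++;  // step past the newline
        std::wstring leading_word;
        while (pos < input.size() && iswalpha(input[pos])) leading_word += input[pos++];
        if (leading_word == delimiter) return pos;
        while (pos < input.size() && input[pos] != L'\n') pos++;  // skip rest of line
      }
      return pos;  // unterminated heredoc runs to end of input, like the scanner above
    }

Fed the text of the first corpus example (the JS delimiter, the console.log line, and the closing JS line), this returns the length of that whole span, which is what the external scanner marks as one heredoc token.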