Updated Grammar
parent 74d642f297
commit 175efe0f2f
1246 changed files with 109558 additions and 114993 deletions
@@ -14,63 +14,42 @@
bool lex_normal_s315(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(315, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(149, lexer, s));
	if ((s->lookahead >= 0x0b && s->lookahead <= '\r'))
		return (lex_advance(483, lexer, s));
	lex_accept_token(anon_sym_DASH_EQ, lexer, s);
	return (lex_end_state(lexer, s));
}

bool lex_normal_s316(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(316, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(25, lexer, s));
	if ((s->lookahead >= 0x0b && s->lookahead <= '\r'))
		return (lex_advance(484, lexer, s));
	lex_accept_token(anon_sym_STAR_EQ, lexer, s);
	return (lex_end_state(lexer, s));
}

bool lex_normal_s317(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(317, lexer, s));
	lex_accept_token(anon_sym_STAR_EQ, lexer, s);
	if (s->lookahead == '\\')
		return (lex_advance(53, lexer, s));
	return (lex_advance(218, lexer, s));
	if ((!s->eof && \
		set_contains(sym__comment_word_character_set_1(), 10, \
			s->lookahead)))
		return (lex_advance(497, lexer, s));
	return (lex_end_state(lexer, s));
}

bool lex_normal_s318(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(318, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(22, lexer, s));
	if ((s->lookahead >= 0x0b && s->lookahead <= '\r'))
		return (lex_advance(485, lexer, s));
	lex_accept_token(anon_sym_SLASH_EQ, lexer, s);
	return (lex_end_state(lexer, s));
}

bool lex_normal_s319(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(319, lexer, s));
	lex_accept_token(anon_sym_SLASH_EQ, lexer, s);
	if (s->lookahead == '\\')
		return (lex_advance(151, lexer, s));
	if ((s->lookahead >= 0x0b && s->lookahead <= '\r'))
		return (lex_advance(486, lexer, s));
	return (lex_advance(218, lexer, s));
	if ((!s->eof && \
		set_contains(sym__comment_word_character_set_1(), 10, \
			s->lookahead)))
		return (lex_advance(497, lexer, s));
	return (lex_end_state(lexer, s));
}
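
Each of these generated states follows the same pattern: provisionally accept the longest token matched so far, peek at s->lookahead to decide whether to keep scanning through lex_advance, and otherwise fall back to lex_end_state with the last accepted symbol. The sketch below illustrates that control flow in isolation; the t_lexer and t_lexer_state layouts, the token-id values, the trimmed demo_state_315 copy, and the tiny main driver are assumptions made only so the example compiles and runs, and the real definitions in this repository will differ.

/* Illustrative stand-ins only: the real t_lexer, t_lexer_state and token
** ids live elsewhere in the repository and almost certainly differ. */
#include <stdbool.h>
#include <stdio.h>
#include <string.h>

typedef struct s_lexer
{
	const char	*input;
	size_t		len;
}	t_lexer;

typedef struct s_lexer_state
{
	size_t	pos;
	int		lookahead;
	bool	eof;
	int		accepted;
}	t_lexer_state;

enum
{
	aux_sym_heredoc_redirect_token1 = 1,
	anon_sym_DASH_EQ = 2,
};

/* Remember the token this state would emit if nothing longer matches. */
static void	lex_accept_token(int sym, t_lexer *lexer, t_lexer_state *s)
{
	(void)lexer;
	s->accepted = sym;
}

/* Consume the lookahead and continue scanning in `state`; the generated
** code would dispatch back into lex_normal_s<state> on the next step. */
static bool	lex_advance(int state, t_lexer *lexer, t_lexer_state *s)
{
	printf("advance -> state %d\n", state);
	s->pos++;
	s->eof = (s->pos >= lexer->len);
	if (s->eof)
		s->lookahead = 0;
	else
		s->lookahead = (unsigned char)lexer->input[s->pos];
	return (true);
}

/* Stop this state: whatever was last accepted is the token produced. */
static bool	lex_end_state(t_lexer *lexer, t_lexer_state *s)
{
	(void)lexer;
	printf("end state, token id %d\n", s->accepted);
	return (false);
}

/* Trimmed copy of the s315 pattern above, kept only to make the sketch
** self-contained and runnable. */
static bool	demo_state_315(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, s);
	if (s->lookahead == '\n')
		return (lex_advance(315, lexer, s));
	lex_accept_token(anon_sym_DASH_EQ, lexer, s);
	return (lex_end_state(lexer, s));
}

int	main(void)
{
	t_lexer			lexer;
	t_lexer_state	s;

	lexer.input = "-=";
	lexer.len = strlen(lexer.input);
	s.pos = 0;
	s.lookahead = (unsigned char)lexer.input[0];
	s.eof = false;
	s.accepted = 0;
	demo_state_315(&lexer, &s);
	return (0);
}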