Updated Grammar
parent 74d642f297
commit 175efe0f2f
1246 changed files with 109558 additions and 114993 deletions
@@ -14,38 +14,61 @@
bool lex_normal_s305(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_GT_AMP_DASH, lexer, s);
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(305, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(39, lexer, s));
	if ((s->lookahead >= 0x0b && s->lookahead <= '\r'))
		return (lex_advance(464, lexer, s));
	return (lex_end_state(lexer, s));
}

bool lex_normal_s306(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_LT_LT, lexer, s);
	if (s->lookahead == '-')
		return (lex_advance(309, lexer, s));
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(306, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(97, lexer, s));
	return (lex_end_state(lexer, s));
}

bool lex_normal_s307(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_LT_LT, lexer, s);
	if (s->lookahead == '-')
		return (lex_advance(309, lexer, s));
	if (s->lookahead == '=')
		return (lex_advance(348, lexer, s));
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(307, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(94, lexer, s));
	if ((s->lookahead >= 0x0b && s->lookahead <= '\r'))
		return (lex_advance(465, lexer, s));
	return (lex_end_state(lexer, s));
}

bool lex_normal_s308(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_LT_LT, lexer, s);
	if (s->lookahead == '=')
		return (lex_advance(348, lexer, s));
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(308, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(76, lexer, s));
	return (lex_end_state(lexer, s));
}

bool lex_normal_s309(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_LT_LT_DASH, lexer, s);
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(309, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(68, lexer, s));
	if ((s->lookahead >= 0x0b && s->lookahead <= '\r'))
		return (lex_advance(466, lexer, s));
	return (lex_end_state(lexer, s));
}
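The generated states in this hunk all follow one accept-then-dispatch pattern: a state first records, via lex_accept_token, the token(s) that would be valid if lexing stopped at the current position, then inspects s->lookahead and either moves to a follow-up state with lex_advance or stops on the longest accepted token with lex_end_state. The sketch below shows that pattern in isolation with a toy recognizer for "<<" and "<<-". Everything named toy_* (the struct, the token ids, the driver loop in main) is invented for illustration and is not part of this commit or of the t_lexer API.

/* Minimal sketch of the accept-then-dispatch state pattern; toy_* names
** are illustrative only and mirror lex_accept_token / lex_advance /
** lex_end_state in role, not in signature. */
#include <stdbool.h>
#include <stdio.h>

enum { TOY_NONE, TOY_LT_LT, TOY_LT_LT_DASH };

typedef struct s_toy
{
	const char	*input;
	int			pos;
	int			lookahead;
	int			token;
	int			token_end;
	int			state;
}	t_toy;

/* Remember the token that is valid if lexing stops here. */
static void	toy_accept(t_toy *s, int token)
{
	s->token = token;
	s->token_end = s->pos;
}

/* Consume the lookahead, pick the next state, keep the machine running. */
static bool	toy_advance(t_toy *s, int next)
{
	s->pos++;
	s->lookahead = (unsigned char)s->input[s->pos];
	s->state = next;
	return (true);
}

/* Stop on the longest token accepted so far. */
static bool	toy_end(t_toy *s)
{
	(void)s;
	return (false);
}

static bool	toy_s0(t_toy *s)
{
	if (s->lookahead == '<')
		return (toy_advance(s, 1));
	return (toy_end(s));
}

static bool	toy_s1(t_toy *s)
{
	if (s->lookahead == '<')
		return (toy_advance(s, 2));
	return (toy_end(s));
}

static bool	toy_s2(t_toy *s)
{
	toy_accept(s, TOY_LT_LT);
	if (s->lookahead == '-')
		return (toy_advance(s, 3));
	return (toy_end(s));
}

static bool	toy_s3(t_toy *s)
{
	toy_accept(s, TOY_LT_LT_DASH);
	return (toy_end(s));
}

int	main(void)
{
	static bool	(*const states[])(t_toy *) = {toy_s0, toy_s1, toy_s2, toy_s3};
	t_toy		s = {"<<- EOF", 0, 0, TOY_NONE, 0, 0};

	s.lookahead = (unsigned char)s.input[0];
	while (states[s.state](&s))
		;
	printf("token %d, length %d\n", s.token, s.token_end);
	return (0);
}

Running the sketch on "<<- EOF" prints token 2 (TOY_LT_LT_DASH) with length 3: state 2 first accepts "<<", then the '-' lookahead upgrades the match to "<<-", exactly as s306/s307 above accept anon_sym_LT_LT before a '-' can advance them to s309 and anon_sym_LT_LT_DASH.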