Changed Grammar

Maix0 2024-06-09 19:16:09 +02:00
parent b0da66b7d8
commit fbb9977f4a
2132 changed files with 110314 additions and 233488 deletions


@@ -14,55 +14,42 @@
bool lex_normal_s275(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(275, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(14, lexer, s));
	lex_accept_token(anon_sym_SEMI_SEMI, lexer, s);
	return (lex_end_state(lexer, s));
}
bool lex_normal_s276(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(276, lexer, s));
	lex_accept_token(anon_sym_LBRACE, lexer, s);
	if (s->lookahead == '\\')
		return (lex_advance(16, lexer, s));
	return (lex_advance(234, lexer, s));
	if ((!s->eof && \
			set_contains(sym__comment_word_character_set_1(), 10, \
			s->lookahead)))
		return (lex_advance(533, lexer, s));
	return (lex_end_state(lexer, s));
}
bool lex_normal_s277(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(277, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(22, lexer, s));
	lex_accept_token(anon_sym_RBRACE, lexer, s);
	return (lex_end_state(lexer, s));
}
bool lex_normal_s278(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(278, lexer, s));
	lex_accept_token(anon_sym_RBRACE, lexer, s);
	if (s->lookahead == '\\')
		return (lex_advance(83, lexer, s));
	return (lex_advance(234, lexer, s));
	if ((!s->eof && \
			set_contains(sym__comment_word_character_set_1(), 10, \
			s->lookahead)))
		return (lex_advance(533, lexer, s));
	return (lex_end_state(lexer, s));
}
bool lex_normal_s279(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(aux_sym_heredoc_redirect_token1, lexer, \
		s);
	if (s->lookahead == '\n')
		return (lex_advance(279, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(24, lexer, s));
	lex_accept_token(anon_sym_AMP_AMP, lexer, s);
	return (lex_end_state(lexer, s));
}
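
Note on the generated states above: each function follows the same pattern of accepting a fallback token, branching on s->lookahead, and either advancing to another numbered state or ending the current one. The sketch below is a hypothetical illustration of how a driver could route a state number to these functions; the dispatcher name is an assumption and is not part of this commit, only the lex_normal_s275..s279 functions and the t_lexer / t_lexer_state types come from the diff itself.

/*
** Hypothetical sketch, not from this commit: dispatch a numbered lexer
** state to the matching generated state function. Assumes the project's
** lexer header (name unknown here) declares t_lexer, t_lexer_state and
** the lex_normal_sNNN functions shown in the diff above.
*/
bool lex_normal_dispatch(int state, t_lexer *lexer, t_lexer_state *s)
{
	if (state == 275)
		return (lex_normal_s275(lexer, s));
	if (state == 276)
		return (lex_normal_s276(lexer, s));
	if (state == 277)
		return (lex_normal_s277(lexer, s));
	if (state == 278)
		return (lex_normal_s278(lexer, s));
	if (state == 279)
		return (lex_normal_s279(lexer, s));
	return (false);
}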