Changed Grammar

Maix0 committed 2024-06-09 19:16:09 +02:00
parent b0da66b7d8
commit fbb9977f4a
2132 changed files with 110314 additions and 233488 deletions

@@ -14,52 +14,42 @@
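/*
** State 255: reached after consuming '<'. anon_sym_LT is accepted as
** the token so far, then the lookahead selects a longer operator:
** '<' advances to state 270, '=' to state 339. The second accept of
** anon_sym_do looks like a keyword path merged into this state by the
** generator; otherwise the token ends here.
*/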
bool lex_normal_s255(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_LT, lexer, s);
	if (s->lookahead == '<')
		return (lex_advance(270, lexer, s));
	if (s->lookahead == '=')
		return (lex_advance(339, lexer, s));
	lex_accept_token(anon_sym_do, lexer, s);
	return (lex_end_state(lexer, s));
}
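/*
** State 256: reached after consuming '>'. anon_sym_GT is accepted,
** then '&' advances to 264, '=' to 340, '>' to 260 and '|' to 265.
** The trailing anon_sym_do accept plus the '\' and comment-word
** character-set branches appear to be a merged word/keyword path.
*/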
bool lex_normal_s256(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_GT, lexer, s);
	if (s->lookahead == '&')
		return (lex_advance(264, lexer, s));
	if (s->lookahead == '=')
		return (lex_advance(340, lexer, s));
	if (s->lookahead == '>')
		return (lex_advance(260, lexer, s));
	if (s->lookahead == '|')
		return (lex_advance(265, lexer, s));
	lex_accept_token(anon_sym_do, lexer, s);
	if (s->lookahead == '\\')
		return (lex_advance(234, lexer, s));
	if ((!s->eof && \
		set_contains(sym__comment_word_character_set_1(), 10, \
		s->lookahead)))
		return (lex_advance(533, lexer, s));
	return (lex_end_state(lexer, s));
}
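/*
** State 257: another '>' dispatch. anon_sym_GT is accepted, '&'
** advances to 264, '>' to 259 and '|' to 265; anon_sym_fi is then
** accepted (a merged keyword path) before the state ends.
*/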
bool lex_normal_s257(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_GT, lexer, s);
	if (s->lookahead == '&')
		return (lex_advance(264, lexer, s));
	if (s->lookahead == '>')
		return (lex_advance(259, lexer, s));
	if (s->lookahead == '|')
		return (lex_advance(265, lexer, s));
	lex_accept_token(anon_sym_fi, lexer, s);
	return (lex_end_state(lexer, s));
}
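/*
** State 258: '>' dispatch merged with the "fi" keyword and word
** continuation: '=' advances to 340 and '>' to 260, then the '\'
** escape and comment-word character-set branches may extend the
** token; otherwise the state ends.
*/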
bool lex_normal_s258(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_GT, lexer, s);
	if (s->lookahead == '=')
		return (lex_advance(340, lexer, s));
	if (s->lookahead == '>')
		return (lex_advance(260, lexer, s));
	lex_accept_token(anon_sym_fi, lexer, s);
	if (s->lookahead == '\\')
		return (lex_advance(234, lexer, s));
	if ((!s->eof && \
		set_contains(sym__comment_word_character_set_1(), 10, \
		s->lookahead)))
		return (lex_advance(533, lexer, s));
	return (lex_end_state(lexer, s));
}
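/*
** State 259: terminal state. Both anon_sym_GT_GT (">>") and
** anon_sym_elif are marked accepted, and the state ends without
** consuming further lookahead.
*/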
bool lex_normal_s259(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_GT_GT, lexer, s);
	lex_accept_token(anon_sym_elif, lexer, s);
	return (lex_end_state(lexer, s));
}