Updated Grammar
parent 74d642f297
commit 175efe0f2f
1246 changed files with 109558 additions and 114993 deletions
@@ -14,46 +14,32 @@
bool lex_normal_s355(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_CARET, lexer, s);
	if (s->lookahead == '=')
		return (lex_advance(351, lexer, s));
	lex_accept_token(anon_sym_PLUS_PLUS, lexer, s);
	return (lex_end_state(lexer, s));
}

bool lex_normal_s356(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_AMP, lexer, s);
	lex_accept_token(anon_sym_DASH_DASH, lexer, s);
	return (lex_end_state(lexer, s));
}

bool lex_normal_s357(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_AMP, lexer, s);
	if (s->lookahead == '&')
		return (lex_advance(283, lexer, s));
	if (s->lookahead == '=')
		return (lex_advance(350, lexer, s));
	lex_accept_token(anon_sym_DASH2, lexer, s);
	return (lex_end_state(lexer, s));
}

bool lex_normal_s358(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_AMP, lexer, s);
	if (s->lookahead == '&')
		return (lex_advance(283, lexer, s));
	if (s->lookahead == '=')
		return (lex_advance(350, lexer, s));
	if (s->lookahead == '>')
		return (lex_advance(299, lexer, s));
	lex_accept_token(anon_sym_DASH2, lexer, s);
	if (s->lookahead == '-')
		return (lex_advance(356, lexer, s));
	return (lex_end_state(lexer, s));
}

bool lex_normal_s359(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_AMP, lexer, s);
	if (s->lookahead == '&')
		return (lex_advance(283, lexer, s));
	if (s->lookahead == '>')
		return (lex_advance(299, lexer, s));
	lex_accept_token(anon_sym_PLUS2, lexer, s);
	return (lex_end_state(lexer, s));
}
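For context, the states above follow one pattern: accept the longest token matched so far, then branch on the lookahead character to a follow-up state that may extend it. The sketch below is a minimal, self-contained approximation of that pattern. The real t_lexer and t_lexer_state layouts and the bodies of lex_accept_token, lex_advance, and lex_end_state are not part of this diff, so everything here is an assumption made for illustration; only the shape of the state function mirrors the generated code (it copies the accept-then-branch structure of lex_normal_s357, with placeholder state numbers).

#include <stdbool.h>
#include <stdio.h>

/* Placeholder token ids; the real anon_sym_* values come from the grammar. */
enum e_token
{
	anon_sym_NONE,
	anon_sym_AMP
};

typedef struct s_lexer
{
	const char	*src;
	int			pos;
}	t_lexer;

typedef struct s_lexer_state
{
	int		lookahead;
	int		result;
	int		next_state;
	bool	done;
}	t_lexer_state;

/* Remember the longest token matched so far. */
static void	lex_accept_token(int token, t_lexer *lexer, t_lexer_state *s)
{
	(void)lexer;
	s->result = token;
}

/* Consume the lookahead and continue in the given state. */
static bool	lex_advance(int state, t_lexer *lexer, t_lexer_state *s)
{
	lexer->pos++;
	s->lookahead = lexer->src[lexer->pos];
	s->next_state = state;
	return (true);
}

/* No further transition applies: stop and keep the last accepted token. */
static bool	lex_end_state(t_lexer *lexer, t_lexer_state *s)
{
	(void)lexer;
	s->done = true;
	return (false);
}

/* Same accept-then-branch shape as lex_normal_s357 above;
** the target states 1 and 2 are placeholders. */
static bool	lex_demo_amp(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_AMP, lexer, s);
	if (s->lookahead == '&')
		return (lex_advance(1, lexer, s));
	if (s->lookahead == '=')
		return (lex_advance(2, lexer, s));
	return (lex_end_state(lexer, s));
}

int	main(void)
{
	t_lexer			lexer;
	t_lexer_state	s;

	lexer.src = "&&";
	lexer.pos = 1;
	s.lookahead = lexer.src[lexer.pos];
	s.result = anon_sym_NONE;
	s.next_state = 0;
	s.done = false;
	if (lex_demo_amp(&lexer, &s))
		printf("advance to state %d (token so far: %d)\n", s.next_state, s.result);
	else
		printf("token finished: %d\n", s.result);
	return (0);
}

Run on the input "&&", the demo state accepts anon_sym_AMP and then requests an advance to the follow-up state, which is the same accept-then-branch behaviour each lex_normal_s35x function in the hunk implements for its own token set.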