Commit: "Changed Grammar"
Parent: b0da66b7d8
Commit: fbb9977f4a
2132 changed files with 110314 additions and 233488 deletions.
Diff hunk: @@ -14,46 +14,50 @@
|
|||
|
||||
/*
** Lexer state 350: the characters scanned so far can be accepted as '/'
** and then '^'; '=' extends the token via state 321, anything else via
** state 348.
**
** NOTE(review): this block was reconstructed from a garbled diff view.
** The original text contained further statements after the unconditional
** lex_advance(348, ...) below (a '\\' check, a set_contains() check, and
** a lex_end_state() call) — all provably unreachable dead code, removed
** here. If that was a diff old/new interleaving artifact, verify this
** state against the generated grammar before merging.
*/
bool lex_normal_s350(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_SLASH, lexer, s);
	lex_accept_token(anon_sym_CARET, lexer, s);
	if (s->lookahead == '=')
		return (lex_advance(321, lexer, s));
	return (lex_advance(348, lexer, s));
}
|
||||
|
||||
/*
** Lexer state 351: accepts '%' (with '%%' extending via state 412),
** then '^'; '=' / '\\' / comment-word characters extend the token,
** anything else falls through to state 347.
**
** NOTE(review): reconstructed from a garbled diff view. The original
** text ended with a lex_end_state() call placed after the unconditional
** return (lex_advance(347, ...)) — provably unreachable, removed here.
** Verify against the generated grammar in case this was a diff artifact.
*/
bool lex_normal_s351(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_PERCENT, lexer, s);
	if (s->lookahead == '%')
		return (lex_advance(412, lexer, s));
	lex_accept_token(anon_sym_CARET, lexer, s);
	if (s->lookahead == '=')
		return (lex_advance(324, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(213, lexer, s));
	if ((!s->eof && \
		set_contains(sym__comment_word_character_set_1(), 12, \
			s->lookahead)))
		return (lex_advance(482, lexer, s));
	return (lex_advance(347, lexer, s));
}
|
||||
|
||||
/*
** Lexer state 352: accepts '%'; a second '%' extends the token via
** state 411, otherwise '&' is accepted and the state ends.
**
** NOTE(review): reconstructed from a garbled diff rendering — formatting
** restored, statement order unchanged.
*/
bool lex_normal_s352(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_PERCENT, lexer, s);
	if (s->lookahead == '%')
		return (lex_advance(411, lexer, s));
	lex_accept_token(anon_sym_AMP, lexer, s);
	return (lex_end_state(lexer, s));
}
|
||||
|
||||
/*
** Lexer state 353: accepts '%' then '&'; '&&' extends via state 279,
** '=' via state 323, anything else via state 346.
**
** NOTE(review): reconstructed from a garbled diff view. The original
** text ended with a lex_end_state() call after the unconditional
** return (lex_advance(346, ...)) — provably unreachable, removed here.
** Verify against the generated grammar in case this was a diff artifact.
*/
bool lex_normal_s353(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_PERCENT, lexer, s);
	lex_accept_token(anon_sym_AMP, lexer, s);
	if (s->lookahead == '&')
		return (lex_advance(279, lexer, s));
	if (s->lookahead == '=')
		return (lex_advance(323, lexer, s));
	return (lex_advance(346, lexer, s));
}
|
||||
|
||||
/*
** Lexer state 354: accepts '?' then '&'; '&&' extends via state 279,
** '=' via state 346, '>' via state 295; otherwise the state ends.
**
** NOTE(review): reconstructed from a garbled diff rendering — formatting
** restored, statement order unchanged.
*/
bool lex_normal_s354(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_QMARK, lexer, s);
	lex_accept_token(anon_sym_AMP, lexer, s);
	if (s->lookahead == '&')
		return (lex_advance(279, lexer, s));
	if (s->lookahead == '=')
		return (lex_advance(346, lexer, s));
	if (s->lookahead == '>')
		return (lex_advance(295, lexer, s));
	return (lex_end_state(lexer, s));
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue