changed grammar

commit e5b22489e1
parent 0b5b5e4d7e

1219 changed files with 96283 additions and 113028 deletions
@@ -14,54 +14,46 @@
bool lex_normal_s345(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_SLASH, lexer, s);
	lex_accept_token(anon_sym_COLON, lexer, s);
	if (s->lookahead == '=')
		return (lex_advance(319, lexer, s));
	return (lex_advance(381, lexer, s));
	if (s->lookahead == '?')
		return (lex_advance(385, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(218, lexer, s));
	return (lex_advance(215, lexer, s));
	if ((!s->eof && \
		set_contains(sym__comment_word_character_set_1(), 10, \
		s->lookahead)))
		return (lex_advance(497, lexer, s));
	return (lex_advance(488, lexer, s));
	return (lex_end_state(lexer, s));
}

bool lex_normal_s346(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_SLASH, lexer, s);
	if (s->lookahead == '=')
		return (lex_advance(318, lexer, s));
	lex_accept_token(anon_sym_COLON, lexer, s);
	if (s->lookahead == '\\')
		return (lex_advance(215, lexer, s));
	if ((!s->eof && \
		set_contains(sym__comment_word_character_set_1(), 10, \
		s->lookahead)))
		return (lex_advance(488, lexer, s));
	return (lex_end_state(lexer, s));
}

bool lex_normal_s347(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_PERCENT, lexer, s);
	if (s->lookahead == '%')
		return (lex_advance(399, lexer, s));
	if (s->lookahead == '=')
		return (lex_advance(321, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(218, lexer, s));
	if ((!s->eof && \
		set_contains(sym__comment_word_character_set_1(), 10, \
		s->lookahead)))
		return (lex_advance(497, lexer, s));
	lex_accept_token(anon_sym_PLUS_PLUS, lexer, s);
	return (lex_end_state(lexer, s));
}

bool lex_normal_s348(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_PERCENT, lexer, s);
	if (s->lookahead == '%')
		return (lex_advance(398, lexer, s));
	lex_accept_token(anon_sym_DASH_DASH, lexer, s);
	return (lex_end_state(lexer, s));
}

bool lex_normal_s349(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_PERCENT, lexer, s);
	if (s->lookahead == '=')
		return (lex_advance(320, lexer, s));
	lex_accept_token(anon_sym_DASH2, lexer, s);
	return (lex_end_state(lexer, s));
}
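For orientation, here is a minimal, self-contained sketch of how state functions like the ones in this hunk could be driven. Only the identifiers visible in the diff (lex_accept_token, lex_advance, lex_end_state, t_lexer, t_lexer_state, the anon_sym_* tokens) come from the commit; every struct field, helper body, the demo state and the main function below are assumptions made for illustration and are not part of this repository.

/* sketch.c — hypothetical illustration only, not code from this commit.
** A state function records the token accepted so far, then either
** consumes the lookahead and returns true (keep lexing) or returns
** false once no transition applies. */

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

enum e_symbol
{
	anon_sym_SLASH,
	anon_sym_COLON,
	anon_sym_PERCENT
};

typedef struct s_lexer
{
	const char	*input;
	size_t		pos;
	int			result_symbol;
}	t_lexer;

typedef struct s_lexer_state
{
	int		lookahead;
	bool	eof;
	int		state;
}	t_lexer_state;

/* remember the longest token matched so far (assumed behavior) */
static void	lex_accept_token(int symbol, t_lexer *lexer, t_lexer_state *s)
{
	(void)s;
	lexer->result_symbol = symbol;
}

/* consume the lookahead character and move to the given state */
static bool	lex_advance(int next_state, t_lexer *lexer, t_lexer_state *s)
{
	lexer->pos++;
	s->lookahead = lexer->input[lexer->pos];
	s->eof = (s->lookahead == '\0');
	s->state = next_state;
	return (true);
}

/* no transition matched: stop advancing in this state */
static bool	lex_end_state(t_lexer *lexer, t_lexer_state *s)
{
	(void)lexer;
	(void)s;
	return (false);
}

/* same shape as the lex_normal_s* functions above, with invented ids */
static bool	lex_demo_state(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_SLASH, lexer, s);
	if (s->lookahead == '=')
		return (lex_advance(1, lexer, s));
	return (lex_end_state(lexer, s));
}

int	main(void)
{
	t_lexer			lexer = {"=", 0, -1};
	t_lexer_state	s = {0, false, 0};

	/* pretend a '/' was just consumed; '=' is the pending lookahead */
	s.lookahead = lexer.input[lexer.pos];
	if (lex_demo_state(&lexer, &s))
		printf("advanced to state %d, accepted symbol %d\n",
			s.state, lexer.result_symbol);
	return (0);
}

Presumably the real driver dispatches on s->state across many such functions, lex_normal_s345 through lex_normal_s349 above being five of them; the single lex_demo_state call here only stands in for that dispatch.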