Started working on ast
This commit is contained in:
parent 9aee1f2272
commit 1d4dc219db
1344 changed files with 118659 additions and 107610 deletions
@@ -14,58 +14,38 @@
bool lex_normal_s390(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_DOLLAR, lexer, s);
	if (s->lookahead == '(')
		return (lex_advance(430, lexer, s));
	if (s->lookahead == '{')
		return (lex_advance(414, lexer, s));
	lex_accept_token(anon_sym_DASH_DASH2, lexer, s);
	return (lex_end_state(lexer, s));
}

bool lex_normal_s391(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_DQUOTE, lexer, s);
	lex_accept_token(aux_sym_concatenation_token1, lexer, s);
	return (lex_end_state(lexer, s));
}

bool lex_normal_s392(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(sym_string_content, lexer, s);
	if (s->lookahead == '\n')
		return (lex_advance(396, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(129, lexer, s));
	if ((s->lookahead != 0 && s->lookahead != '\r' && \
		s->lookahead != '"' && s->lookahead != '$' && s->lookahead \
		!= '`'))
		return (lex_advance(399, lexer, s));
	lex_accept_token(anon_sym_DOLLAR, lexer, s);
	return (lex_end_state(lexer, s));
}

bool lex_normal_s393(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(sym_string_content, lexer, s);
	if (s->lookahead == '\n')
		return (lex_advance(399, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(434, lexer, s));
	if ((s->lookahead != 0 && s->lookahead != '\r' && \
		s->lookahead != '"' && s->lookahead != '$' && s->lookahead \
		!= '`'))
		return (lex_advance(401, lexer, s));
	lex_accept_token(anon_sym_DOLLAR, lexer, s);
	if (s->lookahead == '(')
		return (lex_advance(429, lexer, s));
	if (s->lookahead == '{')
		return (lex_advance(412, lexer, s));
	return (lex_end_state(lexer, s));
}

bool lex_normal_s394(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(sym_string_content, lexer, s);
	if (s->lookahead == '\n')
		return (lex_advance(397, lexer, s));
	if (s->lookahead == '\\')
		return (lex_advance(129, lexer, s));
	if ((s->lookahead != 0 && s->lookahead != '\r' && \
		s->lookahead != '"' && s->lookahead != '$' && s->lookahead \
		!= '`'))
		return (lex_advance(399, lexer, s));
	lex_accept_token(anon_sym_DOLLAR, lexer, s);
	if (s->lookahead == '(')
		return (lex_advance(428, lexer, s));
	if (s->lookahead == '{')
		return (lex_advance(412, lexer, s));
	return (lex_end_state(lexer, s));
}
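
Each function in this hunk encodes one DFA state: it marks the token(s) acceptable at that point with lex_accept_token(), then either consumes the lookahead and selects a successor state via lex_advance(), or stops via lex_end_state(). A minimal sketch of how such per-state functions could be driven follows; it is not part of this commit, and the dispatch table g_normal_states, the driver lex_run, and the s->state field are assumed names, inferred only from the numeric arguments to lex_advance() above.

typedef bool	(*t_lex_state_fn)(t_lexer *lexer, t_lexer_state *s);

/* Hypothetical dispatch table, one slot per generated state id; only
** the states from the hunk above are shown, the rest are elided. */
static const t_lex_state_fn	g_normal_states[] = {
	[390] = lex_normal_s390,
	[391] = lex_normal_s391,
	[392] = lex_normal_s392,
	[393] = lex_normal_s393,
	[394] = lex_normal_s394,
};

/* Hypothetical driver: assumes lex_advance() consumes s->lookahead,
** stores its first argument in s->state and returns true, while
** lex_end_state() returns false, so the loop exits once the longest
** match recorded by lex_accept_token() is final. */
static void	lex_run(t_lexer *lexer, t_lexer_state *s)
{
	while (g_normal_states[s->state](lexer, s))
		;
}

Under those assumptions, lexing one token is a single call such as lex_run(lexer, s) after setting s->state to the start state for the current lexing mode.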