Started from bottom, go to the sky
parent 96215449bd
commit f811e55dea
4781 changed files with 10121 additions and 1743 deletions
parser/static/lex_funcs/lex_keywords/state_0.c (new file, 63 lines)
@@ -0,0 +1,63 @@
/* ************************************************************************** */
/*                                                                            */
/*                                                        :::      ::::::::   */
/*   state_0.c                                          :+:      :+:    :+:   */
/*                                                    +:+ +:+         +:+     */
/*   By: maiboyer <maiboyer@student.42.fr>          +#+  +:+       +#+        */
/*                                                +#+#+#+#+#+   +#+           */
/*   Created: 2024/04/14 19:17:54 by maiboyer          #+#    #+#             */
/*   Updated: 2024/04/14 19:18:20 by maiboyer         ###   ########.fr       */
/*                                                                            */
/* ************************************************************************** */

#include "./lex_keywords_funcs.h"

bool	lex_keywords_s0(t_lexer *lexer, t_lexer_state *s)
{
	if (lex_keywords_s0_bis(lexer, s))
		return (true);
	if (s->lookahead == 'i')
		return (lex_advance(14, lexer, s));
	if (s->lookahead == 'k')
		return (lex_advance(15, lexer, s));
	if (s->lookahead == 'l')
		return (lex_advance(16, lexer, s));
	if (s->lookahead == 'r')
		return (lex_advance(17, lexer, s));
	if (s->lookahead == 's')
		return (lex_advance(18, lexer, s));
	if (s->lookahead == 't')
		return (lex_advance(19, lexer, s));
	if (s->lookahead == 'u')
		return (lex_advance(20, lexer, s));
	if (s->lookahead == 'w')
		return (lex_advance(21, lexer, s));
	if (((s->lookahead >= '\t' && s->lookahead <= '\r') || \
		s->lookahead == ' '))
		return (lex_skip(22, lexer, s));
	return (lex_end_state(lexer, s));
}

bool	lex_keywords_s1(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_A, lexer, s);
	return (lex_end_state(lexer, s));
}

bool	lex_keywords_s2(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_E, lexer, s);
	return (lex_end_state(lexer, s));
}

bool	lex_keywords_s3(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_K, lexer, s);
	return (lex_end_state(lexer, s));
}

bool	lex_keywords_s4(t_lexer *lexer, t_lexer_state *s)
{
	lex_accept_token(anon_sym_L, lexer, s);
	return (lex_end_state(lexer, s));
}
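Note: the file above follows a lookahead-dispatch pattern: state 0 inspects one character and either advances to a follow-up state, skips whitespace, or ends, while later states accept a token and end. The commit excerpt does not show the definitions of t_lexer, t_lexer_state, lex_advance, lex_skip, lex_accept_token, or lex_end_state, so the snippet below is only a minimal, self-contained sketch of that general pattern using simplified stand-in types and helpers; it is not the project's actual driver code.

/* Minimal sketch (stand-in types, not the project's real API): a two-state
** lookahead lexer plus a driver loop that keeps calling state functions
** until one of them reports that lexing of the current token is done. */
#include <stdbool.h>
#include <stdio.h>

typedef struct s_mini_lexer
{
	const char	*input;
	size_t		pos;
	int			token;
}	t_mini_lexer;

/* consume the lookahead and continue in the given state (hypothetical helper) */
static bool	mini_advance(int next_state, t_mini_lexer *lx, int *state)
{
	lx->pos++;
	*state = next_state;
	return (true);
}

/* state 0: dispatch on the current character, in the spirit of lex_keywords_s0 */
static bool	mini_state_0(t_mini_lexer *lx, int *state)
{
	if (lx->input[lx->pos] == 'i')
		return (mini_advance(1, lx, state));
	return (false);
}

/* state 1: accept a token and stop, in the spirit of the accepting states */
static bool	mini_state_1(t_mini_lexer *lx, int *state)
{
	(void)state;
	lx->token = 42;
	return (false);
}

int	main(void)
{
	t_mini_lexer	lx = {"if", 0, 0};
	int				state = 0;
	bool			running = true;

	while (running)
	{
		if (state == 0)
			running = mini_state_0(&lx, &state);
		else
			running = mini_state_1(&lx, &state);
	}
	printf("accepted token id: %d at pos %zu\n", lx.token, lx.pos);
	return (0);
}

In the real parser the numeric state ids passed to lex_advance / lex_skip presumably select the next generated state function; the boolean return value signals whether the state machine should keep running, which is the same role the driver loop plays in this sketch.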