update: fix word
parent 0df91727b0
commit 9c2c977545

4 changed files with 13 additions and 11 deletions

@@ -5,6 +5,7 @@ passes/fold_double_amp \
passes/fold_double_carret \
passes/fold_double_paren \
passes/fold_double_pipe \
passes/fold_double_quote \
passes/fold_expansion \
passes/fold_no_quote \
passes/fold_redir \

@@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/10/02 18:43:41 by maiboyer #+# #+# */
-/* Updated: 2024/10/06 13:12:04 by maiboyer ### ########.fr */
+/* Updated: 2024/10/06 15:27:24 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */

@@ -49,6 +49,6 @@ t_error ts_fold_expension(t_vec_token input, t_vec_token *output);
t_error ts_fold_redir(t_vec_token input, t_vec_token *output);
t_error ts_split_paren(t_vec_token input, t_vec_token *output);
t_error ts_paren_to_noquote(t_vec_token input, t_vec_token *output);
-
+t_error ts_fold_into_word(t_vec_token input, t_vec_token *output);

#endif /* PASSES_H */
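
Every pass declared here follows the same contract: it takes the current token vector by value, fills a rebuilt vector through the out parameter, and returns a t_error status. A minimal sketch of how a driver could exploit that uniform signature, using hypothetical stand-in types (t_error, t_vec_token and run_pipeline below are simplified placeholders, not the project's real definitions):

#include <stddef.h>

/* Stand-ins for illustration only; the real t_error / t_vec_token live elsewhere. */
typedef int	t_error;	/* assumed convention: 0 means success */
typedef struct s_vec_token
{
	void	*buffer;	/* token storage, details omitted */
	size_t	len;
}	t_vec_token;

/* Every ts_* pass shares this shape, so one pointer type can describe them all. */
typedef t_error	(*t_ts_pass)(t_vec_token input, t_vec_token *output);

/* Threads the token vector through a list of passes: the output of one pass
** becomes the input of the next (ownership handling is left to each pass). */
t_error	run_pipeline(const t_ts_pass *passes, size_t count, t_vec_token *tokens)
{
	t_vec_token	next;
	t_error		err;
	size_t		i;

	i = 0;
	while (i < count)
	{
		err = passes[i](*tokens, &next);
		if (err != 0)
			return (err);
		*tokens = next;
		i++;
	}
	return (0);
}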

@@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/10/02 18:41:16 by maiboyer #+# #+# */
-/* Updated: 2024/10/06 14:53:20 by rparodi ### ########.fr */
+/* Updated: 2024/10/06 15:27:27 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */

@@ -55,6 +55,7 @@ static const struct s_ts_pass_def g_ts_passes[] = {\
{ts_double_rparen, "double rparen => drparen"}, \
{ts_double_lcarret, "double lcarret => dlcarret"}, \
{ts_double_rcarret, "double rcarrer => drcarret"}, \
+{ts_fold_into_word, "fold into words"}, \
// there should be an ts_fold_arith here
{ts_split_paren, "split double parenthesis"}, \
{ts_fold_redir, "fold redir+argument"}, \
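
The table pairs each pass function with a human-readable label, so a driver can run the whole pipeline in order and report which stage failed. A compact, self-contained sketch of that table-driven pattern (the types and pass functions below are invented for illustration and deliberately simpler than the project's s_ts_pass_def / t_vec_token):

#include <stdio.h>

typedef int	t_error;	/* stand-in: 0 means success */

/* Hypothetical pass working on a plain int for brevity. */
typedef t_error	(*t_pass_fn)(int in, int *out);

struct s_pass_def
{
	t_pass_fn	fn;
	const char	*desc;
};

static t_error	double_it(int in, int *out)	{ *out = in * 2; return (0); }
static t_error	add_one(int in, int *out)	{ *out = in + 1; return (0); }

static const struct s_pass_def	g_passes[] = {
	{double_it, "double the value"},
	{add_one, "add one"},
};

int	main(void)
{
	int		value = 3;
	size_t	i = 0;

	while (i < sizeof(g_passes) / sizeof(g_passes[0]))
	{
		if (g_passes[i].fn(value, &value) != 0)
			return (printf("pass failed: %s\n", g_passes[i].desc), 1);
		i++;
	}
	printf("result: %d\n", value);	/* prints 7 */
	return (0);
}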

@@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/10/02 19:04:32 by maiboyer #+# #+# */
-/* Updated: 2024/10/06 15:21:47 by rparodi ### ########.fr */
+/* Updated: 2024/10/06 15:30:07 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */

@@ -15,7 +15,7 @@
#include "me/types.h"
#include "me/vec/vec_token.h"
#include "parser/token.h"
-#include "stdbool.h"
+#include <stdbool.h>

/// This is a sample pass
///
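
The only change in this hunk swaps the quoted include for the angle-bracket form, which is the conventional spelling for a standard header: with <...> the preprocessor searches only the system/compiler include paths, while "..." first looks next to the including file (project-local headers) before falling back to those paths. For example:

#include <stdbool.h>	/* standard header: resolved from the system include paths */

/* Project headers keep the quoted form, e.g. #include "parser/token.h",
** so they are found relative to the source tree first. */

bool	is_enabled(void)
{
	return (true);
}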

@@ -37,7 +37,7 @@ bool _type_extansion(enum e_token type)
	return (false);
}

-t_error ts_fold_double_quote(t_vec_token input, t_vec_token *output)
+t_error ts_fold_into_word(t_vec_token input, t_vec_token *output)
{
	t_vec_token out;
	t_usize i;

@@ -48,14 +48,14 @@ t_error ts_fold_double_quote(t_vec_token input, t_vec_token *output)
	out = vec_token_new(input.len, token_free);
	while (i < input.len)
	{
-		if (token_is_noquote(input.buffer[i].type))
+		if (_type_extansion(input.buffer[i].type))
		{
			j = 0;
-			tmp = token_new(TOK_WORD);
+			tmp = token_new_meta(TOK_WORD);
			while (i + j < input.len \
-				&& token_is_noquote(input.buffer[i + j].type))
-				if (_type_extansion(input.buffer->type))
-					vec_token_push(&tmp.subtokens, token_clone(&input.buffer[i+j]));
+				&& _type_extansion(input.buffer[i + j].type))
+				vec_token_push(&tmp.subtokens, \
+					token_clone(&input.buffer[i + j++]));
			vec_token_push(&out, tmp);
			i += j;
		}
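
The rewritten loop greedily folds every run of consecutive expandable tokens into one compound TOK_WORD token; note that the old inner loop never advanced j, which the new i + j++ inside the push fixes. A self-contained sketch of the same run-grouping idea, using simplified stand-in types instead of the project's t_vec_token / token_new_meta machinery (is_foldable below plays the role of _type_extansion, and the token kinds are made up):

#include <stdio.h>
#include <stddef.h>

/* Simplified stand-ins; the real pass builds an actual TOK_WORD token. */
enum e_tok { TOK_TEXT, TOK_QUOTE, TOK_EXPANSION, TOK_PIPE, TOK_WORD };

struct s_tok
{
	enum e_tok	type;
	const char	*text;
};

/* Mirrors the role of _type_extansion: which token kinds may be folded. */
static int	is_foldable(enum e_tok type)
{
	return (type == TOK_TEXT || type == TOK_QUOTE || type == TOK_EXPANSION);
}

int	main(void)
{
	struct s_tok	input[] = {
		{TOK_TEXT, "foo"}, {TOK_EXPANSION, "$BAR"}, {TOK_QUOTE, "'baz'"},
		{TOK_PIPE, "|"}, {TOK_TEXT, "qux"},
	};
	size_t			len = sizeof(input) / sizeof(input[0]);
	size_t			i;
	size_t			j;

	i = 0;
	while (i < len)
	{
		if (is_foldable(input[i].type))
		{
			/* Collect the whole run under one logical WORD, as the pass
			** does with vec_token_push(&tmp.subtokens, ...). */
			printf("WORD(");
			j = 0;
			while (i + j < len && is_foldable(input[i + j].type))
				printf(" %s", input[i + j++].text);
			printf(" )\n");
			i += j;
		}
		else
			printf("%s\n", input[i++].text);
	}
	return (0);
}

Collecting the run under a single parent mirrors how the pass clones each matching token into tmp.subtokens before appending tmp to the output vector.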