update: fold <CARRET><WORD> into a TOK_REDIR token
parent 3287b6a2a7
commit 7cb3582b1a
6 changed files with 64 additions and 4 deletions

@@ -6,6 +6,7 @@ passes/fold_double_carret \
 passes/fold_double_pipe \
 passes/fold_expansion \
 passes/fold_no_quote \
+passes/fold_redir \
 passes/fold_whitespace \
 passes/template_file \
 token_lifetime \

@@ -6,7 +6,7 @@
 /*   By: maiboyer <maiboyer@student.42.fr>          +#+  +:+       +#+        */
 /*                                                +#+#+#+#+#+   +#+           */
 /*   Created: 2024/10/02 18:43:41 by maiboyer          #+#    #+#             */
-/*   Updated: 2024/10/05 13:10:56 by maiboyer         ###   ########.fr       */
+/*   Updated: 2024/10/05 18:03:54 by maiboyer         ###   ########.fr       */
 /*                                                                            */
 /* ************************************************************************** */
 
@@ -43,6 +43,7 @@ t_error ts_double_string_pass(t_vec_token input, t_vec_token *output);
 t_error ts_fold_no_quote(t_vec_token input, t_vec_token *output);
 t_error ts_fold_whitespace(t_vec_token input, t_vec_token *output);
 t_error ts_do_fuck_all(t_vec_token input, t_vec_token *output);
+t_error ts_fold_redir(t_vec_token input, t_vec_token *output);
 t_error ts_fold_expension(t_vec_token input, t_vec_token *output);
 
 

@@ -6,7 +6,7 @@
 /*   By: maiboyer <maiboyer@student.42.fr>          +#+  +:+       +#+        */
 /*                                                +#+#+#+#+#+   +#+           */
 /*   Created: 2024/09/26 17:59:23 by maiboyer          #+#    #+#             */
-/*   Updated: 2024/10/05 13:11:34 by maiboyer         ###   ########.fr       */
+/*   Updated: 2024/10/05 18:02:03 by maiboyer         ###   ########.fr       */
 /*                                                                            */
 /* ************************************************************************** */
 
@@ -39,6 +39,7 @@ enum e_token
 	TOK_WHITESPACE,	// whitespace outside of quoted strings
 	TOK_NALPHANUM,	// a non alphanumeric character, used in the expansion folding, then folded back into NQUOTE
 	TOK_WORD,		// a meta token, which contains subtokens
+	TOK_REDIR,		// a meta token, which contains <OPERATOR> being an [D](L|R)CARRET and the arg being a WORD
 };
 
 typedef struct s_token
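
Note on the TOK_REDIR entry added above: it is a meta token whose subtokens carry the redirection operator ((D)(L|R)CARRET) followed by its WORD argument. As an illustration only, a hypothetical accessor (not part of this commit) could split the two parts apart with the same vec_token_get call that fold_redir.c uses below:

#include "me/vec/vec_token.h"
#include "parser/token.h"

/* Hypothetical helper, not part of this commit: split a TOK_REDIR meta
** token into its operator subtoken (index 0) and its WORD argument
** (index 1), matching the layout produced by ts_fold_redir below. */
static void	redir_parts(t_token *redir, t_token **op, t_token **arg)
{
	*op = vec_token_get(&redir->subtokens, 0);
	*arg = vec_token_get(&redir->subtokens, 1);
}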

@@ -6,7 +6,7 @@
 /*   By: maiboyer <maiboyer@student.42.fr>          +#+  +:+       +#+        */
 /*                                                +#+#+#+#+#+   +#+           */
 /*   Created: 2024/10/02 18:41:16 by maiboyer          #+#    #+#             */
-/*   Updated: 2024/10/05 13:12:08 by maiboyer         ###   ########.fr       */
+/*   Updated: 2024/10/05 18:03:39 by maiboyer         ###   ########.fr       */
 /*                                                                            */
 /* ************************************************************************** */
 
@@ -42,6 +42,7 @@ static const struct s_ts_pass_def g_ts_passes[] = {\
 	{ts_double_pipe, "double pipe => or"},
 	{ts_double_lcarret, "double lcarret => dlcarret"},
 	{ts_double_rcarret, "double rcarrer => drcarret"},
+	{ts_fold_redir, "fold redir+argument"},
 };
 
 t_error ts_apply_passes(t_vec_token ts, t_vec_token *out)
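
The entry added above registers the new pass in the g_ts_passes table that ts_apply_passes consumes. The driver body itself is not part of this diff; the sketch below is only an assumption of how such a table could be walked, with the struct field name `fn` made up for illustration, each pass's output fed into the next pass's input per the ownership rules documented in fold_redir.c further down.

/* Hypothetical driver sketch, not the repository's implementation; it
** assumes it lives in the same file as g_ts_passes and that each entry
** exposes its pass function through a field here called `fn`. */
t_error	ts_apply_passes_sketch(t_vec_token ts, t_vec_token *out)
{
	t_vec_token	next;
	t_usize		i;
	t_error		err;

	i = 0;
	while (i < sizeof(g_ts_passes) / sizeof(g_ts_passes[0]))
	{
		err = g_ts_passes[i].fn(ts, &next);
		if (err != NO_ERROR)
			return (err);
		ts = next;
		i++;
	}
	return (*out = ts, NO_ERROR);
}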

@@ -6,7 +6,7 @@
 /*   By: maiboyer <maiboyer@student.42.fr>          +#+  +:+       +#+        */
 /*                                                +#+#+#+#+#+   +#+           */
 /*   Created: 2024/10/02 19:04:32 by maiboyer          #+#    #+#             */
-/*   Updated: 2024/10/05 13:19:50 by maiboyer         ###   ########.fr       */
+/*   Updated: 2024/10/05 18:05:49 by maiboyer         ###   ########.fr       */
 /*                                                                            */
 /* ************************************************************************** */
 
parser/src/passes/fold_redir.c (new file, 56 lines)
@@ -0,0 +1,56 @@
+/* ************************************************************************** */
+/*                                                                            */
+/*                                                        :::      ::::::::   */
+/*   fold_redir.c                                       :+:      :+:    :+:   */
+/*                                                    +:+ +:+         +:+     */
+/*   By: maiboyer <maiboyer@student.42.fr>          +#+  +:+       +#+        */
+/*                                                +#+#+#+#+#+   +#+           */
+/*   Created: 2024/10/02 19:04:32 by maiboyer          #+#    #+#             */
+/*   Updated: 2024/10/05 18:02:25 by maiboyer         ###   ########.fr       */
+/*                                                                            */
+/* ************************************************************************** */
+
+#include "parser/passes.h"
+#include "me/types.h"
+#include "me/vec/vec_token.h"
+#include "parser/token.h"
+
+bool _is_token_carret(enum e_token ttype)
+{
+	return (ttype == TOK_LCARRET || ttype == TOK_DLCARRET || ttype == TOK_RCARRET || ttype == TOK_DRCARRET);
+}
+
+/// This is a sample pass.
+///
+/// There are a few rules the rest of the tokenizer machinery assumes
+/// these functions follow:
+/// - the input vec WILL be freed when the function returns, even in
+///   case of error
+/// - the output vector isn't populated if the function returns an error,
+///   thus it shouldn't be freed in case of error
+/// - the output tokens may not be direct copies of the input tokens,
+///   but need to be cloned (different allocations for stuff)
+/// This function folds any `<CARRET><WORD>` pair into a TOK_REDIR token.
+t_error ts_fold_redir(t_vec_token input, t_vec_token *output)
+{
+	t_vec_token	out;
+	t_usize		i;
+	t_token		tmp;
+
+	i = 0;
+	out = vec_token_new(input.len, token_free);
+	while (i < input.len)
+	{
+		if (vec_token_get(&input, i + 1) != NULL && _is_token_carret(vec_token_get(&input, i)->type) && vec_token_get(&input, i + 1)->type == TOK_WORD)
+		{
+			tmp = token_new_meta(TOK_REDIR);
+			vec_token_push(&tmp.subtokens, token_clone(vec_token_get(&input, i++)));
+			vec_token_push(&tmp.subtokens, token_clone(vec_token_get(&input, i++)));
+			vec_token_push(&out, tmp);
+		}
+		else
+			vec_token_push(&out, token_clone(&input.buffer[i++]));
+	}
+	vec_token_free(input);
+	return (*output = out, NO_ERROR);
+}
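
As a minimal usage sketch (the wrapper name is hypothetical; only ts_fold_redir, t_error, NO_ERROR and the ownership rules come from the code above), a caller hands the pass its input vector and only treats the output as valid on success:

#include "parser/passes.h"

/* Hypothetical caller sketch: `tokens` is consumed by ts_fold_redir even
** on error; `*folded` is only written (and must later be released with
** vec_token_free) when NO_ERROR is returned. */
t_error	fold_redirs_example(t_vec_token tokens, t_vec_token *folded)
{
	t_error	err;

	err = ts_fold_redir(tokens, folded);
	if (err != NO_ERROR)
		return (err);
	return (NO_ERROR);
}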