update: exec works!

This commit is contained in:
maix0 2024-10-10 17:41:23 +02:00
parent 2363fadd02
commit 77e7f65b41
24 changed files with 192 additions and 499 deletions

View file

@ -6,7 +6,7 @@
# By: rparodi <rparodi@student.42.fr> +#+ +:+ +#+ #
# +#+#+#+#+#+ +#+ #
# Created: 2023/11/12 11:05:05 by rparodi #+# #+# #
# Updated: 2024/10/07 12:09:42 by maiboyer ### ########.fr #
# Updated: 2024/10/10 17:25:22 by maiboyer ### ########.fr #
# #
# **************************************************************************** #
@ -51,7 +51,7 @@ endif
# CFLAGS_ADDITIONAL += -O0
# CFLAGS_ADDITIONAL += -Wno-cpp -Wno-type-limits -Wno-unused-command-line-argument
CFLAGS_ADDITIONAL += -gcolumn-info -g3 -fno-builtin
# CFLAGS_ADDITIONAL += '-DERROR=((void)printf("ERROR HERE: " __FILE__ ":%d in %s\n", __LINE__, __func__), 1)'
CFLAGS_ADDITIONAL += '-DERROR=((void)printf("ERROR HERE: " __FILE__ ":%d in %s\n", __LINE__, __func__), 1)'
CFLAGS_ADDITIONAL += -O2
# CFLAGS_ADDITIONAL += -fuse-ld=gold -Wl,--print-symbol-counts -Wl,/tmp/symbols_count.log
# CFLAGS_ADDITIONAL += -fuse-ld=lld -ffunction-sections -fdata-sections -Wl,--gc-sections -Wl,-O3

View file

@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/07/03 20:38:29 by maiboyer #+# #+# */
/* Updated: 2024/08/12 16:34:50 by maiboyer ### ########.fr */
/* Updated: 2024/10/10 17:17:58 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -45,14 +45,20 @@ static inline void ast_print_block1(t_ast_node self);
static inline void ast_print_block2(t_ast_node self);
static inline void ast_print_block3(t_ast_node self);
static inline void ast_print_notdone(t_ast_node self)
{
printf(" <ast_print_notdone> ");
(void)(self);
}
static inline void ast_print_block1(t_ast_node self)
{
if (self->kind == AST_ARITHMETIC_EXPANSION)
return (ast_print_node_arithmetic_expansion(self));
if (self->kind == AST_CASE)
return (ast_print_node_case(self));
return (ast_print_notdone(self));
if (self->kind == AST_CASE_ITEM)
return (ast_print_node_case_item(self));
return (ast_print_notdone(self));
if (self->kind == AST_COMMAND)
return (ast_print_node_command(self));
if (self->kind == AST_COMMAND_SUBSTITUTION)
@ -60,9 +66,9 @@ static inline void ast_print_block1(t_ast_node self)
if (self->kind == AST_COMPOUND_STATEMENT)
return (ast_print_node_compound_statement(self));
if (self->kind == AST_ELIF)
return (ast_print_node_elif(self));
return (ast_print_notdone(self));
if (self->kind == AST_ELSE)
return (ast_print_node_else(self));
return (ast_print_notdone(self));
if (self->kind == AST_EMPTY)
return ;
ast_print_block2(self);
@ -77,13 +83,13 @@ static inline void ast_print_block2(t_ast_node self)
if (self->kind == AST_FILE_REDIRECTION)
return (ast_print_node_file_redirection(self));
if (self->kind == AST_FOR)
return (ast_print_node_for(self));
return (ast_print_notdone(self));
if (self->kind == AST_FUNCTION_DEFINITION)
return (ast_print_node_function_definition(self));
if (self->kind == AST_HEREDOC_REDIRECTION)
return (ast_print_node_heredoc_redirection(self));
return (ast_print_notdone(self));
if (self->kind == AST_IF)
return (ast_print_node_if(self));
return (ast_print_notdone(self));
if (self->kind == AST_LIST)
return (ast_print_node_list(self));
ast_print_block3(self);
@ -102,11 +108,11 @@ static inline void ast_print_block3(t_ast_node self)
if (self->kind == AST_SUBSHELL)
return (ast_print_node_subshell(self));
if (self->kind == AST_UNTIL)
return (ast_print_node_until(self));
return (ast_print_notdone(self));
if (self->kind == AST_VARIABLE_ASSIGNMENT)
return (ast_print_node_variable_assignment(self));
if (self->kind == AST_WHILE)
return (ast_print_node_while(self));
return (ast_print_notdone(self));
if (self->kind == AST_WORD)
return (ast_print_node_word(self));
printf("Unknown ast->kind: %#04x\n", self->kind);

View file

@ -1,12 +1,12 @@
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* print_arithmetic.c :+: :+: :+: */
/* ast_print_arithmetic.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: rparodi <rparodi@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/07/26 13:05:36 by rparodi #+# #+# */
/* Updated: 2024/07/26 13:08:22 by rparodi ### ########.fr */
/* Updated: 2024/10/10 16:57:18 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -19,7 +19,7 @@ void ast_print_node_arithmetic_expansion(t_ast_node self)
if (self->kind != AST_ARITHMETIC_EXPANSION)
return ;
printf("$((");
ast_print_node(self->data.arithmetic_expansion.expr);
ast_print(self->data.arithmetic_expansion.expr);
printf("))");
}
@ -35,7 +35,7 @@ void ast_print_node_function_definition(t_ast_node self)
i = 0;
while (i < self->data.function_definition.body.len)
{
ast_print_node(self->data.function_definition.body.buffer[i++]);
ast_print(self->data.function_definition.body.buffer[i++]);
printf(" ");
}
}

View file

@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/07/03 20:38:29 by maiboyer #+# #+# */
/* Updated: 2024/08/12 16:33:47 by maiboyer ### ########.fr */
/* Updated: 2024/10/10 16:56:59 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -24,7 +24,7 @@ void ast_print_node_command_substitution(t_ast_node self)
i = 0;
while (i < self->data.command_substitution.body.len)
{
ast_print_node(self->data.command_substitution.body.buffer[i++]);
ast_print(self->data.command_substitution.body.buffer[i++]);
}
printf(")");
}
@ -36,13 +36,13 @@ void ast_print_node_command_helper(t_ast_node self)
i = 0;
while (i < self->data.command.cmd_word.len)
{
ast_print_node(self->data.command.cmd_word.buffer[i++]);
ast_print(self->data.command.cmd_word.buffer[i++]);
printf(" ");
}
i = 0;
while (i < self->data.command.suffixes_redirections.len)
{
ast_print_node(self->data.command.suffixes_redirections.buffer[i++]);
ast_print(self->data.command.suffixes_redirections.buffer[i++]);
printf(" ");
}
_print_term(self->data.command.term);
@ -61,7 +61,7 @@ void ast_print_node_command(t_ast_node self)
i = 0;
while (i < self->data.command.prefixes.len)
{
ast_print_node(self->data.command.prefixes.buffer[i++]);
ast_print(self->data.command.prefixes.buffer[i++]);
printf(" ");
}
return (ast_print_node_command_helper(self));
@ -104,6 +104,6 @@ void ast_print_node_expansion(t_ast_node self)
ast_print_node_expansion_choose_op(self);
i = 0;
while (i < self->data.expansion.args.len)
ast_print_node(self->data.expansion.args.buffer[i++]);
ast_print(self->data.expansion.args.buffer[i++]);
printf("}");
}

View file

@ -6,7 +6,7 @@
/* By: rparodi <rparodi@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/07/26 13:07:12 by rparodi #+# #+# */
/* Updated: 2024/07/26 13:10:06 by rparodi ### ########.fr */
/* Updated: 2024/10/10 17:01:14 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -22,7 +22,7 @@ void ast_print_node_variable_assignment(t_ast_node self)
printf("! ");
printf("%s=", self->data.variable_assignment.name);
if (self->data.variable_assignment.value != NULL)
ast_print_node(self->data.variable_assignment.value);
ast_print(self->data.variable_assignment.value);
printf(" ");
}
@ -39,18 +39,18 @@ void ast_print_node_pipeline(t_ast_node self)
if (self->data.pipeline.statements.len != 0)
{
i = 0;
ast_print_node(self->data.pipeline.statements.buffer[i++]);
ast_print(self->data.pipeline.statements.buffer[i++]);
while (i < self->data.pipeline.statements.len)
{
printf(" | ");
ast_print_node(self->data.pipeline.statements.buffer[i++]);
ast_print(self->data.pipeline.statements.buffer[i++]);
}
}
i = 0;
while (i < self->data.pipeline.suffixes_redirections.len)
{
printf(" ");
ast_print_node(self->data.pipeline.suffixes_redirections.buffer[i++]);
ast_print(self->data.pipeline.suffixes_redirections.buffer[i++]);
}
_print_term(self->data.pipeline.term);
}
@ -63,16 +63,16 @@ void ast_print_node_list(t_ast_node self)
return ;
if (self->kind != AST_LIST)
return ;
ast_print_node(self->data.list.left);
ast_print(self->data.list.left);
if (self->data.list.op == AST_LIST_OR)
printf(" || ");
if (self->data.list.op == AST_LIST_AND)
printf(" && ");
ast_print_node(self->data.list.right);
ast_print(self->data.list.right);
i = 0;
while (i < self->data.list.suffixes_redirections.len)
{
ast_print_node(self->data.list.suffixes_redirections.buffer[i++]);
ast_print(self->data.list.suffixes_redirections.buffer[i++]);
printf(" ");
}
_print_term(self->data.list.term);

View file

@ -6,7 +6,7 @@
/* By: rparodi <rparodi@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/07/26 13:07:12 by rparodi #+# #+# */
/* Updated: 2024/07/26 13:25:29 by rparodi ### ########.fr */
/* Updated: 2024/10/10 16:57:52 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -30,7 +30,7 @@ void ast_print_node_file_redirection(t_ast_node self)
if (self->kind != AST_FILE_REDIRECTION)
return ;
if (self->data.file_redirection.input != NULL)
ast_print_node(self->data.file_redirection.input);
ast_print(self->data.file_redirection.input);
if (self->data.file_redirection.op == AST_REDIR_INPUT)
printf("<");
if (self->data.file_redirection.op == AST_REDIR_OUTPUT)
@ -46,6 +46,6 @@ void ast_print_node_file_redirection(t_ast_node self)
if (self->data.file_redirection.op == AST_REDIR_OUTPUT_CLOBBER)
printf(">|");
if (self->data.file_redirection.output != NULL)
ast_print_node(self->data.file_redirection.output);
ast_print(self->data.file_redirection.output);
return ((void) ast_print_node_file_redirection_heredoc(self));
}

View file

@ -6,7 +6,7 @@
/* By: rparodi <rparodi@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/07/26 13:27:30 by rparodi #+# #+# */
/* Updated: 2024/07/27 13:50:17 by maiboyer ### ########.fr */
/* Updated: 2024/10/10 16:58:13 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -26,7 +26,7 @@ void ast_print_node_subshell(t_ast_node self)
printf("( ");
while (i < self->data.subshell.body.len)
{
ast_print_node(self->data.subshell.body.buffer[i++]);
ast_print(self->data.subshell.body.buffer[i++]);
printf(" ");
}
printf(")");
@ -34,7 +34,7 @@ void ast_print_node_subshell(t_ast_node self)
while (i < self->data.subshell.suffixes_redirections.len)
{
printf(" ");
ast_print_node(self->data.subshell.suffixes_redirections.buffer[i++]);
ast_print(self->data.subshell.suffixes_redirections.buffer[i++]);
}
printf(" ");
_print_term(self->data.subshell.term);
@ -51,7 +51,7 @@ void ast_print_node_program(t_ast_node self)
i = 0;
while (i < self->data.program.body.len)
{
ast_print_node(self->data.program.body.buffer[i++]);
ast_print(self->data.program.body.buffer[i++]);
printf(" ");
}
}
@ -70,7 +70,7 @@ void ast_print_node_compound_statement(t_ast_node self)
printf("{ ");
while (i < self->data.compound_statement.body.len)
{
ast_print_node(self->data.compound_statement.body.buffer[i++]);
ast_print(self->data.compound_statement.body.buffer[i++]);
printf(" ");
}
printf("}");
@ -78,7 +78,7 @@ void ast_print_node_compound_statement(t_ast_node self)
while (i < self->data.compound_statement.suffixes_redirections.len)
{
printf(" ");
ast_print_node \
ast_print \
(self->data.compound_statement.suffixes_redirections.buffer[i++]);
}
printf(" ");
@ -102,6 +102,6 @@ void ast_print_node_word(t_ast_node self)
i = 0;
printf("%s", quote_type);
while (i < self->data.word.inner.len)
ast_print_node(self->data.word.inner.buffer[i++]);
ast_print(self->data.word.inner.buffer[i++]);
printf("%s", quote_type);
}

View file

@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/09/14 12:24:49 by maiboyer #+# #+# */
/* Updated: 2024/09/18 21:07:23 by maiboyer ### ########.fr */
/* Updated: 2024/10/10 16:25:43 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -110,6 +110,7 @@ t_error _handle_builtin(t_spawn_info info, t_state *state, t_cmd_pipe cmd_pipe,
t_builtin_func actual_func;
t_builtin_spawn_info binfo;
mem_set_zero(&binfo, sizeof(binfo));
binfo.state = state;
mem_set_zero(out, sizeof(*out));
if (_find_builtin(&info, &actual_func))

View file

@ -6,11 +6,12 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/09/14 12:35:02 by maiboyer #+# #+# */
/* Updated: 2024/10/06 14:21:41 by maiboyer ### ########.fr */
/* Updated: 2024/10/10 16:31:30 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
#include "exec/_run_ast.h"
#include "me/mem/mem.h"
#include "me/os/os.h"
#include <sys/wait.h>
@ -92,6 +93,7 @@ t_error run_subshell(t_ast_subshell *subshell, t_state *state,
if (subshell == NULL || state == NULL || out == NULL)
return (ERROR);
mem_set_zero(&sinfo, sizeof(sinfo));
if (_setup_redirection(&info, state, cmd_pipe, \
&subshell->suffixes_redirections))
return (ERROR);

View file

@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/10/02 18:41:16 by maiboyer #+# #+# */
/* Updated: 2024/10/10 15:27:12 by maiboyer ### ########.fr */
/* Updated: 2024/10/10 17:25:40 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -70,7 +70,6 @@ t_error ts_apply_passes(t_vec_token ts, t_vec_token *out)
else
me_printf("Applied '%s' pass\n", g_ts_passes[i].name);
ts = next;
ts_print(&ts);
i++;
}
return (*out = ts, NO_ERROR);
@ -98,7 +97,6 @@ t_error ts_dq_apply_passes(t_vec_token ts, t_vec_token *out)
else
me_printf("Applied '%s' dq_pass\n", g_ts_dq_passes[i].name);
ts = next;
ts_print(&ts);
i++;
}
return (*out = ts, NO_ERROR);

View file

@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/10/09 12:44:53 by maiboyer #+# #+# */
/* Updated: 2024/10/10 15:16:38 by maiboyer ### ########.fr */
/* Updated: 2024/10/10 17:19:30 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -47,7 +47,7 @@ static t_error _create_ast_redir(enum e_token ty, t_ast_node *out)
return (*out = ret, NO_ERROR);
}
t_const_str _token_to_string(t_token *arg)
t_const_str _token_to_string(t_token *arg, bool dollar_exp)
{
t_usize i;
t_string s;
@ -58,7 +58,7 @@ t_const_str _token_to_string(t_token *arg)
s = string_new(16);
if (arg->string.buf != NULL)
{
if (arg->type == TOK_EXPENSION)
if (dollar_exp && arg->type == TOK_EXPENSION)
string_push_char(&s, '$');
string_push(&s, arg->string.buf);
}
@ -67,7 +67,7 @@ t_const_str _token_to_string(t_token *arg)
i = 0;
while (i < arg->subtokens.len)
{
tmp = _token_to_string(&arg->subtokens.buffer[i++]);
tmp = _token_to_string(&arg->subtokens.buffer[i++], false);
string_push(&s, tmp);
str_free((t_str)tmp);
}
@ -80,7 +80,7 @@ static t_error _ast_set_redir_arg(t_ast_node node, t_token *arg)
if (node == NULL || arg == NULL || (node->kind != AST_HEREDOC_REDIRECTION && node->kind != AST_FILE_REDIRECTION))
return (ERROR);
if (node->kind == AST_HEREDOC_REDIRECTION)
node->data.heredoc_redirection.delimiter = (t_str)_token_to_string(arg);
node->data.heredoc_redirection.delimiter = (t_str)_token_to_string(arg, true);
else if (handle_tok_word_inner(arg, &node->data.file_redirection.output))
return (ERROR);
return (NO_ERROR);
@ -104,7 +104,7 @@ t_error _tok_word_expansion(t_token *tok, t_ast_node *out)
t_ast_node ret;
ret = ast_alloc(AST_EXPANSION);
ret->data.expansion.var_name = (t_str)_token_to_string(tok);
ret->data.expansion.var_name = (t_str)_token_to_string(tok, false);
return (*out = ret, NO_ERROR);
}
t_error _tok_word_nquote(t_token *tok, t_ast_node *out)
@ -115,7 +115,7 @@ t_error _tok_word_nquote(t_token *tok, t_ast_node *out)
ret = ast_alloc(AST_WORD);
ret->data.word.kind = AST_WORD_NO_QUOTE;
tmp = ast_alloc(AST_RAW_STRING);
tmp->data.raw_string.str = (t_str)_token_to_string(tok);
tmp->data.raw_string.str = (t_str)_token_to_string(tok, false);
vec_ast_push(&ret->data.word.inner, tmp);
return (*out = ret, NO_ERROR);
}
@ -127,7 +127,7 @@ t_error _tok_word_squote(t_token *tok, t_ast_node *out)
ret = ast_alloc(AST_WORD);
ret->data.word.kind = AST_WORD_SINGLE_QUOTE;
tmp = ast_alloc(AST_RAW_STRING);
tmp->data.raw_string.str = (t_str)_token_to_string(tok);
tmp->data.raw_string.str = (t_str)_token_to_string(tok, false);
vec_ast_push(&ret->data.word.inner, tmp);
return (*out = ret, NO_ERROR);
}
@ -142,7 +142,7 @@ t_error _tok_word_dquote(t_token *tok, t_ast_node *out)
i = 0;
while (i < tok->subtokens.len)
{
if (_tok_word(&tok->subtokens.buffer[i], &tmp))
if (_tok_word(&tok->subtokens.buffer[i++], &tmp))
return (ast_free(ret), ERROR);
vec_ast_push(&ret->data.word.inner, tmp);
}
@ -221,13 +221,13 @@ t_error handle_tok_redir(t_ast_node cmd, t_token *tok)
/// the names may be different idk
/// eventually the function will use t_error and everything;
/// struct s_ast_command `ast/include/ast/_raw_structs.h`
t_ast_node ast_from_cmd(t_token tok)
t_error ast_from_cmd(t_token tok, t_vec_ast *output_queue)
{
t_ast_node ret;
t_usize i;
if (tok.type != TOK_CMD)
me_abort("tok.type != TOK_CMD");
return (ERROR);
ret = ast_alloc(AST_COMMAND);
i = 0;
while (i < tok.subtokens.len)
@ -235,15 +235,17 @@ t_ast_node ast_from_cmd(t_token tok)
if (tok.subtokens.buffer[i].type == TOK_REDIR)
{
if (handle_tok_redir(ret, &tok.subtokens.buffer[i]))
me_abort("handle_tok_redir error");
return (ast_free(ret), ERROR);
}
else if (tok.subtokens.buffer[i].type == TOK_WORD)
{
if (handle_tok_word(ret, &tok.subtokens.buffer[i]))
me_abort("handle_tok_word error");
return (ast_free(ret), ERROR);
}
else
me_abort("handle_tok_cmd not word|redir");
return (ast_free(ret), ERROR);
i++;
}
return (ret);
token_free(tok);
return (vec_ast_push(output_queue, ret), NO_ERROR);
}

View file

@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/10/09 12:44:53 by maiboyer #+# #+# */
/* Updated: 2024/10/10 16:22:39 by maiboyer ### ########.fr */
/* Updated: 2024/10/10 17:26:38 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -16,6 +16,40 @@
#include "me/vec/vec_token.h"
#include "parser/token.h"
static enum e_ast_list_kind _ast_list_get_op(enum e_token ty)
{
if (ty == TOK_AND)
return (AST_LIST_AND);
else if (ty == TOK_OR)
return (AST_LIST_OR);
me_abort("invalid token type for ast_list operator");
return (-1);
}
static t_error _tok_pipeline(t_vec_ast *output_queue, t_ast_node rhs, t_ast_node lhs)
{
t_ast_node ret;
if (rhs->kind == AST_PIPELINE)
{
vec_ast_push_front(&rhs->data.pipeline.statements, lhs);
vec_ast_push(output_queue, rhs);
}
else if (lhs->kind == AST_PIPELINE)
{
vec_ast_push(&lhs->data.pipeline.statements, rhs);
vec_ast_push(output_queue, lhs);
}
else
{
ret = ast_alloc(AST_PIPELINE);
vec_ast_push(&ret->data.pipeline.statements, lhs);
vec_ast_push(&ret->data.pipeline.statements, rhs);
vec_ast_push(output_queue, ret);
}
return (NO_ERROR);
}
/// depending on op, which can be: TOK_AND TOK_PIPE TOK_OR
/// choose the right ast_node to build (t_ast_node->data.list + set operator, or t_ast_node->data.pipeline)
/// pop two elements from output_queue. for now the function must print an error if there are not enough elements
@ -30,39 +64,28 @@
///
/// in the end we should change to using `t_error` and pushing the ast_node directly to output_queue in the function,
/// will change that later tho :)
t_ast_node ast_from_op(t_token tok, t_vec_ast *output_queue)
t_error ast_from_op(t_token tok, t_vec_ast *output_queue)
{
t_ast_node ret;
t_ast_node tmp;
t_ast_node lhs;
t_ast_node rhs;
// this needs a guard in case output_queue has fewer than 2 elements
// otherwise it is good :)
// you could also make TOK_AND and TOK_OR share the same code to save some lines
ret = NULL;
if (tok.type == TOK_AND)
if (!(tok.type == TOK_AND || tok.type == TOK_OR || tok.type == TOK_PIPE))
return (ERROR);
if (output_queue == NULL || output_queue->len < 2)
return (ERROR);
vec_ast_pop(output_queue, &rhs);
vec_ast_pop(output_queue, &lhs);
if (tok.type == TOK_AND || tok.type == TOK_OR)
{
ret = ast_alloc(AST_LIST);
ret->data.list.op = AST_LIST_AND;
vec_ast_pop(output_queue, &ret->data.list.right);
vec_ast_pop(output_queue, &ret->data.list.left);
ret->data.list.op = _ast_list_get_op(tok.type);
ret->data.list.left = lhs;
ret->data.list.right = rhs;
vec_ast_push(output_queue, ret);
}
else if (tok.type == TOK_OR)
{
ret = ast_alloc(AST_LIST);
ret->data.list.op = AST_LIST_OR;
vec_ast_pop(output_queue, &ret->data.list.right);
vec_ast_pop(output_queue, &ret->data.list.left);
}
else if (tok.type == TOK_PIPE)
{
// Here there is an optimization that could be done in the future: if one node is already an AST_PIPELINE, just push the other node into the right place in it and return the existing AST_PIPELINE node instead of creating a new one!
ret = ast_alloc(AST_PIPELINE);
vec_ast_pop(output_queue, &tmp);
vec_ast_push(&ret->data.pipeline.statements, tmp);
vec_ast_pop(output_queue, &tmp);
vec_ast_push(&ret->data.pipeline.statements, tmp);
}
else
me_abort("ast_from_op not the good token type gived !\n");
return (ret);
else if (tok.type == TOK_PIPE && _tok_pipeline(output_queue, rhs, lhs))
return (ERROR);
token_free(tok);
return (NO_ERROR);
}
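
For reference, the reduction ast_from_op performs can be pictured with a small standalone sketch: pop the two most recent operands off the output queue and push back a single operator node (a list node for && / ||, a pipeline node for |). The types below are simplified stand-ins for illustration only, not the project's t_ast_node / t_vec_ast:

#include <stdio.h>
#include <stdlib.h>

enum e_op { OP_AND, OP_OR, OP_PIPE };

typedef struct s_node
{
	const char		*text;	/* set on leaf (command) nodes only */
	enum e_op		op;		/* meaningful on operator nodes only */
	struct s_node	*left;
	struct s_node	*right;
}	t_node;

/* Pop two operands off the stack, wrap them in an operator node,
** and push the result back; returns NULL if fewer than 2 operands. */
static t_node	*reduce(enum e_op op, t_node **stack, int *len)
{
	t_node	*node;

	if (*len < 2)
		return (NULL);
	node = calloc(1, sizeof(*node));
	if (node == NULL)
		return (NULL);
	node->op = op;
	node->right = stack[--(*len)];
	node->left = stack[--(*len)];
	stack[(*len)++] = node;
	return (node);
}

int	main(void)
{
	t_node	a = {"cat", 0, NULL, NULL};
	t_node	b = {"grep foo", 0, NULL, NULL};
	t_node	*stack[8];
	int		len;

	stack[0] = &a;
	stack[1] = &b;
	len = 2;
	if (reduce(OP_PIPE, stack, &len) == NULL)
		return (1);
	printf("%s | %s\n", stack[0]->left->text, stack[0]->right->text);
	free(stack[0]);
	return (0);
}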

View file

@ -6,7 +6,7 @@
/* By: rparodi <rparodi@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/10/07 18:04:13 by rparodi #+# #+# */
/* Updated: 2024/10/09 12:44:24 by maiboyer ### ########.fr */
/* Updated: 2024/10/10 17:28:21 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -37,7 +37,7 @@ t_str token_name(t_token *token);
/// the names may be different idk
/// eventually the function will use t_error and everything;
/// struct s_ast_command `ast/include/ast/_raw_structs.h`
t_ast_node ast_from_cmd(t_token tok);
t_error ast_from_cmd(t_token tok, t_vec_ast *output_queue);
/// depending on op, which can be: TOK_AND TOK_PIPE TOK_OR
/// choose the right ast_node to build (t_ast_node->data.list + set operator, or t_ast_node->data.pipeline)
@ -49,7 +49,7 @@ t_ast_node ast_from_cmd(t_token tok);
/// struct s_ast_list if (tok.type == TOK_AND || tok.type == TOK_OR)
/// struct s_ast_pipeline if (tok.type == TOK_PIPE)
/// `ast/include/ast/_raw_structs.h`
t_ast_node ast_from_op(t_token tok, t_vec_ast *output_queue);
t_error ast_from_op(t_token tok, t_vec_ast *output_queue);
t_error yarn(t_vec_token ts, t_vec_ast *out)
{
@ -63,7 +63,10 @@ t_error yarn(t_vec_token ts, t_vec_ast *out)
while (!vec_token_pop_front(&ts, &tok))
{
if (tok.type == TOK_CMD)
vec_ast_push(&output_queue, ast_from_cmd(tok));
{
if (ast_from_cmd(tok, &output_queue))
return (vec_token_free(stack), vec_ast_free(output_queue), token_free(tok), ERROR);
}
else if (tok.type == TOK_LPAREN)
vec_token_push(&stack, tok);
else if (tok.type == TOK_OR || tok.type == TOK_AND || tok.type == TOK_PIPE)
@ -71,31 +74,38 @@ t_error yarn(t_vec_token ts, t_vec_ast *out)
while (vec_token_last(&stack) != NULL && vec_token_last(&stack)->type != TOK_LPAREN && _get_precedance(vec_token_last(&stack)) > _get_precedance(&tok))
{
vec_token_pop(&stack, &op);
vec_ast_push(&output_queue, ast_from_op(op, &output_queue));
if (ast_from_op(op, &output_queue))
return (vec_token_free(stack), vec_ast_free(output_queue), token_free(tok), token_free(op), ERROR);
}
vec_token_push(&stack, tok);
}
else if (tok.type == TOK_RPAREN)
{
token_free(tok);
// here this needs to be changed to push into an ast_node->data.subshell
// I'll take care of that tonight/later
while (vec_token_last(&stack) != NULL && vec_token_last(&stack)->type != TOK_LPAREN)
{
vec_token_pop(&stack, &op);
vec_ast_push(&output_queue, ast_from_op(op, &output_queue));
if (ast_from_op(op, &output_queue))
return (vec_token_free(stack), vec_ast_free(output_queue), token_free(op), ERROR);
}
if (!(vec_token_last(&stack) != NULL && vec_token_last(&stack)->type == TOK_LPAREN))
return (ERROR);
return (vec_token_free(stack), vec_ast_free(output_queue), ERROR);
vec_token_pop(&stack, &tok);
token_free(tok);
t_ast_node snode;
t_ast_node tmp;
snode = ast_alloc(AST_SUBSHELL);
vec_ast_pop(&output_queue, &tmp);
vec_ast_push(&snode->data.subshell.body, tmp);
vec_ast_push(&output_queue, snode);
}
}
while (!vec_token_pop(&stack, &op))
{
if (op.type == TOK_LPAREN)
return (token_free(tok), ERROR);
vec_ast_push(&output_queue, ast_from_op(op, &output_queue));
return (token_free(op), ERROR);
if (ast_from_op(op, &output_queue))
return (vec_token_free(stack), vec_ast_free(output_queue), token_free(op), ERROR);
}
vec_token_free(ts);
vec_token_free(stack);

View file

@ -1 +0,0 @@
__pycache__

View file

@ -1,12 +0,0 @@
import collapse
import concat
import str_to_token
import ttoken
s = input("> ")
print(s)
tokens = str_to_token.str_to_token(s)
concated_tokens = concat.concat(tokens)
collapsed_tokens = collapse.collapse(concated_tokens)
ttoken.print_tokenlist(collapsed_tokens)

View file

@ -1,38 +0,0 @@
from ttoken import *
TT = TokenType
# This function will transform some tokens into others depending on what follows them
def collapse(tokens: list[Token]):
i = 0
out = []
while i < len(tokens):
tok = tokens[i]
peek = tokens[i + 1] if i + 1 < len(tokens) else None
if peek is None:
out.append(tok)
i += 1
continue
if tok.ty == TT.PIPE and peek.ty == TT.PIPE:
out.append(Token(TT.OR, string="||"))
i += 2
elif tok.ty == TT.AMP and peek.ty == TT.AMP:
out.append(Token(TT.AND, string="&&"))
i += 2
elif tok.ty == TT.CARRET and tok.string == "<" and peek.ty == TT.CARRET and peek.string == "<":
out.append(Token(TT.DLCARRET, string="<<"))
i += 2
elif tok.ty == TT.CARRET and tok.string == ">" and peek.ty == TT.CARRET and peek.string == ">":
out.append(Token(TT.DRCARRET, string=">>"))
i += 2
elif tok.ty == TT.CARRET and tok.string == "<" :
out.append(Token(TT.LCARRET, string="<"))
i += 1
elif tok.ty == TT.CARRET and tok.string == ">" :
out.append(Token(TT.RCARRET, string=">"))
i += 1
else:
out.append(tok)
i += 1
return out
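
Rendered as a standalone C sketch (illustrative only, using plain strings instead of the prototype's Token objects; the << and >> cases follow the same pattern), the two-token lookahead merge above looks like this:

#include <stdio.h>
#include <string.h>

/* Merge adjacent "|" "|" into "||" and "&" "&" into "&&";
** everything else is copied through unchanged. */
static int	collapse(const char **in, int len, const char **out)
{
	int	i;
	int	n;

	i = 0;
	n = 0;
	while (i < len)
	{
		if (i + 1 < len && !strcmp(in[i], "|") && !strcmp(in[i + 1], "|"))
		{
			out[n++] = "||";
			i += 2;
		}
		else if (i + 1 < len && !strcmp(in[i], "&") && !strcmp(in[i + 1], "&"))
		{
			out[n++] = "&&";
			i += 2;
		}
		else
			out[n++] = in[i++];
	}
	return (n);
}

int	main(void)
{
	const char	*in[] = {"cat", "|", "|", "grep", "&", "&", "true"};
	const char	*out[7];
	int			n;
	int			i;

	n = collapse(in, 7, out);
	i = 0;
	while (i < n)
		printf("[%s] ", out[i++]);
	printf("\n");
	return (0);
}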

View file

@ -1,25 +0,0 @@
from ttoken import *
# This function will make a "big" token that will represent a word in the shell sense
def concat(tokens: list[Token]) -> list[Token]:
i = 0
out = []
while i < len(tokens):
tok = tokens[i]
# if the token is a token that can be inside a word, then we start creating a WORD "metaToken"
if tok.is_word():
word = Token(TokenType.WORD, subtokens=[])
word.subtokens.append(tok)
j = 1
# then we take every following token that is also a word and push it
while i + j < len(tokens) and (tokens[i + j]).is_word():
word.subtokens.append(tokens[i + j])
j += 1
i += j
out.append(word)
else:
# otherwise we just push the token alone
out.append(tok)
i += 1
return out

View file

@ -1,60 +0,0 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1726560853,
"narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1726931411,
"narHash": "sha256-Oxfw+YhT/RDdOmzYbtrFSkU2SwdO7UfbjXWuU6Bo4+o=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "c0e65bb8293c21f3aa0fdc9fae8dcccec187c1cf",
"type": "github"
},
"original": {
"owner": "nixos",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

View file

@ -1,35 +0,0 @@
{
description = "Flake utils demo";
inputs.nixpkgs.url = "github:nixos/nixpkgs";
inputs.flake-utils.url = "github:numtide/flake-utils";
outputs = {
self,
nixpkgs,
flake-utils,
}:
flake-utils.lib.eachDefaultSystem (
system: let
pkgs = nixpkgs.legacyPackages.${system};
in {
devShell = pkgs.mkShell {
packages = with pkgs;
[
gnumake
llvmPackages_18.bintools
tokei
coreutils
python312
tree
]
++ (
if system == "x86_64-linux"
then [valgrind valgrind.dev]
else []
);
#ASAN_OPTIONS = "strict_string_checks=1:detect_stack_use_after_return=1:check_initialization_order=1:strict_init_order=1";
};
}
);
}

View file

@ -1,107 +0,0 @@
from ttoken import *
TT = TokenType
def is_quote(c: str) -> bool:
return c == "'" or c == '"'
# This function takes the string and separates it into different tokens depending on the quotes
def str_to_token(s: str) -> list[Token]:
tokens = []
current_token = None
quote = 0
i = 0
while i < len(s):
c = s[i]
if quote == 0:
# if we have a quote, just push the current token if any, then switch to the correct quote token
if is_quote(c):
if current_token != None:
tokens.append(current_token)
quote = c
current_token = Token(TT.DQUOTE if c == '"' else TT.SQUOTE, string="")
else:
# here we have no quote, so we first create a token if none exists, then handle special stuff
# like whitespace for example, or any character we want to split into a single token of its own (; $ | &)
if current_token == None:
current_token = Token(TT.NQUOTE, string="")
if c.isspace():
# we have whitespace, so create a whitespace token, and push the current token
# if it isn't empty and isn't whitespace
if (
len(current_token.string) != 0
and current_token.ty != TT.WHITESPACE
):
tokens.append(current_token)
current_token = Token(TT.WHITESPACE, string="")
i += 1;
continue;
else:
# we DON'T have whitespace, so if the current token is a whitespace token, just push it and start a new no-quote token
if current_token.ty == TT.WHITESPACE:
tokens.append(current_token)
current_token = Token(TT.NQUOTE, string="")
if c == "$":
tokens.append(current_token)
current_token = None
tokens.append(Token(TT.DOLLAR, string="$"))
elif c == "(":
tokens.append(current_token)
current_token = None
tokens.append(Token(TT.LPAREN, string="("))
elif c == ")":
tokens.append(current_token)
current_token = None
tokens.append(Token(TT.RPAREN, string=")"))
elif c == "|":
tokens.append(current_token)
current_token = None
tokens.append(Token(TT.PIPE, string="|"))
elif c == "&":
tokens.append(current_token)
current_token = None
tokens.append(Token(TT.AMP, string="&"))
elif c == ";":
tokens.append(current_token)
current_token = None
tokens.append(Token(TT.SEMICOLON, string=";"))
elif c == ">" or c == "<":
tokens.append(current_token)
current_token = None
tokens.append(Token(TT.CARRET, string=c))
else:
current_token.append_char(c)
elif quote == "'":
# we are in a single quote; basically we push until we hit another single quote
if c == "'":
tokens.append(current_token)
current_token = None
quote = 0
else:
if current_token == None:
current_token = Token(TT.SQUOTE, string="")
current_token.append_char(c)
elif quote == '"':
# we are in a double quote; basically we push until we hit another double quote
if c == '"':
tokens.append(current_token)
current_token = None
quote = 0
else:
if current_token == None:
current_token = Token(TT.DQUOTE, string="")
current_token.append_char(c)
else:
print("you fucked up you quote thingy")
i += 1
# if the current token is not none and the current token is "no quote" then we push it
if current_token != None and current_token.ty == TT.NQUOTE:
tokens.append(current_token)
# cleanup the empty tokens that may be here
out = []
for tok in tokens:
if not (tok.ty == TT.NQUOTE and len(tok.string) == 0):
out.append(tok)
return out

View file

@ -1,63 +0,0 @@
from enum import Enum
from dataclasses import dataclass
TokenType = Enum(
"TokenType",
[
"AMP", # ampersand == &
"AND", # and == &&
"CARRET", # any carret == < > << >>
"DLCARRET", # double left carret == <<
"DOLLAR", # dollar == $
"DQUOTE", # double quote string
"DRCARRET", # double right carret == >>
"EXPENSION", # an expension == $<no_quote_word>
"LCARRET", # left carret == <
"LPAREN", # left parenthesis == (
"NQUOTE", # no quote string
"OR", # or == ||
"PIPE", # pipe == |
"RCARRET", # right carret == >
"RPAREN", # right parenthesis == )
"SEMICOLON", # semicolor == ;
"SQUOTE", # single quote string
"WHITESPACE", # whitespace outside of quoted strings
"WORD", # a meta token, which contains subtokens
],
)
@dataclass
class Token:
ty: TokenType
string: str = None
subtokens: list = None
def is_metatoken(self) -> bool:
return self.subtokens != None
def append_char(self, c: str):
if self.string is None:
raise Exception(
f"Tried to push a char on a token that contains subtokens, TT={self.ty}"
)
self.string += c
def is_word(self):
return (
self.ty == TokenType.SQUOTE
or self.ty == TokenType.DQUOTE
or self.ty == TokenType.NQUOTE
or self.ty == TokenType.DOLLAR
)
def print_tokenlist(tokens: list[Token], *, depth=0):
for tok in tokens:
if tok.is_metatoken():
print_tokenlist(tok.subtokens, depth=depth + 1)
else:
print(f"{'\t' * depth}{tok.ty.name:>10} => \x1b[31;40m{tok.string}\x1b[0m")
__all__ = ["TokenType", "Token", "print_tokenlist"]

View file

@ -6,7 +6,7 @@
/* By: rparodi <rparodi@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/09/06 16:31:41 by rparodi #+# #+# */
/* Updated: 2024/10/03 21:44:22 by maiboyer ### ########.fr */
/* Updated: 2024/10/10 17:39:40 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -54,12 +54,18 @@ t_error get_user_input(t_state *state)
void exec_shcat(t_state *state)
{
t_program_result prog_res;
t_ast_node prog;
prog_res = (t_program_result){.exit = 0};
if (state->ast != NULL && run_program(\
&state->ast->data.program, state, &prog_res))
if (state->ast->kind != AST_PROGRAM)
{
prog = ast_alloc(AST_PROGRAM);
vec_ast_push(&prog->data.program.body, state->ast);
state->ast = prog;
}
if (state->ast != NULL && run_program(&state->ast->data.program, state, &prog_res))
printf("Error when execting the Command \n");
// ast_free(state->ast);
ast_free(state->ast);
}
void ft_take_args(t_state *state)

View file

@ -6,7 +6,7 @@
/* By: rparodi <rparodi@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/03/28 14:40:38 by rparodi #+# #+# */
/* Updated: 2024/10/09 12:40:13 by maiboyer ### ########.fr */
/* Updated: 2024/10/10 17:37:45 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -42,8 +42,7 @@ void ft_take_args(t_state *state);
void ast_free(t_ast_node node);
t_error split_str_first(\
t_const_str s, char splitter, t_str *before, t_str *after)
t_error split_str_first(t_const_str s, char splitter, t_str *before, t_str *after)
{
t_usize i;
@ -82,40 +81,27 @@ t_error populate_env(t_hashmap_env *env, t_str envp[])
return (NO_ERROR);
}
/*
void print_node_data(t_node *t, t_usize depth)
{
t_usize idx;
idx = 0;
if (t->kind == 7)
return;
printf("\x1b[%im[%-6s](%lu)\x1b[0m", t->field_str == NULL ? \
90 : 32, t->field_str == NULL ? "nil" : t->field_str, t->field);
while (idx++ < depth + 1)
printf("\t");
idx = 0;
printf("%s(%lu) = %s\n", t->kind_str, t->kind, node_getstr(t));
while (idx < t->childs_count)
print_node_data(&t->childs[idx++], depth + 1);
}
*/
t_error yarn(t_vec_token ts, t_vec_token *output);
t_error yarn(t_vec_token ts, t_vec_ast *output);
void parse_str(t_state *state)
{
t_vec_token tokens;
t_vec_ast ast;
if (tokenize(state->str_input, &tokens))
return;
if (ts_apply_passes(tokens, &tokens))
return;
//if (yarn(tokens, &tokens))
// return ;
printf("\n\nEND TOKENS\n");
// TODO: remove
ts_print(&tokens);
vec_token_free(tokens);
if (yarn(tokens, &ast))
return ((void)printf("failed to build ast\n"));
if (ast.len != 1)
me_abort("Unhandled error: ast.len != 1");
vec_ast_pop(&ast, &state->ast);
ast_print(state->ast);
printf("\nast\n");
vec_ast_free(ast);
}
t_i32 main(t_i32 argc, t_str argv[], t_str envp[])

View file

@ -1,3 +1,3 @@
#!/usr/bin/env bash
make && valgrind --leak-check=full --show-leak-kinds=all --track-origins=yes --track-fds=yes --trace-children=yes --read-var-info=yes --read-inline-info=yes ./minishell <<<'cat </dev/null | (cat | cat "./.envrc") | banane | truc'
make bonus && valgrind --leak-check=full --show-leak-kinds=all --track-origins=yes --track-fds=yes --trace-children=yes --read-var-info=yes --read-inline-info=yes ./minishell_bonus <<<'cat </dev/null | cat | cat "./.envrc" | cat | cat'