update: moved yarn and fixed leak

maix0 2024-10-08 13:44:54 +02:00
parent 9e6ae52a30
commit a9a4417919
6 changed files with 21 additions and 16 deletions

View file

@@ -20,6 +20,7 @@ token_utils \
 tokenizer \
 tokenizer_utils \
 ts_print \
+yarn/yarn \
 GEN_FILES = \
 \

View file

@@ -6,7 +6,7 @@
 /* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
 /* +#+#+#+#+#+ +#+ */
 /* Created: 2024/10/02 18:41:16 by maiboyer #+# #+# */
-/* Updated: 2024/10/07 18:15:14 by rparodi ### ########.fr */
+/* Updated: 2024/10/08 13:40:48 by maiboyer ### ########.fr */
 /* */
 /* ************************************************************************** */
@@ -15,6 +15,7 @@
 #include "me/types.h"
 #include "me/vec/vec_token.h"
 #include "parser/token.h"
+#include <stdio.h>

 /// This is a list what of kind of passes we need to make on the tokenstream
 /// they'll all have the same function signature, basically taking a token
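As context for the hunk above: the /// comment describes a table-driven pipeline, where every pass shares one function signature and ts_apply_passes runs them in order. The table's definition is not part of this diff, so the sketch below is only an assumption of how such a table is commonly declared; the struct and the `func` field name are hypothetical, while `g_ts_passes[i].name` and the pass signature come from the hunks that follow.

/* Hypothetical sketch of the pass table; the real definition is not in this diff. */
typedef t_error	(*t_ts_pass_fn)(t_vec_token ts, t_vec_token *out);

struct s_ts_pass
{
	const char		*name;	/* printed by me_printf("Applied '%s' pass\n", ...) */
	t_ts_pass_fn	func;	/* consumes one token stream, produces the next */
};

/* ts_apply_passes can then walk this table, feeding each pass the output of the previous one, as the hunk below shows. */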
@@ -71,7 +72,7 @@ t_error ts_apply_passes(t_vec_token ts, t_vec_token *out)
 		else
 			me_printf("Applied '%s' pass\n", g_ts_passes[i].name);
 		ts = next;
-		ts_print(&ts);
+		//ts_print(&ts);
 		i++;
 	}
 	return (*out = ts, NO_ERROR);
@@ -103,7 +104,7 @@ t_error ts_dq_apply_passes(t_vec_token ts, t_vec_token *out)
 		else
 			me_printf("Applied '%s' dq_pass\n", g_ts_dq_passes[i].name);
 		ts = next;
-		ts_print(&ts);
+		//ts_print(&ts);
 		i++;
 	}
 	return (*out = ts, NO_ERROR);

View file

@@ -6,7 +6,7 @@
 /* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
 /* +#+#+#+#+#+ +#+ */
 /* Created: 2024/10/02 19:04:32 by maiboyer #+# #+# */
-/* Updated: 2024/10/07 16:45:41 by maiboyer ### ########.fr */
+/* Updated: 2024/10/08 13:40:23 by maiboyer ### ########.fr */
 /* */
 /* ************************************************************************** */
@@ -46,7 +46,7 @@ t_error ts_fold_cmd(t_vec_token input, t_vec_token *output)
 		if (_is_cmd_node(input.buffer[i].type))
 		{
 			j = 0;
-			tmp = token_new(TOK_CMD);
+			tmp = token_new_meta(TOK_CMD);
 			while (i + j < input.len \
 				&& _is_cmd_node(input.buffer[i + j].type))
 				vec_token_push(&tmp.subtokens, token_clone(&input.buffer[i + j++]));
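Read in isolation, the hunk above shows what ts_fold_cmd does: every maximal run of consecutive command tokens is folded into a single TOK_CMD token whose subtokens are deep copies (via token_clone) of the run. A purely illustrative before/after, where every token kind name other than TOK_CMD is hypothetical:

/* Illustration only: TOK_WORD and TOK_PIPE are hypothetical kind names,      */
/* and whether a pipe breaks the run depends on _is_cmd_node, not shown here. */
/* before fold: [TOK_WORD "echo"] [TOK_WORD "hi"] [TOK_PIPE] [TOK_WORD "cat"] */
/* after fold:  [TOK_CMD {"echo", "hi"}]          [TOK_PIPE] [TOK_CMD {"cat"}] */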

View file

@@ -6,34 +6,37 @@
 /* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
 /* +#+#+#+#+#+ +#+ */
 /* Created: 2024/10/06 13:33:12 by maiboyer #+# #+# */
-/* Updated: 2024/10/06 13:33:48 by maiboyer ### ########.fr */
+/* Updated: 2024/10/08 13:40:57 by maiboyer ### ########.fr */
 /* */
 /* ************************************************************************** */

 #include "me/types.h"
 #include "parser/token.h"
+#include <stdio.h>

+t_str token_name(t_token *token);
+
 t_token token_clone(t_token *tok)
 {
-	t_token out;
+	t_token ret;
 	t_usize i;

-	out = token_new_none();
-	out.type = tok->type;
+	ret = token_new_none();
+	ret.type = tok->type;
 	if (tok->string.buf != NULL)
 	{
-		out.string = string_new(tok->string.capacity);
-		string_push(&out.string, tok->string.buf);
+		ret.string = string_new(tok->string.capacity);
+		string_push(&ret.string, tok->string.buf);
 	}
 	if (tok->subtokens.buffer != NULL)
 	{
-		out.subtokens = vec_token_new(tok->subtokens.capacity, token_free);
+		ret.subtokens = vec_token_new(tok->subtokens.capacity, token_free);
 		i = 0;
 		while (i < tok->subtokens.len)
-			vec_token_push(&out.subtokens, \
-				token_clone(&tok->subtokens.buffer[i++]));
+			vec_token_push(&ret.subtokens, token_clone(&tok->subtokens.buffer[i++]));
 	}
-	return (out);
+	return (ret);
 }

 bool token_is_noquote(enum e_token ttype)
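The hunk above also shows the whole of token_clone: the type, the owned string, and every subtoken are copied recursively, so the clone never aliases the original's buffers. A minimal sketch of that guarantee, assuming a `tok` that was built elsewhere in the parser; everything called here appears in this hunk or earlier in the commit.

/* Hedged sketch: after a clone, neither the string nor the subtoken       */
/* storage should be shared, otherwise freeing one token would corrupt     */
/* the other. `tok` is a hypothetical, already-built token.                */
t_token	copy;

copy = token_clone(&tok);
if (tok.string.buf != NULL && copy.string.buf == tok.string.buf)
	me_printf("aliasing bug: clone shares the original string buffer\n");
if (tok.subtokens.buffer != NULL && copy.subtokens.buffer == tok.subtokens.buffer)
	me_printf("aliasing bug: clone shares the original subtoken vector\n");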

View file

@@ -6,7 +6,7 @@
 /* By: rparodi <rparodi@student.42.fr> +#+ +:+ +#+ */
 /* +#+#+#+#+#+ +#+ */
 /* Created: 2024/03/28 14:40:38 by rparodi #+# #+# */
-/* Updated: 2024/10/05 18:54:45 by maiboyer ### ########.fr */
+/* Updated: 2024/10/08 13:29:30 by maiboyer ### ########.fr */
 /* */
 /* ************************************************************************** */