update: made some small fixes

This commit is contained in:
maix0 2024-10-03 21:38:27 +02:00
parent 11898cba9b
commit 4f1a08cbe6
8 changed files with 47 additions and 17 deletions

View file

@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/10/02 19:04:32 by maiboyer #+# #+# */
/* Updated: 2024/10/02 19:13:31 by maiboyer ### ########.fr */
/* Updated: 2024/10/03 21:37:04 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@ -15,11 +15,22 @@
#include "me/vec/vec_token.h"
#include "parser/token.h"
/// This is a sample pass
///
/// There are a few rules that the rest of the tokenizer machinery assumes
/// these functions follow:
/// - the input vec WILL be freed when the function returns, even in
/// case of error
/// - the output vector isn't populated if the function returns an error,
/// thus it shouldn't be freed in case of error
/// - the output tokens may not be direct copies of the input tokens,
/// but need to be cloned (different allocations for stuff)
t_error do_fuck_all(t_vec_token input, t_vec_token *output)
{
t_vec_token out;
t_usize i;
i = 0;
out = vec_token_new(input.len, token_free);
while (i < input.len)
{