Normed lexer
This commit is contained in:
parent
5ebadce4f8
commit
4a8fb259dc
7 changed files with 191 additions and 158 deletions
@@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/08/31 18:06:07 by maiboyer #+# #+# */
/* Updated: 2024/08/31 18:06:39 by maiboyer ### ########.fr */
/* Updated: 2024/08/31 18:23:07 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@@ -23,9 +23,16 @@ void ts_lexer__mark_end(TSLexer *_self);
void ts_lexer_advance_to_end(Lexer *self);
void ts_lexer_goto(Lexer *self, Length position);

bool ts_lexer__do_advance_loop(Lexer *self, const TSRange **current_range);
void ts_lexer__do_advance_after_loop(Lexer *self, bool skip,
		const TSRange *cur);

// Intended to be called only from functions that control logging.
void ts_lexer__do_advance(Lexer *self, bool skip)
{
	const TSRange *cur = \
		&self->included_ranges[self->current_included_range_index];

	if (self->lookahead_size)
	{
		self->current_position.bytes += self->lookahead_size;
@@ -35,42 +42,58 @@ void ts_lexer__do_advance(Lexer *self, bool skip)
			self->current_position.extent.column = 0;
		}
		else
		{
			self->current_position.extent.column += self->lookahead_size;
		}
	}
	while (self->current_position.bytes >= cur->end_byte
		|| cur->end_byte == cur->start_byte)
		if (ts_lexer__do_advance_loop(self, &cur))
			break ;
	ts_lexer__do_advance_after_loop(self, skip, cur);
}

	const TSRange *current_range = &self->included_ranges[self->current_included_range_index];
	while (self->current_position.bytes >= current_range->end_byte || current_range->end_byte == current_range->start_byte)
	{
// Advance to the next character in the source code, retrieving a new
// chunk of source code if needed.
void ts_lexer__advance(TSLexer *_self, bool skip)
{
	Lexer *self;

	self = (Lexer *)_self;
	if (!self->chunk)
		return ;
	ts_lexer__do_advance(self, skip);
}

bool ts_lexer__do_advance_loop(Lexer *self, const TSRange **current_range)
{
	if (self->current_included_range_index < self->included_range_count)
	{
		self->current_included_range_index++;
	}
	if (self->current_included_range_index < self->included_range_count)
	{
		current_range++;
		(*current_range)++;
		self->current_position = (Length){
			current_range->start_byte,
			current_range->start_point,
			(*current_range)->start_byte,
			(*current_range)->start_point,
		};
	}
	else
	{
		current_range = NULL;
		break;
	}
	(*current_range) = NULL;
	return (true);
	}
	return (false);
}

void ts_lexer__do_advance_after_loop(Lexer *self, bool skip,
		const TSRange *cur)
{
	if (skip)
		self->token_start_position = self->current_position;

	if (current_range)
	{
		if (self->current_position.bytes < self->chunk_start || self->current_position.bytes >= self->chunk_start + self->chunk_size)
	if (cur)
	{
		if (self->current_position.bytes < self->chunk_start
			|| self->current_position.bytes >= self->chunk_start
			+ self->chunk_size)
			ts_lexer__get_chunk(self);
		}
		ts_lexer__get_lookahead(self);
	}
	else
@@ -80,13 +103,3 @@ void ts_lexer__do_advance(Lexer *self, bool skip)
		self->lookahead_size = 1;
	}
}

// Advance to the next character in the source code, retrieving a new
// chunk of source code if needed.
void ts_lexer__advance(TSLexer *_self, bool skip)
{
	Lexer *self = (Lexer *)_self;
	if (!self->chunk)
		return;
	ts_lexer__do_advance(self, skip);
}
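For readability, the refactored advance path from the hunks above is reassembled below into one snippet. This is a sketch, not part of the commit: indentation, brace placement, and the elided row/column bookkeeping are assumptions; the statements themselves are taken from the diff.

bool	ts_lexer__do_advance_loop(Lexer *self, const TSRange **current_range);
void	ts_lexer__do_advance_after_loop(Lexer *self, bool skip,
			const TSRange *cur);

void	ts_lexer__do_advance(Lexer *self, bool skip)
{
	const TSRange	*cur = \
		&self->included_ranges[self->current_included_range_index];

	if (self->lookahead_size)
	{
		self->current_position.bytes += self->lookahead_size;
		/* ...row/column bookkeeping, unchanged context omitted here... */
	}
	while (self->current_position.bytes >= cur->end_byte
		|| cur->end_byte == cur->start_byte)
		if (ts_lexer__do_advance_loop(self, &cur))
			break ;
	ts_lexer__do_advance_after_loop(self, skip, cur);
}

Per the hunks, the loop helper returns true once it runs out of included ranges (setting *current_range to NULL), which is what lets the caller break out of the while loop before refreshing the chunk and lookahead.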
@@ -38,7 +38,9 @@ void ts_lexer__clear_chunk(Lexer *self)
void ts_lexer__get_chunk(Lexer *self)
{
	self->chunk_start = self->current_position.bytes;
	self->chunk = self->input.read(self->input.payload, self->current_position.bytes, self->current_position.extent, &self->chunk_size);
	self->chunk = self->input.read(self->input.payload,
			self->current_position.bytes, self->current_position.extent,
			&self->chunk_size);
	if (!self->chunk_size)
	{
		self->current_included_range_index = self->included_range_count;

@@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/08/31 18:07:07 by maiboyer #+# #+# */
/* Updated: 2024/08/31 18:07:21 by maiboyer ### ########.fr */
/* Updated: 2024/08/31 18:12:10 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@@ -39,21 +39,24 @@ bool ts_lexer__eof(const TSLexer *_self)
// times if a longer match is found later.
void ts_lexer__mark_end(TSLexer *_self)
{
	Lexer *self = (Lexer *)_self;
	Lexer *self;
	TSRange *current_included_range;
	TSRange *previous_included_range;

	self = (Lexer *)_self;
	if (!ts_lexer__eof(&self->data))
	{
		// If the lexer is right at the beginning of included range,
		// then the token should be considered to end at the *end* of the
		// previous included range, rather than here.
		TSRange *current_included_range = &self->included_ranges[self->current_included_range_index];
		if (self->current_included_range_index > 0 && self->current_position.bytes == current_included_range->start_byte)
		current_included_range = \
			&self->included_ranges[self->current_included_range_index];
		if (self->current_included_range_index > 0 \
			&& self->current_position.bytes == current_included_range->start_byte)
		{
			TSRange *previous_included_range = current_included_range - 1;
			previous_included_range = current_included_range - 1;
			self->token_end_position = (Length){
				previous_included_range->end_byte,
				previous_included_range->end_point,
			};
			return;
			return ;
		}
	}
	self->token_end_position = self->current_position;

@@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/08/31 18:04:55 by maiboyer #+# #+# */
/* Updated: 2024/08/31 18:05:47 by maiboyer ### ########.fr */
/* Updated: 2024/08/31 18:18:31 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@@ -26,20 +26,18 @@ void ts_lexer_goto(Lexer *self, Length position);

t_u32 ts_lexer__get_column(TSLexer *_self)
{
	Lexer *self = (Lexer *)_self;

	t_u32 goal_byte = self->current_position.bytes;
	Lexer *self;
	t_u32 goal_byte;
	t_u32 result;

	self = (Lexer *)_self;
	goal_byte = self->current_position.bytes;
	self->did_get_column = true;
	self->current_position.bytes -= self->current_position.extent.column;
	self->current_position.extent.column = 0;

	if (self->current_position.bytes < self->chunk_start)
	{
		ts_lexer__get_chunk(self);
	}

	t_u32 result = 0;
	result = 0;
	if (!ts_lexer__eof(_self))
	{
		ts_lexer__get_lookahead(self);
@@ -48,9 +46,8 @@ t_u32 ts_lexer__get_column(TSLexer *_self)
			result++;
			ts_lexer__do_advance(self, false);
			if (ts_lexer__eof(_self))
				break;
				break ;
		}
	}

	return result;
	return (result);
}

@@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/08/31 18:08:11 by maiboyer #+# #+# */
/* Updated: 2024/08/31 18:08:20 by maiboyer ### ########.fr */
/* Updated: 2024/08/31 18:25:58 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@@ -24,19 +24,38 @@ void ts_lexer__mark_end(TSLexer *_self);
void ts_lexer_advance_to_end(Lexer *self);
void ts_lexer_goto(Lexer *self, Length position);

void ts_lexer_goto_inside_loop(Lexer *self, bool *found_included_range,
		TSRange *included_range, t_usize i);
void ts_lexer_goto_after_loop(Lexer *self, bool found_included_range);

void ts_lexer_goto(Lexer *self, Length position)
{
	bool found_included_range;
	TSRange *included_range;
	TSRange *last_included_range;
	t_u32 i;

	included_range = NULL;
	found_included_range = false;
	self->current_position = position;
	for (t_u32 i = 0; i < self->included_range_count; i++)
	i = 0;
	while (i < self->included_range_count)
	{
		included_range = &self->included_ranges[i];
		if (included_range->end_byte > self->current_position.bytes && included_range->end_byte > included_range->start_byte)
		if (included_range->end_byte > self->current_position.bytes
			&& included_range->end_byte > included_range->start_byte)
		{
			ts_lexer_goto_inside_loop(self, &found_included_range,
				included_range, i);
			break ;
		}
		i++;
	}
	ts_lexer_goto_after_loop(self, found_included_range);
}

void ts_lexer_goto_inside_loop(Lexer *self, bool *found_included_range,
		TSRange *included_range, t_usize i)
{
	if (included_range->start_byte >= self->current_position.bytes)
	{
		self->current_position = (Length){
@@ -44,31 +63,28 @@ void ts_lexer_goto(Lexer *self, Length position)
			.extent = included_range->start_point,
		};
	}

	self->current_included_range_index = i;
	found_included_range = true;
	break;
	}
	}
	*found_included_range = true;
}

void ts_lexer_goto_after_loop(Lexer *self, bool found_included_range)
{
	TSRange *last_included_range;

	if (found_included_range)
	{
		// If the current position is outside of the current chunk of text,
		// then clear out the current chunk of text.
		if (self->chunk &&
			(self->current_position.bytes < self->chunk_start || self->current_position.bytes >= self->chunk_start + self->chunk_size))
		{
		if (self->chunk && (self->current_position.bytes < self->chunk_start
			|| self->current_position.bytes >= self->chunk_start
			+ self->chunk_size))
			ts_lexer__clear_chunk(self);
		}

		self->lookahead_size = 0;
		self->data.lookahead = '\0';
	}
	// If the given position is beyond any of included ranges, move to the EOF
	// state - past the end of the included ranges.
	else
	{
		self->current_included_range_index = self->included_range_count;
		last_included_range = &self->included_ranges[self->included_range_count - 1];
		last_included_range = &self->included_ranges[self->included_range_count
			- 1];
		self->current_position = (Length){
			.bytes = last_included_range->end_byte,
			.extent = last_included_range->end_point,

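The tail of this file is easier to follow with the new ts_lexer_goto_after_loop pulled out of the interleaved hunk above. Again a sketch, not part of the commit: layout and the final closing braces fall outside the hunk and are assumptions; the statements come from the diff.

void	ts_lexer_goto_after_loop(Lexer *self, bool found_included_range)
{
	TSRange	*last_included_range;

	if (found_included_range)
	{
		/* Drop the cached chunk if the new position falls outside of it. */
		if (self->chunk && (self->current_position.bytes < self->chunk_start
				|| self->current_position.bytes >= self->chunk_start
				+ self->chunk_size))
			ts_lexer__clear_chunk(self);
		self->lookahead_size = 0;
		self->data.lookahead = '\0';
	}
	else
	{
		/* Position is beyond every included range: move to the EOF state. */
		self->current_included_range_index = self->included_range_count;
		last_included_range = &self->included_ranges[self->included_range_count
			- 1];
		self->current_position = (Length){
			.bytes = last_included_range->end_byte,
			.extent = last_included_range->end_point,
		};
	}
}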
@@ -6,7 +6,7 @@
/* By: maiboyer <maiboyer@student.42.fr> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2024/08/31 17:58:01 by maiboyer #+# #+# */
/* Updated: 2024/08/31 18:03:48 by maiboyer ### ########.fr */
/* Updated: 2024/08/31 18:25:16 by maiboyer ### ########.fr */
/* */
/* ************************************************************************** */
@@ -27,10 +27,9 @@ void ts_lexer_goto(Lexer *self, Length position);

void ts_lexer_init(Lexer *self)
{
	static TSRange DEFAULT_RANGE = {.start_point = \
		{ .row = 0, .column = 0, }, \
		.end_point = { .row = UINT32_MAX, .column = UINT32_MAX, }, \
		.start_byte = 0, .end_byte = UINT32_MAX};
	static TSRange default_range = {.start_point = {\
		.row = 0, .column = 0, }, .end_point = {.row = UINT32_MAX, \
		.column = UINT32_MAX, }, .start_byte = 0, .end_byte = UINT32_MAX};

	*self = (Lexer){
		.data = {
@@ -40,9 +39,11 @@ void ts_lexer_init(Lexer *self)
			.eof = ts_lexer__eof,
			.lookahead = 0,
			.result_symbol = 0, },
		.chunk = NULL, .chunk_size = 0, .chunk_start = 0, \
		.chunk = NULL,
		.chunk_size = 0,
		.chunk_start = 0,
		.current_position = {0, {0, 0}},
		.included_ranges = (void *)&DEFAULT_RANGE,
		.included_ranges = (void *)&default_range,
		.included_range_count = 1,
		.current_included_range_index = 0,
	};
@@ -73,7 +74,8 @@ void ts_lexer_start(Lexer *self)
			ts_lexer__get_chunk(self);
		if (!self->lookahead_size)
			ts_lexer__get_lookahead(self);
		if (self->current_position.bytes == 0 && self->data.lookahead == BYTE_ORDER_MARK)
		if (self->current_position.bytes == 0
			&& self->data.lookahead == BYTE_ORDER_MARK)
			ts_lexer__advance(&self->data, true);
	}
}

@@ -37,7 +37,7 @@ void ts_lexer__get_lookahead(Lexer *self)
	{
		self->lookahead_size = 1;
		self->data.lookahead = '\0';
		return;
		return ;
	}
	chunk = (const t_u8 *)self->chunk + position_in_chunk;
	self->lookahead_size = ts_decode_ascii(chunk, size, &self->data.lookahead);