author     dec05eba <dec05eba@protonmail.com>   2019-03-02 00:40:08 +0100
committer  dec05eba <dec05eba@protonmail.com>   2020-07-25 14:36:46 +0200
commit     00ab5c3488c02beab5c3f4e371f5196404334e3c (patch)
tree       51179376d5ff4754f0ddc6366b8b56d57a12c334 /src
parent     971e0015e2d2008a5bc87e77894327c14c01b253 (diff)
Fix crash in parser import, optimize tokenizer_consume_if to not reparse if already parsed
Diffstat (limited to 'src')
-rw-r--r--   src/parser.c      29
-rw-r--r--   src/tokenizer.c   37
2 files changed, 48 insertions(+), 18 deletions(-)
diff --git a/src/parser.c b/src/parser.c
index 81f0a92..e63814f 100644
--- a/src/parser.c
+++ b/src/parser.c
@@ -70,7 +70,7 @@ static CHECK_RESULT int parser_parse_lhs(Parser *self, LhsExpr **result) {
}
/*
-FUNC_DECL = '(' PARAM* ')' '{' BODY* '}'
+CLOSURE = '(' PARAM* ')' '{' BODY* '}'
*/
static CHECK_RESULT int parser_parse_function_decl(Parser *self, FunctionDecl **func_decl) {
bool result;
@@ -101,7 +101,7 @@ static CHECK_RESULT int parser_parse_function_decl(Parser *self, FunctionDecl **
}
/*
-FUNC_CALL = IDENTIFIER '(' ARGS* ')'
+FUNC_CALL = IDENTIFIER '(' RHS* ')'
*/
static CHECK_RESULT int parser_parse_function_call(Parser *self, FunctionCall **func_call) {
bool result;
@@ -127,6 +127,7 @@ IMPORT = IMPORT_SYMBOL
*/
static CHECK_RESULT int parser_parse_import(Parser *self, Import **import) {
bool result;
+ *import = NULL;
return_if_error(tokenizer_consume_if(&self->tokenizer, TOK_IMPORT, &result));
if(!result)
@@ -138,20 +139,25 @@ static CHECK_RESULT int parser_parse_import(Parser *self, Import **import) {
}
/*
-RHS = FUNC_DECL | FUNC_CALL | IMPORT
+RHS = CLOSURE | FUNC_CALL | IMPORT
*/
static CHECK_RESULT int parser_parse_rhs(Parser *self, Ast *rhs_expr) {
FunctionDecl *func_decl;
FunctionCall *func_call;
Import *import;
+ /* bool result;*/
- return_if_error(parser_parse_function_decl(self, &func_decl));
- if(func_decl) {
- rhs_expr->type = AST_FUNCTION_DECL;
- rhs_expr->value.func_decl = func_decl;
+/*
+ return_if_error(tokenizer_consume_if(&self->tokenizer, TOK_STRING, &result));
+ if(result) {
+ String *string;
+ return_if_error(scoped_allocator_alloc(self->allocator, sizeof(String), (void**)&string));
+ string_init(string, self->tokenizer.value.string);
+ rhs_expr->type = AST_STRING;
+ rhs_expr->value.string = func_call;
return PARSER_OK;
}
-
+*/
return_if_error(parser_parse_function_call(self, &func_call));
if(func_call) {
rhs_expr->type = AST_FUNCTION_CALL;
@@ -159,6 +165,13 @@ static CHECK_RESULT int parser_parse_rhs(Parser *self, Ast *rhs_expr) {
return PARSER_OK;
}
+ return_if_error(parser_parse_function_decl(self, &func_decl));
+ if(func_decl) {
+ rhs_expr->type = AST_FUNCTION_DECL;
+ rhs_expr->value.func_decl = func_decl;
+ return PARSER_OK;
+ }
+
return_if_error(parser_parse_import(self, &import));
if(import) {
rhs_expr->type = AST_IMPORT;
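
Note on the parser.c change: the one-line addition "*import = NULL;" in parser_parse_import is the most direct candidate for the crash fix named in the commit subject. parser_parse_rhs declares the pointer without initializing it and checks "if(import)" after the call, so an early return on a non-import token could previously leave the out-pointer uninitialized when that check ran. A minimal standalone sketch of the out-parameter pattern, using hypothetical names (parse_import_sketch, caller_sketch) rather than the repository's API:

#include <stddef.h>

typedef struct Import Import;

/* Sketch only: define the output before any early return. */
static int parse_import_sketch(int next_token_is_import, Import **import) {
    *import = NULL;                 /* makes the caller's NULL check well-defined */
    if(!next_token_is_import)
        return 0;                   /* early return, *import already set */
    /* ... allocate and fill *import on success ... */
    return 0;
}

int caller_sketch(void) {
    Import *import;                 /* uninitialized, as in parser_parse_rhs */
    if(parse_import_sketch(0, &import) != 0)
        return -1;
    if(import) {                    /* safe only because the callee wrote NULL */
        /* handle the import */
    }
    return 0;
}
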
diff --git a/src/tokenizer.c b/src/tokenizer.c
index 742f9ca..b9f0ad3 100644
--- a/src/tokenizer.c
+++ b/src/tokenizer.c
@@ -25,6 +25,8 @@ int tokenizer_init(Tokenizer *self, BufferView code, BufferView code_name) {
self->index = 0;
self->prev_index = 0;
self->line = 1;
+ self->token = TOK_NONE;
+ self->needs_update = bool_true;
self->code_name = code_name.data ? code_name : create_buffer_view("<buffer>", 8);
return 0;
}
@@ -49,10 +51,12 @@ static Token tokenizer_skip_whitespace(Tokenizer *self) {
case '\t':
break;
default:
+ self->prev_index = self->index;
return TOK_NONE;
}
++self->index;
}
+ self->prev_index = self->index;
}
/* Returns -1 if end of string can't be found */
@@ -73,7 +77,9 @@ static int find_end_of_string(BufferView buf, int index) {
return -1;
}
-int tokenizer_next(Tokenizer *self, Token *token) {
+static CHECK_RESULT int tokenizer_next(Tokenizer *self, Token *token);
+
+static CHECK_RESULT int __tokenizer_next(Tokenizer *self, Token *token) {
Token last_token;
int c;
int result;
@@ -84,7 +90,6 @@ int tokenizer_next(Tokenizer *self, Token *token) {
return TOKENIZER_OK;
}
- self->prev_index = self->index;
c = tokenizer_get_char(self);
if(isAlpha(c) || c == '_') {
int identifier_start;
@@ -181,11 +186,21 @@ int tokenizer_next(Tokenizer *self, Token *token) {
return TOKENIZER_OK;
}
+/* Wrapper around __tokenizer_next to store last parsed token */
+int tokenizer_next(Tokenizer *self, Token *token) {
+ int result;
+ result = __tokenizer_next(self, token);
+ self->token = *token;
+ return result;
+}
+
int tokenizer_accept(Tokenizer *self, Token expected_token) {
Token actual_token;
return_if_error(tokenizer_next(self, &actual_token));
- if(actual_token == expected_token)
+ if(actual_token == expected_token) {
+ self->needs_update = bool_true;
return TOKENIZER_OK;
+ }
/* Todo: convert token to string */
tokenizer_print_error(self, "Expected %d, got %d", expected_token, actual_token);
@@ -193,19 +208,21 @@ int tokenizer_accept(Tokenizer *self, Token expected_token) {
}
int tokenizer_consume_if(Tokenizer *self, Token expected_token, bool *result) {
- int index;
- int line;
Token actual_token;
- index = self->index;
- line = self->line;
+ if(!self->needs_update) {
+ *result = (self->token == expected_token);
+ if(*result)
+ self->needs_update = bool_true;
+ return TOKENIZER_OK;
+ }
+
return_if_error(tokenizer_next(self, &actual_token));
if(actual_token == expected_token) {
+ self->needs_update = bool_true;
*result = bool_true;
} else {
- self->index = index;
- self->prev_index = index;
- self->line = line;
+ self->needs_update = bool_false;
*result = bool_false;
}
return TOKENIZER_OK;
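
Note on the tokenizer.c change: instead of saving and restoring index/line on a failed match, tokenizer_next (through the new __tokenizer_next wrapper) now records the last token in self->token, and needs_update marks whether that cached token has already been consumed. A failed tokenizer_consume_if leaves needs_update false, so the next call compares against the cached token without tokenizing the same text again. A compilable, simplified sketch of that caching idea, with hypothetical types and names (MiniTokenizer, mini_next, mini_consume_if) that are not the repository's API:

#include <stdbool.h>
#include <stdio.h>

typedef enum { TOK_NONE, TOK_IMPORT, TOK_IDENTIFIER, TOK_STRING } MiniToken;

typedef struct {
    const MiniToken *stream;   /* stands in for tokenizing raw source text */
    int index;
    MiniToken token;           /* last token produced, like self->token above */
    bool needs_update;         /* true when the cached token is stale */
} MiniTokenizer;

static MiniToken mini_next(MiniTokenizer *self) {
    self->token = self->stream[self->index++];
    return self->token;
}

/* Consume the next token only if it matches; on a mismatch, keep the token
   cached so the next call can test it without reading the stream again. */
static bool mini_consume_if(MiniTokenizer *self, MiniToken expected) {
    if(!self->needs_update) {
        bool match = (self->token == expected);
        if(match)
            self->needs_update = true;   /* consumed, next call must re-read */
        return match;
    }
    if(mini_next(self) == expected) {
        self->needs_update = true;
        return true;
    }
    self->needs_update = false;          /* remember the token for the next check */
    return false;
}

int main(void) {
    const MiniToken input[] = { TOK_IDENTIFIER, TOK_IMPORT };
    MiniTokenizer t = { input, 0, TOK_NONE, true };
    printf("%d\n", mini_consume_if(&t, TOK_IMPORT));      /* 0: reads TOK_IDENTIFIER, mismatch */
    printf("%d\n", mini_consume_if(&t, TOK_IDENTIFIER));  /* 1: matches the cached token */
    printf("%d\n", mini_consume_if(&t, TOK_IMPORT));      /* 1: cache consumed, reads the stream */
    return 0;
}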