path: root/src/tokenizer.c
author     dec05eba <dec05eba@protonmail.com>  2019-03-31 13:44:27 +0200
committer  dec05eba <dec05eba@protonmail.com>  2020-07-25 14:36:46 +0200
commit     6a9466da5377d0bc73c7e5aa48deca3740d3de6f (patch)
tree       87f4630f72fe77d037fe19d17bdc00618929f678 /src/tokenizer.c
parent     6927b6338b9655974db79c429e6ffc73037ab5e0 (diff)
Test errors, stop working on error
Diffstat (limited to 'src/tokenizer.c')
-rw-r--r--  src/tokenizer.c  81
1 file changed, 64 insertions, 17 deletions
diff --git a/src/tokenizer.c b/src/tokenizer.c
index d873b0e..7f6d08e 100644
--- a/src/tokenizer.c
+++ b/src/tokenizer.c
@@ -25,8 +25,9 @@ static int tokenizer_get_end_of_line_from_index(Tokenizer *self, int index);
/* Returns -1 if end of multiline comment was not found */
static int tokenizer_get_end_of_multiline_comment(Tokenizer *self, int index);
-int tokenizer_init(Tokenizer *self, ScopedAllocator *allocator, BufferView code, BufferView code_name) {
+int tokenizer_init(Tokenizer *self, ScopedAllocator *allocator, BufferView code, BufferView code_name, const amal_compiler_options *compiler_options) {
assert(code.size <= INT_MAX);
+ assert(compiler_options);
self->code = code;
self->index = 0;
self->prev_index = 0;
@@ -35,6 +36,7 @@ int tokenizer_init(Tokenizer *self, ScopedAllocator *allocator, BufferView code,
self->code_name = code_name.data ? code_name : create_buffer_view("<buffer>", 8);
self->number_is_integer = bool_false;
self->allocator = allocator;
+ self->compiler_options = compiler_options;
return 0;
}
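
A minimal caller-side sketch of the new compiler_options parameter, for orientation: only the error_callback and error_callback_userdata fields appear in this diff, so the struct layout and the stub callback below are assumptions rather than the project's actual API.

    #include <stdio.h>

    /* Assumed layout: only error_callback and error_callback_userdata come from
       this diff; everything else about amal_compiler_options is hypothetical. */
    typedef struct {
        void (*error_callback)(const char *msg, int len, void *userdata);
        void *error_callback_userdata;
    } amal_compiler_options_sketch;

    static void forward_error_to_stderr(const char *msg, int len, void *userdata) {
        (void)userdata;
        fwrite(msg, 1, (size_t)len, stderr); /* forward the formatted diagnostic */
    }

    /* Caller side (project types elided):
         amal_compiler_options_sketch options = {0};
         options.error_callback = forward_error_to_stderr;
         options.error_callback_userdata = NULL;
         tokenizer_init(&tokenizer, allocator, code, code_name, &options);
       Passing NULL for compiler_options now trips the new assert. */
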
@@ -569,7 +571,18 @@ int tokenizer_get_end_of_line_from_index(Tokenizer *self, int index) {
}
return index;
}
-
+/*
+static int find_non_whitespace(const char *str, usize size) {
+ usize i;
+ for(i = 0; i < size; ++i) {
+ char c;
+ c = str[i];
+ if(c != ' ' && c != '\t')
+ return i;
+ }
+ return -1;
+}
+*/
int tokenizer_get_end_of_multiline_comment(Tokenizer *self, int index) {
char c;
int comment_count;
@@ -608,30 +621,66 @@ static int tokenizer_get_line_by_index(Tokenizer *self, int index) {
return line;
}
+static int max(int a, int b) {
+ return a > b ? a : b;
+}
+
void tokenizer_print_error(Tokenizer *self, int index, const char *fmt, ...) {
va_list args;
int line;
int line_start;
int line_end;
+ /*int code_start;*/
int prev_column;
int i;
- amal_mutex *mutex;
- mutex = amal_log_get_mutex();
- ignore_result_int(amal_mutex_lock(mutex, "tokenizer_print_error"));
- va_start(args, fmt);
line = tokenizer_get_line_by_index(self, index);
line_start = tokenizer_get_start_of_line_from_index(self, index);
line_end = tokenizer_get_end_of_line_from_index(self, index);
+ /*code_start = find_non_whitespace(&self->code.data[line_start], line_end - line_start);
+ if(code_start != -1)
+ line_start += code_start;*/
prev_column = index - line_start;
- fprintf(stderr, "\x1b[1;37m%.*s:%d:%d:\x1b[0m \x1b[1;31merror:\x1b[0m ", (int)self->code_name.size, self->code_name.data, line, 1 + prev_column);
- vfprintf(stderr, fmt, args);
- fprintf(stderr, "\n%.*s\n", line_end - line_start, self->code.data + line_start);
- for(i = 0; i < prev_column; ++i)
- fprintf(stderr, " ");
- fprintf(stderr, "\x1b[1;32m^\x1b[0m\n");
- va_end(args);
- ignore_result_int(amal_mutex_unlock(mutex));
+
+ if(self->compiler_options->error_callback) {
+ char buffer[2048];
+ int bytes_copied;
+
+ bytes_copied = 0;
+ bytes_copied += max(0, snprintf(buffer + bytes_copied, sizeof(buffer) - bytes_copied, "%.*s:%d:%d: error: ", (int)self->code_name.size, self->code_name.data, line, 1 + prev_column));
+
+ if(sizeof(buffer) - bytes_copied > 0) {
+ va_start(args, fmt);
+ bytes_copied += max(0, vsnprintf(buffer + bytes_copied, sizeof(buffer) - bytes_copied, fmt, args));
+ va_end(args);
+ }
+
+ if(sizeof(buffer) - bytes_copied > 0)
+ bytes_copied += max(0, snprintf(buffer + bytes_copied, sizeof(buffer) - bytes_copied, "\n%.*s\n", line_end - line_start, self->code.data + line_start));
+
+ if(sizeof(buffer) - bytes_copied > 0) {
+ for(i = 0; i < prev_column; ++i)
+ bytes_copied += max(0, snprintf(buffer + bytes_copied, sizeof(buffer) - bytes_copied, " "));
+ }
+
+ if(sizeof(buffer) - bytes_copied > 0)
+ bytes_copied += max(0, snprintf(buffer + bytes_copied, sizeof(buffer) - bytes_copied, "^\n"));
+
+ self->compiler_options->error_callback(buffer, bytes_copied, self->compiler_options->error_callback_userdata);
+ } else {
+ amal_mutex *mutex;
+ mutex = amal_log_get_mutex();
+ ignore_result_int(amal_mutex_lock(mutex, "tokenizer_print_error"));
+ va_start(args, fmt);
+ fprintf(stderr, "\x1b[1;37m%.*s:%d:%d:\x1b[0m \x1b[1;31merror:\x1b[0m ", (int)self->code_name.size, self->code_name.data, line, 1 + prev_column);
+ vfprintf(stderr, fmt, args);
+ fprintf(stderr, "\n%.*s\n", line_end - line_start, self->code.data + line_start);
+ for(i = 0; i < prev_column; ++i)
+ fprintf(stderr, " ");
+ fprintf(stderr, "\x1b[1;32m^\x1b[0m\n");
+ va_end(args);
+ ignore_result_int(amal_mutex_unlock(mutex));
+ }
}
void tokenizer_print_error_object(Tokenizer *self, TokenizerError *error) {
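
One note on the buffer accumulation in tokenizer_print_error above: snprintf and vsnprintf return the length the output would have had, so after a truncation bytes_copied can exceed sizeof(buffer) and the unsigned expression sizeof(buffer) - bytes_copied wraps around instead of reaching zero. A clamped append helper, sketched below under that assumption (it is not part of this commit), is one way to keep the remaining-space checks well defined even for very long source lines.

    #include <stdarg.h>
    #include <stdio.h>

    /* Hypothetical helper: append formatted text to buf and return the new
       offset, clamped so it never exceeds capacity - 1 (the terminating NUL). */
    static int buffer_appendf(char *buf, int capacity, int offset, const char *fmt, ...) {
        va_list args;
        int written;

        if(offset >= capacity - 1)
            return offset; /* buffer already full */

        va_start(args, fmt);
        written = vsnprintf(buf + offset, (size_t)(capacity - offset), fmt, args);
        va_end(args);

        if(written < 0)
            return offset;        /* encoding error: keep the previous offset */
        if(written >= capacity - offset)
            return capacity - 1;  /* output was truncated: clamp to the NUL */
        return offset + written;
    }

    /* Usage sketch inside the callback branch above:
         bytes_copied = buffer_appendf(buffer, sizeof(buffer), bytes_copied,
                                       "%.*s:%d:%d: error: ", ...);
       which keeps bytes_copied at or below sizeof(buffer) - 1, so the
       remaining-space checks can never underflow. */
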
@@ -645,10 +694,8 @@ TokenizerError tokenizer_create_error(Tokenizer *self, int index, const char *fm
int bytes_copied;
va_start(args, fmt);
- bytes_copied = vsnprintf(buffer, sizeof(buffer), fmt, args);
+ bytes_copied = max(0, vsnprintf(buffer, sizeof(buffer), fmt, args));
va_end(args);
- if(bytes_copied < 0)
- bytes_copied = 0;
result.index = index;
result.str = NULL;
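
Since the commit message mentions testing errors, a test-side sketch of an error sink matching the call site above, error_callback(buffer, bytes_copied, error_callback_userdata), may be useful. The exact callback prototype is inferred from that call; the collector struct and its field names below are assumptions for illustration only.

    #include <string.h>

    /* Hypothetical test-side collector: appends each diagnostic passed to the
       error callback so a test can assert on the accumulated text. */
    typedef struct {
        char messages[4096];
        int length;
    } ErrorCollector;

    static void collect_error(const char *msg, int len, void *userdata) {
        ErrorCollector *collector = (ErrorCollector*)userdata;
        int space = (int)sizeof(collector->messages) - collector->length - 1;
        if(len > space)
            len = space;
        if(len > 0) {
            memcpy(collector->messages + collector->length, msg, (size_t)len);
            collector->length += len;
            collector->messages[collector->length] = '\0';
        }
    }

    /* Registration sketch (field names taken from this diff):
         ErrorCollector collector = {{0}, 0};
         options.error_callback = collect_error;
         options.error_callback_userdata = &collector;
       After tokenizing invalid input, collector.messages holds the formatted
       "file:line:column: error: ..." text built in tokenizer_print_error. */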