aboutsummaryrefslogtreecommitdiff
path: root/src/std/arena_allocator.c
diff options
context:
space:
mode:
Diffstat (limited to 'src/std/arena_allocator.c')
-rw-r--r--src/std/arena_allocator.c110
1 files changed, 110 insertions, 0 deletions
diff --git a/src/std/arena_allocator.c b/src/std/arena_allocator.c
new file mode 100644
index 0000000..20f0394
--- /dev/null
+++ b/src/std/arena_allocator.c
@@ -0,0 +1,110 @@
+#include "../../include/std/arena_allocator.h"
+#include "../../include/std/alloc.h"
+#include "../../include/std/thread.h"
+#include "../../include/std/log.h"
+#include <assert.h>
+
+#define ALLOC_NODE_SIZE 4096
+
+int arena_allocator_node_init(ArenaAllocatorNode *self) {
+ self->data = NULL;
+ self->size = 0;
+ self->next = NULL;
+ return am_malloc(ALLOC_NODE_SIZE, (void**)&self->data);
+}
+
+void arena_allocator_node_deinit(ArenaAllocatorNode *self) {
+ am_free(self->data);
+ self->data = NULL;
+ self->size = 0;
+ if(self->next) {
+ arena_allocator_node_deinit(self->next);
+ am_free(self->next);
+ self->next = NULL;
+ }
+}
+
/* Initialize the arena: set up the embedded head node, make it the active
   (current) node, then create the buffer used to track externally registered
   memory (see arena_allocator_add_mem). Propagates the first failing
   sub-initializer's error code via return_if_error. */
int arena_allocator_init(ArenaAllocator *self) {
    return_if_error(arena_allocator_node_init(&self->head));
    self->current = &self->head;
    return buffer_init(&self->mems, NULL);
}
+
+static void arena_allocator_deinit_buffers(ArenaAllocator *self) {
+ void **mem;
+ void **mems_end;
+ mem = buffer_begin(&self->mems);
+ mems_end = buffer_end(&self->mems);
+ while(mem != mems_end) {
+ am_free(*mem);
+ ++mem;
+ }
+ buffer_deinit(&self->mems);
+}
+
/* Tear down the arena: free all registered external memory, then the whole
   node chain. The head node is embedded in |self|, so only its contents are
   released. Clearing |current| first makes any later arena_allocator_alloc
   trip its assert instead of writing through a dangling cursor. */
void arena_allocator_deinit(ArenaAllocator *self) {
    self->current = NULL;
    arena_allocator_deinit_buffers(self);
    arena_allocator_node_deinit(&self->head);
}
+
/* Ensure the current node can absorb |size| more bytes. When it cannot, a
   new node is am_malloc'd, initialized, linked after the current node and
   made current. On node-init failure the partially created node is released
   (its buffer was not allocated) and ALLOC_FAIL is returned; note the
   specific error code from arena_allocator_node_init is not propagated.
   Returns ALLOC_OK when capacity is available. */
static CHECK_RESULT int arena_allocator_ensure_capacity_for(ArenaAllocator *self, usize size) {
    void *new_node;
    new_node = NULL;

    /* Exactly filling the node is allowed; only a true overflow grows. */
    if(self->current->size + size > ALLOC_NODE_SIZE) {
        return_if_error(am_malloc(sizeof(ArenaAllocatorNode), &new_node));
        cleanup_if_error(arena_allocator_node_init(new_node));
        self->current->next = new_node;
        self->current = new_node;
    }
    return ALLOC_OK;

    cleanup:
    if(new_node)
        am_free(new_node);
    return ALLOC_FAIL;
}
+
/* Round |ptr| up to the next multiple of |alignment| (which must be a power
   of two). Returns |ptr| unchanged when it is already aligned.
   Bug fix: the round-up bias was written as (alignment + 1) instead of the
   canonical (alignment - 1). That overshot every already-aligned pointer by
   a full alignment unit — wasting up to |alignment| bytes per allocation —
   and rounded some unaligned pointers one boundary too far. */
static void* align_ptr_ceil(void *ptr, uintptr_t alignment) {
    uintptr_t ptrval;
    ptrval = (uintptr_t)ptr;
    return (void*)((ptrval + alignment - 1) & ~(alignment - 1));
}
+
+static usize align_ptr_ceil_offset(void *ptr, uintptr_t alignment) {
+ return (uintptr_t)align_ptr_ceil(ptr, alignment) - (uintptr_t)ptr;
+}
+
+#define SCOPED_ALLOC_ALIGNMENT 8
+
+int arena_allocator_alloc(ArenaAllocator *self, usize size, void **mem) {
+ ArenaAllocatorNode *current;
+ usize alloc_size;
+ assert(self->current);
+ current = self->current;
+
+ if(size >= ALLOC_NODE_SIZE) {
+ amal_log_error("arena_allocator_alloc: tried to alloc memory of size %lu. Max allowed alloc size is %lu", size, ALLOC_NODE_SIZE);
+ return -1;
+ }
+
+ alloc_size = size + align_ptr_ceil_offset(self->current->data + self->current->size, SCOPED_ALLOC_ALIGNMENT);
+ return_if_error(arena_allocator_ensure_capacity_for(self, alloc_size));
+ /* Reallocated (new node created) */
+ if(self->current != current) {
+ *mem = self->current->data;
+ self->current->size += size;
+ } else {
+ *mem = align_ptr_ceil(self->current->data + self->current->size, SCOPED_ALLOC_ALIGNMENT);
+ self->current->size += alloc_size;
+ }
+ return 0;
+}
+
+int arena_allocator_add_mem(ArenaAllocator *self, usize *result_index) {
+ void *null_data;
+ null_data = NULL;
+ *result_index = buffer_get_size(&self->mems, sizeof(void*));
+ return buffer_append(&self->mems, &null_data, sizeof(void*));
+}