/* src/std/arena_allocator.c */
#include "../../include/std/arena_allocator.h"
#include "../../include/std/alloc.h"
#include "../../include/std/thread.h"
#include "../../include/std/log.h"
#include <assert.h>

/* Size of the fixed block owned by each node; a single allocation must fit in one block. */
#define ALLOC_NODE_SIZE 4096

/* Initialize a node and allocate its fixed-size backing block. */
int arena_allocator_node_init(ArenaAllocatorNode *self) {
    self->data = NULL;
    self->size = 0;
    self->next = NULL;
    return am_malloc(ALLOC_NODE_SIZE, (void**)&self->data);
}

/* Free this node's block, then recursively free every node chained after it. */
void arena_allocator_node_deinit(ArenaAllocatorNode *self) {
    am_free(self->data);
    self->data = NULL;
    self->size = 0;
    if(self->next) {
        arena_allocator_node_deinit(self->next);
        am_free(self->next);
        self->next = NULL;
    }
}

/* Initialize the arena with its embedded head node and an empty list of tracked pointers. */
int arena_allocator_init(ArenaAllocator *self) {
    return_if_error(arena_allocator_node_init(&self->head));
    self->current = &self->head;
    return buffer_init(&self->mems, NULL);
}

/* Free every external pointer tracked in |mems|, then the buffer itself. */
static void arena_allocator_deinit_buffers(ArenaAllocator *self) {
    void **mem;
    void **mems_end;
    mem = buffer_begin(&self->mems);
    mems_end = buffer_end(&self->mems);
    while(mem != mems_end) {
        am_free(*mem);
        ++mem;
    }
    buffer_deinit(&self->mems);
}

void arena_allocator_deinit(ArenaAllocator *self) {
    self->current = NULL;
    arena_allocator_deinit_buffers(self);
    arena_allocator_node_deinit(&self->head);
}

/* Chain a fresh node when the current one cannot fit |size| more bytes. */
static CHECK_RESULT int arena_allocator_ensure_capacity_for(ArenaAllocator *self, usize size) {
    void *new_node;
    new_node = NULL;
    
    if(self->current->size + size > ALLOC_NODE_SIZE) {
        return_if_error(am_malloc(sizeof(ArenaAllocatorNode), &new_node));
        cleanup_if_error(arena_allocator_node_init(new_node));
        self->current->next = new_node;
        self->current = new_node;
    }
    return ALLOC_OK;

    cleanup:
    if(new_node)
        am_free(new_node);
    return ALLOC_FAIL;
}

/* Round |ptr| up to the next multiple of |alignment|, which must be a power of two. */
static void* align_ptr_ceil(void *ptr, uintptr_t alignment) {
    uintptr_t ptrval;
    ptrval = (uintptr_t)ptr;
    return (void*)((ptrval + alignment - 1) & ~(alignment - 1));
}

/* Number of padding bytes needed to round |ptr| up to |alignment|. */
static usize align_ptr_ceil_offset(void *ptr, uintptr_t alignment) {
    return (uintptr_t)align_ptr_ceil(ptr, alignment) - (uintptr_t)ptr;
}
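
/*
 * Worked example of the two helpers above, with an 8-byte alignment:
 *
 *     align_ptr_ceil((void*)13, 8)        == (void*)16
 *     align_ptr_ceil_offset((void*)13, 8) == 3
 *     align_ptr_ceil((void*)16, 8)        == (void*)16  (already aligned, offset 0)
 */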

/* Every allocation returned by the arena is aligned to this boundary. */
#define SCOPED_ALLOC_ALIGNMENT 8

/* Allocate |size| bytes from the arena; |*mem| is 8-byte aligned and lives until deinit. */
int arena_allocator_alloc(ArenaAllocator *self, usize size, void **mem) {
    ArenaAllocatorNode *current;
    usize alloc_size;
    assert(self->current);
    current = self->current;

    if(size >= ALLOC_NODE_SIZE) {
        amal_log_error("arena_allocator_alloc: tried to alloc memory of size %lu. Max allowed alloc size is %lu",
            (unsigned long)size, (unsigned long)ALLOC_NODE_SIZE);
        return -1;
    }

    alloc_size = size + align_ptr_ceil_offset(self->current->data + self->current->size, SCOPED_ALLOC_ALIGNMENT);
    return_if_error(arena_allocator_ensure_capacity_for(self, alloc_size));
    /* A new node was chained in; its block starts empty, so its base pointer is already aligned */
    if(self->current != current) {
        *mem = self->current->data;
        self->current->size += size;
    } else {
        *mem = align_ptr_ceil(self->current->data + self->current->size, SCOPED_ALLOC_ALIGNMENT);
        self->current->size += alloc_size;
    }
    return 0;
}
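
/*
 * Example trace on a fresh arena (head node, size 0; assumes am_malloc
 * returns 8-byte aligned blocks, as malloc does):
 *   alloc(100):  no padding needed, *mem = data, node size becomes 100.
 *   alloc(30):   data + 100 needs 4 padding bytes, *mem = data + 104,
 *                node size becomes 134.
 *   alloc(4000): 134 + padding + 4000 exceeds 4096, so a new node is
 *                chained and the allocation starts at its (aligned) base.
 */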

/* Reserve a slot in |mems| for an external pointer that should be freed with the arena. */
int arena_allocator_add_mem(ArenaAllocator *self, usize *result_index) {
    void *null_data;
    null_data = NULL;
    *result_index = buffer_get_size(&self->mems, sizeof(void*));
    return buffer_append(&self->mems, &null_data, sizeof(void*));
}
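
/*
 * Illustrative usage sketch (not part of the original file; assumes 0 is the
 * success code, matching the returns above):
 *
 *     ArenaAllocator arena;
 *     void *mem;
 *     if(arena_allocator_init(&arena) != 0)
 *         return -1;
 *     if(arena_allocator_alloc(&arena, 128, &mem) != 0) {
 *         arena_allocator_deinit(&arena);
 *         return -1;
 *     }
 *     ...use |mem|; individual allocations are never freed on their own...
 *     arena_allocator_deinit(&arena);  (frees every node and tracked pointer)
 */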