Diffstat (limited to 'executor/x86_64')
-rw-r--r--   executor/x86_64/asm.c        11
-rw-r--r--   executor/x86_64/executor.c   14
2 files changed, 20 insertions, 5 deletions
diff --git a/executor/x86_64/asm.c b/executor/x86_64/asm.c
index e29130e..c633db8 100644
--- a/executor/x86_64/asm.c
+++ b/executor/x86_64/asm.c
@@ -210,16 +210,21 @@ static void asm_print_code_hex(Asm *self) {
}
#endif
+typedef union {
+ u8 *data;
+ int (*func)(void);
+} RawFuncCallPtr;
+
int asm_execute(Asm *self, u32 offset) {
- void (*func)();
+ RawFuncCallPtr raw_func_ptr;
if(mprotect(self->code, self->allocated_size, PROT_READ | PROT_EXEC) != 0)
return -errno;
/*asm_print_code_hex(self);*/
/* TODO: Verify if this is valid on all platforms. According to the ISO C standard it isn't? */
- *(void**)(&func) = (u8*)self->code + offset;
- func();
+ raw_func_ptr.data = (u8*)self->code + offset;
+ raw_func_ptr.func();
return 0;
}
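
For reference, the asm.c change replaces a cast through void** with a union, because ISO C does not define conversions between object pointers and function pointers. Below is a minimal standalone sketch of the same union-based technique, assuming a POSIX mmap/mprotect and an x86_64 target where object and function pointers share a representation (which ISO C does not guarantee but the System V ABI provides); the buffer setup, names and the single RET instruction are illustrative, not taken from the repository.

#define _DEFAULT_SOURCE
#include <stddef.h>
#include <stdint.h>
#include <sys/mman.h>

typedef union {
    uint8_t *data;      /* object pointer into the executable buffer */
    int (*func)(void);  /* function pointer used to enter the code */
} RawFuncCallPtr;

/* Mark a page-aligned JIT buffer executable and enter the code at offset.
   Writing .data and reading .func sidesteps the object-pointer to
   function-pointer cast; it still relies on both pointer kinds having
   the same representation, as they do on x86_64 System V. */
int execute_at(void *code, size_t size, uint32_t offset) {
    RawFuncCallPtr ptr;
    if(mprotect(code, size, PROT_READ | PROT_EXEC) != 0)
        return -1;
    ptr.data = (uint8_t*)code + offset;
    ptr.func();
    return 0;
}

int main(void) {
    /* One page of writable memory holding a single RET instruction (0xC3). */
    size_t size = 4096;
    uint8_t *code = mmap(NULL, size, PROT_READ | PROT_WRITE,
                         MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if(code == MAP_FAILED)
        return 1;
    code[0] = 0xC3;
    return execute_at(code, size, 0) == 0 ? 0 : 1;
}
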
diff --git a/executor/x86_64/executor.c b/executor/x86_64/executor.c
index c442da8..f747e4a 100644
--- a/executor/x86_64/executor.c
+++ b/executor/x86_64/executor.c
@@ -284,6 +284,7 @@ int amal_exec_call(amal_executor *self, u32 code_offset, u8 num_args, i8 dst_reg
isize asm_offset = asm_get_size(&impl->asm);
ASM_ENSURE_CAPACITY
+ assert(num_args % 2 == 0 && "TODO: Align stack to 16-bytes before calling functions");
assert(code_offset < asm_offset);
asm_call_rel32(&impl->asm, (isize)code_offset - asm_offset);
@@ -318,7 +319,8 @@ int amal_exec_calle(amal_executor *self, void *func, u8 num_args, i8 dst_reg) {
AsmPtr dst;
IMPL_START
- /* TODO: Support R and XMM registers so more than 5 arguments can be used for functions */
+ assert(num_args % 2 == 0 && "TODO: Align stack to 16-bytes before calling functions");
+ /* TODO: Support R and XMM registers so more than 4 arguments can be used for functions */
assert(num_args < 5);
{
/*
@@ -427,6 +429,10 @@ int amal_exec_ret(amal_executor *self, i8 reg) {
return amal_exec_func_end(self);
}
+static u32 get_next_uneven_number(u32 value) {
+ return value + !(value & 1);
+}
+
int amal_exec_func_start(amal_executor *self, u16 num_regs) {
/*
TODO: Validate stack size, or maybe remove all validation? do we really need validation?
@@ -443,7 +449,11 @@ int amal_exec_func_start(amal_executor *self, u16 num_regs) {
asm_pushr(&impl->asm, RBX);
asm_pushr(&impl->asm, RBP);
asm_mov_rr(&impl->asm, RBP, RSP);
- asm_sub_rm64_imm(&impl->asm, RSP, num_regs * sizeof(isize));
+ /*
+ Functions are entered with a stack alignment of 8 (because the call return address is pushed to the stack).
+ Make sure to align to the next 16-byte boundary even if the extra bytes are not used.
+ */
+ asm_sub_rm64_imm(&impl->asm, RSP, get_next_uneven_number(num_regs) * sizeof(isize));
return 0;
}
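
The prologue change in amal_exec_func_start can be sanity-checked in isolation: the x86_64 SysV ABI requires RSP to be 16-byte aligned at every call instruction, so a function is entered with RSP off by 8 (the pushed return address); the two pushes of RBX and RBP add another 16 bytes, and reserving an odd number of 8-byte register slots restores 16-byte alignment. The following is a small self-contained check of that arithmetic, mirroring get_next_uneven_number from the diff (the loop bound is arbitrary).

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Same helper as in the diff: an odd value is returned unchanged,
   an even value is bumped to the next odd one. */
static uint32_t get_next_uneven_number(uint32_t value) {
    return value + !(value & 1);
}

int main(void) {
    uint32_t num_regs;
    for(num_regs = 0; num_regs < 64; ++num_regs) {
        /* Bytes placed below the caller's 16-byte aligned RSP:
           8 for the return address, 16 for push RBX / push RBP,
           plus an odd number of 8-byte register slots. */
        uint32_t used = 8 + 16 + get_next_uneven_number(num_regs) * 8;
        assert(used % 16 == 0);
    }
    printf("prologue keeps RSP 16-byte aligned for 0..63 registers\n");
    return 0;
}
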