author     dec05eba <dec05eba@protonmail.com>    2019-10-02 01:00:59 +0200
committer  dec05eba <dec05eba@protonmail.com>    2020-07-25 14:36:46 +0200
commit     b124548bcee1ab6d034d4499fe695073566ae37d (patch)
tree       e4014070ac69a2b821e12cc9264ba54aaa8089f4 /executor/x86_64
parent     7eb8642c3ace697b03c4fc6edc90ea0ada715689 (diff)
Add !=, <, <=, >, >= comparisons, both signed and unsigned
Diffstat (limited to 'executor/x86_64')
-rw-r--r--   executor/x86_64/asm.c        89
-rw-r--r--   executor/x86_64/asm.h        19
-rw-r--r--   executor/x86_64/executor.c  187
3 files changed, 288 insertions, 7 deletions
diff --git a/executor/x86_64/asm.c b/executor/x86_64/asm.c
index f7bc19c..a400656 100644
--- a/executor/x86_64/asm.c
+++ b/executor/x86_64/asm.c
@@ -410,6 +410,14 @@ void asm_mov_rr(Asm *self, Reg64 dst, Reg64 src) {
     ins_end(self, "mov %s, %s", reg64_to_str(dst), reg64_to_str(src));
 }
 
+void asm_and_mr(Asm *self, AsmPtr *dst, Reg64 src) {
+    ins_start(self);
+    *self->code_it++ = rex_rm(dst, src);
+    *self->code_it++ = 0x21;
+    asm_rm(self, dst, src);
+    ins_end(self, "and %s, %s", asm_ptr_to_string(dst), reg64_to_str(src));
+}
+
 void asm_add_rr(Asm *self, Reg64 dst, Reg64 src) {
     ins_start(self);
     *self->code_it++ = rex_rr(dst, src);
@@ -515,6 +523,87 @@ void asm_sete_r(Asm *self, Reg64 dst) {
     ins_end(self, "sete %s", reg64_to_str(dst));
 }
 
+void asm_setne_r(Asm *self, Reg64 dst) {
+    assert(dst != RSP && dst != RBP && dst != RSI && dst != RDI);
+    ins_start(self);
+    *self->code_it++ = 0x0F;
+    *self->code_it++ = 0x95;
+    asm_rr(self, dst, 0x0); /* the @src bits are not used */
+    ins_end(self, "setne %s", reg64_to_str(dst));
+}
+
+void asm_setb_r(Asm *self, Reg64 dst) {
+    assert(dst != RSP && dst != RBP && dst != RSI && dst != RDI);
+    ins_start(self);
+    *self->code_it++ = 0x0F;
+    *self->code_it++ = 0x92;
+    asm_rr(self, dst, 0x0); /* the @src bits are not used */
+    ins_end(self, "setb %s", reg64_to_str(dst));
+}
+
+void asm_setbe_r(Asm *self, Reg64 dst) {
+    assert(dst != RSP && dst != RBP && dst != RSI && dst != RDI);
+    ins_start(self);
+    *self->code_it++ = 0x0F;
+    *self->code_it++ = 0x96;
+    asm_rr(self, dst, 0x0); /* the @src bits are not used */
+    ins_end(self, "setbe %s", reg64_to_str(dst));
+}
+
+void asm_seta_r(Asm *self, Reg64 dst) {
+    assert(dst != RSP && dst != RBP && dst != RSI && dst != RDI);
+    ins_start(self);
+    *self->code_it++ = 0x0F;
+    *self->code_it++ = 0x97;
+    asm_rr(self, dst, 0x0); /* the @src bits are not used */
+    ins_end(self, "seta %s", reg64_to_str(dst));
+}
+
+void asm_setae_r(Asm *self, Reg64 dst) {
+    assert(dst != RSP && dst != RBP && dst != RSI && dst != RDI);
+    ins_start(self);
+    *self->code_it++ = 0x0F;
+    *self->code_it++ = 0x93;
+    asm_rr(self, dst, 0x0); /* the @src bits are not used */
+    ins_end(self, "setae %s", reg64_to_str(dst));
+}
+
+void asm_setl_r(Asm *self, Reg64 dst) {
+    assert(dst != RSP && dst != RBP && dst != RSI && dst != RDI);
+    ins_start(self);
+    *self->code_it++ = 0x0F;
+    *self->code_it++ = 0x9C;
+    asm_rr(self, dst, 0x0); /* the @src bits are not used */
+    ins_end(self, "setl %s", reg64_to_str(dst));
+}
+
+void asm_setle_r(Asm *self, Reg64 dst) {
+    assert(dst != RSP && dst != RBP && dst != RSI && dst != RDI);
+    ins_start(self);
+    *self->code_it++ = 0x0F;
+    *self->code_it++ = 0x9E;
+    asm_rr(self, dst, 0x0); /* the @src bits are not used */
+    ins_end(self, "setle %s", reg64_to_str(dst));
+}
+
+void asm_setg_r(Asm *self, Reg64 dst) {
+    assert(dst != RSP && dst != RBP && dst != RSI && dst != RDI);
+    ins_start(self);
+    *self->code_it++ = 0x0F;
+    *self->code_it++ = 0x9F;
+    asm_rr(self, dst, 0x0); /* the @src bits are not used */
+    ins_end(self, "setg %s", reg64_to_str(dst));
+}
+
+void asm_setge_r(Asm *self, Reg64 dst) {
+    assert(dst != RSP && dst != RBP && dst != RSI && dst != RDI);
+    ins_start(self);
+    *self->code_it++ = 0x0F;
+    *self->code_it++ = 0x9D;
+    asm_rr(self, dst, 0x0); /* the @src bits are not used */
+    ins_end(self, "setge %s", reg64_to_str(dst));
+}
+
 /* Note: This is sometimes called with @relative INT32_MAX-(2 or 6) (will print jz 0x7ffffff9),
    in which case it's most likely a dummy jump until the relative position is later changed
    with @asm_overwrite_jcc_rel32.
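Note: all nine emitters added above share one encoding. SETcc is the two-byte opcode 0F 90+cc, with the condition packed into the low nibble of the second byte, and it writes a single byte to the ModRM r/m register. The asserts reject RSP, RBP, RSI and RDI because no REX prefix is emitted, and without REX the 8-bit register codes 4-7 select AH/CH/DH/BH rather than the low bytes of those registers. Below is a hedged sketch of a table-driven replacement; asm_setcc_r and AsmCond are hypothetical names, not part of this patch:

/* Hypothetical consolidation of the nine emitters above: every SETcc
 * opcode is 0x90 | cc, so one function plus a condition table suffices. */
typedef enum {
    ASM_COND_NE = 0x5, /* setne */
    ASM_COND_B  = 0x2, /* setb,  unsigned <  */
    ASM_COND_BE = 0x6, /* setbe, unsigned <= */
    ASM_COND_A  = 0x7, /* seta,  unsigned >  */
    ASM_COND_AE = 0x3, /* setae, unsigned >= */
    ASM_COND_L  = 0xC, /* setl,  signed <    */
    ASM_COND_LE = 0xE, /* setle, signed <=   */
    ASM_COND_G  = 0xF, /* setg,  signed >    */
    ASM_COND_GE = 0xD  /* setge, signed >=   */
} AsmCond;

static void asm_setcc_r(Asm *self, AsmCond cond, const char *mnemonic, Reg64 dst) {
    /* Without REX, 8-bit register codes 4-7 mean AH/CH/DH/BH, so the
       stack/index registers are rejected just like in the emitters above. */
    assert(dst != RSP && dst != RBP && dst != RSI && dst != RDI);
    ins_start(self);
    *self->code_it++ = 0x0F;
    *self->code_it++ = 0x90 | cond;
    asm_rr(self, dst, 0x0); /* the @src bits are not used */
    ins_end(self, "%s %s", mnemonic, reg64_to_str(dst));
}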
diff --git a/executor/x86_64/asm.h b/executor/x86_64/asm.h
index dacc248..7d68bc0 100644
--- a/executor/x86_64/asm.h
+++ b/executor/x86_64/asm.h
@@ -80,6 +80,8 @@ void asm_mov_rm(Asm *self, Reg64 dst, AsmPtr *src);
 void asm_mov_ri(Asm *self, Reg64 dst, i64 immediate);
 void asm_mov_rr(Asm *self, Reg64 dst, Reg64 src);
 
+void asm_and_mr(Asm *self, AsmPtr *dst, Reg64 src);
+
 void asm_add_rr(Asm *self, Reg64 dst, Reg64 src);
 void asm_sub_rr(Asm *self, Reg64 dst, Reg64 src);
 void asm_imul_rr(Asm *self, Reg64 dst, Reg64 src);
@@ -110,6 +112,23 @@ void asm_cmp_rm(Asm *self, Reg64 reg1, AsmPtr *reg2);
  */
 void asm_sete_m(Asm *self, AsmPtr *dst);
 void asm_sete_r(Asm *self, Reg64 dst);
+void asm_setne_r(Asm *self, Reg64 dst);
+/* Unsigned */
+void asm_setb_r(Asm *self, Reg64 dst);
+/* Unsigned */
+void asm_setbe_r(Asm *self, Reg64 dst);
+/* Unsigned */
+void asm_seta_r(Asm *self, Reg64 dst);
+/* Unsigned */
+void asm_setae_r(Asm *self, Reg64 dst);
+/* Signed */
+void asm_setl_r(Asm *self, Reg64 dst);
+/* Signed */
+void asm_setle_r(Asm *self, Reg64 dst);
+/* Signed */
+void asm_setg_r(Asm *self, Reg64 dst);
+/* Signed */
+void asm_setge_r(Asm *self, Reg64 dst);
 
 /* In x86 assembly, the @relative position starts from the next instruction.
    This offset shouldn't be calculated by the caller and is instead managed
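Note: the Unsigned/Signed comments encode the intended pairing: b/be/a/ae interpret the flags as an unsigned comparison, l/le/g/ge as a signed one. A minimal usage sketch of the new declarations (assumed caller code, not from the patch) — `jit` is a hypothetical initialized Asm and `operand` a hypothetical initialized AsmPtr:

/* Hedged usage sketch: compute (rax < operand) as a 0/1 value in rcx. */
AsmPtr operand;                  /* assume: set up to point at the right-hand value */
asm_xor_rm64(&jit, RCX, RCX);    /* clear rcx up front; setcc writes only the low byte */
asm_cmp_rm(&jit, RAX, &operand); /* rax - operand, sets EFLAGS */
asm_setb_r(&jit, RCX);           /* unsigned <; swap in asm_setl_r for signed < */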
diff --git a/executor/x86_64/executor.c b/executor/x86_64/executor.c
index 6f3c1de..7591dcf 100644
--- a/executor/x86_64/executor.c
+++ b/executor/x86_64/executor.c
@@ -389,10 +389,15 @@ int amal_exec_call(amal_executor *self, u32 code_offset, AmalReg dst_reg) {
     /* TODO: This assumes all arguments are isize */
     /* Do the function call */
     isize asm_offset = asm_get_size(&impl->asm);
-    int num_pushed_stack = impl->num_pushed_values + impl->num_saved_params_for_call - (int)NUM_REG_PARAMS;
+    /* TODO: Do not push */
+    int num_pushed_stack = impl->num_pushed_values;/* + impl->num_saved_params_for_call - (int)NUM_REG_PARAMS;*/
     ASM_ENSURE_CAPACITY
 
-    assert((num_pushed_stack <= 0 || num_pushed_stack % 2 == 0) && "TODO: Align stack to 16-bytes before calling functions");
+    /*assert((num_pushed_stack <= 0 || num_pushed_stack % 2 == 0) && "TODO: Align stack to 16-bytes before calling functions");*/
+    if(num_pushed_stack & 1) {
+        ++num_pushed_stack;
+        asm_sub_rm64_imm(&impl->asm, RSP, sizeof(isize));
+    }
 
     assert(code_offset < asm_offset);
     asm_call_rel32(&impl->asm, (isize)code_offset - asm_offset);
@@ -427,10 +432,15 @@ int amal_exec_calle(amal_executor *self, void *func, AmalReg dst_reg) {
     AsmOperand dst_op = amal_reg_to_asm_operand(dst_reg);
     AsmOperand rax_op = asm_reg_to_operand(RAX);
     amal_executor_impl *impl = (amal_executor_impl*)self;
-    int num_pushed_stack = impl->num_pushed_values + impl->num_saved_params_for_call - (int)NUM_REG_PARAMS;
+    /* TODO: Do not push */
+    int num_pushed_stack = impl->num_pushed_values;/* + impl->num_saved_params_for_call - (int)NUM_REG_PARAMS;*/
     ASM_ENSURE_CAPACITY
 
-    assert((num_pushed_stack <= 0 || num_pushed_stack % 2 == 0) && "TODO: Align stack to 16-bytes before calling functions");
+    /*assert((num_pushed_stack <= 0 || num_pushed_stack % 2 == 0) && "TODO: Align stack to 16-bytes before calling functions");*/
+    if(num_pushed_stack & 1) {
+        ++num_pushed_stack;
+        asm_sub_rm64_imm(&impl->asm, RSP, sizeof(isize));
+    }
 
     /* TODO: Preserve necessary registers before call? */
     /* TODO: This assumes all arguments are isize */
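Note: the dropped asserts only documented the missing 16-byte alignment; the new branch implements it. The System V AMD64 ABI requires rsp to be 16-byte aligned immediately before a call, and every value pushed here occupies 8 bytes, so an odd push count leaves rsp misaligned by 8. One extra 8-byte slot restores the invariant, and incrementing num_pushed_stack presumably lets the post-call cleanup (outside these hunks) release the padding slot as well. A standalone sketch of the arithmetic, with hypothetical names:

#include <assert.h>

/* Hypothetical standalone check of the alignment rule used above:
 * starting from rsp % 16 == 0, each 8-byte push toggles alignment,
 * so padding is needed exactly when the push count is odd. */
static int slots_including_padding(int num_pushed_values) {
    int slots = num_pushed_values;
    if(slots & 1)
        ++slots; /* mirrors: asm_sub_rm64_imm(&impl->asm, RSP, sizeof(isize)) */
    assert(slots * 8 % 16 == 0); /* rsp is 16-byte aligned again at the call */
    return slots;
}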
@@ -449,7 +459,7 @@ int amal_exec_callr(AmalReg dst_reg, BufferView data) {
 }
 */
 
-int amal_exec_cmp(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
+int amal_exec_eq(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
     AsmOperand dst_op = amal_reg_to_asm_operand(dst_reg);
     AsmOperand src_op1 = amal_reg_to_asm_operand(src_reg1);
     AsmOperand src_op2 = amal_reg_to_asm_operand(src_reg2);
@@ -458,9 +468,7 @@ int amal_exec_cmp(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
     AsmOperand rcx_op = asm_reg_to_operand(RCX);
     amal_executor_impl *impl = (amal_executor_impl*)self;
     ASM_ENSURE_CAPACITY
 
-    asm_mov(&impl->asm, &rcx_op, &dst_op);
     asm_xor_rm64(&impl->asm, rcx_op.value.reg, rcx_op.value.reg);
-    asm_mov(&impl->asm, &rax_op, &src_op1);
     asm_cmp(&impl->asm, &rax_op, &src_op2);
     asm_sete_r(&impl->asm, rcx_op.value.reg);
@@ -468,6 +476,171 @@ int amal_exec_cmp(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
     return 0;
 }
 
+int amal_exec_neq(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
+    AsmOperand dst_op = amal_reg_to_asm_operand(dst_reg);
+    AsmOperand src_op1 = amal_reg_to_asm_operand(src_reg1);
+    AsmOperand src_op2 = amal_reg_to_asm_operand(src_reg2);
+    AsmOperand rax_op = asm_reg_to_operand(RAX);
+    AsmOperand rcx_op = asm_reg_to_operand(RCX);
+    amal_executor_impl *impl = (amal_executor_impl*)self;
+    ASM_ENSURE_CAPACITY
+
+    asm_xor_rm64(&impl->asm, rcx_op.value.reg, rcx_op.value.reg);
+    asm_mov(&impl->asm, &rax_op, &src_op1);
+    asm_cmp(&impl->asm, &rax_op, &src_op2);
+    asm_setne_r(&impl->asm, rcx_op.value.reg);
+    asm_mov(&impl->asm, &dst_op, &rcx_op);
+    return 0;
+}
+
+int amal_exec_ilt(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
+    AsmOperand dst_op = amal_reg_to_asm_operand(dst_reg);
+    AsmOperand src_op1 = amal_reg_to_asm_operand(src_reg1);
+    AsmOperand src_op2 = amal_reg_to_asm_operand(src_reg2);
+    AsmOperand rax_op = asm_reg_to_operand(RAX);
+    AsmOperand rcx_op = asm_reg_to_operand(RCX);
+    amal_executor_impl *impl = (amal_executor_impl*)self;
+    ASM_ENSURE_CAPACITY
+
+    asm_xor_rm64(&impl->asm, rcx_op.value.reg, rcx_op.value.reg);
+    asm_mov(&impl->asm, &rax_op, &src_op1);
+    asm_cmp(&impl->asm, &rax_op, &src_op2);
+    asm_setb_r(&impl->asm, rcx_op.value.reg);
+    asm_mov(&impl->asm, &dst_op, &rcx_op);
+    return 0;
+}
+
+int amal_exec_ile(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
+    AsmOperand dst_op = amal_reg_to_asm_operand(dst_reg);
+    AsmOperand src_op1 = amal_reg_to_asm_operand(src_reg1);
+    AsmOperand src_op2 = amal_reg_to_asm_operand(src_reg2);
+    AsmOperand rax_op = asm_reg_to_operand(RAX);
+    AsmOperand rcx_op = asm_reg_to_operand(RCX);
+    amal_executor_impl *impl = (amal_executor_impl*)self;
+    ASM_ENSURE_CAPACITY
+
+    asm_xor_rm64(&impl->asm, rcx_op.value.reg, rcx_op.value.reg);
+    asm_mov(&impl->asm, &rax_op, &src_op1);
+    asm_cmp(&impl->asm, &rax_op, &src_op2);
+    asm_setbe_r(&impl->asm, rcx_op.value.reg);
+    asm_mov(&impl->asm, &dst_op, &rcx_op);
+    return 0;
+}
+
+int amal_exec_igt(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
+    AsmOperand dst_op = amal_reg_to_asm_operand(dst_reg);
+    AsmOperand src_op1 = amal_reg_to_asm_operand(src_reg1);
+    AsmOperand src_op2 = amal_reg_to_asm_operand(src_reg2);
+    AsmOperand rax_op = asm_reg_to_operand(RAX);
+    AsmOperand rcx_op = asm_reg_to_operand(RCX);
+    amal_executor_impl *impl = (amal_executor_impl*)self;
+    ASM_ENSURE_CAPACITY
+
+    asm_xor_rm64(&impl->asm, rcx_op.value.reg, rcx_op.value.reg);
+    asm_mov(&impl->asm, &rax_op, &src_op1);
+    asm_cmp(&impl->asm, &rax_op, &src_op2);
+    asm_seta_r(&impl->asm, rcx_op.value.reg);
+    asm_mov(&impl->asm, &dst_op, &rcx_op);
+    return 0;
+}
+
+int amal_exec_ige(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
+    AsmOperand dst_op = amal_reg_to_asm_operand(dst_reg);
+    AsmOperand src_op1 = amal_reg_to_asm_operand(src_reg1);
+    AsmOperand src_op2 = amal_reg_to_asm_operand(src_reg2);
+    AsmOperand rax_op = asm_reg_to_operand(RAX);
+    AsmOperand rcx_op = asm_reg_to_operand(RCX);
+    amal_executor_impl *impl = (amal_executor_impl*)self;
+    ASM_ENSURE_CAPACITY
+
+    asm_xor_rm64(&impl->asm, rcx_op.value.reg, rcx_op.value.reg);
+    asm_mov(&impl->asm, &rax_op, &src_op1);
+    asm_cmp(&impl->asm, &rax_op, &src_op2);
+    asm_setae_r(&impl->asm, rcx_op.value.reg);
+    asm_mov(&impl->asm, &dst_op, &rcx_op);
+    return 0;
+}
+
+int amal_exec_lt(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
+    AsmOperand dst_op = amal_reg_to_asm_operand(dst_reg);
+    AsmOperand src_op1 = amal_reg_to_asm_operand(src_reg1);
+    AsmOperand src_op2 = amal_reg_to_asm_operand(src_reg2);
+    AsmOperand rax_op = asm_reg_to_operand(RAX);
+    AsmOperand rcx_op = asm_reg_to_operand(RCX);
+    amal_executor_impl *impl = (amal_executor_impl*)self;
+    ASM_ENSURE_CAPACITY
+
+    asm_xor_rm64(&impl->asm, rcx_op.value.reg, rcx_op.value.reg);
+    asm_mov(&impl->asm, &rax_op, &src_op1);
+    asm_cmp(&impl->asm, &rax_op, &src_op2);
+    asm_setl_r(&impl->asm, rcx_op.value.reg);
+    asm_mov(&impl->asm, &dst_op, &rcx_op);
+    return 0;
+}
+
+int amal_exec_le(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
+    AsmOperand dst_op = amal_reg_to_asm_operand(dst_reg);
+    AsmOperand src_op1 = amal_reg_to_asm_operand(src_reg1);
+    AsmOperand src_op2 = amal_reg_to_asm_operand(src_reg2);
+    AsmOperand rax_op = asm_reg_to_operand(RAX);
+    AsmOperand rcx_op = asm_reg_to_operand(RCX);
+    amal_executor_impl *impl = (amal_executor_impl*)self;
+    ASM_ENSURE_CAPACITY
+
+    asm_xor_rm64(&impl->asm, rcx_op.value.reg, rcx_op.value.reg);
+    asm_mov(&impl->asm, &rax_op, &src_op1);
+    asm_cmp(&impl->asm, &rax_op, &src_op2);
+    asm_setle_r(&impl->asm, rcx_op.value.reg);
+    asm_mov(&impl->asm, &dst_op, &rcx_op);
+    return 0;
+}
+
+int amal_exec_gt(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
+    AsmOperand dst_op = amal_reg_to_asm_operand(dst_reg);
+    AsmOperand src_op1 = amal_reg_to_asm_operand(src_reg1);
+    AsmOperand src_op2 = amal_reg_to_asm_operand(src_reg2);
+    AsmOperand rax_op = asm_reg_to_operand(RAX);
+    AsmOperand rcx_op = asm_reg_to_operand(RCX);
+    amal_executor_impl *impl = (amal_executor_impl*)self;
+    ASM_ENSURE_CAPACITY
+
+    asm_xor_rm64(&impl->asm, rcx_op.value.reg, rcx_op.value.reg);
+    asm_mov(&impl->asm, &rax_op, &src_op1);
+    asm_cmp(&impl->asm, &rax_op, &src_op2);
+    asm_setg_r(&impl->asm, rcx_op.value.reg);
+    asm_mov(&impl->asm, &dst_op, &rcx_op);
+    return 0;
+}
+
+int amal_exec_ge(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
+    AsmOperand dst_op = amal_reg_to_asm_operand(dst_reg);
+    AsmOperand src_op1 = amal_reg_to_asm_operand(src_reg1);
+    AsmOperand src_op2 = amal_reg_to_asm_operand(src_reg2);
+    AsmOperand rax_op = asm_reg_to_operand(RAX);
+    AsmOperand rcx_op = asm_reg_to_operand(RCX);
+    amal_executor_impl *impl = (amal_executor_impl*)self;
+    ASM_ENSURE_CAPACITY
+
+    asm_xor_rm64(&impl->asm, rcx_op.value.reg, rcx_op.value.reg);
+    asm_mov(&impl->asm, &rax_op, &src_op1);
+    asm_cmp(&impl->asm, &rax_op, &src_op2);
+    asm_setge_r(&impl->asm, rcx_op.value.reg);
+    asm_mov(&impl->asm, &dst_op, &rcx_op);
+    return 0;
+}
+
+int amal_exec_and(amal_executor *self, AmalReg dst_reg, AmalReg src_reg1, AmalReg src_reg2) {
+    AsmOperand dst_op = amal_reg_to_asm_operand(dst_reg);
+    AsmOperand src_op1 = amal_reg_to_asm_operand(src_reg1);
+    AsmOperand src_op2 = amal_reg_to_asm_operand(src_reg2);
+    amal_executor_impl *impl = (amal_executor_impl*)self;
+    ASM_ENSURE_CAPACITY
+
+    asm_and_rm64(&impl->asm, src_op1.value.reg, src_op2.value.reg);
+    asm_mov(&impl->asm, &dst_op, &src_op1);
+    return 0;
+}
+
 int amal_exec_jz(amal_executor *self, AmalReg reg, u16 target_label) {
     AsmOperand op = amal_reg_to_asm_operand(reg);
     AsmOperand rax_op = asm_reg_to_operand(RAX);
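Note: every new comparison handler repeats the same five-instruction lowering: clear RCX, load the first operand into RAX, compare against the second, SETcc into CL, and store RCX to the destination. The XOR must come before the CMP because it clobbers EFLAGS, which the SETcc reads; and zeroing the full register is needed because SETcc writes only the low byte. A hedged sketch of how the nine handlers could share one helper — amal_exec_cmp_generic and AsmSetccFn are hypothetical names, not part of this patch:

/* Hypothetical shared lowering (not in this patch): the nine handlers
 * above differ only in which SETcc emitter they call. */
typedef void (*AsmSetccFn)(Asm *self, Reg64 dst);

static int amal_exec_cmp_generic(amal_executor *self, AmalReg dst_reg,
                                 AmalReg src_reg1, AmalReg src_reg2,
                                 AsmSetccFn setcc) {
    AsmOperand dst_op = amal_reg_to_asm_operand(dst_reg);
    AsmOperand src_op1 = amal_reg_to_asm_operand(src_reg1);
    AsmOperand src_op2 = amal_reg_to_asm_operand(src_reg2);
    AsmOperand rax_op = asm_reg_to_operand(RAX);
    AsmOperand rcx_op = asm_reg_to_operand(RCX);
    amal_executor_impl *impl = (amal_executor_impl*)self;
    ASM_ENSURE_CAPACITY

    asm_xor_rm64(&impl->asm, rcx_op.value.reg, rcx_op.value.reg); /* before cmp: xor clobbers eflags */
    asm_mov(&impl->asm, &rax_op, &src_op1);
    asm_cmp(&impl->asm, &rax_op, &src_op2);
    setcc(&impl->asm, rcx_op.value.reg); /* writes cl only; rcx already zeroed */
    asm_mov(&impl->asm, &dst_op, &rcx_op);
    return 0;
}

/* e.g. amal_exec_ilt(self, dst, a, b) would become
   amal_exec_cmp_generic(self, dst, a, b, asm_setb_r); */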