#ifndef AMAL_EXECUTOR_X86_64_ASM_H
#define AMAL_EXECUTOR_X86_64_ASM_H

#include "../../include/std/misc.h"
#include "../../include/std/types.h"

typedef struct {
    void *code;
    u8 *code_it;
    usize allocated_size;
} Asm;

typedef enum {
    EAX,
    ECX,
    EDX,
    EBX,
    ESP,
    EBP,
    ESI,
    EDI
} Reg32;

#define REG64_EXTENDED_REG_BIT (1 << 3)
#define REG64_REG_BITS 0x7

typedef enum {
    RAX = 0,
    RCX = 1,
    RDX = 2,
    RBX = 3,
    RSP = 4,
    RBP = 5,
    RSI = 6,
    RDI = 7,

    R8  = REG64_EXTENDED_REG_BIT | RAX,
    R9  = REG64_EXTENDED_REG_BIT | RCX,
    R10 = REG64_EXTENDED_REG_BIT | RDX,
    R11 = REG64_EXTENDED_REG_BIT | RBX,
    R12 = REG64_EXTENDED_REG_BIT | RSP,
    R13 = REG64_EXTENDED_REG_BIT | RBP,
    R14 = REG64_EXTENDED_REG_BIT | RSI,
    R15 = REG64_EXTENDED_REG_BIT | RDI
} Reg64;

typedef struct {
    Reg64 base;
    Reg64 index;
    i32 disp;
    u8 scale;
} AsmPtr;

void asm_ptr_init(AsmPtr *self, Reg64 base);
void asm_ptr_init_index(AsmPtr *self, Reg64 base, Reg64 index);
void asm_ptr_init_disp(AsmPtr *self, Reg64 base, i32 disp);
void asm_ptr_init_index_disp(AsmPtr *self, Reg64 base, Reg64 index, i32 disp);

CHECK_RESULT int asm_init(Asm *self);
void asm_deinit(Asm *self);

usize asm_get_size(Asm *self);
CHECK_RESULT int asm_execute(Asm *self, u32 offset);
CHECK_RESULT int asm_ensure_capacity(Asm *self, usize size);

void asm_nop(Asm *self);
void asm_mov_mi(Asm *self, AsmPtr *dst, i32 immediate);
void asm_mov_mr(Asm *self, AsmPtr *dst, Reg64 src);
void asm_mov_rm(Asm *self, Reg64 dst, AsmPtr *src);
void asm_mov_ri(Asm *self, Reg64 dst, i64 immediate);
void asm_mov_rr(Asm *self, Reg64 dst, Reg64 src);
void asm_add_rr(Asm *self, Reg64 dst, Reg64 src);
void asm_sub_rr(Asm *self, Reg64 dst, Reg64 src);
void asm_imul_rr(Asm *self, Reg64 dst, Reg64 src);
/* Sign extend RAX into RDX. This is needed for some operations, such as idiv. */
void asm_cqo(Asm *self);
/*
    Divide RDX:RAX by @src. The quotient is stored in RAX and the remainder in RDX.
    @asm_cqo should be called before this, since RAX needs to be sign extended into RDX.
*/
void asm_idiv_rr(Asm *self, Reg64 src);
void asm_pushr(Asm *self, Reg64 reg);
void asm_popr(Asm *self, Reg64 reg);
void asm_callr(Asm *self, Reg64 reg);
/*
    In x86 assembly, the @relative offset is counted from the start of the next instruction.
    That adjustment shouldn't be applied by the caller; it's handled by this asm library itself.
*/
void asm_call_rel32(Asm *self, i32 relative);
void asm_overwrite_call_rel32(Asm *self, u32 asm_index, i32 new_relative);
void asm_cmp_rm(Asm *self, Reg64 reg1, AsmPtr *reg2);
/*
    Set the 8-bit memory operand to 1 if the last cmp was equal; otherwise set it to 0.
    Note: this instruction doesn't work with AH (RSP), CH (RBP), DH (RSI) and BH (RDI).
    TODO: When ST, MM and XMM registers are implemented, also check for them as they are also invalid.
*/
void asm_sete_m(Asm *self, AsmPtr *dst);
void asm_sete_r(Asm *self, Reg64 dst);
/*
    In x86 assembly, the @relative offset is counted from the start of the next instruction.
    That adjustment shouldn't be applied by the caller; it's handled by this asm library itself.
*/
void asm_jz(Asm *self, i32 relative);
/* Overwrite the target of a previously emitted conditional jump. */
void asm_overwrite_jcc_rel32(Asm *self, u32 asm_index, i32 new_relative);
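/*
    A minimal sketch (hypothetical usage, not taken from this codebase) of how a
    forward jump can be patched with asm_jz + asm_overwrite_jcc_rel32: emit the
    jump with a placeholder offset, remember where it was emitted, then overwrite
    the target once the destination is known.

        u32 jump_index = (u32)asm_get_size(a);
        asm_jz(a, 0);                                     <- placeholder target
        ...emit the code that may be skipped...
        asm_overwrite_jcc_rel32(a, jump_index, (i32)(asm_get_size(a) - jump_index));

    The exact meaning of @asm_index and @new_relative assumed above is an
    assumption; check the implementation for the authoritative calculation.
*/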
/*
    In x86 assembly, the @relative offset is counted from the start of the next instruction.
    That adjustment shouldn't be applied by the caller; it's handled by this asm library itself.
*/
void asm_jmp(Asm *self, i32 relative);
void asm_overwrite_jmp_rel32(Asm *self, u32 asm_index, i32 new_relative);

void asm_mov_rm32(Asm *self, Reg32 dst, Reg32 src);
void asm_add_rm32(Asm *self, Reg32 dst, Reg32 src);
void asm_sub_rm32(Asm *self, Reg32 dst, Reg32 src);
void asm_and_rm32(Asm *self, Reg32 dst, Reg32 src);
void asm_or_rm32(Asm *self, Reg32 dst, Reg32 src);
void asm_xor_rm32(Asm *self, Reg32 dst, Reg32 src);
void asm_cmp_rm32(Asm *self, Reg32 dst, Reg32 src);

void asm_add_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_or_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_adc_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_sbb_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_and_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_sub_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_xor_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_cmp_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_rol_rm32_imm(Asm *self, Reg32 reg, i8 immediate);
void asm_ror_rm32_imm(Asm *self, Reg32 reg, i8 immediate);
void asm_rcl_rm32_imm(Asm *self, Reg32 reg, i8 immediate);
void asm_rcr_rm32_imm(Asm *self, Reg32 reg, i8 immediate);
void asm_shl_rm32_imm(Asm *self, Reg32 reg, i8 immediate);
void asm_shr_rm32_imm(Asm *self, Reg32 reg, i8 immediate);
void asm_sar_rm32_imm(Asm *self, Reg32 reg, i8 immediate);

void asm_mov_rm64(Asm *self, Reg64 dst, Reg64 src);
void asm_add_rm64(Asm *self, Reg64 dst, Reg64 src);
void asm_sub_rm64(Asm *self, Reg64 dst, Reg64 src);
void asm_and_rm64(Asm *self, Reg64 dst, Reg64 src);
void asm_or_rm64(Asm *self, Reg64 dst, Reg64 src);
void asm_xor_rm64(Asm *self, Reg64 dst, Reg64 src);
void asm_cmp_rm64(Asm *self, Reg64 dst, Reg64 src);

void asm_add_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_or_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_adc_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_sbb_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_and_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_sub_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_xor_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_cmp_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_rol_rm64_imm(Asm *self, Reg64 reg, i8 immediate);
void asm_ror_rm64_imm(Asm *self, Reg64 reg, i8 immediate);
void asm_rcl_rm64_imm(Asm *self, Reg64 reg, i8 immediate);
void asm_rcr_rm64_imm(Asm *self, Reg64 reg, i8 immediate);
void asm_shl_rm64_imm(Asm *self, Reg64 reg, i8 immediate);
void asm_shr_rm64_imm(Asm *self, Reg64 reg, i8 immediate);
void asm_sar_rm64_imm(Asm *self, Reg64 reg, i8 immediate);

void asm_ret(Asm *self, u16 bytes);

#endif
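/*
    A hypothetical end-to-end sketch of an Asm buffer's lifecycle, assuming that
    asm_execute runs the emitted code starting at @offset and that these
    functions return 0 on success (both assumptions; this header doesn't say).
    Whether the emit functions grow the buffer themselves or the caller must use
    asm_ensure_capacity first is also left to the implementation.

        Asm a;
        if(asm_init(&a) != 0)
            return -1;
        asm_mov_ri(&a, RAX, 42);          emit: mov rax, 42
        asm_ret(&a, 0);                   emit: ret
        if(asm_execute(&a, 0) != 0)
            return -1;
        asm_deinit(&a);
*/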