aboutsummaryrefslogtreecommitdiff
path: root/executor/x86_64/asm.h
blob: 7d68bc06b409b8b51a6d0cca4d5c52e6c077b93b (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
#ifndef AMAL_EXECUTOR_X86_64_ASM_H
#define AMAL_EXECUTOR_X86_64_ASM_H

#include "../../include/std/misc.h"
#include "../../include/std/types.h"

/*
    Append-only buffer of generated machine code.
    NOTE(review): presumably allocated as executable memory (e.g. mmap with
    PROT_EXEC), since @asm_execute runs it directly — confirm in asm.c.
*/
typedef struct {
    void *code;           /* Start of the code buffer */
    u8 *code_it;          /* Write cursor; the next emitted byte goes here */
    usize allocated_size; /* Total capacity of @code in bytes */
} Asm;

/*
    32-bit general purpose registers. The implicit enumerator values
    (EAX=0 .. EDI=7) match the x86 ModR/M register encodings.
*/
typedef enum {
    EAX,
    ECX,
    EDX,
    EBX,
    ESP,
    EBP,
    ESI,
    EDI
} Reg32;

/*
    Bit 3 marks the extended registers (R8-R15); when set, the encoder has to
    emit a REX prefix bit (REX.R/REX.X/REX.B depending on operand position).
*/
#define REG64_EXTENDED_REG_BIT (1 << 3)
/* Mask for the low 3 bits that go into the ModR/M / SIB byte */
#define REG64_REG_BITS 0x7

/*
    64-bit general purpose registers. RAX..RDI carry the standard ModR/M
    encodings 0-7; R8-R15 reuse the same low 3 bits with the extended bit set.
*/
typedef enum {
    RAX =   0,
    RCX =   1,
    RDX =   2,
    RBX =   3,
    RSP =   4,
    RBP =   5,
    RSI =   6,
    RDI =   7,

    R8 =    REG64_EXTENDED_REG_BIT | RAX,
    R9 =    REG64_EXTENDED_REG_BIT | RCX,
    R10 =   REG64_EXTENDED_REG_BIT | RDX,
    R11 =   REG64_EXTENDED_REG_BIT | RBX,
    R12 =   REG64_EXTENDED_REG_BIT | RSP,
    R13 =   REG64_EXTENDED_REG_BIT | RBP,
    R14 =   REG64_EXTENDED_REG_BIT | RSI,
    R15 =   REG64_EXTENDED_REG_BIT | RDI
} Reg64;

/*
    An x86 memory operand of the form [base + index*scale + disp].
    NOTE(review): whether @scale holds the raw factor (1/2/4/8) or the
    2-bit SIB encoding (0-3) can't be told from this header — check asm.c
    before constructing one by hand; prefer the asm_ptr_init* helpers below.
*/
typedef struct {
    Reg64 base;  /* Base register */
    Reg64 index; /* Index register, multiplied by @scale */
    i32 disp;    /* Signed 32-bit displacement */
    u8 scale;    /* Scale applied to @index */
} AsmPtr;

/*
    Convenience constructors for AsmPtr. Fields not named in the suffix are
    presumably zeroed/defaulted (e.g. no index, disp 0) — confirm in asm.c.
*/
void asm_ptr_init(AsmPtr *self, Reg64 base);
void asm_ptr_init_index(AsmPtr *self, Reg64 base, Reg64 index);
void asm_ptr_init_disp(AsmPtr *self, Reg64 base, i32 disp);
void asm_ptr_init_index_disp(AsmPtr *self, Reg64 base, Reg64 index, i32 disp);

/* Initialize @self; returns nonzero/zero status (CHECK_RESULT: caller must test it). */
CHECK_RESULT int asm_init(Asm *self);
/* Release the code buffer owned by @self. */
void asm_deinit(Asm *self);

/* Number of bytes of code emitted so far. */
usize asm_get_size(Asm *self);

/* Run the generated code starting @offset bytes into the buffer. */
CHECK_RESULT int asm_execute(Asm *self, u32 offset);
/* Grow the buffer if needed so at least @size more bytes can be written. */
CHECK_RESULT int asm_ensure_capacity(Asm *self, usize size);

/* Emit a one-byte nop (0x90). */
void asm_nop(Asm *self);









/*
    mov family. The suffix names the operand forms in dst,src order:
    m = memory (AsmPtr), r = register, i = immediate.
    E.g. "mr" stores register @src to the memory location @dst.
*/
void asm_mov_mi(Asm *self, AsmPtr *dst, i32 immediate);
void asm_mov_mr(Asm *self, AsmPtr *dst, Reg64 src);
void asm_mov_rm(Asm *self, Reg64 dst, AsmPtr *src);
/* Takes a full 64-bit immediate, unlike the i32 forms above
   (presumably emitted as mov r64, imm64 — confirm in asm.c). */
void asm_mov_ri(Asm *self, Reg64 dst, i64 immediate);
void asm_mov_rr(Asm *self, Reg64 dst, Reg64 src);

/* dst-memory AND: *dst &= src */
void asm_and_mr(Asm *self, AsmPtr *dst, Reg64 src);

/* 64-bit register-register arithmetic: dst = dst OP src */
void asm_add_rr(Asm *self, Reg64 dst, Reg64 src);
void asm_sub_rr(Asm *self, Reg64 dst, Reg64 src);
void asm_imul_rr(Asm *self, Reg64 dst, Reg64 src);
/* Sign extend RAX into RDX, this is needed for some operations, such as idiv */
void asm_cqo(Asm *self);
/*
    Divide RDX:RAX by @src. Store the quotient in RAX and the remainder in RDX.
    @asm_cqo should be called before this, since RAX needs to be sign extended into RDX
*/
void asm_idiv_rax_r(Asm *self, Reg64 src);

/* Push/pop a 64-bit register on the machine stack. */
void asm_pushr(Asm *self, Reg64 reg);
void asm_popr(Asm *self, Reg64 reg);
/* Indirect call through @reg. */
void asm_callr(Asm *self, Reg64 reg);
/*
    In x86 assembly, the @relative position starts from the next instruction.
    This offset shouldn't be calculated by the caller and is instead managed
    by this asm library itself.
*/
void asm_call_rel32(Asm *self, i32 relative);
/* Patch the rel32 of a previously emitted call at buffer offset @asm_index. */
void asm_overwrite_call_rel32(Asm *self, u32 asm_index, i32 new_relative);

/* Compare @reg1 against the memory operand @reg2 (sets EFLAGS only). */
void asm_cmp_rm(Asm *self, Reg64 reg1, AsmPtr *reg2);
/*
    Sets the 8 bit memory operand to 1 if the last cmp was equals, otherwise set it to 0.
    Note: this instruction doesn't work with AH (RSP), CH (RBP), DH (RSI) and BH (RDI).
    TODO: When ST, MM AND XMM registers are implemented, also check for them as they are also invalid
*/
void asm_sete_m(Asm *self, AsmPtr *dst);
/* setcc family: write 1/0 into the low byte of @dst based on EFLAGS. */
void asm_sete_r(Asm *self, Reg64 dst);
void asm_setne_r(Asm *self, Reg64 dst);
/* Unsigned */
void asm_setb_r(Asm *self, Reg64 dst);
/* Unsigned */
void asm_setbe_r(Asm *self, Reg64 dst);
/* Unsigned */
void asm_seta_r(Asm *self, Reg64 dst);
/* Unsigned */
void asm_setae_r(Asm *self, Reg64 dst);
/* Signed */
void asm_setl_r(Asm *self, Reg64 dst);
/* Signed */
void asm_setle_r(Asm *self, Reg64 dst);
/* Signed */
void asm_setg_r(Asm *self, Reg64 dst);
/* Signed */
void asm_setge_r(Asm *self, Reg64 dst);
/*
    In x86 assembly, the @relative position starts from the next instruction.
    This offset shouldn't be calculated by the caller and is instead managed
    by this asm library itself.
*/
void asm_jz(Asm *self, i32 relative);
/* Overwrite conditional jump target */
void asm_overwrite_jcc_rel32(Asm *self, u32 asm_index, i32 new_relative);
/*
    In x86 assembly, the @relative position starts from the next instruction.
    This offset shouldn't be calculated by the caller and is instead managed
    by this asm library itself.
*/
void asm_jmp(Asm *self, i32 relative);
/* Patch the rel32 of a previously emitted jmp at buffer offset @asm_index. */
void asm_overwrite_jmp_rel32(Asm *self, u32 asm_index, i32 new_relative);












/*
    32-bit ALU operations, register-direct form: dst = dst OP src.
    NOTE(review): "rm" here names the r/m operand slot of the encoding;
    both operands are registers in these prototypes.
*/
void asm_mov_rm32(Asm *self, Reg32 dst, Reg32 src);
void asm_add_rm32(Asm *self, Reg32 dst, Reg32 src);
void asm_sub_rm32(Asm *self, Reg32 dst, Reg32 src);
void asm_and_rm32(Asm *self, Reg32 dst, Reg32 src);
void asm_or_rm32(Asm *self, Reg32 dst, Reg32 src);
void asm_xor_rm32(Asm *self, Reg32 dst, Reg32 src);
void asm_cmp_rm32(Asm *self, Reg32 dst, Reg32 src);
/* 32-bit ALU operations with a 32-bit immediate: reg = reg OP immediate. */
void asm_add_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_or_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_adc_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_sbb_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_and_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_sub_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_xor_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
void asm_cmp_rm32_imm(Asm *self, Reg32 reg, i32 immediate);
/* 32-bit rotates/shifts by an 8-bit immediate count. */
void asm_rol_rm32_imm(Asm *self, Reg32 reg, i8 immediate);
void asm_ror_rm32_imm(Asm *self, Reg32 reg, i8 immediate);
void asm_rcl_rm32_imm(Asm *self, Reg32 reg, i8 immediate);
void asm_rcr_rm32_imm(Asm *self, Reg32 reg, i8 immediate);
void asm_shl_rm32_imm(Asm *self, Reg32 reg, i8 immediate);
void asm_shr_rm32_imm(Asm *self, Reg32 reg, i8 immediate);
void asm_sar_rm32_imm(Asm *self, Reg32 reg, i8 immediate);

/*
    64-bit counterparts of the 32-bit group above (REX.W-prefixed):
    dst = dst OP src, register-direct form.
*/
void asm_mov_rm64(Asm *self, Reg64 dst, Reg64 src);
void asm_add_rm64(Asm *self, Reg64 dst, Reg64 src);
void asm_sub_rm64(Asm *self, Reg64 dst, Reg64 src);
void asm_and_rm64(Asm *self, Reg64 dst, Reg64 src);
void asm_or_rm64(Asm *self, Reg64 dst, Reg64 src);
void asm_xor_rm64(Asm *self, Reg64 dst, Reg64 src);
void asm_cmp_rm64(Asm *self, Reg64 dst, Reg64 src);
/* 64-bit ALU with a sign-extended 32-bit immediate: reg = reg OP immediate. */
void asm_add_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_or_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_adc_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_sbb_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_and_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_sub_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_xor_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
void asm_cmp_rm64_imm(Asm *self, Reg64 reg, i32 immediate);
/* 64-bit rotates/shifts by an 8-bit immediate count. */
void asm_rol_rm64_imm(Asm *self, Reg64 reg, i8 immediate);
void asm_ror_rm64_imm(Asm *self, Reg64 reg, i8 immediate);
void asm_rcl_rm64_imm(Asm *self, Reg64 reg, i8 immediate);
void asm_rcr_rm64_imm(Asm *self, Reg64 reg, i8 immediate);
void asm_shl_rm64_imm(Asm *self, Reg64 reg, i8 immediate);
void asm_shr_rm64_imm(Asm *self, Reg64 reg, i8 immediate);
void asm_sar_rm64_imm(Asm *self, Reg64 reg, i8 immediate);

void asm_ret(Asm *self, u16 bytes);

#endif