1 | /* |
2 | * This file is part of the MicroPython project, http://micropython.org/ |
3 | * |
4 | * The MIT License (MIT) |
5 | * |
6 | * Copyright (c) 2013, 2014 Damien P. George |
7 | * |
8 | * Permission is hereby granted, free of charge, to any person obtaining a copy |
9 | * of this software and associated documentation files (the "Software"), to deal |
10 | * in the Software without restriction, including without limitation the rights |
11 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
12 | * copies of the Software, and to permit persons to whom the Software is |
13 | * furnished to do so, subject to the following conditions: |
14 | * |
15 | * The above copyright notice and this permission notice shall be included in |
16 | * all copies or substantial portions of the Software. |
17 | * |
18 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
19 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
20 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
21 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
22 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
23 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN |
24 | * THE SOFTWARE. |
25 | */ |
26 | #ifndef MICROPY_INCLUDED_PY_ASMX64_H |
27 | #define MICROPY_INCLUDED_PY_ASMX64_H |
28 | |
29 | #include "py/mpconfig.h" |
30 | #include "py/misc.h" |
31 | #include "py/asmbase.h" |
32 | |
// The AMD64 System V calling convention is:
// - arguments are passed in: RDI, RSI, RDX, RCX, R08, R09
// - the return value is in RAX
// - the stack must be aligned on a 16-byte boundary before all calls
// - RAX, RCX, RDX, RSI, RDI, R08, R09, R10, R11 are caller-save
// - RBX, RBP, R12, R13, R14, R15 are callee-save
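//
// A minimal sketch (assuming an initialised asm_x64_t *as and a hypothetical
// function-table index fun_id) of loading two arguments and making an
// indirect call under this convention, using the helpers declared below:
//
//     asm_x64_mov_r64_r64(as, ASM_X64_REG_RDI, ASM_X64_REG_RBX); // argument 1
//     asm_x64_mov_r64_r64(as, ASM_X64_REG_RSI, ASM_X64_REG_R12); // argument 2
//     asm_x64_call_ind(as, fun_id, ASM_X64_REG_RAX);             // result arrives in RAX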
39 | |
// In the functions below, the argument order follows the x86 documentation,
// and in general the destination is the first argument.
// NOTE: this is a change from the old convention used in this file, and
// some functions still use the old (reverse, source-first) convention.
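//
// For example (both prototypes appear below):
//
//     asm_x64_mov_r64_r64(as, dest, src);     // new convention: destination first
//     asm_x64_mov_i64_to_r64(as, imm, dest);  // old convention: source first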
44 | |
45 | #define ASM_X64_REG_RAX (0) |
46 | #define ASM_X64_REG_RCX (1) |
47 | #define ASM_X64_REG_RDX (2) |
48 | #define ASM_X64_REG_RBX (3) |
49 | #define ASM_X64_REG_RSP (4) |
50 | #define ASM_X64_REG_RBP (5) |
51 | #define ASM_X64_REG_RSI (6) |
52 | #define ASM_X64_REG_RDI (7) |
53 | #define ASM_X64_REG_R08 (8) |
54 | #define ASM_X64_REG_R09 (9) |
55 | #define ASM_X64_REG_R10 (10) |
56 | #define ASM_X64_REG_R11 (11) |
57 | #define ASM_X64_REG_R12 (12) |
58 | #define ASM_X64_REG_R13 (13) |
59 | #define ASM_X64_REG_R14 (14) |
60 | #define ASM_X64_REG_R15 (15) |
61 | |
62 | // condition codes, used for jcc and setcc (despite their j-name!) |
63 | #define ASM_X64_CC_JB (0x2) // below, unsigned |
64 | #define ASM_X64_CC_JAE (0x3) // above or equal, unsigned |
#define ASM_X64_CC_JZ (0x4) // zero
#define ASM_X64_CC_JE (0x4) // equal
#define ASM_X64_CC_JNZ (0x5) // not zero
#define ASM_X64_CC_JNE (0x5) // not equal
69 | #define ASM_X64_CC_JBE (0x6) // below or equal, unsigned |
70 | #define ASM_X64_CC_JA (0x7) // above, unsigned |
71 | #define ASM_X64_CC_JL (0xc) // less, signed |
72 | #define ASM_X64_CC_JGE (0xd) // greater or equal, signed |
73 | #define ASM_X64_CC_JLE (0xe) // less or equal, signed |
74 | #define ASM_X64_CC_JG (0xf) // greater, signed |
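
// These codes combine with asm_x64_jcc_label() and asm_x64_setcc_r8(),
// declared below. An illustrative sketch (reg and label are hypothetical),
// branching when a register is zero:
//
//     asm_x64_test_r64_with_r64(as, reg, reg);      // set flags from reg
//     asm_x64_jcc_label(as, ASM_X64_CC_JZ, label);  // taken if reg == 0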
75 | |
76 | typedef struct _asm_x64_t { |
77 | mp_asm_base_t base; |
78 | int num_locals; |
79 | } asm_x64_t; |
80 | |
81 | static inline void asm_x64_end_pass(asm_x64_t *as) { |
82 | (void)as; |
83 | } |
84 | |
85 | void asm_x64_nop(asm_x64_t *as); |
86 | void asm_x64_push_r64(asm_x64_t *as, int src_r64); |
87 | void asm_x64_pop_r64(asm_x64_t *as, int dest_r64); |
88 | void asm_x64_mov_r64_r64(asm_x64_t *as, int dest_r64, int src_r64); |
89 | size_t asm_x64_mov_i32_to_r64(asm_x64_t *as, int src_i32, int dest_r64); |
90 | void asm_x64_mov_i64_to_r64(asm_x64_t *as, int64_t src_i64, int dest_r64); |
91 | void asm_x64_mov_i64_to_r64_optimised(asm_x64_t *as, int64_t src_i64, int dest_r64); |
92 | void asm_x64_mov_r8_to_mem8(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp); |
93 | void asm_x64_mov_r16_to_mem16(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp); |
94 | void asm_x64_mov_r32_to_mem32(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp); |
95 | void asm_x64_mov_r64_to_mem64(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp); |
96 | void asm_x64_mov_mem8_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64); |
97 | void asm_x64_mov_mem16_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64); |
98 | void asm_x64_mov_mem32_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64); |
99 | void asm_x64_mov_mem64_to_r64(asm_x64_t *as, int src_r64, int src_disp, int dest_r64); |
100 | void asm_x64_and_r64_r64(asm_x64_t *as, int dest_r64, int src_r64); |
101 | void asm_x64_or_r64_r64(asm_x64_t *as, int dest_r64, int src_r64); |
102 | void asm_x64_xor_r64_r64(asm_x64_t *as, int dest_r64, int src_r64); |
103 | void asm_x64_shl_r64_cl(asm_x64_t *as, int dest_r64); |
104 | void asm_x64_shr_r64_cl(asm_x64_t *as, int dest_r64); |
105 | void asm_x64_sar_r64_cl(asm_x64_t *as, int dest_r64); |
106 | void asm_x64_add_r64_r64(asm_x64_t *as, int dest_r64, int src_r64); |
107 | void asm_x64_sub_r64_r64(asm_x64_t *as, int dest_r64, int src_r64); |
108 | void asm_x64_mul_r64_r64(asm_x64_t *as, int dest_r64, int src_r64); |
109 | void asm_x64_cmp_r64_with_r64(asm_x64_t *as, int src_r64_a, int src_r64_b); |
110 | void asm_x64_test_r8_with_r8(asm_x64_t *as, int src_r64_a, int src_r64_b); |
111 | void asm_x64_test_r64_with_r64(asm_x64_t *as, int src_r64_a, int src_r64_b); |
112 | void asm_x64_setcc_r8(asm_x64_t *as, int jcc_type, int dest_r8); |
113 | void asm_x64_jmp_reg(asm_x64_t *as, int src_r64); |
114 | void asm_x64_jmp_label(asm_x64_t *as, mp_uint_t label); |
115 | void asm_x64_jcc_label(asm_x64_t *as, int jcc_type, mp_uint_t label); |
116 | void asm_x64_entry(asm_x64_t *as, int num_locals); |
117 | void asm_x64_exit(asm_x64_t *as); |
118 | void asm_x64_mov_local_to_r64(asm_x64_t *as, int src_local_num, int dest_r64); |
119 | void asm_x64_mov_r64_to_local(asm_x64_t *as, int src_r64, int dest_local_num); |
120 | void asm_x64_mov_local_addr_to_r64(asm_x64_t *as, int local_num, int dest_r64); |
121 | void asm_x64_mov_reg_pcrel(asm_x64_t *as, int dest_r64, mp_uint_t label); |
void asm_x64_call_ind(asm_x64_t *as, size_t fun_id, int temp_r64);
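
// A sketch of how these fit together (the local count and register choices
// are illustrative): a native function body could be emitted as
//
//     asm_x64_entry(as, 2);                              // prologue, 2 local slots
//     asm_x64_mov_r64_to_local(as, ASM_X64_REG_RDI, 0);  // spill argument 1
//     // ... emit the body ...
//     asm_x64_mov_local_to_r64(as, 0, ASM_X64_REG_RAX);  // return value goes in RAX
//     asm_x64_exit(as);                                  // epilogue and return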
123 | |
// Holds a pointer to mp_fun_table (RBP is callee-save, so the pointer survives calls)
125 | #define ASM_X64_REG_FUN_TABLE ASM_X64_REG_RBP |
126 | |
127 | #if GENERIC_ASM_API |
128 | |
129 | // The following macros provide a (mostly) arch-independent API to |
130 | // generate native code, and are used by the native emitter. |
131 | |
132 | #define ASM_WORD_SIZE (8) |
133 | |
134 | #define REG_RET ASM_X64_REG_RAX |
135 | #define REG_ARG_1 ASM_X64_REG_RDI |
136 | #define REG_ARG_2 ASM_X64_REG_RSI |
137 | #define REG_ARG_3 ASM_X64_REG_RDX |
138 | #define REG_ARG_4 ASM_X64_REG_RCX |
139 | #define REG_ARG_5 ASM_X64_REG_R08 |
140 | |
// caller-save temporary registers
142 | #define REG_TEMP0 ASM_X64_REG_RAX |
143 | #define REG_TEMP1 ASM_X64_REG_RDI |
144 | #define REG_TEMP2 ASM_X64_REG_RSI |
145 | |
// callee-save registers, used for locals
147 | #define REG_LOCAL_1 ASM_X64_REG_RBX |
148 | #define REG_LOCAL_2 ASM_X64_REG_R12 |
149 | #define REG_LOCAL_3 ASM_X64_REG_R13 |
150 | #define REG_LOCAL_NUM (3) |
151 | |
152 | // Holds a pointer to mp_fun_table |
153 | #define REG_FUN_TABLE ASM_X64_REG_FUN_TABLE |
154 | |
155 | #define ASM_T asm_x64_t |
156 | #define ASM_END_PASS asm_x64_end_pass |
157 | #define ASM_ENTRY asm_x64_entry |
158 | #define ASM_EXIT asm_x64_exit |
159 | |
160 | #define ASM_JUMP asm_x64_jmp_label |
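// In the two macros below, bool_test selects an 8-bit test: a boolean produced
// by setcc only has a valid low byte, so only that byte is tested.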
161 | #define ASM_JUMP_IF_REG_ZERO(as, reg, label, bool_test) \ |
162 | do { \ |
163 | if (bool_test) { \ |
164 | asm_x64_test_r8_with_r8((as), (reg), (reg)); \ |
165 | } else { \ |
166 | asm_x64_test_r64_with_r64((as), (reg), (reg)); \ |
167 | } \ |
168 | asm_x64_jcc_label(as, ASM_X64_CC_JZ, label); \ |
169 | } while (0) |
170 | #define ASM_JUMP_IF_REG_NONZERO(as, reg, label, bool_test) \ |
171 | do { \ |
172 | if (bool_test) { \ |
173 | asm_x64_test_r8_with_r8((as), (reg), (reg)); \ |
174 | } else { \ |
175 | asm_x64_test_r64_with_r64((as), (reg), (reg)); \ |
176 | } \ |
177 | asm_x64_jcc_label(as, ASM_X64_CC_JNZ, label); \ |
178 | } while (0) |
179 | #define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \ |
180 | do { \ |
181 | asm_x64_cmp_r64_with_r64(as, reg1, reg2); \ |
182 | asm_x64_jcc_label(as, ASM_X64_CC_JE, label); \ |
183 | } while (0) |
184 | #define ASM_JUMP_REG(as, reg) asm_x64_jmp_reg((as), (reg)) |
185 | #define ASM_CALL_IND(as, idx) asm_x64_call_ind(as, idx, ASM_X64_REG_RAX) |
186 | |
187 | #define ASM_MOV_LOCAL_REG(as, local_num, reg_src) asm_x64_mov_r64_to_local((as), (reg_src), (local_num)) |
188 | #define ASM_MOV_REG_IMM(as, reg_dest, imm) asm_x64_mov_i64_to_r64_optimised((as), (imm), (reg_dest)) |
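// The FIX variants are used where the immediate may only be known on a later
// pass, so the encoding size must not depend on its value; both map to a
// fixed-size 32-bit immediate load here.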
189 | #define ASM_MOV_REG_IMM_FIX_U16(as, reg_dest, imm) asm_x64_mov_i32_to_r64((as), (imm), (reg_dest)) |
190 | #define ASM_MOV_REG_IMM_FIX_WORD(as, reg_dest, imm) asm_x64_mov_i32_to_r64((as), (imm), (reg_dest)) |
191 | #define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_x64_mov_local_to_r64((as), (local_num), (reg_dest)) |
192 | #define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x64_mov_r64_r64((as), (reg_dest), (reg_src)) |
193 | #define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_x64_mov_local_addr_to_r64((as), (local_num), (reg_dest)) |
194 | #define ASM_MOV_REG_PCREL(as, reg_dest, label) asm_x64_mov_reg_pcrel((as), (reg_dest), (label)) |
195 | |
196 | #define ASM_LSL_REG(as, reg) asm_x64_shl_r64_cl((as), (reg)) |
197 | #define ASM_LSR_REG(as, reg) asm_x64_shr_r64_cl((as), (reg)) |
198 | #define ASM_ASR_REG(as, reg) asm_x64_sar_r64_cl((as), (reg)) |
199 | #define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_x64_or_r64_r64((as), (reg_dest), (reg_src)) |
200 | #define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_x64_xor_r64_r64((as), (reg_dest), (reg_src)) |
201 | #define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_x64_and_r64_r64((as), (reg_dest), (reg_src)) |
202 | #define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_x64_add_r64_r64((as), (reg_dest), (reg_src)) |
203 | #define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_x64_sub_r64_r64((as), (reg_dest), (reg_src)) |
204 | #define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_x64_mul_r64_r64((as), (reg_dest), (reg_src)) |
205 | |
206 | #define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem64_to_r64((as), (reg_base), 0, (reg_dest)) |
207 | #define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_x64_mov_mem64_to_r64((as), (reg_base), 8 * (word_offset), (reg_dest)) |
208 | #define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem8_to_r64zx((as), (reg_base), 0, (reg_dest)) |
209 | #define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem16_to_r64zx((as), (reg_base), 0, (reg_dest)) |
210 | #define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem32_to_r64zx((as), (reg_base), 0, (reg_dest)) |
211 | |
212 | #define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_x64_mov_r64_to_mem64((as), (reg_src), (reg_base), 0) |
213 | #define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_x64_mov_r64_to_mem64((as), (reg_src), (reg_base), 8 * (word_offset)) |
214 | #define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_x64_mov_r8_to_mem8((as), (reg_src), (reg_base), 0) |
215 | #define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_x64_mov_r16_to_mem16((as), (reg_src), (reg_base), 0) |
216 | #define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_x64_mov_r32_to_mem32((as), (reg_src), (reg_base), 0) |
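
// An illustrative sketch (base registers hypothetical): copy one machine word
// from *(reg_src_base + 1*WORD) to *(reg_dest_base + 0) via a temporary:
//
//     ASM_LOAD_REG_REG_OFFSET(as, REG_TEMP0, reg_src_base, 1);
//     ASM_STORE_REG_REG(as, REG_TEMP0, reg_dest_base);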
217 | |
218 | #endif // GENERIC_ASM_API |
219 | |
220 | #endif // MICROPY_INCLUDED_PY_ASMX64_H |
221 | |