1 | /* |
2 | * This file is part of the MicroPython project, http://micropython.org/ |
3 | * |
4 | * The MIT License (MIT) |
5 | * |
6 | * Copyright (c) 2013, 2014 Damien P. George |
7 | * |
8 | * Permission is hereby granted, free of charge, to any person obtaining a copy |
9 | * of this software and associated documentation files (the "Software"), to deal |
10 | * in the Software without restriction, including without limitation the rights |
11 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
12 | * copies of the Software, and to permit persons to whom the Software is |
13 | * furnished to do so, subject to the following conditions: |
14 | * |
15 | * The above copyright notice and this permission notice shall be included in |
16 | * all copies or substantial portions of the Software. |
17 | * |
18 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
19 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
20 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
21 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
22 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
23 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN |
24 | * THE SOFTWARE. |
25 | */ |
26 | #ifndef MICROPY_INCLUDED_PY_ASMTHUMB_H |
27 | #define MICROPY_INCLUDED_PY_ASMTHUMB_H |
28 | |
29 | #include <assert.h> |
30 | #include "py/misc.h" |
31 | #include "py/asmbase.h" |
32 | |
33 | #define ASM_THUMB_REG_R0 (0) |
34 | #define ASM_THUMB_REG_R1 (1) |
35 | #define ASM_THUMB_REG_R2 (2) |
36 | #define ASM_THUMB_REG_R3 (3) |
37 | #define ASM_THUMB_REG_R4 (4) |
38 | #define ASM_THUMB_REG_R5 (5) |
39 | #define ASM_THUMB_REG_R6 (6) |
40 | #define ASM_THUMB_REG_R7 (7) |
41 | #define ASM_THUMB_REG_R8 (8) |
42 | #define ASM_THUMB_REG_R9 (9) |
43 | #define ASM_THUMB_REG_R10 (10) |
44 | #define ASM_THUMB_REG_R11 (11) |
45 | #define ASM_THUMB_REG_R12 (12) |
46 | #define ASM_THUMB_REG_R13 (13) |
47 | #define ASM_THUMB_REG_R14 (14) |
48 | #define ASM_THUMB_REG_R15 (15) |
49 | #define ASM_THUMB_REG_SP (ASM_THUMB_REG_R13) |
#define ASM_THUMB_REG_LR (ASM_THUMB_REG_R14)
51 | |
52 | #define ASM_THUMB_CC_EQ (0x0) |
53 | #define ASM_THUMB_CC_NE (0x1) |
54 | #define ASM_THUMB_CC_CS (0x2) |
55 | #define ASM_THUMB_CC_CC (0x3) |
56 | #define ASM_THUMB_CC_MI (0x4) |
57 | #define ASM_THUMB_CC_PL (0x5) |
58 | #define ASM_THUMB_CC_VS (0x6) |
59 | #define ASM_THUMB_CC_VC (0x7) |
60 | #define ASM_THUMB_CC_HI (0x8) |
61 | #define ASM_THUMB_CC_LS (0x9) |
62 | #define ASM_THUMB_CC_GE (0xa) |
63 | #define ASM_THUMB_CC_LT (0xb) |
64 | #define ASM_THUMB_CC_GT (0xc) |
65 | #define ASM_THUMB_CC_LE (0xd) |
66 | |
67 | typedef struct _asm_thumb_t { |
68 | mp_asm_base_t base; |
69 | uint32_t push_reglist; |
70 | uint32_t stack_adjust; |
71 | } asm_thumb_t; |
72 | |
73 | void asm_thumb_end_pass(asm_thumb_t *as); |
74 | |
75 | void asm_thumb_entry(asm_thumb_t *as, int num_locals); |
76 | void asm_thumb_exit(asm_thumb_t *as); |
77 | |
// argument order follows ARM, in general dest is first
// note there is a difference between movw (16-bit immediate move) and mov.w
// (the wide encoding of a plain mov), and many similar cases!
80 | |
81 | #define ASM_THUMB_OP_IT (0xbf00) |
82 | #define ASM_THUMB_OP_ITE_EQ (0xbf0c) |
83 | #define ASM_THUMB_OP_ITE_NE (0xbf14) |
84 | #define ASM_THUMB_OP_ITE_CS (0xbf2c) |
85 | #define ASM_THUMB_OP_ITE_CC (0xbf34) |
86 | #define ASM_THUMB_OP_ITE_MI (0xbf4c) |
87 | #define ASM_THUMB_OP_ITE_PL (0xbf54) |
88 | #define ASM_THUMB_OP_ITE_VS (0xbf6c) |
89 | #define ASM_THUMB_OP_ITE_VC (0xbf74) |
90 | #define ASM_THUMB_OP_ITE_HI (0xbf8c) |
91 | #define ASM_THUMB_OP_ITE_LS (0xbf94) |
92 | #define ASM_THUMB_OP_ITE_GE (0xbfac) |
93 | #define ASM_THUMB_OP_ITE_LT (0xbfb4) |
94 | #define ASM_THUMB_OP_ITE_GT (0xbfcc) |
95 | #define ASM_THUMB_OP_ITE_LE (0xbfd4) |
96 | |
97 | #define ASM_THUMB_OP_NOP (0xbf00) |
98 | #define ASM_THUMB_OP_WFI (0xbf30) |
99 | #define ASM_THUMB_OP_CPSID_I (0xb672) // cpsid i, disable irq |
100 | #define ASM_THUMB_OP_CPSIE_I (0xb662) // cpsie i, enable irq |
101 | |
102 | void asm_thumb_op16(asm_thumb_t *as, uint op); |
103 | void asm_thumb_op32(asm_thumb_t *as, uint op1, uint op2); |
104 | |
105 | static inline void asm_thumb_it_cc(asm_thumb_t *as, uint cc, uint mask) { |
106 | asm_thumb_op16(as, ASM_THUMB_OP_IT | (cc << 4) | mask); |
107 | } |
108 | |
109 | // FORMAT 1: move shifted register |
110 | |
111 | #define ASM_THUMB_FORMAT_1_LSL (0x0000) |
112 | #define ASM_THUMB_FORMAT_1_LSR (0x0800) |
113 | #define ASM_THUMB_FORMAT_1_ASR (0x1000) |
114 | |
115 | #define ASM_THUMB_FORMAT_1_ENCODE(op, rlo_dest, rlo_src, offset) \ |
116 | ((op) | ((offset) << 6) | ((rlo_src) << 3) | (rlo_dest)) |
117 | |
118 | static inline void asm_thumb_format_1(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_src, uint offset) { |
119 | assert(rlo_dest < ASM_THUMB_REG_R8); |
120 | assert(rlo_src < ASM_THUMB_REG_R8); |
121 | asm_thumb_op16(as, ASM_THUMB_FORMAT_1_ENCODE(op, rlo_dest, rlo_src, offset)); |
122 | } |
123 | |
124 | // FORMAT 2: add/subtract |
125 | |
126 | #define ASM_THUMB_FORMAT_2_ADD (0x1800) |
127 | #define ASM_THUMB_FORMAT_2_SUB (0x1a00) |
128 | #define ASM_THUMB_FORMAT_2_REG_OPERAND (0x0000) |
129 | #define ASM_THUMB_FORMAT_2_IMM_OPERAND (0x0400) |
130 | |
131 | #define ASM_THUMB_FORMAT_2_ENCODE(op, rlo_dest, rlo_src, src_b) \ |
132 | ((op) | ((src_b) << 6) | ((rlo_src) << 3) | (rlo_dest)) |
133 | |
134 | static inline void asm_thumb_format_2(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_src, int src_b) { |
135 | assert(rlo_dest < ASM_THUMB_REG_R8); |
136 | assert(rlo_src < ASM_THUMB_REG_R8); |
137 | asm_thumb_op16(as, ASM_THUMB_FORMAT_2_ENCODE(op, rlo_dest, rlo_src, src_b)); |
138 | } |
139 | |
140 | static inline void asm_thumb_add_rlo_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_src_a, uint rlo_src_b) { |
141 | asm_thumb_format_2(as, ASM_THUMB_FORMAT_2_ADD | ASM_THUMB_FORMAT_2_REG_OPERAND, rlo_dest, rlo_src_a, rlo_src_b); |
142 | } |
143 | static inline void asm_thumb_add_rlo_rlo_i3(asm_thumb_t *as, uint rlo_dest, uint rlo_src_a, int i3_src) { |
144 | asm_thumb_format_2(as, ASM_THUMB_FORMAT_2_ADD | ASM_THUMB_FORMAT_2_IMM_OPERAND, rlo_dest, rlo_src_a, i3_src); |
145 | } |
146 | static inline void asm_thumb_sub_rlo_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_src_a, uint rlo_src_b) { |
147 | asm_thumb_format_2(as, ASM_THUMB_FORMAT_2_SUB | ASM_THUMB_FORMAT_2_REG_OPERAND, rlo_dest, rlo_src_a, rlo_src_b); |
148 | } |
149 | static inline void asm_thumb_sub_rlo_rlo_i3(asm_thumb_t *as, uint rlo_dest, uint rlo_src_a, int i3_src) { |
150 | asm_thumb_format_2(as, ASM_THUMB_FORMAT_2_SUB | ASM_THUMB_FORMAT_2_IMM_OPERAND, rlo_dest, rlo_src_a, i3_src); |
151 | } |
152 | |
153 | // FORMAT 3: move/compare/add/subtract immediate |
154 | // These instructions all do zero extension of the i8 value |
155 | |
156 | #define ASM_THUMB_FORMAT_3_MOV (0x2000) |
157 | #define ASM_THUMB_FORMAT_3_CMP (0x2800) |
158 | #define ASM_THUMB_FORMAT_3_ADD (0x3000) |
159 | #define ASM_THUMB_FORMAT_3_SUB (0x3800) |
160 | #define ASM_THUMB_FORMAT_3_LDR (0x4800) |
161 | |
162 | #define ASM_THUMB_FORMAT_3_ENCODE(op, rlo, i8) ((op) | ((rlo) << 8) | (i8)) |
163 | |
164 | static inline void asm_thumb_format_3(asm_thumb_t *as, uint op, uint rlo, int i8) { |
165 | assert(rlo < ASM_THUMB_REG_R8); |
166 | asm_thumb_op16(as, ASM_THUMB_FORMAT_3_ENCODE(op, rlo, i8)); |
167 | } |
168 | |
169 | static inline void asm_thumb_mov_rlo_i8(asm_thumb_t *as, uint rlo, int i8) { |
170 | asm_thumb_format_3(as, ASM_THUMB_FORMAT_3_MOV, rlo, i8); |
171 | } |
172 | static inline void asm_thumb_cmp_rlo_i8(asm_thumb_t *as, uint rlo, int i8) { |
173 | asm_thumb_format_3(as, ASM_THUMB_FORMAT_3_CMP, rlo, i8); |
174 | } |
175 | static inline void asm_thumb_add_rlo_i8(asm_thumb_t *as, uint rlo, int i8) { |
176 | asm_thumb_format_3(as, ASM_THUMB_FORMAT_3_ADD, rlo, i8); |
177 | } |
178 | static inline void asm_thumb_sub_rlo_i8(asm_thumb_t *as, uint rlo, int i8) { |
179 | asm_thumb_format_3(as, ASM_THUMB_FORMAT_3_SUB, rlo, i8); |
180 | } |
181 | static inline void asm_thumb_ldr_rlo_pcrel_i8(asm_thumb_t *as, uint rlo, uint i8) { |
182 | asm_thumb_format_3(as, ASM_THUMB_FORMAT_3_LDR, rlo, i8); |
183 | } |
184 | |
185 | // FORMAT 4: ALU operations |
186 | |
187 | #define ASM_THUMB_FORMAT_4_AND (0x4000) |
188 | #define ASM_THUMB_FORMAT_4_EOR (0x4040) |
189 | #define ASM_THUMB_FORMAT_4_LSL (0x4080) |
190 | #define ASM_THUMB_FORMAT_4_LSR (0x40c0) |
191 | #define ASM_THUMB_FORMAT_4_ASR (0x4100) |
192 | #define ASM_THUMB_FORMAT_4_ADC (0x4140) |
193 | #define ASM_THUMB_FORMAT_4_SBC (0x4180) |
194 | #define ASM_THUMB_FORMAT_4_ROR (0x41c0) |
195 | #define ASM_THUMB_FORMAT_4_TST (0x4200) |
196 | #define ASM_THUMB_FORMAT_4_NEG (0x4240) |
197 | #define ASM_THUMB_FORMAT_4_CMP (0x4280) |
198 | #define ASM_THUMB_FORMAT_4_CMN (0x42c0) |
199 | #define ASM_THUMB_FORMAT_4_ORR (0x4300) |
200 | #define ASM_THUMB_FORMAT_4_MUL (0x4340) |
201 | #define ASM_THUMB_FORMAT_4_BIC (0x4380) |
202 | #define ASM_THUMB_FORMAT_4_MVN (0x43c0) |
203 | |
204 | void asm_thumb_format_4(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_src); |
205 | |
206 | static inline void asm_thumb_cmp_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_src) { |
207 | asm_thumb_format_4(as, ASM_THUMB_FORMAT_4_CMP, rlo_dest, rlo_src); |
208 | } |
209 | static inline void asm_thumb_mvn_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_src) { |
210 | asm_thumb_format_4(as, ASM_THUMB_FORMAT_4_MVN, rlo_dest, rlo_src); |
211 | } |
212 | static inline void asm_thumb_neg_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_src) { |
213 | asm_thumb_format_4(as, ASM_THUMB_FORMAT_4_NEG, rlo_dest, rlo_src); |
214 | } |
215 | |
216 | // FORMAT 5: hi register operations (add, cmp, mov, bx) |
217 | // For add/cmp/mov, at least one of the args must be a high register |
218 | |
219 | #define ASM_THUMB_FORMAT_5_ADD (0x4400) |
220 | #define ASM_THUMB_FORMAT_5_BX (0x4700) |
221 | |
222 | #define ASM_THUMB_FORMAT_5_ENCODE(op, r_dest, r_src) \ |
223 | ((op) | ((r_dest) << 4 & 0x0080) | ((r_src) << 3) | ((r_dest) & 0x0007)) |
224 | |
225 | static inline void asm_thumb_format_5(asm_thumb_t *as, uint op, uint r_dest, uint r_src) { |
226 | asm_thumb_op16(as, ASM_THUMB_FORMAT_5_ENCODE(op, r_dest, r_src)); |
227 | } |
228 | |
229 | static inline void asm_thumb_add_reg_reg(asm_thumb_t *as, uint r_dest, uint r_src) { |
230 | asm_thumb_format_5(as, ASM_THUMB_FORMAT_5_ADD, r_dest, r_src); |
231 | } |
232 | static inline void asm_thumb_bx_reg(asm_thumb_t *as, uint r_src) { |
233 | asm_thumb_format_5(as, ASM_THUMB_FORMAT_5_BX, 0, r_src); |
234 | } |
235 | |
236 | // FORMAT 9: load/store with immediate offset |
// For word transfers the offset must be word-aligned and is passed as offset >> 2
238 | |
239 | // FORMAT 10: load/store halfword |
// The offset must be halfword-aligned and is passed as offset >> 1
// The loaded halfword is zero-extended into the register
242 | |
243 | #define ASM_THUMB_FORMAT_9_STR (0x6000) |
244 | #define ASM_THUMB_FORMAT_9_LDR (0x6800) |
245 | #define ASM_THUMB_FORMAT_9_WORD_TRANSFER (0x0000) |
246 | #define ASM_THUMB_FORMAT_9_BYTE_TRANSFER (0x1000) |
247 | |
248 | #define ASM_THUMB_FORMAT_10_STRH (0x8000) |
249 | #define ASM_THUMB_FORMAT_10_LDRH (0x8800) |
250 | |
251 | #define ASM_THUMB_FORMAT_9_10_ENCODE(op, rlo_dest, rlo_base, offset) \ |
252 | ((op) | (((offset) << 6) & 0x07c0) | ((rlo_base) << 3) | (rlo_dest)) |
253 | |
254 | static inline void asm_thumb_format_9_10(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_base, uint offset) { |
255 | asm_thumb_op16(as, ASM_THUMB_FORMAT_9_10_ENCODE(op, rlo_dest, rlo_base, offset)); |
256 | } |
257 | |
258 | static inline void asm_thumb_str_rlo_rlo_i5(asm_thumb_t *as, uint rlo_src, uint rlo_base, uint word_offset) { |
259 | asm_thumb_format_9_10(as, ASM_THUMB_FORMAT_9_STR | ASM_THUMB_FORMAT_9_WORD_TRANSFER, rlo_src, rlo_base, word_offset); |
260 | } |
261 | static inline void asm_thumb_strb_rlo_rlo_i5(asm_thumb_t *as, uint rlo_src, uint rlo_base, uint byte_offset) { |
262 | asm_thumb_format_9_10(as, ASM_THUMB_FORMAT_9_STR | ASM_THUMB_FORMAT_9_BYTE_TRANSFER, rlo_src, rlo_base, byte_offset); |
263 | } |
static inline void asm_thumb_strh_rlo_rlo_i5(asm_thumb_t *as, uint rlo_src, uint rlo_base, uint uint16_offset) {
    asm_thumb_format_9_10(as, ASM_THUMB_FORMAT_10_STRH, rlo_src, rlo_base, uint16_offset);
}
267 | static inline void asm_thumb_ldr_rlo_rlo_i5(asm_thumb_t *as, uint rlo_dest, uint rlo_base, uint word_offset) { |
268 | asm_thumb_format_9_10(as, ASM_THUMB_FORMAT_9_LDR | ASM_THUMB_FORMAT_9_WORD_TRANSFER, rlo_dest, rlo_base, word_offset); |
269 | } |
270 | static inline void asm_thumb_ldrb_rlo_rlo_i5(asm_thumb_t *as, uint rlo_dest, uint rlo_base, uint byte_offset) { |
271 | asm_thumb_format_9_10(as, ASM_THUMB_FORMAT_9_LDR | ASM_THUMB_FORMAT_9_BYTE_TRANSFER, rlo_dest, rlo_base, byte_offset); |
272 | } |
static inline void asm_thumb_ldrh_rlo_rlo_i5(asm_thumb_t *as, uint rlo_dest, uint rlo_base, uint uint16_offset) {
    asm_thumb_format_9_10(as, ASM_THUMB_FORMAT_10_LDRH, rlo_dest, rlo_base, uint16_offset);
}
276 | static inline void asm_thumb_lsl_rlo_rlo_i5(asm_thumb_t *as, uint rlo_dest, uint rlo_src, uint shift) { |
277 | asm_thumb_format_1(as, ASM_THUMB_FORMAT_1_LSL, rlo_dest, rlo_src, shift); |
278 | } |
279 | static inline void asm_thumb_asr_rlo_rlo_i5(asm_thumb_t *as, uint rlo_dest, uint rlo_src, uint shift) { |
280 | asm_thumb_format_1(as, ASM_THUMB_FORMAT_1_ASR, rlo_dest, rlo_src, shift); |
281 | } |
282 | |
283 | // FORMAT 11: sign/zero extend |
284 | |
285 | #define ASM_THUMB_FORMAT_11_ENCODE(op, rlo_dest, rlo_src) \ |
286 | ((op) | ((rlo_src) << 3) | (rlo_dest)) |
287 | |
288 | #define ASM_THUMB_FORMAT_11_SXTH (0xb200) |
289 | #define ASM_THUMB_FORMAT_11_SXTB (0xb240) |
290 | #define ASM_THUMB_FORMAT_11_UXTH (0xb280) |
291 | #define ASM_THUMB_FORMAT_11_UXTB (0xb2c0) |
292 | |
293 | static inline void asm_thumb_format_11(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_src) { |
294 | assert(rlo_dest < ASM_THUMB_REG_R8); |
295 | assert(rlo_src < ASM_THUMB_REG_R8); |
296 | asm_thumb_op16(as, ASM_THUMB_FORMAT_11_ENCODE(op, rlo_dest, rlo_src)); |
297 | } |
298 | |
299 | static inline void asm_thumb_sxth_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_src) { |
300 | asm_thumb_format_11(as, ASM_THUMB_FORMAT_11_SXTH, rlo_dest, rlo_src); |
301 | } |
302 | |
303 | // TODO convert these to above format style |
304 | |
305 | #define ASM_THUMB_OP_MOVW (0xf240) |
306 | #define ASM_THUMB_OP_MOVT (0xf2c0) |
307 | |
308 | void asm_thumb_mov_reg_reg(asm_thumb_t *as, uint reg_dest, uint reg_src); |
309 | |
310 | #if MICROPY_EMIT_THUMB_ARMV7M |
311 | size_t asm_thumb_mov_reg_i16(asm_thumb_t *as, uint mov_op, uint reg_dest, int i16_src); |
312 | #else |
313 | void asm_thumb_mov_rlo_i16(asm_thumb_t *as, uint rlo_dest, int i16_src); |
314 | #endif |
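
// Usage sketch (illustrative; see asmthumb.c for the actual emission): on
// ARMv7-M builds a 16-bit immediate can be loaded with a single wide movw,
//   asm_thumb_mov_reg_i16(as, ASM_THUMB_OP_MOVW, ASM_THUMB_REG_R0, 0x1234);  // movw r0, #0x1234
// while on ARMv6-M builds asm_thumb_mov_rlo_i16 is presumably synthesised from
// narrow mov/shift/add instructions, so it is restricted to a low register.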
315 | |
316 | // these return true if the destination is in range, false otherwise |
317 | bool asm_thumb_b_n_label(asm_thumb_t *as, uint label); |
318 | bool asm_thumb_bcc_nw_label(asm_thumb_t *as, int cond, uint label, bool wide); |
319 | bool asm_thumb_bl_label(asm_thumb_t *as, uint label); |
320 | |
321 | size_t asm_thumb_mov_reg_i32(asm_thumb_t *as, uint reg_dest, mp_uint_t i32_src); // convenience |
322 | void asm_thumb_mov_reg_i32_optimised(asm_thumb_t *as, uint reg_dest, int i32_src); // convenience |
323 | void asm_thumb_mov_local_reg(asm_thumb_t *as, int local_num_dest, uint rlo_src); // convenience |
324 | void asm_thumb_mov_reg_local(asm_thumb_t *as, uint rlo_dest, int local_num); // convenience |
325 | void asm_thumb_mov_reg_local_addr(asm_thumb_t *as, uint rlo_dest, int local_num); // convenience |
326 | void asm_thumb_mov_reg_pcrel(asm_thumb_t *as, uint rlo_dest, uint label); |
327 | |
328 | void asm_thumb_ldr_reg_reg_i12_optimised(asm_thumb_t *as, uint reg_dest, uint reg_base, uint byte_offset); // convenience |
329 | |
330 | void asm_thumb_b_label(asm_thumb_t *as, uint label); // convenience: picks narrow or wide branch |
331 | void asm_thumb_bcc_label(asm_thumb_t *as, int cc, uint label); // convenience: picks narrow or wide branch |
332 | void asm_thumb_bl_ind(asm_thumb_t *as, uint fun_id, uint reg_temp); // convenience |
333 | void asm_thumb_bcc_rel9(asm_thumb_t *as, int cc, int rel); |
334 | void asm_thumb_b_rel12(asm_thumb_t *as, int rel); |
335 | |
336 | // Holds a pointer to mp_fun_table |
337 | #define ASM_THUMB_REG_FUN_TABLE ASM_THUMB_REG_R7 |
338 | |
339 | #if GENERIC_ASM_API |
340 | |
341 | // The following macros provide a (mostly) arch-independent API to |
342 | // generate native code, and are used by the native emitter. |
343 | |
344 | #define ASM_WORD_SIZE (4) |
345 | |
346 | #define REG_RET ASM_THUMB_REG_R0 |
347 | #define REG_ARG_1 ASM_THUMB_REG_R0 |
348 | #define REG_ARG_2 ASM_THUMB_REG_R1 |
349 | #define REG_ARG_3 ASM_THUMB_REG_R2 |
350 | #define REG_ARG_4 ASM_THUMB_REG_R3 |
351 | // rest of args go on stack |
352 | |
353 | #define REG_TEMP0 ASM_THUMB_REG_R0 |
354 | #define REG_TEMP1 ASM_THUMB_REG_R1 |
355 | #define REG_TEMP2 ASM_THUMB_REG_R2 |
356 | |
357 | #define REG_LOCAL_1 ASM_THUMB_REG_R4 |
358 | #define REG_LOCAL_2 ASM_THUMB_REG_R5 |
359 | #define REG_LOCAL_3 ASM_THUMB_REG_R6 |
360 | #define REG_LOCAL_NUM (3) |
361 | |
362 | #define REG_FUN_TABLE ASM_THUMB_REG_FUN_TABLE |
363 | |
364 | #define ASM_T asm_thumb_t |
365 | #define ASM_END_PASS asm_thumb_end_pass |
366 | #define ASM_ENTRY asm_thumb_entry |
367 | #define ASM_EXIT asm_thumb_exit |
368 | |
369 | #define ASM_JUMP asm_thumb_b_label |
370 | #define ASM_JUMP_IF_REG_ZERO(as, reg, label, bool_test) \ |
371 | do { \ |
372 | asm_thumb_cmp_rlo_i8(as, reg, 0); \ |
373 | asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \ |
374 | } while (0) |
375 | #define ASM_JUMP_IF_REG_NONZERO(as, reg, label, bool_test) \ |
376 | do { \ |
377 | asm_thumb_cmp_rlo_i8(as, reg, 0); \ |
378 | asm_thumb_bcc_label(as, ASM_THUMB_CC_NE, label); \ |
379 | } while (0) |
380 | #define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \ |
381 | do { \ |
382 | asm_thumb_cmp_rlo_rlo(as, reg1, reg2); \ |
383 | asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \ |
384 | } while (0) |
385 | #define ASM_JUMP_REG(as, reg) asm_thumb_bx_reg((as), (reg)) |
386 | #define ASM_CALL_IND(as, idx) asm_thumb_bl_ind(as, idx, ASM_THUMB_REG_R3) |
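
// Expansion sketch (from the macros above, for illustration):
//   ASM_JUMP_IF_REG_ZERO(as, REG_RET, label, false)
// becomes
//   asm_thumb_cmp_rlo_i8(as, REG_RET, 0);             // cmp r0, #0
//   asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label);  // beq label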
387 | |
388 | #define ASM_MOV_LOCAL_REG(as, local_num, reg) asm_thumb_mov_local_reg((as), (local_num), (reg)) |
389 | #define ASM_MOV_REG_IMM(as, reg_dest, imm) asm_thumb_mov_reg_i32_optimised((as), (reg_dest), (imm)) |
390 | #if MICROPY_EMIT_THUMB_ARMV7M |
391 | #define ASM_MOV_REG_IMM_FIX_U16(as, reg_dest, imm) asm_thumb_mov_reg_i16((as), ASM_THUMB_OP_MOVW, (reg_dest), (imm)) |
392 | #else |
393 | #define ASM_MOV_REG_IMM_FIX_U16(as, reg_dest, imm) asm_thumb_mov_rlo_i16((as), (reg_dest), (imm)) |
394 | #endif |
395 | #define ASM_MOV_REG_IMM_FIX_WORD(as, reg_dest, imm) asm_thumb_mov_reg_i32((as), (reg_dest), (imm)) |
396 | #define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_thumb_mov_reg_local((as), (reg_dest), (local_num)) |
397 | #define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_thumb_mov_reg_reg((as), (reg_dest), (reg_src)) |
398 | #define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_thumb_mov_reg_local_addr((as), (reg_dest), (local_num)) |
399 | #define ASM_MOV_REG_PCREL(as, rlo_dest, label) asm_thumb_mov_reg_pcrel((as), (rlo_dest), (label)) |
400 | |
401 | #define ASM_LSL_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_LSL, (reg_dest), (reg_shift)) |
402 | #define ASM_LSR_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_LSR, (reg_dest), (reg_shift)) |
403 | #define ASM_ASR_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ASR, (reg_dest), (reg_shift)) |
404 | #define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ORR, (reg_dest), (reg_src)) |
405 | #define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_EOR, (reg_dest), (reg_src)) |
406 | #define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_AND, (reg_dest), (reg_src)) |
407 | #define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_thumb_add_rlo_rlo_rlo((as), (reg_dest), (reg_dest), (reg_src)) |
408 | #define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_thumb_sub_rlo_rlo_rlo((as), (reg_dest), (reg_dest), (reg_src)) |
409 | #define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_MUL, (reg_dest), (reg_src)) |
410 | |
411 | #define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), 0) |
412 | #define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_thumb_ldr_reg_reg_i12_optimised((as), (reg_dest), (reg_base), (word_offset)) |
413 | #define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_thumb_ldrb_rlo_rlo_i5((as), (reg_dest), (reg_base), 0) |
414 | #define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_thumb_ldrh_rlo_rlo_i5((as), (reg_dest), (reg_base), 0) |
415 | #define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), 0) |
416 | |
417 | #define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), 0) |
418 | #define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), (word_offset)) |
419 | #define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_thumb_strb_rlo_rlo_i5((as), (reg_src), (reg_base), 0) |
420 | #define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_thumb_strh_rlo_rlo_i5((as), (reg_src), (reg_base), 0) |
421 | #define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), 0) |
422 | |
423 | #endif // GENERIC_ASM_API |
424 | |
425 | #endif // MICROPY_INCLUDED_PY_ASMTHUMB_H |
426 | |