/*
 * This file is part of the MicroPython project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2013, 2014 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

// Essentially, normal Python has one type: the Python object.
// Viper has more than one type, and is a more complicated language (a superset of
// Python). If everything in Viper is declared as a Python object (i.e. type
// declarations are omitted) then it should in principle behave exactly the same as
// the plain native emitter.
// Having types means having more opcodes, like binary_op_nat_nat, binary_op_nat_obj, etc.
// In practice there is no VM: the typed operations are emitted directly as machine
// code, which is actually very minimal.

// Because it breaks strict Python equivalence, viper should be a completely separate
// decorator. It breaks equivalence because overflow on integers wraps around.
// It shouldn't break equivalence if the new types aren't used, but since the type
// declarations might be used in normal Python for other reasons, it's probably
// safest, cleanest and clearest to make it a separate decorator.

// In fact it breaks equivalence even without explicit types, because integers
// default to native integers, not Python objects.

// A loop such as "for x in l[0:8]:" can be compiled into a native loop if l has a
// pointer type.

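// As an illustrative (hypothetical) example of the kind of function this emitter
// compiles, a viper function might look like:
//
//     @micropython.viper
//     def sum_bytes(buf: ptr8, n: int) -> int:
//         total = 0
//         for i in range(n):
//             total += buf[i]
//         return total
//
// Here buf has vtype VTYPE_PTR8, n and total are native integers (VTYPE_INT), and
// buf[i] becomes a direct byte load instead of a call into the runtime. Anything
// left untyped stays an ordinary Python object (VTYPE_PYOBJ).
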
#include <stdio.h>
#include <string.h>
#include <assert.h>

#include "py/emit.h"
#include "py/nativeglue.h"
#include "py/objstr.h"

#if MICROPY_DEBUG_VERBOSE // print debugging info
#define DEBUG_PRINT (1)
#define DEBUG_printf DEBUG_printf // keep the externally-defined DEBUG_printf (this is not a typo)
#else // don't print debugging info
#define DEBUG_printf(...) (void)0
#endif

// wrapper around everything in this file
#if N_X64 || N_X86 || N_THUMB || N_ARM || N_XTENSA || N_XTENSAWIN

// C stack layout for native functions:
//  0:                          nlr_buf_t [optional]
//  emit->code_state_start:     mp_code_state_t
//  emit->stack_start:          Python object stack             | emit->n_state
//                              locals (reversed, L0 at end)    |
//
// C stack layout for native generator functions:
//  0=emit->stack_start:        nlr_buf_t
//
// Then REG_GENERATOR_STATE points to:
//  0=emit->code_state_start:   mp_code_state_t
//  emit->stack_start:          Python object stack             | emit->n_state
//                              locals (reversed, L0 at end)    |
//
// C stack layout for viper functions:
//  0:                          nlr_buf_t [optional]
//  emit->code_state_start:     fun_obj, old_globals [optional]
//  emit->stack_start:          Python object stack             | emit->n_state
//                              locals (reversed, L0 at end)    |
//                              (L0-L2 may be in regs instead)

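// Illustrative example (all sizes are in machine words and are target dependent):
// for a native function that needs the global exception handler,
//   code_state_start = SIZEOF_NLR_BUF
//   stack_start      = code_state_start + SIZEOF_CODE_STATE
// so the C stack holds the nlr_buf_t, then the mp_code_state_t, then n_state words
// comprising the Python object stack followed by the locals (with L0 at the end).
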
// The native emitter needs to know the following sizes and offsets of C structs (on the target):
#if MICROPY_DYNAMIC_COMPILER
#define SIZEOF_NLR_BUF (2 + mp_dynamic_compiler.nlr_buf_num_regs + 1) // the +1 is conservative in case MICROPY_ENABLE_PYSTACK is enabled
#else
#define SIZEOF_NLR_BUF (sizeof(nlr_buf_t) / sizeof(uintptr_t))
#endif
#define SIZEOF_CODE_STATE (sizeof(mp_code_state_t) / sizeof(uintptr_t))
#define OFFSETOF_CODE_STATE_STATE (offsetof(mp_code_state_t, state) / sizeof(uintptr_t))
#define OFFSETOF_CODE_STATE_FUN_BC (offsetof(mp_code_state_t, fun_bc) / sizeof(uintptr_t))
#define OFFSETOF_CODE_STATE_IP (offsetof(mp_code_state_t, ip) / sizeof(uintptr_t))
#define OFFSETOF_CODE_STATE_SP (offsetof(mp_code_state_t, sp) / sizeof(uintptr_t))
#define OFFSETOF_OBJ_FUN_BC_GLOBALS (offsetof(mp_obj_fun_bc_t, globals) / sizeof(uintptr_t))
#define OFFSETOF_OBJ_FUN_BC_BYTECODE (offsetof(mp_obj_fun_bc_t, bytecode) / sizeof(uintptr_t))
#define OFFSETOF_OBJ_FUN_BC_CONST_TABLE (offsetof(mp_obj_fun_bc_t, const_table) / sizeof(uintptr_t))

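// All of the above are measured in machine words (uintptr_t), because the emitter
// addresses its C stack as an array of words. For example the word index of
// code_state.fun_bc within the C stack is
// emit->code_state_start + OFFSETOF_CODE_STATE_FUN_BC (see LOCAL_IDX_FUN_OBJ below).
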
99// If not already defined, set parent args to same as child call registers
100#ifndef REG_PARENT_RET
101#define REG_PARENT_RET REG_RET
102#define REG_PARENT_ARG_1 REG_ARG_1
103#define REG_PARENT_ARG_2 REG_ARG_2
104#define REG_PARENT_ARG_3 REG_ARG_3
105#define REG_PARENT_ARG_4 REG_ARG_4
106#endif
107
108// Word index of nlr_buf_t.ret_val
109#define NLR_BUF_IDX_RET_VAL (1)
110
111// Whether the viper function needs access to fun_obj
112#define NEED_FUN_OBJ(emit) ((emit)->scope->exc_stack_size > 0 \
113 || ((emit)->scope->scope_flags & (MP_SCOPE_FLAG_REFGLOBALS | MP_SCOPE_FLAG_HASCONSTS)))
114
115// Whether the native/viper function needs to be wrapped in an exception handler
116#define NEED_GLOBAL_EXC_HANDLER(emit) ((emit)->scope->exc_stack_size > 0 \
117 || ((emit)->scope->scope_flags & (MP_SCOPE_FLAG_GENERATOR | MP_SCOPE_FLAG_REFGLOBALS)))
118
119// Whether registers can be used to store locals (only true if there are no
120// exception handlers, because otherwise an nlr_jump will restore registers to
121// their state at the start of the function and updates to locals will be lost)
122#define CAN_USE_REGS_FOR_LOCALS(emit) ((emit)->scope->exc_stack_size == 0 && !(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR))
123
124// Indices within the local C stack for various variables
125#define LOCAL_IDX_EXC_VAL(emit) (NLR_BUF_IDX_RET_VAL)
126#define LOCAL_IDX_EXC_HANDLER_PC(emit) (NLR_BUF_IDX_LOCAL_1)
127#define LOCAL_IDX_EXC_HANDLER_UNWIND(emit) (NLR_BUF_IDX_LOCAL_2)
128#define LOCAL_IDX_RET_VAL(emit) (NLR_BUF_IDX_LOCAL_3)
129#define LOCAL_IDX_FUN_OBJ(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_FUN_BC)
130#define LOCAL_IDX_OLD_GLOBALS(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
131#define LOCAL_IDX_GEN_PC(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
132#define LOCAL_IDX_LOCAL_VAR(emit, local_num) ((emit)->stack_start + (emit)->n_state - 1 - (local_num))
133
134#define REG_GENERATOR_STATE (REG_LOCAL_3)
135
136#define EMIT_NATIVE_VIPER_TYPE_ERROR(emit, ...) do { \
137 *emit->error_slot = mp_obj_new_exception_msg_varg(&mp_type_ViperTypeError, __VA_ARGS__); \
138} while (0)
139
140typedef enum {
141 STACK_VALUE,
142 STACK_REG,
143 STACK_IMM,
144} stack_info_kind_t;
145
// these enums must be distinct and the bottom 4 bits
// must correspond to the correct MP_NATIVE_TYPE_xxx value
typedef enum {
    VTYPE_PYOBJ = 0x00 | MP_NATIVE_TYPE_OBJ,
    VTYPE_BOOL = 0x00 | MP_NATIVE_TYPE_BOOL,
    VTYPE_INT = 0x00 | MP_NATIVE_TYPE_INT,
    VTYPE_UINT = 0x00 | MP_NATIVE_TYPE_UINT,
    VTYPE_PTR = 0x00 | MP_NATIVE_TYPE_PTR,
    VTYPE_PTR8 = 0x00 | MP_NATIVE_TYPE_PTR8,
    VTYPE_PTR16 = 0x00 | MP_NATIVE_TYPE_PTR16,
    VTYPE_PTR32 = 0x00 | MP_NATIVE_TYPE_PTR32,

    VTYPE_PTR_NONE = 0x50 | MP_NATIVE_TYPE_PTR,

    VTYPE_UNBOUND = 0x60 | MP_NATIVE_TYPE_OBJ,
    VTYPE_BUILTIN_CAST = 0x70 | MP_NATIVE_TYPE_OBJ,
} vtype_kind_t;

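// For example (VTYPE_PTR_NONE & 0x0f) == MP_NATIVE_TYPE_PTR: the low 4 bits of a
// vtype give the MP_NATIVE_TYPE_xxx code expected by the runtime conversion helpers
// invoked below via MP_F_CONVERT_OBJ_TO_NATIVE and MP_F_CONVERT_NATIVE_TO_OBJ.
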
164STATIC qstr vtype_to_qstr(vtype_kind_t vtype) {
165 switch (vtype) {
166 case VTYPE_PYOBJ:
167 return MP_QSTR_object;
168 case VTYPE_BOOL:
169 return MP_QSTR_bool;
170 case VTYPE_INT:
171 return MP_QSTR_int;
172 case VTYPE_UINT:
173 return MP_QSTR_uint;
174 case VTYPE_PTR:
175 return MP_QSTR_ptr;
176 case VTYPE_PTR8:
177 return MP_QSTR_ptr8;
178 case VTYPE_PTR16:
179 return MP_QSTR_ptr16;
180 case VTYPE_PTR32:
181 return MP_QSTR_ptr32;
182 case VTYPE_PTR_NONE:
183 default:
184 return MP_QSTR_None;
185 }
186}
187
188typedef struct _stack_info_t {
189 vtype_kind_t vtype;
190 stack_info_kind_t kind;
191 union {
192 int u_reg;
193 mp_int_t u_imm;
194 } data;
195} stack_info_t;
196
197#define UNWIND_LABEL_UNUSED (0x7fff)
198#define UNWIND_LABEL_DO_FINAL_UNWIND (0x7ffe)
199
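// Each entry describes one try/except or try/finally block that the code currently
// being emitted is nested within: label is the handler's jump target, is_finally
// marks finally handlers, unwind_label records where an in-progress unwind should
// continue (UNWIND_LABEL_UNUSED when there is none), and is_active is cleared once
// the handler can no longer catch an exception.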
200typedef struct _exc_stack_entry_t {
201 uint16_t label : 15;
202 uint16_t is_finally : 1;
203 uint16_t unwind_label : 15;
204 uint16_t is_active : 1;
205} exc_stack_entry_t;
206
207struct _emit_t {
208 mp_obj_t *error_slot;
209 uint *label_slot;
210 uint exit_label;
211 int pass;
212
213 bool do_viper_types;
214 bool prelude_offset_uses_u16_encoding;
215
216 mp_uint_t local_vtype_alloc;
217 vtype_kind_t *local_vtype;
218
219 mp_uint_t stack_info_alloc;
220 stack_info_t *stack_info;
221 vtype_kind_t saved_stack_vtype;
222
223 size_t exc_stack_alloc;
224 size_t exc_stack_size;
225 exc_stack_entry_t *exc_stack;
226
227 int prelude_offset;
228 int start_offset;
229 int n_state;
230 uint16_t code_state_start;
231 uint16_t stack_start;
232 int stack_size;
233 uint16_t n_cell;
234
235 uint16_t const_table_cur_obj;
236 uint16_t const_table_num_obj;
237 uint16_t const_table_cur_raw_code;
238 mp_uint_t *const_table;
239
240 #if MICROPY_PERSISTENT_CODE_SAVE
241 uint16_t qstr_link_cur;
242 mp_qstr_link_entry_t *qstr_link;
243 #endif
244
245 bool last_emit_was_return_value;
246
247 scope_t *scope;
248
249 ASM_T *as;
250};
251
252STATIC const uint8_t reg_local_table[REG_LOCAL_NUM] = {REG_LOCAL_1, REG_LOCAL_2, REG_LOCAL_3};
253
254STATIC void emit_native_global_exc_entry(emit_t *emit);
255STATIC void emit_native_global_exc_exit(emit_t *emit);
256STATIC void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj);
257
258emit_t *EXPORT_FUN(new)(mp_obj_t * error_slot, uint *label_slot, mp_uint_t max_num_labels) {
259 emit_t *emit = m_new0(emit_t, 1);
260 emit->error_slot = error_slot;
261 emit->label_slot = label_slot;
262 emit->stack_info_alloc = 8;
263 emit->stack_info = m_new(stack_info_t, emit->stack_info_alloc);
264 emit->exc_stack_alloc = 8;
265 emit->exc_stack = m_new(exc_stack_entry_t, emit->exc_stack_alloc);
266 emit->as = m_new0(ASM_T, 1);
267 mp_asm_base_init(&emit->as->base, max_num_labels);
268 return emit;
269}
270
271void EXPORT_FUN(free)(emit_t * emit) {
272 mp_asm_base_deinit(&emit->as->base, false);
273 m_del_obj(ASM_T, emit->as);
274 m_del(exc_stack_entry_t, emit->exc_stack, emit->exc_stack_alloc);
275 m_del(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc);
276 m_del(stack_info_t, emit->stack_info, emit->stack_info_alloc);
277 m_del_obj(emit_t, emit);
278}
279
280STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg);
281
282STATIC void emit_native_mov_reg_const(emit_t *emit, int reg_dest, int const_val) {
283 ASM_LOAD_REG_REG_OFFSET(emit->as, reg_dest, REG_FUN_TABLE, const_val);
284}
285
286STATIC void emit_native_mov_state_reg(emit_t *emit, int local_num, int reg_src) {
287 if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
288 ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, REG_GENERATOR_STATE, local_num);
289 } else {
290 ASM_MOV_LOCAL_REG(emit->as, local_num, reg_src);
291 }
292}
293
294STATIC void emit_native_mov_reg_state(emit_t *emit, int reg_dest, int local_num) {
295 if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
296 ASM_LOAD_REG_REG_OFFSET(emit->as, reg_dest, REG_GENERATOR_STATE, local_num);
297 } else {
298 ASM_MOV_REG_LOCAL(emit->as, reg_dest, local_num);
299 }
300}
301
302STATIC void emit_native_mov_reg_state_addr(emit_t *emit, int reg_dest, int local_num) {
303 if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
304 ASM_MOV_REG_IMM(emit->as, reg_dest, local_num * ASM_WORD_SIZE);
305 ASM_ADD_REG_REG(emit->as, reg_dest, REG_GENERATOR_STATE);
306 } else {
307 ASM_MOV_REG_LOCAL_ADDR(emit->as, reg_dest, local_num);
308 }
309}
310
311STATIC void emit_native_mov_reg_qstr(emit_t *emit, int arg_reg, qstr qst) {
312 #if MICROPY_PERSISTENT_CODE_SAVE
313 size_t loc = ASM_MOV_REG_IMM_FIX_U16(emit->as, arg_reg, qst);
314 size_t link_idx = emit->qstr_link_cur++;
315 if (emit->pass == MP_PASS_EMIT) {
316 emit->qstr_link[link_idx].off = loc << 2 | 1;
317 emit->qstr_link[link_idx].qst = qst;
318 }
319 #else
320 ASM_MOV_REG_IMM(emit->as, arg_reg, qst);
321 #endif
322}
323
324STATIC void emit_native_mov_reg_qstr_obj(emit_t *emit, int reg_dest, qstr qst) {
325 #if MICROPY_PERSISTENT_CODE_SAVE
326 size_t loc = ASM_MOV_REG_IMM_FIX_WORD(emit->as, reg_dest, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
327 size_t link_idx = emit->qstr_link_cur++;
328 if (emit->pass == MP_PASS_EMIT) {
329 emit->qstr_link[link_idx].off = loc << 2 | 2;
330 emit->qstr_link[link_idx].qst = qst;
331 }
332 #else
333 ASM_MOV_REG_IMM(emit->as, reg_dest, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
334 #endif
335}
336
337#define emit_native_mov_state_imm_via(emit, local_num, imm, reg_temp) \
338 do { \
339 ASM_MOV_REG_IMM((emit)->as, (reg_temp), (imm)); \
340 emit_native_mov_state_reg((emit), (local_num), (reg_temp)); \
341 } while (false)
342
343#define emit_native_mov_state_imm_fix_u16_via(emit, local_num, imm, reg_temp) \
344 do { \
345 ASM_MOV_REG_IMM_FIX_U16((emit)->as, (reg_temp), (imm)); \
346 emit_native_mov_state_reg((emit), (local_num), (reg_temp)); \
347 } while (false)
348
349#define emit_native_mov_state_imm_fix_word_via(emit, local_num, imm, reg_temp) \
350 do { \
351 ASM_MOV_REG_IMM_FIX_WORD((emit)->as, (reg_temp), (imm)); \
352 emit_native_mov_state_reg((emit), (local_num), (reg_temp)); \
353 } while (false)
354
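// For example, emit_native_mov_state_imm_via(emit, n, 42, REG_TEMP0) loads the
// immediate 42 into REG_TEMP0 and then stores that register to state slot n, which
// emit_native_mov_state_reg() routes either to the C stack or, for generators,
// through REG_GENERATOR_STATE.
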
355STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
356 DEBUG_printf("start_pass(pass=%u, scope=%p)\n", pass, scope);
357
358 emit->pass = pass;
359 emit->do_viper_types = scope->emit_options == MP_EMIT_OPT_VIPER;
360 emit->stack_size = 0;
361 #if N_PRELUDE_AS_BYTES_OBJ
362 emit->const_table_cur_obj = emit->do_viper_types ? 0 : 1; // reserve first obj for prelude bytes obj
363 #else
364 emit->const_table_cur_obj = 0;
365 #endif
366 emit->const_table_cur_raw_code = 0;
367 #if MICROPY_PERSISTENT_CODE_SAVE
368 emit->qstr_link_cur = 0;
369 #endif
370 emit->last_emit_was_return_value = false;
371 emit->scope = scope;
372
373 // allocate memory for keeping track of the types of locals
374 if (emit->local_vtype_alloc < scope->num_locals) {
375 emit->local_vtype = m_renew(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc, scope->num_locals);
376 emit->local_vtype_alloc = scope->num_locals;
377 }
378
379 // set default type for arguments
380 mp_uint_t num_args = emit->scope->num_pos_args + emit->scope->num_kwonly_args;
381 if (scope->scope_flags & MP_SCOPE_FLAG_VARARGS) {
382 num_args += 1;
383 }
384 if (scope->scope_flags & MP_SCOPE_FLAG_VARKEYWORDS) {
385 num_args += 1;
386 }
387 for (mp_uint_t i = 0; i < num_args; i++) {
388 emit->local_vtype[i] = VTYPE_PYOBJ;
389 }
390
391 // Set viper type for arguments
392 if (emit->do_viper_types) {
393 for (int i = 0; i < emit->scope->id_info_len; ++i) {
394 id_info_t *id = &emit->scope->id_info[i];
395 if (id->flags & ID_FLAG_IS_PARAM) {
396 assert(id->local_num < emit->local_vtype_alloc);
397 emit->local_vtype[id->local_num] = id->flags >> ID_FLAG_VIPER_TYPE_POS;
398 }
399 }
400 }
401
402 // local variables begin unbound, and have unknown type
403 for (mp_uint_t i = num_args; i < emit->local_vtype_alloc; i++) {
404 emit->local_vtype[i] = VTYPE_UNBOUND;
405 }
406
407 // values on stack begin unbound
408 for (mp_uint_t i = 0; i < emit->stack_info_alloc; i++) {
409 emit->stack_info[i].kind = STACK_VALUE;
410 emit->stack_info[i].vtype = VTYPE_UNBOUND;
411 }
412
413 mp_asm_base_start_pass(&emit->as->base, pass == MP_PASS_EMIT ? MP_ASM_PASS_EMIT : MP_ASM_PASS_COMPUTE);
414
415 // generate code for entry to function
416
417 // Work out start of code state (mp_code_state_t or reduced version for viper)
418 emit->code_state_start = 0;
419 if (NEED_GLOBAL_EXC_HANDLER(emit)) {
420 emit->code_state_start = SIZEOF_NLR_BUF;
421 }
422
423 if (emit->do_viper_types) {
424 // Work out size of state (locals plus stack)
425 // n_state counts all stack and locals, even those in registers
426 emit->n_state = scope->num_locals + scope->stack_size;
427 int num_locals_in_regs = 0;
428 if (CAN_USE_REGS_FOR_LOCALS(emit)) {
429 num_locals_in_regs = scope->num_locals;
430 if (num_locals_in_regs > REG_LOCAL_NUM) {
431 num_locals_in_regs = REG_LOCAL_NUM;
432 }
433 // Need a spot for REG_LOCAL_3 if 4 or more args (see below)
434 if (scope->num_pos_args >= 4) {
435 --num_locals_in_regs;
436 }
437 }
438
439 // Work out where the locals and Python stack start within the C stack
440 if (NEED_GLOBAL_EXC_HANDLER(emit)) {
441 // Reserve 2 words for function object and old globals
442 emit->stack_start = emit->code_state_start + 2;
443 } else if (scope->scope_flags & MP_SCOPE_FLAG_HASCONSTS) {
444 // Reserve 1 word for function object, to access const table
445 emit->stack_start = emit->code_state_start + 1;
446 } else {
447 emit->stack_start = emit->code_state_start + 0;
448 }
449
450 // Entry to function
451 ASM_ENTRY(emit->as, emit->stack_start + emit->n_state - num_locals_in_regs);
452
453 #if N_X86
454 asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1);
455 #endif
456
457 // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
458 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONST_TABLE);
459 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_LOCAL_3, 0);
460
461 // Store function object (passed as first arg) to stack if needed
462 if (NEED_FUN_OBJ(emit)) {
463 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);
464 }
465
466 // Put n_args in REG_ARG_1, n_kw in REG_ARG_2, args array in REG_LOCAL_3
467 #if N_X86
468 asm_x86_mov_arg_to_r32(emit->as, 1, REG_ARG_1);
469 asm_x86_mov_arg_to_r32(emit->as, 2, REG_ARG_2);
470 asm_x86_mov_arg_to_r32(emit->as, 3, REG_LOCAL_3);
471 #else
472 ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_PARENT_ARG_2);
473 ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_3);
474 ASM_MOV_REG_REG(emit->as, REG_LOCAL_3, REG_PARENT_ARG_4);
475 #endif
476
477 // Check number of args matches this function, and call mp_arg_check_num_sig if not
478 ASM_JUMP_IF_REG_NONZERO(emit->as, REG_ARG_2, *emit->label_slot + 4, true);
479 ASM_MOV_REG_IMM(emit->as, REG_ARG_3, scope->num_pos_args);
480 ASM_JUMP_IF_REG_EQ(emit->as, REG_ARG_1, REG_ARG_3, *emit->label_slot + 5);
481 mp_asm_base_label_assign(&emit->as->base, *emit->label_slot + 4);
482 ASM_MOV_REG_IMM(emit->as, REG_ARG_3, MP_OBJ_FUN_MAKE_SIG(scope->num_pos_args, scope->num_pos_args, false));
483 ASM_CALL_IND(emit->as, MP_F_ARG_CHECK_NUM_SIG);
484 mp_asm_base_label_assign(&emit->as->base, *emit->label_slot + 5);
485
486 // Store arguments into locals (reg or stack), converting to native if needed
487 for (int i = 0; i < emit->scope->num_pos_args; i++) {
488 int r = REG_ARG_1;
489 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_LOCAL_3, i);
490 if (emit->local_vtype[i] != VTYPE_PYOBJ) {
491 emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, emit->local_vtype[i], REG_ARG_2);
492 r = REG_RET;
493 }
494 // REG_LOCAL_3 points to the args array so be sure not to overwrite it if it's still needed
495 if (i < REG_LOCAL_NUM && CAN_USE_REGS_FOR_LOCALS(emit) && (i != 2 || emit->scope->num_pos_args == 3)) {
496 ASM_MOV_REG_REG(emit->as, reg_local_table[i], r);
497 } else {
498 emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, i), r);
499 }
500 }
501 // Get 3rd local from the stack back into REG_LOCAL_3 if this reg couldn't be written to above
502 if (emit->scope->num_pos_args >= 4 && CAN_USE_REGS_FOR_LOCALS(emit)) {
503 ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_3, LOCAL_IDX_LOCAL_VAR(emit, 2));
504 }
505
506 emit_native_global_exc_entry(emit);
507
508 } else {
509 // work out size of state (locals plus stack)
510 emit->n_state = scope->num_locals + scope->stack_size;
511
512 if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
513 emit->code_state_start = 0;
514 emit->stack_start = SIZEOF_CODE_STATE;
515 #if N_PRELUDE_AS_BYTES_OBJ
516 // Load index of prelude bytes object in const_table
517 mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)(emit->scope->num_pos_args + emit->scope->num_kwonly_args + 1));
518 #else
519 mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->prelude_offset);
520 #endif
521 mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->start_offset);
522 ASM_ENTRY(emit->as, SIZEOF_NLR_BUF);
523
524 // Put address of code_state into REG_GENERATOR_STATE
525 #if N_X86
526 asm_x86_mov_arg_to_r32(emit->as, 0, REG_GENERATOR_STATE);
527 #else
528 ASM_MOV_REG_REG(emit->as, REG_GENERATOR_STATE, REG_PARENT_ARG_1);
529 #endif
530
531 // Put throw value into LOCAL_IDX_EXC_VAL slot, for yield/yield-from
532 #if N_X86
533 asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2);
534 #endif
535 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_PARENT_ARG_2);
536
537 // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
538 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, LOCAL_IDX_FUN_OBJ(emit));
539 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONST_TABLE);
540 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_TEMP0, emit->scope->num_pos_args + emit->scope->num_kwonly_args);
541 } else {
542 // The locals and stack start after the code_state structure
543 emit->stack_start = emit->code_state_start + SIZEOF_CODE_STATE;
544
545 // Allocate space on C-stack for code_state structure, which includes state
546 ASM_ENTRY(emit->as, emit->stack_start + emit->n_state);
547
548 // Prepare incoming arguments for call to mp_setup_code_state
549
550 #if N_X86
551 asm_x86_mov_arg_to_r32(emit->as, 0, REG_PARENT_ARG_1);
552 asm_x86_mov_arg_to_r32(emit->as, 1, REG_PARENT_ARG_2);
553 asm_x86_mov_arg_to_r32(emit->as, 2, REG_PARENT_ARG_3);
554 asm_x86_mov_arg_to_r32(emit->as, 3, REG_PARENT_ARG_4);
555 #endif
556
557 // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
558 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONST_TABLE);
559 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_LOCAL_3, emit->scope->num_pos_args + emit->scope->num_kwonly_args);
560
561 // Set code_state.fun_bc
562 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);
563
564 // Set code_state.ip (offset from start of this function to prelude info)
565 int code_state_ip_local = emit->code_state_start + OFFSETOF_CODE_STATE_IP;
566 #if N_PRELUDE_AS_BYTES_OBJ
567 // Prelude is a bytes object in const_table; store ip = prelude->data - fun_bc->bytecode
568 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_LOCAL_3, emit->scope->num_pos_args + emit->scope->num_kwonly_args + 1);
569 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_3, REG_LOCAL_3, offsetof(mp_obj_str_t, data) / sizeof(uintptr_t));
570 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_PARENT_ARG_1, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_BYTECODE);
571 ASM_SUB_REG_REG(emit->as, REG_LOCAL_3, REG_PARENT_ARG_1);
572 emit_native_mov_state_reg(emit, code_state_ip_local, REG_LOCAL_3);
573 #else
574 if (emit->pass == MP_PASS_CODE_SIZE) {
575 // Commit to the encoding size based on the value of prelude_offset in this pass.
                // By using 32768 as the cut-off it is highly unlikely that prelude_offset will
                // grow beyond 65535 by the end of this pass, and so require the larger encoding.
578 emit->prelude_offset_uses_u16_encoding = emit->prelude_offset < 32768;
579 }
580 if (emit->prelude_offset_uses_u16_encoding) {
581 assert(emit->prelude_offset <= 65535);
582 emit_native_mov_state_imm_fix_u16_via(emit, code_state_ip_local, emit->prelude_offset, REG_PARENT_ARG_1);
583 } else {
584 emit_native_mov_state_imm_fix_word_via(emit, code_state_ip_local, emit->prelude_offset, REG_PARENT_ARG_1);
585 }
586 #endif
587
588 // Set code_state.n_state (only works on little endian targets due to n_state being uint16_t)
589 emit_native_mov_state_imm_via(emit, emit->code_state_start + offsetof(mp_code_state_t, n_state) / sizeof(uintptr_t), emit->n_state, REG_ARG_1);
590
591 // Put address of code_state into first arg
592 ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, emit->code_state_start);
593
594 // Copy next 3 args if needed
595 #if REG_ARG_2 != REG_PARENT_ARG_2
596 ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_2);
597 #endif
598 #if REG_ARG_3 != REG_PARENT_ARG_3
599 ASM_MOV_REG_REG(emit->as, REG_ARG_3, REG_PARENT_ARG_3);
600 #endif
601 #if REG_ARG_4 != REG_PARENT_ARG_4
602 ASM_MOV_REG_REG(emit->as, REG_ARG_4, REG_PARENT_ARG_4);
603 #endif
604
605 // Call mp_setup_code_state to prepare code_state structure
606 #if N_THUMB
607 asm_thumb_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_THUMB_REG_R4);
608 #elif N_ARM
609 asm_arm_bl_ind(emit->as, MP_F_SETUP_CODE_STATE, ASM_ARM_REG_R4);
610 #else
611 ASM_CALL_IND(emit->as, MP_F_SETUP_CODE_STATE);
612 #endif
613 }
614
615 emit_native_global_exc_entry(emit);
616
617 // cache some locals in registers, but only if no exception handlers
618 if (CAN_USE_REGS_FOR_LOCALS(emit)) {
619 for (int i = 0; i < REG_LOCAL_NUM && i < scope->num_locals; ++i) {
620 ASM_MOV_REG_LOCAL(emit->as, reg_local_table[i], LOCAL_IDX_LOCAL_VAR(emit, i));
621 }
622 }
623
624 // set the type of closed over variables
625 for (mp_uint_t i = 0; i < scope->id_info_len; i++) {
626 id_info_t *id = &scope->id_info[i];
627 if (id->kind == ID_INFO_KIND_CELL) {
628 emit->local_vtype[id->local_num] = VTYPE_PYOBJ;
629 }
630 }
631
632 if (pass == MP_PASS_EMIT) {
633 // write argument names as qstr objects
634 // see comment in corresponding part of emitbc.c about the logic here
635 for (int i = 0; i < scope->num_pos_args + scope->num_kwonly_args; i++) {
636 qstr qst = MP_QSTR__star_;
637 for (int j = 0; j < scope->id_info_len; ++j) {
638 id_info_t *id = &scope->id_info[j];
639 if ((id->flags & ID_FLAG_IS_PARAM) && id->local_num == i) {
640 qst = id->qst;
641 break;
642 }
643 }
644 emit->const_table[i] = (mp_uint_t)MP_OBJ_NEW_QSTR(qst);
645 }
646 }
647 }
648
649}
650
651static inline void emit_native_write_code_info_byte(emit_t *emit, byte val) {
652 mp_asm_base_data(&emit->as->base, 1, val);
653}
654
655STATIC void emit_native_end_pass(emit_t *emit) {
656 emit_native_global_exc_exit(emit);
657
658 if (!emit->do_viper_types) {
659 emit->prelude_offset = mp_asm_base_get_code_pos(&emit->as->base);
660
661 size_t n_state = emit->n_state;
662 size_t n_exc_stack = 0; // exc-stack not needed for native code
663 MP_BC_PRELUDE_SIG_ENCODE(n_state, n_exc_stack, emit->scope, emit_native_write_code_info_byte, emit);
664
665 #if MICROPY_PERSISTENT_CODE
666 size_t n_info = 4;
667 #else
668 size_t n_info = 1;
669 #endif
670 MP_BC_PRELUDE_SIZE_ENCODE(n_info, emit->n_cell, emit_native_write_code_info_byte, emit);
671
672 #if MICROPY_PERSISTENT_CODE
673 mp_asm_base_data(&emit->as->base, 1, emit->scope->simple_name);
674 mp_asm_base_data(&emit->as->base, 1, emit->scope->simple_name >> 8);
675 mp_asm_base_data(&emit->as->base, 1, emit->scope->source_file);
676 mp_asm_base_data(&emit->as->base, 1, emit->scope->source_file >> 8);
677 #else
678 mp_asm_base_data(&emit->as->base, 1, 1);
679 #endif
680
681 // bytecode prelude: initialise closed over variables
682 size_t cell_start = mp_asm_base_get_code_pos(&emit->as->base);
683 for (int i = 0; i < emit->scope->id_info_len; i++) {
684 id_info_t *id = &emit->scope->id_info[i];
685 if (id->kind == ID_INFO_KIND_CELL) {
686 assert(id->local_num <= 255);
687 mp_asm_base_data(&emit->as->base, 1, id->local_num); // write the local which should be converted to a cell
688 }
689 }
690 emit->n_cell = mp_asm_base_get_code_pos(&emit->as->base) - cell_start;
691
692 #if N_PRELUDE_AS_BYTES_OBJ
693 // Prelude bytes object is after qstr arg names and mp_fun_table
694 size_t table_off = emit->scope->num_pos_args + emit->scope->num_kwonly_args + 1;
695 if (emit->pass == MP_PASS_EMIT) {
696 void *buf = emit->as->base.code_base + emit->prelude_offset;
697 size_t n = emit->as->base.code_offset - emit->prelude_offset;
698 emit->const_table[table_off] = (uintptr_t)mp_obj_new_bytes(buf, n);
699 }
700 #endif
701 }
702
703 ASM_END_PASS(emit->as);
704
705 // check stack is back to zero size
706 assert(emit->stack_size == 0);
707 assert(emit->exc_stack_size == 0);
708
709 // Deal with const table accounting
710 assert(emit->pass <= MP_PASS_STACK_SIZE || (emit->const_table_num_obj == emit->const_table_cur_obj));
711 emit->const_table_num_obj = emit->const_table_cur_obj;
712 if (emit->pass == MP_PASS_CODE_SIZE) {
713 size_t const_table_alloc = 1 + emit->const_table_num_obj + emit->const_table_cur_raw_code;
714 size_t nqstr = 0;
715 if (!emit->do_viper_types) {
716 // Add room for qstr names of arguments
717 nqstr = emit->scope->num_pos_args + emit->scope->num_kwonly_args;
718 const_table_alloc += nqstr;
719 }
720 emit->const_table = m_new(mp_uint_t, const_table_alloc);
721 #if !MICROPY_DYNAMIC_COMPILER
722 // Store mp_fun_table pointer just after qstrs
723 // (but in dynamic-compiler mode eliminate dependency on mp_fun_table)
724 emit->const_table[nqstr] = (mp_uint_t)(uintptr_t)&mp_fun_table;
725 #endif
726
727 #if MICROPY_PERSISTENT_CODE_SAVE
728 size_t qstr_link_alloc = emit->qstr_link_cur;
729 if (qstr_link_alloc > 0) {
730 emit->qstr_link = m_new(mp_qstr_link_entry_t, qstr_link_alloc);
731 }
732 #endif
733 }
734
735 if (emit->pass == MP_PASS_EMIT) {
736 void *f = mp_asm_base_get_code(&emit->as->base);
737 mp_uint_t f_len = mp_asm_base_get_code_size(&emit->as->base);
738
739 mp_emit_glue_assign_native(emit->scope->raw_code,
740 emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY,
741 f, f_len, emit->const_table,
742 #if MICROPY_PERSISTENT_CODE_SAVE
743 emit->prelude_offset,
744 emit->const_table_cur_obj, emit->const_table_cur_raw_code,
745 emit->qstr_link_cur, emit->qstr_link,
746 #endif
747 emit->scope->num_pos_args, emit->scope->scope_flags, 0);
748 }
749}
750
751STATIC bool emit_native_last_emit_was_return_value(emit_t *emit) {
752 return emit->last_emit_was_return_value;
753}
754
755STATIC void ensure_extra_stack(emit_t *emit, size_t delta) {
756 if (emit->stack_size + delta > emit->stack_info_alloc) {
757 size_t new_alloc = (emit->stack_size + delta + 8) & ~3;
758 emit->stack_info = m_renew(stack_info_t, emit->stack_info, emit->stack_info_alloc, new_alloc);
759 emit->stack_info_alloc = new_alloc;
760 }
761}
762
763STATIC void adjust_stack(emit_t *emit, mp_int_t stack_size_delta) {
764 assert((mp_int_t)emit->stack_size + stack_size_delta >= 0);
765 assert((mp_int_t)emit->stack_size + stack_size_delta <= (mp_int_t)emit->stack_info_alloc);
766 emit->stack_size += stack_size_delta;
767 if (emit->pass > MP_PASS_SCOPE && emit->stack_size > emit->scope->stack_size) {
768 emit->scope->stack_size = emit->stack_size;
769 }
770 #ifdef DEBUG_PRINT
771 DEBUG_printf(" adjust_stack; stack_size=%d+%d; stack now:", emit->stack_size - stack_size_delta, stack_size_delta);
772 for (int i = 0; i < emit->stack_size; i++) {
773 stack_info_t *si = &emit->stack_info[i];
774 DEBUG_printf(" (v=%d k=%d %d)", si->vtype, si->kind, si->data.u_reg);
775 }
776 DEBUG_printf("\n");
777 #endif
778}
779
780STATIC void emit_native_adjust_stack_size(emit_t *emit, mp_int_t delta) {
781 DEBUG_printf("adjust_stack_size(" INT_FMT ")\n", delta);
782 if (delta > 0) {
783 ensure_extra_stack(emit, delta);
784 }
785 // If we are adjusting the stack in a positive direction (pushing) then we
786 // need to fill in values for the stack kind and vtype of the newly-pushed
787 // entries. These should be set to "value" (ie not reg or imm) because we
788 // should only need to adjust the stack due to a jump to this part in the
789 // code (and hence we have settled the stack before the jump).
790 for (mp_int_t i = 0; i < delta; i++) {
791 stack_info_t *si = &emit->stack_info[emit->stack_size + i];
792 si->kind = STACK_VALUE;
793 // TODO we don't know the vtype to use here. At the moment this is a
794 // hack to get the case of multi comparison working.
795 if (delta == 1) {
796 si->vtype = emit->saved_stack_vtype;
797 } else {
798 si->vtype = VTYPE_PYOBJ;
799 }
800 }
801 adjust_stack(emit, delta);
802}
803
804STATIC void emit_native_set_source_line(emit_t *emit, mp_uint_t source_line) {
805 (void)emit;
806 (void)source_line;
807}
808
809// this must be called at start of emit functions
810STATIC void emit_native_pre(emit_t *emit) {
811 emit->last_emit_was_return_value = false;
812}
813
814// depth==0 is top, depth==1 is before top, etc
815STATIC stack_info_t *peek_stack(emit_t *emit, mp_uint_t depth) {
816 return &emit->stack_info[emit->stack_size - 1 - depth];
817}
818
819// depth==0 is top, depth==1 is before top, etc
820STATIC vtype_kind_t peek_vtype(emit_t *emit, mp_uint_t depth) {
821 if (emit->do_viper_types) {
822 return peek_stack(emit, depth)->vtype;
823 } else {
824 // Type is always PYOBJ even if the intermediate stored value is not
825 return VTYPE_PYOBJ;
826 }
827}
828
829// pos=1 is TOS, pos=2 is next, etc
830// use pos=0 for no skipping
831STATIC void need_reg_single(emit_t *emit, int reg_needed, int skip_stack_pos) {
832 skip_stack_pos = emit->stack_size - skip_stack_pos;
833 for (int i = 0; i < emit->stack_size; i++) {
834 if (i != skip_stack_pos) {
835 stack_info_t *si = &emit->stack_info[i];
836 if (si->kind == STACK_REG && si->data.u_reg == reg_needed) {
837 si->kind = STACK_VALUE;
838 emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
839 }
840 }
841 }
842}
843
844STATIC void need_reg_all(emit_t *emit) {
845 for (int i = 0; i < emit->stack_size; i++) {
846 stack_info_t *si = &emit->stack_info[i];
847 if (si->kind == STACK_REG) {
848 si->kind = STACK_VALUE;
849 emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
850 }
851 }
852}
853
854STATIC vtype_kind_t load_reg_stack_imm(emit_t *emit, int reg_dest, const stack_info_t *si, bool convert_to_pyobj) {
855 if (!convert_to_pyobj && emit->do_viper_types) {
856 ASM_MOV_REG_IMM(emit->as, reg_dest, si->data.u_imm);
857 return si->vtype;
858 } else {
859 if (si->vtype == VTYPE_PYOBJ) {
860 ASM_MOV_REG_IMM(emit->as, reg_dest, si->data.u_imm);
861 } else if (si->vtype == VTYPE_BOOL) {
862 emit_native_mov_reg_const(emit, reg_dest, MP_F_CONST_FALSE_OBJ + si->data.u_imm);
863 } else if (si->vtype == VTYPE_INT || si->vtype == VTYPE_UINT) {
864 ASM_MOV_REG_IMM(emit->as, reg_dest, (uintptr_t)MP_OBJ_NEW_SMALL_INT(si->data.u_imm));
865 } else if (si->vtype == VTYPE_PTR_NONE) {
866 emit_native_mov_reg_const(emit, reg_dest, MP_F_CONST_NONE_OBJ);
867 } else {
868 mp_raise_NotImplementedError(MP_ERROR_TEXT("conversion to object"));
869 }
870 return VTYPE_PYOBJ;
871 }
872}
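
// Example for load_reg_stack_imm() above: an immediate 5 with vtype VTYPE_INT is
// loaded as the machine word MP_OBJ_NEW_SMALL_INT(5) when a boxed object is needed,
// while VTYPE_BOOL and VTYPE_PTR_NONE immediates are fetched as the canonical
// False/True/None objects through the function table (emit_native_mov_reg_const).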
873
874STATIC void need_stack_settled(emit_t *emit) {
875 DEBUG_printf(" need_stack_settled; stack_size=%d\n", emit->stack_size);
876 for (int i = 0; i < emit->stack_size; i++) {
877 stack_info_t *si = &emit->stack_info[i];
878 if (si->kind == STACK_REG) {
879 DEBUG_printf(" reg(%u) to local(%u)\n", si->data.u_reg, emit->stack_start + i);
880 si->kind = STACK_VALUE;
881 emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
882 }
883 }
884 for (int i = 0; i < emit->stack_size; i++) {
885 stack_info_t *si = &emit->stack_info[i];
886 if (si->kind == STACK_IMM) {
887 DEBUG_printf(" imm(" INT_FMT ") to local(%u)\n", si->data.u_imm, emit->stack_start + i);
888 si->kind = STACK_VALUE;
889 si->vtype = load_reg_stack_imm(emit, REG_TEMP0, si, false);
890 emit_native_mov_state_reg(emit, emit->stack_start + i, REG_TEMP0);
891 }
892 }
893}
894
895// pos=1 is TOS, pos=2 is next, etc
896STATIC void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int reg_dest) {
897 need_reg_single(emit, reg_dest, pos);
898 stack_info_t *si = &emit->stack_info[emit->stack_size - pos];
899 *vtype = si->vtype;
900 switch (si->kind) {
901 case STACK_VALUE:
902 emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - pos);
903 break;
904
905 case STACK_REG:
906 if (si->data.u_reg != reg_dest) {
907 ASM_MOV_REG_REG(emit->as, reg_dest, si->data.u_reg);
908 }
909 break;
910
911 case STACK_IMM:
912 *vtype = load_reg_stack_imm(emit, reg_dest, si, false);
913 break;
914 }
915}
916
// Does an efficient X=pop(); discard(); push(X).
// Needs a (non-temp) register in case the popped element was stored on the stack.
919STATIC void emit_fold_stack_top(emit_t *emit, int reg_dest) {
920 stack_info_t *si = &emit->stack_info[emit->stack_size - 2];
921 si[0] = si[1];
922 if (si->kind == STACK_VALUE) {
923 // if folded element was on the stack we need to put it in a register
924 emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - 1);
925 si->kind = STACK_REG;
926 si->data.u_reg = reg_dest;
927 }
928 adjust_stack(emit, -1);
929}
930
// If the stacked value is in a register and that register is not r1 or r2, then
// *reg_dest is set to that register. Otherwise the value is put in *reg_dest.
933STATIC void emit_pre_pop_reg_flexible(emit_t *emit, vtype_kind_t *vtype, int *reg_dest, int not_r1, int not_r2) {
934 emit->last_emit_was_return_value = false;
935 stack_info_t *si = peek_stack(emit, 0);
936 if (si->kind == STACK_REG && si->data.u_reg != not_r1 && si->data.u_reg != not_r2) {
937 *vtype = si->vtype;
938 *reg_dest = si->data.u_reg;
939 need_reg_single(emit, *reg_dest, 1);
940 } else {
941 emit_access_stack(emit, 1, vtype, *reg_dest);
942 }
943 adjust_stack(emit, -1);
944}
945
946STATIC void emit_pre_pop_discard(emit_t *emit) {
947 emit->last_emit_was_return_value = false;
948 adjust_stack(emit, -1);
949}
950
951STATIC void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest) {
952 emit->last_emit_was_return_value = false;
953 emit_access_stack(emit, 1, vtype, reg_dest);
954 adjust_stack(emit, -1);
955}
956
957STATIC void emit_pre_pop_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb) {
958 emit_pre_pop_reg(emit, vtypea, rega);
959 emit_pre_pop_reg(emit, vtypeb, regb);
960}
961
962STATIC void emit_pre_pop_reg_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb, vtype_kind_t *vtypec, int regc) {
963 emit_pre_pop_reg(emit, vtypea, rega);
964 emit_pre_pop_reg(emit, vtypeb, regb);
965 emit_pre_pop_reg(emit, vtypec, regc);
966}
967
968STATIC void emit_post(emit_t *emit) {
969 (void)emit;
970}
971
972STATIC void emit_post_top_set_vtype(emit_t *emit, vtype_kind_t new_vtype) {
973 stack_info_t *si = &emit->stack_info[emit->stack_size - 1];
974 si->vtype = new_vtype;
975}
976
977STATIC void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg) {
978 ensure_extra_stack(emit, 1);
979 stack_info_t *si = &emit->stack_info[emit->stack_size];
980 si->vtype = vtype;
981 si->kind = STACK_REG;
982 si->data.u_reg = reg;
983 adjust_stack(emit, 1);
984}
985
986STATIC void emit_post_push_imm(emit_t *emit, vtype_kind_t vtype, mp_int_t imm) {
987 ensure_extra_stack(emit, 1);
988 stack_info_t *si = &emit->stack_info[emit->stack_size];
989 si->vtype = vtype;
990 si->kind = STACK_IMM;
991 si->data.u_imm = imm;
992 adjust_stack(emit, 1);
993}
994
995STATIC void emit_post_push_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb) {
996 emit_post_push_reg(emit, vtypea, rega);
997 emit_post_push_reg(emit, vtypeb, regb);
998}
999
1000STATIC void emit_post_push_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc) {
1001 emit_post_push_reg(emit, vtypea, rega);
1002 emit_post_push_reg(emit, vtypeb, regb);
1003 emit_post_push_reg(emit, vtypec, regc);
1004}
1005
1006STATIC void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc, vtype_kind_t vtyped, int regd) {
1007 emit_post_push_reg(emit, vtypea, rega);
1008 emit_post_push_reg(emit, vtypeb, regb);
1009 emit_post_push_reg(emit, vtypec, regc);
1010 emit_post_push_reg(emit, vtyped, regd);
1011}
1012
1013STATIC void emit_call(emit_t *emit, mp_fun_kind_t fun_kind) {
1014 need_reg_all(emit);
1015 ASM_CALL_IND(emit->as, fun_kind);
1016}
1017
1018STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
1019 need_reg_all(emit);
1020 ASM_MOV_REG_IMM(emit->as, arg_reg, arg_val);
1021 ASM_CALL_IND(emit->as, fun_kind);
1022}
1023
1024STATIC void emit_call_with_2_imm_args(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2) {
1025 need_reg_all(emit);
1026 ASM_MOV_REG_IMM(emit->as, arg_reg1, arg_val1);
1027 ASM_MOV_REG_IMM(emit->as, arg_reg2, arg_val2);
1028 ASM_CALL_IND(emit->as, fun_kind);
1029}
1030
1031STATIC void emit_call_with_qstr_arg(emit_t *emit, mp_fun_kind_t fun_kind, qstr qst, int arg_reg) {
1032 need_reg_all(emit);
1033 emit_native_mov_reg_qstr(emit, arg_reg, qst);
1034 ASM_CALL_IND(emit->as, fun_kind);
1035}
1036
1037// vtype of all n_pop objects is VTYPE_PYOBJ
1038// Will convert any items that are not VTYPE_PYOBJ to this type and put them back on the stack.
1039// If any conversions of non-immediate values are needed, then it uses REG_ARG_1, REG_ARG_2 and REG_RET.
1040// Otherwise, it does not use any temporary registers (but may use reg_dest before loading it with stack pointer).
1041STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_pop) {
1042 need_reg_all(emit);
1043
1044 // First, store any immediate values to their respective place on the stack.
1045 for (mp_uint_t i = 0; i < n_pop; i++) {
1046 stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
1047 // must push any imm's to stack
1048 // must convert them to VTYPE_PYOBJ for viper code
1049 if (si->kind == STACK_IMM) {
1050 si->kind = STACK_VALUE;
1051 si->vtype = load_reg_stack_imm(emit, reg_dest, si, true);
1052 emit_native_mov_state_reg(emit, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
1053 }
1054
1055 // verify that this value is on the stack
1056 assert(si->kind == STACK_VALUE);
1057 }
1058
1059 // Second, convert any non-VTYPE_PYOBJ to that type.
1060 for (mp_uint_t i = 0; i < n_pop; i++) {
1061 stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
1062 if (si->vtype != VTYPE_PYOBJ) {
1063 mp_uint_t local_num = emit->stack_start + emit->stack_size - 1 - i;
1064 emit_native_mov_reg_state(emit, REG_ARG_1, local_num);
1065 emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, si->vtype, REG_ARG_2); // arg2 = type
1066 emit_native_mov_state_reg(emit, local_num, REG_RET);
1067 si->vtype = VTYPE_PYOBJ;
1068 DEBUG_printf(" convert_native_to_obj(local_num=" UINT_FMT ")\n", local_num);
1069 }
1070 }
1071
    // Adjust the stack for a pop of n_pop items, and load the stack pointer into reg_dest.
1073 adjust_stack(emit, -n_pop);
1074 emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
1075}
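
// Example for emit_get_stack_pointer_to_reg_for_pop() above: before an n-argument
// runtime call the emitter boxes and settles the top n values of its Python stack
// and then loads reg_dest with the address of the lowest of those state slots, so
// the callee sees a contiguous C array of n mp_obj_t values.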
1076
1077// vtype of all n_push objects is VTYPE_PYOBJ
1078STATIC void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_push) {
1079 need_reg_all(emit);
1080 ensure_extra_stack(emit, n_push);
1081 for (mp_uint_t i = 0; i < n_push; i++) {
1082 emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
1083 emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
1084 }
1085 emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
1086 adjust_stack(emit, n_push);
1087}
1088
1089STATIC void emit_native_push_exc_stack(emit_t *emit, uint label, bool is_finally) {
1090 if (emit->exc_stack_size + 1 > emit->exc_stack_alloc) {
1091 size_t new_alloc = emit->exc_stack_alloc + 4;
1092 emit->exc_stack = m_renew(exc_stack_entry_t, emit->exc_stack, emit->exc_stack_alloc, new_alloc);
1093 emit->exc_stack_alloc = new_alloc;
1094 }
1095
1096 exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size++];
1097 e->label = label;
1098 e->is_finally = is_finally;
1099 e->unwind_label = UNWIND_LABEL_UNUSED;
1100 e->is_active = true;
1101
1102 ASM_MOV_REG_PCREL(emit->as, REG_RET, label);
1103 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
1104}
1105
1106STATIC void emit_native_leave_exc_stack(emit_t *emit, bool start_of_handler) {
1107 assert(emit->exc_stack_size > 0);
1108
1109 // Get current exception handler and deactivate it
1110 exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
1111 e->is_active = false;
1112
1113 // Find next innermost active exception handler, to restore as current handler
1114 for (--e; e >= emit->exc_stack && !e->is_active; --e) {
1115 }
1116
1117 // Update the PC of the new exception handler
1118 if (e < emit->exc_stack) {
1119 // No active handler, clear handler PC to zero
1120 if (start_of_handler) {
1121 // Optimisation: PC is already cleared by global exc handler
1122 return;
1123 }
1124 ASM_XOR_REG_REG(emit->as, REG_RET, REG_RET);
1125 } else {
1126 // Found new active handler, get its PC
1127 ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
1128 }
1129 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
1130}
1131
1132STATIC exc_stack_entry_t *emit_native_pop_exc_stack(emit_t *emit) {
1133 assert(emit->exc_stack_size > 0);
1134 exc_stack_entry_t *e = &emit->exc_stack[--emit->exc_stack_size];
1135 assert(e->is_active == false);
1136 return e;
1137}
1138
1139STATIC void emit_load_reg_with_ptr(emit_t *emit, int reg, mp_uint_t ptr, size_t table_off) {
1140 if (!emit->do_viper_types) {
1141 // Skip qstr names of arguments
1142 table_off += emit->scope->num_pos_args + emit->scope->num_kwonly_args;
1143 }
1144 if (emit->pass == MP_PASS_EMIT) {
1145 emit->const_table[table_off] = ptr;
1146 }
1147 emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
1148 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONST_TABLE);
1149 ASM_LOAD_REG_REG_OFFSET(emit->as, reg, REG_TEMP0, table_off);
1150}
1151
1152STATIC void emit_load_reg_with_object(emit_t *emit, int reg, mp_obj_t obj) {
1153 // First entry is for mp_fun_table
1154 size_t table_off = 1 + emit->const_table_cur_obj++;
1155 emit_load_reg_with_ptr(emit, reg, (mp_uint_t)obj, table_off);
1156}
1157
1158STATIC void emit_load_reg_with_raw_code(emit_t *emit, int reg, mp_raw_code_t *rc) {
1159 // First entry is for mp_fun_table, then constant objects
1160 size_t table_off = 1 + emit->const_table_num_obj + emit->const_table_cur_raw_code++;
1161 emit_load_reg_with_ptr(emit, reg, (mp_uint_t)rc, table_off);
1162}
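
// Taken together, the const_table built by this emitter is laid out as:
//   [arg name qstrs (native only)] [mp_fun_table pointer] [const objects] [raw codes]
// and emit_load_reg_with_ptr() above indexes into it at runtime via the function
// object's const_table field.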
1163
1164STATIC void emit_native_label_assign(emit_t *emit, mp_uint_t l) {
1165 DEBUG_printf("label_assign(" UINT_FMT ")\n", l);
1166
1167 bool is_finally = false;
1168 if (emit->exc_stack_size > 0) {
1169 exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
1170 is_finally = e->is_finally && e->label == l;
1171 }
1172
1173 if (is_finally) {
1174 // Label is at start of finally handler: store TOS into exception slot
1175 vtype_kind_t vtype;
1176 emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
1177 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
1178 }
1179
1180 emit_native_pre(emit);
1181 // need to commit stack because we can jump here from elsewhere
1182 need_stack_settled(emit);
1183 mp_asm_base_label_assign(&emit->as->base, l);
1184 emit_post(emit);
1185
1186 if (is_finally) {
1187 // Label is at start of finally handler: pop exception stack
1188 emit_native_leave_exc_stack(emit, false);
1189 }
1190}
1191
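// The global exception handler set up below wraps the whole function: for normal
// native code it swaps in the function's globals dict, optionally pushes an outer
// nlr_buf_t so that a raised exception returns here, dispatches to any active
// per-block handler via LOCAL_IDX_EXC_HANDLER_PC, and otherwise restores the old
// globals and re-raises (or, for generators, returns MP_VM_RETURN_EXCEPTION).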
1192STATIC void emit_native_global_exc_entry(emit_t *emit) {
1193 // Note: 4 labels are reserved for this function, starting at *emit->label_slot
1194
1195 emit->exit_label = *emit->label_slot;
1196
1197 if (NEED_GLOBAL_EXC_HANDLER(emit)) {
1198 mp_uint_t nlr_label = *emit->label_slot + 1;
1199 mp_uint_t start_label = *emit->label_slot + 2;
1200 mp_uint_t global_except_label = *emit->label_slot + 3;
1201
1202 if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1203 // Set new globals
1204 emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_FUN_OBJ(emit));
1205 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, OFFSETOF_OBJ_FUN_BC_GLOBALS);
1206 emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
1207
1208 // Save old globals (or NULL if globals didn't change)
1209 emit_native_mov_state_reg(emit, LOCAL_IDX_OLD_GLOBALS(emit), REG_RET);
1210 }
1211
1212 if (emit->scope->exc_stack_size == 0) {
1213 if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1214 // Optimisation: if globals didn't change don't push the nlr context
1215 ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, false);
1216 }
1217
1218 // Wrap everything in an nlr context
1219 ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0);
1220 emit_call(emit, MP_F_NLR_PUSH);
1221 #if N_NLR_SETJMP
1222 ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2);
1223 emit_call(emit, MP_F_SETJMP);
1224 #endif
1225 ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, true);
1226 } else {
1227 // Clear the unwind state
1228 ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
1229 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_TEMP0);
1230
1231 // Put PC of start code block into REG_LOCAL_1
1232 ASM_MOV_REG_PCREL(emit->as, REG_LOCAL_1, start_label);
1233
1234 // Wrap everything in an nlr context
1235 emit_native_label_assign(emit, nlr_label);
1236 ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_2, LOCAL_IDX_EXC_HANDLER_UNWIND(emit));
1237 ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0);
1238 emit_call(emit, MP_F_NLR_PUSH);
1239 #if N_NLR_SETJMP
1240 ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 2);
1241 emit_call(emit, MP_F_SETJMP);
1242 #endif
1243 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_LOCAL_2);
1244 ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, global_except_label, true);
1245
1246 // Clear PC of current code block, and jump there to resume execution
1247 ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
1248 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_TEMP0);
1249 ASM_JUMP_REG(emit->as, REG_LOCAL_1);
1250
1251 // Global exception handler: check for valid exception handler
1252 emit_native_label_assign(emit, global_except_label);
1253 #if N_NLR_SETJMP
1254 // Reload REG_FUN_TABLE, since it may be clobbered by longjmp
1255 emit_native_mov_reg_state(emit, REG_LOCAL_1, LOCAL_IDX_FUN_OBJ(emit));
1256 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_1, REG_LOCAL_1, offsetof(mp_obj_fun_bc_t, const_table) / sizeof(uintptr_t));
1257 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_LOCAL_1, emit->scope->num_pos_args + emit->scope->num_kwonly_args);
1258 #endif
1259 ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_1, LOCAL_IDX_EXC_HANDLER_PC(emit));
1260 ASM_JUMP_IF_REG_NONZERO(emit->as, REG_LOCAL_1, nlr_label, false);
1261 }
1262
1263 if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1264 // Restore old globals
1265 emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
1266 emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
1267 }
1268
1269 if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
1270 // Store return value in state[0]
1271 ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit));
1272 ASM_STORE_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, OFFSETOF_CODE_STATE_STATE);
1273
1274 // Load return kind
1275 ASM_MOV_REG_IMM(emit->as, REG_PARENT_RET, MP_VM_RETURN_EXCEPTION);
1276
1277 ASM_EXIT(emit->as);
1278 } else {
1279 // Re-raise exception out to caller
1280 ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
1281 emit_call(emit, MP_F_NATIVE_RAISE);
1282 }
1283
1284 // Label for start of function
1285 emit_native_label_assign(emit, start_label);
1286
1287 if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
1288 emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_GEN_PC(emit));
1289 ASM_JUMP_REG(emit->as, REG_TEMP0);
1290 emit->start_offset = mp_asm_base_get_code_pos(&emit->as->base);
1291
1292 // This is the first entry of the generator
1293
1294 // Check LOCAL_IDX_EXC_VAL for any injected value
1295 ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
1296 emit_call(emit, MP_F_NATIVE_RAISE);
1297 }
1298 }
1299}
1300
1301STATIC void emit_native_global_exc_exit(emit_t *emit) {
1302 // Label for end of function
1303 emit_native_label_assign(emit, emit->exit_label);
1304
1305 if (NEED_GLOBAL_EXC_HANDLER(emit)) {
1306 // Get old globals
1307 if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1308 emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
1309
1310 if (emit->scope->exc_stack_size == 0) {
1311 // Optimisation: if globals didn't change then don't restore them and don't do nlr_pop
1312 ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, emit->exit_label + 1, false);
1313 }
1314
1315 // Restore old globals
1316 emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
1317 }
1318
1319 // Pop the nlr context
1320 emit_call(emit, MP_F_NLR_POP);
1321
1322 if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
1323 if (emit->scope->exc_stack_size == 0) {
1324 // Destination label for above optimisation
1325 emit_native_label_assign(emit, emit->exit_label + 1);
1326 }
1327 }
1328
1329 // Load return value
1330 ASM_MOV_REG_LOCAL(emit->as, REG_PARENT_RET, LOCAL_IDX_RET_VAL(emit));
1331 }
1332
1333 ASM_EXIT(emit->as);
1334}
1335
1336STATIC void emit_native_import_name(emit_t *emit, qstr qst) {
1337 DEBUG_printf("import_name %s\n", qstr_str(qst));
1338
1339 // get arguments from stack: arg2 = fromlist, arg3 = level
1340 // If using viper types these arguments must be converted to proper objects, and
1341 // to accomplish this viper types are turned off for the emit_pre_pop_reg_reg call.
1342 bool orig_do_viper_types = emit->do_viper_types;
1343 emit->do_viper_types = false;
1344 vtype_kind_t vtype_fromlist;
1345 vtype_kind_t vtype_level;
1346 emit_pre_pop_reg_reg(emit, &vtype_fromlist, REG_ARG_2, &vtype_level, REG_ARG_3);
1347 assert(vtype_fromlist == VTYPE_PYOBJ);
1348 assert(vtype_level == VTYPE_PYOBJ);
1349 emit->do_viper_types = orig_do_viper_types;
1350
1351 emit_call_with_qstr_arg(emit, MP_F_IMPORT_NAME, qst, REG_ARG_1); // arg1 = import name
1352 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1353}
1354
1355STATIC void emit_native_import_from(emit_t *emit, qstr qst) {
1356 DEBUG_printf("import_from %s\n", qstr_str(qst));
1357 emit_native_pre(emit);
1358 vtype_kind_t vtype_module;
1359 emit_access_stack(emit, 1, &vtype_module, REG_ARG_1); // arg1 = module
1360 assert(vtype_module == VTYPE_PYOBJ);
1361 emit_call_with_qstr_arg(emit, MP_F_IMPORT_FROM, qst, REG_ARG_2); // arg2 = import name
1362 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1363}
1364
1365STATIC void emit_native_import_star(emit_t *emit) {
1366 DEBUG_printf("import_star\n");
1367 vtype_kind_t vtype_module;
1368 emit_pre_pop_reg(emit, &vtype_module, REG_ARG_1); // arg1 = module
1369 assert(vtype_module == VTYPE_PYOBJ);
1370 emit_call(emit, MP_F_IMPORT_ALL);
1371 emit_post(emit);
1372}
1373
1374STATIC void emit_native_import(emit_t *emit, qstr qst, int kind) {
1375 if (kind == MP_EMIT_IMPORT_NAME) {
1376 emit_native_import_name(emit, qst);
1377 } else if (kind == MP_EMIT_IMPORT_FROM) {
1378 emit_native_import_from(emit, qst);
1379 } else {
1380 emit_native_import_star(emit);
1381 }
1382}
1383
1384STATIC void emit_native_load_const_tok(emit_t *emit, mp_token_kind_t tok) {
1385 DEBUG_printf("load_const_tok(tok=%u)\n", tok);
1386 if (tok == MP_TOKEN_ELLIPSIS) {
1387 #if MICROPY_PERSISTENT_CODE_SAVE
1388 emit_native_load_const_obj(emit, MP_OBJ_FROM_PTR(&mp_const_ellipsis_obj));
1389 #else
1390 emit_post_push_imm(emit, VTYPE_PYOBJ, (mp_uint_t)MP_OBJ_FROM_PTR(&mp_const_ellipsis_obj));
1391 #endif
1392 } else {
1393 emit_native_pre(emit);
1394 if (tok == MP_TOKEN_KW_NONE) {
1395 emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
1396 } else {
1397 emit_post_push_imm(emit, VTYPE_BOOL, tok == MP_TOKEN_KW_FALSE ? 0 : 1);
1398 }
1399 }
1400}
1401
1402STATIC void emit_native_load_const_small_int(emit_t *emit, mp_int_t arg) {
1403 DEBUG_printf("load_const_small_int(int=" INT_FMT ")\n", arg);
1404 emit_native_pre(emit);
1405 emit_post_push_imm(emit, VTYPE_INT, arg);
1406}
1407
1408STATIC void emit_native_load_const_str(emit_t *emit, qstr qst) {
1409 emit_native_pre(emit);
1410 // TODO: Eventually we want to be able to work with raw pointers in viper to
1411 // do native array access. For now we just load them as any other object.
1412 /*
1413 if (emit->do_viper_types) {
1414 // load a pointer to the asciiz string?
1415 emit_post_push_imm(emit, VTYPE_PTR, (mp_uint_t)qstr_str(qst));
1416 } else
1417 */
1418 {
1419 need_reg_single(emit, REG_TEMP0, 0);
1420 emit_native_mov_reg_qstr_obj(emit, REG_TEMP0, qst);
1421 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
1422 }
1423}
1424
1425STATIC void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj) {
1426 emit->scope->scope_flags |= MP_SCOPE_FLAG_HASCONSTS;
1427 emit_native_pre(emit);
1428 need_reg_single(emit, REG_RET, 0);
1429 emit_load_reg_with_object(emit, REG_RET, obj);
1430 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1431}
1432
1433STATIC void emit_native_load_null(emit_t *emit) {
1434 emit_native_pre(emit);
1435 emit_post_push_imm(emit, VTYPE_PYOBJ, 0);
1436}
1437
1438STATIC void emit_native_load_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
1439 DEBUG_printf("load_fast(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1440 vtype_kind_t vtype = emit->local_vtype[local_num];
1441 if (vtype == VTYPE_UNBOUND) {
1442 EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("local '%q' used before type known"), qst);
1443 }
1444 emit_native_pre(emit);
1445 if (local_num < REG_LOCAL_NUM && CAN_USE_REGS_FOR_LOCALS(emit)) {
1446 emit_post_push_reg(emit, vtype, reg_local_table[local_num]);
1447 } else {
1448 need_reg_single(emit, REG_TEMP0, 0);
1449 emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_LOCAL_VAR(emit, local_num));
1450 emit_post_push_reg(emit, vtype, REG_TEMP0);
1451 }
1452}
1453
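// A closed-over local is stored as a cell object rather than as a raw value, so
// loading it is a two-step process: first load the cell (like a normal fast local),
// then dereference it.  The load below at offset 1 reads the machine word that
// follows the cell's object header, which holds the cell's current value; for
// example, roughly:
//     def outer():
//         x = 1
//         def inner():
//             return x   # load_deref: fetch the cell for x, then read its contents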
1454STATIC void emit_native_load_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
1455 DEBUG_printf("load_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1456 need_reg_single(emit, REG_RET, 0);
1457 emit_native_load_fast(emit, qst, local_num);
1458 vtype_kind_t vtype;
1459 int reg_base = REG_RET;
1460 emit_pre_pop_reg_flexible(emit, &vtype, &reg_base, -1, -1);
1461 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_RET, reg_base, 1);
1462 // closed over vars are always Python objects
1463 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1464}
1465
1466STATIC void emit_native_load_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
1467 if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
1468 emit_native_load_fast(emit, qst, local_num);
1469 } else {
1470 emit_native_load_deref(emit, qst, local_num);
1471 }
1472}
1473
1474STATIC void emit_native_load_global(emit_t *emit, qstr qst, int kind) {
1475 MP_STATIC_ASSERT(MP_F_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_LOAD_NAME);
1476 MP_STATIC_ASSERT(MP_F_LOAD_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_LOAD_GLOBAL);
1477 emit_native_pre(emit);
1478 if (kind == MP_EMIT_IDOP_GLOBAL_NAME) {
1479 DEBUG_printf("load_name(%s)\n", qstr_str(qst));
1480 } else {
1481 DEBUG_printf("load_global(%s)\n", qstr_str(qst));
1482 if (emit->do_viper_types) {
1483 // check for builtin casting operators
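            // In viper mode the type names themselves (e.g. int, uint, ptr8, ptr16,
            // ptr32) act as casting operators.  Instead of emitting a runtime global
            // lookup, push an immediate tagged VTYPE_BUILTIN_CAST; a later call such
            // as ptr8(buf) or int(x) is then handled by emit_native_call_function as
            // a cast rather than a real function call.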
1484 int native_type = mp_native_type_from_qstr(qst);
1485 if (native_type >= MP_NATIVE_TYPE_BOOL) {
1486 emit_post_push_imm(emit, VTYPE_BUILTIN_CAST, native_type);
1487 return;
1488 }
1489 }
1490 }
1491 emit_call_with_qstr_arg(emit, MP_F_LOAD_NAME + kind, qst, REG_ARG_1);
1492 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1493}
1494
1495STATIC void emit_native_load_attr(emit_t *emit, qstr qst) {
1496 // depends on type of subject:
1497 // - integer, function, pointer to integers: error
1498 // - pointer to structure: get member, quite easy
1499 // - Python object: call mp_load_attr, and needs to be typed to convert result
1500 vtype_kind_t vtype_base;
1501 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1502 assert(vtype_base == VTYPE_PYOBJ);
1503 emit_call_with_qstr_arg(emit, MP_F_LOAD_ATTR, qst, REG_ARG_2); // arg2 = attribute name
1504 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1505}
1506
1507STATIC void emit_native_load_method(emit_t *emit, qstr qst, bool is_super) {
1508 if (is_super) {
1509 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, 3); // arg2 = dest ptr
1510 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, 2); // arg2 = dest ptr
1511 emit_call_with_qstr_arg(emit, MP_F_LOAD_SUPER_METHOD, qst, REG_ARG_1); // arg1 = method name
1512 } else {
1513 vtype_kind_t vtype_base;
1514 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1515 assert(vtype_base == VTYPE_PYOBJ);
1516 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
1517 emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, qst, REG_ARG_2); // arg2 = method name
1518 }
1519}
1520
1521STATIC void emit_native_load_build_class(emit_t *emit) {
1522 emit_native_pre(emit);
1523 emit_call(emit, MP_F_LOAD_BUILD_CLASS);
1524 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1525}
1526
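// Subscript loads take one of two paths.  If the base is a Python object the
// generic runtime helper (MP_F_OBJ_SUBSCR, with MP_OBJ_SENTINEL meaning "load") is
// called.  If the base has a viper pointer type the element is loaded directly from
// memory, scaled by the element size.  For example, roughly:
//     @micropython.viper
//     def get16(buf: ptr16, i: int) -> int:
//         return buf[i]   # emits a 16-bit load from buf + 2*i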
1527STATIC void emit_native_load_subscr(emit_t *emit) {
1528 DEBUG_printf("load_subscr\n");
1529 // need to compile: base[index]
1530
1531 // pop: index, base
1532 // optimise case where index is an immediate
1533 vtype_kind_t vtype_base = peek_vtype(emit, 1);
1534
1535 if (vtype_base == VTYPE_PYOBJ) {
1536 // standard Python subscr
1537 // TODO factor this implicit cast code with other uses of it
1538 vtype_kind_t vtype_index = peek_vtype(emit, 0);
1539 if (vtype_index == VTYPE_PYOBJ) {
1540 emit_pre_pop_reg(emit, &vtype_index, REG_ARG_2);
1541 } else {
1542 emit_pre_pop_reg(emit, &vtype_index, REG_ARG_1);
1543 emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype_index, REG_ARG_2); // arg2 = type
1544 ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
1545 }
1546 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1547 emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_SENTINEL, REG_ARG_3);
1548 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
1549 } else {
1550 // viper load
1551 // TODO The different machine architectures have very different
1552 // capabilities and requirements for loads, so probably best to
1553 // write a completely separate load-optimiser for each one.
1554 stack_info_t *top = peek_stack(emit, 0);
1555 if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
1556 // index is an immediate
1557 mp_int_t index_value = top->data.u_imm;
1558 emit_pre_pop_discard(emit); // discard index
1559 int reg_base = REG_ARG_1;
1560 int reg_index = REG_ARG_2;
1561 emit_pre_pop_reg_flexible(emit, &vtype_base, &reg_base, reg_index, reg_index);
1562 switch (vtype_base) {
1563 case VTYPE_PTR8: {
1564 // pointer to 8-bit memory
1565 // TODO optimise to use thumb ldrb r1, [r2, r3]
1566 if (index_value != 0) {
1567 // index is non-zero
1568 #if N_THUMB
1569 if (index_value > 0 && index_value < 32) {
1570 asm_thumb_ldrb_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1571 break;
1572 }
1573 #endif
1574 ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
1575 ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
1576 reg_base = reg_index;
1577 }
1578 ASM_LOAD8_REG_REG(emit->as, REG_RET, reg_base); // load from (base+index)
1579 break;
1580 }
1581 case VTYPE_PTR16: {
1582 // pointer to 16-bit memory
1583 if (index_value != 0) {
1584 // index is a non-zero immediate
1585 #if N_THUMB
1586 if (index_value > 0 && index_value < 32) {
1587 asm_thumb_ldrh_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1588 break;
1589 }
1590 #endif
1591 ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
1592 ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
1593 reg_base = reg_index;
1594 }
1595 ASM_LOAD16_REG_REG(emit->as, REG_RET, reg_base); // load from (base+2*index)
1596 break;
1597 }
1598 case VTYPE_PTR32: {
1599 // pointer to 32-bit memory
1600 if (index_value != 0) {
1601 // index is a non-zero immediate
1602 #if N_THUMB
1603 if (index_value > 0 && index_value < 32) {
1604 asm_thumb_ldr_rlo_rlo_i5(emit->as, REG_RET, reg_base, index_value);
1605 break;
1606 }
1607 #endif
1608 ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
1609 ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
1610 reg_base = reg_index;
1611 }
1612 ASM_LOAD32_REG_REG(emit->as, REG_RET, reg_base); // load from (base+4*index)
1613 break;
1614 }
1615 default:
1616 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1617 MP_ERROR_TEXT("can't load from '%q'"), vtype_to_qstr(vtype_base));
1618 }
1619 } else {
1620 // index is not an immediate
1621 vtype_kind_t vtype_index;
1622 int reg_index = REG_ARG_2;
1623 emit_pre_pop_reg_flexible(emit, &vtype_index, &reg_index, REG_ARG_1, REG_ARG_1);
1624 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1625 if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
1626 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1627 MP_ERROR_TEXT("can't load with '%q' index"), vtype_to_qstr(vtype_index));
1628 }
1629 switch (vtype_base) {
1630 case VTYPE_PTR8: {
1631 // pointer to 8-bit memory
1632 // TODO optimise to use thumb ldrb r1, [r2, r3]
1633 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
                    ASM_LOAD8_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+index)
1635 break;
1636 }
1637 case VTYPE_PTR16: {
1638 // pointer to 16-bit memory
                    // add index to base twice to reach base+2*index
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1641 ASM_LOAD16_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+2*index)
1642 break;
1643 }
1644 case VTYPE_PTR32: {
                    // pointer to 32-bit memory
                    // add index to base four times to reach base+4*index
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1650 ASM_LOAD32_REG_REG(emit->as, REG_RET, REG_ARG_1); // load from (base+4*index)
1651 break;
1652 }
1653 default:
1654 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1655 MP_ERROR_TEXT("can't load from '%q'"), vtype_to_qstr(vtype_base));
1656 }
1657 }
1658 emit_post_push_reg(emit, VTYPE_INT, REG_RET);
1659 }
1660}
1661
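// In viper mode the type of a local is fixed by its annotation or by the first
// assignment to it.  A later assignment with a different type is reported as an
// error rather than silently converted, e.g. roughly:
//     @micropython.viper
//     def f(buf: ptr8):
//         x = 0     # x gets type int
//         x = buf   # error: local 'x' has type 'int' but source is 'ptr8'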
1662STATIC void emit_native_store_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
1663 vtype_kind_t vtype;
1664 if (local_num < REG_LOCAL_NUM && CAN_USE_REGS_FOR_LOCALS(emit)) {
1665 emit_pre_pop_reg(emit, &vtype, reg_local_table[local_num]);
1666 } else {
1667 emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
1668 emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, local_num), REG_TEMP0);
1669 }
1670 emit_post(emit);
1671
1672 // check types
1673 if (emit->local_vtype[local_num] == VTYPE_UNBOUND) {
1674 // first time this local is assigned, so give it a type of the object stored in it
1675 emit->local_vtype[local_num] = vtype;
1676 } else if (emit->local_vtype[local_num] != vtype) {
1677 // type of local is not the same as object stored in it
1678 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1679 MP_ERROR_TEXT("local '%q' has type '%q' but source is '%q'"),
1680 qst, vtype_to_qstr(emit->local_vtype[local_num]), vtype_to_qstr(vtype));
1681 }
1682}
1683
1684STATIC void emit_native_store_deref(emit_t *emit, qstr qst, mp_uint_t local_num) {
1685 DEBUG_printf("store_deref(%s, " UINT_FMT ")\n", qstr_str(qst), local_num);
1686 need_reg_single(emit, REG_TEMP0, 0);
1687 need_reg_single(emit, REG_TEMP1, 0);
1688 emit_native_load_fast(emit, qst, local_num);
1689 vtype_kind_t vtype;
1690 int reg_base = REG_TEMP0;
1691 emit_pre_pop_reg_flexible(emit, &vtype, &reg_base, -1, -1);
1692 int reg_src = REG_TEMP1;
1693 emit_pre_pop_reg_flexible(emit, &vtype, &reg_src, reg_base, reg_base);
1694 ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, reg_base, 1);
1695 emit_post(emit);
1696}
1697
1698STATIC void emit_native_store_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
1699 if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
1700 emit_native_store_fast(emit, qst, local_num);
1701 } else {
1702 emit_native_store_deref(emit, qst, local_num);
1703 }
1704}
1705
1706STATIC void emit_native_store_global(emit_t *emit, qstr qst, int kind) {
1707 MP_STATIC_ASSERT(MP_F_STORE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_STORE_NAME);
1708 MP_STATIC_ASSERT(MP_F_STORE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_STORE_GLOBAL);
1709 if (kind == MP_EMIT_IDOP_GLOBAL_NAME) {
1710 // mp_store_name, but needs conversion of object (maybe have mp_viper_store_name(obj, type))
1711 vtype_kind_t vtype;
1712 emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1713 assert(vtype == VTYPE_PYOBJ);
1714 } else {
1715 vtype_kind_t vtype = peek_vtype(emit, 0);
1716 if (vtype == VTYPE_PYOBJ) {
1717 emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
1718 } else {
1719 emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
1720 emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, vtype, REG_ARG_2); // arg2 = type
1721 ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
1722 }
1723 }
1724 emit_call_with_qstr_arg(emit, MP_F_STORE_NAME + kind, qst, REG_ARG_1); // arg1 = name
1725 emit_post(emit);
1726}
1727
1728STATIC void emit_native_store_attr(emit_t *emit, qstr qst) {
1729 vtype_kind_t vtype_base, vtype_val;
1730 emit_pre_pop_reg_reg(emit, &vtype_base, REG_ARG_1, &vtype_val, REG_ARG_3); // arg1 = base, arg3 = value
1731 assert(vtype_base == VTYPE_PYOBJ);
1732 assert(vtype_val == VTYPE_PYOBJ);
1733 emit_call_with_qstr_arg(emit, MP_F_STORE_ATTR, qst, REG_ARG_2); // arg2 = attribute name
1734 emit_post(emit);
1735}
1736
1737STATIC void emit_native_store_subscr(emit_t *emit) {
1738 DEBUG_printf("store_subscr\n");
1739 // need to compile: base[index] = value
1740
1741 // pop: index, base, value
1742 // optimise case where index is an immediate
1743 vtype_kind_t vtype_base = peek_vtype(emit, 1);
1744
1745 if (vtype_base == VTYPE_PYOBJ) {
1746 // standard Python subscr
1747 vtype_kind_t vtype_index = peek_vtype(emit, 0);
1748 vtype_kind_t vtype_value = peek_vtype(emit, 2);
1749 if (vtype_index != VTYPE_PYOBJ || vtype_value != VTYPE_PYOBJ) {
1750 // need to implicitly convert non-objects to objects
1751 // TODO do this properly
1752 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, 3);
1753 adjust_stack(emit, 3);
1754 }
1755 emit_pre_pop_reg_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1, &vtype_value, REG_ARG_3);
1756 emit_call(emit, MP_F_OBJ_SUBSCR);
1757 } else {
1758 // viper store
1759 // TODO The different machine architectures have very different
1760 // capabilities and requirements for stores, so probably best to
1761 // write a completely separate store-optimiser for each one.
1762 stack_info_t *top = peek_stack(emit, 0);
1763 if (top->vtype == VTYPE_INT && top->kind == STACK_IMM) {
1764 // index is an immediate
1765 mp_int_t index_value = top->data.u_imm;
1766 emit_pre_pop_discard(emit); // discard index
1767 vtype_kind_t vtype_value;
1768 int reg_base = REG_ARG_1;
1769 int reg_index = REG_ARG_2;
1770 int reg_value = REG_ARG_3;
1771 emit_pre_pop_reg_flexible(emit, &vtype_base, &reg_base, reg_index, reg_value);
1772 #if N_X86
1773 // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
1774 emit_pre_pop_reg(emit, &vtype_value, reg_value);
1775 #else
1776 emit_pre_pop_reg_flexible(emit, &vtype_value, &reg_value, reg_base, reg_index);
1777 #endif
1778 if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
1779 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1780 MP_ERROR_TEXT("can't store '%q'"), vtype_to_qstr(vtype_value));
1781 }
1782 switch (vtype_base) {
1783 case VTYPE_PTR8: {
1784 // pointer to 8-bit memory
1785 // TODO optimise to use thumb strb r1, [r2, r3]
1786 if (index_value != 0) {
1787 // index is non-zero
1788 #if N_THUMB
1789 if (index_value > 0 && index_value < 32) {
1790 asm_thumb_strb_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1791 break;
1792 }
1793 #endif
1794 ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
1795 #if N_ARM
1796 asm_arm_strb_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
1797 return;
1798 #endif
1799 ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add index to base
1800 reg_base = reg_index;
1801 }
1802 ASM_STORE8_REG_REG(emit->as, reg_value, reg_base); // store value to (base+index)
1803 break;
1804 }
1805 case VTYPE_PTR16: {
1806 // pointer to 16-bit memory
1807 if (index_value != 0) {
1808 // index is a non-zero immediate
1809 #if N_THUMB
1810 if (index_value > 0 && index_value < 32) {
1811 asm_thumb_strh_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1812 break;
1813 }
1814 #endif
1815 ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 1);
1816 ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 2*index to base
1817 reg_base = reg_index;
1818 }
1819 ASM_STORE16_REG_REG(emit->as, reg_value, reg_base); // store value to (base+2*index)
1820 break;
1821 }
1822 case VTYPE_PTR32: {
1823 // pointer to 32-bit memory
1824 if (index_value != 0) {
1825 // index is a non-zero immediate
1826 #if N_THUMB
1827 if (index_value > 0 && index_value < 32) {
1828 asm_thumb_str_rlo_rlo_i5(emit->as, reg_value, reg_base, index_value);
1829 break;
1830 }
1831 #endif
1832 #if N_ARM
1833 ASM_MOV_REG_IMM(emit->as, reg_index, index_value);
1834 asm_arm_str_reg_reg_reg(emit->as, reg_value, reg_base, reg_index);
1835 return;
1836 #endif
1837 ASM_MOV_REG_IMM(emit->as, reg_index, index_value << 2);
1838 ASM_ADD_REG_REG(emit->as, reg_index, reg_base); // add 4*index to base
1839 reg_base = reg_index;
1840 }
1841 ASM_STORE32_REG_REG(emit->as, reg_value, reg_base); // store value to (base+4*index)
1842 break;
1843 }
1844 default:
1845 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1846 MP_ERROR_TEXT("can't store to '%q'"), vtype_to_qstr(vtype_base));
1847 }
1848 } else {
1849 // index is not an immediate
1850 vtype_kind_t vtype_index, vtype_value;
1851 int reg_index = REG_ARG_2;
1852 int reg_value = REG_ARG_3;
1853 emit_pre_pop_reg_flexible(emit, &vtype_index, &reg_index, REG_ARG_1, reg_value);
1854 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1);
1855 if (vtype_index != VTYPE_INT && vtype_index != VTYPE_UINT) {
1856 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1857 MP_ERROR_TEXT("can't store with '%q' index"), vtype_to_qstr(vtype_index));
1858 }
1859 #if N_X86
1860 // special case: x86 needs byte stores to be from lower 4 regs (REG_ARG_3 is EDX)
1861 emit_pre_pop_reg(emit, &vtype_value, reg_value);
1862 #else
1863 emit_pre_pop_reg_flexible(emit, &vtype_value, &reg_value, REG_ARG_1, reg_index);
1864 #endif
1865 if (vtype_value != VTYPE_BOOL && vtype_value != VTYPE_INT && vtype_value != VTYPE_UINT) {
1866 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1867 MP_ERROR_TEXT("can't store '%q'"), vtype_to_qstr(vtype_value));
1868 }
1869 switch (vtype_base) {
1870 case VTYPE_PTR8: {
1871 // pointer to 8-bit memory
1872 // TODO optimise to use thumb strb r1, [r2, r3]
1873 #if N_ARM
1874 asm_arm_strb_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1875 break;
1876 #endif
1877 ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index); // add index to base
1878 ASM_STORE8_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+index)
1879 break;
1880 }
1881 case VTYPE_PTR16: {
1882 // pointer to 16-bit memory
1883 #if N_ARM
1884 asm_arm_strh_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1885 break;
1886 #endif
                    // add index to base twice to reach base+2*index
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1889 ASM_STORE16_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+2*index)
1890 break;
1891 }
1892 case VTYPE_PTR32: {
1893 // pointer to 32-bit memory
1894 #if N_ARM
1895 asm_arm_str_reg_reg_reg(emit->as, reg_value, REG_ARG_1, reg_index);
1896 break;
1897 #endif
                    // add index to base four times to reach base+4*index
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
                    ASM_ADD_REG_REG(emit->as, REG_ARG_1, reg_index);
1902 ASM_STORE32_REG_REG(emit->as, reg_value, REG_ARG_1); // store value to (base+4*index)
1903 break;
1904 }
1905 default:
1906 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
1907 MP_ERROR_TEXT("can't store to '%q'"), vtype_to_qstr(vtype_base));
1908 }
1909 }
    }
1912}
1913
1914STATIC void emit_native_delete_local(emit_t *emit, qstr qst, mp_uint_t local_num, int kind) {
1915 if (kind == MP_EMIT_IDOP_LOCAL_FAST) {
        // TODO: This is not a compliant implementation. We could use MP_OBJ_SENTINEL
1917 // to mark deleted vars but then every var would need to be checked on
1918 // each access. Very inefficient, so just set value to None to enable GC.
1919 emit_native_load_const_tok(emit, MP_TOKEN_KW_NONE);
1920 emit_native_store_fast(emit, qst, local_num);
1921 } else {
1922 // TODO implement me!
1923 }
1924}
1925
1926STATIC void emit_native_delete_global(emit_t *emit, qstr qst, int kind) {
1927 MP_STATIC_ASSERT(MP_F_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_NAME == MP_F_DELETE_NAME);
1928 MP_STATIC_ASSERT(MP_F_DELETE_NAME + MP_EMIT_IDOP_GLOBAL_GLOBAL == MP_F_DELETE_GLOBAL);
1929 emit_native_pre(emit);
1930 emit_call_with_qstr_arg(emit, MP_F_DELETE_NAME + kind, qst, REG_ARG_1);
1931 emit_post(emit);
1932}
1933
1934STATIC void emit_native_delete_attr(emit_t *emit, qstr qst) {
1935 vtype_kind_t vtype_base;
1936 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
1937 assert(vtype_base == VTYPE_PYOBJ);
1938 ASM_XOR_REG_REG(emit->as, REG_ARG_3, REG_ARG_3); // arg3 = value (null for delete)
1939 emit_call_with_qstr_arg(emit, MP_F_STORE_ATTR, qst, REG_ARG_2); // arg2 = attribute name
1940 emit_post(emit);
1941}
1942
1943STATIC void emit_native_delete_subscr(emit_t *emit) {
1944 vtype_kind_t vtype_index, vtype_base;
1945 emit_pre_pop_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1); // index, base
1946 assert(vtype_index == VTYPE_PYOBJ);
1947 assert(vtype_base == VTYPE_PYOBJ);
1948 emit_call_with_imm_arg(emit, MP_F_OBJ_SUBSCR, (mp_uint_t)MP_OBJ_NULL, REG_ARG_3);
1949}
1950
1951STATIC void emit_native_subscr(emit_t *emit, int kind) {
1952 if (kind == MP_EMIT_SUBSCR_LOAD) {
1953 emit_native_load_subscr(emit);
1954 } else if (kind == MP_EMIT_SUBSCR_STORE) {
1955 emit_native_store_subscr(emit);
1956 } else {
1957 emit_native_delete_subscr(emit);
1958 }
1959}
1960
1961STATIC void emit_native_attr(emit_t *emit, qstr qst, int kind) {
1962 if (kind == MP_EMIT_ATTR_LOAD) {
1963 emit_native_load_attr(emit, qst);
1964 } else if (kind == MP_EMIT_ATTR_STORE) {
1965 emit_native_store_attr(emit, qst);
1966 } else {
1967 emit_native_delete_attr(emit, qst);
1968 }
1969}
1970
1971STATIC void emit_native_dup_top(emit_t *emit) {
1972 DEBUG_printf("dup_top\n");
1973 vtype_kind_t vtype;
1974 int reg = REG_TEMP0;
1975 emit_pre_pop_reg_flexible(emit, &vtype, &reg, -1, -1);
1976 emit_post_push_reg_reg(emit, vtype, reg, vtype, reg);
1977}
1978
1979STATIC void emit_native_dup_top_two(emit_t *emit) {
1980 vtype_kind_t vtype0, vtype1;
1981 emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
1982 emit_post_push_reg_reg_reg_reg(emit, vtype1, REG_TEMP1, vtype0, REG_TEMP0, vtype1, REG_TEMP1, vtype0, REG_TEMP0);
1983}
1984
1985STATIC void emit_native_pop_top(emit_t *emit) {
1986 DEBUG_printf("pop_top\n");
1987 emit_pre_pop_discard(emit);
1988 emit_post(emit);
1989}
1990
1991STATIC void emit_native_rot_two(emit_t *emit) {
1992 DEBUG_printf("rot_two\n");
1993 vtype_kind_t vtype0, vtype1;
1994 emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
1995 emit_post_push_reg_reg(emit, vtype0, REG_TEMP0, vtype1, REG_TEMP1);
1996}
1997
1998STATIC void emit_native_rot_three(emit_t *emit) {
1999 DEBUG_printf("rot_three\n");
2000 vtype_kind_t vtype0, vtype1, vtype2;
2001 emit_pre_pop_reg_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1, &vtype2, REG_TEMP2);
2002 emit_post_push_reg_reg_reg(emit, vtype0, REG_TEMP0, vtype2, REG_TEMP2, vtype1, REG_TEMP1);
2003}
2004
2005STATIC void emit_native_jump(emit_t *emit, mp_uint_t label) {
2006 DEBUG_printf("jump(label=" UINT_FMT ")\n", label);
2007 emit_native_pre(emit);
2008 // need to commit stack because we are jumping elsewhere
2009 need_stack_settled(emit);
2010 ASM_JUMP(emit->as, label);
2011 emit_post(emit);
2012}
2013
2014STATIC void emit_native_jump_helper(emit_t *emit, bool cond, mp_uint_t label, bool pop) {
2015 vtype_kind_t vtype = peek_vtype(emit, 0);
2016 if (vtype == VTYPE_PYOBJ) {
2017 emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
2018 if (!pop) {
2019 adjust_stack(emit, 1);
2020 }
2021 emit_call(emit, MP_F_OBJ_IS_TRUE);
2022 } else {
2023 emit_pre_pop_reg(emit, &vtype, REG_RET);
2024 if (!pop) {
2025 adjust_stack(emit, 1);
2026 }
2027 if (!(vtype == VTYPE_BOOL || vtype == VTYPE_INT || vtype == VTYPE_UINT)) {
2028 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2029 MP_ERROR_TEXT("can't implicitly convert '%q' to 'bool'"), vtype_to_qstr(vtype));
2030 }
2031 }
2032 // For non-pop need to save the vtype so that emit_native_adjust_stack_size
2033 // can use it. This is a bit of a hack.
2034 if (!pop) {
2035 emit->saved_stack_vtype = vtype;
2036 }
2037 // need to commit stack because we may jump elsewhere
2038 need_stack_settled(emit);
2039 // Emit the jump
2040 if (cond) {
2041 ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, label, vtype == VTYPE_PYOBJ);
2042 } else {
2043 ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label, vtype == VTYPE_PYOBJ);
2044 }
2045 if (!pop) {
2046 adjust_stack(emit, -1);
2047 }
2048 emit_post(emit);
2049}
2050
2051STATIC void emit_native_pop_jump_if(emit_t *emit, bool cond, mp_uint_t label) {
2052 DEBUG_printf("pop_jump_if(cond=%u, label=" UINT_FMT ")\n", cond, label);
2053 emit_native_jump_helper(emit, cond, label, true);
2054}
2055
2056STATIC void emit_native_jump_if_or_pop(emit_t *emit, bool cond, mp_uint_t label) {
2057 DEBUG_printf("jump_if_or_pop(cond=%u, label=" UINT_FMT ")\n", cond, label);
2058 emit_native_jump_helper(emit, cond, label, false);
2059}
2060
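// An unwind jump is a break/continue/return that leaves one or more exception
// handlers.  Any active finally blocks between the jump and its target must still
// run, so instead of jumping straight to the target the code below chains the
// finallys together via their unwind_label fields and stashes the real target in
// the unwind slot, e.g. roughly:
//     while cond:
//         try:
//             break        # must execute the finally before leaving the loop
//         finally:
//             cleanup()
// emit_native_end_finally then either continues the chain or does the final jump.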
2061STATIC void emit_native_unwind_jump(emit_t *emit, mp_uint_t label, mp_uint_t except_depth) {
2062 if (except_depth > 0) {
2063 exc_stack_entry_t *first_finally = NULL;
2064 exc_stack_entry_t *prev_finally = NULL;
2065 exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
2066 for (; except_depth > 0; --except_depth, --e) {
2067 if (e->is_finally && e->is_active) {
2068 // Found an active finally handler
2069 if (first_finally == NULL) {
2070 first_finally = e;
2071 }
2072 if (prev_finally != NULL) {
2073 // Mark prev finally as needed to unwind a jump
2074 prev_finally->unwind_label = e->label;
2075 }
2076 prev_finally = e;
2077 }
2078 }
2079 if (prev_finally == NULL) {
2080 // No finally, handle the jump ourselves
2081 // First, restore the exception handler address for the jump
2082 if (e < emit->exc_stack) {
2083 ASM_XOR_REG_REG(emit->as, REG_RET, REG_RET);
2084 } else {
2085 ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
2086 }
2087 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
2088 } else {
2089 // Last finally should do our jump for us
2090 // Mark finally as needing to decide the type of jump
2091 prev_finally->unwind_label = UNWIND_LABEL_DO_FINAL_UNWIND;
2092 ASM_MOV_REG_PCREL(emit->as, REG_RET, label & ~MP_EMIT_BREAK_FROM_FOR);
2093 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_RET);
2094 // Cancel any active exception (see also emit_native_pop_except_jump)
2095 ASM_MOV_REG_IMM(emit->as, REG_RET, (mp_uint_t)MP_OBJ_NULL);
2096 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_RET);
2097 // Jump to the innermost active finally
2098 label = first_finally->label;
2099 }
2100 }
2101 emit_native_jump(emit, label & ~MP_EMIT_BREAK_FROM_FOR);
2102}
2103
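// Entering a with statement loads the context manager's __exit__ method (kept on
// the stack for the duration of the block), calls __enter__, and installs an
// exception handler so __exit__ is also called when the body raises.  The stack
// comments below track this for code like, roughly:
//     with lock as l:
//         ...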
2104STATIC void emit_native_setup_with(emit_t *emit, mp_uint_t label) {
2105 // the context manager is on the top of the stack
2106 // stack: (..., ctx_mgr)
2107
2108 // get __exit__ method
2109 vtype_kind_t vtype;
2110 emit_access_stack(emit, 1, &vtype, REG_ARG_1); // arg1 = ctx_mgr
2111 assert(vtype == VTYPE_PYOBJ);
2112 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
2113 emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___exit__, REG_ARG_2);
2114 // stack: (..., ctx_mgr, __exit__, self)
2115
2116 emit_pre_pop_reg(emit, &vtype, REG_ARG_3); // self
2117 emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // __exit__
2118 emit_pre_pop_reg(emit, &vtype, REG_ARG_1); // ctx_mgr
2119 emit_post_push_reg(emit, vtype, REG_ARG_2); // __exit__
2120 emit_post_push_reg(emit, vtype, REG_ARG_3); // self
2121 // stack: (..., __exit__, self)
2122 // REG_ARG_1=ctx_mgr
2123
2124 // get __enter__ method
2125 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
2126 emit_call_with_qstr_arg(emit, MP_F_LOAD_METHOD, MP_QSTR___enter__, REG_ARG_2); // arg2 = method name
2127 // stack: (..., __exit__, self, __enter__, self)
2128
2129 // call __enter__ method
2130 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2); // pointer to items, including meth and self
2131 emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 0, REG_ARG_1, 0, REG_ARG_2);
2132 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // push return value of __enter__
2133 // stack: (..., __exit__, self, as_value)
2134
2135 // need to commit stack because we may jump elsewhere
2136 need_stack_settled(emit);
2137 emit_native_push_exc_stack(emit, label, true);
2138
2139 emit_native_dup_top(emit);
2140 // stack: (..., __exit__, self, as_value, as_value)
2141}
2142
2143STATIC void emit_native_setup_block(emit_t *emit, mp_uint_t label, int kind) {
2144 if (kind == MP_EMIT_SETUP_BLOCK_WITH) {
2145 emit_native_setup_with(emit, label);
2146 } else {
2147 // Set up except and finally
2148 emit_native_pre(emit);
2149 need_stack_settled(emit);
2150 emit_native_push_exc_stack(emit, label, kind == MP_EMIT_SETUP_BLOCK_FINALLY);
2151 emit_post(emit);
2152 }
2153}
2154
2155STATIC void emit_native_with_cleanup(emit_t *emit, mp_uint_t label) {
2156 // Note: 3 labels are reserved for this function, starting at *emit->label_slot
2157
2158 // stack: (..., __exit__, self, as_value)
2159 emit_native_pre(emit);
2160 emit_native_leave_exc_stack(emit, false);
2161 adjust_stack(emit, -1);
2162 // stack: (..., __exit__, self)
2163
2164 // Label for case where __exit__ is called from an unwind jump
2165 emit_native_label_assign(emit, *emit->label_slot + 2);
2166
2167 // call __exit__
2168 emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
2169 emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
2170 emit_post_push_imm(emit, VTYPE_PTR_NONE, 0);
2171 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
2172 emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);
2173
2174 // Replace exc with None and finish
2175 emit_native_jump(emit, *emit->label_slot);
2176
2177 // nlr_catch
2178 // Don't use emit_native_label_assign because this isn't a real finally label
2179 mp_asm_base_label_assign(&emit->as->base, label);
2180
2181 // Leave with's exception handler
2182 emit_native_leave_exc_stack(emit, true);
2183
2184 // Adjust stack counter for: __exit__, self (implicitly discard as_value which is above self)
2185 emit_native_adjust_stack_size(emit, 2);
2186 // stack: (..., __exit__, self)
2187
2188 ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit)); // get exc
2189
2190 // Check if exc is MP_OBJ_NULL (i.e. zero) and jump to non-exc handler if it is
2191 ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, *emit->label_slot + 2, false);
2192
2193 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_2, REG_ARG_1, 0); // get type(exc)
2194 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_2); // push type(exc)
2195 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_1); // push exc value
2196 emit_post_push_imm(emit, VTYPE_PTR_NONE, 0); // traceback info
2197 // Stack: (..., __exit__, self, type(exc), exc, traceback)
2198
2199 // call __exit__ method
2200 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 5);
2201 emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, 3, REG_ARG_1, 0, REG_ARG_2);
2202 // Stack: (...)
2203
    // If REG_RET is true then we need to replace the exception with MP_OBJ_NULL (swallow the exception)
2205 if (REG_ARG_1 != REG_RET) {
2206 ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_RET);
2207 }
2208 emit_call(emit, MP_F_OBJ_IS_TRUE);
2209 ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, *emit->label_slot + 1, true);
2210
2211 // Replace exception with MP_OBJ_NULL.
2212 emit_native_label_assign(emit, *emit->label_slot);
2213 ASM_MOV_REG_IMM(emit->as, REG_TEMP0, (mp_uint_t)MP_OBJ_NULL);
2214 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
2215
2216 // end of with cleanup nlr_catch block
2217 emit_native_label_assign(emit, *emit->label_slot + 1);
2218
2219 // Exception is in nlr_buf.ret_val slot
2220}
2221
2222STATIC void emit_native_end_finally(emit_t *emit) {
2223 // logic:
2224 // exc = pop_stack
2225 // if exc == None: pass
2226 // else: raise exc
2227 // the check if exc is None is done in the MP_F_NATIVE_RAISE stub
2228 emit_native_pre(emit);
2229 ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
2230 emit_call(emit, MP_F_NATIVE_RAISE);
2231
2232 // Get state for this finally and see if we need to unwind
2233 exc_stack_entry_t *e = emit_native_pop_exc_stack(emit);
2234 if (e->unwind_label != UNWIND_LABEL_UNUSED) {
2235 ASM_MOV_REG_LOCAL(emit->as, REG_RET, LOCAL_IDX_EXC_HANDLER_UNWIND(emit));
2236 ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, *emit->label_slot, false);
2237 if (e->unwind_label == UNWIND_LABEL_DO_FINAL_UNWIND) {
2238 ASM_JUMP_REG(emit->as, REG_RET);
2239 } else {
2240 emit_native_jump(emit, e->unwind_label);
2241 }
2242 emit_native_label_assign(emit, *emit->label_slot);
2243 }
2244
2245 emit_post(emit);
2246}
2247
2248STATIC void emit_native_get_iter(emit_t *emit, bool use_stack) {
2249 // perhaps the difficult one, as we want to rewrite for loops using native code
2250 // in cases where we iterate over a Python object, can we use normal runtime calls?
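    // When use_stack is true (a normal for loop) a buffer of MP_OBJ_ITER_BUF_NSLOTS
    // slots is reserved on the stack and passed to the runtime, so simple iterators
    // can be built without a heap allocation; otherwise a NULL buffer is passed and
    // the runtime allocates the iterator on the heap.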
2251
2252 vtype_kind_t vtype;
2253 emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
2254 assert(vtype == VTYPE_PYOBJ);
2255 if (use_stack) {
2256 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_2, MP_OBJ_ITER_BUF_NSLOTS);
2257 emit_call(emit, MP_F_NATIVE_GETITER);
2258 } else {
2259 // mp_getiter will allocate the iter_buf on the heap
2260 ASM_MOV_REG_IMM(emit->as, REG_ARG_2, 0);
2261 emit_call(emit, MP_F_NATIVE_GETITER);
2262 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2263 }
2264}
2265
2266STATIC void emit_native_for_iter(emit_t *emit, mp_uint_t label) {
2267 emit_native_pre(emit);
2268 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_1, MP_OBJ_ITER_BUF_NSLOTS);
2269 adjust_stack(emit, MP_OBJ_ITER_BUF_NSLOTS);
2270 emit_call(emit, MP_F_NATIVE_ITERNEXT);
2271 #if MICROPY_DEBUG_MP_OBJ_SENTINELS
2272 ASM_MOV_REG_IMM(emit->as, REG_TEMP1, (mp_uint_t)MP_OBJ_STOP_ITERATION);
2273 ASM_JUMP_IF_REG_EQ(emit->as, REG_RET, REG_TEMP1, label);
2274 #else
2275 MP_STATIC_ASSERT(MP_OBJ_STOP_ITERATION == 0);
2276 ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, label, false);
2277 #endif
2278 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2279}
2280
2281STATIC void emit_native_for_iter_end(emit_t *emit) {
2282 // adjust stack counter (we get here from for_iter ending, which popped the value for us)
2283 emit_native_pre(emit);
2284 adjust_stack(emit, -MP_OBJ_ITER_BUF_NSLOTS);
2285 emit_post(emit);
2286}
2287
2288STATIC void emit_native_pop_except_jump(emit_t *emit, mp_uint_t label, bool within_exc_handler) {
2289 if (within_exc_handler) {
2290 // Cancel any active exception so subsequent handlers don't see it
2291 ASM_MOV_REG_IMM(emit->as, REG_TEMP0, (mp_uint_t)MP_OBJ_NULL);
2292 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_TEMP0);
2293 } else {
2294 emit_native_leave_exc_stack(emit, false);
2295 }
2296 emit_native_jump(emit, label);
2297}
2298
2299STATIC void emit_native_unary_op(emit_t *emit, mp_unary_op_t op) {
2300 vtype_kind_t vtype;
2301 emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
2302 if (vtype == VTYPE_PYOBJ) {
2303 emit_call_with_imm_arg(emit, MP_F_UNARY_OP, op, REG_ARG_1);
2304 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2305 } else {
2306 adjust_stack(emit, 1);
2307 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2308 MP_ERROR_TEXT("unary op %q not implemented"), mp_unary_op_method_name[op]);
2309 }
2310}
2311
2312STATIC void emit_native_binary_op(emit_t *emit, mp_binary_op_t op) {
2313 DEBUG_printf("binary_op(" UINT_FMT ")\n", op);
2314 vtype_kind_t vtype_lhs = peek_vtype(emit, 1);
2315 vtype_kind_t vtype_rhs = peek_vtype(emit, 0);
2316 if ((vtype_lhs == VTYPE_INT || vtype_lhs == VTYPE_UINT)
2317 && (vtype_rhs == VTYPE_INT || vtype_rhs == VTYPE_UINT)) {
2318 // for integers, inplace and normal ops are equivalent, so use just normal ops
2319 if (MP_BINARY_OP_INPLACE_OR <= op && op <= MP_BINARY_OP_INPLACE_POWER) {
2320 op += MP_BINARY_OP_OR - MP_BINARY_OP_INPLACE_OR;
2321 }
2322
2323 #if N_X64 || N_X86
2324 // special cases for x86 and shifting
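        // (x86/x64 variable shift instructions take their shift count in cl, so the
        // rhs must be popped into ECX/RCX rather than an arbitrary register)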
2325 if (op == MP_BINARY_OP_LSHIFT || op == MP_BINARY_OP_RSHIFT) {
2326 #if N_X64
2327 emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X64_REG_RCX, &vtype_lhs, REG_RET);
2328 #else
2329 emit_pre_pop_reg_reg(emit, &vtype_rhs, ASM_X86_REG_ECX, &vtype_lhs, REG_RET);
2330 #endif
2331 if (op == MP_BINARY_OP_LSHIFT) {
2332 ASM_LSL_REG(emit->as, REG_RET);
2333 } else {
2334 if (vtype_lhs == VTYPE_UINT) {
2335 ASM_LSR_REG(emit->as, REG_RET);
2336 } else {
2337 ASM_ASR_REG(emit->as, REG_RET);
2338 }
2339 }
2340 emit_post_push_reg(emit, vtype_lhs, REG_RET);
2341 return;
2342 }
2343 #endif
2344
        // special cases for floor-divide and modulo because we dispatch to helper functions
2346 if (op == MP_BINARY_OP_FLOOR_DIVIDE || op == MP_BINARY_OP_MODULO) {
2347 emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_2, &vtype_lhs, REG_ARG_1);
2348 if (vtype_lhs != VTYPE_INT) {
2349 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2350 MP_ERROR_TEXT("div/mod not implemented for uint"), mp_binary_op_method_name[op]);
2351 }
2352 if (op == MP_BINARY_OP_FLOOR_DIVIDE) {
2353 emit_call(emit, MP_F_SMALL_INT_FLOOR_DIVIDE);
2354 } else {
2355 emit_call(emit, MP_F_SMALL_INT_MODULO);
2356 }
2357 emit_post_push_reg(emit, VTYPE_INT, REG_RET);
2358 return;
2359 }
2360
2361 int reg_rhs = REG_ARG_3;
2362 emit_pre_pop_reg_flexible(emit, &vtype_rhs, &reg_rhs, REG_RET, REG_ARG_2);
2363 emit_pre_pop_reg(emit, &vtype_lhs, REG_ARG_2);
2364
2365 #if !(N_X64 || N_X86)
2366 if (op == MP_BINARY_OP_LSHIFT || op == MP_BINARY_OP_RSHIFT) {
2367 if (op == MP_BINARY_OP_LSHIFT) {
2368 ASM_LSL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2369 } else {
2370 if (vtype_lhs == VTYPE_UINT) {
2371 ASM_LSR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2372 } else {
2373 ASM_ASR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2374 }
2375 }
2376 emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2377 return;
2378 }
2379 #endif
2380
2381 if (op == MP_BINARY_OP_OR) {
2382 ASM_OR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2383 emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2384 } else if (op == MP_BINARY_OP_XOR) {
2385 ASM_XOR_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2386 emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2387 } else if (op == MP_BINARY_OP_AND) {
2388 ASM_AND_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2389 emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2390 } else if (op == MP_BINARY_OP_ADD) {
2391 ASM_ADD_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2392 emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2393 } else if (op == MP_BINARY_OP_SUBTRACT) {
2394 ASM_SUB_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2395 emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2396 } else if (op == MP_BINARY_OP_MULTIPLY) {
2397 ASM_MUL_REG_REG(emit->as, REG_ARG_2, reg_rhs);
2398 emit_post_push_reg(emit, vtype_lhs, REG_ARG_2);
2399 } else if (MP_BINARY_OP_LESS <= op && op <= MP_BINARY_OP_NOT_EQUAL) {
2400 // comparison ops are (in enum order):
2401 // MP_BINARY_OP_LESS
2402 // MP_BINARY_OP_MORE
2403 // MP_BINARY_OP_EQUAL
2404 // MP_BINARY_OP_LESS_EQUAL
2405 // MP_BINARY_OP_MORE_EQUAL
2406 // MP_BINARY_OP_NOT_EQUAL
2407
2408 if (vtype_lhs != vtype_rhs) {
2409 EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("comparison of int and uint"));
2410 }
2411
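            // Index into the condition-code tables below: entries 0-5 are the
            // unsigned codes, 6-11 the signed ones, in the enum order listed above.
            // E.g. a signed VTYPE_INT '<' maps to index 6 (the JL/LT-style codes).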
2412 size_t op_idx = op - MP_BINARY_OP_LESS + (vtype_lhs == VTYPE_UINT ? 0 : 6);
2413
2414 need_reg_single(emit, REG_RET, 0);
2415 #if N_X64
2416 asm_x64_xor_r64_r64(emit->as, REG_RET, REG_RET);
2417 asm_x64_cmp_r64_with_r64(emit->as, reg_rhs, REG_ARG_2);
2418 static byte ops[6 + 6] = {
2419 // unsigned
2420 ASM_X64_CC_JB,
2421 ASM_X64_CC_JA,
2422 ASM_X64_CC_JE,
2423 ASM_X64_CC_JBE,
2424 ASM_X64_CC_JAE,
2425 ASM_X64_CC_JNE,
2426 // signed
2427 ASM_X64_CC_JL,
2428 ASM_X64_CC_JG,
2429 ASM_X64_CC_JE,
2430 ASM_X64_CC_JLE,
2431 ASM_X64_CC_JGE,
2432 ASM_X64_CC_JNE,
2433 };
2434 asm_x64_setcc_r8(emit->as, ops[op_idx], REG_RET);
2435 #elif N_X86
2436 asm_x86_xor_r32_r32(emit->as, REG_RET, REG_RET);
2437 asm_x86_cmp_r32_with_r32(emit->as, reg_rhs, REG_ARG_2);
2438 static byte ops[6 + 6] = {
2439 // unsigned
2440 ASM_X86_CC_JB,
2441 ASM_X86_CC_JA,
2442 ASM_X86_CC_JE,
2443 ASM_X86_CC_JBE,
2444 ASM_X86_CC_JAE,
2445 ASM_X86_CC_JNE,
2446 // signed
2447 ASM_X86_CC_JL,
2448 ASM_X86_CC_JG,
2449 ASM_X86_CC_JE,
2450 ASM_X86_CC_JLE,
2451 ASM_X86_CC_JGE,
2452 ASM_X86_CC_JNE,
2453 };
2454 asm_x86_setcc_r8(emit->as, ops[op_idx], REG_RET);
2455 #elif N_THUMB
2456 asm_thumb_cmp_rlo_rlo(emit->as, REG_ARG_2, reg_rhs);
2457 #if MICROPY_EMIT_THUMB_ARMV7M
2458 static uint16_t ops[6 + 6] = {
2459 // unsigned
2460 ASM_THUMB_OP_ITE_CC,
2461 ASM_THUMB_OP_ITE_HI,
2462 ASM_THUMB_OP_ITE_EQ,
2463 ASM_THUMB_OP_ITE_LS,
2464 ASM_THUMB_OP_ITE_CS,
2465 ASM_THUMB_OP_ITE_NE,
2466 // signed
2467 ASM_THUMB_OP_ITE_LT,
2468 ASM_THUMB_OP_ITE_GT,
2469 ASM_THUMB_OP_ITE_EQ,
2470 ASM_THUMB_OP_ITE_LE,
2471 ASM_THUMB_OP_ITE_GE,
2472 ASM_THUMB_OP_ITE_NE,
2473 };
2474 asm_thumb_op16(emit->as, ops[op_idx]);
2475 asm_thumb_mov_rlo_i8(emit->as, REG_RET, 1);
2476 asm_thumb_mov_rlo_i8(emit->as, REG_RET, 0);
2477 #else
2478 static uint16_t ops[6 + 6] = {
2479 // unsigned
2480 ASM_THUMB_CC_CC,
2481 ASM_THUMB_CC_HI,
2482 ASM_THUMB_CC_EQ,
2483 ASM_THUMB_CC_LS,
2484 ASM_THUMB_CC_CS,
2485 ASM_THUMB_CC_NE,
2486 // signed
2487 ASM_THUMB_CC_LT,
2488 ASM_THUMB_CC_GT,
2489 ASM_THUMB_CC_EQ,
2490 ASM_THUMB_CC_LE,
2491 ASM_THUMB_CC_GE,
2492 ASM_THUMB_CC_NE,
2493 };
2494 asm_thumb_bcc_rel9(emit->as, ops[op_idx], 6);
2495 asm_thumb_mov_rlo_i8(emit->as, REG_RET, 0);
2496 asm_thumb_b_rel12(emit->as, 4);
2497 asm_thumb_mov_rlo_i8(emit->as, REG_RET, 1);
2498 #endif
2499 #elif N_ARM
2500 asm_arm_cmp_reg_reg(emit->as, REG_ARG_2, reg_rhs);
2501 static uint ccs[6 + 6] = {
2502 // unsigned
2503 ASM_ARM_CC_CC,
2504 ASM_ARM_CC_HI,
2505 ASM_ARM_CC_EQ,
2506 ASM_ARM_CC_LS,
2507 ASM_ARM_CC_CS,
2508 ASM_ARM_CC_NE,
2509 // signed
2510 ASM_ARM_CC_LT,
2511 ASM_ARM_CC_GT,
2512 ASM_ARM_CC_EQ,
2513 ASM_ARM_CC_LE,
2514 ASM_ARM_CC_GE,
2515 ASM_ARM_CC_NE,
2516 };
2517 asm_arm_setcc_reg(emit->as, REG_RET, ccs[op_idx]);
2518 #elif N_XTENSA || N_XTENSAWIN
2519 static uint8_t ccs[6 + 6] = {
2520 // unsigned
2521 ASM_XTENSA_CC_LTU,
2522 0x80 | ASM_XTENSA_CC_LTU, // for GTU we'll swap args
2523 ASM_XTENSA_CC_EQ,
2524 0x80 | ASM_XTENSA_CC_GEU, // for LEU we'll swap args
2525 ASM_XTENSA_CC_GEU,
2526 ASM_XTENSA_CC_NE,
2527 // signed
2528 ASM_XTENSA_CC_LT,
2529 0x80 | ASM_XTENSA_CC_LT, // for GT we'll swap args
2530 ASM_XTENSA_CC_EQ,
2531 0x80 | ASM_XTENSA_CC_GE, // for LE we'll swap args
2532 ASM_XTENSA_CC_GE,
2533 ASM_XTENSA_CC_NE,
2534 };
2535 uint8_t cc = ccs[op_idx];
2536 if ((cc & 0x80) == 0) {
2537 asm_xtensa_setcc_reg_reg_reg(emit->as, cc, REG_RET, REG_ARG_2, reg_rhs);
2538 } else {
2539 asm_xtensa_setcc_reg_reg_reg(emit->as, cc & ~0x80, REG_RET, reg_rhs, REG_ARG_2);
2540 }
2541 #else
2542 #error not implemented
2543 #endif
2544 emit_post_push_reg(emit, VTYPE_BOOL, REG_RET);
2545 } else {
2546 // TODO other ops not yet implemented
2547 adjust_stack(emit, 1);
2548 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2549 MP_ERROR_TEXT("binary op %q not implemented"), mp_binary_op_method_name[op]);
2550 }
2551 } else if (vtype_lhs == VTYPE_PYOBJ && vtype_rhs == VTYPE_PYOBJ) {
2552 emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_3, &vtype_lhs, REG_ARG_2);
2553 bool invert = false;
2554 if (op == MP_BINARY_OP_NOT_IN) {
2555 invert = true;
2556 op = MP_BINARY_OP_IN;
2557 } else if (op == MP_BINARY_OP_IS_NOT) {
2558 invert = true;
2559 op = MP_BINARY_OP_IS;
2560 }
2561 emit_call_with_imm_arg(emit, MP_F_BINARY_OP, op, REG_ARG_1);
2562 if (invert) {
2563 ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_RET);
2564 emit_call_with_imm_arg(emit, MP_F_UNARY_OP, MP_UNARY_OP_NOT, REG_ARG_1);
2565 }
2566 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2567 } else {
2568 adjust_stack(emit, -1);
2569 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2570 MP_ERROR_TEXT("can't do binary op between '%q' and '%q'"),
2571 vtype_to_qstr(vtype_lhs), vtype_to_qstr(vtype_rhs));
2572 }
2573}
2574
2575#if MICROPY_PY_BUILTINS_SLICE
2576STATIC void emit_native_build_slice(emit_t *emit, mp_uint_t n_args);
2577#endif
2578
2579STATIC void emit_native_build(emit_t *emit, mp_uint_t n_args, int kind) {
2580 // for viper: call runtime, with types of args
2581 // if wrapped in byte_array, or something, allocates memory and fills it
2582 MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_TUPLE == MP_F_BUILD_TUPLE);
2583 MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_LIST == MP_F_BUILD_LIST);
2584 MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_MAP == MP_F_BUILD_MAP);
2585 MP_STATIC_ASSERT(MP_F_BUILD_TUPLE + MP_EMIT_BUILD_SET == MP_F_BUILD_SET);
2586 #if MICROPY_PY_BUILTINS_SLICE
2587 if (kind == MP_EMIT_BUILD_SLICE) {
2588 emit_native_build_slice(emit, n_args);
2589 return;
2590 }
2591 #endif
2592 emit_native_pre(emit);
2593 if (kind == MP_EMIT_BUILD_TUPLE || kind == MP_EMIT_BUILD_LIST || kind == MP_EMIT_BUILD_SET) {
2594 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args); // pointer to items
2595 }
2596 emit_call_with_imm_arg(emit, MP_F_BUILD_TUPLE + kind, n_args, REG_ARG_1);
2597 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // new tuple/list/map/set
2598}
2599
2600STATIC void emit_native_store_map(emit_t *emit) {
2601 vtype_kind_t vtype_key, vtype_value, vtype_map;
2602 emit_pre_pop_reg_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3, &vtype_map, REG_ARG_1); // key, value, map
2603 assert(vtype_key == VTYPE_PYOBJ);
2604 assert(vtype_value == VTYPE_PYOBJ);
2605 assert(vtype_map == VTYPE_PYOBJ);
2606 emit_call(emit, MP_F_STORE_MAP);
2607 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // map
2608}
2609
2610#if MICROPY_PY_BUILTINS_SLICE
2611STATIC void emit_native_build_slice(emit_t *emit, mp_uint_t n_args) {
2612 DEBUG_printf("build_slice %d\n", n_args);
2613 if (n_args == 2) {
2614 vtype_kind_t vtype_start, vtype_stop;
2615 emit_pre_pop_reg_reg(emit, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop
2616 assert(vtype_start == VTYPE_PYOBJ);
2617 assert(vtype_stop == VTYPE_PYOBJ);
2618 emit_native_mov_reg_const(emit, REG_ARG_3, MP_F_CONST_NONE_OBJ); // arg3 = step
2619 } else {
2620 assert(n_args == 3);
2621 vtype_kind_t vtype_start, vtype_stop, vtype_step;
2622 emit_pre_pop_reg_reg_reg(emit, &vtype_step, REG_ARG_3, &vtype_stop, REG_ARG_2, &vtype_start, REG_ARG_1); // arg1 = start, arg2 = stop, arg3 = step
2623 assert(vtype_start == VTYPE_PYOBJ);
2624 assert(vtype_stop == VTYPE_PYOBJ);
2625 assert(vtype_step == VTYPE_PYOBJ);
2626 }
2627 emit_call(emit, MP_F_NEW_SLICE);
2628 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2629}
2630#endif
2631
2632STATIC void emit_native_store_comp(emit_t *emit, scope_kind_t kind, mp_uint_t collection_index) {
2633 mp_fun_kind_t f;
2634 if (kind == SCOPE_LIST_COMP) {
2635 vtype_kind_t vtype_item;
2636 emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
2637 assert(vtype_item == VTYPE_PYOBJ);
2638 f = MP_F_LIST_APPEND;
2639 #if MICROPY_PY_BUILTINS_SET
2640 } else if (kind == SCOPE_SET_COMP) {
2641 vtype_kind_t vtype_item;
2642 emit_pre_pop_reg(emit, &vtype_item, REG_ARG_2);
2643 assert(vtype_item == VTYPE_PYOBJ);
2644 f = MP_F_STORE_SET;
2645 #endif
2646 } else {
2647 // SCOPE_DICT_COMP
2648 vtype_kind_t vtype_key, vtype_value;
2649 emit_pre_pop_reg_reg(emit, &vtype_key, REG_ARG_2, &vtype_value, REG_ARG_3);
2650 assert(vtype_key == VTYPE_PYOBJ);
2651 assert(vtype_value == VTYPE_PYOBJ);
2652 f = MP_F_STORE_MAP;
2653 }
2654 vtype_kind_t vtype_collection;
2655 emit_access_stack(emit, collection_index, &vtype_collection, REG_ARG_1);
2656 assert(vtype_collection == VTYPE_PYOBJ);
2657 emit_call(emit, f);
2658 emit_post(emit);
2659}
2660
2661STATIC void emit_native_unpack_sequence(emit_t *emit, mp_uint_t n_args) {
2662 DEBUG_printf("unpack_sequence %d\n", n_args);
2663 vtype_kind_t vtype_base;
2664 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
2665 assert(vtype_base == VTYPE_PYOBJ);
2666 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_args); // arg3 = dest ptr
2667 emit_call_with_imm_arg(emit, MP_F_UNPACK_SEQUENCE, n_args, REG_ARG_2); // arg2 = n_args
2668}
2669
2670STATIC void emit_native_unpack_ex(emit_t *emit, mp_uint_t n_left, mp_uint_t n_right) {
2671 DEBUG_printf("unpack_ex %d %d\n", n_left, n_right);
2672 vtype_kind_t vtype_base;
2673 emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = seq
2674 assert(vtype_base == VTYPE_PYOBJ);
2675 emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, n_left + n_right + 1); // arg3 = dest ptr
    emit_call_with_imm_arg(emit, MP_F_UNPACK_EX, n_left | (n_right << 8), REG_ARG_2); // arg2 = n_left | (n_right << 8)
2677}
2678
2679STATIC void emit_native_make_function(emit_t *emit, scope_t *scope, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
2680 // call runtime, with type info for args, or don't support dict/default params, or only support Python objects for them
2681 emit_native_pre(emit);
2682 if (n_pos_defaults == 0 && n_kw_defaults == 0) {
2683 need_reg_all(emit);
2684 ASM_MOV_REG_IMM(emit->as, REG_ARG_2, (mp_uint_t)MP_OBJ_NULL);
2685 ASM_MOV_REG_IMM(emit->as, REG_ARG_3, (mp_uint_t)MP_OBJ_NULL);
2686 } else {
2687 vtype_kind_t vtype_def_tuple, vtype_def_dict;
2688 emit_pre_pop_reg_reg(emit, &vtype_def_dict, REG_ARG_3, &vtype_def_tuple, REG_ARG_2);
2689 assert(vtype_def_tuple == VTYPE_PYOBJ);
2690 assert(vtype_def_dict == VTYPE_PYOBJ);
2691 need_reg_all(emit);
2692 }
2693 emit_load_reg_with_raw_code(emit, REG_ARG_1, scope->raw_code);
2694 ASM_CALL_IND(emit->as, MP_F_MAKE_FUNCTION_FROM_RAW_CODE);
2695 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2696}
2697
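// The closed-over cells (and, if present, the default argument tuple/dict) are
// passed to the runtime as a block on the stack; arg2 packs the count of
// closed-over cells in its low byte, with bit 8 (0x100) set when the two
// default-argument items are also included in the block, which appears to be the
// convention expected by MP_F_MAKE_CLOSURE_FROM_RAW_CODE.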
2698STATIC void emit_native_make_closure(emit_t *emit, scope_t *scope, mp_uint_t n_closed_over, mp_uint_t n_pos_defaults, mp_uint_t n_kw_defaults) {
2699 emit_native_pre(emit);
2700 if (n_pos_defaults == 0 && n_kw_defaults == 0) {
2701 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_closed_over);
2702 ASM_MOV_REG_IMM(emit->as, REG_ARG_2, n_closed_over);
2703 } else {
2704 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_closed_over + 2);
2705 ASM_MOV_REG_IMM(emit->as, REG_ARG_2, 0x100 | n_closed_over);
2706 }
2707 emit_load_reg_with_raw_code(emit, REG_ARG_1, scope->raw_code);
2708 ASM_CALL_IND(emit->as, MP_F_MAKE_CLOSURE_FROM_RAW_CODE);
2709 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2710}
2711
2712STATIC void emit_native_call_function(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
2713 DEBUG_printf("call_function(n_pos=" UINT_FMT ", n_kw=" UINT_FMT ", star_flags=" UINT_FMT ")\n", n_positional, n_keyword, star_flags);
2714
2715 // TODO: in viper mode, call special runtime routine with type info for args,
2716 // and wanted type info for return, to remove need for boxing/unboxing
2717
2718 emit_native_pre(emit);
2719 vtype_kind_t vtype_fun = peek_vtype(emit, n_positional + 2 * n_keyword);
2720 if (vtype_fun == VTYPE_BUILTIN_CAST) {
2721 // casting operator
2722 assert(n_positional == 1 && n_keyword == 0);
2723 assert(!star_flags);
        vtype_kind_t vtype_cast = peek_stack(emit, 1)->data.u_imm;
        DEBUG_printf("  cast to %d\n", vtype_cast);
2726 switch (peek_vtype(emit, 0)) {
2727 case VTYPE_PYOBJ: {
2728 vtype_kind_t vtype;
2729 emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
2730 emit_pre_pop_discard(emit);
2731 emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, vtype_cast, REG_ARG_2); // arg2 = type
2732 emit_post_push_reg(emit, vtype_cast, REG_RET);
2733 break;
2734 }
2735 case VTYPE_BOOL:
2736 case VTYPE_INT:
2737 case VTYPE_UINT:
2738 case VTYPE_PTR:
2739 case VTYPE_PTR8:
2740 case VTYPE_PTR16:
2741 case VTYPE_PTR32:
2742 case VTYPE_PTR_NONE:
2743 emit_fold_stack_top(emit, REG_ARG_1);
2744 emit_post_top_set_vtype(emit, vtype_cast);
2745 break;
2746 default:
2747 // this can happen when casting a cast: int(int)
2748 mp_raise_NotImplementedError(MP_ERROR_TEXT("casting"));
2749 }
2750 } else {
2751 assert(vtype_fun == VTYPE_PYOBJ);
2752 if (star_flags) {
2753 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 3); // pointer to args
2754 emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 0, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
2755 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2756 } else {
2757 if (n_positional != 0 || n_keyword != 0) {
2758 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword); // pointer to args
2759 }
2760 emit_pre_pop_reg(emit, &vtype_fun, REG_ARG_1); // the function
2761 emit_call_with_imm_arg(emit, MP_F_NATIVE_CALL_FUNCTION_N_KW, n_positional | (n_keyword << 8), REG_ARG_2);
2762 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2763 }
2764 }
2765}
2766
2767STATIC void emit_native_call_method(emit_t *emit, mp_uint_t n_positional, mp_uint_t n_keyword, mp_uint_t star_flags) {
2768 if (star_flags) {
2769 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, n_positional + 2 * n_keyword + 4); // pointer to args
2770 emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW_VAR, 1, REG_ARG_1, n_positional | (n_keyword << 8), REG_ARG_2);
2771 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2772 } else {
2773 emit_native_pre(emit);
2774 emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 2 + n_positional + 2 * n_keyword); // pointer to items, including meth and self
2775 emit_call_with_2_imm_args(emit, MP_F_CALL_METHOD_N_KW, n_positional, REG_ARG_1, n_keyword, REG_ARG_2);
2776 emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
2777 }
2778}
2779
2780STATIC void emit_native_return_value(emit_t *emit) {
2781 DEBUG_printf("return_value\n");
2782
2783 if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
2784 // Save pointer to current stack position for caller to access return value
2785 emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
2786 emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0);
2787
2788 // Put return type in return value slot
2789 ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_NORMAL);
2790 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);
2791
2792 // Do the unwinding jump to get to the return handler
2793 emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size);
2794 emit->last_emit_was_return_value = true;
2795 return;
2796 }
2797
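    // For viper functions the return type comes from the annotation (stored in the
    // scope flags), and the value being returned must already have that type, e.g.
    // roughly:
    //     @micropython.viper
    //     def f() -> int:
    //         return 1
    // A non-object return value is boxed to a Python object before handing it back
    // to the caller.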
2798 if (emit->do_viper_types) {
2799 vtype_kind_t return_vtype = emit->scope->scope_flags >> MP_SCOPE_FLAG_VIPERRET_POS;
2800 if (peek_vtype(emit, 0) == VTYPE_PTR_NONE) {
2801 emit_pre_pop_discard(emit);
2802 if (return_vtype == VTYPE_PYOBJ) {
2803 emit_native_mov_reg_const(emit, REG_PARENT_RET, MP_F_CONST_NONE_OBJ);
2804 } else {
2805 ASM_MOV_REG_IMM(emit->as, REG_ARG_1, 0);
2806 }
2807 } else {
2808 vtype_kind_t vtype;
2809 emit_pre_pop_reg(emit, &vtype, return_vtype == VTYPE_PYOBJ ? REG_PARENT_RET : REG_ARG_1);
2810 if (vtype != return_vtype) {
2811 EMIT_NATIVE_VIPER_TYPE_ERROR(emit,
2812 MP_ERROR_TEXT("return expected '%q' but got '%q'"),
2813 vtype_to_qstr(return_vtype), vtype_to_qstr(vtype));
2814 }
2815 }
2816 if (return_vtype != VTYPE_PYOBJ) {
2817 emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, return_vtype, REG_ARG_2);
2818 #if REG_RET != REG_PARENT_RET
2819 ASM_MOV_REG_REG(emit->as, REG_PARENT_RET, REG_RET);
2820 #endif
2821 }
2822 } else {
2823 vtype_kind_t vtype;
2824 emit_pre_pop_reg(emit, &vtype, REG_PARENT_RET);
2825 assert(vtype == VTYPE_PYOBJ);
2826 }
2827 if (NEED_GLOBAL_EXC_HANDLER(emit)) {
2828 // Save return value for the global exception handler to use
2829 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_PARENT_RET);
2830 }
2831 emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size);
2832 emit->last_emit_was_return_value = true;
2833}
2834
2835STATIC void emit_native_raise_varargs(emit_t *emit, mp_uint_t n_args) {
2836 (void)n_args;
2837 assert(n_args == 1);
2838 vtype_kind_t vtype_exc;
2839 emit_pre_pop_reg(emit, &vtype_exc, REG_ARG_1); // arg1 = object to raise
2840 if (vtype_exc != VTYPE_PYOBJ) {
2841 EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("must raise an object"));
2842 }
2843 // TODO probably make this 1 call to the runtime (which could even call convert, native_raise(obj, type))
2844 emit_call(emit, MP_F_NATIVE_RAISE);
2845}
2846
2847STATIC void emit_native_yield(emit_t *emit, int kind) {
2848 // Note: 1 (yield) or 3 (yield from) labels are reserved for this function, starting at *emit->label_slot
2849
2850 if (emit->do_viper_types) {
2851 mp_raise_NotImplementedError(MP_ERROR_TEXT("native yield"));
2852 }
    emit->scope->scope_flags |= MP_SCOPE_FLAG_GENERATOR;

    need_stack_settled(emit);

    if (kind == MP_EMIT_YIELD_FROM) {

        // Top of yield-from loop, conceptually implementing:
        //     for item in generator:
        //         yield item

        // Jump to start of loop
        emit_native_jump(emit, *emit->label_slot + 2);

        // Label for top of loop
        emit_native_label_assign(emit, *emit->label_slot + 1);
    }

    // Save pointer to current stack position for caller to access yielded value
    emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
    emit_native_mov_state_reg(emit, OFFSETOF_CODE_STATE_SP, REG_TEMP0);

    // Put the return kind (MP_VM_RETURN_YIELD) in the return value slot
    ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_YIELD);
    ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);

    // Save re-entry PC
    ASM_MOV_REG_PCREL(emit->as, REG_TEMP0, *emit->label_slot);
    emit_native_mov_state_reg(emit, LOCAL_IDX_GEN_PC(emit), REG_TEMP0);

    // Jump to exit handler
    ASM_JUMP(emit->as, emit->exit_label);

    // Label re-entry point
    mp_asm_base_label_assign(&emit->as->base, *emit->label_slot);

    // Re-open any active exception handler
    if (emit->exc_stack_size > 0) {
        // Find innermost active exception handler, to restore as current handler
        exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
        for (; e >= emit->exc_stack; --e) {
            if (e->is_active) {
                // Found active handler, get its PC
                ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
                ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
                break;
            }
        }
    }

    emit_native_adjust_stack_size(emit, 1); // send_value

    if (kind == MP_EMIT_YIELD_VALUE) {
        // Check LOCAL_IDX_EXC_VAL for any injected value
        ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
        emit_call(emit, MP_F_NATIVE_RAISE);
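        // The raise helper only raises when a value is present (e.g. one
        // injected via gen.throw()); on a normal resume it returns and
        // execution continues here.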
    } else {
        // Label loop entry
        emit_native_label_assign(emit, *emit->label_slot + 2);

        // Get the next item from the delegate generator
        vtype_kind_t vtype;
        emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // send_value
        emit_access_stack(emit, 1, &vtype, REG_ARG_1); // generator
        ASM_MOV_REG_LOCAL(emit->as, REG_ARG_3, LOCAL_IDX_EXC_VAL(emit)); // throw_value
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_3);
        emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 1); // ret_value
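        // The stack slot just pushed carries throw_value into the call and
        // receives ret_value back; REG_ARG_3 is re-pointed at it so it acts as
        // an in/out parameter for the MP_F_NATIVE_YIELD_FROM helper.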
        emit_call(emit, MP_F_NATIVE_YIELD_FROM);

        // If the call returned non-zero then the delegate yielded a value, so loop round to yield it
        ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, *emit->label_slot + 1, true);

        // The delegate generator is exhausted: pop it and replace it with ret_value
        emit_native_adjust_stack_size(emit, 1); // ret_value
        emit_fold_stack_top(emit, REG_ARG_1);
    }
}

STATIC void emit_native_start_except_handler(emit_t *emit) {
    // Protected block has finished so leave the current exception handler
    emit_native_leave_exc_stack(emit, true);

    // Get and push nlr_buf.ret_val
    ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit));
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_TEMP0);
}

STATIC void emit_native_end_except_handler(emit_t *emit) {
    adjust_stack(emit, -1); // pop the exception (end_finally didn't use it)
}

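// The emitter method table for the native emitter.  These are positional
// initializers, so the entries must stay in the same order as the fields of
// emit_method_table_t (declared in py/emit.h).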
const emit_method_table_t EXPORT_FUN(method_table) = {
    #if MICROPY_DYNAMIC_COMPILER
    EXPORT_FUN(new),
    EXPORT_FUN(free),
    #endif

    emit_native_start_pass,
    emit_native_end_pass,
    emit_native_last_emit_was_return_value,
    emit_native_adjust_stack_size,
    emit_native_set_source_line,

    {
        emit_native_load_local,
        emit_native_load_global,
    },
    {
        emit_native_store_local,
        emit_native_store_global,
    },
    {
        emit_native_delete_local,
        emit_native_delete_global,
    },

    emit_native_label_assign,
    emit_native_import,
    emit_native_load_const_tok,
    emit_native_load_const_small_int,
    emit_native_load_const_str,
    emit_native_load_const_obj,
    emit_native_load_null,
    emit_native_load_method,
    emit_native_load_build_class,
    emit_native_subscr,
    emit_native_attr,
    emit_native_dup_top,
    emit_native_dup_top_two,
    emit_native_pop_top,
    emit_native_rot_two,
    emit_native_rot_three,
    emit_native_jump,
    emit_native_pop_jump_if,
    emit_native_jump_if_or_pop,
    emit_native_unwind_jump,
    emit_native_setup_block,
    emit_native_with_cleanup,
    emit_native_end_finally,
    emit_native_get_iter,
    emit_native_for_iter,
    emit_native_for_iter_end,
    emit_native_pop_except_jump,
    emit_native_unary_op,
    emit_native_binary_op,
    emit_native_build,
    emit_native_store_map,
    emit_native_store_comp,
    emit_native_unpack_sequence,
    emit_native_unpack_ex,
    emit_native_make_function,
    emit_native_make_closure,
    emit_native_call_function,
    emit_native_call_method,
    emit_native_return_value,
    emit_native_raise_varargs,
    emit_native_yield,

    emit_native_start_except_handler,
    emit_native_end_except_handler,
};

#endif