| 1 | /* |
| 2 | * This file is part of the MicroPython project, http://micropython.org/ |
| 3 | * |
| 4 | * The MIT License (MIT) |
| 5 | * |
| 6 | * Copyright (c) 2013-2020 Damien P. George |
| 7 | * |
| 8 | * Permission is hereby granted, free of charge, to any person obtaining a copy |
| 9 | * of this software and associated documentation files (the "Software"), to deal |
| 10 | * in the Software without restriction, including without limitation the rights |
| 11 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
| 12 | * copies of the Software, and to permit persons to whom the Software is |
| 13 | * furnished to do so, subject to the following conditions: |
| 14 | * |
| 15 | * The above copyright notice and this permission notice shall be included in |
| 16 | * all copies or substantial portions of the Software. |
| 17 | * |
| 18 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
| 19 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
| 20 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
| 21 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
| 22 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
| 23 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN |
| 24 | * THE SOFTWARE. |
| 25 | */ |
| 26 | |
| 27 | #include <stdbool.h> |
| 28 | #include <stdint.h> |
| 29 | #include <stdio.h> |
| 30 | #include <string.h> |
| 31 | #include <assert.h> |
| 32 | |
| 33 | #include "py/scope.h" |
| 34 | #include "py/emit.h" |
| 35 | #include "py/compile.h" |
| 36 | #include "py/runtime.h" |
| 37 | #include "py/asmbase.h" |
| 38 | #include "py/persistentcode.h" |
| 39 | |
| 40 | #if MICROPY_ENABLE_COMPILER |
| 41 | |
| 42 | // TODO need to mangle __attr names |
| 43 | |
| 44 | #define INVALID_LABEL (0xffff) |
| 45 | |
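| | // The parse-node kinds below are generated by including py/grammar.h twice |
| | // with different expansions of DEF_RULE/DEF_RULE_NC: the first pass creates |
| | // PN_* values only for rules that have a compile function, the second pass |
| | // creates values for the remaining rules.  Keeping the compilable rules first |
| | // lets their PN_* values be used directly to index the table of compile |
| | // functions defined elsewhere in the compiler. |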
| 46 | typedef enum { |
| 47 | // define rules with a compile function |
| 48 | #define DEF_RULE(rule, comp, kind, ...) PN_##rule, |
| 49 | #define DEF_RULE_NC(rule, kind, ...) |
| 50 | #include "py/grammar.h" |
| 51 | #undef DEF_RULE |
| 52 | #undef DEF_RULE_NC |
| 53 | PN_const_object, // special node for a constant, generic Python object |
| 54 | // define rules without a compile function |
| 55 | #define DEF_RULE(rule, comp, kind, ...) |
| 56 | #define DEF_RULE_NC(rule, kind, ...) PN_##rule, |
| 57 | #include "py/grammar.h" |
| 58 | #undef DEF_RULE |
| 59 | #undef DEF_RULE_NC |
| 60 | } pn_kind_t; |
| 61 | |
| 62 | #define NEED_METHOD_TABLE MICROPY_EMIT_NATIVE |
| 63 | |
| 64 | #if NEED_METHOD_TABLE |
| 65 | |
| 66 | // we need a method table to do the lookup for the emitter functions |
| 67 | #define EMIT(fun) (comp->emit_method_table->fun(comp->emit)) |
| 68 | #define EMIT_ARG(fun, ...) (comp->emit_method_table->fun(comp->emit, __VA_ARGS__)) |
| 69 | #define EMIT_LOAD_FAST(qst, local_num) (comp->emit_method_table->load_id.local(comp->emit, qst, local_num, MP_EMIT_IDOP_LOCAL_FAST)) |
| 70 | #define EMIT_LOAD_GLOBAL(qst) (comp->emit_method_table->load_id.global(comp->emit, qst, MP_EMIT_IDOP_GLOBAL_GLOBAL)) |
| 71 | |
| 72 | #else |
| 73 | |
| 74 | // if we only have the bytecode emitter enabled then we can do a direct call to the functions |
| 75 | #define EMIT(fun) (mp_emit_bc_##fun(comp->emit)) |
| 76 | #define EMIT_ARG(fun, ...) (mp_emit_bc_##fun(comp->emit, __VA_ARGS__)) |
| 77 | #define EMIT_LOAD_FAST(qst, local_num) (mp_emit_bc_load_local(comp->emit, qst, local_num, MP_EMIT_IDOP_LOCAL_FAST)) |
| 78 | #define EMIT_LOAD_GLOBAL(qst) (mp_emit_bc_load_global(comp->emit, qst, MP_EMIT_IDOP_GLOBAL_GLOBAL)) |
| 79 | |
| 80 | #endif |
| 81 | |
| 82 | #if MICROPY_EMIT_NATIVE && MICROPY_DYNAMIC_COMPILER |
| 83 | |
| 84 | #define NATIVE_EMITTER(f) emit_native_table[mp_dynamic_compiler.native_arch]->emit_##f |
| 85 | #define NATIVE_EMITTER_TABLE emit_native_table[mp_dynamic_compiler.native_arch] |
| 86 | |
| 87 | STATIC const emit_method_table_t *emit_native_table[] = { |
| 88 | NULL, |
| 89 | &emit_native_x86_method_table, |
| 90 | &emit_native_x64_method_table, |
| 91 | &emit_native_arm_method_table, |
| 92 | &emit_native_thumb_method_table, |
| 93 | &emit_native_thumb_method_table, |
| 94 | &emit_native_thumb_method_table, |
| 95 | &emit_native_thumb_method_table, |
| 96 | &emit_native_thumb_method_table, |
| 97 | &emit_native_xtensa_method_table, |
| 98 | &emit_native_xtensawin_method_table, |
| 99 | }; |
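| | // Note: this table is indexed by mp_dynamic_compiler.native_arch (the |
| | // MP_NATIVE_ARCH_* values); the repeated thumb entries cover the ARMv6-M and |
| | // ARMv7(E)-M variants, which all share the one thumb emitter. |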
| 100 | |
| 101 | #elif MICROPY_EMIT_NATIVE |
| 102 | // define a macro to access external native emitter |
| 103 | #if MICROPY_EMIT_X64 |
| 104 | #define NATIVE_EMITTER(f) emit_native_x64_##f |
| 105 | #elif MICROPY_EMIT_X86 |
| 106 | #define NATIVE_EMITTER(f) emit_native_x86_##f |
| 107 | #elif MICROPY_EMIT_THUMB |
| 108 | #define NATIVE_EMITTER(f) emit_native_thumb_##f |
| 109 | #elif MICROPY_EMIT_ARM |
| 110 | #define NATIVE_EMITTER(f) emit_native_arm_##f |
| 111 | #elif MICROPY_EMIT_XTENSA |
| 112 | #define NATIVE_EMITTER(f) emit_native_xtensa_##f |
| 113 | #elif MICROPY_EMIT_XTENSAWIN |
| 114 | #define NATIVE_EMITTER(f) emit_native_xtensawin_##f |
| 115 | #else |
| 116 | #error "unknown native emitter" |
| 117 | #endif |
| 118 | #define NATIVE_EMITTER_TABLE &NATIVE_EMITTER(method_table) |
| 119 | #endif |
| 120 | |
| 121 | #if MICROPY_EMIT_INLINE_ASM && MICROPY_DYNAMIC_COMPILER |
| 122 | |
| 123 | #define ASM_EMITTER(f) emit_asm_table[mp_dynamic_compiler.native_arch]->asm_##f |
| 124 | #define ASM_EMITTER_TABLE emit_asm_table[mp_dynamic_compiler.native_arch] |
| 125 | |
| 126 | STATIC const emit_inline_asm_method_table_t *emit_asm_table[] = { |
| 127 | NULL, |
| 128 | NULL, |
| 129 | NULL, |
| 130 | &emit_inline_thumb_method_table, |
| 131 | &emit_inline_thumb_method_table, |
| 132 | &emit_inline_thumb_method_table, |
| 133 | &emit_inline_thumb_method_table, |
| 134 | &emit_inline_thumb_method_table, |
| 135 | &emit_inline_thumb_method_table, |
| 136 | &emit_inline_xtensa_method_table, |
| 137 | NULL, |
| 138 | }; |
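| | // Indexed by mp_dynamic_compiler.native_arch as above; NULL entries are |
| | // architectures without inline-assembler support. |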
| 139 | |
| 140 | #elif MICROPY_EMIT_INLINE_ASM |
| 141 | // define macros for inline assembler |
| 142 | #if MICROPY_EMIT_INLINE_THUMB |
| 143 | #define ASM_DECORATOR_QSTR MP_QSTR_asm_thumb |
| 144 | #define ASM_EMITTER(f) emit_inline_thumb_##f |
| 145 | #elif MICROPY_EMIT_INLINE_XTENSA |
| 146 | #define ASM_DECORATOR_QSTR MP_QSTR_asm_xtensa |
| 147 | #define ASM_EMITTER(f) emit_inline_xtensa_##f |
| 148 | #else |
| 149 | #error "unknown asm emitter" |
| 150 | #endif |
| 151 | #define ASM_EMITTER_TABLE &ASM_EMITTER(method_table) |
| 152 | #endif |
| 153 | |
| 154 | #define EMIT_INLINE_ASM(fun) (comp->emit_inline_asm_method_table->fun(comp->emit_inline_asm)) |
| 155 | #define EMIT_INLINE_ASM_ARG(fun, ...) (comp->emit_inline_asm_method_table->fun(comp->emit_inline_asm, __VA_ARGS__)) |
| 156 | |
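| | // The compiler visits each scope in multiple passes: a scope pass |
| | // (MP_PASS_SCOPE) that only collects identifiers, followed by the |
| | // code-emitting passes (tracked by the pass field below). |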
| 157 | // elements in this struct are ordered to make it compact |
| 158 | typedef struct _compiler_t { |
| 159 | qstr source_file; |
| 160 | |
| 161 | uint8_t is_repl; |
| 162 | uint8_t pass; // holds enum type pass_kind_t |
| 163 | uint8_t have_star; |
| 164 | |
| 165 | // try to keep compiler clean from nlr |
| 166 | mp_obj_t compile_error; // set to an exception object if there's an error |
| 167 | size_t compile_error_line; // set to best guess of line of error |
| 168 | |
| 169 | uint next_label; |
| 170 | |
| 171 | uint16_t num_dict_params; |
| 172 | uint16_t num_default_params; |
| 173 | |
| 174 | uint16_t break_label; // highest bit set indicates we are breaking out of a for loop |
| 175 | uint16_t continue_label; |
| 176 | uint16_t cur_except_level; // increased for SETUP_EXCEPT, SETUP_FINALLY; decreased for POP_BLOCK, POP_EXCEPT |
| 177 | uint16_t break_continue_except_level; |
| 178 | |
| 179 | scope_t *scope_head; |
| 180 | scope_t *scope_cur; |
| 181 | |
| 182 | emit_t *emit; // current emitter |
| 183 | #if NEED_METHOD_TABLE |
| 184 | const emit_method_table_t *emit_method_table; // current emit method table |
| 185 | #endif |
| 186 | |
| 187 | #if MICROPY_EMIT_INLINE_ASM |
| 188 | emit_inline_asm_t *emit_inline_asm; // current emitter for inline asm |
| 189 | const emit_inline_asm_method_table_t *emit_inline_asm_method_table; // current emit method table for inline asm |
| 190 | #endif |
| 191 | } compiler_t; |
| 192 | |
| 193 | STATIC void compile_error_set_line(compiler_t *comp, mp_parse_node_t pn) { |
| 194 | // if the line of the error is unknown then try to update it from the pn |
| 195 | if (comp->compile_error_line == 0 && MP_PARSE_NODE_IS_STRUCT(pn)) { |
| 196 | comp->compile_error_line = ((mp_parse_node_struct_t *)pn)->source_line; |
| 197 | } |
| 198 | } |
| 199 | |
| 200 | STATIC void compile_syntax_error(compiler_t *comp, mp_parse_node_t pn, mp_rom_error_text_t msg) { |
| 201 | // only register the error if there has been no other error |
| 202 | if (comp->compile_error == MP_OBJ_NULL) { |
| 203 | comp->compile_error = mp_obj_new_exception_msg(&mp_type_SyntaxError, msg); |
| 204 | compile_error_set_line(comp, pn); |
| 205 | } |
| 206 | } |
| 207 | |
| 208 | STATIC void compile_trailer_paren_helper(compiler_t *comp, mp_parse_node_t pn_arglist, bool is_method_call, int n_positional_extra); |
| 209 | STATIC void compile_comprehension(compiler_t *comp, mp_parse_node_struct_t *pns, scope_kind_t kind); |
| 210 | STATIC void compile_atom_brace_helper(compiler_t *comp, mp_parse_node_struct_t *pns, bool create_map); |
| 211 | STATIC void compile_node(compiler_t *comp, mp_parse_node_t pn); |
| 212 | |
| 213 | STATIC uint comp_next_label(compiler_t *comp) { |
| 214 | return comp->next_label++; |
| 215 | } |
| 216 | |
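| | // The native emitters create extra jump targets internally for some |
| | // operations (eg end_finally), so when compiling for a native target the |
| | // label counter must skip past them to keep label numbers consistent between |
| | // passes; for the bytecode emitter this is a no-op. |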
| 217 | #if MICROPY_EMIT_NATIVE |
| 218 | STATIC void reserve_labels_for_native(compiler_t *comp, int n) { |
| 219 | if (comp->scope_cur->emit_options != MP_EMIT_OPT_BYTECODE) { |
| 220 | comp->next_label += n; |
| 221 | } |
| 222 | } |
| 223 | #else |
| 224 | #define reserve_labels_for_native(comp, n) |
| 225 | #endif |
| 226 | |
| 227 | STATIC void compile_increase_except_level(compiler_t *comp, uint label, int kind) { |
| 228 | EMIT_ARG(setup_block, label, kind); |
| 229 | comp->cur_except_level += 1; |
| 230 | if (comp->cur_except_level > comp->scope_cur->exc_stack_size) { |
| 231 | comp->scope_cur->exc_stack_size = comp->cur_except_level; |
| 232 | } |
| 233 | } |
| 234 | |
| 235 | STATIC void compile_decrease_except_level(compiler_t *comp) { |
| 236 | assert(comp->cur_except_level > 0); |
| 237 | comp->cur_except_level -= 1; |
| 238 | EMIT(end_finally); |
| 239 | reserve_labels_for_native(comp, 1); |
| 240 | } |
| 241 | |
| 242 | STATIC scope_t *scope_new_and_link(compiler_t *comp, scope_kind_t kind, mp_parse_node_t pn, uint emit_options) { |
| 243 | scope_t *scope = scope_new(kind, pn, comp->source_file, emit_options); |
| 244 | scope->parent = comp->scope_cur; |
| 245 | scope->next = NULL; |
| 246 | if (comp->scope_head == NULL) { |
| 247 | comp->scope_head = scope; |
| 248 | } else { |
| 249 | scope_t *s = comp->scope_head; |
| 250 | while (s->next != NULL) { |
| 251 | s = s->next; |
| 252 | } |
| 253 | s->next = scope; |
| 254 | } |
| 255 | return scope; |
| 256 | } |
| 257 | |
| 258 | typedef void (*apply_list_fun_t)(compiler_t *comp, mp_parse_node_t pn); |
| 259 | |
| 260 | STATIC void apply_to_single_or_list(compiler_t *comp, mp_parse_node_t pn, pn_kind_t pn_list_kind, apply_list_fun_t f) { |
| 261 | if (MP_PARSE_NODE_IS_STRUCT_KIND(pn, pn_list_kind)) { |
| 262 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn; |
| 263 | int num_nodes = MP_PARSE_NODE_STRUCT_NUM_NODES(pns); |
| 264 | for (int i = 0; i < num_nodes; i++) { |
| 265 | f(comp, pns->nodes[i]); |
| 266 | } |
| 267 | } else if (!MP_PARSE_NODE_IS_NULL(pn)) { |
| 268 | f(comp, pn); |
| 269 | } |
| 270 | } |
| 271 | |
| 272 | STATIC void compile_generic_all_nodes(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 273 | int num_nodes = MP_PARSE_NODE_STRUCT_NUM_NODES(pns); |
| 274 | for (int i = 0; i < num_nodes; i++) { |
| 275 | compile_node(comp, pns->nodes[i]); |
| 276 | if (comp->compile_error != MP_OBJ_NULL) { |
| 277 | // add line info for the error in case it didn't have a line number |
| 278 | compile_error_set_line(comp, pns->nodes[i]); |
| 279 | return; |
| 280 | } |
| 281 | } |
| 282 | } |
| 283 | |
| 284 | STATIC void compile_load_id(compiler_t *comp, qstr qst) { |
| 285 | if (comp->pass == MP_PASS_SCOPE) { |
| 286 | mp_emit_common_get_id_for_load(comp->scope_cur, qst); |
| 287 | } else { |
| 288 | #if NEED_METHOD_TABLE |
| 289 | mp_emit_common_id_op(comp->emit, &comp->emit_method_table->load_id, comp->scope_cur, qst); |
| 290 | #else |
| 291 | mp_emit_common_id_op(comp->emit, &mp_emit_bc_method_table_load_id_ops, comp->scope_cur, qst); |
| 292 | #endif |
| 293 | } |
| 294 | } |
| 295 | |
| 296 | STATIC void compile_store_id(compiler_t *comp, qstr qst) { |
| 297 | if (comp->pass == MP_PASS_SCOPE) { |
| 298 | mp_emit_common_get_id_for_modification(comp->scope_cur, qst); |
| 299 | } else { |
| 300 | #if NEED_METHOD_TABLE |
| 301 | mp_emit_common_id_op(comp->emit, &comp->emit_method_table->store_id, comp->scope_cur, qst); |
| 302 | #else |
| 303 | mp_emit_common_id_op(comp->emit, &mp_emit_bc_method_table_store_id_ops, comp->scope_cur, qst); |
| 304 | #endif |
| 305 | } |
| 306 | } |
| 307 | |
| 308 | STATIC void compile_delete_id(compiler_t *comp, qstr qst) { |
| 309 | if (comp->pass == MP_PASS_SCOPE) { |
| 310 | mp_emit_common_get_id_for_modification(comp->scope_cur, qst); |
| 311 | } else { |
| 312 | #if NEED_METHOD_TABLE |
| 313 | mp_emit_common_id_op(comp->emit, &comp->emit_method_table->delete_id, comp->scope_cur, qst); |
| 314 | #else |
| 315 | mp_emit_common_id_op(comp->emit, &mp_emit_bc_method_table_delete_id_ops, comp->scope_cur, qst); |
| 316 | #endif |
| 317 | } |
| 318 | } |
| 319 | |
| 320 | STATIC void c_tuple(compiler_t *comp, mp_parse_node_t pn, mp_parse_node_struct_t *pns_list) { |
| 321 | int total = 0; |
| 322 | if (!MP_PARSE_NODE_IS_NULL(pn)) { |
| 323 | compile_node(comp, pn); |
| 324 | total += 1; |
| 325 | } |
| 326 | if (pns_list != NULL) { |
| 327 | int n = MP_PARSE_NODE_STRUCT_NUM_NODES(pns_list); |
| 328 | for (int i = 0; i < n; i++) { |
| 329 | compile_node(comp, pns_list->nodes[i]); |
| 330 | } |
| 331 | total += n; |
| 332 | } |
| 333 | EMIT_ARG(build, total, MP_EMIT_BUILD_TUPLE); |
| 334 | } |
| 335 | |
| 336 | STATIC void compile_generic_tuple(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 337 | // a simple tuple expression |
| 338 | c_tuple(comp, MP_PARSE_NODE_NULL, pns); |
| 339 | } |
| 340 | |
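| | // Compile the condition pn so that execution jumps to label exactly when the |
| | // truth value of pn equals jump_if.  and/or chains and "not" are flattened |
| | // into conditional jumps, so eg "if a or b:" tests a and b in turn without |
| | // constructing an intermediate boolean object. |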
| 341 | STATIC void c_if_cond(compiler_t *comp, mp_parse_node_t pn, bool jump_if, int label) { |
| 342 | if (mp_parse_node_is_const_false(pn)) { |
| 343 | if (jump_if == false) { |
| 344 | EMIT_ARG(jump, label); |
| 345 | } |
| 346 | return; |
| 347 | } else if (mp_parse_node_is_const_true(pn)) { |
| 348 | if (jump_if == true) { |
| 349 | EMIT_ARG(jump, label); |
| 350 | } |
| 351 | return; |
| 352 | } else if (MP_PARSE_NODE_IS_STRUCT(pn)) { |
| 353 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn; |
| 354 | int n = MP_PARSE_NODE_STRUCT_NUM_NODES(pns); |
| 355 | if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_or_test) { |
| 356 | if (jump_if == false) { |
| 357 | and_or_logic1:; |
| 358 | uint label2 = comp_next_label(comp); |
| 359 | for (int i = 0; i < n - 1; i++) { |
| 360 | c_if_cond(comp, pns->nodes[i], !jump_if, label2); |
| 361 | } |
| 362 | c_if_cond(comp, pns->nodes[n - 1], jump_if, label); |
| 363 | EMIT_ARG(label_assign, label2); |
| 364 | } else { |
| 365 | and_or_logic2: |
| 366 | for (int i = 0; i < n; i++) { |
| 367 | c_if_cond(comp, pns->nodes[i], jump_if, label); |
| 368 | } |
| 369 | } |
| 370 | return; |
| 371 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_and_test) { |
| 372 | if (jump_if == false) { |
| 373 | goto and_or_logic2; |
| 374 | } else { |
| 375 | goto and_or_logic1; |
| 376 | } |
| 377 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_not_test_2) { |
| 378 | c_if_cond(comp, pns->nodes[0], !jump_if, label); |
| 379 | return; |
| 380 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_atom_paren) { |
| 381 | // cond is something in parenthesis |
| 382 | if (MP_PARSE_NODE_IS_NULL(pns->nodes[0])) { |
| 383 | // empty tuple, acts as false for the condition |
| 384 | if (jump_if == false) { |
| 385 | EMIT_ARG(jump, label); |
| 386 | } |
| 387 | } else { |
| 388 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_testlist_comp)); |
| 389 | // non-empty tuple, acts as true for the condition |
| 390 | if (jump_if == true) { |
| 391 | EMIT_ARG(jump, label); |
| 392 | } |
| 393 | } |
| 394 | return; |
| 395 | } |
| 396 | } |
| 397 | |
| 398 | // nothing special, fall back to default compiling for node and jump |
| 399 | compile_node(comp, pn); |
| 400 | EMIT_ARG(pop_jump_if, jump_if, label); |
| 401 | } |
| 402 | |
| 403 | typedef enum { ASSIGN_STORE, ASSIGN_AUG_LOAD, ASSIGN_AUG_STORE } assign_kind_t; |
| 404 | STATIC void c_assign(compiler_t *comp, mp_parse_node_t pn, assign_kind_t kind); |
| 405 | |
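| | // For augmented assignment (eg "x[i] += 1") the target is visited twice: |
| | // first with ASSIGN_AUG_LOAD, which leaves the object (and index) on the |
| | // stack and loads the current value, then, after the binary op, with |
| | // ASSIGN_AUG_STORE, which rotates the result under the saved operands and |
| | // stores it back.  Plain assignment uses ASSIGN_STORE. |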
| 406 | STATIC void c_assign_atom_expr(compiler_t *comp, mp_parse_node_struct_t *pns, assign_kind_t assign_kind) { |
| 407 | if (assign_kind != ASSIGN_AUG_STORE) { |
| 408 | compile_node(comp, pns->nodes[0]); |
| 409 | } |
| 410 | |
| 411 | if (MP_PARSE_NODE_IS_STRUCT(pns->nodes[1])) { |
| 412 | mp_parse_node_struct_t *pns1 = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 413 | if (MP_PARSE_NODE_STRUCT_KIND(pns1) == PN_atom_expr_trailers) { |
| 414 | int n = MP_PARSE_NODE_STRUCT_NUM_NODES(pns1); |
| 415 | if (assign_kind != ASSIGN_AUG_STORE) { |
| 416 | for (int i = 0; i < n - 1; i++) { |
| 417 | compile_node(comp, pns1->nodes[i]); |
| 418 | } |
| 419 | } |
| 420 | assert(MP_PARSE_NODE_IS_STRUCT(pns1->nodes[n - 1])); |
| 421 | pns1 = (mp_parse_node_struct_t *)pns1->nodes[n - 1]; |
| 422 | } |
| 423 | if (MP_PARSE_NODE_STRUCT_KIND(pns1) == PN_trailer_bracket) { |
| 424 | if (assign_kind == ASSIGN_AUG_STORE) { |
| 425 | EMIT(rot_three); |
| 426 | EMIT_ARG(subscr, MP_EMIT_SUBSCR_STORE); |
| 427 | } else { |
| 428 | compile_node(comp, pns1->nodes[0]); |
| 429 | if (assign_kind == ASSIGN_AUG_LOAD) { |
| 430 | EMIT(dup_top_two); |
| 431 | EMIT_ARG(subscr, MP_EMIT_SUBSCR_LOAD); |
| 432 | } else { |
| 433 | EMIT_ARG(subscr, MP_EMIT_SUBSCR_STORE); |
| 434 | } |
| 435 | } |
| 436 | return; |
| 437 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns1) == PN_trailer_period) { |
| 438 | assert(MP_PARSE_NODE_IS_ID(pns1->nodes[0])); |
| 439 | if (assign_kind == ASSIGN_AUG_LOAD) { |
| 440 | EMIT(dup_top); |
| 441 | EMIT_ARG(attr, MP_PARSE_NODE_LEAF_ARG(pns1->nodes[0]), MP_EMIT_ATTR_LOAD); |
| 442 | } else { |
| 443 | if (assign_kind == ASSIGN_AUG_STORE) { |
| 444 | EMIT(rot_two); |
| 445 | } |
| 446 | EMIT_ARG(attr, MP_PARSE_NODE_LEAF_ARG(pns1->nodes[0]), MP_EMIT_ATTR_STORE); |
| 447 | } |
| 448 | return; |
| 449 | } |
| 450 | } |
| 451 | |
| 452 | compile_syntax_error(comp, (mp_parse_node_t)pns, MP_ERROR_TEXT("can't assign to expression")); |
| 453 | } |
| 454 | |
| 455 | // we need to allow for a caller passing in 1 initial node (node_head) followed by an array of nodes (nodes_tail) |
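| | // eg for "a, *b, c = x" (no head node) this emits unpack_ex(1, 1), ie one |
| | // required item before the starred target and one after it, then stores into |
| | // a, b and c in turn; without a star a plain unpack_sequence is emitted |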
| 456 | STATIC void c_assign_tuple(compiler_t *comp, mp_parse_node_t node_head, uint num_tail, mp_parse_node_t *nodes_tail) { |
| 457 | uint num_head = (node_head == MP_PARSE_NODE_NULL) ? 0 : 1; |
| 458 | |
| 459 | // look for star expression |
| 460 | uint have_star_index = -1; |
| 461 | if (num_head != 0 && MP_PARSE_NODE_IS_STRUCT_KIND(node_head, PN_star_expr)) { |
| 462 | EMIT_ARG(unpack_ex, 0, num_tail); |
| 463 | have_star_index = 0; |
| 464 | } |
| 465 | for (uint i = 0; i < num_tail; i++) { |
| 466 | if (MP_PARSE_NODE_IS_STRUCT_KIND(nodes_tail[i], PN_star_expr)) { |
| 467 | if (have_star_index == (uint)-1) { |
| 468 | EMIT_ARG(unpack_ex, num_head + i, num_tail - i - 1); |
| 469 | have_star_index = num_head + i; |
| 470 | } else { |
| 471 | compile_syntax_error(comp, nodes_tail[i], MP_ERROR_TEXT("multiple *x in assignment")); |
| 472 | return; |
| 473 | } |
| 474 | } |
| 475 | } |
| 476 | if (have_star_index == (uint)-1) { |
| 477 | EMIT_ARG(unpack_sequence, num_head + num_tail); |
| 478 | } |
| 479 | if (num_head != 0) { |
| 480 | if (0 == have_star_index) { |
| 481 | c_assign(comp, ((mp_parse_node_struct_t *)node_head)->nodes[0], ASSIGN_STORE); |
| 482 | } else { |
| 483 | c_assign(comp, node_head, ASSIGN_STORE); |
| 484 | } |
| 485 | } |
| 486 | for (uint i = 0; i < num_tail; i++) { |
| 487 | if (num_head + i == have_star_index) { |
| 488 | c_assign(comp, ((mp_parse_node_struct_t *)nodes_tail[i])->nodes[0], ASSIGN_STORE); |
| 489 | } else { |
| 490 | c_assign(comp, nodes_tail[i], ASSIGN_STORE); |
| 491 | } |
| 492 | } |
| 493 | } |
| 494 | |
| 495 | // assigns top of stack to pn |
| 496 | STATIC void c_assign(compiler_t *comp, mp_parse_node_t pn, assign_kind_t assign_kind) { |
| 497 | assert(!MP_PARSE_NODE_IS_NULL(pn)); |
| 498 | if (MP_PARSE_NODE_IS_LEAF(pn)) { |
| 499 | if (MP_PARSE_NODE_IS_ID(pn)) { |
| 500 | qstr arg = MP_PARSE_NODE_LEAF_ARG(pn); |
| 501 | switch (assign_kind) { |
| 502 | case ASSIGN_STORE: |
| 503 | case ASSIGN_AUG_STORE: |
| 504 | compile_store_id(comp, arg); |
| 505 | break; |
| 506 | case ASSIGN_AUG_LOAD: |
| 507 | default: |
| 508 | compile_load_id(comp, arg); |
| 509 | break; |
| 510 | } |
| 511 | } else { |
| 512 | goto cannot_assign; |
| 513 | } |
| 514 | } else { |
| 515 | // pn must be a struct |
| 516 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn; |
| 517 | switch (MP_PARSE_NODE_STRUCT_KIND(pns)) { |
| 518 | case PN_atom_expr_normal: |
| 519 | // lhs is an index or attribute |
| 520 | c_assign_atom_expr(comp, pns, assign_kind); |
| 521 | break; |
| 522 | |
| 523 | case PN_testlist_star_expr: |
| 524 | case PN_exprlist: |
| 525 | // lhs is a tuple |
| 526 | if (assign_kind != ASSIGN_STORE) { |
| 527 | goto cannot_assign; |
| 528 | } |
| 529 | c_assign_tuple(comp, MP_PARSE_NODE_NULL, MP_PARSE_NODE_STRUCT_NUM_NODES(pns), pns->nodes); |
| 530 | break; |
| 531 | |
| 532 | case PN_atom_paren: |
| 533 | // lhs is something in parenthesis |
| 534 | if (MP_PARSE_NODE_IS_NULL(pns->nodes[0])) { |
| 535 | // empty tuple |
| 536 | goto cannot_assign; |
| 537 | } else { |
| 538 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_testlist_comp)); |
| 539 | if (assign_kind != ASSIGN_STORE) { |
| 540 | goto cannot_assign; |
| 541 | } |
| 542 | pns = (mp_parse_node_struct_t *)pns->nodes[0]; |
| 543 | goto testlist_comp; |
| 544 | } |
| 545 | break; |
| 546 | |
| 547 | case PN_atom_bracket: |
| 548 | // lhs is something in brackets |
| 549 | if (assign_kind != ASSIGN_STORE) { |
| 550 | goto cannot_assign; |
| 551 | } |
| 552 | if (MP_PARSE_NODE_IS_NULL(pns->nodes[0])) { |
| 553 | // empty list, assignment allowed |
| 554 | c_assign_tuple(comp, MP_PARSE_NODE_NULL, 0, NULL); |
| 555 | } else if (MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_testlist_comp)) { |
| 556 | pns = (mp_parse_node_struct_t *)pns->nodes[0]; |
| 557 | goto testlist_comp; |
| 558 | } else { |
| 559 | // brackets around 1 item |
| 560 | c_assign_tuple(comp, pns->nodes[0], 0, NULL); |
| 561 | } |
| 562 | break; |
| 563 | |
| 564 | default: |
| 565 | goto cannot_assign; |
| 566 | } |
| 567 | return; |
| 568 | |
| 569 | testlist_comp: |
| 570 | // lhs is a sequence |
| 571 | if (MP_PARSE_NODE_IS_STRUCT(pns->nodes[1])) { |
| 572 | mp_parse_node_struct_t *pns2 = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 573 | if (MP_PARSE_NODE_STRUCT_KIND(pns2) == PN_testlist_comp_3b) { |
| 574 | // sequence of one item, with trailing comma |
| 575 | assert(MP_PARSE_NODE_IS_NULL(pns2->nodes[0])); |
| 576 | c_assign_tuple(comp, pns->nodes[0], 0, NULL); |
| 577 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns2) == PN_testlist_comp_3c) { |
| 578 | // sequence of many items |
| 579 | uint n = MP_PARSE_NODE_STRUCT_NUM_NODES(pns2); |
| 580 | c_assign_tuple(comp, pns->nodes[0], n, pns2->nodes); |
| 581 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns2) == PN_comp_for) { |
| 582 | goto cannot_assign; |
| 583 | } else { |
| 584 | // sequence with 2 items |
| 585 | goto sequence_with_2_items; |
| 586 | } |
| 587 | } else { |
| 588 | // sequence with 2 items |
| 589 | sequence_with_2_items: |
| 590 | c_assign_tuple(comp, MP_PARSE_NODE_NULL, 2, pns->nodes); |
| 591 | } |
| 592 | return; |
| 593 | } |
| 594 | return; |
| 595 | |
| 596 | cannot_assign: |
| 597 | compile_syntax_error(comp, pn, MP_ERROR_TEXT("can't assign to expression")); |
| 598 | } |
| 599 | |
| 600 | // stuff for lambda and comprehensions and generators: |
| 601 | // if n_pos_defaults > 0 then there is a tuple on the stack with the positional defaults |
| 602 | // if n_kw_defaults > 0 then there is a dictionary on the stack with the keyword defaults |
| 603 | // if both exist, the dictionary is above the tuple (ie the first pop gets the dictionary) |
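| | // eg (illustrative) for "lambda a=1, *, b=2: 0" the stack at this point |
| | // holds the tuple (1,) with the dict {'b': 2} above it, and this function is |
| | // called with n_pos_defaults=1, n_kw_defaults=1 |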
| 604 | STATIC void close_over_variables_etc(compiler_t *comp, scope_t *this_scope, int n_pos_defaults, int n_kw_defaults) { |
| 605 | assert(n_pos_defaults >= 0); |
| 606 | assert(n_kw_defaults >= 0); |
| 607 | |
| 608 | // set flags |
| 609 | if (n_kw_defaults > 0) { |
| 610 | this_scope->scope_flags |= MP_SCOPE_FLAG_DEFKWARGS; |
| 611 | } |
| 612 | this_scope->num_def_pos_args = n_pos_defaults; |
| 613 | |
| 614 | #if MICROPY_EMIT_NATIVE |
| 615 | // When creating a function/closure it will take a reference to the current globals |
| 616 | comp->scope_cur->scope_flags |= MP_SCOPE_FLAG_REFGLOBALS | MP_SCOPE_FLAG_HASCONSTS; |
| 617 | #endif |
| 618 | |
| 619 | // make closed over variables, if any |
| 620 | // ensure they are closed over in the order defined in the outer scope (mainly to agree with CPython) |
| 621 | int nfree = 0; |
| 622 | if (comp->scope_cur->kind != SCOPE_MODULE) { |
| 623 | for (int i = 0; i < comp->scope_cur->id_info_len; i++) { |
| 624 | id_info_t *id = &comp->scope_cur->id_info[i]; |
| 625 | if (id->kind == ID_INFO_KIND_CELL || id->kind == ID_INFO_KIND_FREE) { |
| 626 | for (int j = 0; j < this_scope->id_info_len; j++) { |
| 627 | id_info_t *id2 = &this_scope->id_info[j]; |
| 628 | if (id2->kind == ID_INFO_KIND_FREE && id->qst == id2->qst) { |
| 629 | // in MicroPython we load closures using LOAD_FAST |
| 630 | EMIT_LOAD_FAST(id->qst, id->local_num); |
| 631 | nfree += 1; |
| 632 | } |
| 633 | } |
| 634 | } |
| 635 | } |
| 636 | } |
| 637 | |
| 638 | // make the function/closure |
| 639 | if (nfree == 0) { |
| 640 | EMIT_ARG(make_function, this_scope, n_pos_defaults, n_kw_defaults); |
| 641 | } else { |
| 642 | EMIT_ARG(make_closure, this_scope, nfree, n_pos_defaults, n_kw_defaults); |
| 643 | } |
| 644 | } |
| 645 | |
| 646 | STATIC void compile_funcdef_lambdef_param(compiler_t *comp, mp_parse_node_t pn) { |
| 647 | // For efficiency of the code below we extract the parse-node kind here |
| 648 | int pn_kind; |
| 649 | if (MP_PARSE_NODE_IS_ID(pn)) { |
| 650 | pn_kind = -1; |
| 651 | } else { |
| 652 | assert(MP_PARSE_NODE_IS_STRUCT(pn)); |
| 653 | pn_kind = MP_PARSE_NODE_STRUCT_KIND((mp_parse_node_struct_t *)pn); |
| 654 | } |
| 655 | |
| 656 | if (pn_kind == PN_typedargslist_star || pn_kind == PN_varargslist_star) { |
| 657 | comp->have_star = true; |
| 658 | /* don't need to distinguish bare from named star |
| 659 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t*)pn; |
| 660 | if (MP_PARSE_NODE_IS_NULL(pns->nodes[0])) { |
| 661 | // bare star |
| 662 | } else { |
| 663 | // named star |
| 664 | } |
| 665 | */ |
| 666 | |
| 667 | } else if (pn_kind == PN_typedargslist_dbl_star || pn_kind == PN_varargslist_dbl_star) { |
| 668 | // named double star |
| 669 | // TODO do we need to do anything with this? |
| 670 | |
| 671 | } else { |
| 672 | mp_parse_node_t pn_id; |
| 673 | mp_parse_node_t pn_equal; |
| 674 | if (pn_kind == -1) { |
| 675 | // this parameter is just an id |
| 676 | |
| 677 | pn_id = pn; |
| 678 | pn_equal = MP_PARSE_NODE_NULL; |
| 679 | |
| 680 | } else if (pn_kind == PN_typedargslist_name) { |
| 681 | // this parameter has a colon and/or equal specifier |
| 682 | |
| 683 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn; |
| 684 | pn_id = pns->nodes[0]; |
| 685 | // pn_colon = pns->nodes[1]; // unused |
| 686 | pn_equal = pns->nodes[2]; |
| 687 | |
| 688 | } else { |
| 689 | assert(pn_kind == PN_varargslist_name); // should be |
| 690 | // this parameter has an equal specifier |
| 691 | |
| 692 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn; |
| 693 | pn_id = pns->nodes[0]; |
| 694 | pn_equal = pns->nodes[1]; |
| 695 | } |
| 696 | |
| 697 | if (MP_PARSE_NODE_IS_NULL(pn_equal)) { |
| 698 | // this parameter does not have a default value |
| 699 | |
| 700 | // check for non-default parameters given after default parameters (allowed by parser, but not syntactically valid) |
| 701 | if (!comp->have_star && comp->num_default_params != 0) { |
| 702 | compile_syntax_error(comp, pn, MP_ERROR_TEXT("non-default argument follows default argument")); |
| 703 | return; |
| 704 | } |
| 705 | |
| 706 | } else { |
| 707 | // this parameter has a default value |
| 708 | // in CPython, None (and True, False?) as default parameters are loaded with LOAD_NAME; don't understand why |
| 709 | |
| 710 | if (comp->have_star) { |
| 711 | comp->num_dict_params += 1; |
| 712 | // in MicroPython we put the default dict parameters into a dictionary using the bytecode |
| 713 | if (comp->num_dict_params == 1) { |
| 714 | // in MicroPython we put the default positional parameters into a tuple using the bytecode |
| 715 | // we need to do this here before we start building the map for the default keywords |
| 716 | if (comp->num_default_params > 0) { |
| 717 | EMIT_ARG(build, comp->num_default_params, MP_EMIT_BUILD_TUPLE); |
| 718 | } else { |
| 719 | EMIT(load_null); // sentinel indicating empty default positional args |
| 720 | } |
| 721 | // first default dict param, so make the map |
| 722 | EMIT_ARG(build, 0, MP_EMIT_BUILD_MAP); |
| 723 | } |
| 724 | |
| 725 | // compile value then key, then store it to the dict |
| 726 | compile_node(comp, pn_equal); |
| 727 | EMIT_ARG(load_const_str, MP_PARSE_NODE_LEAF_ARG(pn_id)); |
| 728 | EMIT(store_map); |
| 729 | } else { |
| 730 | comp->num_default_params += 1; |
| 731 | compile_node(comp, pn_equal); |
| 732 | } |
| 733 | } |
| 734 | } |
| 735 | } |
| 736 | |
| 737 | STATIC void compile_funcdef_lambdef(compiler_t *comp, scope_t *scope, mp_parse_node_t pn_params, pn_kind_t pn_list_kind) { |
| 738 | // When we call compile_funcdef_lambdef_param below it can compile an arbitrary |
| 739 | // expression for default arguments, which may contain a lambda. The lambda will |
| 740 | // call here in a nested way, so we must save and restore the relevant state. |
| 741 | bool orig_have_star = comp->have_star; |
| 742 | uint16_t orig_num_dict_params = comp->num_dict_params; |
| 743 | uint16_t orig_num_default_params = comp->num_default_params; |
| 744 | |
| 745 | // compile default parameters |
| 746 | comp->have_star = false; |
| 747 | comp->num_dict_params = 0; |
| 748 | comp->num_default_params = 0; |
| 749 | apply_to_single_or_list(comp, pn_params, pn_list_kind, compile_funcdef_lambdef_param); |
| 750 | |
| 751 | if (comp->compile_error != MP_OBJ_NULL) { |
| 752 | return; |
| 753 | } |
| 754 | |
| 755 | // in MicroPython we put the default positional parameters into a tuple using the bytecode |
| 756 | // the default keyword args may have already made the tuple; if not, do it now |
| 757 | if (comp->num_default_params > 0 && comp->num_dict_params == 0) { |
| 758 | EMIT_ARG(build, comp->num_default_params, MP_EMIT_BUILD_TUPLE); |
| 759 | EMIT(load_null); // sentinel indicating empty default keyword args |
| 760 | } |
| 761 | |
| 762 | // make the function |
| 763 | close_over_variables_etc(comp, scope, comp->num_default_params, comp->num_dict_params); |
| 764 | |
| 765 | // restore state |
| 766 | comp->have_star = orig_have_star; |
| 767 | comp->num_dict_params = orig_num_dict_params; |
| 768 | comp->num_default_params = orig_num_default_params; |
| 769 | } |
| 770 | |
| 771 | // leaves function object on stack |
| 772 | // returns function name |
| 773 | STATIC qstr compile_funcdef_helper(compiler_t *comp, mp_parse_node_struct_t *pns, uint emit_options) { |
| 774 | if (comp->pass == MP_PASS_SCOPE) { |
| 775 | // create a new scope for this function |
| 776 | scope_t *s = scope_new_and_link(comp, SCOPE_FUNCTION, (mp_parse_node_t)pns, emit_options); |
| 777 | // store the function scope so the compiling function can use it at each pass |
| 778 | pns->nodes[4] = (mp_parse_node_t)s; |
| 779 | } |
| 780 | |
| 781 | // get the scope for this function |
| 782 | scope_t *fscope = (scope_t *)pns->nodes[4]; |
| 783 | |
| 784 | // compile the function definition |
| 785 | compile_funcdef_lambdef(comp, fscope, pns->nodes[1], PN_typedargslist); |
| 786 | |
| 787 | // return its name (the 'f' in "def f(...):") |
| 788 | return fscope->simple_name; |
| 789 | } |
| 790 | |
| 791 | // leaves class object on stack |
| 792 | // returns class name |
| 793 | STATIC qstr compile_classdef_helper(compiler_t *comp, mp_parse_node_struct_t *pns, uint emit_options) { |
| 794 | if (comp->pass == MP_PASS_SCOPE) { |
| 795 | // create a new scope for this class |
| 796 | scope_t *s = scope_new_and_link(comp, SCOPE_CLASS, (mp_parse_node_t)pns, emit_options); |
| 797 | // store the class scope so the compiling function can use it at each pass |
| 798 | pns->nodes[3] = (mp_parse_node_t)s; |
| 799 | } |
| 800 | |
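| | // A class is created by calling the builtin __build_class__ with the |
| | // function compiled from the class body, the class name and any bases, so |
| | // push that builtin first |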
| 801 | EMIT(load_build_class); |
| 802 | |
| 803 | // scope for this class |
| 804 | scope_t *cscope = (scope_t *)pns->nodes[3]; |
| 805 | |
| 806 | // compile the class |
| 807 | close_over_variables_etc(comp, cscope, 0, 0); |
| 808 | |
| 809 | // get its name |
| 810 | EMIT_ARG(load_const_str, cscope->simple_name); |
| 811 | |
| 812 | // nodes[1] has parent classes, if any |
| 813 | // an empty parenthesis list (eg "class C():") gets here as an empty PN_classdef_2 and needs special handling |
| 814 | mp_parse_node_t parents = pns->nodes[1]; |
| 815 | if (MP_PARSE_NODE_IS_STRUCT_KIND(parents, PN_classdef_2)) { |
| 816 | parents = MP_PARSE_NODE_NULL; |
| 817 | } |
| 818 | compile_trailer_paren_helper(comp, parents, false, 2); |
| 819 | |
| 820 | // return its name (the 'C' in "class C(...):") |
| 821 | return cscope->simple_name; |
| 822 | } |
| 823 | |
| 824 | // returns true if it was a built-in decorator (even if the built-in had an error) |
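| | // eg "@micropython.native" selects the native emitter for the decorated |
| | // function; any other attribute of "micropython" is reported as an invalid |
| | // decorator, and decorators that don't start with "micropython" are left for |
| | // the caller to compile as ordinary expressions |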
| 825 | STATIC bool compile_built_in_decorator(compiler_t *comp, size_t name_len, mp_parse_node_t *name_nodes, uint *emit_options) { |
| 826 | if (MP_PARSE_NODE_LEAF_ARG(name_nodes[0]) != MP_QSTR_micropython) { |
| 827 | return false; |
| 828 | } |
| 829 | |
| 830 | if (name_len != 2) { |
| 831 | compile_syntax_error(comp, name_nodes[0], MP_ERROR_TEXT("invalid micropython decorator")); |
| 832 | return true; |
| 833 | } |
| 834 | |
| 835 | qstr attr = MP_PARSE_NODE_LEAF_ARG(name_nodes[1]); |
| 836 | if (attr == MP_QSTR_bytecode) { |
| 837 | *emit_options = MP_EMIT_OPT_BYTECODE; |
| 838 | #if MICROPY_EMIT_NATIVE |
| 839 | } else if (attr == MP_QSTR_native) { |
| 840 | *emit_options = MP_EMIT_OPT_NATIVE_PYTHON; |
| 841 | } else if (attr == MP_QSTR_viper) { |
| 842 | *emit_options = MP_EMIT_OPT_VIPER; |
| 843 | #endif |
| 844 | #if MICROPY_EMIT_INLINE_ASM |
| 845 | #if MICROPY_DYNAMIC_COMPILER |
| 846 | } else if (attr == MP_QSTR_asm_thumb) { |
| 847 | *emit_options = MP_EMIT_OPT_ASM; |
| 848 | } else if (attr == MP_QSTR_asm_xtensa) { |
| 849 | *emit_options = MP_EMIT_OPT_ASM; |
| 850 | #else |
| 851 | } else if (attr == ASM_DECORATOR_QSTR) { |
| 852 | *emit_options = MP_EMIT_OPT_ASM; |
| 853 | #endif |
| 854 | #endif |
| 855 | } else { |
| 856 | compile_syntax_error(comp, name_nodes[1], MP_ERROR_TEXT("invalid micropython decorator")); |
| 857 | } |
| 858 | |
| 859 | #if MICROPY_DYNAMIC_COMPILER |
| 860 | if (*emit_options == MP_EMIT_OPT_NATIVE_PYTHON || *emit_options == MP_EMIT_OPT_VIPER) { |
| 861 | if (emit_native_table[mp_dynamic_compiler.native_arch] == NULL) { |
| 862 | compile_syntax_error(comp, name_nodes[1], MP_ERROR_TEXT("invalid arch")); |
| 863 | } |
| 864 | } else if (*emit_options == MP_EMIT_OPT_ASM) { |
| 865 | if (emit_asm_table[mp_dynamic_compiler.native_arch] == NULL) { |
| 866 | compile_syntax_error(comp, name_nodes[1], MP_ERROR_TEXT("invalid arch")); |
| 867 | } |
| 868 | } |
| 869 | #endif |
| 870 | |
| 871 | return true; |
| 872 | } |
| 873 | |
| 874 | STATIC void compile_decorated(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 875 | // get the list of decorators |
| 876 | mp_parse_node_t *nodes; |
| 877 | size_t n = mp_parse_node_extract_list(&pns->nodes[0], PN_decorators, &nodes); |
| 878 | |
| 879 | // inherit emit options for this function/class definition |
| 880 | uint emit_options = comp->scope_cur->emit_options; |
| 881 | |
| 882 | // compile each decorator |
| 883 | size_t num_built_in_decorators = 0; |
| 884 | for (size_t i = 0; i < n; i++) { |
| 885 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(nodes[i], PN_decorator)); // should be |
| 886 | mp_parse_node_struct_t *pns_decorator = (mp_parse_node_struct_t *)nodes[i]; |
| 887 | |
| 888 | // nodes[0] contains the decorator function, which is a dotted name |
| 889 | mp_parse_node_t *name_nodes; |
| 890 | size_t name_len = mp_parse_node_extract_list(&pns_decorator->nodes[0], PN_dotted_name, &name_nodes); |
| 891 | |
| 892 | // check for built-in decorators |
| 893 | if (compile_built_in_decorator(comp, name_len, name_nodes, &emit_options)) { |
| 894 | // this was a built-in |
| 895 | num_built_in_decorators += 1; |
| 896 | |
| 897 | } else { |
| 898 | // not a built-in, compile normally |
| 899 | |
| 900 | // compile the decorator function |
| 901 | compile_node(comp, name_nodes[0]); |
| 902 | for (size_t j = 1; j < name_len; j++) { |
| 903 | assert(MP_PARSE_NODE_IS_ID(name_nodes[j])); // should be |
| 904 | EMIT_ARG(attr, MP_PARSE_NODE_LEAF_ARG(name_nodes[j]), MP_EMIT_ATTR_LOAD); |
| 905 | } |
| 906 | |
| 907 | // nodes[1] contains arguments to the decorator function, if any |
| 908 | if (!MP_PARSE_NODE_IS_NULL(pns_decorator->nodes[1])) { |
| 909 | // call the decorator function with the arguments in nodes[1] |
| 910 | compile_node(comp, pns_decorator->nodes[1]); |
| 911 | } |
| 912 | } |
| 913 | } |
| 914 | |
| 915 | // compile the body (funcdef, async funcdef or classdef) and get its name |
| 916 | mp_parse_node_struct_t *pns_body = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 917 | qstr body_name = 0; |
| 918 | if (MP_PARSE_NODE_STRUCT_KIND(pns_body) == PN_funcdef) { |
| 919 | body_name = compile_funcdef_helper(comp, pns_body, emit_options); |
| 920 | #if MICROPY_PY_ASYNC_AWAIT |
| 921 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns_body) == PN_async_funcdef) { |
| 922 | assert(MP_PARSE_NODE_IS_STRUCT(pns_body->nodes[0])); |
| 923 | mp_parse_node_struct_t *pns0 = (mp_parse_node_struct_t *)pns_body->nodes[0]; |
| 924 | body_name = compile_funcdef_helper(comp, pns0, emit_options); |
| 925 | scope_t *fscope = (scope_t *)pns0->nodes[4]; |
| 926 | fscope->scope_flags |= MP_SCOPE_FLAG_GENERATOR; |
| 927 | #endif |
| 928 | } else { |
| 929 | assert(MP_PARSE_NODE_STRUCT_KIND(pns_body) == PN_classdef); // should be |
| 930 | body_name = compile_classdef_helper(comp, pns_body, emit_options); |
| 931 | } |
| 932 | |
| 933 | // call each decorator |
| 934 | for (size_t i = 0; i < n - num_built_in_decorators; i++) { |
| 935 | EMIT_ARG(call_function, 1, 0, 0); |
| 936 | } |
| 937 | |
| 938 | // store func/class object into name |
| 939 | compile_store_id(comp, body_name); |
| 940 | } |
| 941 | |
| 942 | STATIC void compile_funcdef(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 943 | qstr fname = compile_funcdef_helper(comp, pns, comp->scope_cur->emit_options); |
| 944 | // store function object into function name |
| 945 | compile_store_id(comp, fname); |
| 946 | } |
| 947 | |
| 948 | STATIC void c_del_stmt(compiler_t *comp, mp_parse_node_t pn) { |
| 949 | if (MP_PARSE_NODE_IS_ID(pn)) { |
| 950 | compile_delete_id(comp, MP_PARSE_NODE_LEAF_ARG(pn)); |
| 951 | } else if (MP_PARSE_NODE_IS_STRUCT_KIND(pn, PN_atom_expr_normal)) { |
| 952 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn; |
| 953 | |
| 954 | compile_node(comp, pns->nodes[0]); // base of the atom_expr_normal node |
| 955 | |
| 956 | if (MP_PARSE_NODE_IS_STRUCT(pns->nodes[1])) { |
| 957 | mp_parse_node_struct_t *pns1 = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 958 | if (MP_PARSE_NODE_STRUCT_KIND(pns1) == PN_atom_expr_trailers) { |
| 959 | int n = MP_PARSE_NODE_STRUCT_NUM_NODES(pns1); |
| 960 | for (int i = 0; i < n - 1; i++) { |
| 961 | compile_node(comp, pns1->nodes[i]); |
| 962 | } |
| 963 | assert(MP_PARSE_NODE_IS_STRUCT(pns1->nodes[n - 1])); |
| 964 | pns1 = (mp_parse_node_struct_t *)pns1->nodes[n - 1]; |
| 965 | } |
| 966 | if (MP_PARSE_NODE_STRUCT_KIND(pns1) == PN_trailer_bracket) { |
| 967 | compile_node(comp, pns1->nodes[0]); |
| 968 | EMIT_ARG(subscr, MP_EMIT_SUBSCR_DELETE); |
| 969 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns1) == PN_trailer_period) { |
| 970 | assert(MP_PARSE_NODE_IS_ID(pns1->nodes[0])); |
| 971 | EMIT_ARG(attr, MP_PARSE_NODE_LEAF_ARG(pns1->nodes[0]), MP_EMIT_ATTR_DELETE); |
| 972 | } else { |
| 973 | goto cannot_delete; |
| 974 | } |
| 975 | } else { |
| 976 | goto cannot_delete; |
| 977 | } |
| 978 | |
| 979 | } else if (MP_PARSE_NODE_IS_STRUCT_KIND(pn, PN_atom_paren)) { |
| 980 | pn = ((mp_parse_node_struct_t *)pn)->nodes[0]; |
| 981 | if (MP_PARSE_NODE_IS_NULL(pn)) { |
| 982 | goto cannot_delete; |
| 983 | } else { |
| 984 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(pn, PN_testlist_comp)); |
| 985 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn; |
| 986 | // TODO perhaps factorise testlist_comp code with other uses of PN_testlist_comp |
| 987 | |
| 988 | if (MP_PARSE_NODE_IS_STRUCT(pns->nodes[1])) { |
| 989 | mp_parse_node_struct_t *pns1 = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 990 | if (MP_PARSE_NODE_STRUCT_KIND(pns1) == PN_testlist_comp_3b) { |
| 991 | // sequence of one item, with trailing comma |
| 992 | assert(MP_PARSE_NODE_IS_NULL(pns1->nodes[0])); |
| 993 | c_del_stmt(comp, pns->nodes[0]); |
| 994 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns1) == PN_testlist_comp_3c) { |
| 995 | // sequence of many items |
| 996 | int n = MP_PARSE_NODE_STRUCT_NUM_NODES(pns1); |
| 997 | c_del_stmt(comp, pns->nodes[0]); |
| 998 | for (int i = 0; i < n; i++) { |
| 999 | c_del_stmt(comp, pns1->nodes[i]); |
| 1000 | } |
| 1001 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns1) == PN_comp_for) { |
| 1002 | goto cannot_delete; |
| 1003 | } else { |
| 1004 | // sequence with 2 items |
| 1005 | goto sequence_with_2_items; |
| 1006 | } |
| 1007 | } else { |
| 1008 | // sequence with 2 items |
| 1009 | sequence_with_2_items: |
| 1010 | c_del_stmt(comp, pns->nodes[0]); |
| 1011 | c_del_stmt(comp, pns->nodes[1]); |
| 1012 | } |
| 1013 | } |
| 1014 | } else { |
| 1015 | // some arbitrary statement that we can't delete (eg del 1) |
| 1016 | goto cannot_delete; |
| 1017 | } |
| 1018 | |
| 1019 | return; |
| 1020 | |
| 1021 | cannot_delete: |
| 1022 | compile_syntax_error(comp, (mp_parse_node_t)pn, MP_ERROR_TEXT("can't delete expression")); |
| 1023 | } |
| 1024 | |
| 1025 | STATIC void compile_del_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1026 | apply_to_single_or_list(comp, pns->nodes[0], PN_exprlist, c_del_stmt); |
| 1027 | } |
| 1028 | |
| 1029 | STATIC void compile_break_cont_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1030 | uint16_t label; |
| 1031 | if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_break_stmt) { |
| 1032 | label = comp->break_label; |
| 1033 | } else { |
| 1034 | label = comp->continue_label; |
| 1035 | } |
| 1036 | if (label == INVALID_LABEL) { |
| 1037 | compile_syntax_error(comp, (mp_parse_node_t)pns, MP_ERROR_TEXT("'break'/'continue' outside loop")); |
| 1038 | } |
| 1039 | assert(comp->cur_except_level >= comp->break_continue_except_level); |
| 1040 | EMIT_ARG(unwind_jump, label, comp->cur_except_level - comp->break_continue_except_level); |
| 1041 | } |
| 1042 | |
| 1043 | STATIC void compile_return_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1044 | #if MICROPY_CPYTHON_COMPAT |
| 1045 | if (comp->scope_cur->kind != SCOPE_FUNCTION) { |
| 1046 | compile_syntax_error(comp, (mp_parse_node_t)pns, MP_ERROR_TEXT("'return' outside function")); |
| 1047 | return; |
| 1048 | } |
| 1049 | #endif |
| 1050 | if (MP_PARSE_NODE_IS_NULL(pns->nodes[0])) { |
| 1051 | // no argument to 'return', so return None |
| 1052 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 1053 | } else if (MICROPY_COMP_RETURN_IF_EXPR |
| 1054 | && MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_test_if_expr)) { |
| 1055 | // special case when returning an if-expression; to match CPython optimisation |
| 1056 | mp_parse_node_struct_t *pns_test_if_expr = (mp_parse_node_struct_t *)pns->nodes[0]; |
| 1057 | mp_parse_node_struct_t *pns_test_if_else = (mp_parse_node_struct_t *)pns_test_if_expr->nodes[1]; |
| 1058 | |
| 1059 | uint l_fail = comp_next_label(comp); |
| 1060 | c_if_cond(comp, pns_test_if_else->nodes[0], false, l_fail); // condition |
| 1061 | compile_node(comp, pns_test_if_expr->nodes[0]); // success value |
| 1062 | EMIT(return_value); |
| 1063 | EMIT_ARG(label_assign, l_fail); |
| 1064 | compile_node(comp, pns_test_if_else->nodes[1]); // failure value |
| 1065 | } else { |
| 1066 | compile_node(comp, pns->nodes[0]); |
| 1067 | } |
| 1068 | EMIT(return_value); |
| 1069 | } |
| 1070 | |
| 1071 | STATIC void compile_yield_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1072 | compile_node(comp, pns->nodes[0]); |
| 1073 | EMIT(pop_top); |
| 1074 | } |
| 1075 | |
| 1076 | STATIC void compile_raise_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1077 | if (MP_PARSE_NODE_IS_NULL(pns->nodes[0])) { |
| 1078 | // raise |
| 1079 | EMIT_ARG(raise_varargs, 0); |
| 1080 | } else if (MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_raise_stmt_arg)) { |
| 1081 | // raise x from y |
| 1082 | pns = (mp_parse_node_struct_t *)pns->nodes[0]; |
| 1083 | compile_node(comp, pns->nodes[0]); |
| 1084 | compile_node(comp, pns->nodes[1]); |
| 1085 | EMIT_ARG(raise_varargs, 2); |
| 1086 | } else { |
| 1087 | // raise x |
| 1088 | compile_node(comp, pns->nodes[0]); |
| 1089 | EMIT_ARG(raise_varargs, 1); |
| 1090 | } |
| 1091 | } |
| 1092 | |
| 1093 | // q_base holds the base of the name |
| 1094 | // eg a -> q_base=a |
| 1095 | // a.b.c -> q_base=a |
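| | // eg "import a.b.c" imports the full dotted name and sets q_base to 'a', the |
| | // name the caller binds; for "import a.b.c as d" the .b and .c attributes |
| | // are loaded afterwards so that d is bound to the innermost module |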
| 1096 | STATIC void do_import_name(compiler_t *comp, mp_parse_node_t pn, qstr *q_base) { |
| 1097 | bool is_as = false; |
| 1098 | if (MP_PARSE_NODE_IS_STRUCT_KIND(pn, PN_dotted_as_name)) { |
| 1099 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn; |
| 1100 | // a name of the form x as y; unwrap it |
| 1101 | *q_base = MP_PARSE_NODE_LEAF_ARG(pns->nodes[1]); |
| 1102 | pn = pns->nodes[0]; |
| 1103 | is_as = true; |
| 1104 | } |
| 1105 | if (MP_PARSE_NODE_IS_NULL(pn)) { |
| 1106 | // empty name (eg, from . import x) |
| 1107 | *q_base = MP_QSTR_; |
| 1108 | EMIT_ARG(import, MP_QSTR_, MP_EMIT_IMPORT_NAME); // import the empty string |
| 1109 | } else if (MP_PARSE_NODE_IS_ID(pn)) { |
| 1110 | // just a simple name |
| 1111 | qstr q_full = MP_PARSE_NODE_LEAF_ARG(pn); |
| 1112 | if (!is_as) { |
| 1113 | *q_base = q_full; |
| 1114 | } |
| 1115 | EMIT_ARG(import, q_full, MP_EMIT_IMPORT_NAME); |
| 1116 | } else { |
| 1117 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(pn, PN_dotted_name)); // should be |
| 1118 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn; |
| 1119 | { |
| 1120 | // a name of the form a.b.c |
| 1121 | if (!is_as) { |
| 1122 | *q_base = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]); |
| 1123 | } |
| 1124 | int n = MP_PARSE_NODE_STRUCT_NUM_NODES(pns); |
| 1125 | int len = n - 1; |
| 1126 | for (int i = 0; i < n; i++) { |
| 1127 | len += qstr_len(MP_PARSE_NODE_LEAF_ARG(pns->nodes[i])); |
| 1128 | } |
| 1129 | char *q_ptr = mp_local_alloc(len); |
| 1130 | char *str_dest = q_ptr; |
| 1131 | for (int i = 0; i < n; i++) { |
| 1132 | if (i > 0) { |
| 1133 | *str_dest++ = '.'; |
| 1134 | } |
| 1135 | size_t str_src_len; |
| 1136 | const byte *str_src = qstr_data(MP_PARSE_NODE_LEAF_ARG(pns->nodes[i]), &str_src_len); |
| 1137 | memcpy(str_dest, str_src, str_src_len); |
| 1138 | str_dest += str_src_len; |
| 1139 | } |
| 1140 | qstr q_full = qstr_from_strn(q_ptr, len); |
| 1141 | mp_local_free(q_ptr); |
| 1142 | EMIT_ARG(import, q_full, MP_EMIT_IMPORT_NAME); |
| 1143 | if (is_as) { |
| 1144 | for (int i = 1; i < n; i++) { |
| 1145 | EMIT_ARG(attr, MP_PARSE_NODE_LEAF_ARG(pns->nodes[i]), MP_EMIT_ATTR_LOAD); |
| 1146 | } |
| 1147 | } |
| 1148 | } |
| 1149 | } |
| 1150 | } |
| 1151 | |
| 1152 | STATIC void compile_dotted_as_name(compiler_t *comp, mp_parse_node_t pn) { |
| 1153 | EMIT_ARG(load_const_small_int, 0); // level 0 import |
| 1154 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); // not importing from anything |
| 1155 | qstr q_base; |
| 1156 | do_import_name(comp, pn, &q_base); |
| 1157 | compile_store_id(comp, q_base); |
| 1158 | } |
| 1159 | |
| 1160 | STATIC void compile_import_name(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1161 | apply_to_single_or_list(comp, pns->nodes[0], PN_dotted_as_names, compile_dotted_as_name); |
| 1162 | } |
| 1163 | |
| 1164 | STATIC void compile_import_from(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1165 | mp_parse_node_t pn_import_source = pns->nodes[0]; |
| 1166 | |
| 1167 | // extract the preceding .'s (if any) for a relative import, to compute the import level |
| 1168 | uint import_level = 0; |
| 1169 | do { |
| 1170 | mp_parse_node_t pn_rel; |
| 1171 | if (MP_PARSE_NODE_IS_TOKEN(pn_import_source) || MP_PARSE_NODE_IS_STRUCT_KIND(pn_import_source, PN_one_or_more_period_or_ellipsis)) { |
| 1172 | // This covers relative imports with dots only like "from .. import" |
| 1173 | pn_rel = pn_import_source; |
| 1174 | pn_import_source = MP_PARSE_NODE_NULL; |
| 1175 | } else if (MP_PARSE_NODE_IS_STRUCT_KIND(pn_import_source, PN_import_from_2b)) { |
| 1176 | // This covers relative imports starting with dot(s) like "from .foo import" |
| 1177 | mp_parse_node_struct_t *pns_2b = (mp_parse_node_struct_t *)pn_import_source; |
| 1178 | pn_rel = pns_2b->nodes[0]; |
| 1179 | pn_import_source = pns_2b->nodes[1]; |
| 1180 | assert(!MP_PARSE_NODE_IS_NULL(pn_import_source)); // should not be |
| 1181 | } else { |
| 1182 | // Not a relative import |
| 1183 | break; |
| 1184 | } |
| 1185 | |
| 1186 | // get the list of . and/or ...'s |
| 1187 | mp_parse_node_t *nodes; |
| 1188 | size_t n = mp_parse_node_extract_list(&pn_rel, PN_one_or_more_period_or_ellipsis, &nodes); |
| 1189 | |
| 1190 | // count the total number of .'s |
| 1191 | for (size_t i = 0; i < n; i++) { |
| 1192 | if (MP_PARSE_NODE_IS_TOKEN_KIND(nodes[i], MP_TOKEN_DEL_PERIOD)) { |
| 1193 | import_level++; |
| 1194 | } else { |
| 1195 | // should be an MP_TOKEN_ELLIPSIS |
| 1196 | import_level += 3; |
| 1197 | } |
| 1198 | } |
| 1199 | } while (0); |
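| | // eg "from ..pkg import x" gives import_level 2; an ellipsis token counts as |
| | // three dots |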
| 1200 | |
| 1201 | if (MP_PARSE_NODE_IS_TOKEN_KIND(pns->nodes[1], MP_TOKEN_OP_STAR)) { |
| 1202 | #if MICROPY_CPYTHON_COMPAT |
| 1203 | if (comp->scope_cur->kind != SCOPE_MODULE) { |
| 1204 | compile_syntax_error(comp, (mp_parse_node_t)pns, MP_ERROR_TEXT("import * not at module level")); |
| 1205 | return; |
| 1206 | } |
| 1207 | #endif |
| 1208 | |
| 1209 | EMIT_ARG(load_const_small_int, import_level); |
| 1210 | |
| 1211 | // build the "fromlist" tuple |
| 1212 | EMIT_ARG(load_const_str, MP_QSTR__star_); |
| 1213 | EMIT_ARG(build, 1, MP_EMIT_BUILD_TUPLE); |
| 1214 | |
| 1215 | // do the import |
| 1216 | qstr dummy_q; |
| 1217 | do_import_name(comp, pn_import_source, &dummy_q); |
| 1218 | EMIT_ARG(import, MP_QSTRnull, MP_EMIT_IMPORT_STAR); |
| 1219 | |
| 1220 | } else { |
| 1221 | EMIT_ARG(load_const_small_int, import_level); |
| 1222 | |
| 1223 | // build the "fromlist" tuple |
| 1224 | mp_parse_node_t *pn_nodes; |
| 1225 | size_t n = mp_parse_node_extract_list(&pns->nodes[1], PN_import_as_names, &pn_nodes); |
| 1226 | for (size_t i = 0; i < n; i++) { |
| 1227 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(pn_nodes[i], PN_import_as_name)); |
| 1228 | mp_parse_node_struct_t *pns3 = (mp_parse_node_struct_t *)pn_nodes[i]; |
| 1229 | qstr id2 = MP_PARSE_NODE_LEAF_ARG(pns3->nodes[0]); // should be id |
| 1230 | EMIT_ARG(load_const_str, id2); |
| 1231 | } |
| 1232 | EMIT_ARG(build, n, MP_EMIT_BUILD_TUPLE); |
| 1233 | |
| 1234 | // do the import |
| 1235 | qstr dummy_q; |
| 1236 | do_import_name(comp, pn_import_source, &dummy_q); |
| 1237 | for (size_t i = 0; i < n; i++) { |
| 1238 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(pn_nodes[i], PN_import_as_name)); |
| 1239 | mp_parse_node_struct_t *pns3 = (mp_parse_node_struct_t *)pn_nodes[i]; |
| 1240 | qstr id2 = MP_PARSE_NODE_LEAF_ARG(pns3->nodes[0]); // should be id |
| 1241 | EMIT_ARG(import, id2, MP_EMIT_IMPORT_FROM); |
| 1242 | if (MP_PARSE_NODE_IS_NULL(pns3->nodes[1])) { |
| 1243 | compile_store_id(comp, id2); |
| 1244 | } else { |
| 1245 | compile_store_id(comp, MP_PARSE_NODE_LEAF_ARG(pns3->nodes[1])); |
| 1246 | } |
| 1247 | } |
| 1248 | EMIT(pop_top); |
| 1249 | } |
| 1250 | } |
| 1251 | |
| 1252 | STATIC void compile_declare_global(compiler_t *comp, mp_parse_node_t pn, id_info_t *id_info) { |
| 1253 | if (id_info->kind != ID_INFO_KIND_UNDECIDED && id_info->kind != ID_INFO_KIND_GLOBAL_EXPLICIT) { |
| 1254 | compile_syntax_error(comp, pn, MP_ERROR_TEXT("identifier redefined as global")); |
| 1255 | return; |
| 1256 | } |
| 1257 | id_info->kind = ID_INFO_KIND_GLOBAL_EXPLICIT; |
| 1258 | |
| 1259 | // if the id exists in the global scope, set its kind to EXPLICIT_GLOBAL |
| 1260 | id_info = scope_find_global(comp->scope_cur, id_info->qst); |
| 1261 | if (id_info != NULL) { |
| 1262 | id_info->kind = ID_INFO_KIND_GLOBAL_EXPLICIT; |
| 1263 | } |
| 1264 | } |
| 1265 | |
| 1266 | STATIC void compile_declare_nonlocal(compiler_t *comp, mp_parse_node_t pn, id_info_t *id_info) { |
| 1267 | if (id_info->kind == ID_INFO_KIND_UNDECIDED) { |
| 1268 | id_info->kind = ID_INFO_KIND_GLOBAL_IMPLICIT; |
| 1269 | scope_check_to_close_over(comp->scope_cur, id_info); |
| 1270 | if (id_info->kind == ID_INFO_KIND_GLOBAL_IMPLICIT) { |
| 1271 | compile_syntax_error(comp, pn, MP_ERROR_TEXT("no binding for nonlocal found")); |
| 1272 | } |
| 1273 | } else if (id_info->kind != ID_INFO_KIND_FREE) { |
| 1274 | compile_syntax_error(comp, pn, MP_ERROR_TEXT("identifier redefined as nonlocal")); |
| 1275 | } |
| 1276 | } |
| 1277 | |
| 1278 | STATIC void compile_global_nonlocal_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1279 | if (comp->pass == MP_PASS_SCOPE) { |
| 1280 | bool is_global = MP_PARSE_NODE_STRUCT_KIND(pns) == PN_global_stmt; |
| 1281 | |
| 1282 | if (!is_global && comp->scope_cur->kind == SCOPE_MODULE) { |
| 1283 | compile_syntax_error(comp, (mp_parse_node_t)pns, MP_ERROR_TEXT("can't declare nonlocal in outer code")); |
| 1284 | return; |
| 1285 | } |
| 1286 | |
| 1287 | mp_parse_node_t *nodes; |
| 1288 | size_t n = mp_parse_node_extract_list(&pns->nodes[0], PN_name_list, &nodes); |
| 1289 | for (size_t i = 0; i < n; i++) { |
| 1290 | qstr qst = MP_PARSE_NODE_LEAF_ARG(nodes[i]); |
| 1291 | id_info_t *id_info = scope_find_or_add_id(comp->scope_cur, qst, ID_INFO_KIND_UNDECIDED); |
| 1292 | if (is_global) { |
| 1293 | compile_declare_global(comp, (mp_parse_node_t)pns, id_info); |
| 1294 | } else { |
| 1295 | compile_declare_nonlocal(comp, (mp_parse_node_t)pns, id_info); |
| 1296 | } |
| 1297 | } |
| 1298 | } |
| 1299 | } |
| 1300 | |
| 1301 | STATIC void compile_assert_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1302 | // with optimisations enabled we don't compile assertions |
| 1303 | if (MP_STATE_VM(mp_optimise_value) != 0) { |
| 1304 | return; |
| 1305 | } |
| 1306 | |
| 1307 | uint l_end = comp_next_label(comp); |
| 1308 | c_if_cond(comp, pns->nodes[0], true, l_end); |
| 1309 | EMIT_LOAD_GLOBAL(MP_QSTR_AssertionError); // we load_global instead of load_id, to be consistent with CPython |
| 1310 | if (!MP_PARSE_NODE_IS_NULL(pns->nodes[1])) { |
| 1311 | // assertion message |
| 1312 | compile_node(comp, pns->nodes[1]); |
| 1313 | EMIT_ARG(call_function, 1, 0, 0); |
| 1314 | } |
| 1315 | EMIT_ARG(raise_varargs, 1); |
| 1316 | EMIT_ARG(label_assign, l_end); |
| 1317 | } |
| 1318 | |
| 1319 | STATIC void compile_if_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1320 | uint l_end = comp_next_label(comp); |
| 1321 | |
| 1322 | // optimisation: don't emit anything when "if False" |
| 1323 | if (!mp_parse_node_is_const_false(pns->nodes[0])) { |
| 1324 | uint l_fail = comp_next_label(comp); |
| 1325 | c_if_cond(comp, pns->nodes[0], false, l_fail); // if condition |
| 1326 | |
| 1327 | compile_node(comp, pns->nodes[1]); // if block |
| 1328 | |
| 1329 | // optimisation: skip everything else when "if True" |
| 1330 | if (mp_parse_node_is_const_true(pns->nodes[0])) { |
| 1331 | goto done; |
| 1332 | } |
| 1333 | |
| 1334 | if ( |
| 1335 | // optimisation: don't jump over non-existent elif/else blocks |
| 1336 | !(MP_PARSE_NODE_IS_NULL(pns->nodes[2]) && MP_PARSE_NODE_IS_NULL(pns->nodes[3])) |
| 1337 | // optimisation: don't jump if last instruction was return |
| 1338 | && !EMIT(last_emit_was_return_value) |
| 1339 | ) { |
| 1340 | // jump over elif/else blocks |
| 1341 | EMIT_ARG(jump, l_end); |
| 1342 | } |
| 1343 | |
| 1344 | EMIT_ARG(label_assign, l_fail); |
| 1345 | } |
| 1346 | |
| 1347 | // compile elif blocks (if any) |
| 1348 | mp_parse_node_t *pn_elif; |
| 1349 | size_t n_elif = mp_parse_node_extract_list(&pns->nodes[2], PN_if_stmt_elif_list, &pn_elif); |
| 1350 | for (size_t i = 0; i < n_elif; i++) { |
| 1351 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(pn_elif[i], PN_if_stmt_elif)); // should be |
| 1352 | mp_parse_node_struct_t *pns_elif = (mp_parse_node_struct_t *)pn_elif[i]; |
| 1353 | |
| 1354 | // optimisation: don't emit anything when "if False" |
| 1355 | if (!mp_parse_node_is_const_false(pns_elif->nodes[0])) { |
| 1356 | uint l_fail = comp_next_label(comp); |
| 1357 | c_if_cond(comp, pns_elif->nodes[0], false, l_fail); // elif condition |
| 1358 | |
| 1359 | compile_node(comp, pns_elif->nodes[1]); // elif block |
| 1360 | |
| 1361 | // optimisation: skip everything else when "elif True" |
| 1362 | if (mp_parse_node_is_const_true(pns_elif->nodes[0])) { |
| 1363 | goto done; |
| 1364 | } |
| 1365 | |
| 1366 | // optimisation: don't jump if last instruction was return |
| 1367 | if (!EMIT(last_emit_was_return_value)) { |
| 1368 | EMIT_ARG(jump, l_end); |
| 1369 | } |
| 1370 | EMIT_ARG(label_assign, l_fail); |
| 1371 | } |
| 1372 | } |
| 1373 | |
| 1374 | // compile else block |
| 1375 | compile_node(comp, pns->nodes[3]); // can be null |
| 1376 | |
| 1377 | done: |
| 1378 | EMIT_ARG(label_assign, l_end); |
| 1379 | } |
| 1380 | |
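// These macros bracket the compilation of a loop: they save the enclosing
// loop's break/continue labels and except level, allocate fresh
// break_label/continue_label for this loop, and then restore the saved state
// so that break/continue inside an else clause target the outer loop.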
| 1381 | #define START_BREAK_CONTINUE_BLOCK \ |
| 1382 | uint16_t old_break_label = comp->break_label; \ |
| 1383 | uint16_t old_continue_label = comp->continue_label; \ |
| 1384 | uint16_t old_break_continue_except_level = comp->break_continue_except_level; \ |
| 1385 | uint break_label = comp_next_label(comp); \ |
| 1386 | uint continue_label = comp_next_label(comp); \ |
| 1387 | comp->break_label = break_label; \ |
| 1388 | comp->continue_label = continue_label; \ |
| 1389 | comp->break_continue_except_level = comp->cur_except_level; |
| 1390 | |
| 1391 | #define END_BREAK_CONTINUE_BLOCK \ |
| 1392 | comp->break_label = old_break_label; \ |
| 1393 | comp->continue_label = old_continue_label; \ |
| 1394 | comp->break_continue_except_level = old_break_continue_except_level; |
| 1395 | |
| 1396 | STATIC void compile_while_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1397 | START_BREAK_CONTINUE_BLOCK |
| 1398 | |
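    // The loop is emitted with its test at the bottom, so each iteration
    // executes only one conditional jump:
    //   jump continue_label        (omitted for "while True")
    // top_label: <body>
    // continue_label: if <cond>: goto top_label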
| 1399 | if (!mp_parse_node_is_const_false(pns->nodes[0])) { // optimisation: don't emit anything for "while False" |
| 1400 | uint top_label = comp_next_label(comp); |
| 1401 | if (!mp_parse_node_is_const_true(pns->nodes[0])) { // optimisation: don't jump to cond for "while True" |
| 1402 | EMIT_ARG(jump, continue_label); |
| 1403 | } |
| 1404 | EMIT_ARG(label_assign, top_label); |
| 1405 | compile_node(comp, pns->nodes[1]); // body |
| 1406 | EMIT_ARG(label_assign, continue_label); |
| 1407 | c_if_cond(comp, pns->nodes[0], true, top_label); // condition |
| 1408 | } |
| 1409 | |
| 1410 | // break/continue apply to outer loop (if any) in the else block |
| 1411 | END_BREAK_CONTINUE_BLOCK |
| 1412 | |
| 1413 | compile_node(comp, pns->nodes[2]); // else |
| 1414 | |
| 1415 | EMIT_ARG(label_assign, break_label); |
| 1416 | } |
| 1417 | |
| 1418 | // This function compiles an optimised for-loop of the form: |
| 1419 | // for <var> in range(<start>, <end>, <step>): |
| 1420 | // <body> |
| 1421 | // else: |
| 1422 | // <else> |
| 1423 | // <var> must be an identifier and <step> must be a small-int. |
| 1424 | // |
| 1425 | // Semantics of for-loop require: |
| 1426 | // - final failing value should not be stored in the loop variable |
| 1427 | // - if the loop never runs, the loop variable should never be assigned |
| 1428 | // - assignments to <var>, <end> or <step> in the body do not alter the loop |
| 1429 | // (<step> is a constant for us, so no need to worry about it changing) |
| 1430 | // |
| 1431 | // If <end> is a small-int, then the stack during the for-loop contains just |
| 1432 | // the current value of <var>. Otherwise, the stack contains <end> then the |
| 1433 | // current value of <var>. |
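//
// The emitted code has roughly this shape (ignoring the optional <end> slot):
//   push <start>; jump entry
// top:
//   <var> = TOS (the running value stays on the stack)
//   <body>
// continue:
//   TOS += <step>
// entry:
//   if TOS < <end> (or > for a negative <step>): goto top
// after which the failing value (and <end>, if it was pushed) is popped.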
| 1434 | STATIC void compile_for_stmt_optimised_range(compiler_t *comp, mp_parse_node_t pn_var, mp_parse_node_t pn_start, mp_parse_node_t pn_end, mp_parse_node_t pn_step, mp_parse_node_t pn_body, mp_parse_node_t pn_else) { |
| 1435 | START_BREAK_CONTINUE_BLOCK |
| 1436 | |
| 1437 | uint top_label = comp_next_label(comp); |
| 1438 | uint entry_label = comp_next_label(comp); |
| 1439 | |
| 1440 | // put the end value on the stack if it's not a small-int constant |
| 1441 | bool end_on_stack = !MP_PARSE_NODE_IS_SMALL_INT(pn_end); |
| 1442 | if (end_on_stack) { |
| 1443 | compile_node(comp, pn_end); |
| 1444 | } |
| 1445 | |
| 1446 | // compile: start |
| 1447 | compile_node(comp, pn_start); |
| 1448 | |
| 1449 | EMIT_ARG(jump, entry_label); |
| 1450 | EMIT_ARG(label_assign, top_label); |
| 1451 | |
| 1452 | // duplicate next value and store it to var |
| 1453 | EMIT(dup_top); |
| 1454 | c_assign(comp, pn_var, ASSIGN_STORE); |
| 1455 | |
| 1456 | // compile body |
| 1457 | compile_node(comp, pn_body); |
| 1458 | |
| 1459 | EMIT_ARG(label_assign, continue_label); |
| 1460 | |
| 1461 | // compile: var + step |
| 1462 | compile_node(comp, pn_step); |
| 1463 | EMIT_ARG(binary_op, MP_BINARY_OP_INPLACE_ADD); |
| 1464 | |
| 1465 | EMIT_ARG(label_assign, entry_label); |
| 1466 | |
| 1467 | // compile: if var <cond> end: goto top |
| 1468 | if (end_on_stack) { |
| 1469 | EMIT(dup_top_two); |
| 1470 | EMIT(rot_two); |
| 1471 | } else { |
| 1472 | EMIT(dup_top); |
| 1473 | compile_node(comp, pn_end); |
| 1474 | } |
| 1475 | assert(MP_PARSE_NODE_IS_SMALL_INT(pn_step)); |
| 1476 | if (MP_PARSE_NODE_LEAF_SMALL_INT(pn_step) >= 0) { |
| 1477 | EMIT_ARG(binary_op, MP_BINARY_OP_LESS); |
| 1478 | } else { |
| 1479 | EMIT_ARG(binary_op, MP_BINARY_OP_MORE); |
| 1480 | } |
| 1481 | EMIT_ARG(pop_jump_if, true, top_label); |
| 1482 | |
| 1483 | // break/continue apply to outer loop (if any) in the else block |
| 1484 | END_BREAK_CONTINUE_BLOCK |
| 1485 | |
| 1486 | // Compile the else block. We must pop the iterator variables before |
| 1487 | // executing the else code because it may contain break/continue statements. |
| 1488 | uint end_label = 0; |
| 1489 | if (!MP_PARSE_NODE_IS_NULL(pn_else)) { |
| 1490 | // discard final value of "var", and possible "end" value |
| 1491 | EMIT(pop_top); |
| 1492 | if (end_on_stack) { |
| 1493 | EMIT(pop_top); |
| 1494 | } |
| 1495 | compile_node(comp, pn_else); |
| 1496 | end_label = comp_next_label(comp); |
| 1497 | EMIT_ARG(jump, end_label); |
| 1498 | EMIT_ARG(adjust_stack_size, 1 + end_on_stack); |
| 1499 | } |
| 1500 | |
| 1501 | EMIT_ARG(label_assign, break_label); |
| 1502 | |
| 1503 | // discard final value of var that failed the loop condition |
| 1504 | EMIT(pop_top); |
| 1505 | |
| 1506 | // discard <end> value if it's on the stack |
| 1507 | if (end_on_stack) { |
| 1508 | EMIT(pop_top); |
| 1509 | } |
| 1510 | |
| 1511 | if (!MP_PARSE_NODE_IS_NULL(pn_else)) { |
| 1512 | EMIT_ARG(label_assign, end_label); |
| 1513 | } |
| 1514 | } |
| 1515 | |
| 1516 | STATIC void compile_for_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1517 | // this bit optimises: for <x> in range(...), turning it into an explicitly incremented variable |
| 1518 | // this is actually slower, but uses no heap memory |
| 1519 | // for viper it will be much, much faster |
| 1520 | if (/*comp->scope_cur->emit_options == MP_EMIT_OPT_VIPER &&*/ MP_PARSE_NODE_IS_ID(pns->nodes[0]) && MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[1], PN_atom_expr_normal)) { |
| 1521 | mp_parse_node_struct_t *pns_it = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 1522 | if (MP_PARSE_NODE_IS_ID(pns_it->nodes[0]) |
| 1523 | && MP_PARSE_NODE_LEAF_ARG(pns_it->nodes[0]) == MP_QSTR_range |
| 1524 | && MP_PARSE_NODE_STRUCT_KIND((mp_parse_node_struct_t *)pns_it->nodes[1]) == PN_trailer_paren) { |
| 1525 | mp_parse_node_t pn_range_args = ((mp_parse_node_struct_t *)pns_it->nodes[1])->nodes[0]; |
| 1526 | mp_parse_node_t *args; |
| 1527 | size_t n_args = mp_parse_node_extract_list(&pn_range_args, PN_arglist, &args); |
| 1528 | mp_parse_node_t pn_range_start; |
| 1529 | mp_parse_node_t pn_range_end; |
| 1530 | mp_parse_node_t pn_range_step; |
| 1531 | bool optimize = false; |
| 1532 | if (1 <= n_args && n_args <= 3) { |
| 1533 | optimize = true; |
| 1534 | if (n_args == 1) { |
| 1535 | pn_range_start = mp_parse_node_new_small_int(0); |
| 1536 | pn_range_end = args[0]; |
| 1537 | pn_range_step = mp_parse_node_new_small_int(1); |
| 1538 | } else if (n_args == 2) { |
| 1539 | pn_range_start = args[0]; |
| 1540 | pn_range_end = args[1]; |
| 1541 | pn_range_step = mp_parse_node_new_small_int(1); |
| 1542 | } else { |
| 1543 | pn_range_start = args[0]; |
| 1544 | pn_range_end = args[1]; |
| 1545 | pn_range_step = args[2]; |
| 1546 | // the step must be a non-zero constant integer to do the optimisation |
| 1547 | if (!MP_PARSE_NODE_IS_SMALL_INT(pn_range_step) |
| 1548 | || MP_PARSE_NODE_LEAF_SMALL_INT(pn_range_step) == 0) { |
| 1549 | optimize = false; |
| 1550 | } |
| 1551 | } |
| 1552 | // arguments must be able to be compiled as standard expressions |
| 1553 | if (optimize && MP_PARSE_NODE_IS_STRUCT(pn_range_start)) { |
| 1554 | int k = MP_PARSE_NODE_STRUCT_KIND((mp_parse_node_struct_t *)pn_range_start); |
| 1555 | if (k == PN_arglist_star || k == PN_arglist_dbl_star || k == PN_argument) { |
| 1556 | optimize = false; |
| 1557 | } |
| 1558 | } |
| 1559 | if (optimize && MP_PARSE_NODE_IS_STRUCT(pn_range_end)) { |
| 1560 | int k = MP_PARSE_NODE_STRUCT_KIND((mp_parse_node_struct_t *)pn_range_end); |
| 1561 | if (k == PN_arglist_star || k == PN_arglist_dbl_star || k == PN_argument) { |
| 1562 | optimize = false; |
| 1563 | } |
| 1564 | } |
| 1565 | } |
| 1566 | if (optimize) { |
| 1567 | compile_for_stmt_optimised_range(comp, pns->nodes[0], pn_range_start, pn_range_end, pn_range_step, pns->nodes[2], pns->nodes[3]); |
| 1568 | return; |
| 1569 | } |
| 1570 | } |
| 1571 | } |
| 1572 | |
| 1573 | START_BREAK_CONTINUE_BLOCK |
| 1574 | comp->break_label |= MP_EMIT_BREAK_FROM_FOR; |
| 1575 | |
| 1576 | uint pop_label = comp_next_label(comp); |
| 1577 | |
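    // The generic path emits roughly:
    //   push iter(<iterable>)
    // continue_label:
    //   for_iter pop_label        ; push next item, or jump to pop_label when exhausted
    //   <var> = next item
    //   <body>; jump continue_label
    // pop_label:
    //   for_iter_end
    // followed by the else block (if any) and break_label.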
| 1578 | compile_node(comp, pns->nodes[1]); // iterator |
| 1579 | EMIT_ARG(get_iter, true); |
| 1580 | EMIT_ARG(label_assign, continue_label); |
| 1581 | EMIT_ARG(for_iter, pop_label); |
| 1582 | c_assign(comp, pns->nodes[0], ASSIGN_STORE); // variable |
| 1583 | compile_node(comp, pns->nodes[2]); // body |
| 1584 | if (!EMIT(last_emit_was_return_value)) { |
| 1585 | EMIT_ARG(jump, continue_label); |
| 1586 | } |
| 1587 | EMIT_ARG(label_assign, pop_label); |
| 1588 | EMIT(for_iter_end); |
| 1589 | |
| 1590 | // break/continue apply to outer loop (if any) in the else block |
| 1591 | END_BREAK_CONTINUE_BLOCK |
| 1592 | |
| 1593 | compile_node(comp, pns->nodes[3]); // else (may be empty) |
| 1594 | |
| 1595 | EMIT_ARG(label_assign, break_label); |
| 1596 | } |
| 1597 | |
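// Compiles the try/except[/else] part of a try statement.  The body runs
// inside a newly set-up exception block; on success execution jumps over the
// handler to the else block, otherwise the raised exception is tested against
// each except clause in turn using MP_BINARY_OP_EXCEPTION_MATCH, and an
// unmatched exception propagates out of the handler.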
| 1598 | STATIC void compile_try_except(compiler_t *comp, mp_parse_node_t pn_body, int n_except, mp_parse_node_t *pn_excepts, mp_parse_node_t pn_else) { |
| 1599 | // setup code |
| 1600 | uint l1 = comp_next_label(comp); |
| 1601 | uint success_label = comp_next_label(comp); |
| 1602 | |
| 1603 | compile_increase_except_level(comp, l1, MP_EMIT_SETUP_BLOCK_EXCEPT); |
| 1604 | |
| 1605 | compile_node(comp, pn_body); // body |
| 1606 | EMIT_ARG(pop_except_jump, success_label, false); // jump over exception handler |
| 1607 | |
| 1608 | EMIT_ARG(label_assign, l1); // start of exception handler |
| 1609 | EMIT(start_except_handler); |
| 1610 | |
| 1611 | // at this point the top of the stack contains the exception instance that was raised |
| 1612 | |
| 1613 | uint l2 = comp_next_label(comp); |
| 1614 | |
| 1615 | for (int i = 0; i < n_except; i++) { |
| 1616 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(pn_excepts[i], PN_try_stmt_except)); // should be |
| 1617 | mp_parse_node_struct_t *pns_except = (mp_parse_node_struct_t *)pn_excepts[i]; |
| 1618 | |
| 1619 | qstr qstr_exception_local = 0; |
| 1620 | uint end_finally_label = comp_next_label(comp); |
| 1621 | #if MICROPY_PY_SYS_SETTRACE |
| 1622 | EMIT_ARG(set_source_line, pns_except->source_line); |
| 1623 | #endif |
| 1624 | |
| 1625 | if (MP_PARSE_NODE_IS_NULL(pns_except->nodes[0])) { |
| 1626 | // this is a catch all exception handler |
| 1627 | if (i + 1 != n_except) { |
                compile_syntax_error(comp, pn_excepts[i], MP_ERROR_TEXT("default 'except' must be last"));
| 1629 | compile_decrease_except_level(comp); |
| 1630 | return; |
| 1631 | } |
| 1632 | } else { |
| 1633 | // this exception handler requires a match to a certain type of exception |
| 1634 | mp_parse_node_t pns_exception_expr = pns_except->nodes[0]; |
| 1635 | if (MP_PARSE_NODE_IS_STRUCT(pns_exception_expr)) { |
| 1636 | mp_parse_node_struct_t *pns3 = (mp_parse_node_struct_t *)pns_exception_expr; |
| 1637 | if (MP_PARSE_NODE_STRUCT_KIND(pns3) == PN_try_stmt_as_name) { |
| 1638 | // handler binds the exception to a local |
| 1639 | pns_exception_expr = pns3->nodes[0]; |
| 1640 | qstr_exception_local = MP_PARSE_NODE_LEAF_ARG(pns3->nodes[1]); |
| 1641 | } |
| 1642 | } |
| 1643 | EMIT(dup_top); |
| 1644 | compile_node(comp, pns_exception_expr); |
| 1645 | EMIT_ARG(binary_op, MP_BINARY_OP_EXCEPTION_MATCH); |
| 1646 | EMIT_ARG(pop_jump_if, false, end_finally_label); |
| 1647 | } |
| 1648 | |
| 1649 | // either discard or store the exception instance |
| 1650 | if (qstr_exception_local == 0) { |
| 1651 | EMIT(pop_top); |
| 1652 | } else { |
| 1653 | compile_store_id(comp, qstr_exception_local); |
| 1654 | } |
| 1655 | |
| 1656 | // If the exception is bound to a variable <e> then the <body> of the |
| 1657 | // exception handler is wrapped in a try-finally so that the name <e> can |
| 1658 | // be deleted (per Python semantics) even if the <body> has an exception. |
| 1659 | // In such a case the generated code for the exception handler is: |
| 1660 | // try: |
| 1661 | // <body> |
| 1662 | // finally: |
| 1663 | // <e> = None |
| 1664 | // del <e> |
| 1665 | uint l3 = 0; |
| 1666 | if (qstr_exception_local != 0) { |
| 1667 | l3 = comp_next_label(comp); |
| 1668 | compile_increase_except_level(comp, l3, MP_EMIT_SETUP_BLOCK_FINALLY); |
| 1669 | } |
| 1670 | compile_node(comp, pns_except->nodes[1]); // the <body> |
| 1671 | if (qstr_exception_local != 0) { |
| 1672 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 1673 | EMIT_ARG(label_assign, l3); |
| 1674 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 1675 | compile_store_id(comp, qstr_exception_local); |
| 1676 | compile_delete_id(comp, qstr_exception_local); |
| 1677 | compile_decrease_except_level(comp); |
| 1678 | } |
| 1679 | |
| 1680 | EMIT_ARG(pop_except_jump, l2, true); |
| 1681 | EMIT_ARG(label_assign, end_finally_label); |
| 1682 | EMIT_ARG(adjust_stack_size, 1); // stack adjust for the exception instance |
| 1683 | } |
| 1684 | |
| 1685 | compile_decrease_except_level(comp); |
| 1686 | EMIT(end_except_handler); |
| 1687 | |
| 1688 | EMIT_ARG(label_assign, success_label); |
| 1689 | compile_node(comp, pn_else); // else block, can be null |
| 1690 | EMIT_ARG(label_assign, l2); |
| 1691 | } |
| 1692 | |
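// Compiles try/finally, including the combined try/except/else/finally form:
// in that case compile_try_except is nested inside the finally block, so the
// finally code runs on every exit path (fall-through, exception, return or
// unwind jump).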
| 1693 | STATIC void compile_try_finally(compiler_t *comp, mp_parse_node_t pn_body, int n_except, mp_parse_node_t *pn_except, mp_parse_node_t pn_else, mp_parse_node_t pn_finally) { |
| 1694 | uint l_finally_block = comp_next_label(comp); |
| 1695 | |
| 1696 | compile_increase_except_level(comp, l_finally_block, MP_EMIT_SETUP_BLOCK_FINALLY); |
| 1697 | |
| 1698 | if (n_except == 0) { |
| 1699 | assert(MP_PARSE_NODE_IS_NULL(pn_else)); |
| 1700 | EMIT_ARG(adjust_stack_size, 3); // stack adjust for possible UNWIND_JUMP state |
| 1701 | compile_node(comp, pn_body); |
| 1702 | EMIT_ARG(adjust_stack_size, -3); |
| 1703 | } else { |
| 1704 | compile_try_except(comp, pn_body, n_except, pn_except, pn_else); |
| 1705 | } |
| 1706 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 1707 | EMIT_ARG(label_assign, l_finally_block); |
| 1708 | compile_node(comp, pn_finally); |
| 1709 | |
| 1710 | compile_decrease_except_level(comp); |
| 1711 | } |
| 1712 | |
| 1713 | STATIC void compile_try_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1714 | assert(MP_PARSE_NODE_IS_STRUCT(pns->nodes[1])); // should be |
| 1715 | { |
| 1716 | mp_parse_node_struct_t *pns2 = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 1717 | if (MP_PARSE_NODE_STRUCT_KIND(pns2) == PN_try_stmt_finally) { |
| 1718 | // just try-finally |
| 1719 | compile_try_finally(comp, pns->nodes[0], 0, NULL, MP_PARSE_NODE_NULL, pns2->nodes[0]); |
| 1720 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns2) == PN_try_stmt_except_and_more) { |
| 1721 | // try-except and possibly else and/or finally |
| 1722 | mp_parse_node_t *pn_excepts; |
| 1723 | size_t n_except = mp_parse_node_extract_list(&pns2->nodes[0], PN_try_stmt_except_list, &pn_excepts); |
| 1724 | if (MP_PARSE_NODE_IS_NULL(pns2->nodes[2])) { |
| 1725 | // no finally |
| 1726 | compile_try_except(comp, pns->nodes[0], n_except, pn_excepts, pns2->nodes[1]); |
| 1727 | } else { |
| 1728 | // have finally |
| 1729 | compile_try_finally(comp, pns->nodes[0], n_except, pn_excepts, pns2->nodes[1], ((mp_parse_node_struct_t *)pns2->nodes[2])->nodes[0]); |
| 1730 | } |
| 1731 | } else { |
| 1732 | // just try-except |
| 1733 | mp_parse_node_t *pn_excepts; |
| 1734 | size_t n_except = mp_parse_node_extract_list(&pns->nodes[1], PN_try_stmt_except_list, &pn_excepts); |
| 1735 | compile_try_except(comp, pns->nodes[0], n_except, pn_excepts, MP_PARSE_NODE_NULL); |
| 1736 | } |
| 1737 | } |
| 1738 | } |
| 1739 | |
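// Compiles the context managers of a with statement recursively, so that
// "with a as b, c as d: <body>" behaves like the nested form
// "with a as b: with c as d: <body>".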
| 1740 | STATIC void compile_with_stmt_helper(compiler_t *comp, size_t n, mp_parse_node_t *nodes, mp_parse_node_t body) { |
| 1741 | if (n == 0) { |
| 1742 | // no more pre-bits, compile the body of the with |
| 1743 | compile_node(comp, body); |
| 1744 | } else { |
| 1745 | uint l_end = comp_next_label(comp); |
| 1746 | if (MP_PARSE_NODE_IS_STRUCT_KIND(nodes[0], PN_with_item)) { |
| 1747 | // this pre-bit is of the form "a as b" |
| 1748 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)nodes[0]; |
| 1749 | compile_node(comp, pns->nodes[0]); |
| 1750 | compile_increase_except_level(comp, l_end, MP_EMIT_SETUP_BLOCK_WITH); |
| 1751 | c_assign(comp, pns->nodes[1], ASSIGN_STORE); |
| 1752 | } else { |
| 1753 | // this pre-bit is just an expression |
| 1754 | compile_node(comp, nodes[0]); |
| 1755 | compile_increase_except_level(comp, l_end, MP_EMIT_SETUP_BLOCK_WITH); |
| 1756 | EMIT(pop_top); |
| 1757 | } |
| 1758 | // compile additional pre-bits and the body |
| 1759 | compile_with_stmt_helper(comp, n - 1, nodes + 1, body); |
| 1760 | // finish this with block |
| 1761 | EMIT_ARG(with_cleanup, l_end); |
| 1762 | reserve_labels_for_native(comp, 3); // used by native's with_cleanup |
| 1763 | compile_decrease_except_level(comp); |
| 1764 | } |
| 1765 | } |
| 1766 | |
| 1767 | STATIC void compile_with_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1768 | // get the nodes for the pre-bit of the with (the a as b, c as d, ... bit) |
| 1769 | mp_parse_node_t *nodes; |
| 1770 | size_t n = mp_parse_node_extract_list(&pns->nodes[0], PN_with_stmt_list, &nodes); |
| 1771 | assert(n > 0); |
| 1772 | |
| 1773 | // compile in a nested fashion |
| 1774 | compile_with_stmt_helper(comp, n, nodes, pns->nodes[1]); |
| 1775 | } |
| 1776 | |
| 1777 | STATIC void compile_yield_from(compiler_t *comp) { |
| 1778 | EMIT_ARG(get_iter, false); |
| 1779 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 1780 | EMIT_ARG(yield, MP_EMIT_YIELD_FROM); |
| 1781 | reserve_labels_for_native(comp, 3); |
| 1782 | } |
| 1783 | |
| 1784 | #if MICROPY_PY_ASYNC_AWAIT |
| 1785 | STATIC void compile_await_object_method(compiler_t *comp, qstr method) { |
| 1786 | EMIT_ARG(load_method, method, false); |
| 1787 | EMIT_ARG(call_method, 0, 0, 0); |
| 1788 | compile_yield_from(comp); |
| 1789 | } |
| 1790 | |
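// Compiles "async for <var> in <expr>: <body> else: <else>".  Roughly:
//   store (<expr>).__aiter__() in a named variable
//   loop:
//     try:
//       <var> = await __anext__()
//     except StopAsyncIteration:
//       goto else-block
//     <body>; goto loop
//   else-block: <else>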
| 1791 | STATIC void compile_async_for_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1792 | // comp->break_label |= MP_EMIT_BREAK_FROM_FOR; |
| 1793 | |
| 1794 | qstr context = MP_PARSE_NODE_LEAF_ARG(pns->nodes[1]); |
| 1795 | uint while_else_label = comp_next_label(comp); |
| 1796 | uint try_exception_label = comp_next_label(comp); |
| 1797 | uint try_else_label = comp_next_label(comp); |
| 1798 | uint try_finally_label = comp_next_label(comp); |
| 1799 | |
| 1800 | compile_node(comp, pns->nodes[1]); // iterator |
| 1801 | EMIT_ARG(load_method, MP_QSTR___aiter__, false); |
| 1802 | EMIT_ARG(call_method, 0, 0, 0); |
| 1803 | compile_store_id(comp, context); |
| 1804 | |
| 1805 | START_BREAK_CONTINUE_BLOCK |
| 1806 | |
| 1807 | EMIT_ARG(label_assign, continue_label); |
| 1808 | |
| 1809 | compile_increase_except_level(comp, try_exception_label, MP_EMIT_SETUP_BLOCK_EXCEPT); |
| 1810 | |
| 1811 | compile_load_id(comp, context); |
| 1812 | compile_await_object_method(comp, MP_QSTR___anext__); |
| 1813 | c_assign(comp, pns->nodes[0], ASSIGN_STORE); // variable |
| 1814 | EMIT_ARG(pop_except_jump, try_else_label, false); |
| 1815 | |
| 1816 | EMIT_ARG(label_assign, try_exception_label); |
| 1817 | EMIT(start_except_handler); |
| 1818 | EMIT(dup_top); |
| 1819 | EMIT_LOAD_GLOBAL(MP_QSTR_StopAsyncIteration); |
| 1820 | EMIT_ARG(binary_op, MP_BINARY_OP_EXCEPTION_MATCH); |
| 1821 | EMIT_ARG(pop_jump_if, false, try_finally_label); |
| 1822 | EMIT(pop_top); // pop exception instance |
| 1823 | EMIT_ARG(pop_except_jump, while_else_label, true); |
| 1824 | |
| 1825 | EMIT_ARG(label_assign, try_finally_label); |
| 1826 | EMIT_ARG(adjust_stack_size, 1); // if we jump here, the exc is on the stack |
| 1827 | compile_decrease_except_level(comp); |
| 1828 | EMIT(end_except_handler); |
| 1829 | |
| 1830 | EMIT_ARG(label_assign, try_else_label); |
| 1831 | compile_node(comp, pns->nodes[2]); // body |
| 1832 | |
| 1833 | EMIT_ARG(jump, continue_label); |
| 1834 | // break/continue apply to outer loop (if any) in the else block |
| 1835 | END_BREAK_CONTINUE_BLOCK |
| 1836 | |
| 1837 | EMIT_ARG(label_assign, while_else_label); |
| 1838 | compile_node(comp, pns->nodes[3]); // else |
| 1839 | |
| 1840 | EMIT_ARG(label_assign, break_label); |
| 1841 | } |
| 1842 | |
| 1843 | STATIC void compile_async_with_stmt_helper(compiler_t *comp, size_t n, mp_parse_node_t *nodes, mp_parse_node_t body) { |
| 1844 | if (n == 0) { |
| 1845 | // no more pre-bits, compile the body of the with |
| 1846 | compile_node(comp, body); |
| 1847 | } else { |
| 1848 | uint l_finally_block = comp_next_label(comp); |
| 1849 | uint l_aexit_no_exc = comp_next_label(comp); |
| 1850 | uint l_ret_unwind_jump = comp_next_label(comp); |
| 1851 | uint l_end = comp_next_label(comp); |
| 1852 | |
| 1853 | if (MP_PARSE_NODE_IS_STRUCT_KIND(nodes[0], PN_with_item)) { |
| 1854 | // this pre-bit is of the form "a as b" |
| 1855 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)nodes[0]; |
| 1856 | compile_node(comp, pns->nodes[0]); |
| 1857 | EMIT(dup_top); |
| 1858 | compile_await_object_method(comp, MP_QSTR___aenter__); |
| 1859 | c_assign(comp, pns->nodes[1], ASSIGN_STORE); |
| 1860 | } else { |
| 1861 | // this pre-bit is just an expression |
| 1862 | compile_node(comp, nodes[0]); |
| 1863 | EMIT(dup_top); |
| 1864 | compile_await_object_method(comp, MP_QSTR___aenter__); |
| 1865 | EMIT(pop_top); |
| 1866 | } |
| 1867 | |
| 1868 | // To keep the Python stack size down, and because we can't access values on |
| 1869 | // this stack further down than 3 elements (via rot_three), we don't preload |
| 1870 | // __aexit__ (as per normal with) but rather wait until we need it below. |
| 1871 | |
| 1872 | // Start the try-finally statement |
| 1873 | compile_increase_except_level(comp, l_finally_block, MP_EMIT_SETUP_BLOCK_FINALLY); |
| 1874 | |
| 1875 | // Compile any additional pre-bits of the "async with", and also the body |
| 1876 | EMIT_ARG(adjust_stack_size, 3); // stack adjust for possible UNWIND_JUMP state |
| 1877 | compile_async_with_stmt_helper(comp, n - 1, nodes + 1, body); |
| 1878 | EMIT_ARG(adjust_stack_size, -3); |
| 1879 | |
| 1880 | // We have now finished the "try" block and fall through to the "finally" |
| 1881 | |
| 1882 | // At this point, after the with body has executed, we have 3 cases: |
| 1883 | // 1. no exception, we just fall through to this point; stack: (..., ctx_mgr) |
| 1884 | // 2. exception propagating out, we get to the finally block; stack: (..., ctx_mgr, exc) |
| 1885 | // 3. return or unwind jump, we get to the finally block; stack: (..., ctx_mgr, X, INT) |
| 1886 | |
| 1887 | // Handle case 1: call __aexit__ |
| 1888 | // Stack: (..., ctx_mgr) |
| 1889 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); // to tell end_finally there's no exception |
| 1890 | EMIT(rot_two); |
| 1891 | EMIT_ARG(jump, l_aexit_no_exc); // jump to code below to call __aexit__ |
| 1892 | |
| 1893 | // Start of "finally" block |
| 1894 | // At this point we have case 2 or 3, we detect which one by the TOS being an exception or not |
| 1895 | EMIT_ARG(label_assign, l_finally_block); |
| 1896 | |
| 1897 | // Detect if TOS an exception or not |
| 1898 | EMIT(dup_top); |
| 1899 | EMIT_LOAD_GLOBAL(MP_QSTR_BaseException); |
| 1900 | EMIT_ARG(binary_op, MP_BINARY_OP_EXCEPTION_MATCH); |
| 1901 | EMIT_ARG(pop_jump_if, false, l_ret_unwind_jump); // if not an exception then we have case 3 |
| 1902 | |
| 1903 | // Handle case 2: call __aexit__ and either swallow or re-raise the exception |
| 1904 | // Stack: (..., ctx_mgr, exc) |
| 1905 | EMIT(dup_top); |
| 1906 | EMIT(rot_three); |
| 1907 | EMIT(rot_two); |
| 1908 | EMIT_ARG(load_method, MP_QSTR___aexit__, false); |
| 1909 | EMIT(rot_three); |
| 1910 | EMIT(rot_three); |
| 1911 | EMIT(dup_top); |
| 1912 | #if MICROPY_CPYTHON_COMPAT |
| 1913 | EMIT_ARG(attr, MP_QSTR___class__, MP_EMIT_ATTR_LOAD); // get type(exc) |
| 1914 | #else |
| 1915 | compile_load_id(comp, MP_QSTR_type); |
| 1916 | EMIT(rot_two); |
| 1917 | EMIT_ARG(call_function, 1, 0, 0); // get type(exc) |
| 1918 | #endif |
| 1919 | EMIT(rot_two); |
| 1920 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); // dummy traceback value |
| 1921 | // Stack: (..., exc, __aexit__, ctx_mgr, type(exc), exc, None) |
| 1922 | EMIT_ARG(call_method, 3, 0, 0); |
| 1923 | compile_yield_from(comp); |
| 1924 | EMIT_ARG(pop_jump_if, false, l_end); |
| 1925 | EMIT(pop_top); // pop exception |
| 1926 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); // replace with None to swallow exception |
| 1927 | EMIT_ARG(jump, l_end); |
| 1928 | EMIT_ARG(adjust_stack_size, 2); |
| 1929 | |
| 1930 | // Handle case 3: call __aexit__ |
| 1931 | // Stack: (..., ctx_mgr, X, INT) |
| 1932 | EMIT_ARG(label_assign, l_ret_unwind_jump); |
| 1933 | EMIT(rot_three); |
| 1934 | EMIT(rot_three); |
| 1935 | EMIT_ARG(label_assign, l_aexit_no_exc); |
| 1936 | EMIT_ARG(load_method, MP_QSTR___aexit__, false); |
| 1937 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 1938 | EMIT(dup_top); |
| 1939 | EMIT(dup_top); |
| 1940 | EMIT_ARG(call_method, 3, 0, 0); |
| 1941 | compile_yield_from(comp); |
| 1942 | EMIT(pop_top); |
| 1943 | EMIT_ARG(adjust_stack_size, -1); |
| 1944 | |
| 1945 | // End of "finally" block |
| 1946 | // Stack can have one of three configurations: |
| 1947 | // a. (..., None) - from either case 1, or case 2 with swallowed exception |
| 1948 | // b. (..., exc) - from case 2 with re-raised exception |
| 1949 | // c. (..., X, INT) - from case 3 |
| 1950 | EMIT_ARG(label_assign, l_end); |
| 1951 | compile_decrease_except_level(comp); |
| 1952 | } |
| 1953 | } |
| 1954 | |
| 1955 | STATIC void compile_async_with_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1956 | // get the nodes for the pre-bit of the with (the a as b, c as d, ... bit) |
| 1957 | mp_parse_node_t *nodes; |
| 1958 | size_t n = mp_parse_node_extract_list(&pns->nodes[0], PN_with_stmt_list, &nodes); |
| 1959 | assert(n > 0); |
| 1960 | |
| 1961 | // compile in a nested fashion |
| 1962 | compile_async_with_stmt_helper(comp, n, nodes, pns->nodes[1]); |
| 1963 | } |
| 1964 | |
| 1965 | STATIC void compile_async_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1966 | assert(MP_PARSE_NODE_IS_STRUCT(pns->nodes[0])); |
| 1967 | mp_parse_node_struct_t *pns0 = (mp_parse_node_struct_t *)pns->nodes[0]; |
| 1968 | if (MP_PARSE_NODE_STRUCT_KIND(pns0) == PN_funcdef) { |
| 1969 | // async def |
| 1970 | compile_funcdef(comp, pns0); |
| 1971 | scope_t *fscope = (scope_t *)pns0->nodes[4]; |
| 1972 | fscope->scope_flags |= MP_SCOPE_FLAG_GENERATOR; |
| 1973 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns0) == PN_for_stmt) { |
| 1974 | // async for |
| 1975 | compile_async_for_stmt(comp, pns0); |
| 1976 | } else { |
| 1977 | // async with |
| 1978 | assert(MP_PARSE_NODE_STRUCT_KIND(pns0) == PN_with_stmt); |
| 1979 | compile_async_with_stmt(comp, pns0); |
| 1980 | } |
| 1981 | } |
| 1982 | #endif |
| 1983 | |
| 1984 | STATIC void compile_expr_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 1985 | mp_parse_node_t pn_rhs = pns->nodes[1]; |
| 1986 | if (MP_PARSE_NODE_IS_NULL(pn_rhs)) { |
| 1987 | if (comp->is_repl && comp->scope_cur->kind == SCOPE_MODULE) { |
| 1988 | // for REPL, evaluate then print the expression |
| 1989 | compile_load_id(comp, MP_QSTR___repl_print__); |
| 1990 | compile_node(comp, pns->nodes[0]); |
| 1991 | EMIT_ARG(call_function, 1, 0, 0); |
| 1992 | EMIT(pop_top); |
| 1993 | |
| 1994 | } else { |
| 1995 | // for non-REPL, evaluate then discard the expression |
| 1996 | if ((MP_PARSE_NODE_IS_LEAF(pns->nodes[0]) && !MP_PARSE_NODE_IS_ID(pns->nodes[0])) |
| 1997 | || MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_const_object)) { |
| 1998 | // do nothing with a lonely constant |
| 1999 | } else { |
| 2000 | compile_node(comp, pns->nodes[0]); // just an expression |
| 2001 | EMIT(pop_top); // discard last result since this is a statement and leaves nothing on the stack |
| 2002 | } |
| 2003 | } |
| 2004 | } else if (MP_PARSE_NODE_IS_STRUCT(pn_rhs)) { |
| 2005 | mp_parse_node_struct_t *pns1 = (mp_parse_node_struct_t *)pn_rhs; |
| 2006 | int kind = MP_PARSE_NODE_STRUCT_KIND(pns1); |
| 2007 | if (kind == PN_annassign) { |
| 2008 | // the annotation is in pns1->nodes[0] and is ignored |
| 2009 | if (MP_PARSE_NODE_IS_NULL(pns1->nodes[1])) { |
| 2010 | // an annotation of the form "x: y" |
| 2011 | // inside a function this declares "x" as a local |
| 2012 | if (comp->scope_cur->kind == SCOPE_FUNCTION) { |
| 2013 | if (MP_PARSE_NODE_IS_ID(pns->nodes[0])) { |
| 2014 | qstr lhs = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]); |
| 2015 | scope_find_or_add_id(comp->scope_cur, lhs, ID_INFO_KIND_LOCAL); |
| 2016 | } |
| 2017 | } |
| 2018 | } else { |
| 2019 | // an assigned annotation of the form "x: y = z" |
| 2020 | pn_rhs = pns1->nodes[1]; |
| 2021 | goto plain_assign; |
| 2022 | } |
| 2023 | } else if (kind == PN_expr_stmt_augassign) { |
| 2024 | c_assign(comp, pns->nodes[0], ASSIGN_AUG_LOAD); // lhs load for aug assign |
| 2025 | compile_node(comp, pns1->nodes[1]); // rhs |
| 2026 | assert(MP_PARSE_NODE_IS_TOKEN(pns1->nodes[0])); |
| 2027 | mp_token_kind_t tok = MP_PARSE_NODE_LEAF_ARG(pns1->nodes[0]); |
| 2028 | mp_binary_op_t op = MP_BINARY_OP_INPLACE_OR + (tok - MP_TOKEN_DEL_PIPE_EQUAL); |
| 2029 | EMIT_ARG(binary_op, op); |
| 2030 | c_assign(comp, pns->nodes[0], ASSIGN_AUG_STORE); // lhs store for aug assign |
| 2031 | } else if (kind == PN_expr_stmt_assign_list) { |
| 2032 | int rhs = MP_PARSE_NODE_STRUCT_NUM_NODES(pns1) - 1; |
| 2033 | compile_node(comp, pns1->nodes[rhs]); // rhs |
| 2034 | // following CPython, we store left-most first |
| 2035 | if (rhs > 0) { |
| 2036 | EMIT(dup_top); |
| 2037 | } |
| 2038 | c_assign(comp, pns->nodes[0], ASSIGN_STORE); // lhs store |
| 2039 | for (int i = 0; i < rhs; i++) { |
| 2040 | if (i + 1 < rhs) { |
| 2041 | EMIT(dup_top); |
| 2042 | } |
| 2043 | c_assign(comp, pns1->nodes[i], ASSIGN_STORE); // middle store |
| 2044 | } |
| 2045 | } else { |
| 2046 | plain_assign: |
| 2047 | #if MICROPY_COMP_DOUBLE_TUPLE_ASSIGN |
| 2048 | if (MP_PARSE_NODE_IS_STRUCT_KIND(pn_rhs, PN_testlist_star_expr) |
| 2049 | && MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_testlist_star_expr)) { |
| 2050 | mp_parse_node_struct_t *pns0 = (mp_parse_node_struct_t *)pns->nodes[0]; |
| 2051 | pns1 = (mp_parse_node_struct_t *)pn_rhs; |
| 2052 | uint32_t n_pns0 = MP_PARSE_NODE_STRUCT_NUM_NODES(pns0); |
| 2053 | // Can only optimise a tuple-to-tuple assignment when all of the following hold: |
| 2054 | // - equal number of items in LHS and RHS tuples |
| 2055 | // - 2 or 3 items in the tuples |
| 2056 | // - there are no star expressions in the LHS tuple |
| 2057 | if (n_pns0 == MP_PARSE_NODE_STRUCT_NUM_NODES(pns1) |
| 2058 | && (n_pns0 == 2 |
| 2059 | #if MICROPY_COMP_TRIPLE_TUPLE_ASSIGN |
| 2060 | || n_pns0 == 3 |
| 2061 | #endif |
| 2062 | ) |
| 2063 | && !MP_PARSE_NODE_IS_STRUCT_KIND(pns0->nodes[0], PN_star_expr) |
| 2064 | && !MP_PARSE_NODE_IS_STRUCT_KIND(pns0->nodes[1], PN_star_expr) |
| 2065 | #if MICROPY_COMP_TRIPLE_TUPLE_ASSIGN |
| 2066 | && (n_pns0 == 2 || !MP_PARSE_NODE_IS_STRUCT_KIND(pns0->nodes[2], PN_star_expr)) |
| 2067 | #endif |
| 2068 | ) { |
| 2069 | // Optimisation for a, b = c, d or a, b, c = d, e, f |
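                    // e.g. "a, b = c, d" emits: push c; push d; rot_two; store a; store b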
| 2070 | compile_node(comp, pns1->nodes[0]); // rhs |
| 2071 | compile_node(comp, pns1->nodes[1]); // rhs |
| 2072 | #if MICROPY_COMP_TRIPLE_TUPLE_ASSIGN |
| 2073 | if (n_pns0 == 3) { |
| 2074 | compile_node(comp, pns1->nodes[2]); // rhs |
| 2075 | EMIT(rot_three); |
| 2076 | } |
| 2077 | #endif |
| 2078 | EMIT(rot_two); |
| 2079 | c_assign(comp, pns0->nodes[0], ASSIGN_STORE); // lhs store |
| 2080 | c_assign(comp, pns0->nodes[1], ASSIGN_STORE); // lhs store |
| 2081 | #if MICROPY_COMP_TRIPLE_TUPLE_ASSIGN |
| 2082 | if (n_pns0 == 3) { |
| 2083 | c_assign(comp, pns0->nodes[2], ASSIGN_STORE); // lhs store |
| 2084 | } |
| 2085 | #endif |
| 2086 | return; |
| 2087 | } |
| 2088 | } |
| 2089 | #endif |
| 2090 | |
| 2091 | compile_node(comp, pn_rhs); // rhs |
| 2092 | c_assign(comp, pns->nodes[0], ASSIGN_STORE); // lhs store |
| 2093 | } |
| 2094 | } else { |
| 2095 | goto plain_assign; |
| 2096 | } |
| 2097 | } |
| 2098 | |
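// Compiles a conditional expression "x if <cond> else y": the condition is
// evaluated first and then exactly one of the two value expressions runs.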
| 2099 | STATIC void compile_test_if_expr(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2100 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[1], PN_test_if_else)); |
| 2101 | mp_parse_node_struct_t *pns_test_if_else = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 2102 | |
| 2103 | uint l_fail = comp_next_label(comp); |
| 2104 | uint l_end = comp_next_label(comp); |
| 2105 | c_if_cond(comp, pns_test_if_else->nodes[0], false, l_fail); // condition |
| 2106 | compile_node(comp, pns->nodes[0]); // success value |
| 2107 | EMIT_ARG(jump, l_end); |
| 2108 | EMIT_ARG(label_assign, l_fail); |
    EMIT_ARG(adjust_stack_size, -1); // the success value is not on the stack in this branch
| 2110 | compile_node(comp, pns_test_if_else->nodes[1]); // failure value |
| 2111 | EMIT_ARG(label_assign, l_end); |
| 2112 | } |
| 2113 | |
| 2114 | STATIC void compile_lambdef(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2115 | if (comp->pass == MP_PASS_SCOPE) { |
| 2116 | // create a new scope for this lambda |
| 2117 | scope_t *s = scope_new_and_link(comp, SCOPE_LAMBDA, (mp_parse_node_t)pns, comp->scope_cur->emit_options); |
| 2118 | // store the lambda scope so the compiling function (this one) can use it at each pass |
| 2119 | pns->nodes[2] = (mp_parse_node_t)s; |
| 2120 | } |
| 2121 | |
| 2122 | // get the scope for this lambda |
| 2123 | scope_t *this_scope = (scope_t *)pns->nodes[2]; |
| 2124 | |
| 2125 | // compile the lambda definition |
| 2126 | compile_funcdef_lambdef(comp, this_scope, pns->nodes[0], PN_varargslist); |
| 2127 | } |
| 2128 | |
| 2129 | #if MICROPY_PY_ASSIGN_EXPR |
| 2130 | STATIC void compile_namedexpr_helper(compiler_t *comp, mp_parse_node_t pn_name, mp_parse_node_t pn_expr) { |
| 2131 | if (!MP_PARSE_NODE_IS_ID(pn_name)) { |
        compile_syntax_error(comp, (mp_parse_node_t)pn_name, MP_ERROR_TEXT("can't assign to expression"));
| 2133 | } |
| 2134 | compile_node(comp, pn_expr); |
| 2135 | EMIT(dup_top); |
| 2136 | scope_t *old_scope = comp->scope_cur; |
| 2137 | if (SCOPE_IS_COMP_LIKE(comp->scope_cur->kind)) { |
| 2138 | // Use parent's scope for assigned value so it can "escape" |
| 2139 | comp->scope_cur = comp->scope_cur->parent; |
| 2140 | } |
| 2141 | compile_store_id(comp, MP_PARSE_NODE_LEAF_ARG(pn_name)); |
| 2142 | comp->scope_cur = old_scope; |
| 2143 | } |
| 2144 | |
| 2145 | STATIC void compile_namedexpr(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2146 | compile_namedexpr_helper(comp, pns->nodes[0], pns->nodes[1]); |
| 2147 | } |
| 2148 | #endif |
| 2149 | |
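// Compiles "a or b or c" / "a and b and c" using jump_if_or_pop, so that
// evaluation short-circuits at the first deciding operand and that operand's
// value is left on the stack as the result.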
| 2150 | STATIC void compile_or_and_test(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2151 | bool cond = MP_PARSE_NODE_STRUCT_KIND(pns) == PN_or_test; |
| 2152 | uint l_end = comp_next_label(comp); |
| 2153 | int n = MP_PARSE_NODE_STRUCT_NUM_NODES(pns); |
| 2154 | for (int i = 0; i < n; i += 1) { |
| 2155 | compile_node(comp, pns->nodes[i]); |
| 2156 | if (i + 1 < n) { |
| 2157 | EMIT_ARG(jump_if_or_pop, cond, l_end); |
| 2158 | } |
| 2159 | } |
| 2160 | EMIT_ARG(label_assign, l_end); |
| 2161 | } |
| 2162 | |
| 2163 | STATIC void compile_not_test_2(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2164 | compile_node(comp, pns->nodes[0]); |
| 2165 | EMIT_ARG(unary_op, MP_UNARY_OP_NOT); |
| 2166 | } |
| 2167 | |
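// Compiles a (possibly chained) comparison such as "a < b <= c".  For chains,
// the shared middle operand is duplicated with dup_top/rot_three and the
// whole expression short-circuits to l_fail as soon as one link is false, so
// each operand is evaluated at most once.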
| 2168 | STATIC void compile_comparison(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2169 | int num_nodes = MP_PARSE_NODE_STRUCT_NUM_NODES(pns); |
| 2170 | compile_node(comp, pns->nodes[0]); |
| 2171 | bool multi = (num_nodes > 3); |
| 2172 | uint l_fail = 0; |
| 2173 | if (multi) { |
| 2174 | l_fail = comp_next_label(comp); |
| 2175 | } |
| 2176 | for (int i = 1; i + 1 < num_nodes; i += 2) { |
| 2177 | compile_node(comp, pns->nodes[i + 1]); |
| 2178 | if (i + 2 < num_nodes) { |
| 2179 | EMIT(dup_top); |
| 2180 | EMIT(rot_three); |
| 2181 | } |
| 2182 | if (MP_PARSE_NODE_IS_TOKEN(pns->nodes[i])) { |
| 2183 | mp_token_kind_t tok = MP_PARSE_NODE_LEAF_ARG(pns->nodes[i]); |
| 2184 | mp_binary_op_t op; |
| 2185 | if (tok == MP_TOKEN_KW_IN) { |
| 2186 | op = MP_BINARY_OP_IN; |
| 2187 | } else { |
| 2188 | op = MP_BINARY_OP_LESS + (tok - MP_TOKEN_OP_LESS); |
| 2189 | } |
| 2190 | EMIT_ARG(binary_op, op); |
| 2191 | } else { |
| 2192 | assert(MP_PARSE_NODE_IS_STRUCT(pns->nodes[i])); // should be |
| 2193 | mp_parse_node_struct_t *pns2 = (mp_parse_node_struct_t *)pns->nodes[i]; |
| 2194 | int kind = MP_PARSE_NODE_STRUCT_KIND(pns2); |
| 2195 | if (kind == PN_comp_op_not_in) { |
| 2196 | EMIT_ARG(binary_op, MP_BINARY_OP_NOT_IN); |
| 2197 | } else { |
| 2198 | assert(kind == PN_comp_op_is); // should be |
| 2199 | if (MP_PARSE_NODE_IS_NULL(pns2->nodes[0])) { |
| 2200 | EMIT_ARG(binary_op, MP_BINARY_OP_IS); |
| 2201 | } else { |
| 2202 | EMIT_ARG(binary_op, MP_BINARY_OP_IS_NOT); |
| 2203 | } |
| 2204 | } |
| 2205 | } |
| 2206 | if (i + 2 < num_nodes) { |
| 2207 | EMIT_ARG(jump_if_or_pop, false, l_fail); |
| 2208 | } |
| 2209 | } |
| 2210 | if (multi) { |
| 2211 | uint l_end = comp_next_label(comp); |
| 2212 | EMIT_ARG(jump, l_end); |
| 2213 | EMIT_ARG(label_assign, l_fail); |
| 2214 | EMIT_ARG(adjust_stack_size, 1); |
| 2215 | EMIT(rot_two); |
| 2216 | EMIT(pop_top); |
| 2217 | EMIT_ARG(label_assign, l_end); |
| 2218 | } |
| 2219 | } |
| 2220 | |
| 2221 | STATIC void compile_star_expr(compiler_t *comp, mp_parse_node_struct_t *pns) { |
    compile_syntax_error(comp, (mp_parse_node_t)pns, MP_ERROR_TEXT("*x must be assignment target"));
| 2223 | } |
| 2224 | |
| 2225 | STATIC void compile_binary_op(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2226 | MP_STATIC_ASSERT(MP_BINARY_OP_OR + PN_xor_expr - PN_expr == MP_BINARY_OP_XOR); |
| 2227 | MP_STATIC_ASSERT(MP_BINARY_OP_OR + PN_and_expr - PN_expr == MP_BINARY_OP_AND); |
| 2228 | mp_binary_op_t binary_op = MP_BINARY_OP_OR + MP_PARSE_NODE_STRUCT_KIND(pns) - PN_expr; |
| 2229 | int num_nodes = MP_PARSE_NODE_STRUCT_NUM_NODES(pns); |
| 2230 | compile_node(comp, pns->nodes[0]); |
| 2231 | for (int i = 1; i < num_nodes; ++i) { |
| 2232 | compile_node(comp, pns->nodes[i]); |
| 2233 | EMIT_ARG(binary_op, binary_op); |
| 2234 | } |
| 2235 | } |
| 2236 | |
| 2237 | STATIC void compile_term(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2238 | int num_nodes = MP_PARSE_NODE_STRUCT_NUM_NODES(pns); |
| 2239 | compile_node(comp, pns->nodes[0]); |
| 2240 | for (int i = 1; i + 1 < num_nodes; i += 2) { |
| 2241 | compile_node(comp, pns->nodes[i + 1]); |
| 2242 | mp_token_kind_t tok = MP_PARSE_NODE_LEAF_ARG(pns->nodes[i]); |
| 2243 | mp_binary_op_t op = MP_BINARY_OP_LSHIFT + (tok - MP_TOKEN_OP_DBL_LESS); |
| 2244 | EMIT_ARG(binary_op, op); |
| 2245 | } |
| 2246 | } |
| 2247 | |
| 2248 | STATIC void compile_factor_2(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2249 | compile_node(comp, pns->nodes[1]); |
| 2250 | mp_token_kind_t tok = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]); |
| 2251 | mp_unary_op_t op; |
| 2252 | if (tok == MP_TOKEN_OP_TILDE) { |
| 2253 | op = MP_UNARY_OP_INVERT; |
| 2254 | } else { |
| 2255 | assert(tok == MP_TOKEN_OP_PLUS || tok == MP_TOKEN_OP_MINUS); |
| 2256 | op = MP_UNARY_OP_POSITIVE + (tok - MP_TOKEN_OP_PLUS); |
| 2257 | } |
| 2258 | EMIT_ARG(unary_op, op); |
| 2259 | } |
| 2260 | |
| 2261 | STATIC void compile_atom_expr_normal(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2262 | // compile the subject of the expression |
| 2263 | compile_node(comp, pns->nodes[0]); |
| 2264 | |
| 2265 | // compile_atom_expr_await may call us with a NULL node |
| 2266 | if (MP_PARSE_NODE_IS_NULL(pns->nodes[1])) { |
| 2267 | return; |
| 2268 | } |
| 2269 | |
| 2270 | // get the array of trailers (known to be an array of PARSE_NODE_STRUCT) |
| 2271 | size_t num_trail = 1; |
| 2272 | mp_parse_node_struct_t **pns_trail = (mp_parse_node_struct_t **)&pns->nodes[1]; |
| 2273 | if (MP_PARSE_NODE_STRUCT_KIND(pns_trail[0]) == PN_atom_expr_trailers) { |
| 2274 | num_trail = MP_PARSE_NODE_STRUCT_NUM_NODES(pns_trail[0]); |
| 2275 | pns_trail = (mp_parse_node_struct_t **)&pns_trail[0]->nodes[0]; |
| 2276 | } |
| 2277 | |
| 2278 | // the current index into the array of trailers |
| 2279 | size_t i = 0; |
| 2280 | |
| 2281 | // handle special super() call |
| 2282 | if (comp->scope_cur->kind == SCOPE_FUNCTION |
| 2283 | && MP_PARSE_NODE_IS_ID(pns->nodes[0]) |
| 2284 | && MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]) == MP_QSTR_super |
| 2285 | && MP_PARSE_NODE_STRUCT_KIND(pns_trail[0]) == PN_trailer_paren |
| 2286 | && MP_PARSE_NODE_IS_NULL(pns_trail[0]->nodes[0])) { |
| 2287 | // at this point we have matched "super()" within a function |
| 2288 | |
| 2289 | // load the class for super to search for a parent |
| 2290 | compile_load_id(comp, MP_QSTR___class__); |
| 2291 | |
| 2292 | // look for first argument to function (assumes it's "self") |
| 2293 | bool found = false; |
| 2294 | id_info_t *id = &comp->scope_cur->id_info[0]; |
| 2295 | for (size_t n = comp->scope_cur->id_info_len; n > 0; --n, ++id) { |
| 2296 | if (id->flags & ID_FLAG_IS_PARAM) { |
| 2297 | // first argument found; load it |
| 2298 | compile_load_id(comp, id->qst); |
| 2299 | found = true; |
| 2300 | break; |
| 2301 | } |
| 2302 | } |
| 2303 | if (!found) { |
| 2304 | compile_syntax_error(comp, (mp_parse_node_t)pns_trail[0], |
                MP_ERROR_TEXT("super() can't find self")); // really a TypeError
| 2306 | return; |
| 2307 | } |
| 2308 | |
| 2309 | if (num_trail >= 3 |
| 2310 | && MP_PARSE_NODE_STRUCT_KIND(pns_trail[1]) == PN_trailer_period |
| 2311 | && MP_PARSE_NODE_STRUCT_KIND(pns_trail[2]) == PN_trailer_paren) { |
| 2312 | // optimisation for method calls super().f(...), to eliminate heap allocation |
| 2313 | mp_parse_node_struct_t *pns_period = pns_trail[1]; |
| 2314 | mp_parse_node_struct_t *pns_paren = pns_trail[2]; |
| 2315 | EMIT_ARG(load_method, MP_PARSE_NODE_LEAF_ARG(pns_period->nodes[0]), true); |
| 2316 | compile_trailer_paren_helper(comp, pns_paren->nodes[0], true, 0); |
| 2317 | i = 3; |
| 2318 | } else { |
| 2319 | // a super() call |
| 2320 | EMIT_ARG(call_function, 2, 0, 0); |
| 2321 | i = 1; |
| 2322 | } |
| 2323 | |
| 2324 | #if MICROPY_COMP_CONST_LITERAL && MICROPY_PY_COLLECTIONS_ORDEREDDICT |
| 2325 | // handle special OrderedDict constructor |
| 2326 | } else if (MP_PARSE_NODE_IS_ID(pns->nodes[0]) |
| 2327 | && MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]) == MP_QSTR_OrderedDict |
| 2328 | && MP_PARSE_NODE_STRUCT_KIND(pns_trail[0]) == PN_trailer_paren |
| 2329 | && MP_PARSE_NODE_IS_STRUCT_KIND(pns_trail[0]->nodes[0], PN_atom_brace)) { |
| 2330 | // at this point we have matched "OrderedDict({...})" |
| 2331 | |
| 2332 | EMIT_ARG(call_function, 0, 0, 0); |
| 2333 | mp_parse_node_struct_t *pns_dict = (mp_parse_node_struct_t *)pns_trail[0]->nodes[0]; |
| 2334 | compile_atom_brace_helper(comp, pns_dict, false); |
| 2335 | i = 1; |
| 2336 | #endif |
| 2337 | } |
| 2338 | |
| 2339 | // compile the remaining trailers |
| 2340 | for (; i < num_trail; i++) { |
| 2341 | if (i + 1 < num_trail |
| 2342 | && MP_PARSE_NODE_STRUCT_KIND(pns_trail[i]) == PN_trailer_period |
| 2343 | && MP_PARSE_NODE_STRUCT_KIND(pns_trail[i + 1]) == PN_trailer_paren) { |
| 2344 | // optimisation for method calls a.f(...), following PyPy |
| 2345 | mp_parse_node_struct_t *pns_period = pns_trail[i]; |
| 2346 | mp_parse_node_struct_t *pns_paren = pns_trail[i + 1]; |
| 2347 | EMIT_ARG(load_method, MP_PARSE_NODE_LEAF_ARG(pns_period->nodes[0]), false); |
| 2348 | compile_trailer_paren_helper(comp, pns_paren->nodes[0], true, 0); |
| 2349 | i += 1; |
| 2350 | } else { |
| 2351 | // node is one of: trailer_paren, trailer_bracket, trailer_period |
| 2352 | compile_node(comp, (mp_parse_node_t)pns_trail[i]); |
| 2353 | } |
| 2354 | } |
| 2355 | } |
| 2356 | |
| 2357 | STATIC void compile_power(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2358 | compile_generic_all_nodes(comp, pns); // 2 nodes, arguments of power |
| 2359 | EMIT_ARG(binary_op, MP_BINARY_OP_POWER); |
| 2360 | } |
| 2361 | |
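// Compiles the argument list of a call.  The callable (and, for a method
// call, the self object) is already on the stack; positional arguments are
// pushed in order, keyword arguments as name/value pairs, and any */**
// arguments last, with load_null as a placeholder for whichever of the two is
// absent.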
STATIC void compile_trailer_paren_helper(compiler_t *comp, mp_parse_node_t pn_arglist, bool is_method_call, int n_positional_extra) {
| 2363 | // function to call is on top of stack |
| 2364 | |
| 2365 | // get the list of arguments |
| 2366 | mp_parse_node_t *args; |
| 2367 | size_t n_args = mp_parse_node_extract_list(&pn_arglist, PN_arglist, &args); |
| 2368 | |
| 2369 | // compile the arguments |
| 2370 | // Rather than calling compile_node on the list, we go through the list of args |
| 2371 | // explicitly here so that we can count the number of arguments and give sensible |
| 2372 | // error messages. |
| 2373 | int n_positional = n_positional_extra; |
| 2374 | uint n_keyword = 0; |
| 2375 | uint star_flags = 0; |
| 2376 | mp_parse_node_struct_t *star_args_node = NULL, *dblstar_args_node = NULL; |
| 2377 | for (size_t i = 0; i < n_args; i++) { |
| 2378 | if (MP_PARSE_NODE_IS_STRUCT(args[i])) { |
| 2379 | mp_parse_node_struct_t *pns_arg = (mp_parse_node_struct_t *)args[i]; |
| 2380 | if (MP_PARSE_NODE_STRUCT_KIND(pns_arg) == PN_arglist_star) { |
| 2381 | if (star_flags & MP_EMIT_STAR_FLAG_SINGLE) { |
                    compile_syntax_error(comp, (mp_parse_node_t)pns_arg, MP_ERROR_TEXT("can't have multiple *x"));
| 2383 | return; |
| 2384 | } |
| 2385 | star_flags |= MP_EMIT_STAR_FLAG_SINGLE; |
| 2386 | star_args_node = pns_arg; |
| 2387 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns_arg) == PN_arglist_dbl_star) { |
| 2388 | if (star_flags & MP_EMIT_STAR_FLAG_DOUBLE) { |
                    compile_syntax_error(comp, (mp_parse_node_t)pns_arg, MP_ERROR_TEXT("can't have multiple **x"));
| 2390 | return; |
| 2391 | } |
| 2392 | star_flags |= MP_EMIT_STAR_FLAG_DOUBLE; |
| 2393 | dblstar_args_node = pns_arg; |
| 2394 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns_arg) == PN_argument) { |
| 2395 | #if MICROPY_PY_ASSIGN_EXPR |
| 2396 | if (MP_PARSE_NODE_IS_STRUCT_KIND(pns_arg->nodes[1], PN_argument_3)) { |
| 2397 | compile_namedexpr_helper(comp, pns_arg->nodes[0], ((mp_parse_node_struct_t *)pns_arg->nodes[1])->nodes[0]); |
| 2398 | n_positional++; |
| 2399 | } else |
| 2400 | #endif |
| 2401 | if (!MP_PARSE_NODE_IS_STRUCT_KIND(pns_arg->nodes[1], PN_comp_for)) { |
| 2402 | if (!MP_PARSE_NODE_IS_ID(pns_arg->nodes[0])) { |
                        compile_syntax_error(comp, (mp_parse_node_t)pns_arg, MP_ERROR_TEXT("LHS of keyword arg must be an id"));
| 2404 | return; |
| 2405 | } |
| 2406 | EMIT_ARG(load_const_str, MP_PARSE_NODE_LEAF_ARG(pns_arg->nodes[0])); |
| 2407 | compile_node(comp, pns_arg->nodes[1]); |
| 2408 | n_keyword += 1; |
| 2409 | } else { |
| 2410 | compile_comprehension(comp, pns_arg, SCOPE_GEN_EXPR); |
| 2411 | n_positional++; |
| 2412 | } |
| 2413 | } else { |
| 2414 | goto normal_argument; |
| 2415 | } |
| 2416 | } else { |
| 2417 | normal_argument: |
| 2418 | if (star_flags) { |
                compile_syntax_error(comp, args[i], MP_ERROR_TEXT("non-keyword arg after */**"));
| 2420 | return; |
| 2421 | } |
| 2422 | if (n_keyword > 0) { |
                compile_syntax_error(comp, args[i], MP_ERROR_TEXT("non-keyword arg after keyword arg"));
| 2424 | return; |
| 2425 | } |
| 2426 | compile_node(comp, args[i]); |
| 2427 | n_positional++; |
| 2428 | } |
| 2429 | } |
| 2430 | |
| 2431 | // compile the star/double-star arguments if we had them |
| 2432 | // if we had one but not the other then we load "null" as a place holder |
| 2433 | if (star_flags != 0) { |
| 2434 | if (star_args_node == NULL) { |
| 2435 | EMIT(load_null); |
| 2436 | } else { |
| 2437 | compile_node(comp, star_args_node->nodes[0]); |
| 2438 | } |
| 2439 | if (dblstar_args_node == NULL) { |
| 2440 | EMIT(load_null); |
| 2441 | } else { |
| 2442 | compile_node(comp, dblstar_args_node->nodes[0]); |
| 2443 | } |
| 2444 | } |
| 2445 | |
| 2446 | // emit the function/method call |
| 2447 | if (is_method_call) { |
| 2448 | EMIT_ARG(call_method, n_positional, n_keyword, star_flags); |
| 2449 | } else { |
| 2450 | EMIT_ARG(call_function, n_positional, n_keyword, star_flags); |
| 2451 | } |
| 2452 | } |
| 2453 | |
| 2454 | // pns needs to have 2 nodes, first is lhs of comprehension, second is PN_comp_for node |
| 2455 | STATIC void compile_comprehension(compiler_t *comp, mp_parse_node_struct_t *pns, scope_kind_t kind) { |
| 2456 | assert(MP_PARSE_NODE_STRUCT_NUM_NODES(pns) == 2); |
| 2457 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[1], PN_comp_for)); |
| 2458 | mp_parse_node_struct_t *pns_comp_for = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 2459 | |
| 2460 | if (comp->pass == MP_PASS_SCOPE) { |
| 2461 | // create a new scope for this comprehension |
| 2462 | scope_t *s = scope_new_and_link(comp, kind, (mp_parse_node_t)pns, comp->scope_cur->emit_options); |
| 2463 | // store the comprehension scope so the compiling function (this one) can use it at each pass |
| 2464 | pns_comp_for->nodes[3] = (mp_parse_node_t)s; |
| 2465 | } |
| 2466 | |
| 2467 | // get the scope for this comprehension |
| 2468 | scope_t *this_scope = (scope_t *)pns_comp_for->nodes[3]; |
| 2469 | |
| 2470 | // compile the comprehension |
| 2471 | close_over_variables_etc(comp, this_scope, 0, 0); |
| 2472 | |
| 2473 | compile_node(comp, pns_comp_for->nodes[1]); // source of the iterator |
| 2474 | if (kind == SCOPE_GEN_EXPR) { |
| 2475 | EMIT_ARG(get_iter, false); |
| 2476 | } |
| 2477 | EMIT_ARG(call_function, 1, 0, 0); |
| 2478 | } |
| 2479 | |
| 2480 | STATIC void compile_atom_paren(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2481 | if (MP_PARSE_NODE_IS_NULL(pns->nodes[0])) { |
| 2482 | // an empty tuple |
| 2483 | c_tuple(comp, MP_PARSE_NODE_NULL, NULL); |
| 2484 | } else { |
| 2485 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_testlist_comp)); |
| 2486 | pns = (mp_parse_node_struct_t *)pns->nodes[0]; |
| 2487 | assert(!MP_PARSE_NODE_IS_NULL(pns->nodes[1])); |
| 2488 | if (MP_PARSE_NODE_IS_STRUCT(pns->nodes[1])) { |
| 2489 | mp_parse_node_struct_t *pns2 = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 2490 | if (MP_PARSE_NODE_STRUCT_KIND(pns2) == PN_testlist_comp_3b) { |
| 2491 | // tuple of one item, with trailing comma |
| 2492 | assert(MP_PARSE_NODE_IS_NULL(pns2->nodes[0])); |
| 2493 | c_tuple(comp, pns->nodes[0], NULL); |
| 2494 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns2) == PN_testlist_comp_3c) { |
| 2495 | // tuple of many items |
| 2496 | c_tuple(comp, pns->nodes[0], pns2); |
| 2497 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns2) == PN_comp_for) { |
| 2498 | // generator expression |
| 2499 | compile_comprehension(comp, pns, SCOPE_GEN_EXPR); |
| 2500 | } else { |
| 2501 | // tuple with 2 items |
| 2502 | goto tuple_with_2_items; |
| 2503 | } |
| 2504 | } else { |
| 2505 | // tuple with 2 items |
| 2506 | tuple_with_2_items: |
| 2507 | c_tuple(comp, MP_PARSE_NODE_NULL, pns); |
| 2508 | } |
| 2509 | } |
| 2510 | } |
| 2511 | |
| 2512 | STATIC void compile_atom_bracket(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2513 | if (MP_PARSE_NODE_IS_NULL(pns->nodes[0])) { |
| 2514 | // empty list |
| 2515 | EMIT_ARG(build, 0, MP_EMIT_BUILD_LIST); |
| 2516 | } else if (MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_testlist_comp)) { |
| 2517 | mp_parse_node_struct_t *pns2 = (mp_parse_node_struct_t *)pns->nodes[0]; |
| 2518 | if (MP_PARSE_NODE_IS_STRUCT(pns2->nodes[1])) { |
| 2519 | mp_parse_node_struct_t *pns3 = (mp_parse_node_struct_t *)pns2->nodes[1]; |
| 2520 | if (MP_PARSE_NODE_STRUCT_KIND(pns3) == PN_testlist_comp_3b) { |
| 2521 | // list of one item, with trailing comma |
| 2522 | assert(MP_PARSE_NODE_IS_NULL(pns3->nodes[0])); |
| 2523 | compile_node(comp, pns2->nodes[0]); |
| 2524 | EMIT_ARG(build, 1, MP_EMIT_BUILD_LIST); |
| 2525 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns3) == PN_testlist_comp_3c) { |
| 2526 | // list of many items |
| 2527 | compile_node(comp, pns2->nodes[0]); |
| 2528 | compile_generic_all_nodes(comp, pns3); |
| 2529 | EMIT_ARG(build, 1 + MP_PARSE_NODE_STRUCT_NUM_NODES(pns3), MP_EMIT_BUILD_LIST); |
| 2530 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns3) == PN_comp_for) { |
| 2531 | // list comprehension |
| 2532 | compile_comprehension(comp, pns2, SCOPE_LIST_COMP); |
| 2533 | } else { |
| 2534 | // list with 2 items |
| 2535 | goto list_with_2_items; |
| 2536 | } |
| 2537 | } else { |
| 2538 | // list with 2 items |
| 2539 | list_with_2_items: |
| 2540 | compile_node(comp, pns2->nodes[0]); |
| 2541 | compile_node(comp, pns2->nodes[1]); |
| 2542 | EMIT_ARG(build, 2, MP_EMIT_BUILD_LIST); |
| 2543 | } |
| 2544 | } else { |
| 2545 | // list with 1 item |
| 2546 | compile_node(comp, pns->nodes[0]); |
| 2547 | EMIT_ARG(build, 1, MP_EMIT_BUILD_LIST); |
| 2548 | } |
| 2549 | } |
| 2550 | |
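// Compiles a brace-enclosed atom: a dict or set literal, or a dict/set
// comprehension.  create_map is false when the caller has already placed the
// target map object on the stack (e.g. the OrderedDict handling above).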
| 2551 | STATIC void compile_atom_brace_helper(compiler_t *comp, mp_parse_node_struct_t *pns, bool create_map) { |
| 2552 | mp_parse_node_t pn = pns->nodes[0]; |
| 2553 | if (MP_PARSE_NODE_IS_NULL(pn)) { |
| 2554 | // empty dict |
| 2555 | if (create_map) { |
| 2556 | EMIT_ARG(build, 0, MP_EMIT_BUILD_MAP); |
| 2557 | } |
| 2558 | } else if (MP_PARSE_NODE_IS_STRUCT(pn)) { |
| 2559 | pns = (mp_parse_node_struct_t *)pn; |
| 2560 | if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_dictorsetmaker_item) { |
| 2561 | // dict with one element |
| 2562 | if (create_map) { |
| 2563 | EMIT_ARG(build, 1, MP_EMIT_BUILD_MAP); |
| 2564 | } |
| 2565 | compile_node(comp, pn); |
| 2566 | EMIT(store_map); |
| 2567 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_dictorsetmaker) { |
| 2568 | assert(MP_PARSE_NODE_IS_STRUCT(pns->nodes[1])); // should succeed |
| 2569 | mp_parse_node_struct_t *pns1 = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 2570 | if (MP_PARSE_NODE_STRUCT_KIND(pns1) == PN_dictorsetmaker_list) { |
| 2571 | // dict/set with multiple elements |
| 2572 | |
| 2573 | // get tail elements (2nd, 3rd, ...) |
| 2574 | mp_parse_node_t *nodes; |
| 2575 | size_t n = mp_parse_node_extract_list(&pns1->nodes[0], PN_dictorsetmaker_list2, &nodes); |
| 2576 | |
| 2577 | // first element sets whether it's a dict or set |
| 2578 | bool is_dict; |
| 2579 | if (!MICROPY_PY_BUILTINS_SET || MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_dictorsetmaker_item)) { |
| 2580 | // a dictionary |
| 2581 | if (create_map) { |
| 2582 | EMIT_ARG(build, 1 + n, MP_EMIT_BUILD_MAP); |
| 2583 | } |
| 2584 | compile_node(comp, pns->nodes[0]); |
| 2585 | EMIT(store_map); |
| 2586 | is_dict = true; |
| 2587 | } else { |
| 2588 | // a set |
| 2589 | compile_node(comp, pns->nodes[0]); // 1st value of set |
| 2590 | is_dict = false; |
| 2591 | } |
| 2592 | |
| 2593 | // process rest of elements |
| 2594 | for (size_t i = 0; i < n; i++) { |
| 2595 | mp_parse_node_t pn_i = nodes[i]; |
| 2596 | bool is_key_value = MP_PARSE_NODE_IS_STRUCT_KIND(pn_i, PN_dictorsetmaker_item); |
| 2597 | compile_node(comp, pn_i); |
| 2598 | if (is_dict) { |
| 2599 | if (!is_key_value) { |
| 2600 | #if MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE |
compile_syntax_error(comp, (mp_parse_node_t)pns, MP_ERROR_TEXT("invalid syntax"));
| 2602 | #else |
compile_syntax_error(comp, (mp_parse_node_t)pns, MP_ERROR_TEXT("expecting key:value for dict"));
| 2604 | #endif |
| 2605 | return; |
| 2606 | } |
| 2607 | EMIT(store_map); |
| 2608 | } else { |
| 2609 | if (is_key_value) { |
| 2610 | #if MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE |
compile_syntax_error(comp, (mp_parse_node_t)pns, MP_ERROR_TEXT("invalid syntax"));
| 2612 | #else |
compile_syntax_error(comp, (mp_parse_node_t)pns, MP_ERROR_TEXT("expecting just a value for set"));
| 2614 | #endif |
| 2615 | return; |
| 2616 | } |
| 2617 | } |
| 2618 | } |
| 2619 | |
| 2620 | #if MICROPY_PY_BUILTINS_SET |
| 2621 | // if it's a set, build it |
| 2622 | if (!is_dict) { |
| 2623 | EMIT_ARG(build, 1 + n, MP_EMIT_BUILD_SET); |
| 2624 | } |
| 2625 | #endif |
| 2626 | } else { |
| 2627 | assert(MP_PARSE_NODE_STRUCT_KIND(pns1) == PN_comp_for); // should be |
| 2628 | // dict/set comprehension |
| 2629 | if (!MICROPY_PY_BUILTINS_SET || MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_dictorsetmaker_item)) { |
| 2630 | // a dictionary comprehension |
| 2631 | compile_comprehension(comp, pns, SCOPE_DICT_COMP); |
| 2632 | } else { |
| 2633 | // a set comprehension |
| 2634 | compile_comprehension(comp, pns, SCOPE_SET_COMP); |
| 2635 | } |
| 2636 | } |
| 2637 | } else { |
| 2638 | // set with one element |
| 2639 | goto set_with_one_element; |
| 2640 | } |
| 2641 | } else { |
| 2642 | // set with one element |
| 2643 | set_with_one_element: |
| 2644 | #if MICROPY_PY_BUILTINS_SET |
| 2645 | compile_node(comp, pn); |
| 2646 | EMIT_ARG(build, 1, MP_EMIT_BUILD_SET); |
| 2647 | #else |
| 2648 | assert(0); |
| 2649 | #endif |
| 2650 | } |
| 2651 | } |
| 2652 | |
| 2653 | STATIC void compile_atom_brace(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2654 | compile_atom_brace_helper(comp, pns, true); |
| 2655 | } |
| 2656 | |
| 2657 | STATIC void compile_trailer_paren(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2658 | compile_trailer_paren_helper(comp, pns->nodes[0], false, 0); |
| 2659 | } |
| 2660 | |
| 2661 | STATIC void compile_trailer_bracket(compiler_t *comp, mp_parse_node_struct_t *pns) { |
// object whose index we want is on top of stack
| 2663 | compile_node(comp, pns->nodes[0]); // the index |
| 2664 | EMIT_ARG(subscr, MP_EMIT_SUBSCR_LOAD); |
| 2665 | } |
| 2666 | |
| 2667 | STATIC void compile_trailer_period(compiler_t *comp, mp_parse_node_struct_t *pns) { |
// object whose attribute we want is on top of stack
| 2669 | EMIT_ARG(attr, MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]), MP_EMIT_ATTR_LOAD); // attribute to get |
| 2670 | } |
| 2671 | |
| 2672 | #if MICROPY_PY_BUILTINS_SLICE |
| 2673 | STATIC void compile_subscript(compiler_t *comp, mp_parse_node_struct_t *pns) { |
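// Compiles the slice part of a subscription such as x[a:b], x[:b], x[a:] or
// x[a:b:c]: omitted bounds are loaded as None and a 2- or 3-element slice is
// built via MP_EMIT_BUILD_SLICE.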
| 2674 | if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_subscript_2) { |
| 2675 | compile_node(comp, pns->nodes[0]); // start of slice |
| 2676 | assert(MP_PARSE_NODE_IS_STRUCT(pns->nodes[1])); // should always be |
| 2677 | pns = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 2678 | } else { |
| 2679 | // pns is a PN_subscript_3, load None for start of slice |
| 2680 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 2681 | } |
| 2682 | |
| 2683 | assert(MP_PARSE_NODE_STRUCT_KIND(pns) == PN_subscript_3); // should always be |
| 2684 | mp_parse_node_t pn = pns->nodes[0]; |
| 2685 | if (MP_PARSE_NODE_IS_NULL(pn)) { |
| 2686 | // [?:] |
| 2687 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 2688 | EMIT_ARG(build, 2, MP_EMIT_BUILD_SLICE); |
| 2689 | } else if (MP_PARSE_NODE_IS_STRUCT(pn)) { |
| 2690 | pns = (mp_parse_node_struct_t *)pn; |
| 2691 | if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_subscript_3c) { |
| 2692 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 2693 | pn = pns->nodes[0]; |
| 2694 | if (MP_PARSE_NODE_IS_NULL(pn)) { |
| 2695 | // [?::] |
| 2696 | EMIT_ARG(build, 2, MP_EMIT_BUILD_SLICE); |
| 2697 | } else { |
| 2698 | // [?::x] |
| 2699 | compile_node(comp, pn); |
| 2700 | EMIT_ARG(build, 3, MP_EMIT_BUILD_SLICE); |
| 2701 | } |
| 2702 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_subscript_3d) { |
| 2703 | compile_node(comp, pns->nodes[0]); |
| 2704 | assert(MP_PARSE_NODE_IS_STRUCT(pns->nodes[1])); // should always be |
| 2705 | pns = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 2706 | assert(MP_PARSE_NODE_STRUCT_KIND(pns) == PN_sliceop); // should always be |
| 2707 | if (MP_PARSE_NODE_IS_NULL(pns->nodes[0])) { |
| 2708 | // [?:x:] |
| 2709 | EMIT_ARG(build, 2, MP_EMIT_BUILD_SLICE); |
| 2710 | } else { |
| 2711 | // [?:x:x] |
| 2712 | compile_node(comp, pns->nodes[0]); |
| 2713 | EMIT_ARG(build, 3, MP_EMIT_BUILD_SLICE); |
| 2714 | } |
| 2715 | } else { |
| 2716 | // [?:x] |
| 2717 | compile_node(comp, pn); |
| 2718 | EMIT_ARG(build, 2, MP_EMIT_BUILD_SLICE); |
| 2719 | } |
| 2720 | } else { |
| 2721 | // [?:x] |
| 2722 | compile_node(comp, pn); |
| 2723 | EMIT_ARG(build, 2, MP_EMIT_BUILD_SLICE); |
| 2724 | } |
| 2725 | } |
| 2726 | #endif // MICROPY_PY_BUILTINS_SLICE |
| 2727 | |
| 2728 | STATIC void compile_dictorsetmaker_item(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2729 | // if this is called then we are compiling a dict key:value pair |
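// e.g. for the entry a: 1 the value 1 is compiled first and the key a second,
// so the key ends up on top of the stack for the store_map emitted by the caller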
| 2730 | compile_node(comp, pns->nodes[1]); // value |
| 2731 | compile_node(comp, pns->nodes[0]); // key |
| 2732 | } |
| 2733 | |
| 2734 | STATIC void compile_classdef(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2735 | qstr cname = compile_classdef_helper(comp, pns, comp->scope_cur->emit_options); |
| 2736 | // store class object into class name |
| 2737 | compile_store_id(comp, cname); |
| 2738 | } |
| 2739 | |
| 2740 | STATIC void compile_yield_expr(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2741 | if (comp->scope_cur->kind != SCOPE_FUNCTION && comp->scope_cur->kind != SCOPE_LAMBDA) { |
compile_syntax_error(comp, (mp_parse_node_t)pns, MP_ERROR_TEXT("'yield' outside function"));
| 2743 | return; |
| 2744 | } |
| 2745 | if (MP_PARSE_NODE_IS_NULL(pns->nodes[0])) { |
| 2746 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 2747 | EMIT_ARG(yield, MP_EMIT_YIELD_VALUE); |
| 2748 | reserve_labels_for_native(comp, 1); |
| 2749 | } else if (MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_yield_arg_from)) { |
| 2750 | pns = (mp_parse_node_struct_t *)pns->nodes[0]; |
| 2751 | compile_node(comp, pns->nodes[0]); |
| 2752 | compile_yield_from(comp); |
| 2753 | } else { |
| 2754 | compile_node(comp, pns->nodes[0]); |
| 2755 | EMIT_ARG(yield, MP_EMIT_YIELD_VALUE); |
| 2756 | reserve_labels_for_native(comp, 1); |
| 2757 | } |
| 2758 | } |
| 2759 | |
| 2760 | #if MICROPY_PY_ASYNC_AWAIT |
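// 'await expr' is compiled like 'yield from expr': the awaited expression is
// compiled as a normal atom-expression and then the generic yield-from
// sequence is emitted.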
| 2761 | STATIC void compile_atom_expr_await(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2762 | if (comp->scope_cur->kind != SCOPE_FUNCTION && comp->scope_cur->kind != SCOPE_LAMBDA) { |
compile_syntax_error(comp, (mp_parse_node_t)pns, MP_ERROR_TEXT("'await' outside function"));
| 2764 | return; |
| 2765 | } |
| 2766 | compile_atom_expr_normal(comp, pns); |
| 2767 | compile_yield_from(comp); |
| 2768 | } |
| 2769 | #endif |
| 2770 | |
| 2771 | STATIC mp_obj_t get_const_object(mp_parse_node_struct_t *pns) { |
| 2772 | #if MICROPY_OBJ_REPR == MICROPY_OBJ_REPR_D |
| 2773 | // nodes are 32-bit pointers, but need to extract 64-bit object |
| 2774 | return (uint64_t)pns->nodes[0] | ((uint64_t)pns->nodes[1] << 32); |
| 2775 | #else |
| 2776 | return (mp_obj_t)pns->nodes[0]; |
| 2777 | #endif |
| 2778 | } |
| 2779 | |
| 2780 | STATIC void compile_const_object(compiler_t *comp, mp_parse_node_struct_t *pns) { |
| 2781 | EMIT_ARG(load_const_obj, get_const_object(pns)); |
| 2782 | } |
| 2783 | |
| 2784 | typedef void (*compile_function_t)(compiler_t *, mp_parse_node_struct_t *); |
| 2785 | STATIC const compile_function_t compile_function[] = { |
| 2786 | // only define rules with a compile function |
| 2787 | #define c(f) compile_##f |
| 2788 | #define DEF_RULE(rule, comp, kind, ...) comp, |
| 2789 | #define DEF_RULE_NC(rule, kind, ...) |
| 2790 | #include "py/grammar.h" |
| 2791 | #undef c |
| 2792 | #undef DEF_RULE |
| 2793 | #undef DEF_RULE_NC |
| 2794 | compile_const_object, |
| 2795 | }; |
| 2796 | |
| 2797 | STATIC void compile_node(compiler_t *comp, mp_parse_node_t pn) { |
| 2798 | if (MP_PARSE_NODE_IS_NULL(pn)) { |
| 2799 | // pass |
| 2800 | } else if (MP_PARSE_NODE_IS_SMALL_INT(pn)) { |
| 2801 | mp_int_t arg = MP_PARSE_NODE_LEAF_SMALL_INT(pn); |
| 2802 | #if MICROPY_DYNAMIC_COMPILER |
| 2803 | mp_uint_t sign_mask = -((mp_uint_t)1 << (mp_dynamic_compiler.small_int_bits - 1)); |
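// arg fits the target's small int iff all bits from (small_int_bits - 1)
// upwards are either all 0 (non-negative) or all 1 (negative), i.e. iff it is
// representable as a signed small_int_bits-bit value; e.g. with
// small_int_bits == 31 the mask covers bits 30 and above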
| 2804 | if ((arg & sign_mask) == 0 || (arg & sign_mask) == sign_mask) { |
| 2805 | // integer fits in target runtime's small-int |
| 2806 | EMIT_ARG(load_const_small_int, arg); |
| 2807 | } else { |
| 2808 | // integer doesn't fit, so create a multi-precision int object |
| 2809 | // (but only create the actual object on the last pass) |
| 2810 | if (comp->pass != MP_PASS_EMIT) { |
| 2811 | EMIT_ARG(load_const_obj, mp_const_none); |
| 2812 | } else { |
| 2813 | EMIT_ARG(load_const_obj, mp_obj_new_int_from_ll(arg)); |
| 2814 | } |
| 2815 | } |
| 2816 | #else |
| 2817 | EMIT_ARG(load_const_small_int, arg); |
| 2818 | #endif |
| 2819 | } else if (MP_PARSE_NODE_IS_LEAF(pn)) { |
| 2820 | uintptr_t arg = MP_PARSE_NODE_LEAF_ARG(pn); |
| 2821 | switch (MP_PARSE_NODE_LEAF_KIND(pn)) { |
| 2822 | case MP_PARSE_NODE_ID: |
| 2823 | compile_load_id(comp, arg); |
| 2824 | break; |
| 2825 | case MP_PARSE_NODE_STRING: |
| 2826 | EMIT_ARG(load_const_str, arg); |
| 2827 | break; |
| 2828 | case MP_PARSE_NODE_BYTES: |
| 2829 | // only create and load the actual bytes object on the last pass |
| 2830 | if (comp->pass != MP_PASS_EMIT) { |
| 2831 | EMIT_ARG(load_const_obj, mp_const_none); |
| 2832 | } else { |
| 2833 | size_t len; |
| 2834 | const byte *data = qstr_data(arg, &len); |
| 2835 | EMIT_ARG(load_const_obj, mp_obj_new_bytes(data, len)); |
| 2836 | } |
| 2837 | break; |
| 2838 | case MP_PARSE_NODE_TOKEN: |
| 2839 | default: |
| 2840 | if (arg == MP_TOKEN_NEWLINE) { |
| 2841 | // this can occur when file_input lets through a NEWLINE (eg if file starts with a newline) |
| 2842 | // or when single_input lets through a NEWLINE (user enters a blank line) |
| 2843 | // do nothing |
| 2844 | } else { |
| 2845 | EMIT_ARG(load_const_tok, arg); |
| 2846 | } |
| 2847 | break; |
| 2848 | } |
| 2849 | } else { |
| 2850 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn; |
| 2851 | EMIT_ARG(set_source_line, pns->source_line); |
| 2852 | assert(MP_PARSE_NODE_STRUCT_KIND(pns) <= PN_const_object); |
| 2853 | compile_function_t f = compile_function[MP_PARSE_NODE_STRUCT_KIND(pns)]; |
| 2854 | f(comp, pns); |
| 2855 | } |
| 2856 | } |
| 2857 | |
| 2858 | #if MICROPY_EMIT_NATIVE |
| 2859 | STATIC int compile_viper_type_annotation(compiler_t *comp, mp_parse_node_t pn_annotation) { |
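// In viper mode an annotation must be a bare identifier naming a native type,
// e.g. def f(x: int) -> uint: ...; names such as int, uint and ptr are
// resolved by mp_native_type_from_qstr, and no annotation defaults to object.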
| 2860 | int native_type = MP_NATIVE_TYPE_OBJ; |
| 2861 | if (MP_PARSE_NODE_IS_NULL(pn_annotation)) { |
| 2862 | // No annotation, type defaults to object |
| 2863 | } else if (MP_PARSE_NODE_IS_ID(pn_annotation)) { |
| 2864 | qstr type_name = MP_PARSE_NODE_LEAF_ARG(pn_annotation); |
| 2865 | native_type = mp_native_type_from_qstr(type_name); |
| 2866 | if (native_type < 0) { |
comp->compile_error = mp_obj_new_exception_msg_varg(&mp_type_ViperTypeError, MP_ERROR_TEXT("unknown type '%q'"), type_name);
| 2868 | native_type = 0; |
| 2869 | } |
| 2870 | } else { |
compile_syntax_error(comp, pn_annotation, MP_ERROR_TEXT("annotation must be an identifier"));
| 2872 | } |
| 2873 | return native_type; |
| 2874 | } |
| 2875 | #endif |
| 2876 | |
| 2877 | STATIC void compile_scope_func_lambda_param(compiler_t *comp, mp_parse_node_t pn, pn_kind_t pn_name, pn_kind_t pn_star, pn_kind_t pn_dbl_star) { |
| 2878 | (void)pn_dbl_star; |
| 2879 | |
| 2880 | // check that **kw is last |
| 2881 | if ((comp->scope_cur->scope_flags & MP_SCOPE_FLAG_VARKEYWORDS) != 0) { |
compile_syntax_error(comp, pn, MP_ERROR_TEXT("invalid syntax"));
| 2883 | return; |
| 2884 | } |
| 2885 | |
| 2886 | qstr param_name = MP_QSTRnull; |
| 2887 | uint param_flag = ID_FLAG_IS_PARAM; |
| 2888 | mp_parse_node_struct_t *pns = NULL; |
| 2889 | if (MP_PARSE_NODE_IS_ID(pn)) { |
| 2890 | param_name = MP_PARSE_NODE_LEAF_ARG(pn); |
| 2891 | if (comp->have_star) { |
| 2892 | // comes after a star, so counts as a keyword-only parameter |
| 2893 | comp->scope_cur->num_kwonly_args += 1; |
| 2894 | } else { |
| 2895 | // comes before a star, so counts as a positional parameter |
| 2896 | comp->scope_cur->num_pos_args += 1; |
| 2897 | } |
| 2898 | } else { |
| 2899 | assert(MP_PARSE_NODE_IS_STRUCT(pn)); |
| 2900 | pns = (mp_parse_node_struct_t *)pn; |
| 2901 | if (MP_PARSE_NODE_STRUCT_KIND(pns) == pn_name) { |
| 2902 | // named parameter with possible annotation |
| 2903 | param_name = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]); |
| 2904 | if (comp->have_star) { |
| 2905 | // comes after a star, so counts as a keyword-only parameter |
| 2906 | comp->scope_cur->num_kwonly_args += 1; |
| 2907 | } else { |
| 2908 | // comes before a star, so counts as a positional parameter |
| 2909 | comp->scope_cur->num_pos_args += 1; |
| 2910 | } |
| 2911 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns) == pn_star) { |
| 2912 | if (comp->have_star) { |
| 2913 | // more than one star |
compile_syntax_error(comp, pn, MP_ERROR_TEXT("invalid syntax"));
| 2915 | return; |
| 2916 | } |
| 2917 | comp->have_star = true; |
| 2918 | param_flag = ID_FLAG_IS_PARAM | ID_FLAG_IS_STAR_PARAM; |
| 2919 | if (MP_PARSE_NODE_IS_NULL(pns->nodes[0])) { |
| 2920 | // bare star |
| 2921 | // TODO see http://www.python.org/dev/peps/pep-3102/ |
| 2922 | // assert(comp->scope_cur->num_dict_params == 0); |
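// e.g. in def f(a, *, b): the bare star itself adds no parameter, it only
// marks the parameters that follow it (here b) as keyword-only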
| 2923 | pns = NULL; |
| 2924 | } else if (MP_PARSE_NODE_IS_ID(pns->nodes[0])) { |
| 2925 | // named star |
| 2926 | comp->scope_cur->scope_flags |= MP_SCOPE_FLAG_VARARGS; |
| 2927 | param_name = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]); |
| 2928 | pns = NULL; |
| 2929 | } else { |
| 2930 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_tfpdef)); // should be |
| 2931 | // named star with possible annotation |
| 2932 | comp->scope_cur->scope_flags |= MP_SCOPE_FLAG_VARARGS; |
| 2933 | pns = (mp_parse_node_struct_t *)pns->nodes[0]; |
| 2934 | param_name = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]); |
| 2935 | } |
| 2936 | } else { |
| 2937 | // double star with possible annotation |
| 2938 | assert(MP_PARSE_NODE_STRUCT_KIND(pns) == pn_dbl_star); // should be |
| 2939 | param_name = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]); |
| 2940 | param_flag = ID_FLAG_IS_PARAM | ID_FLAG_IS_DBL_STAR_PARAM; |
| 2941 | comp->scope_cur->scope_flags |= MP_SCOPE_FLAG_VARKEYWORDS; |
| 2942 | } |
| 2943 | } |
| 2944 | |
| 2945 | if (param_name != MP_QSTRnull) { |
| 2946 | id_info_t *id_info = scope_find_or_add_id(comp->scope_cur, param_name, ID_INFO_KIND_UNDECIDED); |
| 2947 | if (id_info->kind != ID_INFO_KIND_UNDECIDED) { |
compile_syntax_error(comp, pn, MP_ERROR_TEXT("argument name reused"));
| 2949 | return; |
| 2950 | } |
| 2951 | id_info->kind = ID_INFO_KIND_LOCAL; |
| 2952 | id_info->flags = param_flag; |
| 2953 | |
| 2954 | #if MICROPY_EMIT_NATIVE |
| 2955 | if (comp->scope_cur->emit_options == MP_EMIT_OPT_VIPER && pn_name == PN_typedargslist_name && pns != NULL) { |
| 2956 | id_info->flags |= compile_viper_type_annotation(comp, pns->nodes[1]) << ID_FLAG_VIPER_TYPE_POS; |
| 2957 | } |
| 2958 | #else |
| 2959 | (void)pns; |
| 2960 | #endif |
| 2961 | } |
| 2962 | } |
| 2963 | |
| 2964 | STATIC void compile_scope_func_param(compiler_t *comp, mp_parse_node_t pn) { |
| 2965 | compile_scope_func_lambda_param(comp, pn, PN_typedargslist_name, PN_typedargslist_star, PN_typedargslist_dbl_star); |
| 2966 | } |
| 2967 | |
| 2968 | STATIC void compile_scope_lambda_param(compiler_t *comp, mp_parse_node_t pn) { |
| 2969 | compile_scope_func_lambda_param(comp, pn, PN_varargslist_name, PN_varargslist_star, PN_varargslist_dbl_star); |
| 2970 | } |
| 2971 | |
| 2972 | STATIC void compile_scope_comp_iter(compiler_t *comp, mp_parse_node_struct_t *pns_comp_for, mp_parse_node_t pn_inner_expr, int for_depth) { |
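// Compiles one 'for' clause of a comprehension together with any trailing
// 'if'/'for' clauses; e.g. in [x + y for x in a if x for y in b] the 'if x'
// clause is handled by the tail_recursion loop and 'for y in b' by a
// recursive call.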
| 2973 | uint l_top = comp_next_label(comp); |
| 2974 | uint l_end = comp_next_label(comp); |
| 2975 | EMIT_ARG(label_assign, l_top); |
| 2976 | EMIT_ARG(for_iter, l_end); |
| 2977 | c_assign(comp, pns_comp_for->nodes[0], ASSIGN_STORE); |
| 2978 | mp_parse_node_t pn_iter = pns_comp_for->nodes[2]; |
| 2979 | |
| 2980 | tail_recursion: |
| 2981 | if (MP_PARSE_NODE_IS_NULL(pn_iter)) { |
| 2982 | // no more nested if/for; compile inner expression |
| 2983 | compile_node(comp, pn_inner_expr); |
| 2984 | if (comp->scope_cur->kind == SCOPE_GEN_EXPR) { |
| 2985 | EMIT_ARG(yield, MP_EMIT_YIELD_VALUE); |
| 2986 | reserve_labels_for_native(comp, 1); |
| 2987 | EMIT(pop_top); |
| 2988 | } else { |
| 2989 | EMIT_ARG(store_comp, comp->scope_cur->kind, 4 * for_depth + 5); |
| 2990 | } |
| 2991 | } else if (MP_PARSE_NODE_STRUCT_KIND((mp_parse_node_struct_t *)pn_iter) == PN_comp_if) { |
| 2992 | // if condition |
| 2993 | mp_parse_node_struct_t *pns_comp_if = (mp_parse_node_struct_t *)pn_iter; |
| 2994 | c_if_cond(comp, pns_comp_if->nodes[0], false, l_top); |
| 2995 | pn_iter = pns_comp_if->nodes[1]; |
| 2996 | goto tail_recursion; |
| 2997 | } else { |
| 2998 | assert(MP_PARSE_NODE_STRUCT_KIND((mp_parse_node_struct_t *)pn_iter) == PN_comp_for); // should be |
| 2999 | // for loop |
| 3000 | mp_parse_node_struct_t *pns_comp_for2 = (mp_parse_node_struct_t *)pn_iter; |
| 3001 | compile_node(comp, pns_comp_for2->nodes[1]); |
| 3002 | EMIT_ARG(get_iter, true); |
| 3003 | compile_scope_comp_iter(comp, pns_comp_for2, pn_inner_expr, for_depth + 1); |
| 3004 | } |
| 3005 | |
| 3006 | EMIT_ARG(jump, l_top); |
| 3007 | EMIT_ARG(label_assign, l_end); |
| 3008 | EMIT(for_iter_end); |
| 3009 | } |
| 3010 | |
| 3011 | STATIC void check_for_doc_string(compiler_t *comp, mp_parse_node_t pn) { |
| 3012 | #if MICROPY_ENABLE_DOC_STRING |
| 3013 | // see http://www.python.org/dev/peps/pep-0257/ |
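// e.g. a module or class body whose first statement is a bare string literal
// gets that string compiled and stored as __doc__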
| 3014 | |
| 3015 | // look for the first statement |
| 3016 | if (MP_PARSE_NODE_IS_STRUCT_KIND(pn, PN_expr_stmt)) { |
| 3017 | // a statement; fall through |
| 3018 | } else if (MP_PARSE_NODE_IS_STRUCT_KIND(pn, PN_file_input_2)) { |
| 3019 | // file input; find the first non-newline node |
| 3020 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn; |
| 3021 | int num_nodes = MP_PARSE_NODE_STRUCT_NUM_NODES(pns); |
| 3022 | for (int i = 0; i < num_nodes; i++) { |
| 3023 | pn = pns->nodes[i]; |
| 3024 | if (!(MP_PARSE_NODE_IS_LEAF(pn) && MP_PARSE_NODE_LEAF_KIND(pn) == MP_PARSE_NODE_TOKEN && MP_PARSE_NODE_LEAF_ARG(pn) == MP_TOKEN_NEWLINE)) { |
| 3025 | // not a newline, so this is the first statement; finish search |
| 3026 | break; |
| 3027 | } |
| 3028 | } |
| 3029 | // if we didn't find a non-newline then it's okay to fall through; pn will be a newline and so doc-string test below will fail gracefully |
| 3030 | } else if (MP_PARSE_NODE_IS_STRUCT_KIND(pn, PN_suite_block_stmts)) { |
| 3031 | // a list of statements; get the first one |
| 3032 | pn = ((mp_parse_node_struct_t *)pn)->nodes[0]; |
| 3033 | } else { |
| 3034 | return; |
| 3035 | } |
| 3036 | |
| 3037 | // check the first statement for a doc string |
| 3038 | if (MP_PARSE_NODE_IS_STRUCT_KIND(pn, PN_expr_stmt)) { |
| 3039 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)pn; |
| 3040 | if ((MP_PARSE_NODE_IS_LEAF(pns->nodes[0]) |
| 3041 | && MP_PARSE_NODE_LEAF_KIND(pns->nodes[0]) == MP_PARSE_NODE_STRING) |
| 3042 | || (MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_const_object) |
| 3043 | && mp_obj_is_str(get_const_object((mp_parse_node_struct_t *)pns->nodes[0])))) { |
| 3044 | // compile the doc string |
| 3045 | compile_node(comp, pns->nodes[0]); |
| 3046 | // store the doc string |
| 3047 | compile_store_id(comp, MP_QSTR___doc__); |
| 3048 | } |
| 3049 | } |
| 3050 | #else |
| 3051 | (void)comp; |
| 3052 | (void)pn; |
| 3053 | #endif |
| 3054 | } |
| 3055 | |
| 3056 | STATIC void compile_scope(compiler_t *comp, scope_t *scope, pass_kind_t pass) { |
| 3057 | comp->pass = pass; |
| 3058 | comp->scope_cur = scope; |
| 3059 | comp->next_label = 0; |
| 3060 | EMIT_ARG(start_pass, pass, scope); |
| 3061 | reserve_labels_for_native(comp, 6); // used by native's start_pass |
| 3062 | |
| 3063 | if (comp->pass == MP_PASS_SCOPE) { |
| 3064 | // reset maximum stack sizes in scope |
| 3065 | // they will be computed in this first pass |
| 3066 | scope->stack_size = 0; |
| 3067 | scope->exc_stack_size = 0; |
| 3068 | } |
| 3069 | |
| 3070 | // compile |
| 3071 | if (MP_PARSE_NODE_IS_STRUCT_KIND(scope->pn, PN_eval_input)) { |
| 3072 | assert(scope->kind == SCOPE_MODULE); |
| 3073 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)scope->pn; |
| 3074 | compile_node(comp, pns->nodes[0]); // compile the expression |
| 3075 | EMIT(return_value); |
| 3076 | } else if (scope->kind == SCOPE_MODULE) { |
| 3077 | if (!comp->is_repl) { |
| 3078 | check_for_doc_string(comp, scope->pn); |
| 3079 | } |
| 3080 | compile_node(comp, scope->pn); |
| 3081 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 3082 | EMIT(return_value); |
| 3083 | } else if (scope->kind == SCOPE_FUNCTION) { |
| 3084 | assert(MP_PARSE_NODE_IS_STRUCT(scope->pn)); |
| 3085 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)scope->pn; |
| 3086 | assert(MP_PARSE_NODE_STRUCT_KIND(pns) == PN_funcdef); |
| 3087 | |
| 3088 | // work out number of parameters, keywords and default parameters, and add them to the id_info array |
| 3089 | // must be done before compiling the body so that arguments are numbered first (for LOAD_FAST etc) |
| 3090 | if (comp->pass == MP_PASS_SCOPE) { |
| 3091 | comp->have_star = false; |
| 3092 | apply_to_single_or_list(comp, pns->nodes[1], PN_typedargslist, compile_scope_func_param); |
| 3093 | |
| 3094 | #if MICROPY_EMIT_NATIVE |
| 3095 | if (scope->emit_options == MP_EMIT_OPT_VIPER) { |
| 3096 | // Compile return type; pns->nodes[2] is return/whole function annotation |
| 3097 | scope->scope_flags |= compile_viper_type_annotation(comp, pns->nodes[2]) << MP_SCOPE_FLAG_VIPERRET_POS; |
| 3098 | } |
| 3099 | #endif // MICROPY_EMIT_NATIVE |
| 3100 | } |
| 3101 | |
| 3102 | compile_node(comp, pns->nodes[3]); // 3 is function body |
| 3103 | // emit return if it wasn't the last opcode |
| 3104 | if (!EMIT(last_emit_was_return_value)) { |
| 3105 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 3106 | EMIT(return_value); |
| 3107 | } |
| 3108 | } else if (scope->kind == SCOPE_LAMBDA) { |
| 3109 | assert(MP_PARSE_NODE_IS_STRUCT(scope->pn)); |
| 3110 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)scope->pn; |
| 3111 | assert(MP_PARSE_NODE_STRUCT_NUM_NODES(pns) == 3); |
| 3112 | |
| 3113 | // Set the source line number for the start of the lambda |
| 3114 | EMIT_ARG(set_source_line, pns->source_line); |
| 3115 | |
| 3116 | // work out number of parameters, keywords and default parameters, and add them to the id_info array |
| 3117 | // must be done before compiling the body so that arguments are numbered first (for LOAD_FAST etc) |
| 3118 | if (comp->pass == MP_PASS_SCOPE) { |
| 3119 | comp->have_star = false; |
| 3120 | apply_to_single_or_list(comp, pns->nodes[0], PN_varargslist, compile_scope_lambda_param); |
| 3121 | } |
| 3122 | |
| 3123 | compile_node(comp, pns->nodes[1]); // 1 is lambda body |
| 3124 | |
| 3125 | // if the lambda is a generator, then we return None, not the result of the expression of the lambda |
| 3126 | if (scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) { |
| 3127 | EMIT(pop_top); |
| 3128 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 3129 | } |
| 3130 | EMIT(return_value); |
| 3131 | } else if (SCOPE_IS_COMP_LIKE(scope->kind)) { |
| 3132 | // a bit of a hack at the moment |
| 3133 | |
| 3134 | assert(MP_PARSE_NODE_IS_STRUCT(scope->pn)); |
| 3135 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)scope->pn; |
| 3136 | assert(MP_PARSE_NODE_STRUCT_NUM_NODES(pns) == 2); |
| 3137 | assert(MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[1], PN_comp_for)); |
| 3138 | mp_parse_node_struct_t *pns_comp_for = (mp_parse_node_struct_t *)pns->nodes[1]; |
| 3139 | |
| 3140 | // We need a unique name for the comprehension argument (the iterator). |
| 3141 | // CPython uses .0, but we should be able to use anything that won't |
| 3142 | // clash with a user defined variable. Best to use an existing qstr, |
| 3143 | // so we use the blank qstr. |
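// Roughly, [x * x for x in seq] becomes a call to a hidden one-argument
// function; the hidden argument (named '.0' in CPython, the blank qstr here)
// receives the iterator for the outermost iterable, which is evaluated in
// the enclosing scope.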
| 3144 | qstr qstr_arg = MP_QSTR_; |
| 3145 | if (comp->pass == MP_PASS_SCOPE) { |
| 3146 | scope_find_or_add_id(comp->scope_cur, qstr_arg, ID_INFO_KIND_LOCAL); |
| 3147 | scope->num_pos_args = 1; |
| 3148 | } |
| 3149 | |
| 3150 | // Set the source line number for the start of the comprehension |
| 3151 | EMIT_ARG(set_source_line, pns->source_line); |
| 3152 | |
| 3153 | if (scope->kind == SCOPE_LIST_COMP) { |
| 3154 | EMIT_ARG(build, 0, MP_EMIT_BUILD_LIST); |
| 3155 | } else if (scope->kind == SCOPE_DICT_COMP) { |
| 3156 | EMIT_ARG(build, 0, MP_EMIT_BUILD_MAP); |
| 3157 | #if MICROPY_PY_BUILTINS_SET |
| 3158 | } else if (scope->kind == SCOPE_SET_COMP) { |
| 3159 | EMIT_ARG(build, 0, MP_EMIT_BUILD_SET); |
| 3160 | #endif |
| 3161 | } |
| 3162 | |
| 3163 | // There are 4 slots on the stack for the iterator, and the first one is |
| 3164 | // NULL to indicate that the second one points to the iterator object. |
| 3165 | if (scope->kind == SCOPE_GEN_EXPR) { |
| 3166 | MP_STATIC_ASSERT(MP_OBJ_ITER_BUF_NSLOTS == 4); |
| 3167 | EMIT(load_null); |
| 3168 | compile_load_id(comp, qstr_arg); |
| 3169 | EMIT(load_null); |
| 3170 | EMIT(load_null); |
| 3171 | } else { |
| 3172 | compile_load_id(comp, qstr_arg); |
| 3173 | EMIT_ARG(get_iter, true); |
| 3174 | } |
| 3175 | |
| 3176 | compile_scope_comp_iter(comp, pns_comp_for, pns->nodes[0], 0); |
| 3177 | |
| 3178 | if (scope->kind == SCOPE_GEN_EXPR) { |
| 3179 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 3180 | } |
| 3181 | EMIT(return_value); |
| 3182 | } else { |
| 3183 | assert(scope->kind == SCOPE_CLASS); |
| 3184 | assert(MP_PARSE_NODE_IS_STRUCT(scope->pn)); |
| 3185 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)scope->pn; |
| 3186 | assert(MP_PARSE_NODE_STRUCT_KIND(pns) == PN_classdef); |
| 3187 | |
| 3188 | if (comp->pass == MP_PASS_SCOPE) { |
| 3189 | scope_find_or_add_id(scope, MP_QSTR___class__, ID_INFO_KIND_LOCAL); |
| 3190 | } |
| 3191 | |
| 3192 | #if MICROPY_PY_SYS_SETTRACE |
| 3193 | EMIT_ARG(set_source_line, pns->source_line); |
| 3194 | #endif |
| 3195 | compile_load_id(comp, MP_QSTR___name__); |
| 3196 | compile_store_id(comp, MP_QSTR___module__); |
| 3197 | EMIT_ARG(load_const_str, MP_PARSE_NODE_LEAF_ARG(pns->nodes[0])); // 0 is class name |
| 3198 | compile_store_id(comp, MP_QSTR___qualname__); |
| 3199 | |
| 3200 | check_for_doc_string(comp, pns->nodes[2]); |
| 3201 | compile_node(comp, pns->nodes[2]); // 2 is class body |
| 3202 | |
| 3203 | id_info_t *id = scope_find(scope, MP_QSTR___class__); |
| 3204 | assert(id != NULL); |
| 3205 | if (id->kind == ID_INFO_KIND_LOCAL) { |
| 3206 | EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE); |
| 3207 | } else { |
| 3208 | EMIT_LOAD_FAST(MP_QSTR___class__, id->local_num); |
| 3209 | } |
| 3210 | EMIT(return_value); |
| 3211 | } |
| 3212 | |
| 3213 | EMIT(end_pass); |
| 3214 | |
| 3215 | // make sure we match all the exception levels |
| 3216 | assert(comp->cur_except_level == 0); |
| 3217 | } |
| 3218 | |
| 3219 | #if MICROPY_EMIT_INLINE_ASM |
| 3220 | // requires 3 passes: SCOPE, CODE_SIZE, EMIT |
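// An inline assembler function is written using ordinary Python syntax, e.g.
//
//     @micropython.asm_thumb
//     def add(r0, r1):
//         add(r0, r0, r1)
//
// where each body statement must be a call that looks like an assembler
// instruction (or pass, or one of the label/align/data pseudo-ops below).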
| 3221 | STATIC void compile_scope_inline_asm(compiler_t *comp, scope_t *scope, pass_kind_t pass) { |
| 3222 | comp->pass = pass; |
| 3223 | comp->scope_cur = scope; |
| 3224 | comp->next_label = 0; |
| 3225 | |
| 3226 | if (scope->kind != SCOPE_FUNCTION) { |
compile_syntax_error(comp, MP_PARSE_NODE_NULL, MP_ERROR_TEXT("inline assembler must be a function"));
| 3228 | return; |
| 3229 | } |
| 3230 | |
| 3231 | if (comp->pass > MP_PASS_SCOPE) { |
| 3232 | EMIT_INLINE_ASM_ARG(start_pass, comp->pass, &comp->compile_error); |
| 3233 | } |
| 3234 | |
| 3235 | // get the function definition parse node |
| 3236 | assert(MP_PARSE_NODE_IS_STRUCT(scope->pn)); |
| 3237 | mp_parse_node_struct_t *pns = (mp_parse_node_struct_t *)scope->pn; |
| 3238 | assert(MP_PARSE_NODE_STRUCT_KIND(pns) == PN_funcdef); |
| 3239 | |
| 3240 | // qstr f_id = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]); // function name |
| 3241 | |
| 3242 | // parameters are in pns->nodes[1] |
| 3243 | if (comp->pass == MP_PASS_CODE_SIZE) { |
| 3244 | mp_parse_node_t *pn_params; |
| 3245 | size_t n_params = mp_parse_node_extract_list(&pns->nodes[1], PN_typedargslist, &pn_params); |
| 3246 | scope->num_pos_args = EMIT_INLINE_ASM_ARG(count_params, n_params, pn_params); |
| 3247 | if (comp->compile_error != MP_OBJ_NULL) { |
| 3248 | goto inline_asm_error; |
| 3249 | } |
| 3250 | } |
| 3251 | |
| 3252 | // pns->nodes[2] is function return annotation |
| 3253 | mp_uint_t type_sig = MP_NATIVE_TYPE_INT; |
| 3254 | mp_parse_node_t pn_annotation = pns->nodes[2]; |
| 3255 | if (!MP_PARSE_NODE_IS_NULL(pn_annotation)) { |
| 3256 | // nodes[2] can be null or a test-expr |
| 3257 | if (MP_PARSE_NODE_IS_ID(pn_annotation)) { |
| 3258 | qstr ret_type = MP_PARSE_NODE_LEAF_ARG(pn_annotation); |
| 3259 | switch (ret_type) { |
| 3260 | case MP_QSTR_object: |
| 3261 | type_sig = MP_NATIVE_TYPE_OBJ; |
| 3262 | break; |
| 3263 | case MP_QSTR_bool: |
| 3264 | type_sig = MP_NATIVE_TYPE_BOOL; |
| 3265 | break; |
| 3266 | case MP_QSTR_int: |
| 3267 | type_sig = MP_NATIVE_TYPE_INT; |
| 3268 | break; |
| 3269 | case MP_QSTR_uint: |
| 3270 | type_sig = MP_NATIVE_TYPE_UINT; |
| 3271 | break; |
| 3272 | default: |
compile_syntax_error(comp, pn_annotation, MP_ERROR_TEXT("unknown type"));
| 3274 | return; |
| 3275 | } |
| 3276 | } else { |
compile_syntax_error(comp, pn_annotation, MP_ERROR_TEXT("return annotation must be an identifier"));
| 3278 | } |
| 3279 | } |
| 3280 | |
| 3281 | mp_parse_node_t pn_body = pns->nodes[3]; // body |
| 3282 | mp_parse_node_t *nodes; |
| 3283 | size_t num = mp_parse_node_extract_list(&pn_body, PN_suite_block_stmts, &nodes); |
| 3284 | |
| 3285 | for (size_t i = 0; i < num; i++) { |
| 3286 | assert(MP_PARSE_NODE_IS_STRUCT(nodes[i])); |
| 3287 | mp_parse_node_struct_t *pns2 = (mp_parse_node_struct_t *)nodes[i]; |
| 3288 | if (MP_PARSE_NODE_STRUCT_KIND(pns2) == PN_pass_stmt) { |
| 3289 | // no instructions |
| 3290 | continue; |
| 3291 | } else if (MP_PARSE_NODE_STRUCT_KIND(pns2) != PN_expr_stmt) { |
| 3292 | // not an instruction; error |
| 3293 | not_an_instruction: |
compile_syntax_error(comp, nodes[i], MP_ERROR_TEXT("expecting an assembler instruction"));
| 3295 | return; |
| 3296 | } |
| 3297 | |
| 3298 | // check structure of parse node |
| 3299 | assert(MP_PARSE_NODE_IS_STRUCT(pns2->nodes[0])); |
| 3300 | if (!MP_PARSE_NODE_IS_NULL(pns2->nodes[1])) { |
| 3301 | goto not_an_instruction; |
| 3302 | } |
| 3303 | pns2 = (mp_parse_node_struct_t *)pns2->nodes[0]; |
| 3304 | if (MP_PARSE_NODE_STRUCT_KIND(pns2) != PN_atom_expr_normal) { |
| 3305 | goto not_an_instruction; |
| 3306 | } |
| 3307 | if (!MP_PARSE_NODE_IS_ID(pns2->nodes[0])) { |
| 3308 | goto not_an_instruction; |
| 3309 | } |
| 3310 | if (!MP_PARSE_NODE_IS_STRUCT_KIND(pns2->nodes[1], PN_trailer_paren)) { |
| 3311 | goto not_an_instruction; |
| 3312 | } |
| 3313 | |
| 3314 | // parse node looks like an instruction |
| 3315 | // get instruction name and args |
| 3316 | qstr op = MP_PARSE_NODE_LEAF_ARG(pns2->nodes[0]); |
| 3317 | pns2 = (mp_parse_node_struct_t *)pns2->nodes[1]; // PN_trailer_paren |
| 3318 | mp_parse_node_t *pn_arg; |
| 3319 | size_t n_args = mp_parse_node_extract_list(&pns2->nodes[0], PN_arglist, &pn_arg); |
| 3320 | |
| 3321 | // emit instructions |
| 3322 | if (op == MP_QSTR_label) { |
| 3323 | if (!(n_args == 1 && MP_PARSE_NODE_IS_ID(pn_arg[0]))) { |
compile_syntax_error(comp, nodes[i], MP_ERROR_TEXT("'label' requires 1 argument"));
| 3325 | return; |
| 3326 | } |
| 3327 | uint lab = comp_next_label(comp); |
| 3328 | if (pass > MP_PASS_SCOPE) { |
| 3329 | if (!EMIT_INLINE_ASM_ARG(label, lab, MP_PARSE_NODE_LEAF_ARG(pn_arg[0]))) { |
compile_syntax_error(comp, nodes[i], MP_ERROR_TEXT("label redefined"));
| 3331 | return; |
| 3332 | } |
| 3333 | } |
| 3334 | } else if (op == MP_QSTR_align) { |
| 3335 | if (!(n_args == 1 && MP_PARSE_NODE_IS_SMALL_INT(pn_arg[0]))) { |
compile_syntax_error(comp, nodes[i], MP_ERROR_TEXT("'align' requires 1 argument"));
| 3337 | return; |
| 3338 | } |
| 3339 | if (pass > MP_PASS_SCOPE) { |
| 3340 | mp_asm_base_align((mp_asm_base_t *)comp->emit_inline_asm, |
| 3341 | MP_PARSE_NODE_LEAF_SMALL_INT(pn_arg[0])); |
| 3342 | } |
| 3343 | } else if (op == MP_QSTR_data) { |
| 3344 | if (!(n_args >= 2 && MP_PARSE_NODE_IS_SMALL_INT(pn_arg[0]))) { |
compile_syntax_error(comp, nodes[i], MP_ERROR_TEXT("'data' requires at least 2 arguments"));
| 3346 | return; |
| 3347 | } |
| 3348 | if (pass > MP_PASS_SCOPE) { |
| 3349 | mp_int_t bytesize = MP_PARSE_NODE_LEAF_SMALL_INT(pn_arg[0]); |
| 3350 | for (uint j = 1; j < n_args; j++) { |
| 3351 | if (!MP_PARSE_NODE_IS_SMALL_INT(pn_arg[j])) { |
compile_syntax_error(comp, nodes[i], MP_ERROR_TEXT("'data' requires integer arguments"));
| 3353 | return; |
| 3354 | } |
| 3355 | mp_asm_base_data((mp_asm_base_t *)comp->emit_inline_asm, |
| 3356 | bytesize, MP_PARSE_NODE_LEAF_SMALL_INT(pn_arg[j])); |
| 3357 | } |
| 3358 | } |
| 3359 | } else { |
| 3360 | if (pass > MP_PASS_SCOPE) { |
| 3361 | EMIT_INLINE_ASM_ARG(op, op, n_args, pn_arg); |
| 3362 | } |
| 3363 | } |
| 3364 | |
| 3365 | if (comp->compile_error != MP_OBJ_NULL) { |
| 3366 | pns = pns2; // this is the parse node that had the error |
| 3367 | goto inline_asm_error; |
| 3368 | } |
| 3369 | } |
| 3370 | |
| 3371 | if (comp->pass > MP_PASS_SCOPE) { |
| 3372 | EMIT_INLINE_ASM_ARG(end_pass, type_sig); |
| 3373 | |
| 3374 | if (comp->pass == MP_PASS_EMIT) { |
| 3375 | void *f = mp_asm_base_get_code((mp_asm_base_t *)comp->emit_inline_asm); |
| 3376 | mp_emit_glue_assign_native(comp->scope_cur->raw_code, MP_CODE_NATIVE_ASM, |
| 3377 | f, mp_asm_base_get_code_size((mp_asm_base_t *)comp->emit_inline_asm), |
| 3378 | NULL, |
| 3379 | #if MICROPY_PERSISTENT_CODE_SAVE |
| 3380 | 0, 0, 0, 0, NULL, |
| 3381 | #endif |
| 3382 | comp->scope_cur->num_pos_args, 0, type_sig); |
| 3383 | } |
| 3384 | } |
| 3385 | |
| 3386 | if (comp->compile_error != MP_OBJ_NULL) { |
| 3387 | // inline assembler had an error; set line for its exception |
| 3388 | inline_asm_error: |
| 3389 | comp->compile_error_line = pns->source_line; |
| 3390 | } |
| 3391 | } |
| 3392 | #endif |
| 3393 | |
| 3394 | STATIC void scope_compute_things(scope_t *scope) { |
| 3395 | // in MicroPython we put the *x parameter after all other parameters (except **y) |
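// e.g. for def f(a, *b, c, **d) the id_info entries a, b, c, d are reordered
// so that the star parameter b comes after c, giving a, c, b, d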
| 3396 | if (scope->scope_flags & MP_SCOPE_FLAG_VARARGS) { |
| 3397 | id_info_t *id_param = NULL; |
| 3398 | for (int i = scope->id_info_len - 1; i >= 0; i--) { |
| 3399 | id_info_t *id = &scope->id_info[i]; |
| 3400 | if (id->flags & ID_FLAG_IS_STAR_PARAM) { |
| 3401 | if (id_param != NULL) { |
| 3402 | // swap star param with last param |
| 3403 | id_info_t temp = *id_param; |
| 3404 | *id_param = *id; |
| 3405 | *id = temp; |
| 3406 | } |
| 3407 | break; |
| 3408 | } else if (id_param == NULL && id->flags == ID_FLAG_IS_PARAM) { |
| 3409 | id_param = id; |
| 3410 | } |
| 3411 | } |
| 3412 | } |
| 3413 | |
| 3414 | // in functions, turn implicit globals into explicit globals |
| 3415 | // compute the index of each local |
| 3416 | scope->num_locals = 0; |
| 3417 | for (int i = 0; i < scope->id_info_len; i++) { |
| 3418 | id_info_t *id = &scope->id_info[i]; |
| 3419 | if (scope->kind == SCOPE_CLASS && id->qst == MP_QSTR___class__) { |
| 3420 | // __class__ is not counted as a local; if it's used then it becomes a ID_INFO_KIND_CELL |
| 3421 | continue; |
| 3422 | } |
| 3423 | if (SCOPE_IS_FUNC_LIKE(scope->kind) && id->kind == ID_INFO_KIND_GLOBAL_IMPLICIT) { |
| 3424 | id->kind = ID_INFO_KIND_GLOBAL_EXPLICIT; |
| 3425 | } |
| 3426 | #if MICROPY_EMIT_NATIVE |
| 3427 | if (id->kind == ID_INFO_KIND_GLOBAL_EXPLICIT) { |
| 3428 | // This function makes a reference to a global variable |
| 3429 | if (scope->emit_options == MP_EMIT_OPT_VIPER |
| 3430 | && mp_native_type_from_qstr(id->qst) >= MP_NATIVE_TYPE_INT) { |
| 3431 | // A casting operator in viper mode, not a real global reference |
| 3432 | } else { |
| 3433 | scope->scope_flags |= MP_SCOPE_FLAG_REFGLOBALS; |
| 3434 | } |
| 3435 | } |
| 3436 | #endif |
| 3437 | // params always count for 1 local, even if they are a cell |
| 3438 | if (id->kind == ID_INFO_KIND_LOCAL || (id->flags & ID_FLAG_IS_PARAM)) { |
| 3439 | id->local_num = scope->num_locals++; |
| 3440 | } |
| 3441 | } |
| 3442 | |
| 3443 | // compute the index of cell vars |
| 3444 | for (int i = 0; i < scope->id_info_len; i++) { |
| 3445 | id_info_t *id = &scope->id_info[i]; |
| 3446 | // in MicroPython the cells come right after the fast locals |
| 3447 | // parameters are not counted here, since they remain at the start |
| 3448 | // of the locals, even if they are cell vars |
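// e.g. in def f(x): def g(): return x, the parameter x becomes a cell but
// keeps its slot at the start of the locals; only non-parameter cells are
// appended here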
| 3449 | if (id->kind == ID_INFO_KIND_CELL && !(id->flags & ID_FLAG_IS_PARAM)) { |
| 3450 | id->local_num = scope->num_locals; |
| 3451 | scope->num_locals += 1; |
| 3452 | } |
| 3453 | } |
| 3454 | |
| 3455 | // compute the index of free vars |
| 3456 | // make sure they are in the order of the parent scope |
| 3457 | if (scope->parent != NULL) { |
| 3458 | int num_free = 0; |
| 3459 | for (int i = 0; i < scope->parent->id_info_len; i++) { |
| 3460 | id_info_t *id = &scope->parent->id_info[i]; |
| 3461 | if (id->kind == ID_INFO_KIND_CELL || id->kind == ID_INFO_KIND_FREE) { |
| 3462 | for (int j = 0; j < scope->id_info_len; j++) { |
| 3463 | id_info_t *id2 = &scope->id_info[j]; |
| 3464 | if (id2->kind == ID_INFO_KIND_FREE && id->qst == id2->qst) { |
| 3465 | assert(!(id2->flags & ID_FLAG_IS_PARAM)); // free vars should not be params |
| 3466 | // in MicroPython the frees come first, before the params |
| 3467 | id2->local_num = num_free; |
| 3468 | num_free += 1; |
| 3469 | } |
| 3470 | } |
| 3471 | } |
| 3472 | } |
| 3473 | // in MicroPython shift all other locals after the free locals |
| 3474 | if (num_free > 0) { |
| 3475 | for (int i = 0; i < scope->id_info_len; i++) { |
| 3476 | id_info_t *id = &scope->id_info[i]; |
| 3477 | if (id->kind != ID_INFO_KIND_FREE || (id->flags & ID_FLAG_IS_PARAM)) { |
| 3478 | id->local_num += num_free; |
| 3479 | } |
| 3480 | } |
| 3481 | scope->num_pos_args += num_free; // free vars are counted as params for passing them into the function |
| 3482 | scope->num_locals += num_free; |
| 3483 | } |
| 3484 | } |
| 3485 | } |
| 3486 | |
| 3487 | #if !MICROPY_PERSISTENT_CODE_SAVE |
| 3488 | STATIC |
| 3489 | #endif |
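// Compile a parse tree down to raw code.  Pass 1 (MP_PASS_SCOPE) walks every
// scope to collect identifiers and the maximum label count,
// scope_compute_things then assigns local/free/cell slot numbers, and each
// scope is finally run through MP_PASS_STACK_SIZE, MP_PASS_CODE_SIZE and
// MP_PASS_EMIT (inline assembler scopes use their own pass sequence).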
| 3490 | mp_raw_code_t *mp_compile_to_raw_code(mp_parse_tree_t *parse_tree, qstr source_file, bool is_repl) { |
| 3491 | // put compiler state on the stack, it's relatively small |
| 3492 | compiler_t comp_state = {0}; |
| 3493 | compiler_t *comp = &comp_state; |
| 3494 | |
| 3495 | comp->source_file = source_file; |
| 3496 | comp->is_repl = is_repl; |
| 3497 | comp->break_label = INVALID_LABEL; |
| 3498 | comp->continue_label = INVALID_LABEL; |
| 3499 | |
| 3500 | // create the module scope |
| 3501 | #if MICROPY_EMIT_NATIVE |
| 3502 | const uint emit_opt = MP_STATE_VM(default_emit_opt); |
| 3503 | #else |
| 3504 | const uint emit_opt = MP_EMIT_OPT_NONE; |
| 3505 | #endif |
| 3506 | scope_t *module_scope = scope_new_and_link(comp, SCOPE_MODULE, parse_tree->root, emit_opt); |
| 3507 | |
| 3508 | // create standard emitter; it's used at least for MP_PASS_SCOPE |
| 3509 | emit_t *emit_bc = emit_bc_new(); |
| 3510 | |
| 3511 | // compile pass 1 |
| 3512 | comp->emit = emit_bc; |
| 3513 | #if MICROPY_EMIT_NATIVE |
| 3514 | comp->emit_method_table = &emit_bc_method_table; |
| 3515 | #endif |
| 3516 | uint max_num_labels = 0; |
| 3517 | for (scope_t *s = comp->scope_head; s != NULL && comp->compile_error == MP_OBJ_NULL; s = s->next) { |
| 3518 | #if MICROPY_EMIT_INLINE_ASM |
| 3519 | if (s->emit_options == MP_EMIT_OPT_ASM) { |
| 3520 | compile_scope_inline_asm(comp, s, MP_PASS_SCOPE); |
| 3521 | } else |
| 3522 | #endif |
| 3523 | { |
| 3524 | compile_scope(comp, s, MP_PASS_SCOPE); |
| 3525 | |
| 3526 | // Check if any implicitly declared variables should be closed over |
| 3527 | for (size_t i = 0; i < s->id_info_len; ++i) { |
| 3528 | id_info_t *id = &s->id_info[i]; |
| 3529 | if (id->kind == ID_INFO_KIND_GLOBAL_IMPLICIT) { |
| 3530 | scope_check_to_close_over(s, id); |
| 3531 | } |
| 3532 | } |
| 3533 | } |
| 3534 | |
// update maximum number of labels needed
| 3536 | if (comp->next_label > max_num_labels) { |
| 3537 | max_num_labels = comp->next_label; |
| 3538 | } |
| 3539 | } |
| 3540 | |
| 3541 | // compute some things related to scope and identifiers |
| 3542 | for (scope_t *s = comp->scope_head; s != NULL && comp->compile_error == MP_OBJ_NULL; s = s->next) { |
| 3543 | scope_compute_things(s); |
| 3544 | } |
| 3545 | |
| 3546 | // set max number of labels now that it's calculated |
| 3547 | emit_bc_set_max_num_labels(emit_bc, max_num_labels); |
| 3548 | |
| 3549 | // compile pass 2 and 3 |
| 3550 | #if MICROPY_EMIT_NATIVE |
| 3551 | emit_t *emit_native = NULL; |
| 3552 | #endif |
| 3553 | for (scope_t *s = comp->scope_head; s != NULL && comp->compile_error == MP_OBJ_NULL; s = s->next) { |
| 3554 | #if MICROPY_EMIT_INLINE_ASM |
| 3555 | if (s->emit_options == MP_EMIT_OPT_ASM) { |
| 3556 | // inline assembly |
| 3557 | if (comp->emit_inline_asm == NULL) { |
| 3558 | comp->emit_inline_asm = ASM_EMITTER(new)(max_num_labels); |
| 3559 | } |
| 3560 | comp->emit = NULL; |
| 3561 | comp->emit_inline_asm_method_table = ASM_EMITTER_TABLE; |
| 3562 | compile_scope_inline_asm(comp, s, MP_PASS_CODE_SIZE); |
| 3563 | #if MICROPY_EMIT_INLINE_XTENSA |
| 3564 | // Xtensa requires an extra pass to compute size of l32r const table |
| 3565 | // TODO this can be improved by calculating it during SCOPE pass |
| 3566 | // but that requires some other structural changes to the asm emitters |
| 3567 | #if MICROPY_DYNAMIC_COMPILER |
| 3568 | if (mp_dynamic_compiler.native_arch == MP_NATIVE_ARCH_XTENSA) |
| 3569 | #endif |
| 3570 | { |
| 3571 | compile_scope_inline_asm(comp, s, MP_PASS_CODE_SIZE); |
| 3572 | } |
| 3573 | #endif |
| 3574 | if (comp->compile_error == MP_OBJ_NULL) { |
| 3575 | compile_scope_inline_asm(comp, s, MP_PASS_EMIT); |
| 3576 | } |
| 3577 | } else |
| 3578 | #endif |
| 3579 | { |
| 3580 | |
| 3581 | // choose the emit type |
| 3582 | |
| 3583 | switch (s->emit_options) { |
| 3584 | |
| 3585 | #if MICROPY_EMIT_NATIVE |
| 3586 | case MP_EMIT_OPT_NATIVE_PYTHON: |
| 3587 | case MP_EMIT_OPT_VIPER: |
| 3588 | if (emit_native == NULL) { |
| 3589 | emit_native = NATIVE_EMITTER(new)(&comp->compile_error, &comp->next_label, max_num_labels); |
| 3590 | } |
| 3591 | comp->emit_method_table = NATIVE_EMITTER_TABLE; |
| 3592 | comp->emit = emit_native; |
| 3593 | break; |
| 3594 | #endif // MICROPY_EMIT_NATIVE |
| 3595 | |
| 3596 | default: |
| 3597 | comp->emit = emit_bc; |
| 3598 | #if MICROPY_EMIT_NATIVE |
| 3599 | comp->emit_method_table = &emit_bc_method_table; |
| 3600 | #endif |
| 3601 | break; |
| 3602 | } |
| 3603 | |
| 3604 | // need a pass to compute stack size |
| 3605 | compile_scope(comp, s, MP_PASS_STACK_SIZE); |
| 3606 | |
| 3607 | // second last pass: compute code size |
| 3608 | if (comp->compile_error == MP_OBJ_NULL) { |
| 3609 | compile_scope(comp, s, MP_PASS_CODE_SIZE); |
| 3610 | } |
| 3611 | |
| 3612 | // final pass: emit code |
| 3613 | if (comp->compile_error == MP_OBJ_NULL) { |
| 3614 | compile_scope(comp, s, MP_PASS_EMIT); |
| 3615 | } |
| 3616 | } |
| 3617 | } |
| 3618 | |
| 3619 | if (comp->compile_error != MP_OBJ_NULL) { |
| 3620 | // if there is no line number for the error then use the line |
| 3621 | // number for the start of this scope |
| 3622 | compile_error_set_line(comp, comp->scope_cur->pn); |
| 3623 | // add a traceback to the exception using relevant source info |
| 3624 | mp_obj_exception_add_traceback(comp->compile_error, comp->source_file, |
| 3625 | comp->compile_error_line, comp->scope_cur->simple_name); |
| 3626 | } |
| 3627 | |
| 3628 | // free the emitters |
| 3629 | |
| 3630 | emit_bc_free(emit_bc); |
| 3631 | #if MICROPY_EMIT_NATIVE |
| 3632 | if (emit_native != NULL) { |
| 3633 | NATIVE_EMITTER(free)(emit_native); |
| 3634 | } |
| 3635 | #endif |
| 3636 | #if MICROPY_EMIT_INLINE_ASM |
| 3637 | if (comp->emit_inline_asm != NULL) { |
| 3638 | ASM_EMITTER(free)(comp->emit_inline_asm); |
| 3639 | } |
| 3640 | #endif |
| 3641 | |
| 3642 | // free the parse tree |
| 3643 | mp_parse_tree_clear(parse_tree); |
| 3644 | |
| 3645 | // free the scopes |
| 3646 | mp_raw_code_t *outer_raw_code = module_scope->raw_code; |
| 3647 | for (scope_t *s = module_scope; s;) { |
| 3648 | scope_t *next = s->next; |
| 3649 | scope_free(s); |
| 3650 | s = next; |
| 3651 | } |
| 3652 | |
| 3653 | if (comp->compile_error != MP_OBJ_NULL) { |
| 3654 | nlr_raise(comp->compile_error); |
| 3655 | } else { |
| 3656 | return outer_raw_code; |
| 3657 | } |
| 3658 | } |
| 3659 | |
| 3660 | mp_obj_t mp_compile(mp_parse_tree_t *parse_tree, qstr source_file, bool is_repl) { |
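// A minimal usage sketch, assuming src is a NUL-terminated C string of Python
// source (exception handling via nlr_push omitted; see the ports for the full
// pattern):
//
//     mp_lexer_t *lex = mp_lexer_new_from_str_len(MP_QSTR__lt_stdin_gt_, src, strlen(src), 0);
//     qstr source_name = lex->source_name;
//     mp_parse_tree_t parse_tree = mp_parse(lex, MP_PARSE_FILE_INPUT);
//     mp_obj_t module_fun = mp_compile(&parse_tree, source_name, false);
//     mp_call_function_0(module_fun);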
| 3661 | mp_raw_code_t *rc = mp_compile_to_raw_code(parse_tree, source_file, is_repl); |
| 3662 | // return function that executes the outer module |
| 3663 | return mp_make_function_from_raw_code(rc, MP_OBJ_NULL, MP_OBJ_NULL); |
| 3664 | } |
| 3665 | |
| 3666 | #endif // MICROPY_ENABLE_COMPILER |
| 3667 | |