1/*
2 * This file is part of the MicroPython project, http://micropython.org/
3 *
4 * The MIT License (MIT)
5 *
6 * Copyright (c) 2013, 2014 Damien P. George
7 * Copyright (c) 2014 Paul Sokolovsky
8 *
9 * Permission is hereby granted, free of charge, to any person obtaining a copy
10 * of this software and associated documentation files (the "Software"), to deal
11 * in the Software without restriction, including without limitation the rights
12 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
13 * copies of the Software, and to permit persons to whom the Software is
14 * furnished to do so, subject to the following conditions:
15 *
16 * The above copyright notice and this permission notice shall be included in
17 * all copies or substantial portions of the Software.
18 *
19 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
20 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
21 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
22 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
23 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
24 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
25 * THE SOFTWARE.
26 */
27
28#include <string.h>
29#include <assert.h>
30
31#include "py/objtuple.h"
32#include "py/objfun.h"
33#include "py/runtime.h"
34#include "py/bc.h"
35#include "py/stackctrl.h"
36
// Debug printing: enabled when MICROPY_DEBUG_VERBOSE is set, in which case
// DEBUG_printf comes from the core headers; otherwise it compiles to nothing.
#if MICROPY_DEBUG_VERBOSE // print debugging info
#define DEBUG_PRINT (1)
#else // don't print debugging info
#define DEBUG_PRINT (0)
#define DEBUG_printf(...) (void)0
#endif
43
44// Note: the "name" entry in mp_obj_type_t for a function type must be
45// MP_QSTR_function because it is used to determine if an object is of generic
46// function type.
47
48/******************************************************************************/
49/* builtin functions */
50
51STATIC mp_obj_t fun_builtin_0_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
52 (void)args;
53 assert(mp_obj_is_type(self_in, &mp_type_fun_builtin_0));
54 mp_obj_fun_builtin_fixed_t *self = MP_OBJ_TO_PTR(self_in);
55 mp_arg_check_num(n_args, n_kw, 0, 0, false);
56 return self->fun._0();
57}
58
// Type object for builtin functions of exactly 0 arguments.
// The name must be MP_QSTR_function (see note at top of file).
const mp_obj_type_t mp_type_fun_builtin_0 = {
    { &mp_type_type },
    .flags = MP_TYPE_FLAG_BINDS_SELF | MP_TYPE_FLAG_BUILTIN_FUN,
    .name = MP_QSTR_function,
    .call = fun_builtin_0_call,
    .unary_op = mp_generic_unary_op,
};
66
67STATIC mp_obj_t fun_builtin_1_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
68 assert(mp_obj_is_type(self_in, &mp_type_fun_builtin_1));
69 mp_obj_fun_builtin_fixed_t *self = MP_OBJ_TO_PTR(self_in);
70 mp_arg_check_num(n_args, n_kw, 1, 1, false);
71 return self->fun._1(args[0]);
72}
73
// Type object for builtin functions of exactly 1 argument.
const mp_obj_type_t mp_type_fun_builtin_1 = {
    { &mp_type_type },
    .flags = MP_TYPE_FLAG_BINDS_SELF | MP_TYPE_FLAG_BUILTIN_FUN,
    .name = MP_QSTR_function,
    .call = fun_builtin_1_call,
    .unary_op = mp_generic_unary_op,
};
81
82STATIC mp_obj_t fun_builtin_2_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
83 assert(mp_obj_is_type(self_in, &mp_type_fun_builtin_2));
84 mp_obj_fun_builtin_fixed_t *self = MP_OBJ_TO_PTR(self_in);
85 mp_arg_check_num(n_args, n_kw, 2, 2, false);
86 return self->fun._2(args[0], args[1]);
87}
88
// Type object for builtin functions of exactly 2 arguments.
const mp_obj_type_t mp_type_fun_builtin_2 = {
    { &mp_type_type },
    .flags = MP_TYPE_FLAG_BINDS_SELF | MP_TYPE_FLAG_BUILTIN_FUN,
    .name = MP_QSTR_function,
    .call = fun_builtin_2_call,
    .unary_op = mp_generic_unary_op,
};
96
97STATIC mp_obj_t fun_builtin_3_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
98 assert(mp_obj_is_type(self_in, &mp_type_fun_builtin_3));
99 mp_obj_fun_builtin_fixed_t *self = MP_OBJ_TO_PTR(self_in);
100 mp_arg_check_num(n_args, n_kw, 3, 3, false);
101 return self->fun._3(args[0], args[1], args[2]);
102}
103
// Type object for builtin functions of exactly 3 arguments.
const mp_obj_type_t mp_type_fun_builtin_3 = {
    { &mp_type_type },
    .flags = MP_TYPE_FLAG_BINDS_SELF | MP_TYPE_FLAG_BUILTIN_FUN,
    .name = MP_QSTR_function,
    .call = fun_builtin_3_call,
    .unary_op = mp_generic_unary_op,
};
111
112STATIC mp_obj_t fun_builtin_var_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
113 assert(mp_obj_is_type(self_in, &mp_type_fun_builtin_var));
114 mp_obj_fun_builtin_var_t *self = MP_OBJ_TO_PTR(self_in);
115
116 // check number of arguments
117 mp_arg_check_num_sig(n_args, n_kw, self->sig);
118
119 if (self->sig & 1) {
120 // function allows keywords
121
122 // we create a map directly from the given args array
123 mp_map_t kw_args;
124 mp_map_init_fixed_table(&kw_args, n_kw, args + n_args);
125
126 return self->fun.kw(n_args, args, &kw_args);
127
128 } else {
129 // function takes a variable number of arguments, but no keywords
130
131 return self->fun.var(n_args, args);
132 }
133}
134
// Type object for builtin functions taking a variable number of arguments.
const mp_obj_type_t mp_type_fun_builtin_var = {
    { &mp_type_type },
    .flags = MP_TYPE_FLAG_BINDS_SELF | MP_TYPE_FLAG_BUILTIN_FUN,
    .name = MP_QSTR_function,
    .call = fun_builtin_var_call,
    .unary_op = mp_generic_unary_op,
};
142
143/******************************************************************************/
144/* byte code functions */
145
// Extract the function name (as a qstr) from a bytecode code-info block.
// code_info points at the start of the code-info section of the prelude.
qstr mp_obj_code_get_name(const byte *code_info) {
    MP_BC_PRELUDE_SIZE_DECODE(code_info);
    #if MICROPY_PERSISTENT_CODE
    // name is stored as a fixed 16-bit little-endian qstr value
    return code_info[0] | (code_info[1] << 8);
    #else
    // name is stored as a variable-length encoded unsigned integer
    return mp_decode_uint_value(code_info);
    #endif
}
154
#if MICROPY_EMIT_NATIVE
// Forward declaration so mp_obj_fun_get_name can identify native functions.
STATIC const mp_obj_type_t mp_type_fun_native;
#endif
158
// Return the name of a function object, or the empty qstr (MP_QSTR_) if the
// name is not available (native functions don't store their name).
qstr mp_obj_fun_get_name(mp_const_obj_t fun_in) {
    const mp_obj_fun_bc_t *fun = MP_OBJ_TO_PTR(fun_in);
    #if MICROPY_EMIT_NATIVE
    if (fun->base.type == &mp_type_fun_native || fun->base.type == &mp_type_native_gen_wrap) {
        // TODO native functions don't have name stored
        return MP_QSTR_;
    }
    #endif

    // Skip the signature part of the prelude to reach the code-info block,
    // which begins with the function name.
    const byte *bc = fun->bytecode;
    MP_BC_PRELUDE_SIG_DECODE(bc);
    return mp_obj_code_get_name(bc);
}
172
#if MICROPY_CPYTHON_COMPAT
// Print a bytecode function in CPython-like form: <function name at 0x...>.
STATIC void fun_bc_print(const mp_print_t *print, mp_obj_t o_in, mp_print_kind_t kind) {
    (void)kind;
    mp_obj_fun_bc_t *o = MP_OBJ_TO_PTR(o_in);
    mp_printf(print, "<function %q at 0x%p>", mp_obj_fun_get_name(o_in), o);
}
#endif
180
#if DEBUG_PRINT
// Debug helper: print an array of objects as raw pointer values.
STATIC void dump_args(const mp_obj_t *a, size_t sz) {
    DEBUG_printf("%p: ", a);
    for (size_t i = 0; i < sz; i++) {
        DEBUG_printf("%p ", a[i]);
    }
    DEBUG_printf("\n");
}
#else
// Compiles away entirely when debug printing is disabled.
#define dump_args(...) (void)0
#endif
192
// With this macro you can tune the maximum number of function state bytes
// that will be allocated on the stack. Any function that needs more
// than this will try to use the heap, with fallback to stack allocation
// (see fun_bc_call: a failed heap allocation falls back to alloca).
#define VM_MAX_STATE_ON_STACK (sizeof(mp_uint_t) * 11)
197
// Decode the bytecode prelude to obtain the number of state slots
// (n_state_out_var) and the total state size in bytes (state_size_out_var),
// which covers the object state plus the exception stack.
#define DECODE_CODESTATE_SIZE(bytecode, n_state_out_var, state_size_out_var) \
    { \
        const uint8_t *ip = bytecode; \
        size_t n_exc_stack, scope_flags, n_pos_args, n_kwonly_args, n_def_args; \
        MP_BC_PRELUDE_SIG_DECODE_INTO(ip, n_state_out_var, n_exc_stack, scope_flags, n_pos_args, n_kwonly_args, n_def_args); \
        \
        /* state size in bytes */ \
        state_size_out_var = n_state_out_var * sizeof(mp_obj_t) \
            + n_exc_stack * sizeof(mp_exc_stack_t); \
    }
208
// Initialise a freshly-allocated code state: record the function and its
// state-slot count, let mp_setup_code_state() bind the call arguments
// (ip starts at 0 here; mp_setup_code_state presumably advances it to the
// start of the code — see py/bc.c), and remember the globals active at
// call time so the caller can restore them afterwards.
#define INIT_CODESTATE(code_state, _fun_bc, _n_state, n_args, n_kw, args) \
    code_state->fun_bc = _fun_bc; \
    code_state->ip = 0; \
    code_state->n_state = _n_state; \
    mp_setup_code_state(code_state, n_args, n_kw, args); \
    code_state->old_globals = mp_globals_get();
215
#if MICROPY_STACKLESS
// Allocate and initialise a code state for calling a bytecode function,
// without executing it (used by the stackless VM to chain calls).
// Returns NULL if heap allocation fails (non-pystack builds); the caller
// in vm.c then either raises RuntimeError or falls back to C-stack
// allocation. On success the globals are switched to the function's
// globals; the caller must restore code_state->old_globals afterwards.
mp_code_state_t *mp_obj_fun_bc_prepare_codestate(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
    MP_STACK_CHECK();
    mp_obj_fun_bc_t *self = MP_OBJ_TO_PTR(self_in);

    // Decode the prelude to find how much state this function needs.
    size_t n_state, state_size;
    DECODE_CODESTATE_SIZE(self->bytecode, n_state, state_size);

    mp_code_state_t *code_state;
    #if MICROPY_ENABLE_PYSTACK
    code_state = mp_pystack_alloc(sizeof(mp_code_state_t) + state_size);
    #else
    // If we use m_new_obj_var(), then on no memory, MemoryError will be
    // raised. But this is not correct exception for a function call,
    // RuntimeError should be raised instead. So, we use m_new_obj_var_maybe(),
    // return NULL, then vm.c takes the needed action (either raise
    // RuntimeError or fallback to stack allocation).
    code_state = m_new_obj_var_maybe(mp_code_state_t, byte, state_size);
    if (!code_state) {
        return NULL;
    }
    #endif

    INIT_CODESTATE(code_state, self, n_state, n_args, n_kw, args);

    // execute the byte code with the correct globals context
    mp_globals_set(self->globals);

    return code_state;
}
#endif
247
// Call a bytecode function: allocate the code state (pystack, heap, or C
// stack), bind the arguments, run the VM to completion, then return the
// result or re-raise the exception the VM produced.
STATIC mp_obj_t fun_bc_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
    MP_STACK_CHECK();

    DEBUG_printf("Input n_args: " UINT_FMT ", n_kw: " UINT_FMT "\n", n_args, n_kw);
    DEBUG_printf("Input pos args: ");
    dump_args(args, n_args);
    DEBUG_printf("Input kw args: ");
    dump_args(args + n_args, n_kw * 2);

    mp_obj_fun_bc_t *self = MP_OBJ_TO_PTR(self_in);

    // Decode the prelude to find how much state (locals + Python stack +
    // exception stack) this function needs.
    size_t n_state, state_size;
    DECODE_CODESTATE_SIZE(self->bytecode, n_state, state_size);

    // allocate state for locals and stack
    mp_code_state_t *code_state = NULL;
    #if MICROPY_ENABLE_PYSTACK
    code_state = mp_pystack_alloc(sizeof(mp_code_state_t) + state_size);
    #else
    if (state_size > VM_MAX_STATE_ON_STACK) {
        // State too big for the C stack: try the heap first.
        code_state = m_new_obj_var_maybe(mp_code_state_t, byte, state_size);
        #if MICROPY_DEBUG_VM_STACK_OVERFLOW
        if (code_state != NULL) {
            memset(code_state->state, 0, state_size);
        }
        #endif
    }
    if (code_state == NULL) {
        // Either the state fits on the C stack, or heap allocation failed;
        // in both cases allocate it on the C stack with alloca.
        code_state = alloca(sizeof(mp_code_state_t) + state_size);
        #if MICROPY_DEBUG_VM_STACK_OVERFLOW
        memset(code_state->state, 0, state_size);
        #endif
        state_size = 0; // indicate that we allocated using alloca
    }
    #endif

    INIT_CODESTATE(code_state, self, n_state, n_args, n_kw, args);

    // execute the byte code with the correct globals context
    mp_globals_set(self->globals);
    mp_vm_return_kind_t vm_return_kind = mp_execute_bytecode(code_state, MP_OBJ_NULL);
    mp_globals_set(code_state->old_globals);

    // Optional sanity checks that the VM's value stack stayed within the
    // allocated state (debug builds only).
    #if MICROPY_DEBUG_VM_STACK_OVERFLOW
    if (vm_return_kind == MP_VM_RETURN_NORMAL) {
        if (code_state->sp < code_state->state) {
            mp_printf(MICROPY_DEBUG_PRINTER, "VM stack underflow: " INT_FMT "\n", code_state->sp - code_state->state);
            assert(0);
        }
    }
    const byte *bytecode_ptr = self->bytecode;
    size_t n_state_unused, n_exc_stack_unused, scope_flags_unused;
    size_t n_pos_args, n_kwonly_args, n_def_args_unused;
    MP_BC_PRELUDE_SIG_DECODE_INTO(bytecode_ptr, n_state_unused, n_exc_stack_unused,
        scope_flags_unused, n_pos_args, n_kwonly_args, n_def_args_unused);
    // We can't check the case when an exception is returned in state[0]
    // and there are no arguments, because in this case our detection slot may have
    // been overwritten by the returned exception (which is allowed).
    if (!(vm_return_kind == MP_VM_RETURN_EXCEPTION && n_pos_args + n_kwonly_args == 0)) {
        // Just check to see that we have at least 1 null object left in the state.
        bool overflow = true;
        for (size_t i = 0; i < n_state - n_pos_args - n_kwonly_args; ++i) {
            if (code_state->state[i] == MP_OBJ_NULL) {
                overflow = false;
                break;
            }
        }
        if (overflow) {
            mp_printf(MICROPY_DEBUG_PRINTER, "VM stack overflow state=%p n_state+1=" UINT_FMT "\n", code_state->state, n_state);
            assert(0);
        }
    }
    #endif

    mp_obj_t result;
    if (vm_return_kind == MP_VM_RETURN_NORMAL) {
        // return value is in *sp
        result = *code_state->sp;
    } else {
        // must be an exception because normal functions can't yield
        assert(vm_return_kind == MP_VM_RETURN_EXCEPTION);
        // returned exception is in state[0]
        result = code_state->state[0];
    }

    #if MICROPY_ENABLE_PYSTACK
    mp_pystack_free(code_state);
    #else
    // free the state if it was allocated on the heap
    // (state_size was zeroed above when alloca was used)
    if (state_size != 0) {
        m_del_var(mp_code_state_t, byte, state_size, code_state);
    }
    #endif

    if (vm_return_kind == MP_VM_RETURN_NORMAL) {
        return result;
    } else { // MP_VM_RETURN_EXCEPTION
        nlr_raise(result);
    }
}
348
#if MICROPY_PY_FUNCTION_ATTRS
// Attribute handler for bytecode functions: supports loading __name__ and
// __globals__. Stores and deletes (dest[0] != MP_OBJ_NULL) fall through
// unhandled.
void mp_obj_fun_bc_attr(mp_obj_t self_in, qstr attr, mp_obj_t *dest) {
    if (dest[0] != MP_OBJ_NULL) {
        // not load attribute
        return;
    }
    if (attr == MP_QSTR___name__) {
        dest[0] = MP_OBJ_NEW_QSTR(mp_obj_fun_get_name(self_in));
    } else if (attr == MP_QSTR___globals__) {
        mp_obj_fun_bc_t *self = MP_OBJ_TO_PTR(self_in);
        dest[0] = MP_OBJ_FROM_PTR(self->globals);
    }
}
#endif
364
// Type object for bytecode functions (ordinary Python functions).
const mp_obj_type_t mp_type_fun_bc = {
    { &mp_type_type },
    .flags = MP_TYPE_FLAG_BINDS_SELF,
    .name = MP_QSTR_function,
    #if MICROPY_CPYTHON_COMPAT
    .print = fun_bc_print,
    #endif
    .call = fun_bc_call,
    .unary_op = mp_generic_unary_op,
    #if MICROPY_PY_FUNCTION_ATTRS
    .attr = mp_obj_fun_bc_attr,
    #endif
};
378
// Create a new bytecode function object.
//   def_args_in: tuple of default positional values, or MP_OBJ_NULL
//   def_kw_args: defaults for keyword-only args, or MP_OBJ_NULL
//   code: pointer to the function's bytecode (prelude included)
//   const_table: table of constants referenced by the bytecode
// The defaults are copied into the variable-length extra_args[] array,
// with the keyword-defaults object (if any) stored after the positional
// defaults.
mp_obj_t mp_obj_new_fun_bc(mp_obj_t def_args_in, mp_obj_t def_kw_args, const byte *code, const mp_uint_t *const_table) {
    size_t n_def_args = 0;
    size_t n_extra_args = 0;
    mp_obj_tuple_t *def_args = MP_OBJ_TO_PTR(def_args_in);
    if (def_args_in != MP_OBJ_NULL) {
        assert(mp_obj_is_type(def_args_in, &mp_type_tuple));
        n_def_args = def_args->len;
        n_extra_args = def_args->len;
    }
    if (def_kw_args != MP_OBJ_NULL) {
        // one extra slot for the keyword-defaults object
        n_extra_args += 1;
    }
    mp_obj_fun_bc_t *o = m_new_obj_var(mp_obj_fun_bc_t, mp_obj_t, n_extra_args);
    o->base.type = &mp_type_fun_bc;
    // capture the globals active at definition time
    o->globals = mp_globals_get();
    o->bytecode = code;
    o->const_table = const_table;
    if (def_args != NULL) {
        memcpy(o->extra_args, def_args->items, n_def_args * sizeof(mp_obj_t));
    }
    if (def_kw_args != MP_OBJ_NULL) {
        o->extra_args[n_def_args] = def_kw_args;
    }
    return MP_OBJ_FROM_PTR(o);
}
404
405/******************************************************************************/
406/* native functions */
407
408#if MICROPY_EMIT_NATIVE
409
410STATIC mp_obj_t fun_native_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
411 MP_STACK_CHECK();
412 mp_obj_fun_bc_t *self = self_in;
413 mp_call_fun_t fun = MICROPY_MAKE_POINTER_CALLABLE((void *)self->bytecode);
414 return fun(self_in, n_args, n_kw, args);
415}
416
// Type object for native (machine-code) functions.
STATIC const mp_obj_type_t mp_type_fun_native = {
    { &mp_type_type },
    .flags = MP_TYPE_FLAG_BINDS_SELF,
    .name = MP_QSTR_function,
    .call = fun_native_call,
    .unary_op = mp_generic_unary_op,
};
424
425mp_obj_t mp_obj_new_fun_native(mp_obj_t def_args_in, mp_obj_t def_kw_args, const void *fun_data, const mp_uint_t *const_table) {
426 mp_obj_fun_bc_t *o = mp_obj_new_fun_bc(def_args_in, def_kw_args, (const byte *)fun_data, const_table);
427 o->base.type = &mp_type_fun_native;
428 return o;
429}
430
431#endif // MICROPY_EMIT_NATIVE
432
433/******************************************************************************/
434/* inline assembler functions */
435
436#if MICROPY_EMIT_INLINE_ASM
437
// An inline-assembler function: a raw machine-code routine taking up to
// 4 machine-word arguments and returning a single machine word.
typedef struct _mp_obj_fun_asm_t {
    mp_obj_base_t base;
    size_t n_args; // fixed number of arguments the routine expects
    const void *fun_data; // GC must be able to trace this pointer
    mp_uint_t type_sig; // return-type signature, decoded by mp_native_to_obj
} mp_obj_fun_asm_t;
444
// Calling signatures for inline-assembler routines of 0 to 4 arguments.
typedef mp_uint_t (*inline_asm_fun_0_t)(void);
typedef mp_uint_t (*inline_asm_fun_1_t)(mp_uint_t);
typedef mp_uint_t (*inline_asm_fun_2_t)(mp_uint_t, mp_uint_t);
typedef mp_uint_t (*inline_asm_fun_3_t)(mp_uint_t, mp_uint_t, mp_uint_t);
typedef mp_uint_t (*inline_asm_fun_4_t)(mp_uint_t, mp_uint_t, mp_uint_t, mp_uint_t);
450
451// convert a MicroPython object to a sensible value for inline asm
452STATIC mp_uint_t convert_obj_for_inline_asm(mp_obj_t obj) {
453 // TODO for byte_array, pass pointer to the array
454 if (mp_obj_is_small_int(obj)) {
455 return MP_OBJ_SMALL_INT_VALUE(obj);
456 } else if (obj == mp_const_none) {
457 return 0;
458 } else if (obj == mp_const_false) {
459 return 0;
460 } else if (obj == mp_const_true) {
461 return 1;
462 } else if (mp_obj_is_type(obj, &mp_type_int)) {
463 return mp_obj_int_get_truncated(obj);
464 } else if (mp_obj_is_str(obj)) {
465 // pointer to the string (it's probably constant though!)
466 size_t l;
467 return (mp_uint_t)mp_obj_str_get_data(obj, &l);
468 } else {
469 const mp_obj_type_t *type = mp_obj_get_type(obj);
470 #if MICROPY_PY_BUILTINS_FLOAT
471 if (type == &mp_type_float) {
472 // convert float to int (could also pass in float registers)
473 return (mp_int_t)mp_obj_float_get(obj);
474 }
475 #endif
476 if (type == &mp_type_tuple || type == &mp_type_list) {
477 // pointer to start of tuple (could pass length, but then could use len(x) for that)
478 size_t len;
479 mp_obj_t *items;
480 mp_obj_get_array(obj, &len, &items);
481 return (mp_uint_t)items;
482 } else {
483 mp_buffer_info_t bufinfo;
484 if (mp_get_buffer(obj, &bufinfo, MP_BUFFER_READ)) {
485 // supports the buffer protocol, return a pointer to the data
486 return (mp_uint_t)bufinfo.buf;
487 } else {
488 // just pass along a pointer to the object
489 return (mp_uint_t)obj;
490 }
491 }
492 }
493}
494
495STATIC mp_obj_t fun_asm_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
496 mp_obj_fun_asm_t *self = self_in;
497
498 mp_arg_check_num(n_args, n_kw, self->n_args, self->n_args, false);
499
500 const void *fun = MICROPY_MAKE_POINTER_CALLABLE(self->fun_data);
501
502 mp_uint_t ret;
503 if (n_args == 0) {
504 ret = ((inline_asm_fun_0_t)fun)();
505 } else if (n_args == 1) {
506 ret = ((inline_asm_fun_1_t)fun)(convert_obj_for_inline_asm(args[0]));
507 } else if (n_args == 2) {
508 ret = ((inline_asm_fun_2_t)fun)(convert_obj_for_inline_asm(args[0]), convert_obj_for_inline_asm(args[1]));
509 } else if (n_args == 3) {
510 ret = ((inline_asm_fun_3_t)fun)(convert_obj_for_inline_asm(args[0]), convert_obj_for_inline_asm(args[1]), convert_obj_for_inline_asm(args[2]));
511 } else {
512 // compiler allows at most 4 arguments
513 assert(n_args == 4);
514 ret = ((inline_asm_fun_4_t)fun)(
515 convert_obj_for_inline_asm(args[0]),
516 convert_obj_for_inline_asm(args[1]),
517 convert_obj_for_inline_asm(args[2]),
518 convert_obj_for_inline_asm(args[3])
519 );
520 }
521
522 return mp_native_to_obj(ret, self->type_sig);
523}
524
// Type object for inline-assembler functions.
STATIC const mp_obj_type_t mp_type_fun_asm = {
    { &mp_type_type },
    .flags = MP_TYPE_FLAG_BINDS_SELF,
    .name = MP_QSTR_function,
    .call = fun_asm_call,
    .unary_op = mp_generic_unary_op,
};
532
533mp_obj_t mp_obj_new_fun_asm(size_t n_args, const void *fun_data, mp_uint_t type_sig) {
534 mp_obj_fun_asm_t *o = m_new_obj(mp_obj_fun_asm_t);
535 o->base.type = &mp_type_fun_asm;
536 o->n_args = n_args;
537 o->fun_data = fun_data;
538 o->type_sig = type_sig;
539 return o;
540}
541
542#endif // MICROPY_EMIT_INLINE_ASM
543