runtime.c 43.5 KB
Newer Older
1
// in principle, rt_xxx functions are called only by vm/native/viper and make assumptions about args
2
// mp_xxx functions are safer and can be called by anyone
3
// note that rt_assign_xxx are called only from emit*, and maybe we can rename them to reflect this
4

Damien's avatar
Damien committed
5
6
7
#include <stdio.h>
#include <string.h>
#include <assert.h>
8
#include <math.h>
Damien's avatar
Damien committed
9

10
#include "nlr.h"
Damien's avatar
Damien committed
11
#include "misc.h"
12
#include "mpconfig.h"
13
#include "qstr.h"
14
#include "obj.h"
15
#include "parsenum.h"
16
#include "runtime0.h"
Damien's avatar
Damien committed
17
#include "runtime.h"
18
19
#include "map.h"
#include "builtin.h"
20
#include "objarray.h"
21
#include "bc.h"
22
#include "intdivmod.h"
23

24
// compile-time debug switch: flip "#if 0" to "#if 1" to enable verbose
// tracing of runtime ops and dumping of emitted code (WRITE_CODE)
#if 0 // print debugging info
#define DEBUG_PRINT (1)
#define WRITE_CODE (1)
#define DEBUG_printf DEBUG_printf
#define DEBUG_OP_printf(...) DEBUG_printf(__VA_ARGS__)
#else // don't print debugging info
// debug output compiles away to nothing in normal builds
#define DEBUG_printf(...) (void)0
#define DEBUG_OP_printf(...) (void)0
#endif
Damien's avatar
Damien committed
33

34
// locals and globals need to be pointers because they can be the same in outer module scope
STATIC mp_map_t *map_locals;        // current local name space (aliases map_globals at module level)
STATIC mp_map_t *map_globals;       // current module-level name space
STATIC mp_map_t map_builtins;       // mutable additions to the built-ins (searched before builtin_table)
STATIC mp_map_t map_loaded_modules; // TODO: expose as sys.modules
39

Damien's avatar
Damien committed
40
// the kind of compiled code stored in an mp_code_t slot
typedef enum {
    MP_CODE_NONE,       // slot unused/not yet assigned
    MP_CODE_BYTE,       // byte code for the VM
    MP_CODE_NATIVE,     // machine code with mp_obj_t calling convention
    MP_CODE_INLINE_ASM, // raw machine code (inline assembler)
} mp_code_kind_t;

// one entry in the unique_codes table; filled in by the rt_assign_* functions
typedef struct _mp_code_t {
    mp_code_kind_t kind : 8;
    uint scope_flags : 8;
    uint n_args : 16;
    uint n_state : 16;  // locals + stack slots needed (byte code only)
    union {             // payload selected by kind
        struct {
            byte *code;
            uint len;
        } u_byte;
        struct {
            mp_fun_t fun;
        } u_native;
        struct {
            void *fun;
        } u_inline_asm;
    };
    qstr *arg_names;    // set for byte code only; NULL-ness not checked here
} mp_code_t;
Damien's avatar
Damien committed
66

67
68
69
// table of all compiled code objects, indexed by unique_code_id;
// id 0 is reserved to mean "no code" (see rt_init)
STATIC uint next_unique_code_id;
STATIC machine_uint_t unique_codes_alloc = 0; // number of slots allocated in unique_codes
STATIC mp_code_t *unique_codes = NULL;

#ifdef WRITE_CODE
// when WRITE_CODE debugging is enabled, emitted native/asm code is also
// dumped to this file for offline inspection
FILE *fp_write_code = NULL;
#endif
Damien's avatar
Damien committed
74

75
76
77
78
79
80
81
82
83
84
85
// builtins
// we put this table in ROM because it's always needed and takes up quite a bit of room in RAM
// in fact, it uses less ROM here in table form than the equivalent in code form initialising a dynamic mp_map_t object in RAM
// at the moment it's a linear table, but we could convert it to a const mp_map_t table with a simple preprocessing script
// if we wanted to allow dynamic modification of the builtins, we could provide an mp_map_t object which is searched before this one
// NOTE: rt_load_global scans this table linearly, after map_globals and map_builtins

typedef struct _mp_builtin_elem_t {
    qstr qstr; // built-in name
    mp_obj_t fun; // the object bound to that name (type, function, ...)
} mp_builtin_elem_t;

STATIC const mp_builtin_elem_t builtin_table[] = {
    // built-in core functions
    { MP_QSTR___build_class__, (mp_obj_t)&mp_builtin___build_class___obj },
    { MP_QSTR___import__, (mp_obj_t)&mp_builtin___import___obj },
    { MP_QSTR___repl_print__, (mp_obj_t)&mp_builtin___repl_print___obj },

    // built-in types
    { MP_QSTR_bool, (mp_obj_t)&bool_type },
    { MP_QSTR_bytes, (mp_obj_t)&bytes_type },
#if MICROPY_ENABLE_FLOAT
    { MP_QSTR_complex, (mp_obj_t)&mp_type_complex },
#endif
    { MP_QSTR_dict, (mp_obj_t)&dict_type },
    { MP_QSTR_enumerate, (mp_obj_t)&enumerate_type },
    { MP_QSTR_filter, (mp_obj_t)&filter_type },
#if MICROPY_ENABLE_FLOAT
    { MP_QSTR_float, (mp_obj_t)&mp_type_float },
#endif
    { MP_QSTR_int, (mp_obj_t)&int_type },
    { MP_QSTR_list, (mp_obj_t)&list_type },
    { MP_QSTR_map, (mp_obj_t)&map_type },
    { MP_QSTR_object, (mp_obj_t)&mp_type_object },
    { MP_QSTR_set, (mp_obj_t)&set_type },
    { MP_QSTR_str, (mp_obj_t)&str_type },
    { MP_QSTR_super, (mp_obj_t)&super_type },
    { MP_QSTR_tuple, (mp_obj_t)&tuple_type },
    { MP_QSTR_type, (mp_obj_t)&mp_type_type },
    { MP_QSTR_zip, (mp_obj_t)&zip_type },

    { MP_QSTR_classmethod, (mp_obj_t)&mp_type_classmethod },
    { MP_QSTR_staticmethod, (mp_obj_t)&mp_type_staticmethod },

    // built-in user functions
    { MP_QSTR_abs, (mp_obj_t)&mp_builtin_abs_obj },
    { MP_QSTR_all, (mp_obj_t)&mp_builtin_all_obj },
    { MP_QSTR_any, (mp_obj_t)&mp_builtin_any_obj },
    { MP_QSTR_callable, (mp_obj_t)&mp_builtin_callable_obj },
    { MP_QSTR_chr, (mp_obj_t)&mp_builtin_chr_obj },
    { MP_QSTR_dir, (mp_obj_t)&mp_builtin_dir_obj },
    { MP_QSTR_divmod, (mp_obj_t)&mp_builtin_divmod_obj },
    { MP_QSTR_eval, (mp_obj_t)&mp_builtin_eval_obj },
    { MP_QSTR_exec, (mp_obj_t)&mp_builtin_exec_obj },
    { MP_QSTR_hash, (mp_obj_t)&mp_builtin_hash_obj },
    { MP_QSTR_id, (mp_obj_t)&mp_builtin_id_obj },
    { MP_QSTR_isinstance, (mp_obj_t)&mp_builtin_isinstance_obj },
    { MP_QSTR_issubclass, (mp_obj_t)&mp_builtin_issubclass_obj },
    { MP_QSTR_iter, (mp_obj_t)&mp_builtin_iter_obj },
    { MP_QSTR_len, (mp_obj_t)&mp_builtin_len_obj },
    { MP_QSTR_max, (mp_obj_t)&mp_builtin_max_obj },
    { MP_QSTR_min, (mp_obj_t)&mp_builtin_min_obj },
    { MP_QSTR_next, (mp_obj_t)&mp_builtin_next_obj },
    { MP_QSTR_ord, (mp_obj_t)&mp_builtin_ord_obj },
    { MP_QSTR_pow, (mp_obj_t)&mp_builtin_pow_obj },
    { MP_QSTR_print, (mp_obj_t)&mp_builtin_print_obj },
    { MP_QSTR_range, (mp_obj_t)&mp_builtin_range_obj },
    { MP_QSTR_repr, (mp_obj_t)&mp_builtin_repr_obj },
    { MP_QSTR_sorted, (mp_obj_t)&mp_builtin_sorted_obj },
    { MP_QSTR_sum, (mp_obj_t)&mp_builtin_sum_obj },
    { MP_QSTR_bytearray, (mp_obj_t)&mp_builtin_bytearray_obj },

    // built-in exceptions
    { MP_QSTR_BaseException, (mp_obj_t)&mp_type_BaseException },
    { MP_QSTR_ArithmeticError, (mp_obj_t)&mp_type_ArithmeticError },
    { MP_QSTR_AssertionError, (mp_obj_t)&mp_type_AssertionError },
    { MP_QSTR_AttributeError, (mp_obj_t)&mp_type_AttributeError },
    { MP_QSTR_BufferError, (mp_obj_t)&mp_type_BufferError },
    { MP_QSTR_EOFError, (mp_obj_t)&mp_type_EOFError },
    { MP_QSTR_EnvironmentError, (mp_obj_t)&mp_type_EnvironmentError },
    { MP_QSTR_Exception, (mp_obj_t)&mp_type_Exception },
    { MP_QSTR_FloatingPointError, (mp_obj_t)&mp_type_FloatingPointError },
    { MP_QSTR_GeneratorExit, (mp_obj_t)&mp_type_GeneratorExit },
    { MP_QSTR_IOError, (mp_obj_t)&mp_type_IOError },
    { MP_QSTR_ImportError, (mp_obj_t)&mp_type_ImportError },
    { MP_QSTR_IndentationError, (mp_obj_t)&mp_type_IndentationError },
    { MP_QSTR_IndexError, (mp_obj_t)&mp_type_IndexError },
    { MP_QSTR_KeyError, (mp_obj_t)&mp_type_KeyError },
    { MP_QSTR_LookupError, (mp_obj_t)&mp_type_LookupError },
    { MP_QSTR_MemoryError, (mp_obj_t)&mp_type_MemoryError },
    { MP_QSTR_NameError, (mp_obj_t)&mp_type_NameError },
    { MP_QSTR_NotImplementedError, (mp_obj_t)&mp_type_NotImplementedError },
    { MP_QSTR_OSError, (mp_obj_t)&mp_type_OSError },
    { MP_QSTR_OverflowError, (mp_obj_t)&mp_type_OverflowError },
    { MP_QSTR_ReferenceError, (mp_obj_t)&mp_type_ReferenceError },
    { MP_QSTR_RuntimeError, (mp_obj_t)&mp_type_RuntimeError },
    { MP_QSTR_SyntaxError, (mp_obj_t)&mp_type_SyntaxError },
    { MP_QSTR_SystemError, (mp_obj_t)&mp_type_SystemError },
    { MP_QSTR_SystemExit, (mp_obj_t)&mp_type_SystemExit },
    { MP_QSTR_TabError, (mp_obj_t)&mp_type_TabError },
    { MP_QSTR_TypeError, (mp_obj_t)&mp_type_TypeError },
    { MP_QSTR_UnboundLocalError, (mp_obj_t)&mp_type_UnboundLocalError },
    { MP_QSTR_ValueError, (mp_obj_t)&mp_type_ValueError },
    { MP_QSTR_ZeroDivisionError, (mp_obj_t)&mp_type_ZeroDivisionError },
    { MP_QSTR_StopIteration, (mp_obj_t)&mp_type_StopIteration },
    // Somehow CPython managed to have OverflowError not inherit from ValueError ;-/
    // TODO: For MICROPY_CPYTHON_COMPAT==0 use ValueError to avoid exc proliferation

    // Extra builtins as defined by a port
    MICROPY_EXTRA_BUILTINS

    { MP_QSTR_, MP_OBJ_NULL }, // end of list sentinel
};

188
// a good optimising compiler will inline this if necessary
189
STATIC void mp_map_add_qstr(mp_map_t *map, qstr qstr, mp_obj_t value) {
190
191
192
    mp_map_lookup(map, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = value;
}

193
// Initialise the runtime: name spaces, built-ins, pre-loaded modules and the
// unique-code table. Must be called once before any other rt_xxx function.
void rt_init(void) {
    // locals = globals for outer module (see Objects/frameobject.c/PyFrame_New())
    map_locals = map_globals = mp_map_new(1);
    mp_map_add_qstr(map_globals, MP_QSTR___name__, MP_OBJ_NEW_QSTR(MP_QSTR___main__));

    // init built-in hash table
    mp_map_init(&map_builtins, 3);

    // init loaded modules table
    mp_map_init(&map_loaded_modules, 3);

    // built-in objects
    mp_map_add_qstr(&map_builtins, MP_QSTR_Ellipsis, mp_const_ellipsis);

    // pre-create the array module exposing the array type
    mp_obj_t m_array = mp_obj_new_module(MP_QSTR_array);
    rt_store_attr(m_array, MP_QSTR_array, (mp_obj_t)&array_type);

    // pre-create the collections module exposing namedtuple
    mp_obj_t m_collections = mp_obj_new_module(MP_QSTR_collections);
    rt_store_attr(m_collections, MP_QSTR_namedtuple, (mp_obj_t)&mp_namedtuple_obj);

#if MICROPY_CPYTHON_COMPAT
    // Precreate sys module, so "import sys" didn't throw exceptions.
    mp_obj_t m_sys = mp_obj_new_module(MP_QSTR_sys);
    // Avoid warning of unused var
    (void)m_sys;
#endif
    // init sys.path
    // for efficiency, left to platform-specific startup code
    //sys_path = mp_obj_new_list(0, NULL);
    //rt_store_attr(m_sys, MP_QSTR_path, sys_path);

    // we pre-import the micropython module
    // probably shouldn't do this, so we are compatible with CPython
    rt_store_name(MP_QSTR_micropython, (mp_obj_t)&mp_module_micropython);

    // TODO: wastes one mp_code_t structure in mem
    next_unique_code_id = 1; // 0 indicates "no code"
    unique_codes_alloc = 0;
    unique_codes = NULL;

#ifdef WRITE_CODE
    fp_write_code = fopen("out-code", "wb");
#endif
}

238
// Tear down what rt_init created: the unique-code table, the global name
// space and the module/builtin maps, plus the debug code-dump file if open.
// NOTE(review): map_locals is not freed separately; at module level it
// aliases map_globals (see rt_init), so this assumes they still coincide.
void rt_deinit(void) {
    m_del(mp_code_t, unique_codes, unique_codes_alloc);
    mp_map_free(map_globals);
    mp_map_deinit(&map_loaded_modules);
    mp_map_deinit(&map_builtins);
#ifdef WRITE_CODE
    if (fp_write_code != NULL) {
        fclose(fp_write_code);
    }
#endif
}

250
uint rt_get_unique_code_id(void) {
251
    return next_unique_code_id++;
Damien's avatar
Damien committed
252
253
}

254
// Grow the unique_codes table so that every id handed out so far
// (< next_unique_code_id) has a slot; new slots are marked MP_CODE_NONE.
// No-op when the table is already large enough.
STATIC void alloc_unique_codes(void) {
    if (next_unique_code_id > unique_codes_alloc) {
        DEBUG_printf("allocate more unique codes: " UINT_FMT " -> %u\n", unique_codes_alloc, next_unique_code_id);
        // increase size of unique_codes table
        unique_codes = m_renew(mp_code_t, unique_codes, unique_codes_alloc, next_unique_code_id);
        for (uint i = unique_codes_alloc; i < next_unique_code_id; i++) {
            unique_codes[i].kind = MP_CODE_NONE;
        }
        unique_codes_alloc = next_unique_code_id;
    }
}

266
// Fill the unique_codes slot unique_code_id with VM byte code and its
// metadata. Called only from the emitters (see file header comment).
// The slot must be fresh (MP_CODE_NONE); ownership of code/arg_names
// stays with the caller's allocation, only the pointers are stored.
void rt_assign_byte_code(uint unique_code_id, byte *code, uint len, int n_args, int n_locals, int n_stack, uint scope_flags, qstr *arg_names) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_BYTE;
    unique_codes[unique_code_id].scope_flags = scope_flags;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = n_locals + n_stack;
    unique_codes[unique_code_id].u_byte.code = code;
    unique_codes[unique_code_id].u_byte.len = len;
    unique_codes[unique_code_id].arg_names = arg_names;

    //printf("byte code: %d bytes\n", len);

#ifdef DEBUG_PRINT
    DEBUG_printf("assign byte code: id=%d code=%p len=%u n_args=%d n_locals=%d n_stack=%d\n", unique_code_id, code, len, n_args, n_locals, n_stack);
    // hex-dump the first 128 bytes of the byte code, 16 bytes per row
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", code[i]);
    }
    DEBUG_printf("\n");
#if MICROPY_DEBUG_PRINTERS
    mp_byte_code_print(code, len);
#endif
#endif
}

295
// Fill the unique_codes slot unique_code_id with compiled native code
// (fun uses the mp_obj_t calling convention). Called only from the emitters.
// The slot must be fresh (MP_CODE_NONE).
void rt_assign_native_code(uint unique_code_id, void *fun, uint len, int n_args) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_NATIVE;
    unique_codes[unique_code_id].scope_flags = 0;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = 0;
    unique_codes[unique_code_id].u_native.fun = fun;

    //printf("native code: %d bytes\n", len);

#ifdef DEBUG_PRINT
    DEBUG_printf("assign native code: id=%d fun=%p len=%u n_args=%d\n", unique_code_id, fun, len, n_args);
    byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
    // hex-dump the first 128 bytes of the machine code, 16 bytes per row
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", fun_data[i]);
    }
    DEBUG_printf("\n");

#ifdef WRITE_CODE
    // also dump the raw code to the out-code file for offline disassembly
    if (fp_write_code != NULL) {
        fwrite(fun_data, len, 1, fp_write_code);
        fflush(fp_write_code);
    }
#endif
#endif
}

327
// Fill the unique_codes slot unique_code_id with inline-assembler code
// (fun is raw machine code, no mp_obj_t calling convention).
// Called only from the emitters; the slot must be fresh (MP_CODE_NONE).
void rt_assign_inline_asm_code(uint unique_code_id, void *fun, uint len, int n_args) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_INLINE_ASM;
    unique_codes[unique_code_id].scope_flags = 0;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = 0;
    unique_codes[unique_code_id].u_inline_asm.fun = fun;

#ifdef DEBUG_PRINT
    DEBUG_printf("assign inline asm code: id=%d fun=%p len=%u n_args=%d\n", unique_code_id, fun, len, n_args);
    byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
    // hex-dump the first 128 bytes of the machine code, 16 bytes per row
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", fun_data[i]);
    }
    DEBUG_printf("\n");

#ifdef WRITE_CODE
    // also dump the raw code to the out-code file for offline disassembly
    if (fp_write_code != NULL) {
        fwrite(fun_data, len, 1, fp_write_code);
    }
#endif
#endif
}

356
357
// Python truth test: returns 1 if arg is truthy, 0 otherwise.
// Order of checks mirrors CPython: singletons, small ints, then the type's
// __bool__ (unary_op BOOL), then __len__, and finally "object is true".
int rt_is_true(mp_obj_t arg) {
    DEBUG_OP_printf("is true %p\n", arg);

    // the singleton constants have fixed truth values
    if (arg == mp_const_false || arg == mp_const_none) {
        return 0;
    }
    if (arg == mp_const_true) {
        return 1;
    }

    // small ints are truthy iff non-zero
    if (MP_OBJ_IS_SMALL_INT(arg)) {
        return MP_OBJ_SMALL_INT_VALUE(arg) != 0 ? 1 : 0;
    }

    // ask the type for __bool__
    mp_obj_type_t *type = mp_obj_get_type(arg);
    if (type->unary_op != NULL) {
        mp_obj_t result = type->unary_op(RT_UNARY_OP_BOOL, arg);
        if (result != MP_OBJ_NULL) {
            return result == mp_const_true;
        }
    }

    // no __bool__: fall back to __len__, truthy iff len != 0
    mp_obj_t len = mp_obj_len_maybe(arg);
    if (len != MP_OBJ_NULL) {
        return len != MP_OBJ_NEW_SMALL_INT(0);
    }

    // any other obj is true per Python semantics
    return 1;
}

// Thin runtime wrapper around list append, used by the emitters.
mp_obj_t rt_list_append(mp_obj_t self_in, mp_obj_t arg) {
    return mp_obj_list_append(self_in, arg);
}

// Parse the decimal literal stored in qstr and return the resulting
// number object (mp_parse_num_decimal is called with allow_imag=true,
// force_complex=false).
mp_obj_t rt_load_const_dec(qstr qstr) {
    DEBUG_OP_printf("load '%s'\n", qstr_str(qstr));
    uint len;
    const byte* data = qstr_data(qstr, &len);
    return mp_parse_num_decimal((const char*)data, len, true, false);
}

401
// Load a string constant: interned strings are represented directly as
// tagged qstr objects, so no allocation is needed.
mp_obj_t rt_load_const_str(qstr qstr) {
    DEBUG_OP_printf("load '%s'\n", qstr_str(qstr));
    return MP_OBJ_NEW_QSTR(qstr);
}

406
407
408
409
410
411
412
// Load a bytes constant: materialise a bytes object from the qstr's data
// (unlike strings, bytes are not represented as tagged qstrs).
mp_obj_t rt_load_const_bytes(qstr qstr) {
    DEBUG_OP_printf("load b'%s'\n", qstr_str(qstr));
    uint len;
    const byte *data = qstr_data(qstr, &len);
    return mp_obj_new_bytes(data, len);
}

413
// Resolve a name using the LOAD_NAME rule.
// logic: search locals, globals, builtins
mp_obj_t rt_load_name(qstr qstr) {
    DEBUG_OP_printf("load name %s\n", qstr_str(qstr));
    mp_map_elem_t *elem = mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (elem == NULL) {
        // not a local: fall back to globals/builtins (may raise NameError)
        return rt_load_global(qstr);
    }
    return elem->value;
}

424
// Resolve a name using the LOAD_GLOBAL rule.
// logic: search globals, builtins
// Raises NameError (via nlr_jump, which does not return) if not found.
mp_obj_t rt_load_global(qstr qstr) {
    DEBUG_OP_printf("load global %s\n", qstr_str(qstr));
    mp_map_elem_t *elem = mp_map_lookup(map_globals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (elem == NULL) {
        // not in globals: try the mutable builtins map, then the ROM table
        elem = mp_map_lookup(&map_builtins, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
        if (elem == NULL) {
            // linear scan of the ROM builtin table, terminated by the
            // MP_QSTR_ sentinel entry
            for (const mp_builtin_elem_t *e = &builtin_table[0]; e->qstr != MP_QSTR_; e++) {
                if (e->qstr == qstr) {
                    return e->fun;
                }
            }
            nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_NameError, "name '%s' is not defined", qstr_str(qstr)));
        }
    }
    return elem->value;
}

442
mp_obj_t rt_load_build_class(void) {
Damien's avatar
Damien committed
443
    DEBUG_OP_printf("load_build_class\n");
444
    mp_map_elem_t *elem = mp_map_lookup(&map_builtins, MP_OBJ_NEW_QSTR(MP_QSTR___build_class__), MP_MAP_LOOKUP);
445
446
447
448
    if (elem != NULL) {
        return elem->value;
    } else {
        return (mp_obj_t)&mp_builtin___build_class___obj;
Damien's avatar
Damien committed
449
450
451
    }
}

452
453
// Read the value held by a closure cell (thin wrapper for the emitters).
mp_obj_t rt_get_cell(mp_obj_t cell) {
    return mp_obj_cell_get(cell);
}

456
457
// Store a value into a closure cell (thin wrapper for the emitters).
void rt_set_cell(mp_obj_t cell, mp_obj_t val) {
    mp_obj_cell_set(cell, val);
}

460
// Bind obj to qstr in the current local name space (STORE_NAME),
// creating the entry if needed.
void rt_store_name(qstr qstr, mp_obj_t obj) {
    DEBUG_OP_printf("store name %s <- %p\n", qstr_str(qstr), obj);
    mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = obj;
}

465
466
467
468
469
// Remove qstr from the current local name space (DELETE_NAME).
// NOTE(review): a missing name is silently ignored here — CPython raises
// NameError in that case; confirm whether callers rely on this.
void rt_delete_name(qstr qstr) {
    DEBUG_OP_printf("delete name %s\n", qstr_str(qstr));
    mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_REMOVE_IF_FOUND);
}

470
// Bind obj to qstr in the module-level name space (STORE_GLOBAL),
// creating the entry if needed.
void rt_store_global(qstr qstr, mp_obj_t obj) {
    DEBUG_OP_printf("store global %s <- %p\n", qstr_str(qstr), obj);
    mp_map_lookup(map_globals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = obj;
}

475
// Apply unary operator op (an RT_UNARY_OP_* code) to arg and return the
// result. Small ints are handled inline for speed; everything else is
// dispatched to the type's unary_op slot. Raises TypeError (via nlr_jump,
// which does not return) if the type does not implement the operator.
mp_obj_t rt_unary_op(int op, mp_obj_t arg) {
    DEBUG_OP_printf("unary %d %p\n", op, arg);

    if (MP_OBJ_IS_SMALL_INT(arg)) {
        mp_small_int_t val = MP_OBJ_SMALL_INT_VALUE(arg);
        switch (op) {
            case RT_UNARY_OP_BOOL:
                return MP_BOOL(val != 0);
            case RT_UNARY_OP_POSITIVE:
                return arg;
            case RT_UNARY_OP_NEGATIVE:
                // check for overflow: -MP_SMALL_INT_MIN does not fit in a small int
                if (val == MP_SMALL_INT_MIN) {
                    return mp_obj_new_int(-val);
                } else {
                    return MP_OBJ_NEW_SMALL_INT(-val);
                }
            case RT_UNARY_OP_INVERT:
                return MP_OBJ_NEW_SMALL_INT(~val);
            default:
                assert(0);
                return arg;
        }
    } else {
        mp_obj_type_t *type = mp_obj_get_type(arg);
        if (type->unary_op != NULL) {
            mp_obj_t result = type->unary_op(op, arg);
            // compare against MP_OBJ_NULL (not bare NULL) for the
            // "operator not supported" sentinel, consistent with the same
            // checks in rt_is_true and rt_binary_op
            if (result != MP_OBJ_NULL) {
                return result;
            }
        }
        // TODO specify in error message what the operator is
        nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError, "bad operand type for unary operator: '%s'", mp_obj_get_type_str(arg)));
    }
}

511
mp_obj_t rt_binary_op(int op, mp_obj_t lhs, mp_obj_t rhs) {
Damien's avatar
Damien committed
512
    DEBUG_OP_printf("binary %d %p %p\n", op, lhs, rhs);
513
514
515
516
517
518
519
520
521
522

    // TODO correctly distinguish inplace operators for mutable objects
    // lookup logic that CPython uses for +=:
    //   check for implemented +=
    //   then check for implemented +
    //   then check for implemented seq.inplace_concat
    //   then check for implemented seq.concat
    //   then fail
    // note that list does not implement + or +=, so that inplace_concat is reached first for +=

523
524
    // deal with is
    if (op == RT_BINARY_OP_IS) {
525
526
527
        return MP_BOOL(lhs == rhs);
    }

528
    // deal with == and != for all types
529
    if (op == RT_BINARY_OP_EQUAL || op == RT_BINARY_OP_NOT_EQUAL) {
530
        if (mp_obj_equal(lhs, rhs)) {
531
            if (op == RT_BINARY_OP_EQUAL) {
532
533
534
535
536
                return mp_const_true;
            } else {
                return mp_const_false;
            }
        } else {
537
            if (op == RT_BINARY_OP_EQUAL) {
538
539
540
541
542
543
544
545
                return mp_const_false;
            } else {
                return mp_const_true;
            }
        }
    }

    // deal with exception_match for all types
546
    if (op == RT_BINARY_OP_EXCEPTION_MATCH) {
547
548
549
550
551
552
        // rhs must be issubclass(rhs, BaseException)
        if (mp_obj_is_exception_type(rhs)) {
            // if lhs is an instance of an exception, then extract and use its type
            if (mp_obj_is_exception_instance(lhs)) {
                lhs = mp_obj_get_type(lhs);
            }
553
            if (mp_obj_is_subclass_fast(lhs, rhs)) {
554
555
556
557
558
                return mp_const_true;
            } else {
                return mp_const_false;
            }
        }
559
560
        assert(0);
        return mp_const_false;
561
562
    }

563
    if (MP_OBJ_IS_SMALL_INT(lhs)) {
564
        mp_small_int_t lhs_val = MP_OBJ_SMALL_INT_VALUE(lhs);
565
566
        if (MP_OBJ_IS_SMALL_INT(rhs)) {
            mp_small_int_t rhs_val = MP_OBJ_SMALL_INT_VALUE(rhs);
567
568
569
570
571
572
573
574
575
            // This is a binary operation: lhs_val op rhs_val
            // We need to be careful to handle overflow; see CERT INT32-C
            // Operations that can overflow:
            //      +       result always fits in machine_int_t, then handled by SMALL_INT check
            //      -       result always fits in machine_int_t, then handled by SMALL_INT check
            //      *       checked explicitly
            //      /       if lhs=MIN and rhs=-1; result always fits in machine_int_t, then handled by SMALL_INT check
            //      %       if lhs=MIN and rhs=-1; result always fits in machine_int_t, then handled by SMALL_INT check
            //      <<      checked explicitly
576
577
578
579
580
581
582
583
            switch (op) {
                case RT_BINARY_OP_OR:
                case RT_BINARY_OP_INPLACE_OR: lhs_val |= rhs_val; break;
                case RT_BINARY_OP_XOR:
                case RT_BINARY_OP_INPLACE_XOR: lhs_val ^= rhs_val; break;
                case RT_BINARY_OP_AND:
                case RT_BINARY_OP_INPLACE_AND: lhs_val &= rhs_val; break;
                case RT_BINARY_OP_LSHIFT:
584
585
586
587
588
589
590
591
592
593
594
595
596
597
                case RT_BINARY_OP_INPLACE_LSHIFT: {
                    if (rhs_val < 0) {
                        // negative shift not allowed
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative shift count"));
                    } else if (rhs_val >= BITS_PER_WORD || lhs_val > (MP_SMALL_INT_MAX >> rhs_val) || lhs_val < (MP_SMALL_INT_MIN >> rhs_val)) {
                        // left-shift will overflow, so use higher precision integer
                        lhs = mp_obj_new_int_from_ll(lhs_val);
                        goto generic_binary_op;
                    } else {
                        // use standard precision
                        lhs_val <<= rhs_val;
                    }
                    break;
                }
598
                case RT_BINARY_OP_RSHIFT:
599
600
601
602
603
604
605
606
607
                case RT_BINARY_OP_INPLACE_RSHIFT:
                    if (rhs_val < 0) {
                        // negative shift not allowed
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative shift count"));
                    } else {
                        // standard precision is enough for right-shift
                        lhs_val >>= rhs_val;
                    }
                    break;
608
609
610
611
612
                case RT_BINARY_OP_ADD:
                case RT_BINARY_OP_INPLACE_ADD: lhs_val += rhs_val; break;
                case RT_BINARY_OP_SUBTRACT:
                case RT_BINARY_OP_INPLACE_SUBTRACT: lhs_val -= rhs_val; break;
                case RT_BINARY_OP_MULTIPLY:
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
                case RT_BINARY_OP_INPLACE_MULTIPLY: {

                    // If long long type exists and is larger than machine_int_t, then
                    // we can use the following code to perform overflow-checked multiplication.
                    // Otherwise (eg in x64 case) we must use the branching code below.
                    #if 0
                    // compute result using long long precision
                    long long res = (long long)lhs_val * (long long)rhs_val;
                    if (res > MP_SMALL_INT_MAX || res < MP_SMALL_INT_MIN) {
                        // result overflowed SMALL_INT, so return higher precision integer
                        return mp_obj_new_int_from_ll(res);
                    } else {
                        // use standard precision
                        lhs_val = (mp_small_int_t)res;
                    }
                    #endif

                    if (lhs_val > 0) { // lhs_val is positive
                        if (rhs_val > 0) { // lhs_val and rhs_val are positive
                            if (lhs_val > (MP_SMALL_INT_MAX / rhs_val)) {
                                goto mul_overflow;
                            }
                        } else { // lhs_val positive, rhs_val nonpositive
                            if (rhs_val < (MP_SMALL_INT_MIN / lhs_val)) {
                                goto mul_overflow;
                            }
                        } // lhs_val positive, rhs_val nonpositive
                    } else { // lhs_val is nonpositive
                        if (rhs_val > 0) { // lhs_val is nonpositive, rhs_val is positive
                            if (lhs_val < (MP_SMALL_INT_MIN / rhs_val)) {
                                goto mul_overflow;
                            }
                        } else { // lhs_val and rhs_val are nonpositive
                            if (lhs_val != 0 && rhs_val < (MP_SMALL_INT_MAX / lhs_val)) {
                                goto mul_overflow;
                            }
                        } // End if lhs_val and rhs_val are nonpositive
                    } // End if lhs_val is nonpositive

                    // use standard precision
                    return MP_OBJ_NEW_SMALL_INT(lhs_val * rhs_val);

                mul_overflow:
                    // use higher precision
                    lhs = mp_obj_new_int_from_ll(lhs_val);
                    goto generic_binary_op;

                    break;
                }
662
                case RT_BINARY_OP_FLOOR_DIVIDE:
663
664
665
666
667
                case RT_BINARY_OP_INPLACE_FLOOR_DIVIDE:
                {
                    lhs_val = python_floor_divide(lhs_val, rhs_val);
                    break;
                }
668
                #if MICROPY_ENABLE_FLOAT
669
670
                case RT_BINARY_OP_TRUE_DIVIDE:
                case RT_BINARY_OP_INPLACE_TRUE_DIVIDE: return mp_obj_new_float((mp_float_t)lhs_val / (mp_float_t)rhs_val);
671
                #endif
672
673

                case RT_BINARY_OP_MODULO:
674
675
676
677
678
                case RT_BINARY_OP_INPLACE_MODULO:
                {
                    lhs_val = python_modulo(lhs_val, rhs_val);
                    break;
                }
679
680
                case RT_BINARY_OP_POWER:
                case RT_BINARY_OP_INPLACE_POWER:
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
                    if (rhs_val < 0) {
                        #if MICROPY_ENABLE_FLOAT
                        lhs = mp_obj_new_float(lhs_val);
                        goto generic_binary_op;
                        #else
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative power with no float support"));
                        #endif
                    } else {
                        // TODO check for overflow
                        machine_int_t ans = 1;
                        while (rhs_val > 0) {
                            if (rhs_val & 1) {
                                ans *= lhs_val;
                            }
                            lhs_val *= lhs_val;
                            rhs_val /= 2;
697
                        }
698
                        lhs_val = ans;
699
                    }
700
                    break;
701
702
703
704
                case RT_BINARY_OP_LESS: return MP_BOOL(lhs_val < rhs_val); break;
                case RT_BINARY_OP_MORE: return MP_BOOL(lhs_val > rhs_val); break;
                case RT_BINARY_OP_LESS_EQUAL: return MP_BOOL(lhs_val <= rhs_val); break;
                case RT_BINARY_OP_MORE_EQUAL: return MP_BOOL(lhs_val >= rhs_val); break;
705

706
707
                default: assert(0);
            }
708
709
            // TODO: We just should make mp_obj_new_int() inline and use that
            if (MP_OBJ_FITS_SMALL_INT(lhs_val)) {
710
                return MP_OBJ_NEW_SMALL_INT(lhs_val);
711
712
            } else {
                return mp_obj_new_int(lhs_val);
713
            }
714
#if MICROPY_ENABLE_FLOAT
715
        } else if (MP_OBJ_IS_TYPE(rhs, &mp_type_float)) {
716
            return mp_obj_float_binary_op(op, lhs_val, rhs);
717
        } else if (MP_OBJ_IS_TYPE(rhs, &mp_type_complex)) {
718
            return mp_obj_complex_binary_op(op, lhs_val, 0, rhs);
719
#endif
720
        }
721
    }
722

723
    /* deal with `in`
724
725
     *
     * NOTE `a in b` is `b.__contains__(a)`, hence why the generic dispatch
Damien George's avatar
Damien George committed
726
     * needs to go below with swapped arguments
727
     */
728
    if (op == RT_BINARY_OP_IN) {
729
730
731
        mp_obj_type_t *type = mp_obj_get_type(rhs);
        if (type->binary_op != NULL) {
            mp_obj_t res = type->binary_op(op, rhs, lhs);
Damien George's avatar
Damien George committed
732
            if (res != MP_OBJ_NULL) {
733
                return res;
John R. Lenton's avatar
John R. Lenton committed
734
            }
735
736
737
738
739
740
741
        }
        if (type->getiter != NULL) {
            /* second attempt, walk the iterator */
            mp_obj_t next = NULL;
            mp_obj_t iter = rt_getiter(rhs);
            while ((next = rt_iternext(iter)) != mp_const_stop_iteration) {
                if (mp_obj_equal(next, lhs)) {
742
                    return mp_const_true;
John R. Lenton's avatar
John R. Lenton committed
743
                }
744
            }
745
            return mp_const_false;
746
747
748
        }

        nlr_jump(mp_obj_new_exception_msg_varg(
749
                     &mp_type_TypeError, "'%s' object is not iterable",
750
751
752
753
                     mp_obj_get_type_str(rhs)));
        return mp_const_none;
    }

754
    // generic binary_op supplied by type
755
756
757
    mp_obj_type_t *type;
generic_binary_op:
    type = mp_obj_get_type(lhs);
758
759
760
761
    if (type->binary_op != NULL) {
        mp_obj_t result = type->binary_op(op, lhs, rhs);
        if (result != MP_OBJ_NULL) {
            return result;
Damien's avatar
Damien committed
762
763
        }
    }
764

765
766
    // TODO implement dispatch for reverse binary ops

John R. Lenton's avatar
John R. Lenton committed
767
    // TODO specify in error message what the operator is
768
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError,
John R. Lenton's avatar
John R. Lenton committed
769
770
        "unsupported operand types for binary operator: '%s', '%s'",
        mp_obj_get_type_str(lhs), mp_obj_get_type_str(rhs)));
771
    return mp_const_none;
Damien's avatar
Damien committed
772
773
}

774
// Build a callable object from the unique code id allocated at compile time.
// def_args is a tuple of default positional arguments, or MP_OBJ_NULL if none.
// Returns mp_const_none for an out-of-range id.
mp_obj_t rt_make_function_from_id(int unique_code_id, mp_obj_t def_args) {
    DEBUG_OP_printf("make_function_from_id %d\n", unique_code_id);
    if (unique_code_id < 1 || unique_code_id >= next_unique_code_id) {
        // illegal code id
        return mp_const_none;
    }

    // build the callable according to how the code was emitted
    mp_code_t *code = &unique_codes[unique_code_id];
    mp_obj_t fun;
    switch (code->kind) {
        case MP_CODE_BYTE:
            fun = mp_obj_new_fun_bc(code->scope_flags, code->arg_names, code->n_args, def_args, code->n_state, code->u_byte.code);
            break;
        case MP_CODE_NATIVE:
            fun = rt_make_function_n(code->n_args, code->u_native.fun);
            break;
        case MP_CODE_INLINE_ASM:
            fun = mp_obj_new_fun_asm(code->n_args, code->u_inline_asm.fun);
            break;
        default:
            assert(0);
            fun = mp_const_none;
    }

    // generator functions are wrapped so that calling them creates a generator
    if ((code->scope_flags & MP_SCOPE_FLAG_GENERATOR) != 0) {
        fun = mp_obj_new_gen_wrap(fun);
    }

    return fun;
}

807
// Build a closure: the function identified by unique_code_id, closed over
// the variables in closure_tuple.
mp_obj_t rt_make_closure_from_id(int unique_code_id, mp_obj_t closure_tuple) {
    DEBUG_OP_printf("make_closure_from_id %d\n", unique_code_id);
    // make the underlying function (no default args), then wrap it in a closure object
    return mp_obj_new_closure(rt_make_function_from_id(unique_code_id, MP_OBJ_NULL), closure_tuple);
}

815
// Convenience wrapper: call fun with no arguments.
mp_obj_t rt_call_function_0(mp_obj_t fun) {
    return rt_call_function_n_kw(fun, 0, 0, NULL);
}

819
// Convenience wrapper: call fun with a single positional argument.
mp_obj_t rt_call_function_1(mp_obj_t fun, mp_obj_t arg) {
    return rt_call_function_n_kw(fun, 1, 0, &arg);
}

823
824
// Convenience wrapper: call fun with exactly two positional arguments.
mp_obj_t rt_call_function_2(mp_obj_t fun, mp_obj_t arg1, mp_obj_t arg2) {
    mp_obj_t call_args[2] = { arg1, arg2 };
    return rt_call_function_n_kw(fun, 2, 0, call_args);
}

830
831
832
833
834
835
// Wrapper that accepts n_args and n_kw packed into a single argument
// (the native emitter can pass at most 3 arguments to a function).
// Low byte is n_args, next byte is n_kw.
mp_obj_t rt_call_function_n_kw_for_native(mp_obj_t fun_in, uint n_args_kw, const mp_obj_t *args) {
    uint n_args = n_args_kw & 0xff;
    uint n_kw = (n_args_kw >> 8) & 0xff;
    return rt_call_function_n_kw(fun_in, n_args, n_kw, args);
}

836
837
// args contains, eg: arg0  arg1  key0  value0  key1  value1
mp_obj_t rt_call_function_n_kw(mp_obj_t fun_in, uint n_args, uint n_kw, const mp_obj_t *args) {
    // TODO improve this: fun object can specify its type and we parse here the arguments,
    // passing to the function arrays of fixed and keyword arguments
    DEBUG_OP_printf("calling function %p(n_args=%d, n_kw=%d, args=%p)\n", fun_in, n_args, n_kw, args);

    // dispatch through the type's call slot; objects without one are not callable
    mp_obj_type_t *type = mp_obj_get_type(fun_in);
    if (type->call == NULL) {
        nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError, "'%s' object is not callable", mp_obj_get_type_str(fun_in)));
    }
    return type->call(fun_in, n_args, n_kw, args);
}

854
855
// args contains: fun  self/NULL  arg(0)  ...  arg(n_args-2)  arg(n_args-1)  kw_key(0)  kw_val(0)  ... kw_key(n_kw-1)  kw_val(n_kw-1)
// if n_args==0 and n_kw==0 then there are only fun and self/NULL
856
mp_obj_t rt_call_method_n_kw(uint n_args, uint n_kw, const mp_obj_t *args) {
857
858
859
    DEBUG_OP_printf("call method (fun=%p, self=%p, n_args=%u, n_kw=%u, args=%p)\n", args[0], args[1], n_args, n_kw, args);
    int adjust = (args[1] == NULL) ? 0 : 1;
    return rt_call_function_n_kw(args[0], n_args + adjust, n_kw, args + 2 - adjust);
860
861
}

862
// Build a tuple object from n_args items (used by the tuple-display opcode).
mp_obj_t rt_build_tuple(int n_args, mp_obj_t *items) {
    return mp_obj_new_tuple(n_args, items);
}

866
// Build a list object from n_args items (used by the list-display opcode).
mp_obj_t rt_build_list(int n_args, mp_obj_t *items) {
    return mp_obj_new_list(n_args, items);
}

870
871
// Build a set object from n_args items (used by the set-display opcode).
mp_obj_t rt_build_set(int n_args, mp_obj_t *items) {
    return mp_obj_new_set(n_args, items);
}

874
// Add item to set and hand the set back (lets the VM chain set-builds).
mp_obj_t rt_store_set(mp_obj_t set, mp_obj_t item) {
    mp_obj_set_store(set, item);
    return set;
}

879
// unpacked items are stored in reverse order into the array pointed to by items
880
void rt_unpack_sequence(mp_obj_t seq_in, uint num, mp_obj_t *items) {
881
    uint seq_len;
882
883
884
885
886
887
    if (MP_OBJ_IS_TYPE(seq_in, &tuple_type) || MP_OBJ_IS_TYPE(seq_in, &list_type)) {
        mp_obj_t *seq_items;
        if (MP_OBJ_IS_TYPE(seq_in, &tuple_type)) {
            mp_obj_tuple_get(seq_in, &seq_len, &seq_items);
        } else {
            mp_obj_list_get(seq_in, &seq_len, &seq_items);
888
        }
889
        if (seq_len < num) {
890
            goto too_short;
891
        } else if (seq_len > num) {
892
            goto too_long;
893
        }
894
895
896
        for (uint i = 0; i < num; i++) {
            items[i] = seq_items[num - 1 - i];
        }
897
    } else {
898
899
900
901
902
903
904
905
906
907
908
909
        mp_obj_t iterable = rt_getiter(seq_in);

        for (seq_len = 0; seq_len < num; seq_len++) {
            mp_obj_t el = rt_iternext(iterable);
            if (el == mp_const_stop_iteration) {
                goto too_short;
            }
            items[num - 1 - seq_len] = el;
        }
        if (rt_iternext(iterable) != mp_const_stop_iteration) {
            goto too_long;
        }
910
    }
911
912
913
    return;

too_short:
914
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_ValueError, "need more than %d values to unpack", seq_len));
915
too_long:
916
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_ValueError, "too many values to unpack (expected %d)", num));
917
918
}

919
920
// Build an empty dict; n_args is a sizing hint for the expected entry count.
mp_obj_t rt_build_map(int n_args) {
    return mp_obj_new_dict(n_args);
}

923
924
925
// Store key/value into map and hand the map back.
// map should always be a dict here.
mp_obj_t rt_store_map(mp_obj_t map, mp_obj_t key, mp_obj_t value) {
    return mp_obj_dict_store(map, key, value);
}

928
// Load attribute attr from base, producing a bound method object when the
// attribute turns out to be a method.
mp_obj_t rt_load_attr(mp_obj_t base, qstr attr) {
    DEBUG_OP_printf("load attr %p.%s\n", base, qstr_str(attr));
    // delegate the lookup to load_method
    mp_obj_t dest[2];
    rt_load_method(base, attr, dest);
    if (dest[1] != MP_OBJ_NULL) {
        // load_method found a method: bind it to base
        return mp_obj_new_bound_meth(dest[0], dest[1]);
    }
    // load_method returned just a normal attribute
    return dest[0];
}

942
943
944
// no attribute found, returns:     dest[0] == MP_OBJ_NULL, dest[1] == MP_OBJ_NULL
// normal attribute found, returns: dest[0] == <attribute>, dest[1] == MP_OBJ_NULL
// method attribute found, returns: dest[0] == <method>,    dest[1] == <self>
945
STATIC void rt_load_method_maybe(mp_obj_t base, qstr attr, mp_obj_t *dest) {
946
947
948
949
950
951
952
953
954
955
956
957
958
    // clear output to indicate no attribute/method found yet
    dest[0] = MP_OBJ_NULL;
    dest[1] = MP_OBJ_NULL;

    // get the type
    mp_obj_type_t *type = mp_obj_get_type(base);

    // if this type can do its own load, then call it
    if (type->load_attr != NULL) {
        type->load_attr(base, attr, dest);
    }

    // if nothing found yet, look for built-in and generic names
959
    if (dest[0] == MP_OBJ_NULL) {
Damien George's avatar
Damien George committed
960
961
962
963
        if (