runtime.c 39.4 KB
Newer Older
1
// in principle, rt_xxx functions are called only by vm/native/viper and make assumptions about args
2
// mp_xxx functions are safer and can be called by anyone
3
// note that rt_assign_xxx are called only from emit*, and maybe we can rename them to reflect this
4

Damien's avatar
Damien committed
5
6
7
#include <stdio.h>
#include <string.h>
#include <assert.h>
8
#include <math.h>
Damien's avatar
Damien committed
9

10
#include "nlr.h"
Damien's avatar
Damien committed
11
#include "misc.h"
12
#include "mpconfig.h"
13
#include "qstr.h"
14
#include "obj.h"
15
#include "objmodule.h"
16
#include "parsenum.h"
17
#include "runtime0.h"
Damien's avatar
Damien committed
18
#include "runtime.h"
19
20
#include "map.h"
#include "builtin.h"
21
#include "builtintables.h"
22
#include "bc.h"
23
#include "intdivmod.h"
24

25
#if 0 // print debugging info
26
#define DEBUG_PRINT (1)
27
#define WRITE_CODE (1)
28
#define DEBUG_printf DEBUG_printf
29
#define DEBUG_OP_printf(...) DEBUG_printf(__VA_ARGS__)
30
#else // don't print debugging info
31
32
#define DEBUG_printf(...) (void)0
#define DEBUG_OP_printf(...) (void)0
33
#endif
Damien's avatar
Damien committed
34

35
// locals and globals need to be pointers because they can be the same in outer module scope
36
37
38
STATIC mp_map_t *map_locals;   // current local scope; aliases map_globals at outer module scope (set in rt_init)
STATIC mp_map_t *map_globals;  // current global scope
STATIC mp_map_t map_builtins;  // builtins added at runtime (eg Ellipsis); ROM builtin tables are searched separately in rt_load_global
39

Damien's avatar
Damien committed
40
// Kinds of compiled code that can live in the unique_codes table.
typedef enum {
    MP_CODE_NONE,       // slot allocated but nothing assigned yet
    MP_CODE_BYTE,       // byte code for the VM
    MP_CODE_NATIVE,     // native machine code with mp_fun_t calling convention
    MP_CODE_INLINE_ASM, // inline assembler code
} mp_code_kind_t;

// Descriptor for one compiled code object, indexed by its unique code id.
typedef struct _mp_code_t {
    mp_code_kind_t kind : 8;
    uint scope_flags : 8; // MP_SCOPE_FLAG_* bits (eg generator flag; see rt_make_function_from_id)
    uint n_args : 16;     // number of positional arguments
    union {
        // valid when kind == MP_CODE_BYTE
        struct {
            byte *code;
            uint len;
        } u_byte;
        // valid when kind == MP_CODE_NATIVE
        struct {
            mp_fun_t fun;
        } u_native;
        // valid when kind == MP_CODE_INLINE_ASM
        struct {
            void *fun;
        } u_inline_asm;
    };
    qstr *arg_names; // argument names; only assigned for byte code (passed to mp_obj_new_fun_bc)
} mp_code_t;
Damien's avatar
Damien committed
65

66
67
68
// Table of all compiled code objects, indexed by unique code id.
// Id 0 means "no code"; ids are handed out starting at 1 (see rt_init).
STATIC uint next_unique_code_id;
STATIC machine_uint_t unique_codes_alloc = 0; // number of allocated slots in unique_codes
STATIC mp_code_t *unique_codes = NULL;

#ifdef WRITE_CODE
// dump file for emitted machine code; opened in rt_init, closed in rt_deinit
FILE *fp_write_code = NULL;
#endif
Damien's avatar
Damien committed
73

74
// a good optimising compiler will inline this if necessary
75
STATIC void mp_map_add_qstr(mp_map_t *map, qstr qstr, mp_obj_t value) {
76
77
78
    mp_map_lookup(map, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = value;
}

79
// Initialise the runtime: scope maps, builtin table, module subsystem,
// the unique-code table, and (optionally) the code-dump file.
void rt_init(void) {
    // locals = globals for outer module (see Objects/frameobject.c/PyFrame_New())
    map_locals = map_globals = mp_map_new(1);

    // init built-in hash table
    mp_map_init(&map_builtins, 3);

    // init global module stuff
    mp_module_init();

    // add some builtins that can't be done in ROM
    mp_map_add_qstr(map_globals, MP_QSTR___name__, MP_OBJ_NEW_QSTR(MP_QSTR___main__));
    mp_map_add_qstr(&map_builtins, MP_QSTR_Ellipsis, mp_const_ellipsis);

#if MICROPY_CPYTHON_COMPAT
    // Precreate the sys module, so "import sys" doesn't throw exceptions.
    mp_obj_t m_sys = mp_obj_new_module(MP_QSTR_sys);
    // Avoid warning of unused var
    (void)m_sys;
#endif
    // init sys.path
    // for efficiency, left to platform-specific startup code
    //sys_path = mp_obj_new_list(0, NULL);
    //rt_store_attr(m_sys, MP_QSTR_path, sys_path);

    // TODO: wastes one mp_code_t structure in mem
    next_unique_code_id = 1; // 0 indicates "no code"
    unique_codes_alloc = 0;
    unique_codes = NULL;

#ifdef WRITE_CODE
    fp_write_code = fopen("out-code", "wb");
#endif
}

114
// Free runtime state allocated by rt_init.
void rt_deinit(void) {
    m_del(mp_code_t, unique_codes, unique_codes_alloc);
    // NOTE(review): only map_globals is freed; assumes map_locals still
    // aliases it as set up in rt_init — confirm no caller swaps map_locals
    // to a separately-allocated map before deinit.
    mp_map_free(map_globals);
    mp_map_deinit(&map_builtins);
    mp_module_deinit();
#ifdef WRITE_CODE
    if (fp_write_code != NULL) {
        fclose(fp_write_code);
    }
#endif
}

126
uint rt_get_unique_code_id(void) {
127
    return next_unique_code_id++;
Damien's avatar
Damien committed
128
129
}

130
// Grow the unique_codes table (if needed) so that every id handed out so far
// (1 .. next_unique_code_id-1) has a slot; freshly added slots are marked
// MP_CODE_NONE until code is assigned to them.
STATIC void alloc_unique_codes(void) {
    if (next_unique_code_id > unique_codes_alloc) {
        DEBUG_printf("allocate more unique codes: " UINT_FMT " -> %u\n", unique_codes_alloc, next_unique_code_id);
        // increase size of unique_codes table
        unique_codes = m_renew(mp_code_t, unique_codes, unique_codes_alloc, next_unique_code_id);
        for (uint i = unique_codes_alloc; i < next_unique_code_id; i++) {
            unique_codes[i].kind = MP_CODE_NONE;
        }
        unique_codes_alloc = next_unique_code_id;
    }
}

142
// Record byte code for the given id.  The id must have been obtained from
// rt_get_unique_code_id and not yet assigned (asserted below).
// n_locals and n_stack are only used here for debug output.
void rt_assign_byte_code(uint unique_code_id, byte *code, uint len, int n_args, int n_locals, int n_stack, uint scope_flags, qstr *arg_names) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_BYTE;
    unique_codes[unique_code_id].scope_flags = scope_flags;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].u_byte.code = code;
    unique_codes[unique_code_id].u_byte.len = len;
    unique_codes[unique_code_id].arg_names = arg_names;

    //printf("byte code: %d bytes\n", len);

#ifdef DEBUG_PRINT
    DEBUG_printf("assign byte code: id=%d code=%p len=%u n_args=%d n_locals=%d n_stack=%d\n", unique_code_id, code, len, n_args, n_locals, n_stack);
    // hex dump of at most the first 128 bytes, 16 per line
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", code[i]);
    }
    DEBUG_printf("\n");
#if MICROPY_DEBUG_PRINTERS
    mp_byte_code_print(code, len);
#endif
#endif
}

170
// Record native machine code for the given id.  The id must have been
// obtained from rt_get_unique_code_id and not yet assigned (asserted below).
void rt_assign_native_code(uint unique_code_id, void *fun, uint len, int n_args) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_NATIVE;
    unique_codes[unique_code_id].scope_flags = 0;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].u_native.fun = fun;

    //printf("native code: %d bytes\n", len);

#ifdef DEBUG_PRINT
    DEBUG_printf("assign native code: id=%d fun=%p len=%u n_args=%d\n", unique_code_id, fun, len, n_args);
    byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
    // hex dump of at most the first 128 bytes, 16 per line
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", fun_data[i]);
    }
    DEBUG_printf("\n");

#ifdef WRITE_CODE
    if (fp_write_code != NULL) {
        fwrite(fun_data, len, 1, fp_write_code);
        fflush(fp_write_code);
    }
#endif
#endif
}

201
// Record inline-assembler code for the given id.  The id must have been
// obtained from rt_get_unique_code_id and not yet assigned (asserted below).
void rt_assign_inline_asm_code(uint unique_code_id, void *fun, uint len, int n_args) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_INLINE_ASM;
    unique_codes[unique_code_id].scope_flags = 0;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].u_inline_asm.fun = fun;

#ifdef DEBUG_PRINT
    DEBUG_printf("assign inline asm code: id=%d fun=%p len=%u n_args=%d\n", unique_code_id, fun, len, n_args);
    byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
    // hex dump of at most the first 128 bytes, 16 per line
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", fun_data[i]);
    }
    DEBUG_printf("\n");

#ifdef WRITE_CODE
    if (fp_write_code != NULL) {
        fwrite(fun_data, len, 1, fp_write_code);
    }
#endif
#endif
}

229
230
// Truth-test an object, returning 1 (truthy) or 0 (falsy).
// Order of checks: singleton constants, small ints, the type's unary_op
// (RT_UNARY_OP_BOOL), then length; anything else is true per Python semantics.
int rt_is_true(mp_obj_t arg) {
    DEBUG_OP_printf("is true %p\n", arg);
    // fast paths for the singleton constants
    if (arg == mp_const_true) {
        return 1;
    }
    if (arg == mp_const_false || arg == mp_const_none) {
        return 0;
    }
    if (MP_OBJ_IS_SMALL_INT(arg)) {
        // zero is falsy, every other small int is truthy
        return MP_OBJ_SMALL_INT_VALUE(arg) != 0;
    }
    // let the type decide, if it implements the bool unary op
    mp_obj_type_t *type = mp_obj_get_type(arg);
    if (type->unary_op != NULL) {
        mp_obj_t res = type->unary_op(RT_UNARY_OP_BOOL, arg);
        if (res != MP_OBJ_NULL) {
            return res == mp_const_true;
        }
    }
    // fall back to length: truthy iff len != 0
    mp_obj_t len = mp_obj_len_maybe(arg);
    if (len != MP_OBJ_NULL) {
        return len != MP_OBJ_NEW_SMALL_INT(0);
    }
    // any other obj is true per Python semantics
    return 1;
}

// Thin runtime wrapper around the list append primitive.
mp_obj_t rt_list_append(mp_obj_t self_in, mp_obj_t arg) {
    return mp_obj_list_append(self_in, arg);
}

// Load a decimal numeric constant: parse the qstr's text via the number parser.
mp_obj_t rt_load_const_dec(qstr qstr) {
    DEBUG_OP_printf("load '%s'\n", qstr_str(qstr));
    uint slen;
    const byte *s = qstr_data(qstr, &slen);
    return mp_parse_num_decimal((const char*)s, slen, true, false);
}

274
// Load a string constant; interned strings are represented directly as qstr objects.
mp_obj_t rt_load_const_str(qstr qstr) {
    DEBUG_OP_printf("load '%s'\n", qstr_str(qstr));
    return MP_OBJ_NEW_QSTR(qstr);
}

279
280
281
282
283
284
285
// Load a bytes constant: build a bytes object from the qstr's raw data.
mp_obj_t rt_load_const_bytes(qstr qstr) {
    DEBUG_OP_printf("load b'%s'\n", qstr_str(qstr));
    uint blen;
    const byte *buf = qstr_data(qstr, &blen);
    return mp_obj_new_bytes(buf, blen);
}

286
// Resolve a name: search locals first, then delegate to the
// globals/builtins lookup in rt_load_global.
mp_obj_t rt_load_name(qstr qstr) {
    DEBUG_OP_printf("load name %s\n", qstr_str(qstr));
    mp_map_elem_t *elem = mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (elem == NULL) {
        // not a local; fall back to globals and builtins
        return rt_load_global(qstr);
    }
    return elem->value;
}

297
// Resolve a name at global scope: globals map, then the runtime builtins
// map, then the ROM builtin tables.  Raises NameError if not found.
mp_obj_t rt_load_global(qstr qstr) {
    DEBUG_OP_printf("load global %s\n", qstr_str(qstr));
    mp_map_elem_t *elem = mp_map_lookup(map_globals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (elem != NULL) {
        return elem->value;
    }
    // not a global: try the dynamic builtins map
    elem = mp_map_lookup(&map_builtins, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (elem != NULL) {
        return elem->value;
    }
    // finally, the ROM builtin tables
    mp_obj_t o = mp_builtin_tables_lookup_object(qstr);
    if (o != MP_OBJ_NULL) {
        return o;
    }
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_NameError, "name '%s' is not defined", qstr_str(qstr)));
}

314
mp_obj_t rt_load_build_class(void) {
Damien's avatar
Damien committed
315
    DEBUG_OP_printf("load_build_class\n");
316
    // lookup __build_class__ in dynamic table of builtins first
317
    mp_map_elem_t *elem = mp_map_lookup(&map_builtins, MP_OBJ_NEW_QSTR(MP_QSTR___build_class__), MP_MAP_LOOKUP);
318
    if (elem != NULL) {
319
        // found user-defined __build_class__, return it
320
321
        return elem->value;
    } else {
322
        // no user-defined __build_class__, return builtin one
323
        return (mp_obj_t)&mp_builtin___build_class___obj;
Damien's avatar
Damien committed
324
325
326
    }
}

327
328
// Read the value held by a closure cell.
mp_obj_t rt_get_cell(mp_obj_t cell) {
    return mp_obj_cell_get(cell);
}

331
332
// Store a value into a closure cell.
void rt_set_cell(mp_obj_t cell, mp_obj_t val) {
    mp_obj_cell_set(cell, val);
}

335
// Bind obj to a name in the current local scope, inserting the name if absent.
void rt_store_name(qstr qstr, mp_obj_t obj) {
    DEBUG_OP_printf("store name %s <- %p\n", qstr_str(qstr), obj);
    mp_map_elem_t *elem = mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND);
    elem->value = obj;
}

340
341
342
343
344
// Remove a name binding from the current local scope, if present.
void rt_delete_name(qstr qstr) {
    DEBUG_OP_printf("delete name %s\n", qstr_str(qstr));
    mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_REMOVE_IF_FOUND);
}

345
// Bind obj to a name in the global scope, inserting the name if absent.
void rt_store_global(qstr qstr, mp_obj_t obj) {
    DEBUG_OP_printf("store global %s <- %p\n", qstr_str(qstr), obj);
    mp_map_elem_t *elem = mp_map_lookup(map_globals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND);
    elem->value = obj;
}

350
// Apply unary operator op to arg and return the result.
// Small ints are handled inline (with an overflow check for negation);
// other types dispatch to their unary_op slot.  Raises TypeError if the
// type does not support the operator.
mp_obj_t rt_unary_op(int op, mp_obj_t arg) {
    DEBUG_OP_printf("unary %d %p\n", op, arg);
    if (MP_OBJ_IS_SMALL_INT(arg)) {
        mp_small_int_t val = MP_OBJ_SMALL_INT_VALUE(arg);
        switch (op) {
            case RT_UNARY_OP_BOOL:
                return MP_BOOL(val != 0);
            case RT_UNARY_OP_POSITIVE:
                return arg;
            case RT_UNARY_OP_NEGATIVE:
                // check for overflow: -MP_SMALL_INT_MIN does not fit in a small int
                if (val == MP_SMALL_INT_MIN) {
                    return mp_obj_new_int(-val);
                } else {
                    return MP_OBJ_NEW_SMALL_INT(-val);
                }
            case RT_UNARY_OP_INVERT:
                return MP_OBJ_NEW_SMALL_INT(~val);
            default:
                assert(0);
                return arg;
        }
    } else {
        mp_obj_type_t *type = mp_obj_get_type(arg);
        if (type->unary_op != NULL) {
            mp_obj_t result = type->unary_op(op, arg);
            // compare against MP_OBJ_NULL (not bare NULL) for "op not
            // supported", consistent with rt_is_true and rt_binary_op
            if (result != MP_OBJ_NULL) {
                return result;
            }
        }
        // TODO specify in error message what the operator is
        nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError, "bad operand type for unary operator: '%s'", mp_obj_get_type_str(arg)));
    }
}

386
// Apply binary operator op to lhs and rhs and return the result.
// Special-cases identity, (in)equality, exception matching and `in`;
// has an overflow-checked small-int fast path; otherwise dispatches to
// the lhs type's binary_op slot.  Raises TypeError when unsupported.
mp_obj_t rt_binary_op(int op, mp_obj_t lhs, mp_obj_t rhs) {
    DEBUG_OP_printf("binary %d %p %p\n", op, lhs, rhs);

    // TODO correctly distinguish inplace operators for mutable objects
    // lookup logic that CPython uses for +=:
    //   check for implemented +=
    //   then check for implemented +
    //   then check for implemented seq.inplace_concat
    //   then check for implemented seq.concat
    //   then fail
    // note that list does not implement + or +=, so that inplace_concat is reached first for +=

    // deal with is
    if (op == RT_BINARY_OP_IS) {
        return MP_BOOL(lhs == rhs);
    }

    // deal with == and != for all types
    if (op == RT_BINARY_OP_EQUAL || op == RT_BINARY_OP_NOT_EQUAL) {
        if (mp_obj_equal(lhs, rhs)) {
            if (op == RT_BINARY_OP_EQUAL) {
                return mp_const_true;
            } else {
                return mp_const_false;
            }
        } else {
            if (op == RT_BINARY_OP_EQUAL) {
                return mp_const_false;
            } else {
                return mp_const_true;
            }
        }
    }

    // deal with exception_match for all types
    if (op == RT_BINARY_OP_EXCEPTION_MATCH) {
        // rhs must be issubclass(rhs, BaseException)
        if (mp_obj_is_exception_type(rhs)) {
            // if lhs is an instance of an exception, then extract and use its type
            if (mp_obj_is_exception_instance(lhs)) {
                lhs = mp_obj_get_type(lhs);
            }
            if (mp_obj_is_subclass_fast(lhs, rhs)) {
                return mp_const_true;
            } else {
                return mp_const_false;
            }
        }
        assert(0);
        return mp_const_false;
    }

    // small-int fast path: do the arithmetic inline when both operands fit
    if (MP_OBJ_IS_SMALL_INT(lhs)) {
        mp_small_int_t lhs_val = MP_OBJ_SMALL_INT_VALUE(lhs);
        if (MP_OBJ_IS_SMALL_INT(rhs)) {
            mp_small_int_t rhs_val = MP_OBJ_SMALL_INT_VALUE(rhs);
            // This is a binary operation: lhs_val op rhs_val
            // We need to be careful to handle overflow; see CERT INT32-C
            // Operations that can overflow:
            //      +       result always fits in machine_int_t, then handled by SMALL_INT check
            //      -       result always fits in machine_int_t, then handled by SMALL_INT check
            //      *       checked explicitly
            //      /       if lhs=MIN and rhs=-1; result always fits in machine_int_t, then handled by SMALL_INT check
            //      %       if lhs=MIN and rhs=-1; result always fits in machine_int_t, then handled by SMALL_INT check
            //      <<      checked explicitly
            switch (op) {
                case RT_BINARY_OP_OR:
                case RT_BINARY_OP_INPLACE_OR: lhs_val |= rhs_val; break;
                case RT_BINARY_OP_XOR:
                case RT_BINARY_OP_INPLACE_XOR: lhs_val ^= rhs_val; break;
                case RT_BINARY_OP_AND:
                case RT_BINARY_OP_INPLACE_AND: lhs_val &= rhs_val; break;
                case RT_BINARY_OP_LSHIFT:
                case RT_BINARY_OP_INPLACE_LSHIFT: {
                    if (rhs_val < 0) {
                        // negative shift not allowed
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative shift count"));
                    } else if (rhs_val >= BITS_PER_WORD || lhs_val > (MP_SMALL_INT_MAX >> rhs_val) || lhs_val < (MP_SMALL_INT_MIN >> rhs_val)) {
                        // left-shift will overflow, so use higher precision integer
                        lhs = mp_obj_new_int_from_ll(lhs_val);
                        goto generic_binary_op;
                    } else {
                        // use standard precision
                        lhs_val <<= rhs_val;
                    }
                    break;
                }
                case RT_BINARY_OP_RSHIFT:
                case RT_BINARY_OP_INPLACE_RSHIFT:
                    if (rhs_val < 0) {
                        // negative shift not allowed
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative shift count"));
                    } else {
                        // standard precision is enough for right-shift
                        lhs_val >>= rhs_val;
                    }
                    break;
                case RT_BINARY_OP_ADD:
                case RT_BINARY_OP_INPLACE_ADD: lhs_val += rhs_val; break;
                case RT_BINARY_OP_SUBTRACT:
                case RT_BINARY_OP_INPLACE_SUBTRACT: lhs_val -= rhs_val; break;
                case RT_BINARY_OP_MULTIPLY:
                case RT_BINARY_OP_INPLACE_MULTIPLY: {

                    // If long long type exists and is larger than machine_int_t, then
                    // we can use the following code to perform overflow-checked multiplication.
                    // Otherwise (eg in x64 case) we must use the branching code below.
                    #if 0
                    // compute result using long long precision
                    long long res = (long long)lhs_val * (long long)rhs_val;
                    if (res > MP_SMALL_INT_MAX || res < MP_SMALL_INT_MIN) {
                        // result overflowed SMALL_INT, so return higher precision integer
                        return mp_obj_new_int_from_ll(res);
                    } else {
                        // use standard precision
                        lhs_val = (mp_small_int_t)res;
                    }
                    #endif

                    // branching pre-checks so the multiply itself can never overflow
                    if (lhs_val > 0) { // lhs_val is positive
                        if (rhs_val > 0) { // lhs_val and rhs_val are positive
                            if (lhs_val > (MP_SMALL_INT_MAX / rhs_val)) {
                                goto mul_overflow;
                            }
                        } else { // lhs_val positive, rhs_val nonpositive
                            if (rhs_val < (MP_SMALL_INT_MIN / lhs_val)) {
                                goto mul_overflow;
                            }
                        } // lhs_val positive, rhs_val nonpositive
                    } else { // lhs_val is nonpositive
                        if (rhs_val > 0) { // lhs_val is nonpositive, rhs_val is positive
                            if (lhs_val < (MP_SMALL_INT_MIN / rhs_val)) {
                                goto mul_overflow;
                            }
                        } else { // lhs_val and rhs_val are nonpositive
                            if (lhs_val != 0 && rhs_val < (MP_SMALL_INT_MAX / lhs_val)) {
                                goto mul_overflow;
                            }
                        } // End if lhs_val and rhs_val are nonpositive
                    } // End if lhs_val is nonpositive

                    // use standard precision
                    return MP_OBJ_NEW_SMALL_INT(lhs_val * rhs_val);

                mul_overflow:
                    // use higher precision
                    lhs = mp_obj_new_int_from_ll(lhs_val);
                    goto generic_binary_op;

                    break;
                }
                case RT_BINARY_OP_FLOOR_DIVIDE:
                case RT_BINARY_OP_INPLACE_FLOOR_DIVIDE:
                {
                    lhs_val = python_floor_divide(lhs_val, rhs_val);
                    break;
                }
                #if MICROPY_ENABLE_FLOAT
                case RT_BINARY_OP_TRUE_DIVIDE:
                case RT_BINARY_OP_INPLACE_TRUE_DIVIDE: return mp_obj_new_float((mp_float_t)lhs_val / (mp_float_t)rhs_val);
                #endif

                case RT_BINARY_OP_MODULO:
                case RT_BINARY_OP_INPLACE_MODULO:
                {
                    lhs_val = python_modulo(lhs_val, rhs_val);
                    break;
                }
                case RT_BINARY_OP_POWER:
                case RT_BINARY_OP_INPLACE_POWER:
                    if (rhs_val < 0) {
                        #if MICROPY_ENABLE_FLOAT
                        lhs = mp_obj_new_float(lhs_val);
                        goto generic_binary_op;
                        #else
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative power with no float support"));
                        #endif
                    } else {
                        // TODO check for overflow
                        // binary exponentiation (square-and-multiply)
                        machine_int_t ans = 1;
                        while (rhs_val > 0) {
                            if (rhs_val & 1) {
                                ans *= lhs_val;
                            }
                            lhs_val *= lhs_val;
                            rhs_val /= 2;
                        }
                        lhs_val = ans;
                    }
                    break;
                case RT_BINARY_OP_LESS: return MP_BOOL(lhs_val < rhs_val); break;
                case RT_BINARY_OP_MORE: return MP_BOOL(lhs_val > rhs_val); break;
                case RT_BINARY_OP_LESS_EQUAL: return MP_BOOL(lhs_val <= rhs_val); break;
                case RT_BINARY_OP_MORE_EQUAL: return MP_BOOL(lhs_val >= rhs_val); break;

                default: assert(0);
            }
            // TODO: We just should make mp_obj_new_int() inline and use that
            if (MP_OBJ_FITS_SMALL_INT(lhs_val)) {
                return MP_OBJ_NEW_SMALL_INT(lhs_val);
            } else {
                return mp_obj_new_int(lhs_val);
            }
#if MICROPY_ENABLE_FLOAT
        } else if (MP_OBJ_IS_TYPE(rhs, &mp_type_float)) {
            return mp_obj_float_binary_op(op, lhs_val, rhs);
        } else if (MP_OBJ_IS_TYPE(rhs, &mp_type_complex)) {
            return mp_obj_complex_binary_op(op, lhs_val, 0, rhs);
#endif
        }
    }

    /* deal with `in`
     *
     * NOTE `a in b` is `b.__contains__(a)`, hence why the generic dispatch
     * needs to go below with swapped arguments
     */
    if (op == RT_BINARY_OP_IN) {
        mp_obj_type_t *type = mp_obj_get_type(rhs);
        if (type->binary_op != NULL) {
            mp_obj_t res = type->binary_op(op, rhs, lhs);
            if (res != MP_OBJ_NULL) {
                return res;
            }
        }
        if (type->getiter != NULL) {
            /* second attempt, walk the iterator */
            mp_obj_t next = NULL;
            mp_obj_t iter = rt_getiter(rhs);
            while ((next = rt_iternext(iter)) != MP_OBJ_NULL) {
                if (mp_obj_equal(next, lhs)) {
                    return mp_const_true;
                }
            }
            return mp_const_false;
        }

        nlr_jump(mp_obj_new_exception_msg_varg(
                     &mp_type_TypeError, "'%s' object is not iterable",
                     mp_obj_get_type_str(rhs)));
        return mp_const_none;
    }

    // generic binary_op supplied by type
    mp_obj_type_t *type;
generic_binary_op:
    type = mp_obj_get_type(lhs);
    if (type->binary_op != NULL) {
        mp_obj_t result = type->binary_op(op, lhs, rhs);
        if (result != MP_OBJ_NULL) {
            return result;
        }
    }

    // TODO implement dispatch for reverse binary ops

    // TODO specify in error message what the operator is
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError,
        "unsupported operand types for binary operator: '%s', '%s'",
        mp_obj_get_type_str(lhs), mp_obj_get_type_str(rhs)));
    return mp_const_none;
}

649
// Create a function object from the code stored under unique_code_id.
// def_args carries the default-argument values.  Returns mp_const_none
// for an out-of-range id.
mp_obj_t rt_make_function_from_id(int unique_code_id, mp_obj_t def_args) {
    DEBUG_OP_printf("make_function_from_id %d\n", unique_code_id);
    if (unique_code_id < 1 || unique_code_id >= next_unique_code_id) {
        // illegal code id
        return mp_const_none;
    }

    // make the function, depending on the code kind
    mp_code_t *c = &unique_codes[unique_code_id];
    mp_obj_t fun;
    switch (c->kind) {
        case MP_CODE_BYTE:
            fun = mp_obj_new_fun_bc(c->scope_flags, c->arg_names, c->n_args, def_args, c->u_byte.code);
            break;
        case MP_CODE_NATIVE:
            fun = rt_make_function_n(c->n_args, c->u_native.fun);
            break;
        case MP_CODE_INLINE_ASM:
            fun = mp_obj_new_fun_asm(c->n_args, c->u_inline_asm.fun);
            break;
        default:
            // includes MP_CODE_NONE: id was handed out but never assigned
            assert(0);
            fun = mp_const_none;
    }

    // check for generator functions and if so wrap in generator object
    if ((c->scope_flags & MP_SCOPE_FLAG_GENERATOR) != 0) {
        fun = mp_obj_new_gen_wrap(fun);
    }

    return fun;
}

682
// Create a closure: build the underlying function object for the code id,
// then wrap it together with its tuple of captured cells.
mp_obj_t rt_make_closure_from_id(int unique_code_id, mp_obj_t closure_tuple, mp_obj_t def_args) {
    DEBUG_OP_printf("make_closure_from_id %d\n", unique_code_id);
    return mp_obj_new_closure(rt_make_function_from_id(unique_code_id, def_args), closure_tuple);
}

690
// Call fun with no arguments.
mp_obj_t rt_call_function_0(mp_obj_t fun) {
    return rt_call_function_n_kw(fun, 0, 0, NULL);
}

694
// Call fun with a single positional argument and no keywords.
mp_obj_t rt_call_function_1(mp_obj_t fun, mp_obj_t arg) {
    return rt_call_function_n_kw(fun, 1, 0, &arg);
}

698
699
// Call fun with two positional arguments and no keywords.
mp_obj_t rt_call_function_2(mp_obj_t fun, mp_obj_t arg1, mp_obj_t arg2) {
    mp_obj_t call_args[2] = { arg1, arg2 };
    return rt_call_function_n_kw(fun, 2, 0, call_args);
}

705
706
707
708
709
710
// wrapper that packs n_args and n_kw into one argument
// (the native emitter can only pass at most 3 arguments to a function)
mp_obj_t rt_call_function_n_kw_for_native(mp_obj_t fun_in, uint n_args_kw, const mp_obj_t *args) {
    uint n_args = n_args_kw & 0xff;        // low byte: positional count
    uint n_kw = (n_args_kw >> 8) & 0xff;   // next byte: keyword count
    return rt_call_function_n_kw(fun_in, n_args, n_kw, args);
}

711
712
// Generic call entry point.
// args contains, eg: arg0  arg1  key0  value0  key1  value1
mp_obj_t rt_call_function_n_kw(mp_obj_t fun_in, uint n_args, uint n_kw, const mp_obj_t *args) {
    // TODO improve this: fun object can specify its type and we parse here the arguments,
    // passing to the function arrays of fixed and keyword arguments
    DEBUG_OP_printf("calling function %p(n_args=%d, n_kw=%d, args=%p)\n", fun_in, n_args, n_kw, args);

    // objects whose type has no call slot are not callable
    mp_obj_type_t *type = mp_obj_get_type(fun_in);
    if (type->call == NULL) {
        nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError, "'%s' object is not callable", mp_obj_get_type_str(fun_in)));
    }
    return type->call(fun_in, n_args, n_kw, args);
}

729
730
// args contains: fun  self/NULL  arg(0)  ...  arg(n_args-2)  arg(n_args-1)  kw_key(0)  kw_val(0)  ... kw_key(n_kw-1)  kw_val(n_kw-1)
// if n_args==0 and n_kw==0 then there are only fun and self/NULL
731
mp_obj_t rt_call_method_n_kw(uint n_args, uint n_kw, const mp_obj_t *args) {
732
733
734
    DEBUG_OP_printf("call method (fun=%p, self=%p, n_args=%u, n_kw=%u, args=%p)\n", args[0], args[1], n_args, n_kw, args);
    int adjust = (args[1] == NULL) ? 0 : 1;
    return rt_call_function_n_kw(args[0], n_args + adjust, n_kw, args + 2 - adjust);
735
736
}

737
// Build a tuple object from n_args items.
mp_obj_t rt_build_tuple(int n_args, mp_obj_t *items) {
    return mp_obj_new_tuple(n_args, items);
}

741
// Build a list object from n_args items.
mp_obj_t rt_build_list(int n_args, mp_obj_t *items) {
    return mp_obj_new_list(n_args, items);
}

745
746
// Build a set object from n_args items.
mp_obj_t rt_build_set(int n_args, mp_obj_t *items) {
    return mp_obj_new_set(n_args, items);
}

749
// Add item to set (used while building a set literal); returns the set so
// the emitter can keep it on the stack.
mp_obj_t rt_store_set(mp_obj_t set, mp_obj_t item) {
    mp_obj_set_store(set, item);
    return set;
}

754
// unpacked items are stored in reverse order into the array pointed to by items
755
void rt_unpack_sequence(mp_obj_t seq_in, uint num, mp_obj_t *items) {
756
    uint seq_len;
757
758
759
760
761
762
    if (MP_OBJ_IS_TYPE(seq_in, &tuple_type) || MP_OBJ_IS_TYPE(seq_in, &list_type)) {
        mp_obj_t *seq_items;
        if (MP_OBJ_IS_TYPE(seq_in, &tuple_type)) {
            mp_obj_tuple_get(seq_in, &seq_len, &seq_items);
        } else {
            mp_obj_list_get(seq_in, &seq_len, &seq_items);
763
        }
764
        if (seq_len < num) {
765
            goto too_short;
766
        } else if (seq_len > num) {
767
            goto too_long;
768
        }
769
770
771
        for (uint i = 0; i < num; i++) {
            items[i] = seq_items[num - 1 - i];
        }
772
    } else {
773
774
775
776
        mp_obj_t iterable = rt_getiter(seq_in);

        for (seq_len = 0; seq_len < num; seq_len++) {
            mp_obj_t el = rt_iternext(iterable);
777
            if (el == MP_OBJ_NULL) {
778
779
780
781
                goto too_short;
            }
            items[num - 1 - seq_len] = el;
        }
782
        if (rt_iternext(iterable) != MP_OBJ_NULL) {
783
784
            goto too_long;
        }
785
    }
786
787
788
    return;

too_short:
789
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_ValueError, "need more than %d values to unpack", seq_len));
790
too_long:
791
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_ValueError, "too many values to unpack (expected %d)", num));
792
793
}

794
795
// Create an empty dict pre-sized for n_args key/value pairs.
mp_obj_t rt_build_map(int n_args) {
    return mp_obj_new_dict(n_args);
}

798
799
800
// Store key -> value into map, returning the map.
mp_obj_t rt_store_map(mp_obj_t map, mp_obj_t key, mp_obj_t value) {
    // map should always be a dict
    return mp_obj_dict_store(map, key, value);
}

803
// Load attribute attr from base: a plain value is returned as-is, while a
// method is wrapped into a bound-method object pairing it with base.
// Raises AttributeError (via rt_load_method) if nothing is found.
mp_obj_t rt_load_attr(mp_obj_t base, qstr attr) {
    DEBUG_OP_printf("load attr %p.%s\n", base, qstr_str(attr));
    // delegate the lookup to load_method
    mp_obj_t dest[2];
    rt_load_method(base, attr, dest);
    if (dest[1] != MP_OBJ_NULL) {
        // a method was found: bind it to this instance
        return mp_obj_new_bound_meth(dest[0], dest[1]);
    }
    // an ordinary attribute value
    return dest[0];
}

817
818
819
// no attribute found, returns:     dest[0] == MP_OBJ_NULL, dest[1] == MP_OBJ_NULL
// normal attribute found, returns: dest[0] == <attribute>, dest[1] == MP_OBJ_NULL
// method attribute found, returns: dest[0] == <method>,    dest[1] == <self>
//
// Lookup priority: the type's own load_attr slot first, then the built-in
// names (__class__, __next__), then a generic search of the type's
// locals_dict (only when the type has no load_attr of its own).
// Never raises; callers check dest[0] for MP_OBJ_NULL.
STATIC void rt_load_method_maybe(mp_obj_t base, qstr attr, mp_obj_t *dest) {
    // clear output to indicate no attribute/method found yet
    dest[0] = MP_OBJ_NULL;
    dest[1] = MP_OBJ_NULL;

    // get the type
    mp_obj_type_t *type = mp_obj_get_type(base);

    // if this type can do its own load, then call it
    if (type->load_attr != NULL) {
        type->load_attr(base, attr, dest);
    }

    // if nothing found yet, look for built-in and generic names
    if (dest[0] == MP_OBJ_NULL) {
        if (attr == MP_QSTR___class__) {
            // a.__class__ is equivalent to type(a)
            dest[0] = type;
        } else if (attr == MP_QSTR___next__ && type->iternext != NULL) {
            // synthesize a bound __next__ from the builtin next() for any iterator type
            dest[0] = (mp_obj_t)&mp_builtin_next_obj;
            dest[1] = base;
        } else if (type->load_attr == NULL) {
            // generic method lookup if type didn't provide a specific one
            // this is a lookup in the object (ie not class or type)
            if (type->locals_dict != NULL) {
                assert(MP_OBJ_IS_TYPE(type->locals_dict, &dict_type)); // Micro Python restriction, for now
                mp_map_t *locals_map = mp_obj_dict_get_map(type->locals_dict);
                mp_map_elem_t *elem = mp_map_lookup(locals_map, MP_OBJ_NEW_QSTR(attr), MP_MAP_LOOKUP);
                if (elem != NULL) {
                    // check if the methods are functions, static or class methods
                    // see http://docs.python.org/3.3/howto/descriptor.html
                    if (MP_OBJ_IS_TYPE(elem->value, &mp_type_staticmethod)) {
                        // return just the function
                        dest[0] = ((mp_obj_static_class_method_t*)elem->value)->fun;
                    } else if (MP_OBJ_IS_TYPE(elem->value, &mp_type_classmethod)) {
                        // return a bound method, with self being the type of this object
                        dest[0] = ((mp_obj_static_class_method_t*)elem->value)->fun;
                        dest[1] = mp_obj_get_type(base);
                    } else if (mp_obj_is_callable(elem->value)) {
                        // return a bound method, with self being this object
                        dest[0] = elem->value;
                        dest[1] = base;
                    } else {
                        // class member is a value, so just return that value
                        dest[0] = elem->value;
                    }
                }
            }
        }
    }
}

// Like rt_load_method_maybe, but raises AttributeError when the attribute
// or method is not found.
void rt_load_method(mp_obj_t base, qstr attr, mp_obj_t *dest) {
    DEBUG_OP_printf("load method %p.%s\n", base, qstr_str(attr));

    rt_load_method_maybe(base, attr, dest);

    if (dest[0] != MP_OBJ_NULL) {
        return;
    }

    // no attribute/method called attr
    // following CPython, we give a more detailed error message for type objects
    if (MP_OBJ_IS_TYPE(base, &mp_type_type)) {
        nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_AttributeError,
            "type object '%s' has no attribute '%s'", qstr_str(((mp_obj_type_t*)base)->name), qstr_str(attr)));
    } else {
        nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_AttributeError, "'%s' object has no attribute '%s'", mp_obj_get_type_str(base), qstr_str(attr)));
    }
}

889
// Store value into attribute attr of base; raises AttributeError if the
// type has no store_attr slot or the slot reports failure.
void rt_store_attr(mp_obj_t base, qstr attr, mp_obj_t value) {
    DEBUG_OP_printf("store attr %p.%s <- %p\n", base, qstr_str(attr), value);
    mp_obj_type_t *type = mp_obj_get_type(base);
    // let the type handle the store; a true return means it succeeded
    if (type->store_attr != NULL && type->store_attr(base, attr, value)) {
        return;
    }
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_AttributeError, "'%s' object has no attribute '%s'", mp_obj_get_type_str(base), qstr_str(attr)));
}

900
void rt_store_subscr(mp_obj_t base, mp_obj_t index, mp_obj_t value) {
Damien's avatar
Damien committed
901
    DEBUG_OP_printf("store subscr %p[%p] <- %p\n", base, index, value);
902
    if (MP_OBJ_IS_TYPE(base, &list_type)) {
903
        // list store
904
905
906
907
        mp_obj_list_store(base, index, value);
    } else if (MP_OBJ_IS_TYPE(base, &dict_type)) {
        // dict store
        mp_obj_dict_store(base, index, value);
Damien's avatar
Damien committed
908
    } else {
Paul Sokolovsky's avatar