// in principle, rt_xxx functions are called only by vm/native/viper and make assumptions about args
// mp_xxx functions are safer and can be called by anyone
// note that rt_assign_xxx are called only from emit*, and maybe we can rename them to reflect this

#include <stdio.h>
#include <string.h>
#include <assert.h>
#include <math.h>

#include "nlr.h"
#include "misc.h"
#include "mpconfig.h"
#include "qstr.h"
#include "obj.h"
#include "objmodule.h"
#include "parsenum.h"
#include "runtime0.h"
#include "runtime.h"
#include "map.h"
#include "builtin.h"
#include "builtintables.h"
#include "bc.h"
#include "intdivmod.h"

#if 0 // print debugging info
#define DEBUG_PRINT (1)
#define WRITE_CODE (1)
#define DEBUG_printf DEBUG_printf
#define DEBUG_OP_printf(...) DEBUG_printf(__VA_ARGS__)
#else // don't print debugging info
#define DEBUG_printf(...) (void)0
#define DEBUG_OP_printf(...) (void)0
#endif

// locals and globals need to be pointers because they can be the same in outer module scope
STATIC mp_map_t *map_locals;
STATIC mp_map_t *map_globals;
STATIC mp_map_t map_builtins;

typedef enum {
    MP_CODE_NONE,
    MP_CODE_BYTE,
    MP_CODE_NATIVE,
    MP_CODE_INLINE_ASM,
} mp_code_kind_t;

typedef struct _mp_code_t {
    mp_code_kind_t kind : 8;
    uint scope_flags : 8;
    uint n_args : 16;
    uint n_state : 16;
    union {
        struct {
            byte *code;
            uint len;
        } u_byte;
        struct {
            mp_fun_t fun;
        } u_native;
        struct {
            void *fun;
        } u_inline_asm;
    };
    qstr *arg_names;
} mp_code_t;

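// table of all code registered via the rt_assign_xxx functions, indexed by
// unique_code_id (id 0 is reserved and means "no code")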
STATIC uint next_unique_code_id;
STATIC machine_uint_t unique_codes_alloc = 0;
STATIC mp_code_t *unique_codes = NULL;

#ifdef WRITE_CODE
FILE *fp_write_code = NULL;
#endif

// a good optimising compiler will inline this if necessary
STATIC void mp_map_add_qstr(mp_map_t *map, qstr qstr, mp_obj_t value) {
    mp_map_lookup(map, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = value;
}

void rt_init(void) {
    // locals = globals for outer module (see Objects/frameobject.c/PyFrame_New())
    map_locals = map_globals = mp_map_new(1);

    // init built-in hash table
    mp_map_init(&map_builtins, 3);

    // init global module stuff
    mp_module_init();

    // add some builtins that can't be done in ROM
    mp_map_add_qstr(map_globals, MP_QSTR___name__, MP_OBJ_NEW_QSTR(MP_QSTR___main__));
    mp_map_add_qstr(&map_builtins, MP_QSTR_Ellipsis, mp_const_ellipsis);

#if MICROPY_CPYTHON_COMPAT
    // pre-create the sys module so that "import sys" doesn't raise an exception
    mp_obj_t m_sys = mp_obj_new_module(MP_QSTR_sys);
    // avoid unused-variable warning
    (void)m_sys;
#endif
    // init sys.path
    // for efficiency, left to platform-specific startup code
    //sys_path = mp_obj_new_list(0, NULL);
    //rt_store_attr(m_sys, MP_QSTR_path, sys_path);

    // TODO: wastes one mp_code_t structure in mem
    next_unique_code_id = 1; // 0 indicates "no code"
    unique_codes_alloc = 0;
    unique_codes = NULL;

#ifdef WRITE_CODE
    fp_write_code = fopen("out-code", "wb");
#endif
}

void rt_deinit(void) {
    m_del(mp_code_t, unique_codes, unique_codes_alloc);
    mp_map_free(map_globals);
    mp_map_deinit(&map_builtins);
    mp_module_deinit();

#ifdef WRITE_CODE
    if (fp_write_code != NULL) {
        fclose(fp_write_code);
    }
#endif
}

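// hand out the next slot in the unique code table; ids start at 1 and are
// issued sequentially (id 0 means "no code")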
uint rt_get_unique_code_id(void) {
    return next_unique_code_id++;
}

STATIC void alloc_unique_codes(void) {
    if (next_unique_code_id > unique_codes_alloc) {
        DEBUG_printf("allocate more unique codes: " UINT_FMT " -> %u\n", unique_codes_alloc, next_unique_code_id);
        // increase size of unique_codes table
        unique_codes = m_renew(mp_code_t, unique_codes, unique_codes_alloc, next_unique_code_id);
        for (uint i = unique_codes_alloc; i < next_unique_code_id; i++) {
            unique_codes[i].kind = MP_CODE_NONE;
        }
        unique_codes_alloc = next_unique_code_id;
    }
}

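// the rt_assign_xxx functions below are called by the emitters to attach
// compiled code to a previously issued unique_code_id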
void rt_assign_byte_code(uint unique_code_id, byte *code, uint len, int n_args, int n_locals, int n_stack, uint scope_flags, qstr *arg_names) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_BYTE;
    unique_codes[unique_code_id].scope_flags = scope_flags;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = n_locals + n_stack;
    unique_codes[unique_code_id].u_byte.code = code;
    unique_codes[unique_code_id].u_byte.len = len;
    unique_codes[unique_code_id].arg_names = arg_names;

    //printf("byte code: %d bytes\n", len);

#ifdef DEBUG_PRINT
    DEBUG_printf("assign byte code: id=%d code=%p len=%u n_args=%d n_locals=%d n_stack=%d\n", unique_code_id, code, len, n_args, n_locals, n_stack);
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", code[i]);
    }
    DEBUG_printf("\n");
#if MICROPY_DEBUG_PRINTERS
    mp_byte_code_print(code, len);
#endif
#endif
}

void rt_assign_native_code(uint unique_code_id, void *fun, uint len, int n_args) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_NATIVE;
    unique_codes[unique_code_id].scope_flags = 0;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = 0;
    unique_codes[unique_code_id].u_native.fun = fun;

    //printf("native code: %d bytes\n", len);

#ifdef DEBUG_PRINT
    DEBUG_printf("assign native code: id=%d fun=%p len=%u n_args=%d\n", unique_code_id, fun, len, n_args);
    byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", fun_data[i]);
    }
    DEBUG_printf("\n");

#ifdef WRITE_CODE
    if (fp_write_code != NULL) {
        fwrite(fun_data, len, 1, fp_write_code);
        fflush(fp_write_code);
    }
#endif
#endif
}

void rt_assign_inline_asm_code(uint unique_code_id, void *fun, uint len, int n_args) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_INLINE_ASM;
    unique_codes[unique_code_id].scope_flags = 0;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = 0;
    unique_codes[unique_code_id].u_inline_asm.fun = fun;

#ifdef DEBUG_PRINT
    DEBUG_printf("assign inline asm code: id=%d fun=%p len=%u n_args=%d\n", unique_code_id, fun, len, n_args);
    byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", fun_data[i]);
    }
    DEBUG_printf("\n");

#ifdef WRITE_CODE
    if (fp_write_code != NULL) {
        fwrite(fun_data, len, 1, fp_write_code);
    }
#endif
#endif
}

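// C-level truth test following Python semantics: False, None and zero are false;
// otherwise the type's unary_op(BOOL) is tried, then its length, and any other
// object counts as true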
int rt_is_true(mp_obj_t arg) {
    DEBUG_OP_printf("is true %p\n", arg);
    if (arg == mp_const_false) {
        return 0;
    } else if (arg == mp_const_true) {
        return 1;
    } else if (arg == mp_const_none) {
        return 0;
    } else if (MP_OBJ_IS_SMALL_INT(arg)) {
        if (MP_OBJ_SMALL_INT_VALUE(arg) == 0) {
            return 0;
        } else {
            return 1;
        }
    } else {
        mp_obj_type_t *type = mp_obj_get_type(arg);
        if (type->unary_op != NULL) {
            mp_obj_t result = type->unary_op(RT_UNARY_OP_BOOL, arg);
            if (result != MP_OBJ_NULL) {
                return result == mp_const_true;
            }
        }

        mp_obj_t len = mp_obj_len_maybe(arg);
        if (len != MP_OBJ_NULL) {
            // obj has a length; it is true if that length is non-zero
            return len != MP_OBJ_NEW_SMALL_INT(0);
        } else {
            // any other obj is true per Python semantics
            return 1;
        }
    }
}

mp_obj_t rt_list_append(mp_obj_t self_in, mp_obj_t arg) {
    return mp_obj_list_append(self_in, arg);
}

mp_obj_t rt_load_const_dec(qstr qstr) {
    DEBUG_OP_printf("load '%s'\n", qstr_str(qstr));
    uint len;
    const byte* data = qstr_data(qstr, &len);
    return mp_parse_num_decimal((const char*)data, len, true, false);
}

mp_obj_t rt_load_const_str(qstr qstr) {
    DEBUG_OP_printf("load '%s'\n", qstr_str(qstr));
    return MP_OBJ_NEW_QSTR(qstr);
}

mp_obj_t rt_load_const_bytes(qstr qstr) {
    DEBUG_OP_printf("load b'%s'\n", qstr_str(qstr));
    uint len;
    const byte *data = qstr_data(qstr, &len);
    return mp_obj_new_bytes(data, len);
}

mp_obj_t rt_load_name(qstr qstr) {
    // logic: search locals, globals, builtins
    DEBUG_OP_printf("load name %s\n", qstr_str(qstr));
    mp_map_elem_t *elem = mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (elem != NULL) {
        return elem->value;
    } else {
        return rt_load_global(qstr);
    }
}

mp_obj_t rt_load_global(qstr qstr) {
    // logic: search globals, builtins
    DEBUG_OP_printf("load global %s\n", qstr_str(qstr));
    mp_map_elem_t *elem = mp_map_lookup(map_globals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (elem == NULL) {
        elem = mp_map_lookup(&map_builtins, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
        if (elem == NULL) {
            mp_obj_t o = mp_builtin_tables_lookup_object(qstr);
            if (o != MP_OBJ_NULL) {
                return o;
            }
            nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_NameError, "name '%s' is not defined", qstr_str(qstr)));
        }
    }
    return elem->value;
}

mp_obj_t rt_load_build_class(void) {
    DEBUG_OP_printf("load_build_class\n");
    // lookup __build_class__ in dynamic table of builtins first
    mp_map_elem_t *elem = mp_map_lookup(&map_builtins, MP_OBJ_NEW_QSTR(MP_QSTR___build_class__), MP_MAP_LOOKUP);
    if (elem != NULL) {
        // found user-defined __build_class__, return it
        return elem->value;
    } else {
        // no user-defined __build_class__, return builtin one
        return (mp_obj_t)&mp_builtin___build_class___obj;
    }
}

mp_obj_t rt_get_cell(mp_obj_t cell) {
    return mp_obj_cell_get(cell);
}

void rt_set_cell(mp_obj_t cell, mp_obj_t val) {
    mp_obj_cell_set(cell, val);
}

void rt_store_name(qstr qstr, mp_obj_t obj) {
    DEBUG_OP_printf("store name %s <- %p\n", qstr_str(qstr), obj);
    mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = obj;
}

void rt_delete_name(qstr qstr) {
    DEBUG_OP_printf("delete name %s\n", qstr_str(qstr));
    mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_REMOVE_IF_FOUND);
}

void rt_store_global(qstr qstr, mp_obj_t obj) {
    DEBUG_OP_printf("store global %s <- %p\n", qstr_str(qstr), obj);
    mp_map_lookup(map_globals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = obj;
}

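// apply a unary operator to an object; small ints are handled inline (with an
// overflow check for negation), other types dispatch to their unary_op slot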
mp_obj_t rt_unary_op(int op, mp_obj_t arg) {
    DEBUG_OP_printf("unary %d %p\n", op, arg);

    if (MP_OBJ_IS_SMALL_INT(arg)) {
        mp_small_int_t val = MP_OBJ_SMALL_INT_VALUE(arg);
        switch (op) {
            case RT_UNARY_OP_BOOL:
                return MP_BOOL(val != 0);
            case RT_UNARY_OP_POSITIVE:
                return arg;
            case RT_UNARY_OP_NEGATIVE:
                // check for overflow
                if (val == MP_SMALL_INT_MIN) {
                    return mp_obj_new_int(-val);
                } else {
                    return MP_OBJ_NEW_SMALL_INT(-val);
                }
            case RT_UNARY_OP_INVERT:
                return MP_OBJ_NEW_SMALL_INT(~val);
            default:
                assert(0);
                return arg;
        }
    } else {
        mp_obj_type_t *type = mp_obj_get_type(arg);
        if (type->unary_op != NULL) {
            mp_obj_t result = type->unary_op(op, arg);
            if (result != NULL) {
                return result;
            }
        }
        // TODO specify in error message what the operator is
        nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError, "bad operand type for unary operator: '%s'", mp_obj_get_type_str(arg)));
    }
}

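// apply a binary operator; is/==/!= and exception matching are handled generically
// here, small-int arithmetic is done inline with overflow checks, and everything
// else dispatches to the type's binary_op slot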
mp_obj_t rt_binary_op(int op, mp_obj_t lhs, mp_obj_t rhs) {
    DEBUG_OP_printf("binary %d %p %p\n", op, lhs, rhs);

    // TODO correctly distinguish inplace operators for mutable objects
    // lookup logic that CPython uses for +=:
    //   check for implemented +=
    //   then check for implemented +
    //   then check for implemented seq.inplace_concat
    //   then check for implemented seq.concat
    //   then fail
    // note that list does not implement + or +=, so that inplace_concat is reached first for +=

    // deal with is
    if (op == RT_BINARY_OP_IS) {
        return MP_BOOL(lhs == rhs);
    }

    // deal with == and != for all types
    if (op == RT_BINARY_OP_EQUAL || op == RT_BINARY_OP_NOT_EQUAL) {
        if (mp_obj_equal(lhs, rhs)) {
            if (op == RT_BINARY_OP_EQUAL) {
                return mp_const_true;
            } else {
                return mp_const_false;
            }
        } else {
            if (op == RT_BINARY_OP_EQUAL) {
                return mp_const_false;
            } else {
                return mp_const_true;
            }
        }
    }

    // deal with exception_match for all types
    if (op == RT_BINARY_OP_EXCEPTION_MATCH) {
        // rhs must be a subclass of BaseException
        if (mp_obj_is_exception_type(rhs)) {
            // if lhs is an instance of an exception, then extract and use its type
            if (mp_obj_is_exception_instance(lhs)) {
                lhs = mp_obj_get_type(lhs);
            }
            if (mp_obj_is_subclass_fast(lhs, rhs)) {
                return mp_const_true;
            } else {
                return mp_const_false;
            }
        }
        assert(0);
        return mp_const_false;
    }

    if (MP_OBJ_IS_SMALL_INT(lhs)) {
        mp_small_int_t lhs_val = MP_OBJ_SMALL_INT_VALUE(lhs);
        if (MP_OBJ_IS_SMALL_INT(rhs)) {
            mp_small_int_t rhs_val = MP_OBJ_SMALL_INT_VALUE(rhs);
            // This is a binary operation: lhs_val op rhs_val
            // We need to be careful to handle overflow; see CERT INT32-C
            // Operations that can overflow:
            //      +       result always fits in machine_int_t, then handled by SMALL_INT check
            //      -       result always fits in machine_int_t, then handled by SMALL_INT check
            //      *       checked explicitly
            //      /       if lhs=MIN and rhs=-1; result always fits in machine_int_t, then handled by SMALL_INT check
            //      %       if lhs=MIN and rhs=-1; result always fits in machine_int_t, then handled by SMALL_INT check
            //      <<      checked explicitly
            switch (op) {
                case RT_BINARY_OP_OR:
                case RT_BINARY_OP_INPLACE_OR: lhs_val |= rhs_val; break;
                case RT_BINARY_OP_XOR:
                case RT_BINARY_OP_INPLACE_XOR: lhs_val ^= rhs_val; break;
                case RT_BINARY_OP_AND:
                case RT_BINARY_OP_INPLACE_AND: lhs_val &= rhs_val; break;
                case RT_BINARY_OP_LSHIFT:
                case RT_BINARY_OP_INPLACE_LSHIFT: {
                    if (rhs_val < 0) {
                        // negative shift not allowed
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative shift count"));
                    } else if (rhs_val >= BITS_PER_WORD || lhs_val > (MP_SMALL_INT_MAX >> rhs_val) || lhs_val < (MP_SMALL_INT_MIN >> rhs_val)) {
                        // left-shift will overflow, so use higher precision integer
                        lhs = mp_obj_new_int_from_ll(lhs_val);
                        goto generic_binary_op;
                    } else {
                        // use standard precision
                        lhs_val <<= rhs_val;
                    }
                    break;
                }
                case RT_BINARY_OP_RSHIFT:
                case RT_BINARY_OP_INPLACE_RSHIFT:
                    if (rhs_val < 0) {
                        // negative shift not allowed
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative shift count"));
                    } else {
                        // standard precision is enough for right-shift
                        lhs_val >>= rhs_val;
                    }
                    break;
                case RT_BINARY_OP_ADD:
                case RT_BINARY_OP_INPLACE_ADD: lhs_val += rhs_val; break;
                case RT_BINARY_OP_SUBTRACT:
                case RT_BINARY_OP_INPLACE_SUBTRACT: lhs_val -= rhs_val; break;
                case RT_BINARY_OP_MULTIPLY:
                case RT_BINARY_OP_INPLACE_MULTIPLY: {

                    // If long long type exists and is larger than machine_int_t, then
                    // we can use the following code to perform overflow-checked multiplication.
                    // Otherwise (eg in x64 case) we must use the branching code below.
                    #if 0
                    // compute result using long long precision
                    long long res = (long long)lhs_val * (long long)rhs_val;
                    if (res > MP_SMALL_INT_MAX || res < MP_SMALL_INT_MIN) {
                        // result overflowed SMALL_INT, so return higher precision integer
                        return mp_obj_new_int_from_ll(res);
                    } else {
                        // use standard precision
                        lhs_val = (mp_small_int_t)res;
                    }
                    #endif

                    if (lhs_val > 0) { // lhs_val is positive
                        if (rhs_val > 0) { // lhs_val and rhs_val are positive
                            if (lhs_val > (MP_SMALL_INT_MAX / rhs_val)) {
                                goto mul_overflow;
                            }
                        } else { // lhs_val positive, rhs_val nonpositive
                            if (rhs_val < (MP_SMALL_INT_MIN / lhs_val)) {
                                goto mul_overflow;
                            }
                        } // lhs_val positive, rhs_val nonpositive
                    } else { // lhs_val is nonpositive
                        if (rhs_val > 0) { // lhs_val is nonpositive, rhs_val is positive
                            if (lhs_val < (MP_SMALL_INT_MIN / rhs_val)) {
                                goto mul_overflow;
                            }
                        } else { // lhs_val and rhs_val are nonpositive
                            if (lhs_val != 0 && rhs_val < (MP_SMALL_INT_MAX / lhs_val)) {
                                goto mul_overflow;
                            }
                        } // End if lhs_val and rhs_val are nonpositive
                    } // End if lhs_val is nonpositive

                    // use standard precision
                    return MP_OBJ_NEW_SMALL_INT(lhs_val * rhs_val);

                mul_overflow:
                    // use higher precision
                    lhs = mp_obj_new_int_from_ll(lhs_val);
                    goto generic_binary_op;

                    break;
                }
                case RT_BINARY_OP_FLOOR_DIVIDE:
                case RT_BINARY_OP_INPLACE_FLOOR_DIVIDE:
                {
                    lhs_val = python_floor_divide(lhs_val, rhs_val);
                    break;
                }
                #if MICROPY_ENABLE_FLOAT
                case RT_BINARY_OP_TRUE_DIVIDE:
                case RT_BINARY_OP_INPLACE_TRUE_DIVIDE: return mp_obj_new_float((mp_float_t)lhs_val / (mp_float_t)rhs_val);
                #endif

                case RT_BINARY_OP_MODULO:
                case RT_BINARY_OP_INPLACE_MODULO:
                {
                    lhs_val = python_modulo(lhs_val, rhs_val);
                    break;
                }
                case RT_BINARY_OP_POWER:
                case RT_BINARY_OP_INPLACE_POWER:
                    if (rhs_val < 0) {
                        #if MICROPY_ENABLE_FLOAT
                        lhs = mp_obj_new_float(lhs_val);
                        goto generic_binary_op;
                        #else
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative power with no float support"));
                        #endif
                    } else {
                        // TODO check for overflow
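                        // compute lhs_val ** rhs_val by repeated squaring (square-and-multiply)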
                        machine_int_t ans = 1;
                        while (rhs_val > 0) {
                            if (rhs_val & 1) {
                                ans *= lhs_val;
                            }
                            lhs_val *= lhs_val;
                            rhs_val /= 2;
                        }
                        lhs_val = ans;
                    }
                    break;
                case RT_BINARY_OP_LESS: return MP_BOOL(lhs_val < rhs_val); break;
                case RT_BINARY_OP_MORE: return MP_BOOL(lhs_val > rhs_val); break;
                case RT_BINARY_OP_LESS_EQUAL: return MP_BOOL(lhs_val <= rhs_val); break;
                case RT_BINARY_OP_MORE_EQUAL: return MP_BOOL(lhs_val >= rhs_val); break;

                default: assert(0);
            }
            // TODO: we should just make mp_obj_new_int() inline and use that here
            if (MP_OBJ_FITS_SMALL_INT(lhs_val)) {
                return MP_OBJ_NEW_SMALL_INT(lhs_val);
            } else {
                return mp_obj_new_int(lhs_val);
            }
#if MICROPY_ENABLE_FLOAT
        } else if (MP_OBJ_IS_TYPE(rhs, &mp_type_float)) {
            return mp_obj_float_binary_op(op, lhs_val, rhs);
        } else if (MP_OBJ_IS_TYPE(rhs, &mp_type_complex)) {
            return mp_obj_complex_binary_op(op, lhs_val, 0, rhs);
#endif
        }
    }

    /* deal with `in`
     *
     * NOTE `a in b` is `b.__contains__(a)`, which is why the generic dispatch
     * below is called with swapped arguments
     */
    if (op == RT_BINARY_OP_IN) {
        mp_obj_type_t *type = mp_obj_get_type(rhs);
        if (type->binary_op != NULL) {
            mp_obj_t res = type->binary_op(op, rhs, lhs);
            if (res != MP_OBJ_NULL) {
                return res;
            }
        }
        if (type->getiter != NULL) {
            /* second attempt, walk the iterator */
            mp_obj_t next = NULL;
            mp_obj_t iter = rt_getiter(rhs);
            while ((next = rt_iternext(iter)) != mp_const_stop_iteration) {
                if (mp_obj_equal(next, lhs)) {
                    return mp_const_true;
                }
            }
            return mp_const_false;
        }

        nlr_jump(mp_obj_new_exception_msg_varg(
                     &mp_type_TypeError, "'%s' object is not iterable",
                     mp_obj_get_type_str(rhs)));
        return mp_const_none;
    }

    // generic binary_op supplied by type
    mp_obj_type_t *type;
generic_binary_op:
    type = mp_obj_get_type(lhs);
    if (type->binary_op != NULL) {
        mp_obj_t result = type->binary_op(op, lhs, rhs);
        if (result != MP_OBJ_NULL) {
            return result;
        }
    }

    // TODO implement dispatch for reverse binary ops

    // TODO specify in error message what the operator is
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError,
        "unsupported operand types for binary operator: '%s', '%s'",
        mp_obj_get_type_str(lhs), mp_obj_get_type_str(rhs)));
    return mp_const_none;
}

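// construct a callable object from the code registered under unique_code_id;
// generator functions are additionally wrapped in a generator object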
mp_obj_t rt_make_function_from_id(int unique_code_id, mp_obj_t def_args) {
    DEBUG_OP_printf("make_function_from_id %d\n", unique_code_id);
    if (unique_code_id < 1 || unique_code_id >= next_unique_code_id) {
        // illegal code id
        return mp_const_none;
    }

    // make the function, depending on the code kind
    mp_code_t *c = &unique_codes[unique_code_id];
    mp_obj_t fun;
    switch (c->kind) {
        case MP_CODE_BYTE:
            fun = mp_obj_new_fun_bc(c->scope_flags, c->arg_names, c->n_args, def_args, c->n_state, c->u_byte.code);
            break;
        case MP_CODE_NATIVE:
            fun = rt_make_function_n(c->n_args, c->u_native.fun);
            break;
        case MP_CODE_INLINE_ASM:
            fun = mp_obj_new_fun_asm(c->n_args, c->u_inline_asm.fun);
            break;
        default:
            assert(0);
            fun = mp_const_none;
    }

    // check for generator functions and if so wrap in generator object
    if ((c->scope_flags & MP_SCOPE_FLAG_GENERATOR) != 0) {
        fun = mp_obj_new_gen_wrap(fun);
    }

    return fun;
}

mp_obj_t rt_make_closure_from_id(int unique_code_id, mp_obj_t closure_tuple) {
    DEBUG_OP_printf("make_closure_from_id %d\n", unique_code_id);
    // make function object
    mp_obj_t ffun = rt_make_function_from_id(unique_code_id, MP_OBJ_NULL);
    // wrap function in closure object
    return mp_obj_new_closure(ffun, closure_tuple);
}

mp_obj_t rt_call_function_0(mp_obj_t fun) {
    return rt_call_function_n_kw(fun, 0, 0, NULL);
}

mp_obj_t rt_call_function_1(mp_obj_t fun, mp_obj_t arg) {
    return rt_call_function_n_kw(fun, 1, 0, &arg);
}

mp_obj_t rt_call_function_2(mp_obj_t fun, mp_obj_t arg1, mp_obj_t arg2) {
    mp_obj_t args[2];
    args[0] = arg1;
    args[1] = arg2;
    return rt_call_function_n_kw(fun, 2, 0, args);
}

// wrapper that accepts n_args and n_kw in one argument
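// (n_args_kw packs the two counts as (n_kw << 8) | n_args, decoded below)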
// native emitter can only pass at most 3 arguments to a function
mp_obj_t rt_call_function_n_kw_for_native(mp_obj_t fun_in, uint n_args_kw, const mp_obj_t *args) {
    return rt_call_function_n_kw(fun_in, n_args_kw & 0xff, (n_args_kw >> 8) & 0xff, args);
}

// args contains, eg: arg0  arg1  key0  value0  key1  value1
mp_obj_t rt_call_function_n_kw(mp_obj_t fun_in, uint n_args, uint n_kw, const mp_obj_t *args) {
    // TODO improve this: fun object can specify its type and we parse here the arguments,
    // passing to the function arrays of fixed and keyword arguments

    DEBUG_OP_printf("calling function %p(n_args=%d, n_kw=%d, args=%p)\n", fun_in, n_args, n_kw, args);

    // get the type
    mp_obj_type_t *type = mp_obj_get_type(fun_in);

    // do the call
    if (type->call != NULL) {
        return type->call(fun_in, n_args, n_kw, args);
    } else {
        nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError, "'%s' object is not callable", mp_obj_get_type_str(fun_in)));
    }
}

// args contains: fun  self/NULL  arg(0)  ...  arg(n_args-2)  arg(n_args-1)  kw_key(0)  kw_val(0)  ... kw_key(n_kw-1)  kw_val(n_kw-1)
// if n_args==0 and n_kw==0 then there are only fun and self/NULL
mp_obj_t rt_call_method_n_kw(uint n_args, uint n_kw, const mp_obj_t *args) {
    DEBUG_OP_printf("call method (fun=%p, self=%p, n_args=%u, n_kw=%u, args=%p)\n", args[0], args[1], n_args, n_kw, args);
    int adjust = (args[1] == NULL) ? 0 : 1;
    return rt_call_function_n_kw(args[0], n_args + adjust, n_kw, args + 2 - adjust);
}

mp_obj_t rt_build_tuple(int n_args, mp_obj_t *items) {
    return mp_obj_new_tuple(n_args, items);
}

mp_obj_t rt_build_list(int n_args, mp_obj_t *items) {
    return mp_obj_new_list(n_args, items);
}

mp_obj_t rt_build_set(int n_args, mp_obj_t *items) {
    return mp_obj_new_set(n_args, items);
}

mp_obj_t rt_store_set(mp_obj_t set, mp_obj_t item) {
    mp_obj_set_store(set, item);
    return set;
}

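// for example, unpacking a 3-element sequence (x, y, z) with num == 3 fills
// items[] with z, y, x (reverse order, as noted below)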
// unpacked items are stored in reverse order into the array pointed to by items
void rt_unpack_sequence(mp_obj_t seq_in, uint num, mp_obj_t *items) {
    uint seq_len;
    if (MP_OBJ_IS_TYPE(seq_in, &tuple_type) || MP_OBJ_IS_TYPE(seq_in, &list_type)) {
        mp_obj_t *seq_items;
        if (MP_OBJ_IS_TYPE(seq_in, &tuple_type)) {
            mp_obj_tuple_get(seq_in, &seq_len, &seq_items);
        } else {
            mp_obj_list_get(seq_in, &seq_len, &seq_items);
        }
        if (seq_len < num) {
            goto too_short;
        } else if (seq_len > num) {
            goto too_long;
        }
        for (uint i = 0; i < num; i++) {
            items[i] = seq_items[num - 1 - i];
        }
    } else {
        mp_obj_t iterable = rt_getiter(seq_in);

        for (seq_len = 0; seq_len < num; seq_len++) {
            mp_obj_t el = rt_iternext(iterable);
            if (el == mp_const_stop_iteration) {
                goto too_short;
            }
            items[num - 1 - seq_len] = el;
        }
        if (rt_iternext(iterable) != mp_const_stop_iteration) {
            goto too_long;
        }
    }
    return;

too_short:
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_ValueError, "need more than %d values to unpack", seq_len));
too_long:
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_ValueError, "too many values to unpack (expected %d)", num));
}

mp_obj_t rt_build_map(int n_args) {
    return mp_obj_new_dict(n_args);
}

mp_obj_t rt_store_map(mp_obj_t map, mp_obj_t key, mp_obj_t value) {
    // map should always be a dict
    return mp_obj_dict_store(map, key, value);
}

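// attribute load: looks the name up via rt_load_method and, when a method plus
// self pair is returned, wraps them in a bound-method object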
mp_obj_t rt_load_attr(mp_obj_t base, qstr attr) {
    DEBUG_OP_printf("load attr %p.%s\n", base, qstr_str(attr));
    // use load_method
    mp_obj_t dest[2];
    rt_load_method(base, attr, dest);
    if (dest[1] == MP_OBJ_NULL) {
        // load_method returned just a normal attribute
        return dest[0];
    } else {
        // load_method returned a method, so build a bound method object
        return mp_obj_new_bound_meth(dest[0], dest[1]);
    }
}

// no attribute found, returns:     dest[0] == MP_OBJ_NULL, dest[1] == MP_OBJ_NULL
// normal attribute found, returns: dest[0] == <attribute>, dest[1] == MP_OBJ_NULL
// method attribute found, returns: dest[0] == <method>,    dest[1] == <self>
STATIC void rt_load_method_maybe(mp_obj_t base, qstr attr, mp_obj_t *dest) {
    // clear output to indicate no attribute/method found yet
    dest[0] = MP_OBJ_NULL;
    dest[1] = MP_OBJ_NULL;

    // get the type
    mp_obj_type_t *type = mp_obj_get_type(base);

    // if this type can do its own load, then call it
    if (type->load_attr != NULL) {
        type->load_attr(base, attr, dest);
    }

    // if nothing found yet, look for built-in and generic names
    if (dest[0] == MP_OBJ_NULL) {
        if (attr == MP_QSTR___class__) {
            // a.__class__ is equivalent to type(a)
            dest[0] = type;
        } else if (attr == MP_QSTR___next__ && type->iternext != NULL) {
            dest[0] = (mp_obj_t)&mp_builtin_next_obj;
            dest[1] = base;
        } else if (type->load_attr == NULL) {
            // generic method lookup if type didn't provide a specific one
            // this is a lookup in the object (ie not class or type)
            const mp_method_t *meth = type->methods;
            if (meth != NULL) {
                for (; meth->name != NULL; meth++) {
                    if (strcmp(meth->name, qstr_str(attr)) == 0) {
                        // check if the methods are functions, static or class methods
                        // see http://docs.python.org/3.3/howto/descriptor.html
                        if (MP_OBJ_IS_TYPE(meth->fun, &mp_type_staticmethod)) {
                            // return just the function
                            dest[0] = ((mp_obj_static_class_method_t*)meth->fun)->fun;
                        } else if (MP_OBJ_IS_TYPE(meth->fun, &mp_type_classmethod)) {
                            // return a bound method, with self being the type of this object
                            dest[0] = ((mp_obj_static_class_method_t*)meth->fun)->fun;
                            dest[1] = mp_obj_get_type(base);
                        } else {
                            // return a bound method, with self being this object
                            dest[0] = (mp_obj_t)meth->fun;
                            dest[1] = base;
                        }
                        break;
                    }
                }
            }
        }
    }
}

void rt_load_method(mp_obj_t base, qstr attr, mp_obj_t *dest) {
    DEBUG_OP_printf("load method %p.%s\n", base, qstr_str(attr));

    rt_load_method_maybe(base, attr, dest);

    if (dest[0] == MP_OBJ_NULL) {
        // no attribute/method called attr
        // following CPython, we give a more detailed error message for type objects
        if (MP_OBJ_IS_TYPE(base, &mp_type_type)) {
            nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_AttributeError,
                "type object '%s' has no attribute '%s'", qstr_str(((mp_obj_type_t*)base)->name), qstr_str(attr)));
        } else {
            nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_AttributeError, "'%s' object has no attribute '%s'", mp_obj_get_type_str(base), qstr_str(attr)));
        }
    }
}

void rt_store_attr(mp_obj_t base, qstr attr, mp_obj_t value) {
    DEBUG_OP_printf("store attr %p.%s <- %p\n", base, qstr_str(attr), value);
    mp_obj_type_t *type = mp_obj_get_type(base);
    if (type->store_attr != NULL) {
        if (type->store_attr(base, attr, value)) {
            return;
        }
    }
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_AttributeError, "'%s' object has no attribute '%s'", mp_obj_get_type_str(base), qstr_str(attr)));
}

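// store base[index] = value; lists and dicts are handled directly, other types
// go through their store_item slot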
void rt_store_subscr(mp_obj_t base, mp_obj_t index, mp_obj_t value) {
    DEBUG_OP_printf("store subscr %p[%p] <- %p\n", base, index, value);
    if (MP_OBJ_IS_TYPE(base, &list_type)) {
        // list store
        mp_obj_list_store(base, index, value);
    } else if (MP_OBJ_IS_TYPE(base, &dict_type)) {
        // dict store
        mp_obj_dict_store(base, index, value);
    } else {
        mp_obj_type_t *type = mp_obj_get_type(base);
        if (type->store_item != NULL) {
            bool r = type->store_item(base, index, value);
            if (r) {
                return;
            }
            // TODO: call base classes here?
        }