// in principle, rt_xxx functions are called only by vm/native/viper and make assumptions about args
// mp_xxx functions are safer and can be called by anyone
// note that rt_assign_xxx are called only from emit*, and maybe we can rename them to reflect this

#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>

#include "nlr.h"
#include "misc.h"
#include "mpconfig.h"
#include "qstr.h"
#include "obj.h"
#include "runtime0.h"
#include "runtime.h"
#include "map.h"
#include "builtin.h"
#include "objarray.h"
#include "bc.h"

#if 0 // print debugging info
#define DEBUG_PRINT (1)
#define WRITE_CODE (1)
#define DEBUG_printf(args...) printf(args)
#define DEBUG_OP_printf(args...) printf(args)
#else // don't print debugging info
#define DEBUG_printf(args...) (void)0
#define DEBUG_OP_printf(args...) (void)0
#endif

// locals and globals need to be pointers because they can be the same in outer module scope
static mp_map_t *map_locals;
static mp_map_t *map_globals;
static mp_map_t map_builtins;
static mp_map_t map_loaded_modules; // TODO: expose as sys.modules

typedef enum {
    MP_CODE_NONE,
    MP_CODE_BYTE,
    MP_CODE_NATIVE,
    MP_CODE_INLINE_ASM,
} mp_code_kind_t;

typedef struct _mp_code_t {
    struct {
        mp_code_kind_t kind : 8;
        bool is_generator : 1;
    };
    struct {
        uint n_args : 16;
        uint n_state : 16;
    };
    union {
        struct {
            byte *code;
            uint len;
        } u_byte;
        struct {
            mp_fun_t fun;
        } u_native;
        struct {
            void *fun;
        } u_inline_asm;
    };
} mp_code_t;

static uint next_unique_code_id;
static machine_uint_t unique_codes_alloc = 0;
static mp_code_t *unique_codes = NULL;

#ifdef WRITE_CODE
FILE *fp_write_code = NULL;
#endif

// a good optimising compiler will inline this if necessary
static void mp_map_add_qstr(mp_map_t *map, qstr qstr, mp_obj_t value) {
    mp_map_lookup(map, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = value;
}

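// rt_init/rt_deinit create and tear down the global runtime state: the
// globals/locals map, the builtins map, the loaded-modules map and the
// unique code table.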
void rt_init(void) {
    // locals = globals for outer module (see Objects/frameobject.c/PyFrame_New())
    map_locals = map_globals = mp_map_new(1);
    mp_map_add_qstr(map_globals, MP_QSTR___name__, MP_OBJ_NEW_QSTR(MP_QSTR___main__));

    // init built-in hash table
    mp_map_init(&map_builtins, 3);

    // init loaded modules table
    mp_map_init(&map_loaded_modules, 3);

    // built-in exceptions (TODO, make these proper classes, and const if possible)
    mp_map_add_qstr(&map_builtins, MP_QSTR_AttributeError, mp_obj_new_exception(MP_QSTR_AttributeError));
    mp_map_add_qstr(&map_builtins, MP_QSTR_ImportError, mp_obj_new_exception(MP_QSTR_ImportError));
    mp_map_add_qstr(&map_builtins, MP_QSTR_IndexError, mp_obj_new_exception(MP_QSTR_IndexError));
    mp_map_add_qstr(&map_builtins, MP_QSTR_KeyError, mp_obj_new_exception(MP_QSTR_KeyError));
    mp_map_add_qstr(&map_builtins, MP_QSTR_NameError, mp_obj_new_exception(MP_QSTR_NameError));
    mp_map_add_qstr(&map_builtins, MP_QSTR_TypeError, mp_obj_new_exception(MP_QSTR_TypeError));
    mp_map_add_qstr(&map_builtins, MP_QSTR_SyntaxError, mp_obj_new_exception(MP_QSTR_SyntaxError));
    mp_map_add_qstr(&map_builtins, MP_QSTR_ValueError, mp_obj_new_exception(MP_QSTR_ValueError));
    // Somehow CPython managed to have OverflowError not inherit from ValueError ;-/
    // TODO: For MICROPY_CPYTHON_COMPAT==0 use ValueError to avoid exc proliferation
    mp_map_add_qstr(&map_builtins, MP_QSTR_OverflowError, mp_obj_new_exception(MP_QSTR_OverflowError));
    mp_map_add_qstr(&map_builtins, MP_QSTR_OSError, mp_obj_new_exception(MP_QSTR_OSError));
    mp_map_add_qstr(&map_builtins, MP_QSTR_AssertionError, mp_obj_new_exception(MP_QSTR_AssertionError));
    mp_map_add_qstr(&map_builtins, MP_QSTR_StopIteration, mp_obj_new_exception(MP_QSTR_StopIteration));

    // built-in objects
    mp_map_add_qstr(&map_builtins, MP_QSTR_Ellipsis, mp_const_ellipsis);

    // built-in core functions
    mp_map_add_qstr(&map_builtins, MP_QSTR___build_class__, (mp_obj_t)&mp_builtin___build_class___obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR___import__, (mp_obj_t)&mp_builtin___import___obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR___repl_print__, (mp_obj_t)&mp_builtin___repl_print___obj);

    // built-in types
    mp_map_add_qstr(&map_builtins, MP_QSTR_bool, (mp_obj_t)&bool_type);
#if MICROPY_ENABLE_FLOAT
    mp_map_add_qstr(&map_builtins, MP_QSTR_complex, (mp_obj_t)&complex_type);
#endif
    mp_map_add_qstr(&map_builtins, MP_QSTR_dict, (mp_obj_t)&dict_type);
    mp_map_add_qstr(&map_builtins, MP_QSTR_enumerate, (mp_obj_t)&enumerate_type);
    mp_map_add_qstr(&map_builtins, MP_QSTR_filter, (mp_obj_t)&filter_type);
#if MICROPY_ENABLE_FLOAT
    mp_map_add_qstr(&map_builtins, MP_QSTR_float, (mp_obj_t)&float_type);
#endif
    mp_map_add_qstr(&map_builtins, MP_QSTR_int, (mp_obj_t)&int_type);
    mp_map_add_qstr(&map_builtins, MP_QSTR_list, (mp_obj_t)&list_type);
    mp_map_add_qstr(&map_builtins, MP_QSTR_map, (mp_obj_t)&map_type);
    mp_map_add_qstr(&map_builtins, MP_QSTR_set, (mp_obj_t)&set_type);
    mp_map_add_qstr(&map_builtins, MP_QSTR_super, (mp_obj_t)&super_type);
    mp_map_add_qstr(&map_builtins, MP_QSTR_tuple, (mp_obj_t)&tuple_type);
    mp_map_add_qstr(&map_builtins, MP_QSTR_type, (mp_obj_t)&mp_const_type);
    mp_map_add_qstr(&map_builtins, MP_QSTR_zip, (mp_obj_t)&zip_type);

    mp_map_add_qstr(&map_builtins, MP_QSTR_classmethod, (mp_obj_t)&mp_type_classmethod);
    mp_map_add_qstr(&map_builtins, MP_QSTR_staticmethod, (mp_obj_t)&mp_type_staticmethod);

    mp_obj_t m_array = mp_obj_new_module(MP_QSTR_array);
    rt_store_attr(m_array, MP_QSTR_array, (mp_obj_t)&array_type);

    // built-in user functions
    mp_map_add_qstr(&map_builtins, MP_QSTR_abs, (mp_obj_t)&mp_builtin_abs_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_all, (mp_obj_t)&mp_builtin_all_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_any, (mp_obj_t)&mp_builtin_any_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_bytes, (mp_obj_t)&mp_builtin_bytes_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_callable, (mp_obj_t)&mp_builtin_callable_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_chr, (mp_obj_t)&mp_builtin_chr_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_dir, (mp_obj_t)&mp_builtin_dir_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_divmod, (mp_obj_t)&mp_builtin_divmod_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_eval, (mp_obj_t)&mp_builtin_eval_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_exec, (mp_obj_t)&mp_builtin_exec_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_hash, (mp_obj_t)&mp_builtin_hash_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_id, (mp_obj_t)&mp_builtin_id_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_isinstance, (mp_obj_t)&mp_builtin_isinstance_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_issubclass, (mp_obj_t)&mp_builtin_issubclass_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_iter, (mp_obj_t)&mp_builtin_iter_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_len, (mp_obj_t)&mp_builtin_len_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_max, (mp_obj_t)&mp_builtin_max_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_min, (mp_obj_t)&mp_builtin_min_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_next, (mp_obj_t)&mp_builtin_next_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_ord, (mp_obj_t)&mp_builtin_ord_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_pow, (mp_obj_t)&mp_builtin_pow_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_print, (mp_obj_t)&mp_builtin_print_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_range, (mp_obj_t)&mp_builtin_range_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_repr, (mp_obj_t)&mp_builtin_repr_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_sorted, (mp_obj_t)&mp_builtin_sorted_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_sum, (mp_obj_t)&mp_builtin_sum_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_str, (mp_obj_t)&mp_builtin_str_obj);
    mp_map_add_qstr(&map_builtins, MP_QSTR_bytearray, (mp_obj_t)&mp_builtin_bytearray_obj);

#if MICROPY_CPYTHON_COMPAT
    // pre-create the sys module so that "import sys" doesn't throw an exception
    mp_obj_t m_sys = mp_obj_new_module(MP_QSTR_sys);
    // avoid an unused-variable warning
    (void)m_sys;
#endif
    // init sys.path
    // for efficiency, this is left to platform-specific startup code
    //sys_path = mp_obj_new_list(0, NULL);
    //rt_store_attr(m_sys, MP_QSTR_path, sys_path);

    mp_module_micropython_init();

    // TODO: wastes one mp_code_t structure in mem
    next_unique_code_id = 1; // 0 indicates "no code"
    unique_codes_alloc = 0;
    unique_codes = NULL;

#ifdef WRITE_CODE
    fp_write_code = fopen("out-code", "wb");
#endif
}

void rt_deinit(void) {
    m_del(mp_code_t, unique_codes, unique_codes_alloc);
    mp_map_free(map_globals);
    mp_map_deinit(&map_loaded_modules);
    mp_map_deinit(&map_builtins);
#ifdef WRITE_CODE
    if (fp_write_code != NULL) {
        fclose(fp_write_code);
    }
#endif
}

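// reserve the next code id; the corresponding entry in unique_codes is filled
// in later by one of the rt_assign_*_code functions below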
uint rt_get_unique_code_id(void) {
    return next_unique_code_id++;
}

static void alloc_unique_codes(void) {
    if (next_unique_code_id > unique_codes_alloc) {
        DEBUG_printf("allocate more unique codes: " UINT_FMT " -> %u\n", unique_codes_alloc, next_unique_code_id);
        // increase size of unique_codes table
        unique_codes = m_renew(mp_code_t, unique_codes, unique_codes_alloc, next_unique_code_id);
        for (uint i = unique_codes_alloc; i < next_unique_code_id; i++) {
            unique_codes[i].kind = MP_CODE_NONE;
        }
        unique_codes_alloc = next_unique_code_id;
    }
}

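// the rt_assign_*_code functions below are called by the emitters to fill in
// the mp_code_t entry for an id previously reserved with rt_get_unique_code_id;
// a function object for that code is created later via rt_make_function_from_id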
void rt_assign_byte_code(uint unique_code_id, byte *code, uint len, int n_args, int n_locals, int n_stack, bool is_generator) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_BYTE;
    unique_codes[unique_code_id].is_generator = is_generator;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = n_locals + n_stack;
    unique_codes[unique_code_id].u_byte.code = code;
    unique_codes[unique_code_id].u_byte.len = len;

    //printf("byte code: %d bytes\n", len);

#ifdef DEBUG_PRINT
    DEBUG_printf("assign byte code: id=%d code=%p len=%u n_args=%d n_locals=%d n_stack=%d\n", unique_code_id, code, len, n_args, n_locals, n_stack);
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", code[i]);
    }
    DEBUG_printf("\n");
#if MICROPY_DEBUG_PRINTERS
    mp_byte_code_print(code, len);
#endif
#endif
}

void rt_assign_native_code(uint unique_code_id, void *fun, uint len, int n_args) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_NATIVE;
    unique_codes[unique_code_id].is_generator = false;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = 0;
    unique_codes[unique_code_id].u_native.fun = fun;

    //printf("native code: %d bytes\n", len);

#ifdef DEBUG_PRINT
    DEBUG_printf("assign native code: id=%d fun=%p len=%u n_args=%d\n", unique_code_id, fun, len, n_args);
    byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", fun_data[i]);
    }
    DEBUG_printf("\n");

#ifdef WRITE_CODE
    if (fp_write_code != NULL) {
        fwrite(fun_data, len, 1, fp_write_code);
        fflush(fp_write_code);
    }
#endif
#endif
}

void rt_assign_inline_asm_code(uint unique_code_id, void *fun, uint len, int n_args) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_INLINE_ASM;
    unique_codes[unique_code_id].is_generator = false;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = 0;
    unique_codes[unique_code_id].u_inline_asm.fun = fun;

#ifdef DEBUG_PRINT
    DEBUG_printf("assign inline asm code: id=%d fun=%p len=%u n_args=%d\n", unique_code_id, fun, len, n_args);
    byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", fun_data[i]);
    }
    DEBUG_printf("\n");

#ifdef WRITE_CODE
    if (fp_write_code != NULL) {
        fwrite(fun_data, len, 1, fp_write_code);
    }
#endif
#endif
}

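// return 1 if arg is true under Python truthiness rules, otherwise 0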
int rt_is_true(mp_obj_t arg) {
    DEBUG_OP_printf("is true %p\n", arg);
    if (arg == mp_const_false) {
        return 0;
    } else if (arg == mp_const_true) {
        return 1;
    } else if (arg == mp_const_none) {
        return 0;
    } else if (MP_OBJ_IS_SMALL_INT(arg)) {
        if (MP_OBJ_SMALL_INT_VALUE(arg) == 0) {
            return 0;
        } else {
            return 1;
        }
    } else {
        mp_obj_type_t *type = mp_obj_get_type(arg);
        if (type->unary_op != NULL) {
            mp_obj_t result = type->unary_op(RT_UNARY_OP_BOOL, arg);
            if (result != MP_OBJ_NULL) {
                return result == mp_const_true;
            }
        }

        mp_obj_t len = mp_obj_len_maybe(arg);
        if (len != MP_OBJ_NULL) {
            // obj has a length, so truth is determined by whether len != 0
            return len != MP_OBJ_NEW_SMALL_INT(0);
        } else {
            // any other obj is true per Python semantics
            return 1;
        }
    }
}

mp_obj_t rt_list_append(mp_obj_t self_in, mp_obj_t arg) {
    return mp_obj_list_append(self_in, arg);
}

#define PARSE_DEC_IN_INTG (1)
#define PARSE_DEC_IN_FRAC (2)
#define PARSE_DEC_IN_EXP  (3)

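// parse a decimal constant (float or imaginary literal) from its source text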
mp_obj_t rt_load_const_dec(qstr qstr) {
#if MICROPY_ENABLE_FLOAT
    DEBUG_OP_printf("load '%s'\n", qstr_str(qstr));
    const char *s = qstr_str(qstr);
    int in = PARSE_DEC_IN_INTG;
    mp_float_t dec_val = 0;
    bool exp_neg = false;
    int exp_val = 0;
    int exp_extra = 0;
    bool imag = false;
    for (; *s; s++) {
        int dig = *s;
        if ('0' <= dig && dig <= '9') {
            dig -= '0';
            if (in == PARSE_DEC_IN_EXP) {
                exp_val = 10 * exp_val + dig;
            } else {
                dec_val = 10 * dec_val + dig;
                if (in == PARSE_DEC_IN_FRAC) {
                    exp_extra -= 1;
                }
            }
        } else if (in == PARSE_DEC_IN_INTG && dig == '.') {
            in = PARSE_DEC_IN_FRAC;
        } else if (in != PARSE_DEC_IN_EXP && (dig == 'E' || dig == 'e')) {
            in = PARSE_DEC_IN_EXP;
            if (s[1] == '+') {
                s++;
            } else if (s[1] == '-') {
                s++;
                exp_neg = true;
            }
        } else if (dig == 'J' || dig == 'j') {
            s++;
            imag = true;
            break;
        } else {
            // unknown character
            break;
        }
    }
    if (*s != 0) {
        nlr_jump(mp_obj_new_exception_msg(MP_QSTR_SyntaxError, "invalid syntax for number"));
    }
    if (exp_neg) {
        exp_val = -exp_val;
    }
    exp_val += exp_extra;
    for (; exp_val > 0; exp_val--) {
        dec_val *= 10;
    }
    for (; exp_val < 0; exp_val++) {
        dec_val *= 0.1;
    }
    if (imag) {
        return mp_obj_new_complex(0, dec_val);
    } else {
        return mp_obj_new_float(dec_val);
    }
#else
    nlr_jump(mp_obj_new_exception_msg(MP_QSTR_SyntaxError, "decimal numbers not supported"));
#endif
}

mp_obj_t rt_load_const_str(qstr qstr) {
    DEBUG_OP_printf("load '%s'\n", qstr_str(qstr));
    return MP_OBJ_NEW_QSTR(qstr);
}

mp_obj_t rt_load_const_bytes(qstr qstr) {
    DEBUG_OP_printf("load b'%s'\n", qstr_str(qstr));
    uint len;
    const byte *data = qstr_data(qstr, &len);
    return mp_obj_new_bytes(data, len);
}

mp_obj_t rt_load_name(qstr qstr) {
    // logic: search locals, globals, builtins
    DEBUG_OP_printf("load name %s\n", qstr_str(qstr));
    mp_map_elem_t *elem = mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (elem == NULL) {
        elem = mp_map_lookup(map_globals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
        if (elem == NULL) {
            elem = mp_map_lookup(&map_builtins, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
            if (elem == NULL) {
                nlr_jump(mp_obj_new_exception_msg_varg(MP_QSTR_NameError, "name '%s' is not defined", qstr_str(qstr)));
            }
        }
    }
    return elem->value;
}

mp_obj_t rt_load_global(qstr qstr) {
    // logic: search globals, builtins
    DEBUG_OP_printf("load global %s\n", qstr_str(qstr));
    mp_map_elem_t *elem = mp_map_lookup(map_globals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (elem == NULL) {
        elem = mp_map_lookup(&map_builtins, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
        if (elem == NULL) {
            nlr_jump(mp_obj_new_exception_msg_varg(MP_QSTR_NameError, "name '%s' is not defined", qstr_str(qstr)));
        }
    }
    return elem->value;
}

mp_obj_t rt_load_build_class(void) {
    DEBUG_OP_printf("load_build_class\n");
    mp_map_elem_t *elem = mp_map_lookup(&map_builtins, MP_OBJ_NEW_QSTR(MP_QSTR___build_class__), MP_MAP_LOOKUP);
    if (elem == NULL) {
        nlr_jump(mp_obj_new_exception_msg(MP_QSTR_NameError, "name '__build_class__' is not defined"));
    }
    return elem->value;
}

mp_obj_t rt_get_cell(mp_obj_t cell) {
    return mp_obj_cell_get(cell);
}

void rt_set_cell(mp_obj_t cell, mp_obj_t val) {
    mp_obj_cell_set(cell, val);
}

void rt_store_name(qstr qstr, mp_obj_t obj) {
    DEBUG_OP_printf("store name %s <- %p\n", qstr_str(qstr), obj);
    mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = obj;
}

void rt_store_global(qstr qstr, mp_obj_t obj) {
    DEBUG_OP_printf("store global %s <- %p\n", qstr_str(qstr), obj);
    mp_map_lookup(map_globals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = obj;
}

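// apply a unary operator: small ints are handled inline, all other types are
// dispatched through their unary_op slot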
mp_obj_t rt_unary_op(int op, mp_obj_t arg) {
    DEBUG_OP_printf("unary %d %p\n", op, arg);
    if (MP_OBJ_IS_SMALL_INT(arg)) {
        mp_small_int_t val = MP_OBJ_SMALL_INT_VALUE(arg);
        switch (op) {
            case RT_UNARY_OP_BOOL: return MP_BOOL(val != 0);
            case RT_UNARY_OP_POSITIVE: break;
            case RT_UNARY_OP_NEGATIVE: val = -val; break;
            case RT_UNARY_OP_INVERT: val = ~val; break;
            default: assert(0); val = 0;
        }
        if (MP_OBJ_FITS_SMALL_INT(val)) {
            return MP_OBJ_NEW_SMALL_INT(val);
        }
        return mp_obj_new_int(val);
    } else {
        mp_obj_type_t *type = mp_obj_get_type(arg);
        if (type->unary_op != NULL) {
            mp_obj_t result = type->unary_op(op, arg);
            if (result != NULL) {
                return result;
            }
        }
        // TODO specify in error message what the operator is
        nlr_jump(mp_obj_new_exception_msg_varg(MP_QSTR_TypeError, "bad operand type for unary operator: '%s'", type->name));
    }
}

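// apply a binary operator: identity, equality and exception-match are handled
// generically, small-int arithmetic is done inline, and everything else is
// dispatched through the type's binary_op slot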
mp_obj_t rt_binary_op(int op, mp_obj_t lhs, mp_obj_t rhs) {
    DEBUG_OP_printf("binary %d %p %p\n", op, lhs, rhs);

    // TODO correctly distinguish inplace operators for mutable objects
    // lookup logic that CPython uses for +=:
    //   check for implemented +=
    //   then check for implemented +
    //   then check for implemented seq.inplace_concat
    //   then check for implemented seq.concat
    //   then fail
    // note that list does not implement + or +=, so that inplace_concat is reached first for +=

    // deal with is
    if (op == RT_BINARY_OP_IS) {
        return MP_BOOL(lhs == rhs);
    }

    // deal with == and != for all types
    if (op == RT_BINARY_OP_EQUAL || op == RT_BINARY_OP_NOT_EQUAL) {
        if (mp_obj_equal(lhs, rhs)) {
            if (op == RT_BINARY_OP_EQUAL) {
                return mp_const_true;
            } else {
                return mp_const_false;
            }
        } else {
            if (op == RT_BINARY_OP_EQUAL) {
                return mp_const_false;
            } else {
                return mp_const_true;
            }
        }
    }

    // deal with exception_match for all types
    if (op == RT_BINARY_OP_EXCEPTION_MATCH) {
        // TODO properly! at the moment it just compares the exception identifier for equality
        if (MP_OBJ_IS_TYPE(lhs, &exception_type) && MP_OBJ_IS_TYPE(rhs, &exception_type)) {
            if (mp_obj_exception_get_type(lhs) == mp_obj_exception_get_type(rhs)) {
                return mp_const_true;
            } else {
                return mp_const_false;
            }
        }
    }

    if (MP_OBJ_IS_SMALL_INT(lhs)) {
        mp_small_int_t lhs_val = MP_OBJ_SMALL_INT_VALUE(lhs);
        if (MP_OBJ_IS_SMALL_INT(rhs)) {
            mp_small_int_t rhs_val = MP_OBJ_SMALL_INT_VALUE(rhs);
            switch (op) {
                case RT_BINARY_OP_OR:
                case RT_BINARY_OP_INPLACE_OR: lhs_val |= rhs_val; break;
                case RT_BINARY_OP_XOR:
                case RT_BINARY_OP_INPLACE_XOR: lhs_val ^= rhs_val; break;
                case RT_BINARY_OP_AND:
                case RT_BINARY_OP_INPLACE_AND: lhs_val &= rhs_val; break;
                case RT_BINARY_OP_LSHIFT:
                case RT_BINARY_OP_INPLACE_LSHIFT: lhs_val <<= rhs_val; break;
                case RT_BINARY_OP_RSHIFT:
                case RT_BINARY_OP_INPLACE_RSHIFT: lhs_val >>= rhs_val; break;
                case RT_BINARY_OP_ADD:
                case RT_BINARY_OP_INPLACE_ADD: lhs_val += rhs_val; break;
                case RT_BINARY_OP_SUBTRACT:
                case RT_BINARY_OP_INPLACE_SUBTRACT: lhs_val -= rhs_val; break;
                case RT_BINARY_OP_MULTIPLY:
                case RT_BINARY_OP_INPLACE_MULTIPLY: lhs_val *= rhs_val; break;
                case RT_BINARY_OP_FLOOR_DIVIDE:
                case RT_BINARY_OP_INPLACE_FLOOR_DIVIDE: lhs_val /= rhs_val; break;
    #if MICROPY_ENABLE_FLOAT
                case RT_BINARY_OP_TRUE_DIVIDE:
                case RT_BINARY_OP_INPLACE_TRUE_DIVIDE: return mp_obj_new_float((mp_float_t)lhs_val / (mp_float_t)rhs_val);
    #endif

                // TODO implement modulo as specified by Python
                case RT_BINARY_OP_MODULO:
                case RT_BINARY_OP_INPLACE_MODULO: lhs_val %= rhs_val; break;

                // TODO check for negative power, and overflow
                case RT_BINARY_OP_POWER:
                case RT_BINARY_OP_INPLACE_POWER:
                {
                    int ans = 1;
                    while (rhs_val > 0) {
                        if (rhs_val & 1) {
                            ans *= lhs_val;
                        }
                        lhs_val *= lhs_val;
                        rhs_val /= 2;
                    }
                    lhs_val = ans;
                    break;
                }
                case RT_BINARY_OP_LESS: return MP_BOOL(lhs_val < rhs_val); break;
                case RT_BINARY_OP_MORE: return MP_BOOL(lhs_val > rhs_val); break;
                case RT_BINARY_OP_LESS_EQUAL: return MP_BOOL(lhs_val <= rhs_val); break;
                case RT_BINARY_OP_MORE_EQUAL: return MP_BOOL(lhs_val >= rhs_val); break;

                default: assert(0);
            }
            // TODO: we should just make mp_obj_new_int() inline and use that
            if (MP_OBJ_FITS_SMALL_INT(lhs_val)) {
                return MP_OBJ_NEW_SMALL_INT(lhs_val);
            }
            return mp_obj_new_int(lhs_val);
#if MICROPY_ENABLE_FLOAT
        } else if (MP_OBJ_IS_TYPE(rhs, &float_type)) {
            return mp_obj_float_binary_op(op, lhs_val, rhs);
        } else if (MP_OBJ_IS_TYPE(rhs, &complex_type)) {
            return mp_obj_complex_binary_op(op, lhs_val, 0, rhs);
#endif
        }
    }

    /* deal with `in`
     *
     * NOTE `a in b` is `b.__contains__(a)`, hence why the generic dispatch
     * needs to go below with swapped arguments
     */
    if (op == RT_BINARY_OP_IN) {
        mp_obj_type_t *type = mp_obj_get_type(rhs);
        if (type->binary_op != NULL) {
            mp_obj_t res = type->binary_op(op, rhs, lhs);
            if (res != MP_OBJ_NULL) {
                return res;
            }
        }
        if (type->getiter != NULL) {
            /* second attempt, walk the iterator */
            mp_obj_t next = NULL;
            mp_obj_t iter = rt_getiter(rhs);
            while ((next = rt_iternext(iter)) != mp_const_stop_iteration) {
                if (mp_obj_equal(next, lhs)) {
                    return mp_const_true;
                }
            }
            return mp_const_false;
        }

        nlr_jump(mp_obj_new_exception_msg_varg(
                     MP_QSTR_TypeError, "'%s' object is not iterable",
                     mp_obj_get_type_str(rhs)));
        return mp_const_none;
    }

    // generic binary_op supplied by type
    mp_obj_type_t *type = mp_obj_get_type(lhs);
    if (type->binary_op != NULL) {
        mp_obj_t result = type->binary_op(op, lhs, rhs);
        if (result != MP_OBJ_NULL) {
            return result;
        }
    }

    // TODO implement dispatch for reverse binary ops

    // TODO specify in error message what the operator is
    nlr_jump(mp_obj_new_exception_msg_varg(MP_QSTR_TypeError,
        "unsupported operand types for binary operator: '%s', '%s'",
        mp_obj_get_type_str(lhs), mp_obj_get_type_str(rhs)));
    return mp_const_none;
}

mp_obj_t rt_make_function_from_id(int unique_code_id, mp_obj_t def_args) {
    DEBUG_OP_printf("make_function_from_id %d\n", unique_code_id);
    if (unique_code_id < 1 || unique_code_id >= next_unique_code_id) {
        // illegal code id
        return mp_const_none;
    }

    // make the function, depending on the code kind
    mp_code_t *c = &unique_codes[unique_code_id];
    mp_obj_t fun;
    switch (c->kind) {
        case MP_CODE_BYTE:
            fun = mp_obj_new_fun_bc(c->n_args, def_args, c->n_state, c->u_byte.code);
            break;
        case MP_CODE_NATIVE:
            fun = rt_make_function_n(c->n_args, c->u_native.fun);
            break;
        case MP_CODE_INLINE_ASM:
            fun = mp_obj_new_fun_asm(c->n_args, c->u_inline_asm.fun);
            break;
        default:
            assert(0);
            fun = mp_const_none;
    }

    // check for generator functions and if so wrap in generator object
    if (c->is_generator) {
        fun = mp_obj_new_gen_wrap(fun);
    }

    return fun;
}

mp_obj_t rt_make_closure_from_id(int unique_code_id, mp_obj_t closure_tuple) {
    DEBUG_OP_printf("make_closure_from_id %d\n", unique_code_id);
    // make function object
    mp_obj_t ffun = rt_make_function_from_id(unique_code_id, MP_OBJ_NULL);
    // wrap function in closure object
    return mp_obj_new_closure(ffun, closure_tuple);
}

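// convenience wrappers for calling an object with a fixed number of positional
// arguments; they all funnel into rt_call_function_n_kw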
mp_obj_t rt_call_function_0(mp_obj_t fun) {
    return rt_call_function_n_kw(fun, 0, 0, NULL);
}

mp_obj_t rt_call_function_1(mp_obj_t fun, mp_obj_t arg) {
    return rt_call_function_n_kw(fun, 1, 0, &arg);
}

mp_obj_t rt_call_function_2(mp_obj_t fun, mp_obj_t arg1, mp_obj_t arg2) {
    mp_obj_t args[2];
    args[0] = arg1;
    args[1] = arg2;
    return rt_call_function_n_kw(fun, 2, 0, args);
}

// wrapper that accepts n_args and n_kw in one argument
// native emitter can only pass at most 3 arguments to a function
mp_obj_t rt_call_function_n_kw_for_native(mp_obj_t fun_in, uint n_args_kw, const mp_obj_t *args) {
    return rt_call_function_n_kw(fun_in, n_args_kw & 0xff, (n_args_kw >> 8) & 0xff, args);
}

// args contains, eg: arg0  arg1  key0  value0  key1  value1
mp_obj_t rt_call_function_n_kw(mp_obj_t fun_in, uint n_args, uint n_kw, const mp_obj_t *args) {
    // TODO improve this: fun object can specify its type and we parse here the arguments,
    // passing to the function arrays of fixed and keyword arguments
    DEBUG_OP_printf("calling function %p(n_args=%d, n_kw=%d, args=%p)\n", fun_in, n_args, n_kw, args);

    // get the type
    mp_obj_type_t *type = mp_obj_get_type(fun_in);

    // do the call
    if (type->call != NULL) {
        return type->call(fun_in, n_args, n_kw, args);
    } else {
        nlr_jump(mp_obj_new_exception_msg_varg(MP_QSTR_TypeError, "'%s' object is not callable", type->name));
    }
}

// args contains: fun  self/NULL  arg(0)  ...  arg(n_args-2)  arg(n_args-1)  kw_key(0)  kw_val(0)  ... kw_key(n_kw-1)  kw_val(n_kw-1)
// if n_args==0 and n_kw==0 then there are only fun and self/NULL
mp_obj_t rt_call_method_n_kw(uint n_args, uint n_kw, const mp_obj_t *args) {
    DEBUG_OP_printf("call method (fun=%p, self=%p, n_args=%u, n_kw=%u, args=%p)\n", args[0], args[1], n_args, n_kw, args);
    int adjust = (args[1] == NULL) ? 0 : 1;
    return rt_call_function_n_kw(args[0], n_args + adjust, n_kw, args + 2 - adjust);
}

mp_obj_t rt_build_tuple(int n_args, mp_obj_t *items) {
    return mp_obj_new_tuple(n_args, items);
}

mp_obj_t rt_build_list(int n_args, mp_obj_t *items) {
    return mp_obj_new_list(n_args, items);
}

mp_obj_t rt_build_set(int n_args, mp_obj_t *items) {
    return mp_obj_new_set(n_args, items);
}

mp_obj_t rt_store_set(mp_obj_t set, mp_obj_t item) {
    mp_obj_set_store(set, item);
    return set;
}

// unpacked items are stored in reverse order into the array pointed to by items
void rt_unpack_sequence(mp_obj_t seq_in, uint num, mp_obj_t *items) {
    uint seq_len;
    if (MP_OBJ_IS_TYPE(seq_in, &tuple_type) || MP_OBJ_IS_TYPE(seq_in, &list_type)) {
        mp_obj_t *seq_items;
        if (MP_OBJ_IS_TYPE(seq_in, &tuple_type)) {
            mp_obj_tuple_get(seq_in, &seq_len, &seq_items);
        } else {
            mp_obj_list_get(seq_in, &seq_len, &seq_items);
        }
        if (seq_len < num) {
            goto too_short;
        } else if (seq_len > num) {
            goto too_long;
        }
        for (uint i = 0; i < num; i++) {
            items[i] = seq_items[num - 1 - i];
        }
    } else {
        mp_obj_t iterable = rt_getiter(seq_in);

        for (seq_len = 0; seq_len < num; seq_len++) {
            mp_obj_t el = rt_iternext(iterable);
            if (el == mp_const_stop_iteration) {
                goto too_short;
            }
            items[num - 1 - seq_len] = el;
        }
        if (rt_iternext(iterable) != mp_const_stop_iteration) {
            goto too_long;
        }
    }
    return;

too_short:
    nlr_jump(mp_obj_new_exception_msg_varg(MP_QSTR_ValueError, "need more than %d values to unpack", seq_len));
too_long:
    nlr_jump(mp_obj_new_exception_msg_varg(MP_QSTR_ValueError, "too many values to unpack (expected %d)", num));
}

mp_obj_t rt_build_map(int n_args) {
    return mp_obj_new_dict(n_args);
}

mp_obj_t rt_store_map(mp_obj_t map, mp_obj_t key, mp_obj_t value) {
    // map should always be a dict
    return mp_obj_dict_store(map, key, value);
}

mp_obj_t rt_load_attr(mp_obj_t base, qstr attr) {
    DEBUG_OP_printf("load attr %p.%s\n", base, qstr_str(attr));
    // use load_method
    mp_obj_t dest[2];
    rt_load_method(base, attr, dest);
    if (dest[1] == MP_OBJ_NULL) {
        // load_method returned just a normal attribute
        return dest[0];
    } else {
        // load_method returned a method, so build a bound method object
        return mp_obj_new_bound_meth(dest[0], dest[1]);
    }
}

// no attribute found, returns:     dest[0] == MP_OBJ_NULL, dest[1] == MP_OBJ_NULL
// normal attribute found, returns: dest[0] == <attribute>, dest[1] == MP_OBJ_NULL
// method attribute found, returns: dest[0] == <method>,    dest[1] == <self>
static void rt_load_method_maybe(mp_obj_t base, qstr attr, mp_obj_t *dest) {
    // clear output to indicate no attribute/method found yet
    dest[0] = MP_OBJ_NULL;
    dest[1] = MP_OBJ_NULL;

    // get the type
    mp_obj_type_t *type = mp_obj_get_type(base);

    // if this type can do its own load, then call it
    if (type->load_attr != NULL) {
        type->load_attr(base, attr, dest);
    }

    // if nothing found yet, look for built-in and generic names
    if (dest[0] == MP_OBJ_NULL) {
        if (attr == MP_QSTR___class__) {
            // a.__class__ is equivalent to type(a)
            dest[0] = type;
        } else if (attr == MP_QSTR___next__ && type->iternext != NULL) {
            dest[0] = (mp_obj_t)&mp_builtin_next_obj;
            dest[1] = base;
        } else if (type->load_attr == NULL) {
            // generic method lookup if type didn't provide a specific one
            // this is a lookup in the object (ie not class or type)
            const mp_method_t *meth = type->methods;
            if (meth != NULL) {
                for (; meth->name != NULL; meth++) {
                    if (strcmp(meth->name, qstr_str(attr)) == 0) {
                        // check if the methods are functions, static or class methods
                        // see http://docs.python.org/3.3/howto/descriptor.html
                        if (MP_OBJ_IS_TYPE(meth->fun, &mp_type_staticmethod)) {
                            // return just the function
                            dest[0] = ((mp_obj_static_class_method_t*)meth->fun)->fun;
                        } else if (MP_OBJ_IS_TYPE(meth->fun, &mp_type_classmethod)) {
                            // return a bound method, with self being the type of this object
                            dest[0] = ((mp_obj_static_class_method_t*)meth->fun)->fun;
                            dest[1] = mp_obj_get_type(base);
                        } else {
                            // return a bound method, with self being this object
                            dest[0] = (mp_obj_t)meth->fun;
                            dest[1] = base;
                        }
                        break;
                    }
                }
            }
        }