// in principle, rt_xxx functions are called only by vm/native/viper and make assumptions about args
// mp_xxx functions are safer and can be called by anyone
// note that rt_assign_xxx are called only from emit*, and maybe we can rename them to reflect this

#include <stdio.h>
#include <string.h>
#include <assert.h>

#include "nlr.h"
#include "misc.h"
#include "mpconfig.h"
#include "qstr.h"
#include "obj.h"
#include "parsenum.h"
#include "runtime0.h"
#include "runtime.h"
#include "map.h"
#include "builtin.h"
#include "objarray.h"
#include "bc.h"

22
#if 0 // print debugging info
23
#define DEBUG_PRINT (1)
24
#define WRITE_CODE (1)
25
#define DEBUG_printf DEBUG_printf
26
#define DEBUG_OP_printf(...) DEBUG_printf(__VA_ARGS__)
27
#else // don't print debugging info
28
29
#define DEBUG_printf(...) (void)0
#define DEBUG_OP_printf(...) (void)0
30
#endif
Damien's avatar
Damien committed
31

32
// locals and globals need to be pointers because they can be the same in outer module scope
33
34
35
36
STATIC mp_map_t *map_locals;        // current local namespace; aliases map_globals at outer module scope (see rt_init)
STATIC mp_map_t *map_globals;       // current global namespace
STATIC mp_map_t map_builtins;       // runtime-populated builtins, searched before the ROM builtin_table (see rt_load_global)
STATIC mp_map_t map_loaded_modules; // TODO: expose as sys.modules
37

Damien's avatar
Damien committed
38
// kind of code held in an mp_code_t entry
typedef enum {
    MP_CODE_NONE,       // slot allocated but not yet assigned (see alloc_unique_codes)
    MP_CODE_BYTE,       // byte code (assigned by rt_assign_byte_code)
    MP_CODE_NATIVE,     // machine code (assigned by rt_assign_native_code)
    MP_CODE_INLINE_ASM, // inline assembler code (assigned by rt_assign_inline_asm_code)
} mp_code_kind_t;

// descriptor for one compiled code object, stored in the unique_codes table
typedef struct _mp_code_t {
    mp_code_kind_t kind : 8; // one of MP_CODE_*
    uint scope_flags : 8;    // scope flags passed in by the compiler (byte code only; 0 otherwise)
    uint n_args : 16;        // number of arguments the code expects
    uint n_state : 16;       // byte code only: n_locals + n_stack; 0 for native/inline asm
    union {                  // payload selected by 'kind'
        struct {
            byte *code;      // pointer to the byte code
            uint len;        // length of the byte code, in bytes
        } u_byte;
        struct {
            mp_fun_t fun;    // entry point of native code
        } u_native;
        struct {
            void *fun;       // entry point of inline assembler code
        } u_inline_asm;
    };
    qstr *arg_names;         // argument names (set by rt_assign_byte_code)
} mp_code_t;
Damien's avatar
Damien committed
64

65
66
67
STATIC uint next_unique_code_id;              // next id to hand out; id 0 means "no code" (see rt_init)
STATIC machine_uint_t unique_codes_alloc = 0; // number of entries currently allocated in unique_codes
STATIC mp_code_t *unique_codes = NULL;        // table of code descriptors, indexed by unique code id
Damien's avatar
Damien committed
68

69
70
#ifdef WRITE_CODE
FILE *fp_write_code = NULL;
71
#endif
Damien's avatar
Damien committed
72

73
74
75
76
77
78
79
80
81
82
83
// builtins
// we put this table in ROM because it's always needed and takes up quite a bit of room in RAM
// in fact, it uses less ROM here in table form than the equivalent in code form initialising a dynamic mp_map_t object in RAM
// at the moment it's a linear table, but we could convert it to a const mp_map_t table with a simple preprocessing script
// if we wanted to allow dynamic modification of the builtins, we could provide an mp_map_t object which is searched before this one

// one entry of the ROM builtins table: a name and the object bound to it
typedef struct _mp_builtin_elem_t {
    qstr qstr;    // builtin's name
    mp_obj_t fun; // bound object (function, type or exception type)
} mp_builtin_elem_t;

84
// linear table of builtins, scanned by rt_load_global after map_builtins;
// terminated by the { MP_QSTR_, MP_OBJ_NULL } sentinel
STATIC const mp_builtin_elem_t builtin_table[] = {
    // built-in core functions
    { MP_QSTR___build_class__, (mp_obj_t)&mp_builtin___build_class___obj },
    { MP_QSTR___import__, (mp_obj_t)&mp_builtin___import___obj },
    { MP_QSTR___repl_print__, (mp_obj_t)&mp_builtin___repl_print___obj },

    // built-in types
    { MP_QSTR_bool, (mp_obj_t)&bool_type },
    { MP_QSTR_bytes, (mp_obj_t)&bytes_type },
#if MICROPY_ENABLE_FLOAT
    { MP_QSTR_complex, (mp_obj_t)&mp_type_complex },
#endif
    { MP_QSTR_dict, (mp_obj_t)&dict_type },
    { MP_QSTR_enumerate, (mp_obj_t)&enumerate_type },
    { MP_QSTR_filter, (mp_obj_t)&filter_type },
#if MICROPY_ENABLE_FLOAT
    { MP_QSTR_float, (mp_obj_t)&mp_type_float },
#endif
    { MP_QSTR_int, (mp_obj_t)&int_type },
    { MP_QSTR_list, (mp_obj_t)&list_type },
    { MP_QSTR_map, (mp_obj_t)&map_type },
    { MP_QSTR_set, (mp_obj_t)&set_type },
    { MP_QSTR_str, (mp_obj_t)&str_type },
    { MP_QSTR_super, (mp_obj_t)&super_type },
    { MP_QSTR_tuple, (mp_obj_t)&tuple_type },
    { MP_QSTR_type, (mp_obj_t)&mp_type_type },
    { MP_QSTR_zip, (mp_obj_t)&zip_type },

    { MP_QSTR_classmethod, (mp_obj_t)&mp_type_classmethod },
    { MP_QSTR_staticmethod, (mp_obj_t)&mp_type_staticmethod },

    // built-in user functions
    { MP_QSTR_abs, (mp_obj_t)&mp_builtin_abs_obj },
    { MP_QSTR_all, (mp_obj_t)&mp_builtin_all_obj },
    { MP_QSTR_any, (mp_obj_t)&mp_builtin_any_obj },
    { MP_QSTR_callable, (mp_obj_t)&mp_builtin_callable_obj },
    { MP_QSTR_chr, (mp_obj_t)&mp_builtin_chr_obj },
    { MP_QSTR_dir, (mp_obj_t)&mp_builtin_dir_obj },
    { MP_QSTR_divmod, (mp_obj_t)&mp_builtin_divmod_obj },
    { MP_QSTR_eval, (mp_obj_t)&mp_builtin_eval_obj },
    { MP_QSTR_exec, (mp_obj_t)&mp_builtin_exec_obj },
    { MP_QSTR_hash, (mp_obj_t)&mp_builtin_hash_obj },
    { MP_QSTR_id, (mp_obj_t)&mp_builtin_id_obj },
    { MP_QSTR_isinstance, (mp_obj_t)&mp_builtin_isinstance_obj },
    { MP_QSTR_issubclass, (mp_obj_t)&mp_builtin_issubclass_obj },
    { MP_QSTR_iter, (mp_obj_t)&mp_builtin_iter_obj },
    { MP_QSTR_len, (mp_obj_t)&mp_builtin_len_obj },
    { MP_QSTR_max, (mp_obj_t)&mp_builtin_max_obj },
    { MP_QSTR_min, (mp_obj_t)&mp_builtin_min_obj },
    { MP_QSTR_next, (mp_obj_t)&mp_builtin_next_obj },
    { MP_QSTR_ord, (mp_obj_t)&mp_builtin_ord_obj },
    { MP_QSTR_pow, (mp_obj_t)&mp_builtin_pow_obj },
    { MP_QSTR_print, (mp_obj_t)&mp_builtin_print_obj },
    { MP_QSTR_range, (mp_obj_t)&mp_builtin_range_obj },
    { MP_QSTR_repr, (mp_obj_t)&mp_builtin_repr_obj },
    { MP_QSTR_sorted, (mp_obj_t)&mp_builtin_sorted_obj },
    { MP_QSTR_sum, (mp_obj_t)&mp_builtin_sum_obj },
    { MP_QSTR_bytearray, (mp_obj_t)&mp_builtin_bytearray_obj },

    // built-in exceptions
    { MP_QSTR_BaseException, (mp_obj_t)&mp_type_BaseException },
    { MP_QSTR_ArithmeticError, (mp_obj_t)&mp_type_ArithmeticError },
    { MP_QSTR_AssertionError, (mp_obj_t)&mp_type_AssertionError },
    { MP_QSTR_AttributeError, (mp_obj_t)&mp_type_AttributeError },
    { MP_QSTR_BufferError, (mp_obj_t)&mp_type_BufferError },
    { MP_QSTR_BytesWarning, (mp_obj_t)&mp_type_BytesWarning },
    { MP_QSTR_DeprecationWarning, (mp_obj_t)&mp_type_DeprecationWarning },
    { MP_QSTR_EOFError, (mp_obj_t)&mp_type_EOFError },
    { MP_QSTR_EnvironmentError, (mp_obj_t)&mp_type_EnvironmentError },
    { MP_QSTR_Exception, (mp_obj_t)&mp_type_Exception },
    { MP_QSTR_FloatingPointError, (mp_obj_t)&mp_type_FloatingPointError },
    { MP_QSTR_FutureWarning, (mp_obj_t)&mp_type_FutureWarning },
    { MP_QSTR_GeneratorExit, (mp_obj_t)&mp_type_GeneratorExit },
    { MP_QSTR_IOError, (mp_obj_t)&mp_type_IOError },
    { MP_QSTR_ImportError, (mp_obj_t)&mp_type_ImportError },
    { MP_QSTR_ImportWarning, (mp_obj_t)&mp_type_ImportWarning },
    { MP_QSTR_IndentationError, (mp_obj_t)&mp_type_IndentationError },
    { MP_QSTR_IndexError, (mp_obj_t)&mp_type_IndexError },
    { MP_QSTR_KeyError, (mp_obj_t)&mp_type_KeyError },
    { MP_QSTR_LookupError, (mp_obj_t)&mp_type_LookupError },
    { MP_QSTR_MemoryError, (mp_obj_t)&mp_type_MemoryError },
    { MP_QSTR_NameError, (mp_obj_t)&mp_type_NameError },
    { MP_QSTR_NotImplementedError, (mp_obj_t)&mp_type_NotImplementedError },
    { MP_QSTR_OSError, (mp_obj_t)&mp_type_OSError },
    { MP_QSTR_OverflowError, (mp_obj_t)&mp_type_OverflowError },
    { MP_QSTR_PendingDeprecationWarning, (mp_obj_t)&mp_type_PendingDeprecationWarning },
    { MP_QSTR_ReferenceError, (mp_obj_t)&mp_type_ReferenceError },
    { MP_QSTR_ResourceWarning, (mp_obj_t)&mp_type_ResourceWarning },
    { MP_QSTR_RuntimeError, (mp_obj_t)&mp_type_RuntimeError },
    { MP_QSTR_RuntimeWarning, (mp_obj_t)&mp_type_RuntimeWarning },
    { MP_QSTR_SyntaxError, (mp_obj_t)&mp_type_SyntaxError },
    { MP_QSTR_SyntaxWarning, (mp_obj_t)&mp_type_SyntaxWarning },
    { MP_QSTR_SystemError, (mp_obj_t)&mp_type_SystemError },
    { MP_QSTR_SystemExit, (mp_obj_t)&mp_type_SystemExit },
    { MP_QSTR_TabError, (mp_obj_t)&mp_type_TabError },
    { MP_QSTR_TypeError, (mp_obj_t)&mp_type_TypeError },
    { MP_QSTR_UnboundLocalError, (mp_obj_t)&mp_type_UnboundLocalError },
    { MP_QSTR_UserWarning, (mp_obj_t)&mp_type_UserWarning },
    { MP_QSTR_ValueError, (mp_obj_t)&mp_type_ValueError },
    { MP_QSTR_Warning, (mp_obj_t)&mp_type_Warning },
    { MP_QSTR_ZeroDivisionError, (mp_obj_t)&mp_type_ZeroDivisionError },
    { MP_QSTR_StopIteration, (mp_obj_t)&mp_type_StopIteration },
    // Somehow CPython managed to have OverflowError not inherit from ValueError ;-/
    // TODO: For MICROPY_CPYTHON_COMPAT==0 use ValueError to avoid exc proliferation

    // Extra builtins as defined by a port
    MICROPY_EXTRA_BUILTINS

    { MP_QSTR_, MP_OBJ_NULL }, // end of list sentinel
};

195
// a good optimising compiler will inline this if necessary
196
STATIC void mp_map_add_qstr(mp_map_t *map, qstr qstr, mp_obj_t value) {
197
198
199
    mp_map_lookup(map, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = value;
}

200
// Initialise the runtime: namespaces, builtin/module tables, unique-code table
// and (optionally) the machine-code dump file.  Must be called before any other
// rt_xxx function; paired with rt_deinit.
void rt_init(void) {
    // locals = globals for outer module (see Objects/frameobject.c/PyFrame_New())
    map_locals = map_globals = mp_map_new(1);
    mp_map_add_qstr(map_globals, MP_QSTR___name__, MP_OBJ_NEW_QSTR(MP_QSTR___main__));

    // init built-in hash table
    mp_map_init(&map_builtins, 3);

    // init loaded modules table
    mp_map_init(&map_loaded_modules, 3);

    // built-in objects
    mp_map_add_qstr(&map_builtins, MP_QSTR_Ellipsis, mp_const_ellipsis);

    // pre-create the 'array' module with its single 'array' type
    mp_obj_t m_array = mp_obj_new_module(MP_QSTR_array);
    rt_store_attr(m_array, MP_QSTR_array, (mp_obj_t)&array_type);

    // pre-create the 'collections' module with 'namedtuple'
    mp_obj_t m_collections = mp_obj_new_module(MP_QSTR_collections);
    rt_store_attr(m_collections, MP_QSTR_namedtuple, (mp_obj_t)&mp_namedtuple_obj);

#if MICROPY_CPYTHON_COMPAT
    // Precreate sys module, so "import sys" didn't throw exceptions.
    mp_obj_t m_sys = mp_obj_new_module(MP_QSTR_sys);
    // Avoid warning of unused var
    (void)m_sys;
#endif
    // init sys.path
    // for efficiency, left to platform-specific startup code
    //sys_path = mp_obj_new_list(0, NULL);
    //rt_store_attr(m_sys, MP_QSTR_path, sys_path);

    // we pre-import the micropython module
    // probably shouldn't do this, so we are compatible with CPython
    rt_store_name(MP_QSTR_micropython, (mp_obj_t)&mp_module_micropython);

    // TODO: wastes one mp_code_t structure in mem
    next_unique_code_id = 1; // 0 indicates "no code"
    unique_codes_alloc = 0;
    unique_codes = NULL;

#ifdef WRITE_CODE
    // dump file for emitted machine code; best-effort, may be NULL
    fp_write_code = fopen("out-code", "wb");
#endif
}

245
void rt_deinit(void) {
246
    m_del(mp_code_t, unique_codes, unique_codes_alloc);
247
248
249
    mp_map_free(map_globals);
    mp_map_deinit(&map_loaded_modules);
    mp_map_deinit(&map_builtins);
250
251
252
#ifdef WRITE_CODE
    if (fp_write_code != NULL) {
        fclose(fp_write_code);
Damien's avatar
Damien committed
253
    }
254
#endif
Damien's avatar
Damien committed
255
256
}

257
uint rt_get_unique_code_id(void) {
258
    return next_unique_code_id++;
Damien's avatar
Damien committed
259
260
}

261
STATIC void alloc_unique_codes(void) {
262
    if (next_unique_code_id > unique_codes_alloc) {
263
        DEBUG_printf("allocate more unique codes: " UINT_FMT " -> %u\n", unique_codes_alloc, next_unique_code_id);
264
265
        // increase size of unique_codes table
        unique_codes = m_renew(mp_code_t, unique_codes, unique_codes_alloc, next_unique_code_id);
266
        for (uint i = unique_codes_alloc; i < next_unique_code_id; i++) {
267
            unique_codes[i].kind = MP_CODE_NONE;
268
        }
269
        unique_codes_alloc = next_unique_code_id;
Damien's avatar
Damien committed
270
    }
271
272
}

273
// Fill in the unique-code slot 'unique_code_id' with a byte-code object.
// The slot must be fresh (MP_CODE_NONE); ownership of 'code' and 'arg_names'
// stays with the caller/emitter (pointers are stored, not copied).
void rt_assign_byte_code(uint unique_code_id, byte *code, uint len, int n_args, int n_locals, int n_stack, uint scope_flags, qstr *arg_names) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_BYTE;
    unique_codes[unique_code_id].scope_flags = scope_flags;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = n_locals + n_stack; // VM state = locals plus evaluation stack
    unique_codes[unique_code_id].u_byte.code = code;
    unique_codes[unique_code_id].u_byte.len = len;
    unique_codes[unique_code_id].arg_names = arg_names;

    //printf("byte code: %d bytes\n", len);

#ifdef DEBUG_PRINT
    DEBUG_printf("assign byte code: id=%d code=%p len=%u n_args=%d n_locals=%d n_stack=%d\n", unique_code_id, code, len, n_args, n_locals, n_stack);
    // hex-dump at most the first 128 bytes, 16 per row
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", code[i]);
    }
    DEBUG_printf("\n");
#if MICROPY_DEBUG_PRINTERS
    mp_byte_code_print(code, len);
#endif
#endif
}

302
// Fill in the unique-code slot 'unique_code_id' with natively-compiled code.
// The slot must be fresh (MP_CODE_NONE); 'fun' is the code's entry point and
// 'len' is only used for debug dumping.
void rt_assign_native_code(uint unique_code_id, void *fun, uint len, int n_args) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_NATIVE;
    unique_codes[unique_code_id].scope_flags = 0;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = 0; // native code manages its own state
    unique_codes[unique_code_id].u_native.fun = fun;

    //printf("native code: %d bytes\n", len);

#ifdef DEBUG_PRINT
    DEBUG_printf("assign native code: id=%d fun=%p len=%u n_args=%d\n", unique_code_id, fun, len, n_args);
    byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
    // hex-dump at most the first 128 bytes, 16 per row
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", fun_data[i]);
    }
    DEBUG_printf("\n");

#ifdef WRITE_CODE
    // also append the raw machine code to the dump file
    if (fp_write_code != NULL) {
        fwrite(fun_data, len, 1, fp_write_code);
        fflush(fp_write_code);
    }
#endif
#endif
}

334
// Fill in the unique-code slot 'unique_code_id' with inline-assembler code.
// The slot must be fresh (MP_CODE_NONE); 'fun' is the code's entry point and
// 'len' is only used for debug dumping.
void rt_assign_inline_asm_code(uint unique_code_id, void *fun, uint len, int n_args) {
    alloc_unique_codes();

    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_INLINE_ASM;
    unique_codes[unique_code_id].scope_flags = 0;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = 0; // asm code manages its own state
    unique_codes[unique_code_id].u_inline_asm.fun = fun;

#ifdef DEBUG_PRINT
    DEBUG_printf("assign inline asm code: id=%d fun=%p len=%u n_args=%d\n", unique_code_id, fun, len, n_args);
    byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
    // hex-dump at most the first 128 bytes, 16 per row
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", fun_data[i]);
    }
    DEBUG_printf("\n");

#ifdef WRITE_CODE
    // also append the raw machine code to the dump file
    // NOTE(review): unlike rt_assign_native_code this does not fflush — confirm intentional
    if (fp_write_code != NULL) {
        fwrite(fun_data, len, 1, fp_write_code);
    }
#endif
#endif
}

363
364
// Truthiness of an object per Python semantics: returns 1 (true) or 0 (false).
// Order of checks: the singletons, small ints, the type's RT_UNARY_OP_BOOL
// handler, then length (non-zero length is true); anything else is true.
int rt_is_true(mp_obj_t arg) {
    DEBUG_OP_printf("is true %p\n", arg);

    // the three singletons have fixed truth values
    if (arg == mp_const_true) {
        return 1;
    }
    if (arg == mp_const_false || arg == mp_const_none) {
        return 0;
    }

    // small int: truth is "value != 0"
    if (MP_OBJ_IS_SMALL_INT(arg)) {
        return MP_OBJ_SMALL_INT_VALUE(arg) != 0;
    }

    // ask the type, if it implements a bool unary op
    mp_obj_type_t *type = mp_obj_get_type(arg);
    if (type->unary_op != NULL) {
        mp_obj_t result = type->unary_op(RT_UNARY_OP_BOOL, arg);
        if (result != MP_OBJ_NULL) {
            return result == mp_const_true;
        }
    }

    // fall back to the length, if the object has one
    mp_obj_t len = mp_obj_len_maybe(arg);
    if (len != MP_OBJ_NULL) {
        // obj has a length, truth determined if len != 0
        return len != MP_OBJ_NEW_SMALL_INT(0);
    }

    // any other obj is true per Python semantics
    return 1;
}

// thin runtime wrapper: forwards an append on list self_in to the list primitive
mp_obj_t rt_list_append(mp_obj_t self_in, mp_obj_t arg) {
    return mp_obj_list_append(self_in, arg);
}

// Build a constant from the decimal literal text stored in the given qstr,
// by handing its characters to the decimal number parser.
mp_obj_t rt_load_const_dec(qstr qstr) {
    DEBUG_OP_printf("load '%s'\n", qstr_str(qstr));
    uint slen;
    const byte *s = qstr_data(qstr, &slen);
    return mp_parse_num_decimal((const char *)s, slen, true, false);
}

408
// Load a string constant: the qstr itself is the object (wrapped as a qstr object).
mp_obj_t rt_load_const_str(qstr qstr) {
    DEBUG_OP_printf("load '%s'\n", qstr_str(qstr));
    return MP_OBJ_NEW_QSTR(qstr);
}

413
414
415
416
417
418
419
// Build a bytes object from the raw data stored in the given qstr.
mp_obj_t rt_load_const_bytes(qstr qstr) {
    DEBUG_OP_printf("load b'%s'\n", qstr_str(qstr));
    uint n;
    const byte *buf = qstr_data(qstr, &n);
    return mp_obj_new_bytes(buf, n);
}

420
// Resolve a name: search locals first, then fall back to globals/builtins
// via rt_load_global (which raises NameError if nothing matches).
mp_obj_t rt_load_name(qstr qstr) {
    // logic: search locals, globals, builtins
    DEBUG_OP_printf("load name %s\n", qstr_str(qstr));
    mp_map_elem_t *elem = mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (elem == NULL) {
        return rt_load_global(qstr);
    }
    return elem->value;
}

431
// Resolve a name in the global scope: search globals, then the runtime
// builtins map, then the ROM builtin table; raises NameError (via nlr_jump)
// if the name is not found anywhere.
mp_obj_t rt_load_global(qstr qstr) {
    // logic: search globals, builtins
    DEBUG_OP_printf("load global %s\n", qstr_str(qstr));

    mp_map_elem_t *elem = mp_map_lookup(map_globals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (elem != NULL) {
        return elem->value;
    }

    elem = mp_map_lookup(&map_builtins, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (elem != NULL) {
        return elem->value;
    }

    // linear scan of the ROM builtin table, terminated by the MP_QSTR_ sentinel
    for (const mp_builtin_elem_t *e = &builtin_table[0]; e->qstr != MP_QSTR_; e++) {
        if (e->qstr == qstr) {
            return e->fun;
        }
    }

    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_NameError, "name '%s' is not defined", qstr_str(qstr)));
}

449
mp_obj_t rt_load_build_class(void) {
Damien's avatar
Damien committed
450
    DEBUG_OP_printf("load_build_class\n");
451
    mp_map_elem_t *elem = mp_map_lookup(&map_builtins, MP_OBJ_NEW_QSTR(MP_QSTR___build_class__), MP_MAP_LOOKUP);
452
453
454
455
    if (elem != NULL) {
        return elem->value;
    } else {
        return (mp_obj_t)&mp_builtin___build_class___obj;
Damien's avatar
Damien committed
456
457
458
    }
}

459
460
// thin runtime wrapper: read the value held by a closure cell
mp_obj_t rt_get_cell(mp_obj_t cell) {
    return mp_obj_cell_get(cell);
}

463
464
// thin runtime wrapper: store a value into a closure cell
void rt_set_cell(mp_obj_t cell, mp_obj_t val) {
    mp_obj_cell_set(cell, val);
}

467
// Bind obj to qstr in the current local namespace, creating the slot if needed.
void rt_store_name(qstr qstr, mp_obj_t obj) {
    DEBUG_OP_printf("store name %s <- %p\n", qstr_str(qstr), obj);
    mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = obj;
}

472
// Bind obj to qstr in the global namespace, creating the slot if needed.
void rt_store_global(qstr qstr, mp_obj_t obj) {
    DEBUG_OP_printf("store global %s <- %p\n", qstr_str(qstr), obj);
    mp_map_lookup(map_globals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = obj;
}

477
// Apply unary operator 'op' (an RT_UNARY_OP_* value) to arg and return the result.
// Small ints are handled inline (with overflow promotion on negation); other
// types dispatch to type->unary_op.  Raises TypeError (via nlr_jump) if the
// type does not implement the operator.
mp_obj_t rt_unary_op(int op, mp_obj_t arg) {
    DEBUG_OP_printf("unary %d %p\n", op, arg);

    if (MP_OBJ_IS_SMALL_INT(arg)) {
        mp_small_int_t val = MP_OBJ_SMALL_INT_VALUE(arg);
        switch (op) {
            case RT_UNARY_OP_BOOL:
                return MP_BOOL(val != 0);
            case RT_UNARY_OP_POSITIVE:
                return arg;
            case RT_UNARY_OP_NEGATIVE:
                // check for overflow: -MP_SMALL_INT_MIN does not fit in a small int,
                // so promote to a full int object
                if (val == MP_SMALL_INT_MIN) {
                    return mp_obj_new_int(-val);
                } else {
                    return MP_OBJ_NEW_SMALL_INT(-val);
                }
            case RT_UNARY_OP_INVERT:
                return MP_OBJ_NEW_SMALL_INT(~val);
            default:
                assert(0);
                return arg;
        }
    } else {
        mp_obj_type_t *type = mp_obj_get_type(arg);
        if (type->unary_op != NULL) {
            mp_obj_t result = type->unary_op(op, arg);
            // MP_OBJ_NULL means "operator not supported by this type"; use the
            // sentinel macro for consistency with rt_is_true and rt_binary_op
            if (result != MP_OBJ_NULL) {
                return result;
            }
        }
        // TODO specify in error message what the operator is
        nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError, "bad operand type for unary operator: '%s'", mp_obj_get_type_str(arg)));
    }
}

513
mp_obj_t rt_binary_op(int op, mp_obj_t lhs, mp_obj_t rhs) {
Damien's avatar
Damien committed
514
    DEBUG_OP_printf("binary %d %p %p\n", op, lhs, rhs);
515
516
517
518
519
520
521
522
523
524

    // TODO correctly distinguish inplace operators for mutable objects
    // lookup logic that CPython uses for +=:
    //   check for implemented +=
    //   then check for implemented +
    //   then check for implemented seq.inplace_concat
    //   then check for implemented seq.concat
    //   then fail
    // note that list does not implement + or +=, so that inplace_concat is reached first for +=

525
526
    // deal with is
    if (op == RT_BINARY_OP_IS) {
527
528
529
        return MP_BOOL(lhs == rhs);
    }

530
    // deal with == and != for all types
531
    if (op == RT_BINARY_OP_EQUAL || op == RT_BINARY_OP_NOT_EQUAL) {
532
        if (mp_obj_equal(lhs, rhs)) {
533
            if (op == RT_BINARY_OP_EQUAL) {
534
535
536
537
538
                return mp_const_true;
            } else {
                return mp_const_false;
            }
        } else {
539
            if (op == RT_BINARY_OP_EQUAL) {
540
541
542
543
544
545
546
547
                return mp_const_false;
            } else {
                return mp_const_true;
            }
        }
    }

    // deal with exception_match for all types
548
    if (op == RT_BINARY_OP_EXCEPTION_MATCH) {
549
550
551
552
553
554
        // rhs must be issubclass(rhs, BaseException)
        if (mp_obj_is_exception_type(rhs)) {
            // if lhs is an instance of an exception, then extract and use its type
            if (mp_obj_is_exception_instance(lhs)) {
                lhs = mp_obj_get_type(lhs);
            }
555
            if (mp_obj_is_subclass_fast(lhs, rhs)) {
556
557
558
559
560
561
562
                return mp_const_true;
            } else {
                return mp_const_false;
            }
        }
    }

563
    if (MP_OBJ_IS_SMALL_INT(lhs)) {
564
        mp_small_int_t lhs_val = MP_OBJ_SMALL_INT_VALUE(lhs);
565
566
        if (MP_OBJ_IS_SMALL_INT(rhs)) {
            mp_small_int_t rhs_val = MP_OBJ_SMALL_INT_VALUE(rhs);
567
568
569
570
571
572
573
574
575
            // This is a binary operation: lhs_val op rhs_val
            // We need to be careful to handle overflow; see CERT INT32-C
            // Operations that can overflow:
            //      +       result always fits in machine_int_t, then handled by SMALL_INT check
            //      -       result always fits in machine_int_t, then handled by SMALL_INT check
            //      *       checked explicitly
            //      /       if lhs=MIN and rhs=-1; result always fits in machine_int_t, then handled by SMALL_INT check
            //      %       if lhs=MIN and rhs=-1; result always fits in machine_int_t, then handled by SMALL_INT check
            //      <<      checked explicitly
576
577
578
579
580
581
582
583
            switch (op) {
                case RT_BINARY_OP_OR:
                case RT_BINARY_OP_INPLACE_OR: lhs_val |= rhs_val; break;
                case RT_BINARY_OP_XOR:
                case RT_BINARY_OP_INPLACE_XOR: lhs_val ^= rhs_val; break;
                case RT_BINARY_OP_AND:
                case RT_BINARY_OP_INPLACE_AND: lhs_val &= rhs_val; break;
                case RT_BINARY_OP_LSHIFT:
584
585
586
587
588
589
590
591
592
593
594
595
596
597
                case RT_BINARY_OP_INPLACE_LSHIFT: {
                    if (rhs_val < 0) {
                        // negative shift not allowed
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative shift count"));
                    } else if (rhs_val >= BITS_PER_WORD || lhs_val > (MP_SMALL_INT_MAX >> rhs_val) || lhs_val < (MP_SMALL_INT_MIN >> rhs_val)) {
                        // left-shift will overflow, so use higher precision integer
                        lhs = mp_obj_new_int_from_ll(lhs_val);
                        goto generic_binary_op;
                    } else {
                        // use standard precision
                        lhs_val <<= rhs_val;
                    }
                    break;
                }
598
                case RT_BINARY_OP_RSHIFT:
599
600
601
602
603
604
605
606
607
                case RT_BINARY_OP_INPLACE_RSHIFT:
                    if (rhs_val < 0) {
                        // negative shift not allowed
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative shift count"));
                    } else {
                        // standard precision is enough for right-shift
                        lhs_val >>= rhs_val;
                    }
                    break;
608
609
610
611
612
                case RT_BINARY_OP_ADD:
                case RT_BINARY_OP_INPLACE_ADD: lhs_val += rhs_val; break;
                case RT_BINARY_OP_SUBTRACT:
                case RT_BINARY_OP_INPLACE_SUBTRACT: lhs_val -= rhs_val; break;
                case RT_BINARY_OP_MULTIPLY:
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
                case RT_BINARY_OP_INPLACE_MULTIPLY: {

                    // If long long type exists and is larger than machine_int_t, then
                    // we can use the following code to perform overflow-checked multiplication.
                    // Otherwise (eg in x64 case) we must use the branching code below.
                    #if 0
                    // compute result using long long precision
                    long long res = (long long)lhs_val * (long long)rhs_val;
                    if (res > MP_SMALL_INT_MAX || res < MP_SMALL_INT_MIN) {
                        // result overflowed SMALL_INT, so return higher precision integer
                        return mp_obj_new_int_from_ll(res);
                    } else {
                        // use standard precision
                        lhs_val = (mp_small_int_t)res;
                    }
                    #endif

                    if (lhs_val > 0) { // lhs_val is positive
                        if (rhs_val > 0) { // lhs_val and rhs_val are positive
                            if (lhs_val > (MP_SMALL_INT_MAX / rhs_val)) {
                                goto mul_overflow;
                            }
                        } else { // lhs_val positive, rhs_val nonpositive
                            if (rhs_val < (MP_SMALL_INT_MIN / lhs_val)) {
                                goto mul_overflow;
                            }
                        } // lhs_val positive, rhs_val nonpositive
                    } else { // lhs_val is nonpositive
                        if (rhs_val > 0) { // lhs_val is nonpositive, rhs_val is positive
                            if (lhs_val < (MP_SMALL_INT_MIN / rhs_val)) {
                                goto mul_overflow;
                            }
                        } else { // lhs_val and rhs_val are nonpositive
                            if (lhs_val != 0 && rhs_val < (MP_SMALL_INT_MAX / lhs_val)) {
                                goto mul_overflow;
                            }
                        } // End if lhs_val and rhs_val are nonpositive
                    } // End if lhs_val is nonpositive

                    // use standard precision
                    return MP_OBJ_NEW_SMALL_INT(lhs_val * rhs_val);

                mul_overflow:
                    // use higher precision
                    lhs = mp_obj_new_int_from_ll(lhs_val);
                    goto generic_binary_op;

                    break;
                }
662
663
                case RT_BINARY_OP_FLOOR_DIVIDE:
                case RT_BINARY_OP_INPLACE_FLOOR_DIVIDE: lhs_val /= rhs_val; break;
664
                #if MICROPY_ENABLE_FLOAT
665
666
                case RT_BINARY_OP_TRUE_DIVIDE:
                case RT_BINARY_OP_INPLACE_TRUE_DIVIDE: return mp_obj_new_float((mp_float_t)lhs_val / (mp_float_t)rhs_val);
667
                #endif
668
669
670
671
672
673
674

                // TODO implement modulo as specified by Python
                case RT_BINARY_OP_MODULO:
                case RT_BINARY_OP_INPLACE_MODULO: lhs_val %= rhs_val; break;

                case RT_BINARY_OP_POWER:
                case RT_BINARY_OP_INPLACE_POWER:
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
                    if (rhs_val < 0) {
                        #if MICROPY_ENABLE_FLOAT
                        lhs = mp_obj_new_float(lhs_val);
                        goto generic_binary_op;
                        #else
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative power with no float support"));
                        #endif
                    } else {
                        // TODO check for overflow
                        machine_int_t ans = 1;
                        while (rhs_val > 0) {
                            if (rhs_val & 1) {
                                ans *= lhs_val;
                            }
                            lhs_val *= lhs_val;
                            rhs_val /= 2;
691
                        }
692
                        lhs_val = ans;
693
                    }
694
                    break;
695
696
697
698
                case RT_BINARY_OP_LESS: return MP_BOOL(lhs_val < rhs_val); break;
                case RT_BINARY_OP_MORE: return MP_BOOL(lhs_val > rhs_val); break;
                case RT_BINARY_OP_LESS_EQUAL: return MP_BOOL(lhs_val <= rhs_val); break;
                case RT_BINARY_OP_MORE_EQUAL: return MP_BOOL(lhs_val >= rhs_val); break;
699

700
701
                default: assert(0);
            }
702
703
            // TODO: We just should make mp_obj_new_int() inline and use that
            if (MP_OBJ_FITS_SMALL_INT(lhs_val)) {
704
                return MP_OBJ_NEW_SMALL_INT(lhs_val);
705
706
            } else {
                return mp_obj_new_int(lhs_val);
707
            }
708
#if MICROPY_ENABLE_FLOAT
709
        } else if (MP_OBJ_IS_TYPE(rhs, &mp_type_float)) {
710
            return mp_obj_float_binary_op(op, lhs_val, rhs);
711
        } else if (MP_OBJ_IS_TYPE(rhs, &mp_type_complex)) {
712
            return mp_obj_complex_binary_op(op, lhs_val, 0, rhs);
713
#endif
714
        }
715
    }
716

717
    /* deal with `in`
718
719
     *
     * NOTE `a in b` is `b.__contains__(a)`, hence why the generic dispatch
Damien George's avatar
Damien George committed
720
     * needs to go below with swapped arguments
721
     */
722
    if (op == RT_BINARY_OP_IN) {
723
724
725
        mp_obj_type_t *type = mp_obj_get_type(rhs);
        if (type->binary_op != NULL) {
            mp_obj_t res = type->binary_op(op, rhs, lhs);
Damien George's avatar
Damien George committed
726
            if (res != MP_OBJ_NULL) {
727
                return res;
John R. Lenton's avatar
John R. Lenton committed
728
            }
729
730
731
732
733
734
735
        }
        if (type->getiter != NULL) {
            /* second attempt, walk the iterator */
            mp_obj_t next = NULL;
            mp_obj_t iter = rt_getiter(rhs);
            while ((next = rt_iternext(iter)) != mp_const_stop_iteration) {
                if (mp_obj_equal(next, lhs)) {
736
                    return mp_const_true;
John R. Lenton's avatar
John R. Lenton committed
737
                }
738
            }
739
            return mp_const_false;
740
741
742
        }

        nlr_jump(mp_obj_new_exception_msg_varg(
743
                     &mp_type_TypeError, "'%s' object is not iterable",
744
745
746
747
                     mp_obj_get_type_str(rhs)));
        return mp_const_none;
    }

748
    // generic binary_op supplied by type
749
750
751
    mp_obj_type_t *type;
generic_binary_op:
    type = mp_obj_get_type(lhs);
752
753
754
755
    if (type->binary_op != NULL) {
        mp_obj_t result = type->binary_op(op, lhs, rhs);
        if (result != MP_OBJ_NULL) {
            return result;
Damien's avatar
Damien committed
756
757
        }
    }
758

759
760
    // TODO implement dispatch for reverse binary ops

John R. Lenton's avatar
John R. Lenton committed
761
    // TODO specify in error message what the operator is
762
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError,
John R. Lenton's avatar
John R. Lenton committed
763
764
        "unsupported operand types for binary operator: '%s', '%s'",
        mp_obj_get_type_str(lhs), mp_obj_get_type_str(rhs)));
765
    return mp_const_none;
Damien's avatar
Damien committed
766
767
}

768
// Instantiate a callable object from an entry in the global unique-code table.
// def_args: default positional argument values (or MP_OBJ_NULL for none).
// An id outside [1, next_unique_code_id) yields mp_const_none rather than raising.
mp_obj_t rt_make_function_from_id(int unique_code_id, mp_obj_t def_args) {
    DEBUG_OP_printf("make_function_from_id %d\n", unique_code_id);
    if (unique_code_id < 1 || unique_code_id >= next_unique_code_id) {
        // illegal code id
        return mp_const_none;
    }

    // build the function object according to how the code was emitted
    mp_code_t *code = &unique_codes[unique_code_id];
    mp_obj_t fun_obj;
    switch (code->kind) {
        case MP_CODE_BYTE:
            // bytecode to be interpreted by the VM
            fun_obj = mp_obj_new_fun_bc(code->scope_flags, code->arg_names, code->n_args, def_args, code->n_state, code->u_byte.code);
            break;
        case MP_CODE_NATIVE:
            // machine code produced by the native emitter
            fun_obj = rt_make_function_n(code->n_args, code->u_native.fun);
            break;
        case MP_CODE_INLINE_ASM:
            // code produced by the inline assembler
            fun_obj = mp_obj_new_fun_asm(code->n_args, code->u_inline_asm.fun);
            break;
        default:
            assert(0);
            fun_obj = mp_const_none;
    }

    // a generator function is wrapped so that calling it creates a generator instance
    if ((code->scope_flags & MP_SCOPE_FLAG_GENERATOR) != 0) {
        fun_obj = mp_obj_new_gen_wrap(fun_obj);
    }

    return fun_obj;
}

801
// Make a closure: the function identified by unique_code_id together with
// closure_tuple, the tuple of its closed-over variables.
mp_obj_t rt_make_closure_from_id(int unique_code_id, mp_obj_t closure_tuple) {
    DEBUG_OP_printf("make_closure_from_id %d\n", unique_code_id);
    // build the underlying function (no default args), then wrap it
    return mp_obj_new_closure(rt_make_function_from_id(unique_code_id, MP_OBJ_NULL), closure_tuple);
}

809
// Call fun with no positional and no keyword arguments.
mp_obj_t rt_call_function_0(mp_obj_t fun) {
    return rt_call_function_n_kw(fun, 0, 0, NULL);
}

813
// Call fun with a single positional argument and no keyword arguments.
mp_obj_t rt_call_function_1(mp_obj_t fun, mp_obj_t arg) {
    return rt_call_function_n_kw(fun, 1, 0, &arg);
}

817
818
// Call fun with exactly two positional arguments and no keyword arguments.
mp_obj_t rt_call_function_2(mp_obj_t fun, mp_obj_t arg1, mp_obj_t arg2) {
    // pack the two arguments into a contiguous array for the generic call path
    mp_obj_t call_args[2] = { arg1, arg2 };
    return rt_call_function_n_kw(fun, 2, 0, call_args);
}

824
825
826
827
828
829
// wrapper that accepts n_args and n_kw in one argument
// (n_args packed in the low byte, n_kw in the next byte)
// native emitter can only pass at most 3 arguments to a function
mp_obj_t rt_call_function_n_kw_for_native(mp_obj_t fun_in, uint n_args_kw, const mp_obj_t *args) {
    return rt_call_function_n_kw(fun_in, n_args_kw & 0xff, (n_args_kw >> 8) & 0xff, args);
}

830
831
// Generic call entry point: dispatch a call through the object's type.
// args contains, eg: arg0  arg1  key0  value0  key1  value1
mp_obj_t rt_call_function_n_kw(mp_obj_t fun_in, uint n_args, uint n_kw, const mp_obj_t *args) {
    // TODO improve this: fun object can specify its type and we parse here the arguments,
    // passing to the function arrays of fixed and keyword arguments

    DEBUG_OP_printf("calling function %p(n_args=%d, n_kw=%d, args=%p)\n", fun_in, n_args, n_kw, args);

    // an object without a call slot in its type is not callable
    mp_obj_type_t *fun_type = mp_obj_get_type(fun_in);
    if (fun_type->call == NULL) {
        nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError, "'%s' object is not callable", mp_obj_get_type_str(fun_in)));
    }

    return fun_type->call(fun_in, n_args, n_kw, args);
}

848
849
// args contains: fun  self/NULL  arg(0)  ...  arg(n_args-2)  arg(n_args-1)  kw_key(0)  kw_val(0)  ... kw_key(n_kw-1)  kw_val(n_kw-1)
// if n_args==0 and n_kw==0 then there are only fun and self/NULL
850
mp_obj_t rt_call_method_n_kw(uint n_args, uint n_kw, const mp_obj_t *args) {
851
852
853
    DEBUG_OP_printf("call method (fun=%p, self=%p, n_args=%u, n_kw=%u, args=%p)\n", args[0], args[1], n_args, n_kw, args);
    int adjust = (args[1] == NULL) ? 0 : 1;
    return rt_call_function_n_kw(args[0], n_args + adjust, n_kw, args + 2 - adjust);
854
855
}

856
// Build a new tuple containing the n_args objects in items.
mp_obj_t rt_build_tuple(int n_args, mp_obj_t *items) {
    return mp_obj_new_tuple(n_args, items);
}

860
// Build a new list containing the n_args objects in items.
mp_obj_t rt_build_list(int n_args, mp_obj_t *items) {
    return mp_obj_new_list(n_args, items);
}

864
865
// Build a new set containing the n_args objects in items.
mp_obj_t rt_build_set(int n_args, mp_obj_t *items) {
    return mp_obj_new_set(n_args, items);
}

868
// Add item to set; returns the set itself so the caller can chain further stores.
mp_obj_t rt_store_set(mp_obj_t set, mp_obj_t item) {
    mp_obj_set_store(set, item);
    return set;
}

873
// Unpack seq_in into exactly num items (for multiple-assignment targets).
// unpacked items are stored in reverse order into the array pointed to by items
// Raises ValueError (via nlr_jump) if seq_in yields fewer or more than num items.
void rt_unpack_sequence(mp_obj_t seq_in, uint num, mp_obj_t *items) {
    uint seq_len;
    if (MP_OBJ_IS_TYPE(seq_in, &tuple_type) || MP_OBJ_IS_TYPE(seq_in, &list_type)) {
        // fast path: tuples and lists expose their items array directly
        mp_obj_t *seq_items;
        if (MP_OBJ_IS_TYPE(seq_in, &tuple_type)) {
            mp_obj_tuple_get(seq_in, &seq_len, &seq_items);
        } else {
            mp_obj_list_get(seq_in, &seq_len, &seq_items);
        }
        if (seq_len < num) {
            goto too_short;
        } else if (seq_len > num) {
            goto too_long;
        }
        // copy in reverse so items[0] receives the last element
        for (uint i = 0; i < num; i++) {
            items[i] = seq_items[num - 1 - i];
        }
    } else {
        // generic path: walk the iterator, counting elements in seq_len so the
        // too_short error message below reports how many were actually obtained
        mp_obj_t iterable = rt_getiter(seq_in);

        for (seq_len = 0; seq_len < num; seq_len++) {
            mp_obj_t el = rt_iternext(iterable);
            if (el == mp_const_stop_iteration) {
                goto too_short;
            }
            items[num - 1 - seq_len] = el;
        }
        // one extra element means the iterable was too long
        if (rt_iternext(iterable) != mp_const_stop_iteration) {
            goto too_long;
        }
    }
    return;

too_short:
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_ValueError, "need more than %d values to unpack", seq_len));
too_long:
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_ValueError, "too many values to unpack (expected %d)", num));
}

913
914
// Create a new dict; n_args is passed through to mp_obj_new_dict
// (presumably an initial-capacity hint — TODO confirm against mp_obj_new_dict)
mp_obj_t rt_build_map(int n_args) {
    return mp_obj_new_dict(n_args);
}

917
918
919
// Store key/value into map.
mp_obj_t rt_store_map(mp_obj_t map, mp_obj_t key, mp_obj_t value) {
    // map should always be a dict
    return mp_obj_dict_store(map, key, value);
}

922
// Load attribute attr from base; methods come back as bound-method objects.
mp_obj_t rt_load_attr(mp_obj_t base, qstr attr) {
    DEBUG_OP_printf("load attr %p.%s\n", base, qstr_str(attr));
    // delegate the lookup to load_method; dest[1] distinguishes the two outcomes
    mp_obj_t dest[2];
    rt_load_method(base, attr, dest);
    if (dest[1] != MP_OBJ_NULL) {
        // a method with a self object: package them into a bound method
        return mp_obj_new_bound_meth(dest[0], dest[1]);
    }
    // a plain attribute value
    return dest[0];
}

936
937
938
// no attribute found, returns:     dest[0] == MP_OBJ_NULL, dest[1] == MP_OBJ_NULL
// normal attribute found, returns: dest[0] == <attribute>, dest[1] == MP_OBJ_NULL
// method attribute found, returns: dest[0] == <method>,    dest[1] == <self>
939
STATIC void rt_load_method_maybe(mp_obj_t base, qstr attr, mp_obj_t *dest) {
940
941
942
943
944
945
946
947
948
949
950
951
952
    // clear output to indicate no attribute/method found yet
    dest[0] = MP_OBJ_NULL;
    dest[1] = MP_OBJ_NULL;

    // get the type
    mp_obj_type_t *type = mp_obj_get_type(base);

    // if this type can do its own load, then call it
    if (type->load_attr != NULL) {
        type->load_attr(base, attr, dest);
    }

    // if nothing found yet, look for built-in and generic names
953
    if (dest[0] == MP_OBJ_NULL) {
Damien George's avatar
Damien George committed
954
955
956
957
        if (attr == MP_QSTR___class__) {
            // a.__class__ is equivalent to type(a)
            dest[0] = type;
        } else if (attr == MP_QSTR___next__ && type->iternext != NULL) {
958
959
            dest[0] = (mp_obj_t)&mp_builtin_next_obj;
            dest[1] = base;
960
961
        } else if (type->load_attr == NULL) {
            // generic method lookup if type didn't provide a specific one
962
            // this is a lookup in the object (ie not class or type)
963
964
965
966
            const mp_method_t *meth = type->methods;
            if (meth != NULL) {
                for (; meth->name != NULL; meth++) {
                    if (strcmp(meth->name, qstr_str(attr)) == 0) {