emitnative.c 95.3 KB
Newer Older
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
/*
 * This file is part of the Micro Python project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2013, 2014 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
// Essentially normal Python has 1 type: Python objects
// Viper has more than 1 type, and is just a more complicated (a superset of) Python.
// If you declare everything in Viper as a Python object (ie omit type decls) then
// it should in principle be exactly the same as Python native.
// Having types means having more opcodes, like binary_op_nat_nat, binary_op_nat_obj etc.
// In practice we won't have a VM but rather do this in asm which is actually very minimal.

// Because it breaks strict Python equivalence it should be a completely separate
// decorator.  It breaks equivalence because overflow on integers wraps around.
// It shouldn't break equivalence if you don't use the new types, but since the
// type decls might be used in normal Python for other reasons, it's probably safest,
// cleanest and clearest to make it a separate decorator.

// Actually, it does break equivalence because integers default to native integers,
// not Python objects.

// for x in l[0:8]: can be compiled into a native loop if l has pointer type

#include <stdio.h>
#include <string.h>
#include <assert.h>

49
50
#include "py/nlr.h"
#include "py/emit.h"
51
#include "py/bc.h"
52

53
54
55
56
57
58
59
#if 0 // print debugging info
#define DEBUG_PRINT (1)
// Route debug output to printf; the previous definition was self-referential
// (#define DEBUG_printf DEBUG_printf), which left DEBUG_printf an undeclared
// function whenever this branch was enabled.
#define DEBUG_printf printf
#else // don't print debugging info
#define DEBUG_printf(...) (void)0
#endif

60
// wrapper around everything in this file

#if (MICROPY_EMIT_X64 && N_X64) \
    || (MICROPY_EMIT_X86 && N_X86) \
    || (MICROPY_EMIT_THUMB && N_THUMB) \
    || (MICROPY_EMIT_ARM && N_ARM) \
    || (MICROPY_EMIT_XTENSA && N_XTENSA)
// NOTE: the final condition above must NOT end with a line-continuation
// backslash, otherwise the following lines are silently pulled into the #if.

// this is defined so that the assembler exports generic assembler API macros
#define GENERIC_ASM_API (1)

70
#if N_X64

// x64 specific stuff
#include "py/asmx64.h"
#define EXPORT_FUN(name) emit_native_x64_##name

76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
#elif N_X86

// x86 specific stuff

// Number of arguments for each runtime helper; needed on x86 because
// arguments are passed on the stack and the caller must pop them.
STATIC byte mp_f_n_args[MP_F_NUMBER_OF] = {
    [MP_F_CONVERT_OBJ_TO_NATIVE] = 2,
    [MP_F_CONVERT_NATIVE_TO_OBJ] = 2,
    [MP_F_LOAD_NAME] = 1,
    [MP_F_LOAD_GLOBAL] = 1,
    [MP_F_LOAD_BUILD_CLASS] = 0,
    [MP_F_LOAD_ATTR] = 2,
    [MP_F_LOAD_METHOD] = 3,
    [MP_F_STORE_NAME] = 2,
    [MP_F_STORE_GLOBAL] = 2,
    [MP_F_STORE_ATTR] = 3,
    [MP_F_OBJ_SUBSCR] = 3,
    [MP_F_OBJ_IS_TRUE] = 1,
    [MP_F_UNARY_OP] = 2,
    [MP_F_BINARY_OP] = 3,
    [MP_F_BUILD_TUPLE] = 2,
    [MP_F_BUILD_LIST] = 2,
    [MP_F_LIST_APPEND] = 2,
    [MP_F_BUILD_MAP] = 1,
    [MP_F_STORE_MAP] = 3,
#if MICROPY_PY_BUILTINS_SET
    [MP_F_BUILD_SET] = 2,
    [MP_F_STORE_SET] = 2,
#endif
    [MP_F_MAKE_FUNCTION_FROM_RAW_CODE] = 3,
    [MP_F_NATIVE_CALL_FUNCTION_N_KW] = 3,
    [MP_F_CALL_METHOD_N_KW] = 3,
    [MP_F_CALL_METHOD_N_KW_VAR] = 3,
    [MP_F_GETITER] = 1,
    [MP_F_ITERNEXT] = 1,
    [MP_F_NLR_PUSH] = 1,
    [MP_F_NLR_POP] = 0,
    [MP_F_NATIVE_RAISE] = 1,
    [MP_F_IMPORT_NAME] = 3,
    [MP_F_IMPORT_FROM] = 2,
    [MP_F_IMPORT_ALL] = 1,
#if MICROPY_PY_BUILTINS_SLICE
    [MP_F_NEW_SLICE] = 3,
#endif
    [MP_F_UNPACK_SEQUENCE] = 3,
    [MP_F_UNPACK_EX] = 3,
    [MP_F_DELETE_NAME] = 1,
    [MP_F_DELETE_GLOBAL] = 1,
    [MP_F_NEW_CELL] = 1,
    [MP_F_MAKE_CLOSURE_FROM_RAW_CODE] = 3,
    [MP_F_SETUP_CODE_STATE] = 5,
};

#include "py/asmx86.h"
#define EXPORT_FUN(name) emit_native_x86_##name

131
#elif N_THUMB

// thumb specific stuff
#include "py/asmthumb.h"
#define EXPORT_FUN(name) emit_native_thumb_##name

#elif N_ARM

// ARM specific stuff
#include "py/asmarm.h"
#define EXPORT_FUN(name) emit_native_arm_##name

#elif N_XTENSA

// Xtensa specific stuff
#include "py/asmxtensa.h"
#define EXPORT_FUN(name) emit_native_xtensa_##name

149
150
151
#else

#error unknown native emitter

#endif

155
156
157
158
// Record a ViperTypeError in the compiler's error slot; compilation of the
// current function continues but the stored exception is raised afterwards.
#define EMIT_NATIVE_VIPER_TYPE_ERROR(emit, ...) do { \
        *emit->error_slot = mp_obj_new_exception_msg_varg(&mp_type_ViperTypeError, __VA_ARGS__); \
    } while (0)

159
// Where a (virtual) Python-stack entry currently lives.
typedef enum {
    STACK_VALUE, // stored in the C stack slot reserved for it
    STACK_REG,   // held in a machine register (data.u_reg)
    STACK_IMM,   // a compile-time immediate (data.u_imm), not yet materialised
} stack_info_kind_t;
164

165
// these enums must be distinct and the bottom 4 bits
166
// must correspond to the correct MP_NATIVE_TYPE_xxx value
167
typedef enum {
168
169
170
171
    VTYPE_PYOBJ = 0x00 | MP_NATIVE_TYPE_OBJ,
    VTYPE_BOOL = 0x00 | MP_NATIVE_TYPE_BOOL,
    VTYPE_INT = 0x00 | MP_NATIVE_TYPE_INT,
    VTYPE_UINT = 0x00 | MP_NATIVE_TYPE_UINT,
172
173
174
175
    VTYPE_PTR = 0x00 | MP_NATIVE_TYPE_PTR,
    VTYPE_PTR8 = 0x00 | MP_NATIVE_TYPE_PTR8,
    VTYPE_PTR16 = 0x00 | MP_NATIVE_TYPE_PTR16,
    VTYPE_PTR32 = 0x00 | MP_NATIVE_TYPE_PTR32,
176

177
    VTYPE_PTR_NONE = 0x50 | MP_NATIVE_TYPE_PTR,
178

179
180
    VTYPE_UNBOUND = 0x60 | MP_NATIVE_TYPE_OBJ,
    VTYPE_BUILTIN_CAST = 0x70 | MP_NATIVE_TYPE_OBJ,
181
182
} vtype_kind_t;

183
184
185
186
187
188
189
190
191
// Map a viper value type to its user-visible name (for error messages).
STATIC qstr vtype_to_qstr(vtype_kind_t vtype) {
    switch (vtype) {
        case VTYPE_PYOBJ: return MP_QSTR_object;
        case VTYPE_BOOL: return MP_QSTR_bool;
        case VTYPE_INT: return MP_QSTR_int;
        case VTYPE_UINT: return MP_QSTR_uint;
        case VTYPE_PTR: return MP_QSTR_ptr;
        case VTYPE_PTR8: return MP_QSTR_ptr8;
        case VTYPE_PTR16: return MP_QSTR_ptr16;
        case VTYPE_PTR32: return MP_QSTR_ptr32;
        case VTYPE_PTR_NONE: default: return MP_QSTR_None;
    }
}

197
198
199
200
201
typedef struct _stack_info_t {
    vtype_kind_t vtype;
    stack_info_kind_t kind;
    union {
        int u_reg;
202
        mp_int_t u_imm;
203
    } data;
204
205
} stack_info_t;

206
struct _emit_t {
207
    mp_obj_t *error_slot;
208
209
210
    int pass;

    bool do_viper_types;
211

212
213
    vtype_kind_t return_vtype;

214
    mp_uint_t local_vtype_alloc;
215
    vtype_kind_t *local_vtype;
216

217
    mp_uint_t stack_info_alloc;
218
    stack_info_t *stack_info;
219
    vtype_kind_t saved_stack_vtype;
220

221
    int prelude_offset;
222
    int const_table_offset;
223
    int n_state;
224
225
226
227
228
229
230
    int stack_start;
    int stack_size;

    bool last_emit_was_return_value;

    scope_t *scope;

231
    ASM_T *as;
232
233
};

234
// Allocate and initialise a native emitter instance.
emit_t *EXPORT_FUN(new)(mp_obj_t *error_slot, mp_uint_t max_num_labels) {
    emit_t *emit = m_new0(emit_t, 1);
    emit->error_slot = error_slot;
    emit->as = m_new0(ASM_T, 1);
    mp_asm_base_init(&emit->as->base, max_num_labels);
    return emit;
}

242
// Free an emitter created by EXPORT_FUN(new), including all owned buffers.
void EXPORT_FUN(free)(emit_t *emit) {
    mp_asm_base_deinit(&emit->as->base, false);
    m_del_obj(ASM_T, emit->as);
    m_del(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc);
    m_del(stack_info_t, emit->stack_info, emit->stack_info_alloc);
    m_del_obj(emit_t, emit);
}

250
251
252
253
254
255
256
257
258
259
260
261
262
// Handle a native-type pragma: either enable/disable viper typing, or record
// the declared type (arg2, a qstr) for the return value or local number arg1.
STATIC void emit_native_set_native_type(emit_t *emit, mp_uint_t op, mp_uint_t arg1, qstr arg2) {
    switch (op) {
        case MP_EMIT_NATIVE_TYPE_ENABLE:
            emit->do_viper_types = arg1;
            break;

        default: {
            vtype_kind_t type;
            switch (arg2) {
                case MP_QSTR_object: type = VTYPE_PYOBJ; break;
                case MP_QSTR_bool: type = VTYPE_BOOL; break;
                case MP_QSTR_int: type = VTYPE_INT; break;
                case MP_QSTR_uint: type = VTYPE_UINT; break;
                case MP_QSTR_ptr: type = VTYPE_PTR; break;
                case MP_QSTR_ptr8: type = VTYPE_PTR8; break;
                case MP_QSTR_ptr16: type = VTYPE_PTR16; break;
                case MP_QSTR_ptr32: type = VTYPE_PTR32; break;
                default: EMIT_NATIVE_VIPER_TYPE_ERROR(emit, "unknown type '%q'", arg2); return;
            }
            if (op == MP_EMIT_NATIVE_TYPE_RETURN) {
                emit->return_vtype = type;
            } else {
                assert(arg1 < emit->local_vtype_alloc);
                emit->local_vtype[arg1] = type;
            }
            break;
        }
    }
}

280
STATIC void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest);
281
STATIC void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg);
282
STATIC void emit_native_load_fast(emit_t *emit, qstr qst, mp_uint_t local_num);
283
STATIC void emit_native_store_fast(emit_t *emit, qstr qst, mp_uint_t local_num);
284

285
#define STATE_START (sizeof(mp_code_state_t) / sizeof(mp_uint_t))
286

287
// Begin a compiler pass over one scope: reset per-pass state, (re)allocate the
// type-tracking buffers, and emit the function entry code (viper or normal).
STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
    DEBUG_printf("start_pass(pass=%u, scope=%p)\n", pass, scope);

    emit->pass = pass;
    emit->stack_start = 0;
    emit->stack_size = 0;
    emit->last_emit_was_return_value = false;
    emit->scope = scope;

    // allocate memory for keeping track of the types of locals
    if (emit->local_vtype_alloc < scope->num_locals) {
        emit->local_vtype = m_renew(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc, scope->num_locals);
        emit->local_vtype_alloc = scope->num_locals;
    }

    // allocate memory for keeping track of the objects on the stack
    // XXX don't know stack size on entry, and it should be maximum over all scopes
    // XXX this is such a big hack and really needs to be fixed
    if (emit->stack_info == NULL) {
        emit->stack_info_alloc = scope->stack_size + 200;
        emit->stack_info = m_new(stack_info_t, emit->stack_info_alloc);
    }

    // set default type for return
    emit->return_vtype = VTYPE_PYOBJ;

    // set default type for arguments
    mp_uint_t num_args = emit->scope->num_pos_args + emit->scope->num_kwonly_args;
    if (scope->scope_flags & MP_SCOPE_FLAG_VARARGS) {
        num_args += 1;
    }
    if (scope->scope_flags & MP_SCOPE_FLAG_VARKEYWORDS) {
        num_args += 1;
    }
    for (mp_uint_t i = 0; i < num_args; i++) {
        emit->local_vtype[i] = VTYPE_PYOBJ;
    }

    // local variables begin unbound, and have unknown type
    for (mp_uint_t i = num_args; i < emit->local_vtype_alloc; i++) {
        emit->local_vtype[i] = VTYPE_UNBOUND;
    }

    // values on stack begin unbound
    for (mp_uint_t i = 0; i < emit->stack_info_alloc; i++) {
        emit->stack_info[i].kind = STACK_VALUE;
        emit->stack_info[i].vtype = VTYPE_UNBOUND;
    }

    mp_asm_base_start_pass(&emit->as->base, pass == MP_PASS_EMIT ? MP_ASM_PASS_EMIT : MP_ASM_PASS_COMPUTE);

    // generate code for entry to function

    if (emit->do_viper_types) {

        // right now we have a restriction of maximum of 4 arguments
        if (scope->num_pos_args >= 5) {
            EMIT_NATIVE_VIPER_TYPE_ERROR(emit, "Viper functions don't currently support more than 4 arguments");
            return;
        }

        // entry to function
        int num_locals = 0;
        if (pass > MP_PASS_SCOPE) {
            num_locals = scope->num_locals - REG_LOCAL_NUM;
            if (num_locals < 0) {
                num_locals = 0;
            }
            emit->stack_start = num_locals;
            num_locals += scope->stack_size;
        }
        ASM_ENTRY(emit->as, num_locals);

        // TODO don't load r7 if we don't need it
        #if N_THUMB
        asm_thumb_mov_reg_i32(emit->as, ASM_THUMB_REG_R7, (mp_uint_t)mp_fun_table);
        #elif N_ARM
        asm_arm_mov_reg_i32(emit->as, ASM_ARM_REG_R7, (mp_uint_t)mp_fun_table);
        #endif

        // move incoming arguments to their home registers / stack slots
        #if N_X86
        for (int i = 0; i < scope->num_pos_args; i++) {
            if (i == 0) {
                asm_x86_mov_arg_to_r32(emit->as, i, REG_LOCAL_1);
            } else if (i == 1) {
                asm_x86_mov_arg_to_r32(emit->as, i, REG_LOCAL_2);
            } else if (i == 2) {
                asm_x86_mov_arg_to_r32(emit->as, i, REG_LOCAL_3);
            } else {
                asm_x86_mov_arg_to_r32(emit->as, i, REG_TEMP0);
                asm_x86_mov_r32_to_local(emit->as, REG_TEMP0, i - REG_LOCAL_NUM);
            }
        }
        #else
        for (int i = 0; i < scope->num_pos_args; i++) {
            if (i == 0) {
                ASM_MOV_REG_REG(emit->as, REG_LOCAL_1, REG_ARG_1);
            } else if (i == 1) {
                ASM_MOV_REG_REG(emit->as, REG_LOCAL_2, REG_ARG_2);
            } else if (i == 2) {
                ASM_MOV_REG_REG(emit->as, REG_LOCAL_3, REG_ARG_3);
            } else if (i == 3) {
                ASM_MOV_REG_TO_LOCAL(emit->as, REG_ARG_4, i - REG_LOCAL_NUM);
            } else {
                // TODO not implemented
                mp_not_implemented("more than 4 viper args");
            }
        }
        #endif

    } else {
        // work out size of state (locals plus stack)
        emit->n_state = scope->num_locals + scope->stack_size;

        // allocate space on C-stack for code_state structure, which includes state
        ASM_ENTRY(emit->as, STATE_START + emit->n_state);

        // TODO don't load r7 if we don't need it
        #if N_THUMB
        asm_thumb_mov_reg_i32(emit->as, ASM_THUMB_REG_R7, (mp_uint_t)mp_fun_table);
        #elif N_ARM
        asm_arm_mov_reg_i32(emit->as, ASM_ARM_REG_R7, (mp_uint_t)mp_fun_table);
        #endif

        // prepare incoming arguments for call to mp_setup_code_state
        #if N_X86
        asm_x86_mov_arg_to_r32(emit->as, 0, REG_ARG_2);
        asm_x86_mov_arg_to_r32(emit->as, 1, REG_ARG_3);
        asm_x86_mov_arg_to_r32(emit->as, 2, REG_ARG_4);
        asm_x86_mov_arg_to_r32(emit->as, 3, REG_ARG_5);
        #else
        #if N_THUMB
        ASM_MOV_REG_REG(emit->as, ASM_THUMB_REG_R4, REG_ARG_4);
        #elif N_ARM
        ASM_MOV_REG_REG(emit->as, ASM_ARM_REG_R4, REG_ARG_4);
        #else
        ASM_MOV_REG_REG(emit->as, REG_ARG_5, REG_ARG_4);
        #endif
        ASM_MOV_REG_REG(emit->as, REG_ARG_4, REG_ARG_3);
        ASM_MOV_REG_REG(emit->as, REG_ARG_3, REG_ARG_2);
        ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_ARG_1);
        #endif

        // set code_state.ip (offset from start of this function to prelude info)
        // XXX this encoding may change size
        ASM_MOV_IMM_TO_LOCAL_USING(emit->as, emit->prelude_offset, offsetof(mp_code_state_t, ip) / sizeof(mp_uint_t), REG_ARG_1);

        // set code_state.n_state
        ASM_MOV_IMM_TO_LOCAL_USING(emit->as, emit->n_state, offsetof(mp_code_state_t, n_state) / sizeof(mp_uint_t), REG_ARG_1);

        // put address of code_state into first arg
        ASM_MOV_LOCAL_ADDR_TO_REG(emit->as, 0, REG_ARG_1);

        // call mp_setup_code_state to prepare code_state structure
        #if N_THUMB
        asm_thumb_op16(emit->as, 0xb400 | (1 << ASM_THUMB_REG_R4)); // push 5th arg
        asm_thumb_bl_ind(emit->as, mp_fun_table[MP_F_SETUP_CODE_STATE], MP_F_SETUP_CODE_STATE, ASM_THUMB_REG_R4);
        asm_thumb_op16(emit->as, 0xbc00 | (1 << REG_RET)); // pop dummy (was 5th arg)
        #elif N_ARM
        asm_arm_push(emit->as, 1 << ASM_ARM_REG_R4); // push 5th arg
        asm_arm_bl_ind(emit->as, mp_fun_table[MP_F_SETUP_CODE_STATE], MP_F_SETUP_CODE_STATE, ASM_ARM_REG_R4);
        asm_arm_pop(emit->as, 1 << REG_RET); // pop dummy (was 5th arg)
        #else
        ASM_CALL_IND(emit->as, mp_fun_table[MP_F_SETUP_CODE_STATE], MP_F_SETUP_CODE_STATE);
        #endif

        // cache some locals in registers
        if (scope->num_locals > 0) {
            ASM_MOV_LOCAL_TO_REG(emit->as, STATE_START + emit->n_state - 1 - 0, REG_LOCAL_1);
            if (scope->num_locals > 1) {
                ASM_MOV_LOCAL_TO_REG(emit->as, STATE_START + emit->n_state - 1 - 1, REG_LOCAL_2);
                if (scope->num_locals > 2) {
                    ASM_MOV_LOCAL_TO_REG(emit->as, STATE_START + emit->n_state - 1 - 2, REG_LOCAL_3);
                }
            }
        }

        // set the type of closed over variables
        for (mp_uint_t i = 0; i < scope->id_info_len; i++) {
            id_info_t *id = &scope->id_info[i];
            if (id->kind == ID_INFO_KIND_CELL) {
                emit->local_vtype[id->local_num] = VTYPE_PYOBJ;
            }
        }
    }

}

475
// Finish a compiler pass: emit the function exit, append the bytecode-style
// prelude and constant table (non-viper only), and on the final pass hand the
// machine code to the emit glue.
STATIC void emit_native_end_pass(emit_t *emit) {
    if (!emit->last_emit_was_return_value) {
        ASM_EXIT(emit->as);
    }

    if (!emit->do_viper_types) {
        emit->prelude_offset = mp_asm_base_get_code_pos(&emit->as->base);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->scope_flags);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->num_pos_args);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->num_kwonly_args);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->num_def_pos_args);

        // write code info
        #if MICROPY_PERSISTENT_CODE
        mp_asm_base_data(&emit->as->base, 1, 5);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->simple_name);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->simple_name >> 8);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->source_file);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->source_file >> 8);
        #else
        mp_asm_base_data(&emit->as->base, 1, 1);
        #endif

        // bytecode prelude: initialise closed over variables
        for (int i = 0; i < emit->scope->id_info_len; i++) {
            id_info_t *id = &emit->scope->id_info[i];
            if (id->kind == ID_INFO_KIND_CELL) {
                assert(id->local_num < 255);
                mp_asm_base_data(&emit->as->base, 1, id->local_num); // write the local which should be converted to a cell
            }
        }
        mp_asm_base_data(&emit->as->base, 1, 255); // end of list sentinel

        mp_asm_base_align(&emit->as->base, ASM_WORD_SIZE);
        emit->const_table_offset = mp_asm_base_get_code_pos(&emit->as->base);

        // write argument names as qstr objects
        // see comment in corresponding part of emitbc.c about the logic here
        for (int i = 0; i < emit->scope->num_pos_args + emit->scope->num_kwonly_args; i++) {
            qstr qst = MP_QSTR__star_;
            for (int j = 0; j < emit->scope->id_info_len; ++j) {
                id_info_t *id = &emit->scope->id_info[j];
                if ((id->flags & ID_FLAG_IS_PARAM) && id->local_num == i) {
                    qst = id->qst;
                    break;
                }
            }
            mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
        }

    }

    ASM_END_PASS(emit->as);

    // check stack is back to zero size
    if (emit->stack_size != 0) {
        mp_printf(&mp_plat_print, "ERROR: stack size not back to zero; got %d\n", emit->stack_size);
    }

    if (emit->pass == MP_PASS_EMIT) {
        void *f = mp_asm_base_get_code(&emit->as->base);
        mp_uint_t f_len = mp_asm_base_get_code_size(&emit->as->base);

        // compute type signature
        // note that the lower 4 bits of a vtype are the correct MP_NATIVE_TYPE_xxx
        mp_uint_t type_sig = emit->return_vtype & 0xf;
        for (mp_uint_t i = 0; i < emit->scope->num_pos_args; i++) {
            type_sig |= (emit->local_vtype[i] & 0xf) << (i * 4 + 4);
        }

        mp_emit_glue_assign_native(emit->scope->raw_code,
            emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY,
            f, f_len, (mp_uint_t*)((byte*)f + emit->const_table_offset),
            emit->scope->num_pos_args, emit->scope->scope_flags, type_sig);
    }
}

552
// Report whether the most recently emitted opcode was a return-value.
STATIC bool emit_native_last_emit_was_return_value(emit_t *emit) {
    return emit->last_emit_was_return_value;
}

556
557
// Change the virtual Python stack depth by stack_size_delta (may be negative)
// and keep the scope's maximum stack size up to date.
STATIC void adjust_stack(emit_t *emit, mp_int_t stack_size_delta) {
    assert((mp_int_t)emit->stack_size + stack_size_delta >= 0);
    emit->stack_size += stack_size_delta;
    if (emit->pass > MP_PASS_SCOPE && emit->stack_size > emit->scope->stack_size) {
        emit->scope->stack_size = emit->stack_size;
    }
#ifdef DEBUG_PRINT
    DEBUG_printf("  adjust_stack; stack_size=%d+%d; stack now:", emit->stack_size - stack_size_delta, stack_size_delta);
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        DEBUG_printf(" (v=%d k=%d %d)", si->vtype, si->kind, si->data.u_reg);
    }
    DEBUG_printf("\n");
#endif
}

572
// Adjust the stack size by delta, initialising any newly-pushed entries.
STATIC void emit_native_adjust_stack_size(emit_t *emit, mp_int_t delta) {
    DEBUG_printf("adjust_stack_size(" INT_FMT ")\n", delta);
    // If we are adjusting the stack in a positive direction (pushing) then we
    // need to fill in values for the stack kind and vtype of the newly-pushed
    // entries.  These should be set to "value" (ie not reg or imm) because we
    // should only need to adjust the stack due to a jump to this part in the
    // code (and hence we have settled the stack before the jump).
    for (mp_int_t i = 0; i < delta; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size + i];
        si->kind = STACK_VALUE;
        // TODO we don't know the vtype to use here.  At the moment this is a
        // hack to get the case of multi comparison working.
        if (delta == 1) {
            si->vtype = emit->saved_stack_vtype;
        } else {
            si->vtype = VTYPE_PYOBJ;
        }
    }
    adjust_stack(emit, delta);
}

// Source-line tracking is not used by the native emitter; intentionally a no-op.
STATIC void emit_native_set_source_line(emit_t *emit, mp_uint_t source_line) {
    (void)emit;
    (void)source_line;
}

598
/*
STATIC void emit_pre_raw(emit_t *emit, int stack_size_delta) {
    adjust_stack(emit, stack_size_delta);
    emit->last_emit_was_return_value = false;
}
*/
604

605
// this must be called at start of emit functions
606
STATIC void emit_native_pre(emit_t *emit) {
607
608
609
610
611
612
613
614
615
616
617
618
    emit->last_emit_was_return_value = false;
    // settle the stack
    /*
    if (regs_needed != 0) {
        for (int i = 0; i < emit->stack_size; i++) {
            switch (emit->stack_info[i].kind) {
                case STACK_VALUE:
                    break;

                case STACK_REG:
                    // TODO only push reg if in regs_needed
                    emit->stack_info[i].kind = STACK_VALUE;
619
                    ASM_MOV_REG_TO_LOCAL(emit->as, emit->stack_info[i].data.u_reg, emit->stack_start + i);
620
621
622
623
624
625
626
627
628
629
                    break;

                case STACK_IMM:
                    // don't think we ever need to push imms for settling
                    //ASM_MOV_IMM_TO_LOCAL(emit->last_imm, emit->stack_start + i);
                    break;
            }
        }
    }
    */
630
631
}

632
633
634
635
636
637
638
639
// depth==0 is top, depth==1 is before top, etc
STATIC stack_info_t *peek_stack(emit_t *emit, mp_uint_t depth) {
    return &emit->stack_info[emit->stack_size - 1 - depth];
}

// depth==0 is top, depth==1 is before top, etc
STATIC vtype_kind_t peek_vtype(emit_t *emit, mp_uint_t depth) {
    return peek_stack(emit, depth)->vtype;
}
641

642
643
// pos=1 is TOS, pos=2 is next, etc
// use pos=0 for no skipping
644
STATIC void need_reg_single(emit_t *emit, int reg_needed, int skip_stack_pos) {
645
646
647
648
    skip_stack_pos = emit->stack_size - skip_stack_pos;
    for (int i = 0; i < emit->stack_size; i++) {
        if (i != skip_stack_pos) {
            stack_info_t *si = &emit->stack_info[i];
649
            if (si->kind == STACK_REG && si->data.u_reg == reg_needed) {
650
                si->kind = STACK_VALUE;
651
                ASM_MOV_REG_TO_LOCAL(emit->as, si->data.u_reg, emit->stack_start + i);
652
653
654
655
656
            }
        }
    }
}

657
// Free up all registers: spill every register-held stack entry to its slot.
STATIC void need_reg_all(emit_t *emit) {
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_REG) {
            si->kind = STACK_VALUE;
            ASM_MOV_REG_TO_LOCAL(emit->as, si->data.u_reg, emit->stack_start + i);
        }
    }
}
666

667
// Materialise every stack entry into its C-stack slot (registers first, then
// immediates), so the stack layout is canonical, e.g. before a branch.
STATIC void need_stack_settled(emit_t *emit) {
    DEBUG_printf("  need_stack_settled; stack_size=%d\n", emit->stack_size);
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_REG) {
            DEBUG_printf("    reg(%u) to local(%u)\n", si->data.u_reg, emit->stack_start + i);
            si->kind = STACK_VALUE;
            ASM_MOV_REG_TO_LOCAL(emit->as, si->data.u_reg, emit->stack_start + i);
        }
    }
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_IMM) {
            DEBUG_printf("    imm(" INT_FMT ") to local(%u)\n", si->data.u_imm, emit->stack_start + i);
            si->kind = STACK_VALUE;
            ASM_MOV_IMM_TO_LOCAL_USING(emit->as, si->data.u_imm, emit->stack_start + i, REG_TEMP0);
        }
    }
}

687
// pos=1 is TOS, pos=2 is next, etc
688
STATIC void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int reg_dest) {
689
690
    need_reg_single(emit, reg_dest, pos);
    stack_info_t *si = &emit->stack_info[emit->stack_size - pos];
691
692
693
    *vtype = si->vtype;
    switch (si->kind) {
        case STACK_VALUE:
694
            ASM_MOV_LOCAL_TO_REG(emit->as, emit->stack_start + emit->stack_size - pos, reg_dest);
695
696
            break;

697
        case STACK_REG:
698
699
            if (si->data.u_reg != reg_dest) {
                ASM_MOV_REG_REG(emit->as, reg_dest, si->data.u_reg);
700
701
702
            }
            break;

703
        case STACK_IMM:
704
            ASM_MOV_IMM_TO_REG(emit->as, si->data.u_imm, reg_dest);
705
706
707
708
            break;
    }
}

709
710
711
712
713
714
715
716
717
// does an efficient X=pop(); discard(); push(X)
// needs a (non-temp) register in case the popped element was stored in the stack
STATIC void emit_fold_stack_top(emit_t *emit, int reg_dest) {
    stack_info_t *si = &emit->stack_info[emit->stack_size - 2];
    si[0] = si[1];
    if (si->kind == STACK_VALUE) {
        // if folded element was on the stack we need to put it in a register
        ASM_MOV_LOCAL_TO_REG(emit->as, emit->stack_start + emit->stack_size - 1, reg_dest);
        si->kind = STACK_REG;
        si->data.u_reg = reg_dest;
    }
    adjust_stack(emit, -1);
}

// If stacked value is in a register and the register is not r1 or r2, then
// *reg_dest is set to that register.  Otherwise the value is put in *reg_dest.
STATIC void emit_pre_pop_reg_flexible(emit_t *emit, vtype_kind_t *vtype, int *reg_dest, int not_r1, int not_r2) {
    emit->last_emit_was_return_value = false;
    stack_info_t *si = peek_stack(emit, 0);
    if (si->kind == STACK_REG && si->data.u_reg != not_r1 && si->data.u_reg != not_r2) {
        *vtype = si->vtype;
        *reg_dest = si->data.u_reg;
        need_reg_single(emit, *reg_dest, 1);
    } else {
        emit_access_stack(emit, 1, vtype, *reg_dest);
    }
    adjust_stack(emit, -1);
}

738
// Pop and discard the top-of-stack entry (no code emitted for the value).
STATIC void emit_pre_pop_discard(emit_t *emit) {
    emit->last_emit_was_return_value = false;
    adjust_stack(emit, -1);
}

743
// Pop the top-of-stack entry into reg_dest, reporting its vtype.
STATIC void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest) {
    emit->last_emit_was_return_value = false;
    emit_access_stack(emit, 1, vtype, reg_dest);
    adjust_stack(emit, -1);
}

749
// Pop the top two entries: TOS into rega, next into regb.
STATIC void emit_pre_pop_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb) {
    emit_pre_pop_reg(emit, vtypea, rega);
    emit_pre_pop_reg(emit, vtypeb, regb);
}

// Pop the top three entries: TOS into rega, then regb, then regc.
STATIC void emit_pre_pop_reg_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb, vtype_kind_t *vtypec, int regc) {
    emit_pre_pop_reg(emit, vtypea, rega);
    emit_pre_pop_reg(emit, vtypeb, regb);
    emit_pre_pop_reg(emit, vtypec, regc);
}

760
STATIC void emit_post(emit_t *emit) {
761
    (void)emit;
762
763
}

764
765
766
767
768
STATIC void emit_post_top_set_vtype(emit_t *emit, vtype_kind_t new_vtype) {
    stack_info_t *si = &emit->stack_info[emit->stack_size - 1];
    si->vtype = new_vtype;
}

769
STATIC void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg) {
770
771
772
    stack_info_t *si = &emit->stack_info[emit->stack_size];
    si->vtype = vtype;
    si->kind = STACK_REG;
773
    si->data.u_reg = reg;
774
    adjust_stack(emit, 1);
775
776
}

777
STATIC void emit_post_push_imm(emit_t *emit, vtype_kind_t vtype, mp_int_t imm) {
778
779
780
    stack_info_t *si = &emit->stack_info[emit->stack_size];
    si->vtype = vtype;
    si->kind = STACK_IMM;
781
    si->data.u_imm = imm;
782
    adjust_stack(emit, 1);
783
784
}

785
// Push two register values; rega ends up below regb on the stack.
STATIC void emit_post_push_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
}

// Push three register values; rega deepest, regc on top.
STATIC void emit_post_push_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
    emit_post_push_reg(emit, vtypec, regc);
}

// Push four register values; rega deepest, regd on top.
STATIC void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc, vtype_kind_t vtyped, int regd) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
    emit_post_push_reg(emit, vtypec, regc);
    emit_post_push_reg(emit, vtyped, regd);
}

803
STATIC void emit_call(emit_t *emit, mp_fun_kind_t fun_kind) {
804
    need_reg_all(emit);
805
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
806
807
}

808
STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
809
    need_reg_all(emit);
810
811
    ASM_MOV_IMM_TO_REG(emit->as, arg_val, arg_reg);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
812
813
}

// Like emit_call_with_imm_arg, but the immediate is stored in the code
// aligned on a mp_uint_t boundary (via ASM_MOV_ALIGNED_IMM_TO_REG) so it
// can later be located/patched in the generated code.
STATIC void emit_call_with_imm_arg_aligned(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
    need_reg_all(emit);
    ASM_MOV_ALIGNED_IMM_TO_REG(emit->as, arg_val, arg_reg);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
}

// Emit a call to runtime helper `fun_kind` with two immediate arguments,
// each loaded into its respective argument register before the call.
STATIC void emit_call_with_2_imm_args(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2) {
    need_reg_all(emit);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val1, arg_reg1);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val2, arg_reg2);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
}

// Emit a call to runtime helper `fun_kind` with three immediate arguments.
// The first arg is stored in the code aligned on a mp_uint_t boundary
// (so it can later be located/patched); the other two are plain immediates.
STATIC void emit_call_with_3_imm_args_and_first_aligned(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2, mp_int_t arg_val3, int arg_reg3) {
    need_reg_all(emit);
    ASM_MOV_ALIGNED_IMM_TO_REG(emit->as, arg_val1, arg_reg1);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val2, arg_reg2);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val3, arg_reg3);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
}

// vtype of all n_pop objects is VTYPE_PYOBJ
// Will convert any items that are not VTYPE_PYOBJ to this type and put them back on the stack.
// If any conversions of non-immediate values are needed, then it uses REG_ARG_1, REG_ARG_2 and REG_RET.
// Otherwise, it does not use any temporary registers (but may use reg_dest before loading it with stack pointer).
//
// Two passes are made over the top n_pop stack entries: the first flushes
// immediates to their memory slots, the second converts any remaining
// native-typed values to Python objects via a runtime call.
STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_pop) {
    need_reg_all(emit);

    // First, store any immediate values to their respective place on the stack.
    for (mp_uint_t i = 0; i < n_pop; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
        // must push any imm's to stack
        // must convert them to VTYPE_PYOBJ for viper code
        if (si->kind == STACK_IMM) {
            si->kind = STACK_VALUE;
            switch (si->vtype) {
                case VTYPE_PYOBJ:
                    // already an object pointer: store it as-is
                    ASM_MOV_IMM_TO_LOCAL_USING(emit->as, si->data.u_imm, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    break;
                case VTYPE_BOOL:
                    // materialize the canonical False/True object
                    if (si->data.u_imm == 0) {
                        ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (mp_uint_t)mp_const_false, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    } else {
                        ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (mp_uint_t)mp_const_true, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    }
                    si->vtype = VTYPE_PYOBJ;
                    break;
                case VTYPE_INT:
                case VTYPE_UINT:
                    // box the machine integer as a small-int object
                    ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (uintptr_t)MP_OBJ_NEW_SMALL_INT(si->data.u_imm), emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    si->vtype = VTYPE_PYOBJ;
                    break;
                default:
                    // not handled
                    assert(0);
            }
        }

        // verify that this value is on the stack
        assert(si->kind == STACK_VALUE);
    }

    // Second, convert any non-VTYPE_PYOBJ to that type.
    for (mp_uint_t i = 0; i < n_pop; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
        if (si->vtype != VTYPE_PYOBJ) {
            mp_uint_t local_num = emit->stack_start + emit->stack_size - 1 - i;
            ASM_MOV_LOCAL_TO_REG(emit->as, local_num, REG_ARG_1);
            emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, si->vtype, REG_ARG_2); // arg2 = type
            ASM_MOV_REG_TO_LOCAL(emit->as, REG_RET, local_num);
            si->vtype = VTYPE_PYOBJ;
            DEBUG_printf("  convert_native_to_obj(local_num=" UINT_FMT ")\n", local_num);
        }
    }

    // Adjust the stack for a pop of n_pop items, and load the stack pointer into reg_dest.
    adjust_stack(emit, -n_pop);
    ASM_MOV_LOCAL_ADDR_TO_REG(emit->as, emit->stack_start + emit->stack_size, reg_dest);
}

// vtype of all n_push objects is VTYPE_PYOBJ
// Reserve n_push stack slots (marked as in-memory Python objects), load the
// address of the first reserved slot into reg_dest, and grow the stack.
STATIC void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_push) {
    need_reg_all(emit);
    for (mp_uint_t slot = 0; slot < n_push; slot++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size + slot];
        si->kind = STACK_VALUE;
        si->vtype = VTYPE_PYOBJ;
    }
    ASM_MOV_LOCAL_ADDR_TO_REG(emit->as, emit->stack_start + emit->stack_size, reg_dest);
    adjust_stack(emit, n_push);
}

// Assign label l to the current position in the generated code.
STATIC void emit_native_label_assign(emit_t *emit, mp_uint_t l) {
    DEBUG_printf("label_assign(" UINT_FMT ")\n", l);
    emit_native_pre(emit);
    // need to commit stack because we can jump here from elsewhere
    need_stack_settled(emit);
    mp_asm_base_label_assign(&emit->as->base, l);
    emit_post(emit);
}

// Emit code for an import-name operation: pop fromlist (top of stack) and
// level, call the MP_F_IMPORT_NAME runtime helper with the import name qst,
// and push the resulting module object.
STATIC void emit_native_import_name(emit_t *emit, qstr qst) {
    DEBUG_printf("import_name %s\n", qstr_str(qst));

    // get arguments from stack: arg2 = fromlist, arg3 = level
    // if using viper types these arguments must be converted to proper objects
    if (emit->do_viper_types) {
        // fromlist should be None or a tuple
        stack_info_t *top = peek_stack(emit, 0);
        if (top->vtype == VTYPE_PTR_NONE) {
            // viper's native None: discard it and load the real None object instead
            emit_pre_pop_discard(emit);
            ASM_MOV_IMM_TO_REG(emit->as, (mp_uint_t)mp_const_none, REG_ARG_2);
        } else {
            vtype_kind_t vtype_fromlist;
            emit_pre_pop_reg(emit, &vtype_fromlist, REG_ARG_2);
            assert(vtype_fromlist == VTYPE_PYOBJ);
        }

        // level argument should be an immediate integer
        top = peek_stack(emit, 0);
        assert(top->vtype == VTYPE_INT && top->kind == STACK_IMM);
        // box the level as a small-int object for the runtime call
        ASM_MOV_IMM_TO_REG(emit->as, (mp_uint_t)MP_OBJ_NEW_SMALL_INT(top->data.u_imm), REG_ARG_3);
        emit_pre_pop_discard(emit);

    } else {
        // non-viper: both arguments are already Python objects
        vtype_kind_t vtype_fromlist;
        vtype_kind_t vtype_level;
        emit_pre_pop_reg_reg(emit, &vtype_fromlist, REG_ARG_2, &vtype_level, REG_ARG_3);
        assert(vtype_fromlist == VTYPE_PYOBJ);
        assert(vtype_level == VTYPE_PYOBJ);
    }

    emit_call_with_imm_arg(emit, MP_F_IMPORT_NAME, qst, REG_ARG_1); // arg1 = import name
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

// Emit code for an import-from operation: read the module on top of the
// stack (without popping it — emit_access_stack only peeks, so the module
// remains available for further import-from operations), call the
// MP_F_IMPORT_FROM runtime helper with attribute name qst, and push the result.
STATIC void emit_native_import_from(emit_t *emit, qstr qst) {
    DEBUG_printf("import_from %s\n", qstr_str(qst));
    emit_native_pre(emit);
    vtype_kind_t vtype_module;
    emit_access_stack(emit, 1, &vtype_module, REG_ARG_1); // arg1 = module
    assert(vtype_module == VTYPE_PYOBJ);
    emit_call_with_imm_arg(emit, MP_F_IMPORT_FROM, qst, REG_ARG_2); // arg2 = import name
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

// Emit code for a star-import: pop the module object and call the
// MP_F_IMPORT_ALL runtime helper (implements `from module import *`).
STATIC void emit_native_import_star(emit_t *emit) {
    DEBUG_printf("import_star\n");
    vtype_kind_t vtype_module;
    emit_pre_pop_reg(emit, &vtype_module, REG_ARG_1); // arg1 = module
    assert(vtype_module == VTYPE_PYOBJ);
    emit_call(emit, MP_F_IMPORT_ALL);
    emit_post(emit);
}

970
STATIC void emit_native_load_const_tok(emit_t *emit, mp_token_kind_t tok) {
971
    DEBUG_printf("load_const_tok(tok=%u)\n", tok);
972
    emit_native_pre(emit);
973
    vtype_kind_t vtype;
974
    mp_uint_t val;
975
976
    if (emit->do_viper_types) {
        switch (tok) {
977
978
979
            case MP_TOKEN_KW_NONE: vtype = VTYPE_PTR_NONE; val = 0; break;
            case MP_TOKEN_KW_FALSE: vtype = VTYPE_BOOL; val = 0; break;
            case MP_TOKEN_KW_TRUE: vtype = VTYPE_BOOL; val = 1; break;
980
981
982
            default:
                assert(tok == MP_TOKEN_ELLIPSIS);
                vtype = VTYPE_PYOBJ; val = (mp_uint_t)&mp_const_ellipsis_obj; break;
983
984
985
986
        }
    } else {
        vtype = VTYPE_PYOBJ;
        switch (tok) {
987
988
989
            case MP_TOKEN_KW_NONE: val = (mp_uint_t)mp_const_none; break;
            case MP_TOKEN_KW_FALSE: val = (mp_uint_t)mp_const_false; break;
            case MP_TOKEN_KW_TRUE: val = (mp_uint_t)mp_const_true; break;
990
991
992
            default:
                assert(tok == MP_TOKEN_ELLIPSIS);
                val = (mp_uint_t)&mp_const_ellipsis_obj; break;
993
994
995
996
997
        }
    }
    emit_post_push_imm(emit, vtype, val);
}

998
STATIC void emit_native_load_const_small_int(emit_t *emit, mp_int_t arg) {
999
    DEBUG_printf("load_const_small_int(int=" INT_FMT ")\n", arg);
1000
    emit_native_pre(emit);
1001
1002
1003
    if (emit->do_viper_types) {
        emit_post_push_imm(emit, VTYPE_INT, arg);
    } else {
1004
        emit_post_push_imm(emit, VTYPE_PYOBJ, (mp_uint_t)MP_OBJ_NEW_SMALL_INT(arg));
1005
1006
1007
    }
}

1008
STATIC void emit_native_load_const_str(emit_t *emit, qstr qst) {
1009
    emit_native_pre(emit);
1010
1011
1012
    // TODO: Eventually we want to be able to work with raw pointers in viper to
    // do native array access.  For now we just load them as any other object.
    /*
1013
1014
1015
1016
    if (emit->do_viper_types) {
        // not implemented properly
        // load a pointer to the asciiz string?
        assert(0);
1017
        emit_post_push_imm(emit, VTYPE_PTR, (mp_uint_t)qstr_str(qst));
1018
1019
1020
    } else
    */
    {
1021
        emit_post_push_imm(emit, VTYPE_PYOBJ, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
1022
1023
1024
    }
}

1025
STATIC void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj) {
1026
    emit_native_pre(emit);