/*
 * This file is part of the Micro Python project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2013, 2014 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

// Essentially normal Python has 1 type: Python objects
// Viper has more than 1 type, and is a more complicated language (a superset of Python).
// If you declare everything in Viper as a Python object (i.e. omit type decls) then
// it should in principle behave exactly the same as native Python.
// Having types means having more opcodes, like binary_op_nat_nat, binary_op_nat_obj etc.
// In practice we don't have a VM but rather emit this directly as asm, which is actually very minimal.

// Because it breaks strict Python equivalence it should be a completely separate
// decorator.  It breaks equivalence because overflow on integers wraps around.
// It wouldn't break equivalence if you didn't use the new types, but since the
// type decls might be used in normal Python for other reasons, it's probably safest,
// cleanest and clearest to make it a separate decorator.

// Actually, it does break equivalence even without type decls, because integers
// default to native integers, not Python objects.

// for x in l[0:8]: can be compiled into a native loop if l has pointer type
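
// As a concrete illustration (hypothetical user code, not part of this file),
// a function handled by this emitter with viper types enabled might be:
//
//     @micropython.viper
//     def add(x: int, y: int) -> int:
//         return x + y    # x and y are native machine ints; overflow wraps
//
// Without the type decls, x and y would be ordinary Python objects.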

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>

#include "mpconfig.h"
#include "nlr.h"
#include "misc.h"
#include "qstr.h"
#include "lexer.h"
#include "parse.h"
#include "obj.h"
#include "emitglue.h"
#include "scope.h"
#include "runtime0.h"
#include "emit.h"
#include "runtime.h"

#if 0 // print debugging info
#define DEBUG_PRINT (1)
#define DEBUG_printf printf
#else // don't print debugging info
#define DEBUG_printf(...) (void)0
#endif

// wrapper around everything in this file
#if (MICROPY_EMIT_X64 && N_X64) \
    || (MICROPY_EMIT_X86 && N_X86) \
    || (MICROPY_EMIT_THUMB && N_THUMB) \
    || (MICROPY_EMIT_ARM && N_ARM)
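
// Each architecture section below defines the same set of REG_*, ASM_*, and
// EXPORT_FUN names; the generic emitter that follows is written purely in
// terms of these macros and is compiled once per target.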

#if N_X64

// x64 specific stuff

#include "asmx64.h"

#define EXPORT_FUN(name) emit_native_x64_##name

#define REG_TEMP0 (REG_RAX)
#define REG_TEMP1 (REG_RDI)
#define REG_TEMP2 (REG_RSI)

#define REG_LOCAL_1 (REG_RBX)
#define REG_LOCAL_NUM (1)

#define ASM_PASS_COMPUTE    ASM_X64_PASS_COMPUTE
#define ASM_PASS_EMIT       ASM_X64_PASS_EMIT

#define ASM_T               asm_x64_t
#define ASM_NEW             asm_x64_new
#define ASM_FREE            asm_x64_free
#define ASM_GET_CODE        asm_x64_get_code
#define ASM_GET_CODE_SIZE   asm_x64_get_code_size
#define ASM_START_PASS      asm_x64_start_pass
#define ASM_END_PASS        asm_x64_end_pass
#define ASM_ENTRY           asm_x64_entry
#define ASM_EXIT            asm_x64_exit

#define ASM_LABEL_ASSIGN    asm_x64_label_assign
#define ASM_JUMP            asm_x64_jmp_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_x64_test_r8_with_r8(as, reg, reg); \
        asm_x64_jcc_label(as, ASM_X64_CC_JZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_x64_test_r8_with_r8(as, reg, reg); \
        asm_x64_jcc_label(as, ASM_X64_CC_JNZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_x64_cmp_r64_with_r64(as, reg1, reg2); \
        asm_x64_jcc_label(as, ASM_X64_CC_JE, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_x64_call_ind(as, ptr, REG_RAX)

#define ASM_MOV_REG_TO_LOCAL        asm_x64_mov_r64_to_local
#define ASM_MOV_IMM_TO_REG          asm_x64_mov_i64_to_r64_optimised
#define ASM_MOV_ALIGNED_IMM_TO_REG  asm_x64_mov_i64_to_r64_aligned
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_x64_mov_i64_to_r64_optimised(as, (imm), (reg_temp)); \
        asm_x64_mov_r64_to_local(as, (reg_temp), (local_num)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG        asm_x64_mov_local_to_r64
#define ASM_MOV_REG_TO_REG          asm_x64_mov_r64_to_r64
#define ASM_MOV_LOCAL_ADDR_TO_REG   asm_x64_mov_local_addr_to_r64

#elif N_X86

// x86 specific stuff

#include "asmx86.h"

STATIC byte mp_f_n_args[MP_F_NUMBER_OF] = {
    [MP_F_CONVERT_OBJ_TO_NATIVE] = 2,
    [MP_F_CONVERT_NATIVE_TO_OBJ] = 2,
    [MP_F_LOAD_CONST_INT] = 1,
    [MP_F_LOAD_CONST_DEC] = 1,
    [MP_F_LOAD_CONST_STR] = 1,
    [MP_F_LOAD_CONST_BYTES] = 1,
    [MP_F_LOAD_NAME] = 1,
    [MP_F_LOAD_GLOBAL] = 1,
    [MP_F_LOAD_BUILD_CLASS] = 0,
    [MP_F_LOAD_ATTR] = 2,
    [MP_F_LOAD_METHOD] = 3,
    [MP_F_STORE_NAME] = 2,
    [MP_F_STORE_GLOBAL] = 2,
    [MP_F_STORE_ATTR] = 3,
    [MP_F_OBJ_SUBSCR] = 3,
    [MP_F_OBJ_IS_TRUE] = 1,
    [MP_F_UNARY_OP] = 2,
    [MP_F_BINARY_OP] = 3,
    [MP_F_BUILD_TUPLE] = 2,
    [MP_F_BUILD_LIST] = 2,
    [MP_F_LIST_APPEND] = 2,
    [MP_F_BUILD_MAP] = 1,
    [MP_F_STORE_MAP] = 3,
#if MICROPY_PY_BUILTINS_SET
    [MP_F_BUILD_SET] = 2,
    [MP_F_STORE_SET] = 2,
#endif
    [MP_F_MAKE_FUNCTION_FROM_RAW_CODE] = 3,
    [MP_F_NATIVE_CALL_FUNCTION_N_KW] = 3,
    [MP_F_CALL_METHOD_N_KW] = 3,
    [MP_F_GETITER] = 1,
    [MP_F_ITERNEXT] = 1,
    [MP_F_NLR_PUSH] = 1,
    [MP_F_NLR_POP] = 0,
    [MP_F_NATIVE_RAISE] = 1,
    [MP_F_IMPORT_NAME] = 3,
    [MP_F_IMPORT_FROM] = 2,
    [MP_F_IMPORT_ALL] = 1,
#if MICROPY_PY_BUILTINS_SLICE
    [MP_F_NEW_SLICE] = 3,
#endif
    [MP_F_UNPACK_SEQUENCE] = 3,
    [MP_F_UNPACK_EX] = 3,
    [MP_F_DELETE_NAME] = 1,
    [MP_F_DELETE_GLOBAL] = 1,
};

#define EXPORT_FUN(name) emit_native_x86_##name

// caller-save, so can be used as temporaries
#define REG_TEMP0 (REG_EAX)
#define REG_TEMP1 (REG_ECX)
#define REG_TEMP2 (REG_EDX)

// callee-save, so can be used as locals
#define REG_LOCAL_1 (REG_EBX)
#define REG_LOCAL_2 (REG_ESI)
#define REG_LOCAL_3 (REG_EDI)
#define REG_LOCAL_NUM (3)

#define ASM_PASS_COMPUTE    ASM_X86_PASS_COMPUTE
#define ASM_PASS_EMIT       ASM_X86_PASS_EMIT

#define ASM_T               asm_x86_t
#define ASM_NEW             asm_x86_new
#define ASM_FREE            asm_x86_free
#define ASM_GET_CODE        asm_x86_get_code
#define ASM_GET_CODE_SIZE   asm_x86_get_code_size
#define ASM_START_PASS      asm_x86_start_pass
#define ASM_END_PASS        asm_x86_end_pass
#define ASM_ENTRY           asm_x86_entry
#define ASM_EXIT            asm_x86_exit

#define ASM_LABEL_ASSIGN    asm_x86_label_assign
#define ASM_JUMP            asm_x86_jmp_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_x86_test_r8_with_r8(as, reg, reg); \
        asm_x86_jcc_label(as, ASM_X86_CC_JZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_x86_test_r8_with_r8(as, reg, reg); \
        asm_x86_jcc_label(as, ASM_X86_CC_JNZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_x86_cmp_r32_with_r32(as, reg1, reg2); \
        asm_x86_jcc_label(as, ASM_X86_CC_JE, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_x86_call_ind(as, ptr, mp_f_n_args[idx], REG_EAX)

#define ASM_MOV_REG_TO_LOCAL        asm_x86_mov_r32_to_local
#define ASM_MOV_IMM_TO_REG          asm_x86_mov_i32_to_r32
#define ASM_MOV_ALIGNED_IMM_TO_REG  asm_x86_mov_i32_to_r32_aligned
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_x86_mov_i32_to_r32(as, (imm), (reg_temp)); \
        asm_x86_mov_r32_to_local(as, (reg_temp), (local_num)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG        asm_x86_mov_local_to_r32
#define ASM_MOV_REG_TO_REG          asm_x86_mov_r32_to_r32
#define ASM_MOV_LOCAL_ADDR_TO_REG   asm_x86_mov_local_addr_to_r32

#elif N_THUMB

// thumb specific stuff

#include "asmthumb.h"

#define EXPORT_FUN(name) emit_native_thumb_##name

#define REG_TEMP0 (REG_R0)
#define REG_TEMP1 (REG_R1)
#define REG_TEMP2 (REG_R2)

#define REG_LOCAL_1 (REG_R4)
#define REG_LOCAL_2 (REG_R5)
#define REG_LOCAL_3 (REG_R6)
#define REG_LOCAL_NUM (3)

#define ASM_PASS_COMPUTE    ASM_THUMB_PASS_COMPUTE
#define ASM_PASS_EMIT       ASM_THUMB_PASS_EMIT

#define ASM_T               asm_thumb_t
#define ASM_NEW             asm_thumb_new
#define ASM_FREE            asm_thumb_free
#define ASM_GET_CODE        asm_thumb_get_code
#define ASM_GET_CODE_SIZE   asm_thumb_get_code_size
#define ASM_START_PASS      asm_thumb_start_pass
#define ASM_END_PASS        asm_thumb_end_pass
#define ASM_ENTRY           asm_thumb_entry
#define ASM_EXIT            asm_thumb_exit

#define ASM_LABEL_ASSIGN    asm_thumb_label_assign
#define ASM_JUMP            asm_thumb_b_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_thumb_cmp_rlo_i8(as, reg, 0); \
        asm_thumb_bcc_label(as, THUMB_CC_EQ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_thumb_cmp_rlo_i8(as, reg, 0); \
        asm_thumb_bcc_label(as, THUMB_CC_NE, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_thumb_cmp_rlo_rlo(as, reg1, reg2); \
        asm_thumb_bcc_label(as, THUMB_CC_EQ, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_thumb_bl_ind(as, ptr, idx, REG_R3)

#define ASM_MOV_REG_TO_LOCAL(as, reg, local_num) asm_thumb_mov_local_reg(as, (local_num), (reg))
#define ASM_MOV_IMM_TO_REG(as, imm, reg) asm_thumb_mov_reg_i32_optimised(as, (reg), (imm))
#define ASM_MOV_ALIGNED_IMM_TO_REG(as, imm, reg) asm_thumb_mov_reg_i32_aligned(as, (reg), (imm))
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_thumb_mov_reg_i32_optimised(as, (reg_temp), (imm)); \
        asm_thumb_mov_local_reg(as, (local_num), (reg_temp)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG(as, local_num, reg) asm_thumb_mov_reg_local(as, (reg), (local_num))
#define ASM_MOV_REG_TO_REG(as, reg_src, reg_dest) asm_thumb_mov_reg_reg(as, (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG(as, local_num, reg) asm_thumb_mov_reg_local_addr(as, (reg), (local_num))
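
// Note: ASM_CALL_IND here passes the index of the runtime function as well as
// its pointer; with the address of mp_fun_table kept in r7 (loaded in the
// entry code below), the assembler can emit a short load of the target
// through the table instead of materialising a full 32-bit address.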

#elif N_ARM

// ARM specific stuff

#include "asmarm.h"

#define EXPORT_FUN(name) emit_native_arm_##name

#define REG_TEMP0 (REG_R0)
#define REG_TEMP1 (REG_R1)
#define REG_TEMP2 (REG_R2)

#define REG_LOCAL_1 (REG_R4)
#define REG_LOCAL_2 (REG_R5)
#define REG_LOCAL_3 (REG_R6)
#define REG_LOCAL_NUM (3)

#define ASM_PASS_COMPUTE    ASM_ARM_PASS_COMPUTE
#define ASM_PASS_EMIT       ASM_ARM_PASS_EMIT

#define ASM_T               asm_arm_t
#define ASM_NEW             asm_arm_new
#define ASM_FREE            asm_arm_free
#define ASM_GET_CODE        asm_arm_get_code
#define ASM_GET_CODE_SIZE   asm_arm_get_code_size
#define ASM_START_PASS      asm_arm_start_pass
#define ASM_END_PASS        asm_arm_end_pass
#define ASM_ENTRY           asm_arm_entry
#define ASM_EXIT            asm_arm_exit

#define ASM_LABEL_ASSIGN    asm_arm_label_assign
#define ASM_JUMP            asm_arm_b_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_arm_cmp_reg_i8(as, reg, 0); \
        asm_arm_bcc_label(as, ARM_CC_EQ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_arm_cmp_reg_i8(as, reg, 0); \
        asm_arm_bcc_label(as, ARM_CC_NE, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_arm_cmp_reg_reg(as, reg1, reg2); \
        asm_arm_bcc_label(as, ARM_CC_EQ, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_arm_bl_ind(as, ptr, idx, REG_R3)

#define ASM_MOV_REG_TO_LOCAL(as, reg, local_num) asm_arm_mov_local_reg(as, (local_num), (reg))
#define ASM_MOV_IMM_TO_REG(as, imm, reg) asm_arm_mov_reg_i32(as, (reg), (imm))
#define ASM_MOV_ALIGNED_IMM_TO_REG(as, imm, reg) asm_arm_mov_reg_i32(as, (reg), (imm))
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_arm_mov_reg_i32(as, (reg_temp), (imm)); \
        asm_arm_mov_local_reg(as, (local_num), (reg_temp)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG(as, local_num, reg) asm_arm_mov_reg_local(as, (reg), (local_num))
#define ASM_MOV_REG_TO_REG(as, reg_src, reg_dest) asm_arm_mov_reg_reg(as, (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG(as, local_num, reg) asm_arm_mov_reg_local_addr(as, (reg), (local_num))

#else

#error unknown native emitter

#endif

typedef enum {
    STACK_VALUE, // value lives in its slot on the machine stack
    STACK_REG,   // value is live in register u_reg
    STACK_IMM,   // value is the constant u_imm, not yet materialised
} stack_info_kind_t;

typedef enum {
    // these first four must fit in 2 bits (see the type_sig computation below)
    VTYPE_PYOBJ = MP_NATIVE_TYPE_OBJ,
    VTYPE_BOOL = MP_NATIVE_TYPE_BOOL,
    VTYPE_INT = MP_NATIVE_TYPE_INT,
    VTYPE_UINT = MP_NATIVE_TYPE_UINT,

    VTYPE_UNBOUND,
    VTYPE_PTR,
    VTYPE_PTR_NONE,
    VTYPE_BUILTIN_V_INT,
} vtype_kind_t;

typedef struct _stack_info_t {
    vtype_kind_t vtype;
    stack_info_kind_t kind;
    union {
        int u_reg;
        mp_int_t u_imm;
    };
} stack_info_t;
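
// An emit_t persists across compiler passes: early passes run the assembler
// with ASM_PASS_COMPUTE to measure code size and stack usage, and the final
// pass runs ASM_PASS_EMIT to write the machine code.  stack_info models the
// Python value stack; an entry may live in a register or as a pending
// immediate until it has to be spilled to its stack slot.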

struct _emit_t {
    int pass;

    bool do_viper_types;

    vtype_kind_t return_vtype;

    uint local_vtype_alloc;
    vtype_kind_t *local_vtype;

    uint stack_info_alloc;
    stack_info_t *stack_info;

    int stack_start;
    int stack_size;

    bool last_emit_was_return_value;

    scope_t *scope;

    ASM_T *as;
};

emit_t *EXPORT_FUN(new)(uint max_num_labels) {
    emit_t *emit = m_new0(emit_t, 1);
    emit->as = ASM_NEW(max_num_labels);
    return emit;
}

void EXPORT_FUN(free)(emit_t *emit) {
    ASM_FREE(emit->as, false);
    m_del(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc);
    m_del(stack_info_t, emit->stack_info, emit->stack_info_alloc);
    m_del_obj(emit_t, emit);
}

STATIC void emit_native_set_native_type(emit_t *emit, mp_uint_t op, mp_uint_t arg1, qstr arg2) {
    switch (op) {
        case MP_EMIT_NATIVE_TYPE_ENABLE:
            emit->do_viper_types = arg1;
            break;

        default: {
            vtype_kind_t type;
            switch (arg2) {
                case MP_QSTR_object: type = VTYPE_PYOBJ; break;
                case MP_QSTR_bool: type = VTYPE_BOOL; break;
                case MP_QSTR_int: type = VTYPE_INT; break;
                case MP_QSTR_uint: type = VTYPE_UINT; break;
                default: printf("ViperTypeError: unknown type %s\n", qstr_str(arg2)); return;
            }
            if (op == MP_EMIT_NATIVE_TYPE_RETURN) {
                emit->return_vtype = type;
            } else {
                assert(arg1 < emit->local_vtype_alloc);
                emit->local_vtype[arg1] = type;
            }
            break;
        }
    }
}
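
// For example, a viper annotation "x: int" reaches this function with arg2 ==
// MP_QSTR_int and arg1 the local number of x, storing VTYPE_INT for that
// local; a return annotation arrives with op == MP_EMIT_NATIVE_TYPE_RETURN
// and sets return_vtype instead.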

STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
    emit->pass = pass;
    emit->stack_start = 0;
    emit->stack_size = 0;
    emit->last_emit_was_return_value = false;
    emit->scope = scope;

    // allocate memory for keeping track of the types of locals
    if (emit->local_vtype_alloc < scope->num_locals) {
        emit->local_vtype = m_renew(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc, scope->num_locals);
        emit->local_vtype_alloc = scope->num_locals;
    }

    // allocate memory for keeping track of the objects on the stack
    // XXX don't know stack size on entry, and it should be maximum over all scopes
    if (emit->stack_info == NULL) {
        emit->stack_info_alloc = scope->stack_size + 50;
        emit->stack_info = m_new(stack_info_t, emit->stack_info_alloc);
    }

    // set default type for return and arguments
    emit->return_vtype = VTYPE_PYOBJ;
    for (mp_uint_t i = 0; i < emit->scope->num_pos_args; i++) {
        emit->local_vtype[i] = VTYPE_PYOBJ;
    }

    // local variables begin unbound, and have unknown type
    for (mp_uint_t i = emit->scope->num_pos_args; i < emit->local_vtype_alloc; i++) {
        emit->local_vtype[i] = VTYPE_UNBOUND;
    }

    // values on stack begin unbound
    for (mp_uint_t i = 0; i < emit->stack_info_alloc; i++) {
        emit->stack_info[i].kind = STACK_VALUE;
        emit->stack_info[i].vtype = VTYPE_UNBOUND;
    }

    ASM_START_PASS(emit->as, pass == MP_PASS_EMIT ? ASM_PASS_EMIT : ASM_PASS_COMPUTE);

    // entry to function
    int num_locals = 0;
    if (pass > MP_PASS_SCOPE) {
        // the first REG_LOCAL_NUM locals live in registers; the rest get
        // stack slots, followed by the slots for the Python value stack
        num_locals = scope->num_locals - REG_LOCAL_NUM;
        if (num_locals < 0) {
            num_locals = 0;
        }
        emit->stack_start = num_locals;
        num_locals += scope->stack_size;
    }
    ASM_ENTRY(emit->as, num_locals);

    // initialise locals from parameters
#if N_X64
    for (int i = 0; i < scope->num_pos_args; i++) {
        if (i == 0) {
            asm_x64_mov_r64_to_r64(emit->as, REG_ARG_1, REG_LOCAL_1);
        } else if (i == 1) {
            asm_x64_mov_r64_to_local(emit->as, REG_ARG_2, i - REG_LOCAL_NUM);
        } else if (i == 2) {
            asm_x64_mov_r64_to_local(emit->as, REG_ARG_3, i - REG_LOCAL_NUM);
        } else {
            // TODO not implemented
            assert(0);
        }
    }
#elif N_X86
    for (int i = 0; i < scope->num_pos_args; i++) {
        if (i == 0) {
            asm_x86_mov_arg_to_r32(emit->as, i, REG_LOCAL_1);
        } else if (i == 1) {
            asm_x86_mov_arg_to_r32(emit->as, i, REG_LOCAL_2);
        } else if (i == 2) {
            asm_x86_mov_arg_to_r32(emit->as, i, REG_LOCAL_3);
        } else {
            asm_x86_mov_arg_to_r32(emit->as, i, REG_TEMP0);
            asm_x86_mov_r32_to_local(emit->as, REG_TEMP0, i - REG_LOCAL_NUM);
        }
    }
#elif N_THUMB
    for (int i = 0; i < scope->num_pos_args; i++) {
        if (i == 0) {
            asm_thumb_mov_reg_reg(emit->as, REG_LOCAL_1, REG_ARG_1);
        } else if (i == 1) {
            asm_thumb_mov_reg_reg(emit->as, REG_LOCAL_2, REG_ARG_2);
        } else if (i == 2) {
            asm_thumb_mov_reg_reg(emit->as, REG_LOCAL_3, REG_ARG_3);
        } else if (i == 3) {
            asm_thumb_mov_local_reg(emit->as, i - REG_LOCAL_NUM, REG_ARG_4);
        } else {
            // TODO not implemented
            assert(0);
        }
    }

    // put the pointer to mp_fun_table in r7 so calls can index through it
    asm_thumb_mov_reg_i32(emit->as, REG_R7, (mp_uint_t)mp_fun_table);
#elif N_ARM
    for (int i = 0; i < scope->num_pos_args; i++) {
        if (i == 0) {
            asm_arm_mov_reg_reg(emit->as, REG_LOCAL_1, REG_ARG_1);
        } else if (i == 1) {
            asm_arm_mov_reg_reg(emit->as, REG_LOCAL_2, REG_ARG_2);
        } else if (i == 2) {
            asm_arm_mov_reg_reg(emit->as, REG_LOCAL_3, REG_ARG_3);
        } else if (i == 3) {
            asm_arm_mov_local_reg(emit->as, i - REG_LOCAL_NUM, REG_ARG_4);
        } else {
            // TODO not implemented
            assert(0);
        }
    }

    // put the pointer to mp_fun_table in r7 so calls can index through it
    asm_arm_mov_reg_i32(emit->as, REG_R7, (mp_uint_t)mp_fun_table);
#else
    #error not implemented
#endif
}

STATIC void emit_native_end_pass(emit_t *emit) {
    if (!emit->last_emit_was_return_value) {
        ASM_EXIT(emit->as);
    }
    ASM_END_PASS(emit->as);

    // check stack is back to zero size
    if (emit->stack_size != 0) {
        printf("ERROR: stack size not back to zero; got %d\n", emit->stack_size);
    }

    if (emit->pass == MP_PASS_EMIT) {
        void *f = ASM_GET_CODE(emit->as);
        mp_uint_t f_len = ASM_GET_CODE_SIZE(emit->as);

        // compute type signature
        // TODO check that viper types here convert correctly to valid types for emit glue
        mp_uint_t type_sig = emit->return_vtype & 3;
        for (mp_uint_t i = 0; i < emit->scope->num_pos_args; i++) {
            type_sig |= (emit->local_vtype[i] & 3) << (i * 2 + 2);
        }
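
        // e.g. a viper function "def f(x: object, y: int) -> int" yields
        // type_sig = MP_NATIVE_TYPE_INT | (MP_NATIVE_TYPE_OBJ << 2) | (MP_NATIVE_TYPE_INT << 4):
        // 2 bits per type, with the return type in the low bits
        // (an illustrative example, not code from this file)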

        mp_emit_glue_assign_native(emit->scope->raw_code, emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY, f, f_len, emit->scope->num_pos_args, type_sig);
    }
}

STATIC bool emit_native_last_emit_was_return_value(emit_t *emit) {
    return emit->last_emit_was_return_value;
}

STATIC void emit_native_adjust_stack_size(emit_t *emit, int delta) {
    emit->stack_size += delta;
}

STATIC void emit_native_set_source_line(emit_t *emit, int source_line) {
}

STATIC void adjust_stack(emit_t *emit, int stack_size_delta) {
    DEBUG_printf("adjust stack: stack:%d + delta:%d\n", emit->stack_size, stack_size_delta);
    assert((int)emit->stack_size + stack_size_delta >= 0);
    emit->stack_size += stack_size_delta;
    if (emit->pass > MP_PASS_SCOPE && emit->stack_size > emit->scope->stack_size) {
        emit->scope->stack_size = emit->stack_size;
    }
}

/*
STATIC void emit_pre_raw(emit_t *emit, int stack_size_delta) {
    adjust_stack(emit, stack_size_delta);
    emit->last_emit_was_return_value = false;
}
*/

// this must be called at start of emit functions
STATIC void emit_native_pre(emit_t *emit) {
    emit->last_emit_was_return_value = false;
    // settle the stack
    /*
    if (regs_needed != 0) {
        for (int i = 0; i < emit->stack_size; i++) {
            switch (emit->stack_info[i].kind) {
                case STACK_VALUE:
                    break;

                case STACK_REG:
                    // TODO only push reg if in regs_needed
                    emit->stack_info[i].kind = STACK_VALUE;
                    ASM_MOV_REG_TO_LOCAL(emit->as, emit->stack_info[i].u_reg, emit->stack_start + i);
                    break;

                case STACK_IMM:
                    // don't think we ever need to push imms for settling
                    //ASM_MOV_IMM_TO_LOCAL(emit->last_imm, emit->stack_start + i);
                    break;
            }
        }
    }
    */
}

STATIC vtype_kind_t peek_vtype(emit_t *emit) {
    return emit->stack_info[emit->stack_size - 1].vtype;
}

// pos=1 is TOS, pos=2 is next, etc
// use pos=0 for no skipping
STATIC void need_reg_single(emit_t *emit, int reg_needed, int skip_stack_pos) {
    skip_stack_pos = emit->stack_size - skip_stack_pos;
    for (int i = 0; i < emit->stack_size; i++) {
        if (i != skip_stack_pos) {
            stack_info_t *si = &emit->stack_info[i];
            if (si->kind == STACK_REG && si->u_reg == reg_needed) {
                si->kind = STACK_VALUE;
                ASM_MOV_REG_TO_LOCAL(emit->as, si->u_reg, emit->stack_start + i);
            }
        }
    }
}

STATIC void need_reg_all(emit_t *emit) {
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_REG) {
            si->kind = STACK_VALUE;
            ASM_MOV_REG_TO_LOCAL(emit->as, si->u_reg, emit->stack_start + i);
        }
    }
}

// Spill all live registers and pending immediates to their stack slots, so the
// run-time stack layout is the same on every path; needed at branch targets,
// which can be reached from more than one place (see emit_native_label_assign).
STATIC void need_stack_settled(emit_t *emit) {
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_REG) {
            si->kind = STACK_VALUE;
            ASM_MOV_REG_TO_LOCAL(emit->as, si->u_reg, emit->stack_start + i);
        }
    }
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_IMM) {
            si->kind = STACK_VALUE;
            ASM_MOV_IMM_TO_LOCAL_USING(emit->as, si->u_imm, emit->stack_start + i, REG_TEMP0);
        }
    }
}

// pos=1 is TOS, pos=2 is next, etc
STATIC void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int reg_dest) {
    need_reg_single(emit, reg_dest, pos);
    stack_info_t *si = &emit->stack_info[emit->stack_size - pos];
    *vtype = si->vtype;
    switch (si->kind) {
        case STACK_VALUE:
            ASM_MOV_LOCAL_TO_REG(emit->as, emit->stack_start + emit->stack_size - pos, reg_dest);
            break;

        case STACK_REG:
            if (si->u_reg != reg_dest) {
                ASM_MOV_REG_TO_REG(emit->as, si->u_reg, reg_dest);
            }
            break;

        case STACK_IMM:
            ASM_MOV_IMM_TO_REG(emit->as, si->u_imm, reg_dest);
            break;
    }
}

STATIC void emit_pre_pop_discard(emit_t *emit) {
    emit->last_emit_was_return_value = false;
    adjust_stack(emit, -1);
}

STATIC void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest) {
    emit->last_emit_was_return_value = false;
    emit_access_stack(emit, 1, vtype, reg_dest);
    adjust_stack(emit, -1);
}

STATIC void emit_pre_pop_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb) {
    emit_pre_pop_reg(emit, vtypea, rega);
    emit_pre_pop_reg(emit, vtypeb, regb);
}

STATIC void emit_pre_pop_reg_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb, vtype_kind_t *vtypec, int regc) {
    emit_pre_pop_reg(emit, vtypea, rega);
    emit_pre_pop_reg(emit, vtypeb, regb);
    emit_pre_pop_reg(emit, vtypec, regc);
}

STATIC void emit_post(emit_t *emit) {
}

STATIC void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg) {
    stack_info_t *si = &emit->stack_info[emit->stack_size];
    si->vtype = vtype;
    si->kind = STACK_REG;
    si->u_reg = reg;
    adjust_stack(emit, 1);
}

STATIC void emit_post_push_imm(emit_t *emit, vtype_kind_t vtype, mp_int_t imm) {
    stack_info_t *si = &emit->stack_info[emit->stack_size];
    si->vtype = vtype;
    si->kind = STACK_IMM;
    si->u_imm = imm;
    adjust_stack(emit, 1);
}

STATIC void emit_post_push_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
}

STATIC void emit_post_push_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
    emit_post_push_reg(emit, vtypec, regc);
}

STATIC void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc, vtype_kind_t vtyped, int regd) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
    emit_post_push_reg(emit, vtypec, regc);
    emit_post_push_reg(emit, vtyped, regd);
}
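
// A typical emitter combines the pre-pop/post-push helpers with the call
// helpers below: pop operands into argument registers, call a runtime
// function, push the result.  A sketch of the pattern (mirroring the
// binary-op emitter later in this file):
//
//     emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_3, &vtype_lhs, REG_ARG_2);
//     emit_call_with_imm_arg(emit, MP_F_BINARY_OP, op, REG_ARG_1);
//     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);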

STATIC void emit_call(emit_t *emit, mp_fun_kind_t fun_kind) {
    need_reg_all(emit);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
}

STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
    need_reg_all(emit);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val, arg_reg);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
}

// the first arg is stored in the code aligned on a mp_uint_t boundary
STATIC void emit_call_with_imm_arg_aligned(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
    need_reg_all(emit);
    ASM_MOV_ALIGNED_IMM_TO_REG(emit->as, arg_val, arg_reg);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
}

STATIC void emit_call_with_2_imm_args(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2) {
    need_reg_all(emit);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val1, arg_reg1);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val2, arg_reg2);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
}

// the first arg is stored in the code aligned on a mp_uint_t boundary
STATIC void emit_call_with_3_imm_args_and_first_aligned(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2, mp_int_t arg_val3, int arg_reg3) {
    need_reg_all(emit);
    ASM_MOV_ALIGNED_IMM_TO_REG(emit->as, arg_val1, arg_reg1);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val2, arg_reg2);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val3, arg_reg3);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
}

// vtype of all n_pop objects is VTYPE_PYOBJ
// Will convert any items that are not VTYPE_PYOBJ to this type and put them back on the stack.
// If any conversions of non-immediate values are needed, then it uses REG_ARG_1, REG_ARG_2 and REG_RET.
// Otherwise, it does not use any temporary registers (but may use reg_dest before loading it with stack pointer).
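// Typical use, e.g. building a tuple from the top n_args stack values (a
// sketch of the pattern used by emitters later in this file):
//     emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_2, n_args);
//     emit_call_with_imm_arg(emit, MP_F_BUILD_TUPLE, n_args, REG_ARG_1);
//     emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET); // the new tuple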
STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_pop) {
    need_reg_all(emit);

    // First, store any immediate values to their respective place on the stack.
    for (mp_uint_t i = 0; i < n_pop; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
        // must push any imm's to stack
        // must convert them to VTYPE_PYOBJ for viper code
        if (si->kind == STACK_IMM) {
            si->kind = STACK_VALUE;
            switch (si->vtype) {
                case VTYPE_PYOBJ:
                    ASM_MOV_IMM_TO_LOCAL_USING(emit->as, si->u_imm, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    break;
                case VTYPE_BOOL:
                    if (si->u_imm == 0) {
                        ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (mp_uint_t)mp_const_false, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    } else {
                        ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (mp_uint_t)mp_const_true, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    }
                    si->vtype = VTYPE_PYOBJ;
                    break;
                case VTYPE_INT:
                case VTYPE_UINT:
                    // convert to a small int object: (value << 1) | 1
                    ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (si->u_imm << 1) | 1, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    si->vtype = VTYPE_PYOBJ;
                    break;
                default:
                    // not handled
                    assert(0);
            }
        }

        // verify that this value is on the stack
        assert(si->kind == STACK_VALUE);
    }

    // Second, convert any non-VTYPE_PYOBJ to that type.
    for (mp_uint_t i = 0; i < n_pop; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
        if (si->vtype != VTYPE_PYOBJ) {
            mp_uint_t local_num = emit->stack_start + emit->stack_size - 1 - i;
            ASM_MOV_LOCAL_TO_REG(emit->as, local_num, REG_ARG_1);
            emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, si->vtype, REG_ARG_2); // arg2 = type
            ASM_MOV_REG_TO_LOCAL(emit->as, REG_RET, local_num);
            si->vtype = VTYPE_PYOBJ;
        }
    }

    // Adjust the stack for a pop of n_pop items, and load the stack pointer into reg_dest.
    adjust_stack(emit, -n_pop);
    ASM_MOV_LOCAL_ADDR_TO_REG(emit->as, emit->stack_start + emit->stack_size, reg_dest);
}

// vtype of all n_push objects is VTYPE_PYOBJ
STATIC void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_push) {
    need_reg_all(emit);
    for (mp_uint_t i = 0; i < n_push; i++) {
        emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
        emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
    }
    ASM_MOV_LOCAL_ADDR_TO_REG(emit->as, emit->stack_start + emit->stack_size, reg_dest);
    adjust_stack(emit, n_push);
}

STATIC void emit_native_load_id(emit_t *emit, qstr qstr) {
    emit_common_load_id(emit, &EXPORT_FUN(method_table), emit->scope, qstr);
}

STATIC void emit_native_store_id(emit_t *emit, qstr qstr) {
    emit_common_store_id(emit, &EXPORT_FUN(method_table), emit->scope, qstr);
}

STATIC void emit_native_delete_id(emit_t *emit, qstr qstr) {
    emit_common_delete_id(emit, &EXPORT_FUN(method_table), emit->scope, qstr);
}

STATIC void emit_native_label_assign(emit_t *emit, uint l) {
    emit_native_pre(emit);
    // need to commit stack because we can jump here from elsewhere
    need_stack_settled(emit);
    ASM_LABEL_ASSIGN(emit->as, l);
    emit_post(emit);
}

STATIC void emit_native_import_name(emit_t *emit, qstr qst) {
    DEBUG_printf("import_name %s\n", qstr_str(qst));
    vtype_kind_t vtype_fromlist;
    vtype_kind_t vtype_level;
    emit_pre_pop_reg_reg(emit, &vtype_fromlist, REG_ARG_2, &vtype_level, REG_ARG_3); // arg2 = fromlist, arg3 = level
    assert(vtype_fromlist == VTYPE_PYOBJ);
    assert(vtype_level == VTYPE_PYOBJ);
    emit_call_with_imm_arg(emit, MP_F_IMPORT_NAME, qst, REG_ARG_1); // arg1 = import name
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

STATIC void emit_native_import_from(emit_t *emit, qstr qst) {
    DEBUG_printf("import_from %s\n", qstr_str(qst));
    emit_native_pre(emit);
    vtype_kind_t vtype_module;
    emit_access_stack(emit, 1, &vtype_module, REG_ARG_1); // arg1 = module
    assert(vtype_module == VTYPE_PYOBJ);
    emit_call_with_imm_arg(emit, MP_F_IMPORT_FROM, qst, REG_ARG_2); // arg2 = import name
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

STATIC void emit_native_import_star(emit_t *emit) {
    DEBUG_printf("import_star\n");
    vtype_kind_t vtype_module;
    emit_pre_pop_reg(emit, &vtype_module, REG_ARG_1); // arg1 = module
    assert(vtype_module == VTYPE_PYOBJ);
    emit_call(emit, MP_F_IMPORT_ALL);
    emit_post(emit);
}

STATIC void emit_native_load_const_tok(emit_t *emit, mp_token_kind_t tok) {
    DEBUG_printf("load_const_tok %d\n", tok);
    emit_native_pre(emit);
    int vtype;
    mp_uint_t val;
    if (emit->do_viper_types) {
        switch (tok) {
            case MP_TOKEN_KW_NONE: vtype = VTYPE_PTR_NONE; val = 0; break;
            case MP_TOKEN_KW_FALSE: vtype = VTYPE_BOOL; val = 0; break;
            case MP_TOKEN_KW_TRUE: vtype = VTYPE_BOOL; val = 1; break;
            default: assert(0); vtype = 0; val = 0; // shouldn't happen
        }
    } else {
        vtype = VTYPE_PYOBJ;
        switch (tok) {
            case MP_TOKEN_KW_NONE: val = (mp_uint_t)mp_const_none; break;
            case MP_TOKEN_KW_FALSE: val = (mp_uint_t)mp_const_false; break;
            case MP_TOKEN_KW_TRUE: val = (mp_uint_t)mp_const_true; break;
            default: assert(0); vtype = 0; val = 0; // shouldn't happen
        }
    }
    emit_post_push_imm(emit, vtype, val);
}

STATIC void emit_native_load_const_small_int(emit_t *emit, mp_int_t arg) {
    DEBUG_printf("load_const_small_int %d\n", arg);
    emit_native_pre(emit);
    if (emit->do_viper_types) {
        emit_post_push_imm(emit, VTYPE_INT, arg);
    } else {
        // a Python small int is encoded directly in the object word:
        // the value shifted left by 1, with the low tag bit set
        emit_post_push_imm(emit, VTYPE_PYOBJ, (arg << 1) | 1);
    }
}

STATIC void emit_native_load_const_int(emit_t *emit, qstr qst) {
    DEBUG_printf("load_const_int %s\n", qstr_str(qst));
    // for viper: load integer, check fits in 32 bits
    emit_native_pre(emit);
    emit_call_with_imm_arg(emit, MP_F_LOAD_CONST_INT, qst, REG_ARG_1);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

STATIC void emit_native_load_const_dec(emit_t *emit, qstr qstr) {
    // for viper, a float/complex is just a Python object
    emit_native_pre(emit);
    emit_call_with_imm_arg(emit, MP_F_LOAD_CONST_DEC, qstr, REG_ARG_1);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

STATIC void emit_native_load_const_str(emit_t *emit, qstr qstr, bool bytes) {
    emit_native_pre(emit);
    if (emit->do_viper_types) {
        // not implemented properly
        // load a pointer to the asciiz string?
        assert(0);
        emit_post_push_imm(emit, VTYPE_PTR, (mp_uint_t)qstr_str(qstr));
    } else {
        if (bytes) {
            emit_call_with_imm_arg(emit, MP_F_LOAD_CONST_BYTES, qstr, REG_ARG_1);
        } else {
            emit_call_with_imm_arg(emit, MP_F_LOAD_CONST_STR, qstr, REG_ARG_1);