/*
 * This file is part of the Micro Python project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2013, 2014 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

// Essentially normal Python has 1 type: Python objects
// Viper has more than 1 type, and is just a more complicated Python (a superset of it).
// If you declare everything in Viper as a Python object (ie omit type decls) then
// it should in principle be exactly the same as native Python.
// Having types means having more opcodes, like binary_op_nat_nat, binary_op_nat_obj etc.
// In practice we won't have a VM but rather do this in asm, which is actually very minimal.

// Because it breaks strict Python equivalence it should be a completely separate
// decorator.  It breaks equivalence because overflow on integers wraps around.
// It shouldn't break equivalence if you don't use the new types, but since the
// type decls might be used in normal Python for other reasons, it's probably safest,
// cleanest and clearest to make it a separate decorator.

// Actually, it does break equivalence because integers default to native integers,
// not Python objects.

// for x in l[0:8]: can be compiled into a native loop if l has pointer type
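//
// As a hedged illustration only (hypothetical user code, not part of this
// file), viper type declarations look like this at the Python level:
//
//     @micropython.viper
//     def scale(buf: ptr8, n: int):
//         for i in range(n):
//             buf[i] = buf[i] * 2 & 0xff
//
// The annotations map to the vtype_kind_t values defined below (ptr8 ->
// VTYPE_PTR8, int -> VTYPE_INT) via emit_native_set_native_type().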

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>

#include "mpconfig.h"
#include "nlr.h"
#include "misc.h"
#include "qstr.h"
#include "lexer.h"
#include "parse.h"
#include "obj.h"
#include "emitglue.h"
#include "scope.h"
#include "runtime0.h"
#include "emit.h"
#include "runtime.h"

#if 0 // print debugging info
#define DEBUG_PRINT (1)
#define DEBUG_printf printf
#else // don't print debugging info
#define DEBUG_printf(...) (void)0
#endif

// wrapper around everything in this file
#if (MICROPY_EMIT_X64 && N_X64) \
    || (MICROPY_EMIT_X86 && N_X86) \
    || (MICROPY_EMIT_THUMB && N_THUMB) \
    || (MICROPY_EMIT_ARM && N_ARM)

#if N_X64

// x64 specific stuff

#include "asmx64.h"

#define EXPORT_FUN(name) emit_native_x64_##name

#define REG_RET ASM_X64_REG_RAX
#define REG_ARG_1 ASM_X64_REG_RDI
#define REG_ARG_2 ASM_X64_REG_RSI
#define REG_ARG_3 ASM_X64_REG_RDX
#define REG_ARG_4 ASM_X64_REG_RCX

// caller-save
#define REG_TEMP0 ASM_X64_REG_RAX
#define REG_TEMP1 ASM_X64_REG_RDI
#define REG_TEMP2 ASM_X64_REG_RSI

// callee-save
#define REG_LOCAL_1 ASM_X64_REG_RBX
#define REG_LOCAL_2 ASM_X64_REG_R12
#define REG_LOCAL_3 ASM_X64_REG_R13
#define REG_LOCAL_NUM (3)

#define ASM_PASS_COMPUTE    ASM_X64_PASS_COMPUTE
#define ASM_PASS_EMIT       ASM_X64_PASS_EMIT

#define ASM_T               asm_x64_t
#define ASM_NEW             asm_x64_new
#define ASM_FREE            asm_x64_free
#define ASM_GET_CODE        asm_x64_get_code
#define ASM_GET_CODE_SIZE   asm_x64_get_code_size
#define ASM_START_PASS      asm_x64_start_pass
#define ASM_END_PASS        asm_x64_end_pass
#define ASM_ENTRY           asm_x64_entry
#define ASM_EXIT            asm_x64_exit

#define ASM_LABEL_ASSIGN    asm_x64_label_assign
#define ASM_JUMP            asm_x64_jmp_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_x64_test_r8_with_r8(as, reg, reg); \
        asm_x64_jcc_label(as, ASM_X64_CC_JZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_x64_test_r8_with_r8(as, reg, reg); \
        asm_x64_jcc_label(as, ASM_X64_CC_JNZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_x64_cmp_r64_with_r64(as, reg1, reg2); \
        asm_x64_jcc_label(as, ASM_X64_CC_JE, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_x64_call_ind(as, ptr, ASM_X64_REG_RAX)

#define ASM_MOV_REG_TO_LOCAL        asm_x64_mov_r64_to_local
#define ASM_MOV_IMM_TO_REG          asm_x64_mov_i64_to_r64_optimised
#define ASM_MOV_ALIGNED_IMM_TO_REG  asm_x64_mov_i64_to_r64_aligned
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_x64_mov_i64_to_r64_optimised(as, (imm), (reg_temp)); \
        asm_x64_mov_r64_to_local(as, (reg_temp), (local_num)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG        asm_x64_mov_local_to_r64
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x64_mov_r64_r64((as), (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG   asm_x64_mov_local_addr_to_r64

#define ASM_LSL_REG(as, reg) asm_x64_shl_r64_cl((as), (reg))
#define ASM_ASR_REG(as, reg) asm_x64_sar_r64_cl((as), (reg))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_x64_or_r64_r64((as), (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_x64_xor_r64_r64((as), (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_x64_and_r64_r64((as), (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_x64_add_r64_r64((as), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_x64_sub_r64_r64((as), (reg_dest), (reg_src))

#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_x64_mov_r64_to_disp((as), (reg_src), (reg_base), 0)
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_x64_mov_r8_to_disp((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_x64_mov_r16_to_disp((as), (reg_src), (reg_base), 0)

#elif N_X86

// x86 specific stuff

#include "asmx86.h"

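// Number of arguments for each runtime helper function.  On x86 the calling
// convention used here passes arguments on the stack, so the indirect-call
// wrapper (see ASM_CALL_IND below) needs each function's arity to know how
// many argument registers to push before the call.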
STATIC byte mp_f_n_args[MP_F_NUMBER_OF] = {
    [MP_F_CONVERT_OBJ_TO_NATIVE] = 2,
    [MP_F_CONVERT_NATIVE_TO_OBJ] = 2,
    [MP_F_LOAD_CONST_INT] = 1,
    [MP_F_LOAD_CONST_DEC] = 1,
    [MP_F_LOAD_CONST_STR] = 1,
    [MP_F_LOAD_CONST_BYTES] = 1,
    [MP_F_LOAD_NAME] = 1,
    [MP_F_LOAD_GLOBAL] = 1,
    [MP_F_LOAD_BUILD_CLASS] = 0,
    [MP_F_LOAD_ATTR] = 2,
    [MP_F_LOAD_METHOD] = 3,
    [MP_F_STORE_NAME] = 2,
    [MP_F_STORE_GLOBAL] = 2,
    [MP_F_STORE_ATTR] = 3,
    [MP_F_OBJ_SUBSCR] = 3,
    [MP_F_OBJ_IS_TRUE] = 1,
    [MP_F_UNARY_OP] = 2,
    [MP_F_BINARY_OP] = 3,
    [MP_F_BUILD_TUPLE] = 2,
    [MP_F_BUILD_LIST] = 2,
    [MP_F_LIST_APPEND] = 2,
    [MP_F_BUILD_MAP] = 1,
    [MP_F_STORE_MAP] = 3,
#if MICROPY_PY_BUILTINS_SET
    [MP_F_BUILD_SET] = 2,
    [MP_F_STORE_SET] = 2,
#endif
    [MP_F_MAKE_FUNCTION_FROM_RAW_CODE] = 3,
    [MP_F_NATIVE_CALL_FUNCTION_N_KW] = 3,
    [MP_F_CALL_METHOD_N_KW] = 3,
    [MP_F_GETITER] = 1,
    [MP_F_ITERNEXT] = 1,
    [MP_F_NLR_PUSH] = 1,
    [MP_F_NLR_POP] = 0,
    [MP_F_NATIVE_RAISE] = 1,
    [MP_F_IMPORT_NAME] = 3,
    [MP_F_IMPORT_FROM] = 2,
    [MP_F_IMPORT_ALL] = 1,
#if MICROPY_PY_BUILTINS_SLICE
    [MP_F_NEW_SLICE] = 3,
#endif
    [MP_F_UNPACK_SEQUENCE] = 3,
    [MP_F_UNPACK_EX] = 3,
    [MP_F_DELETE_NAME] = 1,
    [MP_F_DELETE_GLOBAL] = 1,
};

#define EXPORT_FUN(name) emit_native_x86_##name

#define REG_RET ASM_X86_REG_EAX
#define REG_ARG_1 ASM_X86_REG_ARG_1
#define REG_ARG_2 ASM_X86_REG_ARG_2
#define REG_ARG_3 ASM_X86_REG_ARG_3

// caller-save, so can be used as temporaries
#define REG_TEMP0 ASM_X86_REG_EAX
#define REG_TEMP1 ASM_X86_REG_ECX
#define REG_TEMP2 ASM_X86_REG_EDX

// callee-save, so can be used as locals
#define REG_LOCAL_1 ASM_X86_REG_EBX
#define REG_LOCAL_2 ASM_X86_REG_ESI
#define REG_LOCAL_3 ASM_X86_REG_EDI
#define REG_LOCAL_NUM (3)

#define ASM_PASS_COMPUTE    ASM_X86_PASS_COMPUTE
#define ASM_PASS_EMIT       ASM_X86_PASS_EMIT

#define ASM_T               asm_x86_t
#define ASM_NEW             asm_x86_new
#define ASM_FREE            asm_x86_free
#define ASM_GET_CODE        asm_x86_get_code
#define ASM_GET_CODE_SIZE   asm_x86_get_code_size
#define ASM_START_PASS      asm_x86_start_pass
#define ASM_END_PASS        asm_x86_end_pass
#define ASM_ENTRY           asm_x86_entry
#define ASM_EXIT            asm_x86_exit

#define ASM_LABEL_ASSIGN    asm_x86_label_assign
#define ASM_JUMP            asm_x86_jmp_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_x86_test_r8_with_r8(as, reg, reg); \
        asm_x86_jcc_label(as, ASM_X86_CC_JZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_x86_test_r8_with_r8(as, reg, reg); \
        asm_x86_jcc_label(as, ASM_X86_CC_JNZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_x86_cmp_r32_with_r32(as, reg1, reg2); \
        asm_x86_jcc_label(as, ASM_X86_CC_JE, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_x86_call_ind(as, ptr, mp_f_n_args[idx], ASM_X86_REG_EAX)

#define ASM_MOV_REG_TO_LOCAL        asm_x86_mov_r32_to_local
#define ASM_MOV_IMM_TO_REG          asm_x86_mov_i32_to_r32
#define ASM_MOV_ALIGNED_IMM_TO_REG  asm_x86_mov_i32_to_r32_aligned
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_x86_mov_i32_to_r32(as, (imm), (reg_temp)); \
        asm_x86_mov_r32_to_local(as, (reg_temp), (local_num)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG        asm_x86_mov_local_to_r32
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x86_mov_r32_r32((as), (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG   asm_x86_mov_local_addr_to_r32

#define ASM_LSL_REG(as, reg) asm_x86_shl_r32_cl((as), (reg))
#define ASM_ASR_REG(as, reg) asm_x86_sar_r32_cl((as), (reg))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_x86_or_r32_r32((as), (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_x86_xor_r32_r32((as), (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_x86_and_r32_r32((as), (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_x86_add_r32_r32((as), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_x86_sub_r32_r32((as), (reg_dest), (reg_src))

#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_x86_mov_r32_to_disp((as), (reg_src), (reg_base), 0)
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_x86_mov_r8_to_disp((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_x86_mov_r16_to_disp((as), (reg_src), (reg_base), 0)

#elif N_THUMB

// thumb specific stuff

#include "asmthumb.h"

#define EXPORT_FUN(name) emit_native_thumb_##name

#define REG_RET ASM_THUMB_REG_R0
#define REG_ARG_1 ASM_THUMB_REG_R0
#define REG_ARG_2 ASM_THUMB_REG_R1
#define REG_ARG_3 ASM_THUMB_REG_R2
#define REG_ARG_4 ASM_THUMB_REG_R3

#define REG_TEMP0 ASM_THUMB_REG_R0
#define REG_TEMP1 ASM_THUMB_REG_R1
#define REG_TEMP2 ASM_THUMB_REG_R2

#define REG_LOCAL_1 ASM_THUMB_REG_R4
#define REG_LOCAL_2 ASM_THUMB_REG_R5
#define REG_LOCAL_3 ASM_THUMB_REG_R6
#define REG_LOCAL_NUM (3)

#define ASM_PASS_COMPUTE    ASM_THUMB_PASS_COMPUTE
#define ASM_PASS_EMIT       ASM_THUMB_PASS_EMIT

#define ASM_T               asm_thumb_t
#define ASM_NEW             asm_thumb_new
#define ASM_FREE            asm_thumb_free
#define ASM_GET_CODE        asm_thumb_get_code
#define ASM_GET_CODE_SIZE   asm_thumb_get_code_size
#define ASM_START_PASS      asm_thumb_start_pass
#define ASM_END_PASS        asm_thumb_end_pass
#define ASM_ENTRY           asm_thumb_entry
#define ASM_EXIT            asm_thumb_exit

#define ASM_LABEL_ASSIGN    asm_thumb_label_assign
#define ASM_JUMP            asm_thumb_b_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_thumb_cmp_rlo_i8(as, reg, 0); \
        asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_thumb_cmp_rlo_i8(as, reg, 0); \
        asm_thumb_bcc_label(as, ASM_THUMB_CC_NE, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_thumb_cmp_rlo_rlo(as, reg1, reg2); \
        asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_thumb_bl_ind(as, ptr, idx, ASM_THUMB_REG_R3)

#define ASM_MOV_REG_TO_LOCAL(as, reg, local_num) asm_thumb_mov_local_reg(as, (local_num), (reg))
#define ASM_MOV_IMM_TO_REG(as, imm, reg) asm_thumb_mov_reg_i32_optimised(as, (reg), (imm))
#define ASM_MOV_ALIGNED_IMM_TO_REG(as, imm, reg) asm_thumb_mov_reg_i32_aligned(as, (reg), (imm))
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_thumb_mov_reg_i32_optimised(as, (reg_temp), (imm)); \
        asm_thumb_mov_local_reg(as, (local_num), (reg_temp)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG(as, local_num, reg) asm_thumb_mov_reg_local(as, (reg), (local_num))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_thumb_mov_reg_reg((as), (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG(as, local_num, reg) asm_thumb_mov_reg_local_addr(as, (reg), (local_num))

#define ASM_LSL_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_LSL, (reg_dest), (reg_shift))
#define ASM_ASR_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ASR, (reg_dest), (reg_shift))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ORR, (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_EOR, (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_AND, (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_thumb_add_rlo_rlo_rlo((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_thumb_sub_rlo_rlo_rlo((as), (reg_dest), (reg_dest), (reg_src))

#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_thumb_strb_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_thumb_strh_rlo_rlo_i5((as), (reg_src), (reg_base), 0)

#elif N_ARM

// ARM specific stuff

#include "asmarm.h"

#define EXPORT_FUN(name) emit_native_arm_##name

#define REG_RET ASM_ARM_REG_R0
#define REG_ARG_1 ASM_ARM_REG_R0
#define REG_ARG_2 ASM_ARM_REG_R1
#define REG_ARG_3 ASM_ARM_REG_R2
#define REG_ARG_4 ASM_ARM_REG_R3

#define REG_TEMP0 ASM_ARM_REG_R0
#define REG_TEMP1 ASM_ARM_REG_R1
#define REG_TEMP2 ASM_ARM_REG_R2

#define REG_LOCAL_1 ASM_ARM_REG_R4
#define REG_LOCAL_2 ASM_ARM_REG_R5
#define REG_LOCAL_3 ASM_ARM_REG_R6
#define REG_LOCAL_NUM (3)

#define ASM_PASS_COMPUTE    ASM_ARM_PASS_COMPUTE
#define ASM_PASS_EMIT       ASM_ARM_PASS_EMIT

#define ASM_T               asm_arm_t
#define ASM_NEW             asm_arm_new
#define ASM_FREE            asm_arm_free
#define ASM_GET_CODE        asm_arm_get_code
#define ASM_GET_CODE_SIZE   asm_arm_get_code_size
#define ASM_START_PASS      asm_arm_start_pass
#define ASM_END_PASS        asm_arm_end_pass
#define ASM_ENTRY           asm_arm_entry
#define ASM_EXIT            asm_arm_exit

#define ASM_LABEL_ASSIGN    asm_arm_label_assign
#define ASM_JUMP            asm_arm_b_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_arm_cmp_reg_i8(as, reg, 0); \
        asm_arm_bcc_label(as, ASM_ARM_CC_EQ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_arm_cmp_reg_i8(as, reg, 0); \
        asm_arm_bcc_label(as, ASM_ARM_CC_NE, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_arm_cmp_reg_reg(as, reg1, reg2); \
        asm_arm_bcc_label(as, ASM_ARM_CC_EQ, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_arm_bl_ind(as, ptr, idx, ASM_ARM_REG_R3)

#define ASM_MOV_REG_TO_LOCAL(as, reg, local_num) asm_arm_mov_local_reg(as, (local_num), (reg))
#define ASM_MOV_IMM_TO_REG(as, imm, reg) asm_arm_mov_reg_i32(as, (reg), (imm))
#define ASM_MOV_ALIGNED_IMM_TO_REG(as, imm, reg) asm_arm_mov_reg_i32(as, (reg), (imm))
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_arm_mov_reg_i32(as, (reg_temp), (imm)); \
        asm_arm_mov_local_reg(as, (local_num), (reg_temp)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG(as, local_num, reg) asm_arm_mov_reg_local(as, (reg), (local_num))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_arm_mov_reg_reg((as), (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG(as, local_num, reg) asm_arm_mov_reg_local_addr(as, (reg), (local_num))

#define ASM_LSL_REG_REG(as, reg_dest, reg_shift) asm_arm_lsl_reg_reg((as), (reg_dest), (reg_shift))
#define ASM_ASR_REG_REG(as, reg_dest, reg_shift) asm_arm_asr_reg_reg((as), (reg_dest), (reg_shift))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_arm_orr_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_arm_eor_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_arm_and_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_arm_add_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_arm_sub_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))

#define ASM_STORE_REG_REG(as, reg_value, reg_base) asm_arm_str_reg_reg((as), (reg_value), (reg_base))
#define ASM_STORE8_REG_REG(as, reg_value, reg_base) asm_arm_strb_reg_reg((as), (reg_value), (reg_base))
#define ASM_STORE16_REG_REG(as, reg_value, reg_base) asm_arm_strh_reg_reg((as), (reg_value), (reg_base))

#else

#error unknown native emitter

#endif

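// The emitter tracks each value on the Python stack in one of three states
// (descriptive note; see need_reg_all/need_stack_settled below, which demote
// entries back to STACK_VALUE by flushing them to memory):
//   STACK_VALUE -- value lives in its memory slot on the machine stack
//   STACK_REG   -- value is cached in a machine register (u_reg)
//   STACK_IMM   -- value is a known constant (u_imm) not yet materialised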
typedef enum {
    STACK_VALUE,
    STACK_REG,
    STACK_IMM,
} stack_info_kind_t;

// these enums must be distinct and the bottom 2 bits
// must correspond to the correct MP_NATIVE_TYPE_xxx value
typedef enum {
    VTYPE_PYOBJ = 0x00 | MP_NATIVE_TYPE_OBJ,
    VTYPE_BOOL = 0x00 | MP_NATIVE_TYPE_BOOL,
    VTYPE_INT = 0x00 | MP_NATIVE_TYPE_INT,
    VTYPE_UINT = 0x00 | MP_NATIVE_TYPE_UINT,

    VTYPE_PTR = 0x10 | MP_NATIVE_TYPE_UINT, // pointer to word sized entity
    VTYPE_PTR8 = 0x20 | MP_NATIVE_TYPE_UINT,
    VTYPE_PTR16 = 0x30 | MP_NATIVE_TYPE_UINT,
    VTYPE_PTR_NONE = 0x40 | MP_NATIVE_TYPE_UINT,

    VTYPE_UNBOUND = 0x50 | MP_NATIVE_TYPE_OBJ,
    VTYPE_BUILTIN_CAST = 0x60 | MP_NATIVE_TYPE_OBJ,
} vtype_kind_t;
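// For example, the 2-bit native-type tag is recovered by masking, which is
// how the type signature in emit_native_end_pass() is built:
//   (VTYPE_PTR16 & 3) == MP_NATIVE_TYPE_UINT
//   (VTYPE_UNBOUND & 3) == MP_NATIVE_TYPE_OBJ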

typedef struct _stack_info_t {
    vtype_kind_t vtype;
    stack_info_kind_t kind;
    union {
        int u_reg;
        mp_int_t u_imm;
    };
} stack_info_t;

struct _emit_t {
    int pass;

    bool do_viper_types;

    vtype_kind_t return_vtype;

    mp_uint_t local_vtype_alloc;
    vtype_kind_t *local_vtype;

    mp_uint_t stack_info_alloc;
    stack_info_t *stack_info;

    int stack_start;
    int stack_size;

    bool last_emit_was_return_value;

    scope_t *scope;

    ASM_T *as;
};

emit_t *EXPORT_FUN(new)(mp_uint_t max_num_labels) {
    emit_t *emit = m_new0(emit_t, 1);
    emit->as = ASM_NEW(max_num_labels);
    return emit;
}

void EXPORT_FUN(free)(emit_t *emit) {
    ASM_FREE(emit->as, false);
    m_del(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc);
    m_del(stack_info_t, emit->stack_info, emit->stack_info_alloc);
    m_del_obj(emit_t, emit);
}

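// Record the viper type of the return value or of an argument/local, as given
// by annotations in the source.  For example (hypothetical user code, not
// from this file): "def f(x: ptr8) -> int" leads to a call with
// arg2 == MP_QSTR_ptr8 for x and, with op == MP_EMIT_NATIVE_TYPE_RETURN,
// arg2 == MP_QSTR_int for the return type.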
STATIC void emit_native_set_native_type(emit_t *emit, mp_uint_t op, mp_uint_t arg1, qstr arg2) {
    switch (op) {
        case MP_EMIT_NATIVE_TYPE_ENABLE:
            emit->do_viper_types = arg1;
            break;

        default: {
            vtype_kind_t type;
            switch (arg2) {
                case MP_QSTR_object: type = VTYPE_PYOBJ; break;
                case MP_QSTR_bool: type = VTYPE_BOOL; break;
                case MP_QSTR_int: type = VTYPE_INT; break;
                case MP_QSTR_uint: type = VTYPE_UINT; break;
                case MP_QSTR_ptr: type = VTYPE_PTR; break;
                case MP_QSTR_ptr8: type = VTYPE_PTR8; break;
                case MP_QSTR_ptr16: type = VTYPE_PTR16; break;
                default: printf("ViperTypeError: unknown type %s\n", qstr_str(arg2)); return;
            }
            if (op == MP_EMIT_NATIVE_TYPE_RETURN) {
                emit->return_vtype = type;
            } else {
                assert(arg1 < emit->local_vtype_alloc);
                emit->local_vtype[arg1] = type;
            }
            break;
        }
    }
}

STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
    DEBUG_printf("start_pass(pass=%u, scope=%p)\n", pass, scope);

    emit->pass = pass;
    emit->stack_start = 0;
    emit->stack_size = 0;
    emit->last_emit_was_return_value = false;
    emit->scope = scope;

    // allocate memory for keeping track of the types of locals
    if (emit->local_vtype_alloc < scope->num_locals) {
        emit->local_vtype = m_renew(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc, scope->num_locals);
        emit->local_vtype_alloc = scope->num_locals;
    }

    // allocate memory for keeping track of the objects on the stack
    // XXX don't know stack size on entry, and it should be maximum over all scopes
    if (emit->stack_info == NULL) {
        emit->stack_info_alloc = scope->stack_size + 50;
        emit->stack_info = m_new(stack_info_t, emit->stack_info_alloc);
    }

    // set default type for return and arguments
    emit->return_vtype = VTYPE_PYOBJ;
    for (mp_uint_t i = 0; i < emit->scope->num_pos_args; i++) {
        emit->local_vtype[i] = VTYPE_PYOBJ;
    }

    // local variables begin unbound, and have unknown type
    for (mp_uint_t i = emit->scope->num_pos_args; i < emit->local_vtype_alloc; i++) {
        emit->local_vtype[i] = VTYPE_UNBOUND;
    }

    // values on stack begin unbound
    for (mp_uint_t i = 0; i < emit->stack_info_alloc; i++) {
        emit->stack_info[i].kind = STACK_VALUE;
        emit->stack_info[i].vtype = VTYPE_UNBOUND;
    }

    ASM_START_PASS(emit->as, pass == MP_PASS_EMIT ? ASM_PASS_EMIT : ASM_PASS_COMPUTE);

    // entry to function
    int num_locals = 0;
    if (pass > MP_PASS_SCOPE) {
        num_locals = scope->num_locals - REG_LOCAL_NUM;
        if (num_locals < 0) {
            num_locals = 0;
        }
        emit->stack_start = num_locals;
        num_locals += scope->stack_size;
    }
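    // Worked example (REG_LOCAL_NUM is 3): a scope with 5 locals and a max
    // stack depth of 4 keeps locals 0-2 in registers and spills locals 3-4
    // to memory slots 0-1, so stack_start is 2 and ASM_ENTRY reserves
    // 2 + 4 = 6 slots.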
    ASM_ENTRY(emit->as, num_locals);

    // initialise locals from parameters
#if N_X64
    for (int i = 0; i < scope->num_pos_args; i++) {
        if (i == 0) {
            ASM_MOV_REG_REG(emit->as, REG_LOCAL_1, REG_ARG_1);
        } else if (i == 1) {
            ASM_MOV_REG_REG(emit->as, REG_LOCAL_2, REG_ARG_2);
        } else if (i == 2) {
            ASM_MOV_REG_REG(emit->as, REG_LOCAL_3, REG_ARG_3);
        } else if (i == 3) {
            asm_x64_mov_r64_to_local(emit->as, REG_ARG_4, i - REG_LOCAL_NUM);
        } else {
            // TODO not implemented
            assert(0);
        }
    }
#elif N_X86
    for (int i = 0; i < scope->num_pos_args; i++) {
        if (i == 0) {
            asm_x86_mov_arg_to_r32(emit->as, i, REG_LOCAL_1);
        } else if (i == 1) {
            asm_x86_mov_arg_to_r32(emit->as, i, REG_LOCAL_2);
        } else if (i == 2) {
            asm_x86_mov_arg_to_r32(emit->as, i, REG_LOCAL_3);
        } else {
            asm_x86_mov_arg_to_r32(emit->as, i, REG_TEMP0);
            asm_x86_mov_r32_to_local(emit->as, REG_TEMP0, i - REG_LOCAL_NUM);
        }
    }
#elif N_THUMB
    for (int i = 0; i < scope->num_pos_args; i++) {
        if (i == 0) {
            ASM_MOV_REG_REG(emit->as, REG_LOCAL_1, REG_ARG_1);
        } else if (i == 1) {
            ASM_MOV_REG_REG(emit->as, REG_LOCAL_2, REG_ARG_2);
        } else if (i == 2) {
            ASM_MOV_REG_REG(emit->as, REG_LOCAL_3, REG_ARG_3);
        } else if (i == 3) {
            asm_thumb_mov_local_reg(emit->as, i - REG_LOCAL_NUM, REG_ARG_4);
        } else {
            // TODO not implemented
            assert(0);
        }
    }

    // TODO don't load r7 if we don't need it
    asm_thumb_mov_reg_i32(emit->as, ASM_THUMB_REG_R7, (mp_uint_t)mp_fun_table);
#elif N_ARM
    for (int i = 0; i < scope->num_pos_args; i++) {
        if (i == 0) {
            ASM_MOV_REG_REG(emit->as, REG_LOCAL_1, REG_ARG_1);
        } else if (i == 1) {
            ASM_MOV_REG_REG(emit->as, REG_LOCAL_2, REG_ARG_2);
        } else if (i == 2) {
            ASM_MOV_REG_REG(emit->as, REG_LOCAL_3, REG_ARG_3);
        } else if (i == 3) {
            asm_arm_mov_local_reg(emit->as, i - REG_LOCAL_NUM, REG_ARG_4);
        } else {
            // TODO not implemented
            assert(0);
        }
    }

    // TODO don't load r7 if we don't need it
    asm_arm_mov_reg_i32(emit->as, ASM_ARM_REG_R7, (mp_uint_t)mp_fun_table);
#else
    #error not implemented
#endif
}

STATIC void emit_native_end_pass(emit_t *emit) {
    if (!emit->last_emit_was_return_value) {
        ASM_EXIT(emit->as);
    }
    ASM_END_PASS(emit->as);

    // check stack is back to zero size
    if (emit->stack_size != 0) {
        printf("ERROR: stack size not back to zero; got %d\n", emit->stack_size);
    }

    if (emit->pass == MP_PASS_EMIT) {
        void *f = ASM_GET_CODE(emit->as);
        mp_uint_t f_len = ASM_GET_CODE_SIZE(emit->as);

        // compute type signature
        // note that the lower 2 bits of a vtype are the correct MP_NATIVE_TYPE_xxx
        mp_uint_t type_sig = emit->return_vtype & 3;
        for (mp_uint_t i = 0; i < emit->scope->num_pos_args; i++) {
            type_sig |= (emit->local_vtype[i] & 3) << (i * 2 + 2);
        }
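        // Worked example, using the MP_NATIVE_TYPE_xxx values from
        // runtime0.h (obj=0, bool=1, int=2, uint=3): a function returning
        // int with args (obj, uint) gives
        //   type_sig = 2 | (0 << 2) | (3 << 4) = 0x32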

        mp_emit_glue_assign_native(emit->scope->raw_code, emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY, f, f_len, emit->scope->num_pos_args, type_sig);
    }
}

STATIC bool emit_native_last_emit_was_return_value(emit_t *emit) {
    return emit->last_emit_was_return_value;
}

STATIC void adjust_stack(emit_t *emit, mp_int_t stack_size_delta) {
    DEBUG_printf("  adjust_stack; stack_size=%d, delta=%d\n", emit->stack_size, stack_size_delta);
    assert((mp_int_t)emit->stack_size + stack_size_delta >= 0);
    emit->stack_size += stack_size_delta;
    if (emit->pass > MP_PASS_SCOPE && emit->stack_size > emit->scope->stack_size) {
        emit->scope->stack_size = emit->stack_size;
    }
}

STATIC void emit_native_adjust_stack_size(emit_t *emit, mp_int_t delta) {
    // If we are adjusting the stack in a positive direction (pushing) then we
    // need to fill in values for the stack kind and vtype of the newly-pushed
    // entries.  These should be set to "value" (ie not reg or imm) because we
    // should only need to adjust the stack due to a jump to this part in the
    // code (and hence we have settled the stack before the jump).
    for (mp_int_t i = 0; i < delta; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size + i];
        si->kind = STACK_VALUE;
        si->vtype = VTYPE_PYOBJ; // XXX we don't know the vtype...
    }
    adjust_stack(emit, delta);
}

STATIC void emit_native_set_source_line(emit_t *emit, mp_uint_t source_line) {
}

/*
STATIC void emit_pre_raw(emit_t *emit, int stack_size_delta) {
    adjust_stack(emit, stack_size_delta);
    emit->last_emit_was_return_value = false;
}
*/

// this must be called at start of emit functions
STATIC void emit_native_pre(emit_t *emit) {
    emit->last_emit_was_return_value = false;
    // settle the stack
    /*
    if (regs_needed != 0) {
        for (int i = 0; i < emit->stack_size; i++) {
            switch (emit->stack_info[i].kind) {
                case STACK_VALUE:
                    break;

                case STACK_REG:
                    // TODO only push reg if in regs_needed
                    emit->stack_info[i].kind = STACK_VALUE;
                    ASM_MOV_REG_TO_LOCAL(emit->as, emit->stack_info[i].u_reg, emit->stack_start + i);
                    break;

                case STACK_IMM:
                    // don't think we ever need to push imms for settling
                    //ASM_MOV_IMM_TO_LOCAL(emit->last_imm, emit->stack_start + i);
                    break;
            }
        }
    }
    */
}

// depth==0 is top, depth==1 is before top, etc
STATIC stack_info_t *peek_stack(emit_t *emit, mp_uint_t depth) {
    return &emit->stack_info[emit->stack_size - 1 - depth];
}

// depth==0 is top, depth==1 is before top, etc
STATIC vtype_kind_t peek_vtype(emit_t *emit, mp_uint_t depth) {
    return peek_stack(emit, depth)->vtype;
}

// pos=1 is TOS, pos=2 is next, etc
// use pos=0 for no skipping
STATIC void need_reg_single(emit_t *emit, int reg_needed, int skip_stack_pos) {
    skip_stack_pos = emit->stack_size - skip_stack_pos;
    for (int i = 0; i < emit->stack_size; i++) {
        if (i != skip_stack_pos) {
            stack_info_t *si = &emit->stack_info[i];
            if (si->kind == STACK_REG && si->u_reg == reg_needed) {
                si->kind = STACK_VALUE;
                ASM_MOV_REG_TO_LOCAL(emit->as, si->u_reg, emit->stack_start + i);
            }
        }
    }
}

STATIC void need_reg_all(emit_t *emit) {
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_REG) {
            si->kind = STACK_VALUE;
            ASM_MOV_REG_TO_LOCAL(emit->as, si->u_reg, emit->stack_start + i);
        }
    }
}

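// Settling the stack flushes every entry that is cached in a register or
// held as an immediate out to its canonical memory slot, so that all
// control-flow paths arriving at the same point agree on where each stack
// value lives.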
STATIC void need_stack_settled(emit_t *emit) {
    DEBUG_printf("  need_stack_settled; stack_size=%d\n", emit->stack_size);
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_REG) {
            DEBUG_printf("    reg(%u) to local(%u)\n", si->u_reg, emit->stack_start + i);
            si->kind = STACK_VALUE;
            ASM_MOV_REG_TO_LOCAL(emit->as, si->u_reg, emit->stack_start + i);
        }
    }
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_IMM) {
            DEBUG_printf("    imm(" INT_FMT ") to local(%u)\n", si->u_imm, emit->stack_start + i);
            si->kind = STACK_VALUE;
            ASM_MOV_IMM_TO_LOCAL_USING(emit->as, si->u_imm, emit->stack_start + i, REG_TEMP0);
        }
    }
}

// pos=1 is TOS, pos=2 is next, etc
STATIC void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int reg_dest) {
    need_reg_single(emit, reg_dest, pos);
    stack_info_t *si = &emit->stack_info[emit->stack_size - pos];
    *vtype = si->vtype;
    switch (si->kind) {
        case STACK_VALUE:
            ASM_MOV_LOCAL_TO_REG(emit->as, emit->stack_start + emit->stack_size - pos, reg_dest);
            break;

        case STACK_REG:
            if (si->u_reg != reg_dest) {
                ASM_MOV_REG_REG(emit->as, reg_dest, si->u_reg);
            }
            break;

        case STACK_IMM:
            ASM_MOV_IMM_TO_REG(emit->as, si->u_imm, reg_dest);
            break;
    }
}

// does an efficient X=pop(); discard(); push(X)
// needs a (non-temp) register in case the popped element was stored in the stack
STATIC void emit_fold_stack_top(emit_t *emit, int reg_dest) {
    stack_info_t *si = &emit->stack_info[emit->stack_size - 2];
    si[0] = si[1];
    if (si->kind == STACK_VALUE) {
        // if folded element was on the stack we need to put it in a register
        ASM_MOV_LOCAL_TO_REG(emit->as, emit->stack_start + emit->stack_size - 1, reg_dest);
        si->kind = STACK_REG;
        si->u_reg = reg_dest;
    }
    adjust_stack(emit, -1);
}

// If stacked value is in a register and the register is not r1 or r2, then
// *reg_dest is set to that register.  Otherwise the value is put in *reg_dest.
STATIC void emit_pre_pop_reg_flexible(emit_t *emit, vtype_kind_t *vtype, int *reg_dest, int not_r1, int not_r2) {
    emit->last_emit_was_return_value = false;
    stack_info_t *si = peek_stack(emit, 0);
    if (si->kind == STACK_REG && si->u_reg != not_r1 && si->u_reg != not_r2) {
        *vtype = si->vtype;
        *reg_dest = si->u_reg;
        need_reg_single(emit, *reg_dest, 1);
    } else {
        emit_access_stack(emit, 1, vtype, *reg_dest);
    }
    adjust_stack(emit, -1);
}

STATIC void emit_pre_pop_discard(emit_t *emit) {
    emit->last_emit_was_return_value = false;
    adjust_stack(emit, -1);
}

STATIC void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest) {
    emit->last_emit_was_return_value = false;
    emit_access_stack(emit, 1, vtype, reg_dest);
    adjust_stack(emit, -1);
}

STATIC void emit_pre_pop_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb) {
    emit_pre_pop_reg(emit, vtypea, rega);
    emit_pre_pop_reg(emit, vtypeb, regb);
}

STATIC void emit_pre_pop_reg_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb, vtype_kind_t *vtypec, int regc) {
    emit_pre_pop_reg(emit, vtypea, rega);
    emit_pre_pop_reg(emit, vtypeb, regb);
    emit_pre_pop_reg(emit, vtypec, regc);
}

STATIC void emit_post(emit_t *emit) {
}

STATIC void emit_post_top_set_vtype(emit_t *emit, vtype_kind_t new_vtype) {
    stack_info_t *si = &emit->stack_info[emit->stack_size - 1];
    si->vtype = new_vtype;
}

STATIC void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg) {
    stack_info_t *si = &emit->stack_info[emit->stack_size];
    si->vtype = vtype;
    si->kind = STACK_REG;
    si->u_reg = reg;
    adjust_stack(emit, 1);
}

STATIC void emit_post_push_imm(emit_t *emit, vtype_kind_t vtype, mp_int_t imm) {
    stack_info_t *si = &emit->stack_info[emit->stack_size];
    si->vtype = vtype;
    si->kind = STACK_IMM;
    si->u_imm = imm;
    adjust_stack(emit, 1);
}

STATIC void emit_post_push_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
}

STATIC void emit_post_push_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
    emit_post_push_reg(emit, vtypec, regc);
}

STATIC void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc, vtype_kind_t vtyped, int regd) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
    emit_post_push_reg(emit, vtypec, regc);
    emit_post_push_reg(emit, vtyped, regd);
}

STATIC void emit_call(emit_t *emit, mp_fun_kind_t fun_kind) {
    need_reg_all(emit);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
}

STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
    need_reg_all(emit);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val, arg_reg);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
}
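// Hypothetical usage (illustrative, not from this excerpt): loading a global
// by qstr becomes a call to the runtime helper mp_load_global(qst), emitted
// roughly as:
//   emit_call_with_imm_arg(emit, MP_F_LOAD_GLOBAL, qst, REG_ARG_1);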

// the first arg is stored in the code aligned on a mp_uint_t boundary
STATIC void emit_call_with_imm_arg_aligned(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val, int arg_reg) {
    need_reg_all(emit);
    ASM_MOV_ALIGNED_IMM_TO_REG(emit->as, arg_val, arg_reg);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
}

STATIC void emit_call_with_2_imm_args(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2) {
    need_reg_all(emit);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val1, arg_reg1);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val2, arg_reg2);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
}

// the first arg is stored in the code aligned on a mp_uint_t boundary
STATIC void emit_call_with_3_imm_args_and_first_aligned(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_t arg_val1, int arg_reg1, mp_int_t arg_val2, int arg_reg2, mp_int_t arg_val3, int arg_reg3) {
    need_reg_all(emit);
    ASM_MOV_ALIGNED_IMM_TO_REG(emit->as, arg_val1, arg_reg1);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val2, arg_reg2);
    ASM_MOV_IMM_TO_REG(emit->as, arg_val3, arg_reg3);
    ASM_CALL_IND(emit->as, mp_fun_table[fun_kind], fun_kind);
}

// vtype of all n_pop objects is VTYPE_PYOBJ
// Will convert any items that are not VTYPE_PYOBJ to this type and put them back on the stack.
// If any conversions of non-immediate values are needed, then it uses REG_ARG_1, REG_ARG_2 and REG_RET.
// Otherwise, it does not use any temporary registers (but may use reg_dest before loading it with stack pointer).
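// Typical use (a hedged sketch; the callers are not shown in this excerpt):
// runtime helpers such as mp_build_tuple take (n_args, pointer-to-items), so
// the emitter settles the top n_pop entries into their memory slots and then
// passes the address of the lowest one.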
STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_dest, mp_uint_t n_pop) {
    need_reg_all(emit);

    // First, store any immediate values to their respective place on the stack.
    for (mp_uint_t i = 0; i < n_pop; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
        // must push any imm's to stack
        // must convert them to VTYPE_PYOBJ for viper code
        if (si->kind == STACK_IMM) {
            si->kind = STACK_VALUE;
            switch (si->vtype) {
                case VTYPE_PYOBJ:
                    ASM_MOV_IMM_TO_LOCAL_USING(emit->as, si->u_imm, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    break;
                case VTYPE_BOOL:
                    if (si->u_imm == 0) {
                        ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (mp_uint_t)mp_const_false, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    } else {
                        ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (mp_uint_t)mp_const_true, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    }
                    si->vtype = VTYPE_PYOBJ;
                    break;
                case VTYPE_INT:
                case VTYPE_UINT:
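                    // box as a MicroPython small int: the value is shifted
                    // left one bit with the low tag bit set (the top bit is
                    // lost, consistent with viper's truncating int semantics)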
                    ASM_MOV_IMM_TO_LOCAL_USING(emit->as, (si->u_imm << 1) | 1, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    si->vtype = VTYPE_PYOBJ;
                    break;
                default:
                    // not handled
                    assert(0);
            }
        }
