// Essentially, normal Python has one type: the Python object.
// Viper has more than one type, and is a superset of (and hence more complicated than) Python.
// If you declare everything in Viper as a Python object (i.e. omit type decls) then
// it should in principle behave exactly the same as native Python.
// Having types means having more opcodes, like binary_op_nat_nat, binary_op_nat_obj etc.
// In practice we won't have a VM but rather do this in asm, which is actually very minimal.

// Because it breaks strict Python equivalence it should be a completely separate
// decorator.  It breaks equivalence because overflow on integers wraps around.
// It shouldn't break equivalence if you don't use the new types, but since the
// type decls might be used in normal Python for other reasons, it's probably safest,
// cleanest and clearest to make it a separate decorator.

// Actually, it does break equivalence because integers default to native integers,
// not Python objects.

// for x in l[0:8]: can be compiled into a native loop if l has pointer type
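// As a purely illustrative sketch (the decorator name and annotation syntax below are
// hypothetical, nothing here is final), typed code might look like:
//
//     @viper
//     def total(buf: ptr, n: int) -> int:
//         s = 0             # defaults to a native integer, not a Python object
//         for i in range(n):
//             s += buf[i]   # loads directly through the pointer, no boxing
//         return s          # native arithmetic, wraps on overflow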

#include <unistd.h>
#include <stdlib.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>

#include "misc.h"
#include "mpconfig.h"
#include "qstr.h"
#include "lexer.h"
#include "parse.h"
#include "scope.h"
#include "runtime0.h"
#include "emit.h"
#include "obj.h"
#include "runtime.h"

// wrapper around everything in this file
#if (MICROPY_EMIT_X64 && N_X64) || (MICROPY_EMIT_THUMB && N_THUMB)

#if N_X64

// x64 specific stuff

#include "asmx64.h"

#define REG_LOCAL_1 (REG_RBX)
#define REG_LOCAL_NUM (1)

#define EXPORT_FUN(name) emit_native_x64_##name

#define REG_TEMP0 (REG_RAX)
#define REG_TEMP1 (REG_RDI)
#define REG_TEMP2 (REG_RSI)
#define ASM_MOV_REG_TO_LOCAL(reg, local_num) asm_x64_mov_r64_to_local(emit->as, (reg), (local_num))
#define ASM_MOV_IMM_TO_REG(imm, reg) asm_x64_mov_i64_to_r64_optimised(emit->as, (imm), (reg))
#define ASM_MOV_IMM_TO_LOCAL_USING(imm, local_num, reg_temp) do { asm_x64_mov_i64_to_r64_optimised(emit->as, (imm), (reg_temp)); asm_x64_mov_r64_to_local(emit->as, (reg_temp), (local_num)); } while (false)
#define ASM_MOV_LOCAL_TO_REG(local_num, reg) asm_x64_mov_local_to_r64(emit->as, (local_num), (reg))
#define ASM_MOV_REG_TO_REG(reg_src, reg_dest) asm_x64_mov_r64_to_r64(emit->as, (reg_src), (reg_dest))
#define ASM_MOV_LOCAL_ADDR_TO_REG(local_num, reg) asm_x64_mov_local_addr_to_r64(emit->as, (local_num), (reg))

#elif N_THUMB

// thumb specific stuff

#include "asmthumb.h"

#define REG_LOCAL_1 (REG_R4)
#define REG_LOCAL_2 (REG_R5)
#define REG_LOCAL_3 (REG_R6)
#define REG_LOCAL_NUM (3)

#define EXPORT_FUN(name) emit_native_thumb_##name

#define REG_TEMP0 (REG_R0)
#define REG_TEMP1 (REG_R1)
#define REG_TEMP2 (REG_R2)
#define ASM_MOV_REG_TO_LOCAL(reg, local_num) asm_thumb_mov_local_reg(emit->as, (local_num), (reg))
#define ASM_MOV_IMM_TO_REG(imm, reg) asm_thumb_mov_reg_i32_optimised(emit->as, (reg), (imm))
#define ASM_MOV_IMM_TO_LOCAL_USING(imm, local_num, reg_temp) do { asm_thumb_mov_reg_i32_optimised(emit->as, (reg_temp), (imm)); asm_thumb_mov_local_reg(emit->as, (local_num), (reg_temp)); } while (false)
#define ASM_MOV_LOCAL_TO_REG(local_num, reg) asm_thumb_mov_reg_local(emit->as, (reg), (local_num))
#define ASM_MOV_REG_TO_REG(reg_src, reg_dest) asm_thumb_mov_reg_reg(emit->as, (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG(local_num, reg) asm_thumb_mov_reg_local_addr(emit->as, (reg), (local_num))

#endif

typedef enum {
    STACK_VALUE, // value is stored in its memory slot on the machine stack
    STACK_REG,   // value is currently cached in register u_reg
    STACK_IMM,   // value is the constant u_imm, not yet written anywhere
} stack_info_kind_t;

typedef enum {
    VTYPE_UNBOUND,       // type not yet known (e.g. a viper local before its first assignment)
    VTYPE_PYOBJ,         // boxed Python object
    VTYPE_BOOL,          // raw machine boolean (0 or 1)
    VTYPE_INT,           // raw machine integer
    VTYPE_PTR,           // raw pointer
    VTYPE_PTR_NONE,      // raw NULL pointer (used for None in viper mode)
    VTYPE_BUILTIN_V_INT, // the built-in v_int type
} vtype_kind_t;

typedef struct _stack_info_t {
    vtype_kind_t vtype;
    stack_info_kind_t kind;
    union {
        int u_reg;
        machine_int_t u_imm;
    };
} stack_info_t;

struct _emit_t {
    int pass;

    bool do_viper_types;

    int local_vtype_alloc;
    vtype_kind_t *local_vtype;

    int stack_info_alloc;
    stack_info_t *stack_info;

    int stack_start;
    int stack_size;

    bool last_emit_was_return_value;

    scope_t *scope;

#if N_X64
    asm_x64_t *as;
#elif N_THUMB
    asm_thumb_t *as;
#endif
};

emit_t *EXPORT_FUN(new)(uint max_num_labels) {
    emit_t *emit = m_new(emit_t, 1);
    emit->do_viper_types = false;
    emit->local_vtype = NULL;
    emit->stack_info = NULL;
#if N_X64
    emit->as = asm_x64_new(max_num_labels);
#elif N_THUMB
    emit->as = asm_thumb_new(max_num_labels);
#endif
    return emit;
}

static void emit_native_free(emit_t *emit) {
#if N_X64
    asm_x64_free(emit->as, false);
#elif N_THUMB
    asm_thumb_free(emit->as, false);
#endif
    m_del_obj(emit_t, emit);
}

static void emit_native_set_viper_types(emit_t *emit, bool do_viper_types) {
    emit->do_viper_types = do_viper_types;
}

static void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
    emit->pass = pass;
    emit->stack_start = 0;
    emit->stack_size = 0;
    emit->last_emit_was_return_value = false;
    emit->scope = scope;

    if (emit->local_vtype == NULL) {
        emit->local_vtype_alloc = scope->num_locals + 20; // XXX should be maximum over all scopes
        emit->local_vtype = m_new(vtype_kind_t, emit->local_vtype_alloc);
    }
    if (emit->stack_info == NULL) {
        emit->stack_info_alloc = scope->stack_size + 50; // XXX don't know stack size on entry, should be maximum over all scopes
        emit->stack_info = m_new(stack_info_t, emit->stack_info_alloc);
    }

    if (emit->do_viper_types) {
        // TODO set types of arguments based on type signature
        for (int i = 0; i < emit->local_vtype_alloc; i++) {
            emit->local_vtype[i] = VTYPE_UNBOUND;
        }
        for (int i = 0; i < emit->stack_info_alloc; i++) {
            emit->stack_info[i].kind = STACK_VALUE;
            emit->stack_info[i].vtype = VTYPE_UNBOUND;
        }
    } else {
        for (int i = 0; i < emit->local_vtype_alloc; i++) {
            emit->local_vtype[i] = VTYPE_PYOBJ;
        }
        for (int i = 0; i < emit->stack_info_alloc; i++) {
            emit->stack_info[i].kind = STACK_VALUE;
            emit->stack_info[i].vtype = VTYPE_PYOBJ;
        }
    }

#if N_X64
    asm_x64_start_pass(emit->as, pass);
#elif N_THUMB
    asm_thumb_start_pass(emit->as, pass);
#endif

    // entry to function
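    // machine-stack layout: locals that don't fit in registers come first, followed by
    // the Python value stack (emit->stack_start marks where the value stack begins)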
    int num_locals = 0;
    if (pass > PASS_1) {
        num_locals = scope->num_locals - REG_LOCAL_NUM;
        if (num_locals < 0) {
            num_locals = 0;
        }
        emit->stack_start = num_locals;
        num_locals += scope->stack_size;
    }
#if N_X64
    asm_x64_entry(emit->as, num_locals);
#elif N_THUMB
    asm_thumb_entry(emit->as, num_locals);
#endif

    // initialise locals from parameters
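    // the first REG_LOCAL_NUM locals are kept in callee-saved registers; remaining
    // parameters are copied into their slots on the machine stack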
#if N_X64
    for (int i = 0; i < scope->num_params; i++) {
        if (i == 0) {
            asm_x64_mov_r64_to_r64(emit->as, REG_ARG_1, REG_LOCAL_1);
        } else if (i == 1) {
            asm_x64_mov_r64_to_local(emit->as, REG_ARG_2, i - 1);
        } else if (i == 2) {
            asm_x64_mov_r64_to_local(emit->as, REG_ARG_3, i - 1);
        } else {
            // TODO not implemented
            assert(0);
        }
    }
#elif N_THUMB
    for (int i = 0; i < scope->num_params; i++) {
        if (i == 0) {
            asm_thumb_mov_reg_reg(emit->as, REG_LOCAL_1, REG_ARG_1);
        } else if (i == 1) {
            asm_thumb_mov_reg_reg(emit->as, REG_LOCAL_2, REG_ARG_2);
        } else if (i == 2) {
            asm_thumb_mov_reg_reg(emit->as, REG_LOCAL_3, REG_ARG_3);
        } else if (i == 3) {
            asm_thumb_mov_local_reg(emit->as, i - REG_LOCAL_NUM, REG_ARG_4);
        } else {
            // TODO not implemented
            assert(0);
        }
    }

    asm_thumb_mov_reg_i32(emit->as, REG_R7, (machine_uint_t)rt_fun_table); // keep the runtime function table in r7 for indirect runtime calls
#endif
}

static void emit_native_end_pass(emit_t *emit) {
#if N_X64
    if (!emit->last_emit_was_return_value) {
        asm_x64_exit(emit->as);
    }
    asm_x64_end_pass(emit->as);
#elif N_THUMB
    if (!emit->last_emit_was_return_value) {
        asm_thumb_exit(emit->as);
    }
    asm_thumb_end_pass(emit->as);
#endif

    // check stack is back to zero size
    if (emit->stack_size != 0) {
        printf("ERROR: stack size not back to zero; got %d\n", emit->stack_size);
    }

    if (emit->pass == PASS_3) {
#if N_X64
        void *f = asm_x64_get_code(emit->as);
        rt_assign_native_code(emit->scope->unique_code_id, f, asm_x64_get_code_size(emit->as), emit->scope->num_params);
#elif N_THUMB
        void *f = asm_thumb_get_code(emit->as);
        rt_assign_native_code(emit->scope->unique_code_id, f, asm_thumb_get_code_size(emit->as), emit->scope->num_params);
#endif
    }
}

static bool emit_native_last_emit_was_return_value(emit_t *emit) {
    return emit->last_emit_was_return_value;
}

static int emit_native_get_stack_size(emit_t *emit) {
    return emit->stack_size;
}

static void emit_native_set_stack_size(emit_t *emit, int size) {
    emit->stack_size = size;
}

static void emit_native_set_source_line(emit_t *emit, int source_line) {
}

static void adjust_stack(emit_t *emit, int stack_size_delta) {
    emit->stack_size += stack_size_delta;
    assert(emit->stack_size >= 0);
    if (emit->pass > PASS_1 && emit->stack_size > emit->scope->stack_size) {
        emit->scope->stack_size = emit->stack_size;
    }
}

/*
static void emit_pre_raw(emit_t *emit, int stack_size_delta) {
    adjust_stack(emit, stack_size_delta);
    emit->last_emit_was_return_value = false;
}
*/

// this must be called at start of emit functions
static void emit_pre(emit_t *emit) {
    emit->last_emit_was_return_value = false;
    // settle the stack
    /*
    if (regs_needed != 0) {
        for (int i = 0; i < emit->stack_size; i++) {
            switch (emit->stack_info[i].kind) {
                case STACK_VALUE:
                    break;

                case STACK_REG:
                    // TODO only push reg if in regs_needed
                    emit->stack_info[i].kind = STACK_VALUE;
                    ASM_MOV_REG_TO_LOCAL(emit->stack_info[i].u_reg, emit->stack_start + i);
                    break;

                case STACK_IMM:
                    // don't think we ever need to push imms for settling
                    //ASM_MOV_IMM_TO_LOCAL(emit->last_imm, emit->stack_start + i);
                    break;
            }
        }
    }
    */
}

static vtype_kind_t peek_vtype(emit_t *emit) {
    return emit->stack_info[emit->stack_size - 1].vtype;
}

// pos=1 is TOS, pos=2 is next, etc
// use pos=0 for no skipping
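// frees the given register by flushing any stack entry (other than the one at
// skip_stack_pos) that is currently cached in that register out to its memory slot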
static void need_reg_single(emit_t *emit, int reg_needed, int skip_stack_pos) {
    skip_stack_pos = emit->stack_size - skip_stack_pos;
    for (int i = 0; i < emit->stack_size; i++) {
        if (i != skip_stack_pos) {
            stack_info_t *si = &emit->stack_info[i];
            if (si->kind == STACK_REG && si->u_reg == reg_needed) {
                si->kind = STACK_VALUE;
                ASM_MOV_REG_TO_LOCAL(si->u_reg, emit->stack_start + i);
            }
        }
    }
}

static void need_reg_all(emit_t *emit) {
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_REG) {
            si->kind = STACK_VALUE;
            ASM_MOV_REG_TO_LOCAL(si->u_reg, emit->stack_start + i);
        }
    }
}

static void need_stack_settled(emit_t *emit) {
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_REG) {
            si->kind = STACK_VALUE;
            ASM_MOV_REG_TO_LOCAL(si->u_reg, emit->stack_start + i);
        }
    }
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_IMM) {
            ASM_MOV_IMM_TO_LOCAL_USING(si->u_imm, emit->stack_start + i, REG_TEMP0);
        }
    }
}

// pos=1 is TOS, pos=2 is next, etc
static void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int reg_dest) {
    need_reg_single(emit, reg_dest, pos);
    stack_info_t *si = &emit->stack_info[emit->stack_size - pos];
    *vtype = si->vtype;
    switch (si->kind) {
        case STACK_VALUE:
            ASM_MOV_LOCAL_TO_REG(emit->stack_start + emit->stack_size - pos, reg_dest);
            break;

        case STACK_REG:
            if (si->u_reg != reg_dest) {
                ASM_MOV_REG_TO_REG(si->u_reg, reg_dest);
            }
            break;

        case STACK_IMM:
            ASM_MOV_IMM_TO_REG(si->u_imm, reg_dest);
            break;
    }
}

static void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest) {
    emit->last_emit_was_return_value = false;
    emit_access_stack(emit, 1, vtype, reg_dest);
    adjust_stack(emit, -1);
}

static void emit_pre_pop_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb) {
    emit_pre_pop_reg(emit, vtypea, rega);
    emit_pre_pop_reg(emit, vtypeb, regb);
}

static void emit_pre_pop_reg_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb, vtype_kind_t *vtypec, int regc) {
    emit_pre_pop_reg(emit, vtypea, rega);
    emit_pre_pop_reg(emit, vtypeb, regb);
    emit_pre_pop_reg(emit, vtypec, regc);
}

static void emit_post(emit_t *emit) {
}

static void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg) {
    stack_info_t *si = &emit->stack_info[emit->stack_size];
    si->vtype = vtype;
    si->kind = STACK_REG;
    si->u_reg = reg;
    adjust_stack(emit, 1);
}

static void emit_post_push_imm(emit_t *emit, vtype_kind_t vtype, machine_int_t imm) {
    stack_info_t *si = &emit->stack_info[emit->stack_size];
    si->vtype = vtype;
    si->kind = STACK_IMM;
    si->u_imm = imm;
    adjust_stack(emit, 1);
}

static void emit_post_push_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
}

static void emit_post_push_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
    emit_post_push_reg(emit, vtypec, regc);
}

static void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc, vtype_kind_t vtyped, int regd) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
    emit_post_push_reg(emit, vtypec, regc);
    emit_post_push_reg(emit, vtyped, regd);
}

// vtype of all n_pop objects is VTYPE_PYOBJ
// does not use any temporary registers (but may use reg_dest before loading it with stack pointer)
// TODO this needs some thinking for viper code
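// flushes the top n_pop stack entries to their memory slots (boxing immediates as
// Python objects first) and loads reg_dest with their address, so a runtime helper
// can read the arguments directly from memory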
static void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, int reg_dest, int n_pop) {
    need_reg_all(emit);
    for (int i = 0; i < n_pop; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
        // must push any imm's to stack
        // must convert them to VTYPE_PYOBJ for viper code
        if (si->kind == STACK_IMM) {
            si->kind = STACK_VALUE;
            switch (si->vtype) {
                case VTYPE_PYOBJ:
                    ASM_MOV_IMM_TO_LOCAL_USING(si->u_imm, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    break;
                case VTYPE_BOOL:
                    si->vtype = VTYPE_PYOBJ;
                    if (si->u_imm == 0) {
                        ASM_MOV_IMM_TO_LOCAL_USING((machine_uint_t)mp_const_false, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    } else {
                        ASM_MOV_IMM_TO_LOCAL_USING((machine_uint_t)mp_const_true, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    }
                    break;
                case VTYPE_INT:
                    si->vtype = VTYPE_PYOBJ;
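                    // box the raw integer as a Python small int (tagged as (value << 1) | 1)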
                    ASM_MOV_IMM_TO_LOCAL_USING((si->u_imm << 1) | 1, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
                    break;
                default:
                    // not handled
                    assert(0);
            }
        }
        assert(si->kind == STACK_VALUE);
        assert(si->vtype == VTYPE_PYOBJ);
    }
    ASM_MOV_LOCAL_ADDR_TO_REG(emit->stack_start + emit->stack_size - 1, reg_dest);
    adjust_stack(emit, -n_pop);
}

// vtype of all n_push objects is VTYPE_PYOBJ
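// reserves space for n_push values on the Python value stack and loads reg_dest with
// its address, for runtime helpers that write several results directly to memory
// (e.g. the two values written by rt_load_method)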
static void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, int reg_dest, int n_push) {
    need_reg_all(emit);
    for (int i = 0; i < n_push; i++) {
        emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
        emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
    }
    ASM_MOV_LOCAL_ADDR_TO_REG(emit->stack_start + emit->stack_size + n_push - 1, reg_dest);
    adjust_stack(emit, n_push);
}

static void emit_call(emit_t *emit, rt_fun_kind_t fun_kind, void *fun) {
    need_reg_all(emit);
#if N_X64
    asm_x64_call_ind(emit->as, fun, REG_RAX);
#elif N_THUMB
    asm_thumb_bl_ind(emit->as, rt_fun_table[fun_kind], fun_kind, REG_R3);
#endif
}

static void emit_call_with_imm_arg(emit_t *emit, rt_fun_kind_t fun_kind, void *fun, machine_int_t arg_val, int arg_reg) {
    need_reg_all(emit);
    ASM_MOV_IMM_TO_REG(arg_val, arg_reg);
#if N_X64
    asm_x64_call_ind(emit->as, fun, REG_RAX);
#elif N_THUMB
    asm_thumb_bl_ind(emit->as, rt_fun_table[fun_kind], fun_kind, REG_R3);
#endif
}

static void emit_native_load_id(emit_t *emit, qstr qstr) {
    // check for built-ins
    if (strcmp(qstr_str(qstr), "v_int") == 0) {
        assert(0);
        emit_pre(emit);
        //emit_post_push_blank(emit, VTYPE_BUILTIN_V_INT);

    // not a built-in, so do usual thing
    } else {
        emit_common_load_id(emit, &EXPORT_FUN(method_table), emit->scope, qstr);
    }
}

static void emit_native_store_id(emit_t *emit, qstr qstr) {
    // TODO check for built-ins and disallow
    emit_common_store_id(emit, &EXPORT_FUN(method_table), emit->scope, qstr);
}

static void emit_native_delete_id(emit_t *emit, qstr qstr) {
    // TODO check for built-ins and disallow
    emit_common_delete_id(emit, &EXPORT_FUN(method_table), emit->scope, qstr);
}

static void emit_native_label_assign(emit_t *emit, int l) {
    emit_pre(emit);
    // need to commit stack because we can jump here from elsewhere
    need_stack_settled(emit);
#if N_X64
    asm_x64_label_assign(emit->as, l);
#elif N_THUMB
    asm_thumb_label_assign(emit->as, l);
#endif
    emit_post(emit);
}

static void emit_native_import_name(emit_t *emit, qstr qstr) {
    // not implemented
    assert(0);
}

static void emit_native_import_from(emit_t *emit, qstr qstr) {
    // not implemented
    assert(0);
}

static void emit_native_import_star(emit_t *emit) {
    // not implemented
    assert(0);
}

static void emit_native_load_const_tok(emit_t *emit, mp_token_kind_t tok) {
    emit_pre(emit);
    int vtype;
    machine_uint_t val;
    if (emit->do_viper_types) {
        switch (tok) {
            case MP_TOKEN_KW_NONE: vtype = VTYPE_PTR_NONE; val = 0; break;
            case MP_TOKEN_KW_FALSE: vtype = VTYPE_BOOL; val = 0; break;
            case MP_TOKEN_KW_TRUE: vtype = VTYPE_BOOL; val = 1; break;
            default: assert(0); vtype = 0; val = 0; // shouldn't happen
        }
    } else {
        vtype = VTYPE_PYOBJ;
        switch (tok) {
            case MP_TOKEN_KW_NONE: val = (machine_uint_t)mp_const_none; break;
            case MP_TOKEN_KW_FALSE: val = (machine_uint_t)mp_const_false; break;
            case MP_TOKEN_KW_TRUE: val = (machine_uint_t)mp_const_true; break;
            default: assert(0); vtype = 0; val = 0; // shouldn't happen
        }
    }
    emit_post_push_imm(emit, vtype, val);
}

static void emit_native_load_const_small_int(emit_t *emit, int arg) {
    emit_pre(emit);
    if (emit->do_viper_types) {
        emit_post_push_imm(emit, VTYPE_INT, arg);
    } else {
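        // box the value as a Python small int; small ints are tagged as (value << 1) | 1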
        emit_post_push_imm(emit, VTYPE_PYOBJ, (arg << 1) | 1);
    }
}

static void emit_native_load_const_int(emit_t *emit, qstr qstr) {
    // not implemented
    // load integer, check fits in 32 bits
    assert(0);
}

static void emit_native_load_const_dec(emit_t *emit, qstr qstr) {
    // for viper, a float/complex is just a Python object
    emit_pre(emit);
    emit_call_with_imm_arg(emit, RT_F_LOAD_CONST_DEC, rt_load_const_dec, qstr, REG_ARG_1);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_load_const_id(emit_t *emit, qstr qstr) {
    emit_pre(emit);
    if (emit->do_viper_types) {
        assert(0);
    } else {
        emit_call_with_imm_arg(emit, RT_F_LOAD_CONST_STR, rt_load_const_str, qstr, REG_ARG_1); // TODO
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
    }
}

static void emit_native_load_const_str(emit_t *emit, qstr qstr, bool bytes) {
    emit_pre(emit);
    if (emit->do_viper_types) {
        // not implemented properly
        // load a pointer to the asciiz string?
        assert(0);
        emit_post_push_imm(emit, VTYPE_PTR, (machine_uint_t)qstr_str(qstr));
    } else {
        emit_call_with_imm_arg(emit, RT_F_LOAD_CONST_STR, rt_load_const_str, qstr, REG_ARG_1);
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
    }
}

static void emit_native_load_const_verbatim_str(emit_t *emit, const char *str) {
    // not supported/needed for viper
    assert(0);
}

static void emit_native_load_fast(emit_t *emit, qstr qstr, int local_num) {
    vtype_kind_t vtype = emit->local_vtype[local_num];
    if (vtype == VTYPE_UNBOUND) {
        printf("ViperTypeError: local %s used before type known\n", qstr_str(qstr));
    }
    emit_pre(emit);
#if N_X64
    if (local_num == 0) {
        emit_post_push_reg(emit, vtype, REG_LOCAL_1);
    } else {
        need_reg_single(emit, REG_RAX, 0);
        asm_x64_mov_local_to_r64(emit->as, local_num - 1, REG_RAX);
        emit_post_push_reg(emit, vtype, REG_RAX);
    }
#elif N_THUMB
    if (local_num == 0) {
        emit_post_push_reg(emit, vtype, REG_LOCAL_1);
    } else if (local_num == 1) {
        emit_post_push_reg(emit, vtype, REG_LOCAL_2);
    } else if (local_num == 2) {
        emit_post_push_reg(emit, vtype, REG_LOCAL_3);
    } else {
        need_reg_single(emit, REG_R0, 0);
        asm_thumb_mov_reg_local(emit->as, REG_R0, local_num - REG_LOCAL_NUM);
        emit_post_push_reg(emit, vtype, REG_R0);
    }
#endif
}

static void emit_native_load_deref(emit_t *emit, qstr qstr, int local_num) {
    // not implemented
    // in principle could support this quite easily (ldr r0, [r0, #0]) and then get closed over variables!
    assert(0);
}

static void emit_native_load_closure(emit_t *emit, qstr qstr, int local_num) {
    // not implemented
    assert(0);
}

static void emit_native_load_name(emit_t *emit, qstr qstr) {
    emit_pre(emit);
    emit_call_with_imm_arg(emit, RT_F_LOAD_NAME, rt_load_name, qstr, REG_ARG_1);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_load_global(emit_t *emit, qstr qstr) {
    emit_pre(emit);
    emit_call_with_imm_arg(emit, RT_F_LOAD_GLOBAL, rt_load_global, qstr, REG_ARG_1);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_load_attr(emit_t *emit, qstr qstr) {
    // depends on type of subject:
    //  - integer, function, pointer to integers: error
    //  - pointer to structure: get member, quite easy
    //  - Python object: call rt_load_attr, and needs to be typed to convert result
    vtype_kind_t vtype_base;
    emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
    assert(vtype_base == VTYPE_PYOBJ);
    emit_call_with_imm_arg(emit, RT_F_LOAD_ATTR, rt_load_attr, qstr, REG_ARG_2); // arg2 = attribute name
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_load_method(emit_t *emit, qstr qstr) {
    vtype_kind_t vtype_base;
    emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
    assert(vtype_base == VTYPE_PYOBJ);
    emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
    emit_call_with_imm_arg(emit, RT_F_LOAD_METHOD, rt_load_method, qstr, REG_ARG_2); // arg2 = method name
}

static void emit_native_load_build_class(emit_t *emit) {
    emit_pre(emit);
    emit_call(emit, RT_F_LOAD_BUILD_CLASS, rt_load_build_class);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_store_fast(emit_t *emit, qstr qstr, int local_num) {
    vtype_kind_t vtype;
#if N_X64
    if (local_num == 0) {
        emit_pre_pop_reg(emit, &vtype, REG_LOCAL_1);
    } else {
        emit_pre_pop_reg(emit, &vtype, REG_RAX);
        asm_x64_mov_r64_to_local(emit->as, REG_RAX, local_num - 1);
    }
#elif N_THUMB
    if (local_num == 0) {
        emit_pre_pop_reg(emit, &vtype, REG_LOCAL_1);
    } else if (local_num == 1) {
        emit_pre_pop_reg(emit, &vtype, REG_LOCAL_2);
    } else if (local_num == 2) {
        emit_pre_pop_reg(emit, &vtype, REG_LOCAL_3);
    } else {
        emit_pre_pop_reg(emit, &vtype, REG_R0);
        asm_thumb_mov_local_reg(emit->as, local_num - REG_LOCAL_NUM, REG_R0);
    }
#endif

    emit_post(emit);

    // check types
    if (emit->local_vtype[local_num] == VTYPE_UNBOUND) {
        // first time this local is assigned, so give it a type of the object stored in it
        emit->local_vtype[local_num] = vtype;
    } else if (emit->local_vtype[local_num] != vtype) {
        // type of local is not the same as object stored in it
        printf("ViperTypeError: type mismatch, local %s has type %d but source object has type %d\n", qstr_str(qstr), emit->local_vtype[local_num], vtype);
    }
}

static void emit_native_store_deref(emit_t *emit, qstr qstr, int local_num) {
    // not implemented
    assert(0);
}

static void emit_native_store_name(emit_t *emit, qstr qstr) {
    // rt_store_name, but needs conversion of object (maybe have rt_viper_store_name(obj, type))
    vtype_kind_t vtype;
    emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
    assert(vtype == VTYPE_PYOBJ);
    emit_call_with_imm_arg(emit, RT_F_STORE_NAME, rt_store_name, qstr, REG_ARG_1); // arg1 = name
    emit_post(emit);
}

static void emit_native_store_global(emit_t *emit, qstr qstr) {
    // not implemented
    assert(0);
}

static void emit_native_store_attr(emit_t *emit, qstr qstr) {
    vtype_kind_t vtype_base, vtype_val;
    emit_pre_pop_reg_reg(emit, &vtype_base, REG_ARG_1, &vtype_val, REG_ARG_3); // arg1 = base, arg3 = value
    assert(vtype_base == VTYPE_PYOBJ);
    assert(vtype_val == VTYPE_PYOBJ);
    emit_call_with_imm_arg(emit, RT_F_STORE_ATTR, rt_store_attr, qstr, REG_ARG_2); // arg2 = attribute name
    emit_post(emit);
}

static void emit_native_store_subscr(emit_t *emit) {
    // depends on type of subject:
    //  - integer, function, pointer to structure: error
    //  - pointer to integers: store as per array
    //  - Python object: call runtime with converted object or type info
    vtype_kind_t vtype_index, vtype_base, vtype_value;
    emit_pre_pop_reg_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1, &vtype_value, REG_ARG_3); // index, base, value to store
    assert(vtype_index == VTYPE_PYOBJ);
    assert(vtype_base == VTYPE_PYOBJ);
    assert(vtype_value == VTYPE_PYOBJ);
    emit_call(emit, RT_F_STORE_SUBSCR, rt_store_subscr);
}

static void emit_native_store_locals(emit_t *emit) {
    // not needed
    vtype_kind_t vtype;
    emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
    emit_post(emit);
}

static void emit_native_delete_fast(emit_t *emit, qstr qstr, int local_num) {
    // not implemented
    // could support for Python types, just set to None (so GC can reclaim it)
    assert(0);
}

static void emit_native_delete_deref(emit_t *emit, qstr qstr, int local_num) {
    // not supported
    assert(0);
}

static void emit_native_delete_name(emit_t *emit, qstr qstr) {
    // not implemented
    // use rt_delete_name
    assert(0);
}

static void emit_native_delete_global(emit_t *emit, qstr qstr) {
    // not implemented
    // use rt_delete_global
    assert(0);
}

static void emit_native_delete_attr(emit_t *emit, qstr qstr) {
    // not supported
    assert(0);
}

static void emit_native_delete_subscr(emit_t *emit) {
    // not supported
    assert(0);
}

static void emit_native_dup_top(emit_t *emit) {
    vtype_kind_t vtype;
    emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
    emit_post_push_reg_reg(emit, vtype, REG_TEMP0, vtype, REG_TEMP0);
}

static void emit_native_dup_top_two(emit_t *emit) {
    vtype_kind_t vtype0, vtype1;
    emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
    emit_post_push_reg_reg_reg_reg(emit, vtype1, REG_TEMP1, vtype0, REG_TEMP0, vtype1, REG_TEMP1, vtype0, REG_TEMP0);
}

static void emit_native_pop_top(emit_t *emit) {
    vtype_kind_t vtype;
    emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
    emit_post(emit);
}

static void emit_native_rot_two(emit_t *emit) {
    vtype_kind_t vtype0, vtype1;
    emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
    emit_post_push_reg_reg(emit, vtype0, REG_TEMP0, vtype1, REG_TEMP1);
}

static void emit_native_rot_three(emit_t *emit) {
    vtype_kind_t vtype0, vtype1, vtype2;
    emit_pre_pop_reg_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1, &vtype2, REG_TEMP2);
    emit_post_push_reg_reg_reg(emit, vtype0, REG_TEMP0, vtype2, REG_TEMP2, vtype1, REG_TEMP1);
}

static void emit_native_jump(emit_t *emit, int label) {
    emit_pre(emit);
#if N_X64
    asm_x64_jmp_label(emit->as, label);
#elif N_THUMB
    asm_thumb_b_label(emit->as, label);
#endif
    emit_post(emit);
}

static void emit_native_pop_jump_pre_helper(emit_t *emit, int label) {
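    // get the condition into REG_RET as a raw bool: native bools are used as-is,
    // Python objects are converted via rt_is_true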
    vtype_kind_t vtype = peek_vtype(emit);
    if (vtype == VTYPE_BOOL) {
        emit_pre_pop_reg(emit, &vtype, REG_RET);
    } else if (vtype == VTYPE_PYOBJ) {
        emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
        emit_call(emit, RT_F_IS_TRUE, rt_is_true);
    } else {
        printf("ViperTypeError: expecting a bool or pyobj, got %d\n", vtype);
        assert(0);
    }
}

static void emit_native_pop_jump_if_false(emit_t *emit, int label) {
    emit_native_pop_jump_pre_helper(emit, label);
#if N_X64
    asm_x64_test_r8_with_r8(emit->as, REG_RET, REG_RET);
    asm_x64_jcc_label(emit->as, JCC_JZ, label);
#elif N_THUMB
    asm_thumb_cmp_rlo_i8(emit->as, REG_RET, 0);
    asm_thumb_bcc_label(emit->as, THUMB_CC_EQ, label);
#endif
    emit_post(emit);
}

static void emit_native_pop_jump_if_true(emit_t *emit, int label) {
    emit_native_pop_jump_pre_helper(emit, label);
#if N_X64
    asm_x64_test_r8_with_r8(emit->as, REG_RET, REG_RET);
    asm_x64_jcc_label(emit->as, JCC_JNZ, label);
#elif N_THUMB
    asm_thumb_cmp_rlo_i8(emit->as, REG_RET, 0);
    asm_thumb_bcc_label(emit->as, THUMB_CC_NE, label);
#endif
    emit_post(emit);
}

static void emit_native_jump_if_true_or_pop(emit_t *emit, int label) {
    assert(0);
}
static void emit_native_jump_if_false_or_pop(emit_t *emit, int label) {
    assert(0);
}

static void emit_native_setup_loop(emit_t *emit, int label) {
    emit_pre(emit);
    emit_post(emit);
}

static void emit_native_break_loop(emit_t *emit, int label) {
    emit_native_jump(emit, label); // TODO properly
}
static void emit_native_continue_loop(emit_t *emit, int label) {
    assert(0);
}
static void emit_native_setup_with(emit_t *emit, int label) {
    // not supported, or could be with runtime call
    assert(0);
}
static void emit_native_with_cleanup(emit_t *emit) {
    assert(0);
}
static void emit_native_setup_except(emit_t *emit, int label) {
    assert(0);
}
static void emit_native_setup_finally(emit_t *emit, int label) {
    assert(0);
}
static void emit_native_end_finally(emit_t *emit) {
    assert(0);
}

static void emit_native_get_iter(emit_t *emit) {
    // perhaps the difficult one, as we want to rewrite for loops using native code
    // in cases where we iterate over a Python object, can we use normal runtime calls?

    vtype_kind_t vtype;
    emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
    assert(vtype == VTYPE_PYOBJ);
    emit_call(emit, RT_F_GETITER, rt_getiter);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_for_iter(emit_t *emit, int label) {
    emit_pre(emit);
    vtype_kind_t vtype;
    emit_access_stack(emit, 1, &vtype, REG_ARG_1);
    assert(vtype == VTYPE_PYOBJ);
    emit_call(emit, RT_F_ITERNEXT, rt_iternext);
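    // rt_iternext returns the stop_iteration sentinel when the iterator is exhausted;
    // compare against it and branch to the end-of-loop label in that case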
    ASM_MOV_IMM_TO_REG((machine_uint_t)mp_const_stop_iteration, REG_TEMP1);
#if N_X64
    asm_x64_cmp_r64_with_r64(emit->as, REG_RET, REG_TEMP1);
    asm_x64_jcc_label(emit->as, JCC_JE, label);
#elif N_THUMB
    asm_thumb_cmp_reg_reg(emit->as, REG_RET, REG_TEMP1);
    asm_thumb_bcc_label(emit->as, THUMB_CC_EQ, label);
#endif
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_for_iter_end(emit_t *emit) {
    // adjust stack counter (we get here from for_iter ending, which popped the value for us)
    emit_pre(emit);
    adjust_stack(emit, -1);
    emit_post(emit);
}

static void emit_native_pop_block(emit_t *emit) {
    emit_pre(emit);
    emit_post(emit);
}

static void emit_native_pop_except(emit_t *emit) {
    assert(0);
}