// Essentially, normal Python has 1 type: Python objects.
// Viper has more than 1 type, and is a more complicated superset of Python.
// If you declare everything in Viper as a Python object (i.e. omit type decls) then
// it should in principle be exactly the same as native Python.
// Having types means having more opcodes, like binary_op_nat_nat, binary_op_nat_obj, etc.
// In practice we won't have a VM, but will rather do this in asm, which is actually very minimal.

// Because it breaks strict Python equivalence, it should be a completely separate
// decorator.  It breaks equivalence because overflow on integers wraps around.
// It wouldn't break equivalence if you didn't use the new types, but since the
// type decls might be used in normal Python for other reasons, it's probably safest,
// cleanest and clearest to make it a separate decorator.

// Actually, it does break equivalence because integers default to native integers,
// not Python objects.

// for x in l[0:8]: can be compiled into a native loop if l has pointer type
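
// Illustrative sketch only (the decorator name and exact annotation syntax here
// are hypothetical, not settled): a Viper function might look something like
//
//     @viper
//     def add(x: int, y: int) -> int:
//         return x + y    # native integer add, wraps on overflow
//
// whereas the same function with no annotations would be compiled with every
// value treated as a Python object.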

#include <unistd.h>
#include <stdlib.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>

#include "misc.h"
#include "mpyconfig.h"
#include "lexer.h"
#include "parse.h"
#include "scope.h"
#include "runtime.h"
#include "emit.h"

// wrapper around everything in this file
#if N_X64 || N_THUMB

#if N_X64

// x64 specific stuff

#include "asmx64.h"

#define REG_LOCAL_1 (REG_RBX)
#define REG_LOCAL_NUM (1)

#define EXPORT_FUN(name) emit_native_x64_##name

#define REG_TEMP0 (REG_RAX)
#define REG_TEMP1 (REG_RDI)
#define REG_TEMP2 (REG_RSI)
#define ASM_MOV_REG_TO_LOCAL(reg, local_num) asm_x64_mov_r64_to_local(emit->as, (reg), (local_num))
#define ASM_MOV_IMM_TO_REG(imm, reg) asm_x64_mov_i64_to_r64_optimised(emit->as, (imm), (reg))
#define ASM_MOV_IMM_TO_LOCAL_USING(imm, local_num, reg_temp) do { asm_x64_mov_i64_to_r64_optimised(emit->as, (imm), (reg_temp)); asm_x64_mov_r64_to_local(emit->as, (reg_temp), (local_num)); } while (false)
#define ASM_MOV_LOCAL_TO_REG(local_num, reg) asm_x64_mov_local_to_r64(emit->as, (local_num), (reg))
#define ASM_MOV_REG_TO_REG(reg_src, reg_dest) asm_x64_mov_r64_to_r64(emit->as, (reg_src), (reg_dest))
#define ASM_MOV_LOCAL_ADDR_TO_REG(local_num, reg) asm_x64_mov_local_addr_to_r64(emit->as, (local_num), (reg))

#elif N_THUMB

// thumb specific stuff

#include "asmthumb.h"

#define REG_LOCAL_1 (REG_R4)
#define REG_LOCAL_2 (REG_R5)
#define REG_LOCAL_3 (REG_R6)
#define REG_LOCAL_NUM (3)

#define EXPORT_FUN(name) emit_native_thumb_##name

#define REG_TEMP0 (REG_R0)
#define REG_TEMP1 (REG_R1)
#define REG_TEMP2 (REG_R2)
#define ASM_MOV_REG_TO_LOCAL(reg, local_num) asm_thumb_mov_local_reg(emit->as, (local_num), (reg))
#define ASM_MOV_IMM_TO_REG(imm, reg) asm_thumb_mov_reg_i32_optimised(emit->as, (reg), (imm))
#define ASM_MOV_IMM_TO_LOCAL_USING(imm, local_num, reg_temp) do { asm_thumb_mov_reg_i32_optimised(emit->as, (reg_temp), (imm)); asm_thumb_mov_local_reg(emit->as, (local_num), (reg_temp)); } while (false)
#define ASM_MOV_LOCAL_TO_REG(local_num, reg) asm_thumb_mov_reg_local(emit->as, (reg), (local_num))
#define ASM_MOV_REG_TO_REG(reg_src, reg_dest) asm_thumb_mov_reg_reg(emit->as, (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG(local_num, reg) asm_thumb_mov_reg_local_addr(emit->as, (reg), (local_num))

#endif

typedef enum {
    STACK_VALUE,
    STACK_REG,
    STACK_IMM,
} stack_info_kind_t;

typedef enum {
    VTYPE_UNBOUND,
    VTYPE_PYOBJ,
    VTYPE_BOOL,
    VTYPE_INT,
    VTYPE_PTR,
    VTYPE_PTR_NONE,
    VTYPE_BUILTIN_V_INT,
} vtype_kind_t;

typedef struct _stack_info_t {
    vtype_kind_t vtype;
    stack_info_kind_t kind;
    union {
        int u_reg;
        machine_int_t u_imm;
    };
} stack_info_t;
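
// Rough summary of how the emitter models the Python value stack: each entry is
// a stack_info_t.  STACK_VALUE means the value lives in its C-stack slot
// (at offset emit->stack_start + position); STACK_REG means it is currently
// cached in register u_reg; STACK_IMM means it is a known constant u_imm that
// has not been materialised anywhere yet.  The need_reg_* and need_stack_settled
// helpers below spill registers and immediates back to their slots before a
// register is clobbered or before control flow can merge.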

struct _emit_t {
    int pass;

    bool do_viper_types;

    int local_vtype_alloc;
    vtype_kind_t *local_vtype;

    int stack_info_alloc;
    stack_info_t *stack_info;

    int stack_start;
    int stack_size;

    bool last_emit_was_return_value;

    scope_t *scope;

#if N_X64
    asm_x64_t *as;
#elif N_THUMB
    asm_thumb_t *as;
#endif
};

emit_t *EXPORT_FUN(new)(uint max_num_labels) {
    emit_t *emit = m_new(emit_t, 1);
    emit->do_viper_types = false;
    emit->local_vtype = NULL;
    emit->stack_info = NULL;
#if N_X64
    emit->as = asm_x64_new(max_num_labels);
#elif N_THUMB
    emit->as = asm_thumb_new(max_num_labels);
#endif
    return emit;
}

static void emit_native_set_viper_types(emit_t *emit, bool do_viper_types) {
    emit->do_viper_types = do_viper_types;
}

static void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
    emit->pass = pass;
    emit->stack_start = 0;
    emit->stack_size = 0;
    emit->last_emit_was_return_value = false;
    emit->scope = scope;

    if (emit->local_vtype == NULL) {
        emit->local_vtype_alloc = scope->num_locals + 20; // XXX should be maximum over all scopes
        emit->local_vtype = m_new(vtype_kind_t, emit->local_vtype_alloc);
    }
    if (emit->stack_info == NULL) {
        emit->stack_info_alloc = scope->stack_size + 50; // XXX don't know stack size on entry, should be maximum over all scopes
        emit->stack_info = m_new(stack_info_t, emit->stack_info_alloc);
    }

    if (emit->do_viper_types) {
        // TODO set types of arguments based on type signature
        for (int i = 0; i < emit->local_vtype_alloc; i++) {
            emit->local_vtype[i] = VTYPE_UNBOUND;
        }
        for (int i = 0; i < emit->stack_info_alloc; i++) {
            emit->stack_info[i].kind = STACK_VALUE;
            emit->stack_info[i].vtype = VTYPE_UNBOUND;
        }
    } else {
        for (int i = 0; i < emit->local_vtype_alloc; i++) {
            emit->local_vtype[i] = VTYPE_PYOBJ;
        }
        for (int i = 0; i < emit->stack_info_alloc; i++) {
            emit->stack_info[i].kind = STACK_VALUE;
            emit->stack_info[i].vtype = VTYPE_PYOBJ;
        }
    }

#if N_X64
    asm_x64_start_pass(emit->as, pass);
#elif N_THUMB
    asm_thumb_start_pass(emit->as, pass);
#endif

    // entry to function
    int num_locals = 0;
    if (pass > PASS_1) {
        num_locals = scope->num_locals - REG_LOCAL_NUM;
        if (num_locals < 0) {
            num_locals = 0;
        }
        emit->stack_start = num_locals;
        num_locals += scope->stack_size;
    }
#if N_X64
    asm_x64_entry(emit->as, num_locals);
#elif N_THUMB
    asm_thumb_entry(emit->as, num_locals);
#endif

    // initialise locals from parameters
#if N_X64
    for (int i = 0; i < scope->num_params; i++) {
        if (i == 0) {
            asm_x64_mov_r64_to_r64(emit->as, REG_ARG_1, REG_LOCAL_1);
        } else if (i == 1) {
            asm_x64_mov_r64_to_local(emit->as, REG_ARG_2, i - 1);
        } else if (i == 2) {
            asm_x64_mov_r64_to_local(emit->as, REG_ARG_3, i - 1);
        } else {
            // TODO not implemented
            assert(0);
        }
    }
#elif N_THUMB
    for (int i = 0; i < scope->num_params; i++) {
        if (i == 0) {
            asm_thumb_mov_reg_reg(emit->as, REG_LOCAL_1, REG_ARG_1);
        } else if (i == 1) {
            asm_thumb_mov_reg_reg(emit->as, REG_LOCAL_2, REG_ARG_2);
        } else if (i == 2) {
            asm_thumb_mov_reg_reg(emit->as, REG_LOCAL_3, REG_ARG_3);
        } else if (i == 3) {
            asm_thumb_mov_local_reg(emit->as, i - REG_LOCAL_NUM, REG_ARG_4);
        } else {
            // TODO not implemented
            assert(0);
        }
    }

    asm_thumb_mov_reg_i32(emit->as, REG_R7, (machine_uint_t)rt_fun_table);
#endif
}
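
// Frame layout set up by start_pass (approximate summary): the first
// REG_LOCAL_NUM Python locals are kept in callee-saved registers
// (REG_LOCAL_1, ...); the remaining locals and the Python value stack get
// slots on the C stack, locals first and the value stack starting at
// emit->stack_start.  Incoming arguments are copied into the corresponding
// register or stack slot on entry, as above.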

static void emit_native_end_pass(emit_t *emit) {
#if N_X64
    if (!emit->last_emit_was_return_value) {
        asm_x64_exit(emit->as);
    }
    asm_x64_end_pass(emit->as);
#elif N_THUMB
    if (!emit->last_emit_was_return_value) {
        asm_thumb_exit(emit->as);
    }
    asm_thumb_end_pass(emit->as);
#endif

    // check stack is back to zero size
    if (emit->stack_size != 0) {
        printf("ERROR: stack size not back to zero; got %d\n", emit->stack_size);
    }

    if (emit->pass == PASS_3) {
#if N_X64
        py_fun_t f = asm_x64_get_code(emit->as);
        rt_assign_native_code(emit->scope->unique_code_id, f, asm_x64_get_code_size(emit->as), emit->scope->num_params);
#elif N_THUMB
        py_fun_t f = asm_thumb_get_code(emit->as);
        rt_assign_native_code(emit->scope->unique_code_id, f, asm_thumb_get_code_size(emit->as), emit->scope->num_params);
#endif
    }
}

static bool emit_native_last_emit_was_return_value(emit_t *emit) {
    return emit->last_emit_was_return_value;
}

static int emit_native_get_stack_size(emit_t *emit) {
    return emit->stack_size;
}

static void emit_native_set_stack_size(emit_t *emit, int size) {
    emit->stack_size = size;
}

static void adjust_stack(emit_t *emit, int stack_size_delta) {
    emit->stack_size += stack_size_delta;
    assert(emit->stack_size >= 0);
    if (emit->pass > PASS_1 && emit->stack_size > emit->scope->stack_size) {
        emit->scope->stack_size = emit->stack_size;
    }
}

/*
static void emit_pre_raw(emit_t *emit, int stack_size_delta) {
    adjust_stack(emit, stack_size_delta);
    emit->last_emit_was_return_value = false;
}
*/

// this must be called at start of emit functions
static void emit_pre(emit_t *emit) {
    emit->last_emit_was_return_value = false;
    // settle the stack
    /*
    if (regs_needed != 0) {
        for (int i = 0; i < emit->stack_size; i++) {
            switch (emit->stack_info[i].kind) {
                case STACK_VALUE:
                    break;

                case STACK_REG:
                    // TODO only push reg if in regs_needed
                    emit->stack_info[i].kind = STACK_VALUE;
                    ASM_MOV_REG_TO_LOCAL(emit->stack_info[i].u_reg, emit->stack_start + i);
                    break;

                case STACK_IMM:
                    // don't think we ever need to push imms for settling
                    //ASM_MOV_IMM_TO_LOCAL(emit->last_imm, emit->stack_start + i);
                    break;
            }
        }
    }
    */
}

static vtype_kind_t peek_vtype(emit_t *emit) {
    return emit->stack_info[emit->stack_size - 1].vtype;
}

// pos=1 is TOS, pos=2 is next, etc
// use pos=0 for no skipping
static void need_reg_single(emit_t *emit, int reg_needed, int skip_stack_pos) {
    skip_stack_pos = emit->stack_size - skip_stack_pos;
    for (int i = 0; i < emit->stack_size; i++) {
        if (i != skip_stack_pos) {
            stack_info_t *si = &emit->stack_info[i];
            if (si->kind == STACK_REG && si->u_reg == reg_needed) {
                si->kind = STACK_VALUE;
                ASM_MOV_REG_TO_LOCAL(si->u_reg, emit->stack_start + i);
            }
        }
    }
}

static void need_reg_all(emit_t *emit) {
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_REG) {
            si->kind = STACK_VALUE;
            ASM_MOV_REG_TO_LOCAL(si->u_reg, emit->stack_start + i);
        }
    }
}

static void need_stack_settled(emit_t *emit) {
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_REG) {
            si->kind = STACK_VALUE;
            ASM_MOV_REG_TO_LOCAL(si->u_reg, emit->stack_start + i);
        }
    }
    for (int i = 0; i < emit->stack_size; i++) {
        stack_info_t *si = &emit->stack_info[i];
        if (si->kind == STACK_IMM) {
            ASM_MOV_IMM_TO_LOCAL_USING(si->u_imm, emit->stack_start + i, REG_TEMP0);
        }
    }
}

// pos=1 is TOS, pos=2 is next, etc
static void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int reg_dest) {
    need_reg_single(emit, reg_dest, pos);
    stack_info_t *si = &emit->stack_info[emit->stack_size - pos];
    *vtype = si->vtype;
    switch (si->kind) {
        case STACK_VALUE:
            ASM_MOV_LOCAL_TO_REG(emit->stack_start + emit->stack_size - pos, reg_dest);
            break;

        case STACK_REG:
            if (si->u_reg != reg_dest) {
                ASM_MOV_REG_TO_REG(si->u_reg, reg_dest);
            }
            break;

        case STACK_IMM:
            ASM_MOV_IMM_TO_REG(si->u_imm, reg_dest);
            break;
    }
}

static void emit_pre_pop_reg(emit_t *emit, vtype_kind_t *vtype, int reg_dest) {
    emit->last_emit_was_return_value = false;
    emit_access_stack(emit, 1, vtype, reg_dest);
    adjust_stack(emit, -1);
}

static void emit_pre_pop_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb) {
    emit_pre_pop_reg(emit, vtypea, rega);
    emit_pre_pop_reg(emit, vtypeb, regb);
}

static void emit_pre_pop_reg_reg_reg(emit_t *emit, vtype_kind_t *vtypea, int rega, vtype_kind_t *vtypeb, int regb, vtype_kind_t *vtypec, int regc) {
    emit_pre_pop_reg(emit, vtypea, rega);
    emit_pre_pop_reg(emit, vtypeb, regb);
    emit_pre_pop_reg(emit, vtypec, regc);
}

static void emit_post(emit_t *emit) {
}

static void emit_post_push_reg(emit_t *emit, vtype_kind_t vtype, int reg) {
    stack_info_t *si = &emit->stack_info[emit->stack_size];
    si->vtype = vtype;
    si->kind = STACK_REG;
    si->u_reg = reg;
    adjust_stack(emit, 1);
}

static void emit_post_push_imm(emit_t *emit, vtype_kind_t vtype, machine_int_t imm) {
    stack_info_t *si = &emit->stack_info[emit->stack_size];
    si->vtype = vtype;
    si->kind = STACK_IMM;
    si->u_imm = imm;
    adjust_stack(emit, 1);
}

static void emit_post_push_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
}

static void emit_post_push_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
    emit_post_push_reg(emit, vtypec, regc);
}

static void emit_post_push_reg_reg_reg_reg(emit_t *emit, vtype_kind_t vtypea, int rega, vtype_kind_t vtypeb, int regb, vtype_kind_t vtypec, int regc, vtype_kind_t vtyped, int regd) {
    emit_post_push_reg(emit, vtypea, rega);
    emit_post_push_reg(emit, vtypeb, regb);
    emit_post_push_reg(emit, vtypec, regc);
    emit_post_push_reg(emit, vtyped, regd);
}

// vtype of all n_pop objects is VTYPE_PYOBJ
// does not use any temporary registers (but may use reg_dest before loading it with stack pointer)
static void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, int reg_dest, int n_pop) {
    need_reg_all(emit);
    for (int i = 0; i < n_pop; i++) {
        stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
        // must push any imm's to stack
        if (si->kind == STACK_IMM) {
            si->kind = STACK_VALUE;
            ASM_MOV_IMM_TO_LOCAL_USING(si->u_imm, emit->stack_start + emit->stack_size - 1 - i, reg_dest);
        }
        assert(si->kind == STACK_VALUE);
        assert(si->vtype == VTYPE_PYOBJ);
    }
    ASM_MOV_LOCAL_ADDR_TO_REG(emit->stack_start + emit->stack_size - 1, reg_dest);
    adjust_stack(emit, -n_pop);
}

// vtype of all n_push objects is VTYPE_PYOBJ
static void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, int reg_dest, int n_push) {
    need_reg_all(emit);
    for (int i = 0; i < n_push; i++) {
        emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
        emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
    }
    ASM_MOV_LOCAL_ADDR_TO_REG(emit->stack_start + emit->stack_size + n_push - 1, reg_dest);
    adjust_stack(emit, n_push);
}

static void emit_call(emit_t *emit, rt_fun_kind_t fun_kind, void *fun) {
    need_reg_all(emit);
#if N_X64
    asm_x64_call_ind(emit->as, fun, REG_RAX);
#elif N_THUMB
    asm_thumb_bl_ind(emit->as, rt_fun_table[fun_kind], fun_kind, REG_R3);
#endif
}
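
// Runtime-call convention (sketch): on x64 the C function pointer is called
// directly (indirectly through REG_RAX); on Thumb the call goes via
// rt_fun_table, whose address was loaded into REG_R7 on function entry,
// indexed by the rt_fun_kind_t value (details are in asm_thumb_bl_ind).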

static void emit_call_with_imm_arg(emit_t *emit, rt_fun_kind_t fun_kind, void *fun, machine_int_t arg_val, int arg_reg) {
    need_reg_all(emit);
    ASM_MOV_IMM_TO_REG(arg_val, arg_reg);
#if N_X64
    asm_x64_call_ind(emit->as, fun, REG_RAX);
#elif N_THUMB
    asm_thumb_bl_ind(emit->as, rt_fun_table[fun_kind], fun_kind, REG_R3);
#endif
}

static void emit_native_load_id(emit_t *emit, qstr qstr) {
    // check for built-ins
    if (strcmp(qstr_str(qstr), "v_int") == 0) {
        assert(0);
        emit_pre(emit);
        //emit_post_push_blank(emit, VTYPE_BUILTIN_V_INT);

    // not a built-in, so do usual thing
    } else {
        emit_common_load_id(emit, &EXPORT_FUN(method_table), emit->scope, qstr);
    }
}

static void emit_native_store_id(emit_t *emit, qstr qstr) {
    // TODO check for built-ins and disallow
    emit_common_store_id(emit, &EXPORT_FUN(method_table), emit->scope, qstr);
}

static void emit_native_delete_id(emit_t *emit, qstr qstr) {
    // TODO check for built-ins and disallow
    emit_common_delete_id(emit, &EXPORT_FUN(method_table), emit->scope, qstr);
}

static void emit_native_label_assign(emit_t *emit, int l) {
    emit_pre(emit);
    // need to commit stack because we can jump here from elsewhere
    need_stack_settled(emit);
#if N_X64
    asm_x64_label_assign(emit->as, l);
#elif N_THUMB
    asm_thumb_label_assign(emit->as, l);
#endif
    emit_post(emit);
}

static void emit_native_import_name(emit_t *emit, qstr qstr) {
    // not implemented
    assert(0);
}

static void emit_native_import_from(emit_t *emit, qstr qstr) {
    // not implemented
    assert(0);
}

static void emit_native_import_star(emit_t *emit) {
    // not implemented
    assert(0);
}

static void emit_native_load_const_tok(emit_t *emit, py_token_kind_t tok) {
    emit_pre(emit);
    int vtype;
    machine_uint_t val;
    if (emit->do_viper_types) {
        switch (tok) {
            case PY_TOKEN_KW_NONE: vtype = VTYPE_PTR_NONE; val = 0; break;
            case PY_TOKEN_KW_FALSE: vtype = VTYPE_BOOL; val = 0; break;
            case PY_TOKEN_KW_TRUE: vtype = VTYPE_BOOL; val = 1; break;
            default: assert(0); vtype = 0; val = 0; // shouldn't happen
        }
    } else {
        vtype = VTYPE_PYOBJ;
        switch (tok) {
            case PY_TOKEN_KW_NONE: val = (machine_uint_t)py_const_none; break;
            case PY_TOKEN_KW_FALSE: val = (machine_uint_t)py_const_false; break;
            case PY_TOKEN_KW_TRUE: val = (machine_uint_t)py_const_true; break;
            default: assert(0); vtype = 0; val = 0; // shouldn't happen
        }
    }
    emit_post_push_imm(emit, vtype, val);
}

static void emit_native_load_const_small_int(emit_t *emit, int arg) {
    emit_pre(emit);
    if (emit->do_viper_types) {
        emit_post_push_imm(emit, VTYPE_INT, arg);
    } else {
        emit_post_push_imm(emit, VTYPE_PYOBJ, (arg << 1) | 1); // box as a small-int object: value shifted left 1 with the low tag bit set
    }
}

static void emit_native_load_const_int(emit_t *emit, qstr qstr) {
    // not implemented
    // load integer, check fits in 32 bits
    assert(0);
}

static void emit_native_load_const_dec(emit_t *emit, qstr qstr) {
    // for viper, a float/complex is just a Python object
    emit_pre(emit);
    emit_call_with_imm_arg(emit, RT_F_LOAD_CONST_DEC, rt_load_const_dec, qstr, REG_ARG_1);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_load_const_id(emit_t *emit, qstr qstr) {
    emit_pre(emit);
    if (emit->do_viper_types) {
        assert(0);
    } else {
        emit_call_with_imm_arg(emit, RT_F_LOAD_CONST_STR, rt_load_const_str, qstr, REG_ARG_1); // TODO
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
    }
}

static void emit_native_load_const_str(emit_t *emit, qstr qstr, bool bytes) {
    emit_pre(emit);
    if (emit->do_viper_types) {
        // not implemented properly
        // load a pointer to the asciiz string?
        assert(0);
        emit_post_push_imm(emit, VTYPE_PTR, (machine_uint_t)qstr_str(qstr));
    } else {
        emit_call_with_imm_arg(emit, RT_F_LOAD_CONST_STR, rt_load_const_str, qstr, REG_ARG_1);
        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
    }
}

static void emit_native_load_const_verbatim_start(emit_t *emit) {
    // not supported/needed for viper
    assert(0);
}

static void emit_native_load_const_verbatim_int(emit_t *emit, int val) {
    // not supported/needed for viper
    assert(0);
}

static void emit_native_load_const_verbatim_str(emit_t *emit, const char *str) {
    // not supported/needed for viper
    assert(0);
}

static void emit_native_load_const_verbatim_strn(emit_t *emit, const char *str, int len) {
    // not supported/needed for viper
    assert(0);
}

static void emit_native_load_const_verbatim_quoted_str(emit_t *emit, qstr qstr, bool bytes) {
    // not supported/needed for viper
    assert(0);
}

static void emit_native_load_const_verbatim_end(emit_t *emit) {
    // not supported/needed for viper
    assert(0);
}

static void emit_native_load_fast(emit_t *emit, qstr qstr, int local_num) {
    vtype_kind_t vtype = emit->local_vtype[local_num];
    if (vtype == VTYPE_UNBOUND) {
        printf("ViperTypeError: local %s used before type known\n", qstr_str(qstr));
    }
    emit_pre(emit);
#if N_X64
    if (local_num == 0) {
        emit_post_push_reg(emit, vtype, REG_LOCAL_1);
    } else {
        need_reg_single(emit, REG_RAX, 0);
        asm_x64_mov_local_to_r64(emit->as, local_num - 1, REG_RAX);
        emit_post_push_reg(emit, vtype, REG_RAX);
    }
#elif N_THUMB
    if (local_num == 0) {
        emit_post_push_reg(emit, vtype, REG_LOCAL_1);
    } else if (local_num == 1) {
        emit_post_push_reg(emit, vtype, REG_LOCAL_2);
    } else if (local_num == 2) {
        emit_post_push_reg(emit, vtype, REG_LOCAL_3);
    } else {
        need_reg_single(emit, REG_R0, 0);
        asm_thumb_mov_reg_local(emit->as, REG_R0, local_num - 1);
        emit_post_push_reg(emit, vtype, REG_R0);
    }
#endif
}

static void emit_native_load_name(emit_t *emit, qstr qstr) {
    emit_pre(emit);
    emit_call_with_imm_arg(emit, RT_F_LOAD_NAME, rt_load_name, qstr, REG_ARG_1);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_load_global(emit_t *emit, qstr qstr) {
    emit_pre(emit);
    emit_call_with_imm_arg(emit, RT_F_LOAD_GLOBAL, rt_load_global, qstr, REG_ARG_1);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_load_deref(emit_t *emit, qstr qstr, int local_num) {
    // not implemented
    // in principle could support this quite easily (ldr r0, [r0, #0]) and then get closed over variables!
    assert(0);
}

static void emit_native_load_closure(emit_t *emit, qstr qstr, int local_num) {
    // not implemented
    assert(0);
}

static void emit_native_load_attr(emit_t *emit, qstr qstr) {
    // depends on type of subject:
    //  - integer, function, pointer to integers: error
    //  - pointer to structure: get member, quite easy
    //  - Python object: call rt_load_attr, and needs to be typed to convert result
    vtype_kind_t vtype_base;
    emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
    assert(vtype_base == VTYPE_PYOBJ);
    emit_call_with_imm_arg(emit, RT_F_LOAD_ATTR, rt_load_attr, qstr, REG_ARG_2); // arg2 = attribute name
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_load_method(emit_t *emit, qstr qstr) {
    vtype_kind_t vtype_base;
    emit_pre_pop_reg(emit, &vtype_base, REG_ARG_1); // arg1 = base
    assert(vtype_base == VTYPE_PYOBJ);
    emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_3, 2); // arg3 = dest ptr
    emit_call_with_imm_arg(emit, RT_F_LOAD_METHOD, rt_load_method, qstr, REG_ARG_2); // arg2 = method name
}

static void emit_native_load_build_class(emit_t *emit) {
    emit_pre(emit);
    emit_call(emit, RT_F_LOAD_BUILD_CLASS, rt_load_build_class);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_store_fast(emit_t *emit, qstr qstr, int local_num) {
    vtype_kind_t vtype;
#if N_X64
    if (local_num == 0) {
        emit_pre_pop_reg(emit, &vtype, REG_LOCAL_1);
    } else {
        emit_pre_pop_reg(emit, &vtype, REG_RAX);
        asm_x64_mov_r64_to_local(emit->as, REG_RAX, local_num - 1);
    }
#elif N_THUMB
    if (local_num == 0) {
        emit_pre_pop_reg(emit, &vtype, REG_LOCAL_1);
    } else if (local_num == 1) {
        emit_pre_pop_reg(emit, &vtype, REG_LOCAL_2);
    } else if (local_num == 2) {
        emit_pre_pop_reg(emit, &vtype, REG_LOCAL_3);
    } else {
        emit_pre_pop_reg(emit, &vtype, REG_R0);
        asm_thumb_mov_local_reg(emit->as, local_num - 1, REG_R0);
    }
#endif

    emit_post(emit);

    // check types
    if (emit->local_vtype[local_num] == VTYPE_UNBOUND) {
        // first time this local is assigned, so give it a type of the object stored in it
        emit->local_vtype[local_num] = vtype;
    } else if (emit->local_vtype[local_num] != vtype) {
        // type of local is not the same as object stored in it
        printf("ViperTypeError: type mismatch, local %s has type %d but source object has type %d\n", qstr_str(qstr), emit->local_vtype[local_num], vtype);
    }
}

static void emit_native_store_name(emit_t *emit, qstr qstr) {
    // rt_store_name, but needs conversion of object (maybe have rt_viper_store_name(obj, type))
    vtype_kind_t vtype;
    emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
    assert(vtype == VTYPE_PYOBJ);
    emit_call_with_imm_arg(emit, RT_F_STORE_NAME, rt_store_name, qstr, REG_ARG_1); // arg1 = name
    emit_post(emit);
}

static void emit_native_store_global(emit_t *emit, qstr qstr) {
    // not implemented
    assert(0);
}

static void emit_native_store_deref(emit_t *emit, qstr qstr, int local_num) {
    // not implemented
    assert(0);
}

static void emit_native_store_attr(emit_t *emit, qstr qstr) {
    vtype_kind_t vtype_base, vtype_val;
    emit_pre_pop_reg_reg(emit, &vtype_base, REG_ARG_1, &vtype_val, REG_ARG_3); // arg1 = base, arg3 = value
    assert(vtype_base == VTYPE_PYOBJ);
    assert(vtype_val == VTYPE_PYOBJ);
    emit_call_with_imm_arg(emit, RT_F_STORE_ATTR, rt_store_attr, qstr, REG_ARG_2); // arg2 = attribute name
    emit_post(emit);
}

static void emit_native_store_subscr(emit_t *emit) {
    // depends on type of subject:
    //  - integer, function, pointer to structure: error
    //  - pointer to integers: store as per array
    //  - Python object: call runtime with converted object or type info
    vtype_kind_t vtype_index, vtype_base, vtype_value;
    emit_pre_pop_reg_reg_reg(emit, &vtype_index, REG_ARG_2, &vtype_base, REG_ARG_1, &vtype_value, REG_ARG_3); // index, base, value to store
    assert(vtype_index == VTYPE_PYOBJ);
    assert(vtype_base == VTYPE_PYOBJ);
    assert(vtype_value == VTYPE_PYOBJ);
    emit_call(emit, RT_F_STORE_SUBSCR, rt_store_subscr);
}

static void emit_native_store_locals(emit_t *emit) {
    // not needed
    vtype_kind_t vtype;
    emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
    emit_post(emit);
}

static void emit_native_delete_fast(emit_t *emit, qstr qstr, int local_num) {
    // not implemented
    // could be supported for Python types: just set the local to None (so the GC can reclaim it)
    assert(0);
}

static void emit_native_delete_name(emit_t *emit, qstr qstr) {
    // not implemented
    // use rt_delete_name
    assert(0);
}

static void emit_native_delete_global(emit_t *emit, qstr qstr) {
    // not implemented
    // use rt_delete_global
    assert(0);
}

static void emit_native_delete_deref(emit_t *emit, qstr qstr, int local_num) {
    // not supported
    assert(0);
}

static void emit_native_delete_attr(emit_t *emit, qstr qstr) {
    // not supported
    assert(0);
}

static void emit_native_delete_subscr(emit_t *emit) {
    // not supported
    assert(0);
}

static void emit_native_dup_top(emit_t *emit) {
    vtype_kind_t vtype;
    emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
    emit_post_push_reg_reg(emit, vtype, REG_TEMP0, vtype, REG_TEMP0);
}

static void emit_native_dup_top_two(emit_t *emit) {
    vtype_kind_t vtype0, vtype1;
    emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
    emit_post_push_reg_reg_reg_reg(emit, vtype1, REG_TEMP1, vtype0, REG_TEMP0, vtype1, REG_TEMP1, vtype0, REG_TEMP0);
}

static void emit_native_pop_top(emit_t *emit) {
    vtype_kind_t vtype;
    emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
    emit_post(emit);
}

static void emit_native_rot_two(emit_t *emit) {
    vtype_kind_t vtype0, vtype1;
    emit_pre_pop_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1);
    emit_post_push_reg_reg(emit, vtype0, REG_TEMP0, vtype1, REG_TEMP1);
}

static void emit_native_rot_three(emit_t *emit) {
    vtype_kind_t vtype0, vtype1, vtype2;
    emit_pre_pop_reg_reg_reg(emit, &vtype0, REG_TEMP0, &vtype1, REG_TEMP1, &vtype2, REG_TEMP2);
    emit_post_push_reg_reg_reg(emit, vtype0, REG_TEMP0, vtype2, REG_TEMP2, vtype1, REG_TEMP1);
}

static void emit_native_jump(emit_t *emit, int label) {
    emit_pre(emit);
#if N_X64
    asm_x64_jmp_label(emit->as, label);
#elif N_THUMB
    asm_thumb_b_label(emit->as, label);
#endif
    emit_post(emit);
}

static void emit_native_pop_jump_pre_helper(emit_t *emit, int label) {
    vtype_kind_t vtype = peek_vtype(emit);
    if (vtype == VTYPE_BOOL) {
        emit_pre_pop_reg(emit, &vtype, REG_RET);
    } else if (vtype == VTYPE_PYOBJ) {
        emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
        emit_call(emit, RT_F_IS_TRUE, rt_is_true);
    } else {
        printf("ViperTypeError: expecting a bool or pyobj, got %d\n", vtype);
        assert(0);
    }
}

static void emit_native_pop_jump_if_false(emit_t *emit, int label) {
    emit_native_pop_jump_pre_helper(emit, label);
#if N_X64
    asm_x64_test_r8_with_r8(emit->as, REG_RET, REG_RET);
    asm_x64_jcc_label(emit->as, JCC_JZ, label);
#elif N_THUMB
    asm_thumb_cmp_rlo_i8(emit->as, REG_RET, 0);
    asm_thumb_bcc_label(emit->as, THUMB_CC_EQ, label);
#endif
    emit_post(emit);
}

static void emit_native_pop_jump_if_true(emit_t *emit, int label) {
    emit_native_pop_jump_pre_helper(emit, label);
#if N_X64
    asm_x64_test_r8_with_r8(emit->as, REG_RET, REG_RET);
    asm_x64_jcc_label(emit->as, JCC_JNZ, label);
#elif N_THUMB
    asm_thumb_cmp_rlo_i8(emit->as, REG_RET, 0);
    asm_thumb_bcc_label(emit->as, THUMB_CC_NE, label);
#endif
    emit_post(emit);
}

static void emit_native_jump_if_true_or_pop(emit_t *emit, int label) {
    assert(0);
}
static void emit_native_jump_if_false_or_pop(emit_t *emit, int label) {
    assert(0);
}

static void emit_native_setup_loop(emit_t *emit, int label) {
    emit_pre(emit);
    emit_post(emit);
}

static void emit_native_break_loop(emit_t *emit, int label) {
    emit_native_jump(emit, label); // TODO properly
}
static void emit_native_continue_loop(emit_t *emit, int label) {
    assert(0);
}
static void emit_native_setup_with(emit_t *emit, int label) {
    // not supported, or could be with runtime call
    assert(0);
}
static void emit_native_with_cleanup(emit_t *emit) {
    assert(0);
}
static void emit_native_setup_except(emit_t *emit, int label) {
    assert(0);
}
static void emit_native_setup_finally(emit_t *emit, int label) {
    assert(0);
}
static void emit_native_end_finally(emit_t *emit) {
    assert(0);
}

static void emit_native_get_iter(emit_t *emit) {
    // perhaps the difficult one, as we want to rewrite for loops using native code
    // in cases where we iterate over a Python object, can we use normal runtime calls?

    vtype_kind_t vtype;
    emit_pre_pop_reg(emit, &vtype, REG_ARG_1);
    assert(vtype == VTYPE_PYOBJ);
    emit_call(emit, RT_F_GETITER, rt_getiter);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

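// FOR_ITER (sketch of intent): call rt_iternext on the iterator left on the
// stack by get_iter; if it returns the py_const_stop_iteration sentinel, jump
// to the loop-end label, otherwise push the fetched value.  for_iter_end below
// just drops the iterator from the emitter's stack model.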
static void emit_native_for_iter(emit_t *emit, int label) {
    emit_pre(emit);
    vtype_kind_t vtype;
    emit_access_stack(emit, 1, &vtype, REG_ARG_1);
    assert(vtype == VTYPE_PYOBJ);
    emit_call(emit, RT_F_ITERNEXT, rt_iternext);
    ASM_MOV_IMM_TO_REG((machine_uint_t)py_const_stop_iteration, REG_TEMP1);
#if N_X64
    asm_x64_cmp_r64_with_r64(emit->as, REG_RET, REG_TEMP1);
    asm_x64_jcc_label(emit->as, JCC_JE, label);
#elif N_THUMB
    assert(0); // XXX TODO
    asm_thumb_cmp_reg_reg(emit->as, REG_RET, REG_TEMP1);
    // use it, b?
    asm_thumb_b_label(emit->as, label);
#endif
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_for_iter_end(emit_t *emit) {
    // adjust stack counter (we get here from for_iter ending, which popped the value for us)
    emit_pre(emit);
    adjust_stack(emit, -1);
    emit_post(emit);
}

static void emit_native_pop_block(emit_t *emit) {
    emit_pre(emit);
    emit_post(emit);
}

static void emit_native_pop_except(emit_t *emit) {
    assert(0);
}

static void emit_native_unary_op(emit_t *emit, rt_unary_op_t op) {
    vtype_kind_t vtype;
    emit_pre_pop_reg(emit, &vtype, REG_ARG_2);
    assert(vtype == VTYPE_PYOBJ);
    emit_call_with_imm_arg(emit, RT_F_UNARY_OP, rt_unary_op, op, REG_ARG_1);
    emit_post_push_reg(emit, VTYPE_PYOBJ, REG_RET);
}

static void emit_native_binary_op(emit_t *emit, rt_binary_op_t op) {