/*
 * This file is part of the Micro Python project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2013, 2014 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include <stdint.h>
#include <stdio.h>
#include <assert.h>
#include <string.h>

#include "py/mpconfig.h"

// wrapper around everything in this file
#if MICROPY_EMIT_X64

#include "py/asmx64.h"

/* all offsets are measured in multiples of 8 bytes */
#define WORD_SIZE                (8)

#define OPCODE_NOP               (0x90)
#define OPCODE_PUSH_R64          (0x50) /* +rq */
#define OPCODE_PUSH_I64          (0x68)
#define OPCODE_PUSH_M64          (0xff) /* /6 */
#define OPCODE_POP_R64           (0x58) /* +rq */
#define OPCODE_RET               (0xc3)
#define OPCODE_MOV_I8_TO_R8      (0xb0) /* +rb */
#define OPCODE_MOV_I64_TO_R64    (0xb8) /* +rq */
#define OPCODE_MOV_I32_TO_RM32   (0xc7)
#define OPCODE_MOV_R8_TO_RM8     (0x88) /* /r */
#define OPCODE_MOV_R64_TO_RM64   (0x89) /* /r */
#define OPCODE_MOV_RM64_TO_R64   (0x8b) /* /r */
#define OPCODE_MOVZX_RM8_TO_R64  (0xb6) /* 0x0f 0xb6/r */
#define OPCODE_MOVZX_RM16_TO_R64 (0xb7) /* 0x0f 0xb7/r */
#define OPCODE_LEA_MEM_TO_R64    (0x8d) /* /r */
#define OPCODE_AND_R64_TO_RM64   (0x21) /* /r */
#define OPCODE_OR_R64_TO_RM64    (0x09) /* /r */
#define OPCODE_XOR_R64_TO_RM64   (0x31) /* /r */
#define OPCODE_ADD_R64_TO_RM64   (0x01) /* /r */
#define OPCODE_ADD_I32_TO_RM32   (0x81) /* /0 */
#define OPCODE_ADD_I8_TO_RM32    (0x83) /* /0 */
#define OPCODE_SUB_R64_FROM_RM64 (0x29) /* /r */
#define OPCODE_SUB_I32_FROM_RM64 (0x81) /* /5 */
#define OPCODE_SUB_I8_FROM_RM64  (0x83) /* /5 */
//#define OPCODE_SHL_RM32_BY_I8    (0xc1) /* /4 */
//#define OPCODE_SHR_RM32_BY_I8    (0xc1) /* /5 */
//#define OPCODE_SAR_RM32_BY_I8    (0xc1) /* /7 */
#define OPCODE_SHL_RM64_CL       (0xd3) /* /4 */
#define OPCODE_SAR_RM64_CL       (0xd3) /* /7 */
//#define OPCODE_CMP_I32_WITH_RM32 (0x81) /* /7 */
//#define OPCODE_CMP_I8_WITH_RM32  (0x83) /* /7 */
#define OPCODE_CMP_R64_WITH_RM64 (0x39) /* /r */
//#define OPCODE_CMP_RM32_WITH_R32 (0x3b)
#define OPCODE_TEST_R8_WITH_RM8  (0x84) /* /r */
#define OPCODE_JMP_REL8          (0xeb)
#define OPCODE_JMP_REL32         (0xe9)
#define OPCODE_JCC_REL8          (0x70) /* | jcc type */
#define OPCODE_JCC_REL32_A       (0x0f)
#define OPCODE_JCC_REL32_B       (0x80) /* | jcc type */
#define OPCODE_SETCC_RM8_A       (0x0f)
#define OPCODE_SETCC_RM8_B       (0x90) /* | jcc type, /0 */
#define OPCODE_CALL_REL32        (0xe8)
#define OPCODE_CALL_RM32         (0xff) /* /2 */
#define OPCODE_LEAVE             (0xc9)

#define MODRM_R64(x)    (((x) & 0x7) << 3)
#define MODRM_RM_DISP0  (0x00)
#define MODRM_RM_DISP8  (0x40)
#define MODRM_RM_DISP32 (0x80)
#define MODRM_RM_REG    (0xc0)
#define MODRM_RM_R64(x) ((x) & 0x7)
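
// a ModRM byte is mod(2 bits):reg(3 bits):rm(3 bits); for example
// MODRM_R64(1) | MODRM_RM_REG | MODRM_RM_R64(2) = 0xca selects reg=rcx,
// rm=rdx with direct register addressing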

#define OP_SIZE_PREFIX (0x66)

#define REX_PREFIX  (0x40)
#define REX_W       (0x08)  // width
#define REX_R       (0x04)  // register
#define REX_X       (0x02)  // index
#define REX_B       (0x01)  // base
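
// a REX prefix byte is 0b0100WRXB; for example REX_PREFIX | REX_W | REX_B
// = 0x49 selects 64-bit operand size and extends the ModRM rm field so it
// can reach r8-r15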

#define IMM32_L0(x) ((x) & 0xff)
#define IMM32_L1(x) (((x) >> 8) & 0xff)
#define IMM32_L2(x) (((x) >> 16) & 0xff)
#define IMM32_L3(x) (((x) >> 24) & 0xff)
#define IMM64_L4(x) (((x) >> 32) & 0xff)
#define IMM64_L5(x) (((x) >> 40) & 0xff)
#define IMM64_L6(x) (((x) >> 48) & 0xff)
#define IMM64_L7(x) (((x) >> 56) & 0xff)

#define UNSIGNED_FIT8(x) (((x) & 0xffffffffffffff00) == 0)
#define UNSIGNED_FIT32(x) (((x) & 0xffffffff00000000) == 0)
#define SIGNED_FIT8(x) ((((x) & 0xffffff80) == 0) || (((x) & 0xffffff80) == 0xffffff80))
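// SIGNED_FIT8 tests the value as 32-bit two's-complement; for example
// -5 & 0xffffff80 == 0xffffff80, so -5 fits in a signed 8-bit displacement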

struct _asm_x64_t {
    uint pass;
    mp_uint_t code_offset;
    mp_uint_t code_size;
    byte *code_base;
    byte dummy_data[8];

    mp_uint_t max_num_labels;
    mp_uint_t *label_offsets;
    int num_locals;
};

asm_x64_t *asm_x64_new(mp_uint_t max_num_labels) {
    asm_x64_t *as;

    as = m_new0(asm_x64_t, 1);
    as->max_num_labels = max_num_labels;
    as->label_offsets = m_new(mp_uint_t, max_num_labels);

    return as;
}

void asm_x64_free(asm_x64_t *as, bool free_code) {
    if (free_code) {
        MP_PLAT_FREE_EXEC(as->code_base, as->code_size);
    }
    m_del(mp_uint_t, as->label_offsets, as->max_num_labels);
    m_del_obj(asm_x64_t, as);
}

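// The code is assembled in multiple passes: during ASM_X64_PASS_COMPUTE no
// bytes are written, only code_offset is advanced and label offsets are
// recorded; ASM_X64_PASS_EMIT then replays the same calls to write the real
// bytes into code_base.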
void asm_x64_start_pass(asm_x64_t *as, uint pass) {
    as->pass = pass;
    as->code_offset = 0;
    if (pass == ASM_X64_PASS_COMPUTE) {
        // reset all labels
        memset(as->label_offsets, -1, as->max_num_labels * sizeof(mp_uint_t));
    }
}

void asm_x64_end_pass(asm_x64_t *as) {
    if (as->pass == ASM_X64_PASS_COMPUTE) {
        MP_PLAT_ALLOC_EXEC(as->code_offset, (void**) &as->code_base, &as->code_size);
        if (as->code_base == NULL) {
            assert(0);
        }
        //printf("code_size: %u\n", as->code_size);
    }

    /*
    // check labels are resolved
    if (as->label != NULL)
    {
        int i;
        for (i = 0; i < as->label->len; ++i)
            if (g_array_index(as->label, Label, i).unresolved != NULL)
                return false;
    }
    */
}

// all functions must go through this one to emit bytes
STATIC byte *asm_x64_get_cur_to_write_bytes(asm_x64_t *as, int num_bytes_to_write) {
    //printf("emit %d\n", num_bytes_to_write);
    if (as->pass < ASM_X64_PASS_EMIT) {
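        // not yet the emit pass: only count the bytes; writes are directed to
        // dummy_data, which holds the largest single write (8 bytes)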
        as->code_offset += num_bytes_to_write;
        return as->dummy_data;
    } else {
        assert(as->code_offset + num_bytes_to_write <= as->code_size);
        byte *c = as->code_base + as->code_offset;
        as->code_offset += num_bytes_to_write;
        return c;
    }
}

mp_uint_t asm_x64_get_code_size(asm_x64_t *as) {
    return as->code_size;
}

void *asm_x64_get_code(asm_x64_t *as) {
    return as->code_base;
}

STATIC void asm_x64_write_byte_1(asm_x64_t *as, byte b1) {
    byte* c = asm_x64_get_cur_to_write_bytes(as, 1);
    c[0] = b1;
}

STATIC void asm_x64_write_byte_2(asm_x64_t *as, byte b1, byte b2) {
    byte* c = asm_x64_get_cur_to_write_bytes(as, 2);
    c[0] = b1;
    c[1] = b2;
}

STATIC void asm_x64_write_byte_3(asm_x64_t *as, byte b1, byte b2, byte b3) {
    byte* c = asm_x64_get_cur_to_write_bytes(as, 3);
    c[0] = b1;
    c[1] = b2;
    c[2] = b3;
}

STATIC void asm_x64_write_word32(asm_x64_t *as, int w32) {
    byte* c = asm_x64_get_cur_to_write_bytes(as, 4);
    c[0] = IMM32_L0(w32);
    c[1] = IMM32_L1(w32);
    c[2] = IMM32_L2(w32);
    c[3] = IMM32_L3(w32);
}

STATIC void asm_x64_write_word64(asm_x64_t *as, int64_t w64) {
    byte* c = asm_x64_get_cur_to_write_bytes(as, 8);
    c[0] = IMM32_L0(w64);
    c[1] = IMM32_L1(w64);
    c[2] = IMM32_L2(w64);
    c[3] = IMM32_L3(w64);
    c[4] = IMM64_L4(w64);
    c[5] = IMM64_L5(w64);
    c[6] = IMM64_L6(w64);
    c[7] = IMM64_L7(w64);
}

/* unused
STATIC void asm_x64_write_word32_to(asm_x64_t *as, int offset, int w32) {
    byte* c;
    assert(offset + 4 <= as->code_size);
    c = as->code_base + offset;
    c[0] = IMM32_L0(w32);
    c[1] = IMM32_L1(w32);
    c[2] = IMM32_L2(w32);
    c[3] = IMM32_L3(w32);
}
*/

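// Note the two special rm encodings: rm=RSP (100) means a SIB byte follows
// (hence the assert below), and mod=00 with rm=RBP (101) means RIP-relative
// disp32, so RBP as base always needs an explicit displacement.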
STATIC void asm_x64_write_r64_disp(asm_x64_t *as, int r64, int disp_r64, int disp_offset) {
    assert(disp_r64 < 8);
    assert(disp_r64 != ASM_X64_REG_RSP);

    if (disp_offset == 0 && disp_r64 != ASM_X64_REG_RBP) {
        asm_x64_write_byte_1(as, MODRM_R64(r64) | MODRM_RM_DISP0 | MODRM_RM_R64(disp_r64));
    } else if (SIGNED_FIT8(disp_offset)) {
        asm_x64_write_byte_2(as, MODRM_R64(r64) | MODRM_RM_DISP8 | MODRM_RM_R64(disp_r64), IMM32_L0(disp_offset));
    } else {
        asm_x64_write_byte_1(as, MODRM_R64(r64) | MODRM_RM_DISP32 | MODRM_RM_R64(disp_r64));
        asm_x64_write_word32(as, disp_offset);
    }
}

STATIC void asm_x64_generic_r64_r64(asm_x64_t *as, int dest_r64, int src_r64, int op) {
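    // for example asm_x64_mov_r64_r64(as, ASM_X64_REG_RAX, ASM_X64_REG_R8)
    // comes through here and emits 4c 89 c0 (REX.W+REX.R, opcode, ModRM),
    // ie mov rax, r8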
    asm_x64_write_byte_3(as, REX_PREFIX | REX_W | (src_r64 < 8 ? 0 : REX_R) | (dest_r64 < 8 ? 0 : REX_B), op, MODRM_R64(src_r64) | MODRM_RM_REG | MODRM_RM_R64(dest_r64));
}

void asm_x64_nop(asm_x64_t *as) {
    asm_x64_write_byte_1(as, OPCODE_NOP);
}

void asm_x64_push_r64(asm_x64_t *as, int src_r64) {
    if (src_r64 < 8) {
        asm_x64_write_byte_1(as, OPCODE_PUSH_R64 | src_r64);
    } else {
        asm_x64_write_byte_2(as, REX_PREFIX | REX_B, OPCODE_PUSH_R64 | (src_r64 & 7));
    }
}

/*
void asm_x64_push_i32(asm_x64_t *as, int src_i32) {
    asm_x64_write_byte_1(as, OPCODE_PUSH_I64);
    asm_x64_write_word32(as, src_i32); // will be sign extended to 64 bits
}
*/

/*
void asm_x64_push_disp(asm_x64_t *as, int src_r64, int src_offset) {
    assert(src_r64 < 8);
    asm_x64_write_byte_1(as, OPCODE_PUSH_M64);
    asm_x64_write_r64_disp(as, 6, src_r64, src_offset);
}
*/

void asm_x64_pop_r64(asm_x64_t *as, int dest_r64) {
    if (dest_r64 < 8) {
        asm_x64_write_byte_1(as, OPCODE_POP_R64 | dest_r64);
    } else {
        asm_x64_write_byte_2(as, REX_PREFIX | REX_B, OPCODE_POP_R64 | (dest_r64 & 7));
    }
}

STATIC void asm_x64_ret(asm_x64_t *as) {
    asm_x64_write_byte_1(as, OPCODE_RET);
}

void asm_x64_mov_r64_r64(asm_x64_t *as, int dest_r64, int src_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, src_r64, OPCODE_MOV_R64_TO_RM64);
}

void asm_x64_mov_r8_to_mem8(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp) {
    assert(dest_r64 < 8);
    if (src_r64 < 8) {
        asm_x64_write_byte_1(as, OPCODE_MOV_R8_TO_RM8);
    } else {
        asm_x64_write_byte_2(as, REX_PREFIX | REX_R, OPCODE_MOV_R8_TO_RM8);
    }
    asm_x64_write_r64_disp(as, src_r64, dest_r64, dest_disp);
}

void asm_x64_mov_r16_to_mem16(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp) {
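    // the 0x66 operand-size prefix turns the 32-bit mov (opcode 0x89) into a
    // 16-bit store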
    assert(dest_r64 < 8);
    if (src_r64 < 8) {
        asm_x64_write_byte_2(as, OP_SIZE_PREFIX, OPCODE_MOV_R64_TO_RM64);
    } else {
        asm_x64_write_byte_3(as, OP_SIZE_PREFIX, REX_PREFIX | REX_R, OPCODE_MOV_R64_TO_RM64);
    }
    asm_x64_write_r64_disp(as, src_r64, dest_r64, dest_disp);
}

void asm_x64_mov_r64_to_mem64(asm_x64_t *as, int src_r64, int dest_r64, int dest_disp) {
    // use REX prefix for 64 bit operation
    assert(dest_r64 < 8);
    asm_x64_write_byte_2(as, REX_PREFIX | REX_W | (src_r64 < 8 ? 0 : REX_R), OPCODE_MOV_R64_TO_RM64);
    asm_x64_write_r64_disp(as, src_r64, dest_r64, dest_disp);
}

void asm_x64_mov_mem8_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64) {
    assert(src_r64 < 8);
    if (dest_r64 < 8) {
        asm_x64_write_byte_2(as, 0x0f, OPCODE_MOVZX_RM8_TO_R64);
    } else {
        asm_x64_write_byte_3(as, REX_PREFIX | REX_R, 0x0f, OPCODE_MOVZX_RM8_TO_R64);
    }
    asm_x64_write_r64_disp(as, dest_r64, src_r64, src_disp);
}

void asm_x64_mov_mem16_to_r64zx(asm_x64_t *as, int src_r64, int src_disp, int dest_r64) {
    assert(src_r64 < 8);
    if (dest_r64 < 8) {
        asm_x64_write_byte_2(as, 0x0f, OPCODE_MOVZX_RM16_TO_R64);
    } else {
        asm_x64_write_byte_3(as, REX_PREFIX | REX_R, 0x0f, OPCODE_MOVZX_RM16_TO_R64);
    }
    asm_x64_write_r64_disp(as, dest_r64, src_r64, src_disp);
}

void asm_x64_mov_mem64_to_r64(asm_x64_t *as, int src_r64, int src_disp, int dest_r64) {
    // use REX prefix for 64 bit operation
    assert(src_r64 < 8);
    asm_x64_write_byte_2(as, REX_PREFIX | REX_W | (dest_r64 < 8 ? 0 : REX_R), OPCODE_MOV_RM64_TO_R64);
    asm_x64_write_r64_disp(as, dest_r64, src_r64, src_disp);
}

STATIC void asm_x64_lea_disp_to_r64(asm_x64_t *as, int src_r64, int src_disp, int dest_r64) {
    // use REX prefix for 64 bit operation
    assert(src_r64 < 8);
    assert(dest_r64 < 8);
    asm_x64_write_byte_2(as, REX_PREFIX | REX_W, OPCODE_LEA_MEM_TO_R64);
    asm_x64_write_r64_disp(as, dest_r64, src_r64, src_disp);
}

/*
void asm_x64_mov_i8_to_r8(asm_x64_t *as, int src_i8, int dest_r64) {
    assert(dest_r64 < 8);
    asm_x64_write_byte_2(as, OPCODE_MOV_I8_TO_R8 | dest_r64, src_i8);
}
*/

STATIC void asm_x64_mov_i32_to_r64(asm_x64_t *as, int src_i32, int dest_r64) {
    // the default form takes a 32-bit immediate, which the cpu zero-extends
    // into the full 64-bit register
    if (dest_r64 < 8) {
        asm_x64_write_byte_1(as, OPCODE_MOV_I64_TO_R64 | dest_r64);
    } else {
        asm_x64_write_byte_2(as, REX_PREFIX | REX_B, OPCODE_MOV_I64_TO_R64 | (dest_r64 & 7));
    }
    asm_x64_write_word32(as, src_i32);
}

void asm_x64_mov_i64_to_r64(asm_x64_t *as, int64_t src_i64, int dest_r64) {
    // the default form takes only a 32-bit immediate;
    // the REX.W prefix is needed to move a full 64-bit immediate into r64
    assert(dest_r64 < 8);
    asm_x64_write_byte_2(as, REX_PREFIX | REX_W, OPCODE_MOV_I64_TO_R64 | dest_r64);
    asm_x64_write_word64(as, src_i64);
}

void asm_x64_mov_i64_to_r64_optimised(asm_x64_t *as, int64_t src_i64, int dest_r64) {
    // TODO use movzx, movsx if possible
    if (UNSIGNED_FIT32(src_i64)) {
        // 5 bytes
        asm_x64_mov_i32_to_r64(as, src_i64 & 0xffffffff, dest_r64);
    } else {
        // 10 bytes
        asm_x64_mov_i64_to_r64(as, src_i64, dest_r64);
    }
}

// src_i64 is stored as a full word in the code, and aligned to machine-word boundary
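// (e.g. if code_offset is 5, one nop moves it to 6, so (6 + 2) & 7 == 0 and
// the immediate itself begins at the 8-byte-aligned offset 8)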
void asm_x64_mov_i64_to_r64_aligned(asm_x64_t *as, int64_t src_i64, int dest_r64) {
    // mov instruction uses 2 bytes for the instruction, before the i64
    while (((as->code_offset + 2) & (WORD_SIZE - 1)) != 0) {
        asm_x64_nop(as);
    }
    asm_x64_mov_i64_to_r64(as, src_i64, dest_r64);
}

void asm_x64_and_r64_r64(asm_x64_t *as, int dest_r64, int src_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, src_r64, OPCODE_AND_R64_TO_RM64);
}

void asm_x64_or_r64_r64(asm_x64_t *as, int dest_r64, int src_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, src_r64, OPCODE_OR_R64_TO_RM64);
}

void asm_x64_xor_r64_r64(asm_x64_t *as, int dest_r64, int src_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, src_r64, OPCODE_XOR_R64_TO_RM64);
}

void asm_x64_shl_r64_cl(asm_x64_t *as, int dest_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, 4, OPCODE_SHL_RM64_CL);
}

void asm_x64_sar_r64_cl(asm_x64_t *as, int dest_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, 7, OPCODE_SAR_RM64_CL);
}

void asm_x64_add_r64_r64(asm_x64_t *as, int dest_r64, int src_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, src_r64, OPCODE_ADD_R64_TO_RM64);
}

void asm_x64_sub_r64_r64(asm_x64_t *as, int dest_r64, int src_r64) {
    asm_x64_generic_r64_r64(as, dest_r64, src_r64, OPCODE_SUB_R64_FROM_RM64);
}

/*
void asm_x64_sub_i32_from_r32(asm_x64_t *as, int src_i32, int dest_r32) {
    if (SIGNED_FIT8(src_i32)) {
        // defaults to 32 bit operation
        asm_x64_write_byte_2(as, OPCODE_SUB_I8_FROM_RM64, MODRM_R64(5) | MODRM_RM_REG | MODRM_RM_R64(dest_r32));
        asm_x64_write_byte_1(as, src_i32 & 0xff);
    } else {
        // defaults to 32 bit operation
        asm_x64_write_byte_2(as, OPCODE_SUB_I32_FROM_RM64, MODRM_R64(5) | MODRM_RM_REG | MODRM_RM_R64(dest_r32));
        asm_x64_write_word32(as, src_i32);
    }
}
*/

STATIC void asm_x64_sub_r64_i32(asm_x64_t *as, int dest_r64, int src_i32) {
    assert(dest_r64 < 8);
    if (SIGNED_FIT8(src_i32)) {
        // use REX prefix for 64 bit operation
        asm_x64_write_byte_3(as, REX_PREFIX | REX_W, OPCODE_SUB_I8_FROM_RM64, MODRM_R64(5) | MODRM_RM_REG | MODRM_RM_R64(dest_r64));
        asm_x64_write_byte_1(as, src_i32 & 0xff);
    } else {
        // use REX prefix for 64 bit operation
        asm_x64_write_byte_3(as, REX_PREFIX | REX_W, OPCODE_SUB_I32_FROM_RM64, MODRM_R64(5) | MODRM_RM_REG | MODRM_RM_R64(dest_r64));
        asm_x64_write_word32(as, src_i32);
    }
}

/*
void asm_x64_shl_r32_by_imm(asm_x64_t *as, int r32, int imm) {
    asm_x64_write_byte_2(as, OPCODE_SHL_RM32_BY_I8, MODRM_R64(4) | MODRM_RM_REG | MODRM_RM_R64(r32));
    asm_x64_write_byte_1(as, imm);
}

void asm_x64_shr_r32_by_imm(asm_x64_t *as, int r32, int imm) {
    asm_x64_write_byte_2(as, OPCODE_SHR_RM32_BY_I8, MODRM_R64(5) | MODRM_RM_REG | MODRM_RM_R64(r32));
    asm_x64_write_byte_1(as, imm);
}

void asm_x64_sar_r32_by_imm(asm_x64_t *as, int r32, int imm) {
    asm_x64_write_byte_2(as, OPCODE_SAR_RM32_BY_I8, MODRM_R64(7) | MODRM_RM_REG | MODRM_RM_R64(r32));
    asm_x64_write_byte_1(as, imm);
}
*/

void asm_x64_cmp_r64_with_r64(asm_x64_t *as, int src_r64_a, int src_r64_b) {
    asm_x64_generic_r64_r64(as, src_r64_b, src_r64_a, OPCODE_CMP_R64_WITH_RM64);
}

/*
void asm_x64_cmp_i32_with_r32(asm_x64_t *as, int src_i32, int src_r32) {
    if (SIGNED_FIT8(src_i32)) {
        asm_x64_write_byte_2(as, OPCODE_CMP_I8_WITH_RM32, MODRM_R64(7) | MODRM_RM_REG | MODRM_RM_R64(src_r32));
        asm_x64_write_byte_1(as, src_i32 & 0xff);
    } else {
        asm_x64_write_byte_2(as, OPCODE_CMP_I32_WITH_RM32, MODRM_R64(7) | MODRM_RM_REG | MODRM_RM_R64(src_r32));
        asm_x64_write_word32(as, src_i32);
    }
}
*/

void asm_x64_test_r8_with_r8(asm_x64_t *as, int src_r64_a, int src_r64_b) {
    // TODO implement for other registers
    assert(src_r64_a == ASM_X64_REG_RAX);
    assert(src_r64_b == ASM_X64_REG_RAX);
    asm_x64_write_byte_2(as, OPCODE_TEST_R8_WITH_RM8, MODRM_R64(src_r64_a) | MODRM_RM_REG | MODRM_RM_R64(src_r64_b));
}

void asm_x64_setcc_r8(asm_x64_t *as, int jcc_type, int dest_r8) {
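    // note: without a REX prefix, rm encodings 4-7 select ah/ch/dh/bh rather
    // than spl/bpl/sil/dil, so callers should only pass the low registers here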
    assert(dest_r8 < 8);
    asm_x64_write_byte_3(as, OPCODE_SETCC_RM8_A, OPCODE_SETCC_RM8_B | jcc_type, MODRM_R64(0) | MODRM_RM_REG | MODRM_RM_R64(dest_r8));
}

void asm_x64_label_assign(asm_x64_t *as, int label) {
    assert(label < as->max_num_labels);
    if (as->pass < ASM_X64_PASS_EMIT) {
        // assign label offset
        assert(as->label_offsets[label] == -1);
        as->label_offsets[label] = as->code_offset;
    } else {
        // ensure label offset has not changed from PASS_COMPUTE to PASS_EMIT
        //printf("l%d: (at %ld=%ld)\n", label, as->label_offsets[label], as->code_offset);
        assert(as->label_offsets[label] == as->code_offset);
    }
}

STATIC mp_uint_t get_label_dest(asm_x64_t *as, int label) {
    assert(label < as->max_num_labels);
    return as->label_offsets[label];
}

void asm_x64_jmp_label(asm_x64_t *as, int label) {
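    // jump displacements are relative to the address of the next instruction,
    // hence the rel adjustments below: the rel8 forms are 2 bytes long,
    // jmp rel32 is 5, and the two-byte jcc rel32 form (see below) is 6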
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->code_offset;
    if (dest != -1 && rel < 0) {
        // is a backwards jump, so we know the size of the jump on the first pass
        // calculate rel assuming 8 bit relative jump
        rel -= 2;
        if (SIGNED_FIT8(rel)) {
            asm_x64_write_byte_2(as, OPCODE_JMP_REL8, rel & 0xff);
        } else {
            rel += 2;
            goto large_jump;
        }
    } else {
        // is a forwards jump, so need to assume it's large
        large_jump:
        rel -= 5;
        asm_x64_write_byte_1(as, OPCODE_JMP_REL32);
        asm_x64_write_word32(as, rel);
    }
}

void asm_x64_jcc_label(asm_x64_t *as, int jcc_type, int label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->code_offset;
    if (dest != -1 && rel < 0) {
        // is a backwards jump, so we know the size of the jump on the first pass
        // calculate rel assuming 8 bit relative jump
        rel -= 2;
        if (SIGNED_FIT8(rel)) {
            asm_x64_write_byte_2(as, OPCODE_JCC_REL8 | jcc_type, rel & 0xff);
        } else {
            rel += 2;
            goto large_jump;
        }
    } else {
        // is a forwards jump, so need to assume it's large
        large_jump:
        rel -= 6;
        asm_x64_write_byte_2(as, OPCODE_JCC_REL32_A, OPCODE_JCC_REL32_B | jcc_type);
        asm_x64_write_word32(as, rel);
    }
}

void asm_x64_entry(asm_x64_t *as, int num_locals) {
    asm_x64_push_r64(as, ASM_X64_REG_RBP);
    asm_x64_mov_r64_r64(as, ASM_X64_REG_RBP, ASM_X64_REG_RSP);
    if (num_locals < 0) {
        num_locals = 0;
    }
    num_locals |= 1; // make it odd so stack is aligned on 16 byte boundary
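    // (on entry RSP % 16 == 8 because of the pushed return address; push rbp
    // makes it 0, subtracting an odd number of words makes it 8, and the three
    // callee-save pushes below bring it back to 0, as the ABI requires at
    // subsequent call sites)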
    asm_x64_sub_r64_i32(as, ASM_X64_REG_RSP, num_locals * WORD_SIZE);
    asm_x64_push_r64(as, ASM_X64_REG_RBX);
    asm_x64_push_r64(as, ASM_X64_REG_R12);
    asm_x64_push_r64(as, ASM_X64_REG_R13);
    as->num_locals = num_locals;
}

void asm_x64_exit(asm_x64_t *as) {
    asm_x64_pop_r64(as, ASM_X64_REG_R13);
    asm_x64_pop_r64(as, ASM_X64_REG_R12);
    asm_x64_pop_r64(as, ASM_X64_REG_RBX);
    asm_x64_write_byte_1(as, OPCODE_LEAVE);
    asm_x64_ret(as);
}

// locals:
//  - stored on the stack in ascending order
//  - numbered 0 through as->num_locals-1
//  - RBP points above the last local
//
//                          | RBP
//                          v
//  l0  l1  l2  ...  l(n-1)
//  ^                ^
//  | low address    | high address in RAM
//
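// e.g. with num_locals == 3, local 0 lives at [RBP - 24] and local 2 at [RBP - 8]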
STATIC int asm_x64_local_offset_from_ebp(asm_x64_t *as, int local_num) {
    return (-as->num_locals + local_num) * WORD_SIZE;
}

void asm_x64_mov_local_to_r64(asm_x64_t *as, int src_local_num, int dest_r64) {
    asm_x64_mov_mem64_to_r64(as, ASM_X64_REG_RBP, asm_x64_local_offset_from_ebp(as, src_local_num), dest_r64);
}

void asm_x64_mov_r64_to_local(asm_x64_t *as, int src_r64, int dest_local_num) {
    asm_x64_mov_r64_to_mem64(as, src_r64, ASM_X64_REG_RBP, asm_x64_local_offset_from_ebp(as, dest_local_num));
}

void asm_x64_mov_local_addr_to_r64(asm_x64_t *as, int local_num, int dest_r64) {
    int offset = asm_x64_local_offset_from_ebp(as, local_num);
    if (offset == 0) {
        asm_x64_mov_r64_r64(as, dest_r64, ASM_X64_REG_RBP);
    } else {
        asm_x64_lea_disp_to_r64(as, ASM_X64_REG_RBP, offset, dest_r64);
    }
}

/*
void asm_x64_push_local(asm_x64_t *as, int local_num) {
    asm_x64_push_disp(as, ASM_X64_REG_RBP, asm_x64_local_offset_from_ebp(as, local_num));
}

void asm_x64_push_local_addr(asm_x64_t *as, int local_num, int temp_r64)
{
    asm_x64_mov_r64_r64(as, temp_r64, ASM_X64_REG_RBP);
    asm_x64_add_i32_to_r32(as, asm_x64_local_offset_from_ebp(as, local_num), temp_r64);
    asm_x64_push_r64(as, temp_r64);
}
*/

/*
   can't use these because code might be relocated when resized

void asm_x64_call(asm_x64_t *as, void* func)
{
    asm_x64_sub_i32_from_r32(as, 8, ASM_X64_REG_RSP);
    asm_x64_write_byte_1(as, OPCODE_CALL_REL32);
    asm_x64_write_word32(as, func - (void*)(as->code_cur + 4));
    asm_x64_mov_r64_r64(as, ASM_X64_REG_RSP, ASM_X64_REG_RBP);
}

void asm_x64_call_i1(asm_x64_t *as, void* func, int i1)
{
    asm_x64_sub_i32_from_r32(as, 8, ASM_X64_REG_RSP);
    asm_x64_sub_i32_from_r32(as, 12, ASM_X64_REG_RSP);
    asm_x64_push_i32(as, i1);
    asm_x64_write_byte_1(as, OPCODE_CALL_REL32);
    asm_x64_write_word32(as, func - (void*)(as->code_cur + 4));
    asm_x64_add_i32_to_r32(as, 16, ASM_X64_REG_RSP);
    asm_x64_mov_r64_r64(as, ASM_X64_REG_RSP, ASM_X64_REG_RBP);
}
*/

void asm_x64_call_ind(asm_x64_t *as, void *ptr, int temp_r64) {
    assert(temp_r64 < 8);
#ifdef __LP64__
    asm_x64_mov_i64_to_r64_optimised(as, (int64_t)ptr, temp_r64);
#else
    // If we get here, sizeof(int) == sizeof(void*).
    asm_x64_mov_i64_to_r64_optimised(as, (int64_t)(unsigned int)ptr, temp_r64);
#endif
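    // calling indirectly through the register avoids the +-2GiB reach limit of
    // call rel32; e.g. with temp_r64 == ASM_X64_REG_RAX the next line emits
    // ff d0, ie call rax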
    asm_x64_write_byte_2(as, OPCODE_CALL_RM32, MODRM_R64(2) | MODRM_RM_REG | MODRM_RM_R64(temp_r64));
    // this reduces code size by 2 bytes per call, but doesn't seem to speed it up at all
    // doesn't work anymore because calls are 64 bits away
    /*
    asm_x64_write_byte_1(as, OPCODE_CALL_REL32);
    asm_x64_write_word32(as, ptr - (void*)(as->code_base + as->code_offset + 4));
    */
}

#endif // MICROPY_EMIT_X64