// in principle, rt_xxx functions are called only by vm/native/viper and make assumptions about args
// mp_xxx functions are safer and can be called by anyone
// note that rt_assign_xxx are called only from emit*, and maybe we can rename them to reflect this

#include <stdio.h>
#include <string.h>
#include <assert.h>

9
#include "nlr.h"
Damien's avatar
Damien committed
10
#include "misc.h"
11
#include "mpconfig.h"
12
#include "qstr.h"
13
#include "obj.h"
14
#include "parsenum.h"
15
#include "runtime0.h"
Damien's avatar
Damien committed
16
#include "runtime.h"
17
18
#include "map.h"
#include "builtin.h"
19
#include "objarray.h"
20
#include "bc.h"
21

22
#if 0 // print debugging info
#define DEBUG_PRINT (1)
#define WRITE_CODE (1)
// forward debug output to printf; the previous self-referential
// "#define DEBUG_printf DEBUG_printf" expanded to an undeclared function,
// so enabling this branch failed to build
#define DEBUG_printf(...) printf(__VA_ARGS__)
#define DEBUG_OP_printf(...) DEBUG_printf(__VA_ARGS__)
#else // don't print debugging info
#define DEBUG_printf(...) (void)0
#define DEBUG_OP_printf(...) (void)0
#endif
Damien's avatar
Damien committed
31

32
// locals and globals need to be pointers because they can be the same in outer module scope
33
34
35
36
STATIC mp_map_t *map_locals;        // current local scope (aliases map_globals at module level)
STATIC mp_map_t *map_globals;       // current global (module) scope
STATIC mp_map_t map_builtins;       // mutable builtins, searched before the ROM builtin_table
STATIC mp_map_t map_loaded_modules; // TODO: expose as sys.modules
37

Damien's avatar
Damien committed
38
// what kind of code an mp_code_t slot holds
typedef enum {
    MP_CODE_NONE,       // slot allocated but not yet assigned
    MP_CODE_BYTE,       // byte code for the VM (u_byte)
    MP_CODE_NATIVE,     // compiled machine code with mp_fun_t convention (u_native)
    MP_CODE_INLINE_ASM, // inline assembler, raw function pointer (u_inline_asm)
} mp_code_kind_t;

// descriptor for one compiled code object; stored in the global
// unique_codes table, indexed by unique code id
typedef struct _mp_code_t {
    mp_code_kind_t kind : 8;
    uint scope_flags : 8;
    uint n_args : 16;
    uint n_state : 16; // for byte code: number of locals plus stack slots
    union {
        struct {
            byte *code;
            uint len;
        } u_byte;
        struct {
            mp_fun_t fun;
        } u_native;
        struct {
            void *fun;
        } u_inline_asm;
    };
    qstr *arg_names; // names of the positional arguments, or NULL
} mp_code_t;
Damien's avatar
Damien committed
64

65
66
67
STATIC uint next_unique_code_id;              // next id rt_get_unique_code_id hands out; 0 means "no code"
STATIC machine_uint_t unique_codes_alloc = 0; // number of entries allocated in unique_codes
STATIC mp_code_t *unique_codes = NULL;        // table of code objects, indexed by unique code id

#ifdef WRITE_CODE
// when WRITE_CODE is enabled, raw native/asm code is also dumped to this file
FILE *fp_write_code = NULL;
#endif
Damien's avatar
Damien committed
72

73
74
75
76
77
78
79
80
81
82
83
// builtins
// we put this table in ROM because it's always needed and takes up quite a bit of room in RAM
// in fact, it uses less ROM here in table form than the equivalent in code form initialising a dynamic mp_map_t object in RAM
// at the moment it's a linear table, but we could convert it to a const mp_map_t table with a simple preprocessing script
// if we wanted to allow dynamic modification of the builtins, we could provide an mp_map_t object which is searched before this one

// one (name, object) pair; the table is scanned linearly by rt_load_global
typedef struct _mp_builtin_elem_t {
    qstr qstr;
    mp_obj_t fun;
} mp_builtin_elem_t;

STATIC const mp_builtin_elem_t builtin_table[] = {
    // built-in core functions
    { MP_QSTR___build_class__, (mp_obj_t)&mp_builtin___build_class___obj },
    { MP_QSTR___import__, (mp_obj_t)&mp_builtin___import___obj },
    { MP_QSTR___repl_print__, (mp_obj_t)&mp_builtin___repl_print___obj },

    // built-in types
    { MP_QSTR_bool, (mp_obj_t)&bool_type },
#if MICROPY_ENABLE_FLOAT
    { MP_QSTR_complex, (mp_obj_t)&mp_type_complex },
#endif
    { MP_QSTR_dict, (mp_obj_t)&dict_type },
    { MP_QSTR_enumerate, (mp_obj_t)&enumerate_type },
    { MP_QSTR_filter, (mp_obj_t)&filter_type },
#if MICROPY_ENABLE_FLOAT
    { MP_QSTR_float, (mp_obj_t)&mp_type_float },
#endif
    { MP_QSTR_int, (mp_obj_t)&int_type },
    { MP_QSTR_list, (mp_obj_t)&list_type },
    { MP_QSTR_map, (mp_obj_t)&map_type },
    { MP_QSTR_set, (mp_obj_t)&set_type },
    { MP_QSTR_super, (mp_obj_t)&super_type },
    { MP_QSTR_tuple, (mp_obj_t)&tuple_type },
    { MP_QSTR_type, (mp_obj_t)&mp_type_type },
    { MP_QSTR_zip, (mp_obj_t)&zip_type },

    { MP_QSTR_classmethod, (mp_obj_t)&mp_type_classmethod },
    { MP_QSTR_staticmethod, (mp_obj_t)&mp_type_staticmethod },

    // built-in user functions
    { MP_QSTR_abs, (mp_obj_t)&mp_builtin_abs_obj },
    { MP_QSTR_all, (mp_obj_t)&mp_builtin_all_obj },
    { MP_QSTR_any, (mp_obj_t)&mp_builtin_any_obj },
    { MP_QSTR_bytes, (mp_obj_t)&mp_builtin_bytes_obj },
    { MP_QSTR_callable, (mp_obj_t)&mp_builtin_callable_obj },
    { MP_QSTR_chr, (mp_obj_t)&mp_builtin_chr_obj },
    { MP_QSTR_dir, (mp_obj_t)&mp_builtin_dir_obj },
    { MP_QSTR_divmod, (mp_obj_t)&mp_builtin_divmod_obj },
    { MP_QSTR_eval, (mp_obj_t)&mp_builtin_eval_obj },
    { MP_QSTR_exec, (mp_obj_t)&mp_builtin_exec_obj },
    { MP_QSTR_hash, (mp_obj_t)&mp_builtin_hash_obj },
    { MP_QSTR_id, (mp_obj_t)&mp_builtin_id_obj },
    { MP_QSTR_isinstance, (mp_obj_t)&mp_builtin_isinstance_obj },
    { MP_QSTR_issubclass, (mp_obj_t)&mp_builtin_issubclass_obj },
    { MP_QSTR_iter, (mp_obj_t)&mp_builtin_iter_obj },
    { MP_QSTR_len, (mp_obj_t)&mp_builtin_len_obj },
    { MP_QSTR_max, (mp_obj_t)&mp_builtin_max_obj },
    { MP_QSTR_min, (mp_obj_t)&mp_builtin_min_obj },
    { MP_QSTR_next, (mp_obj_t)&mp_builtin_next_obj },
    { MP_QSTR_ord, (mp_obj_t)&mp_builtin_ord_obj },
    { MP_QSTR_pow, (mp_obj_t)&mp_builtin_pow_obj },
    { MP_QSTR_print, (mp_obj_t)&mp_builtin_print_obj },
    { MP_QSTR_range, (mp_obj_t)&mp_builtin_range_obj },
    { MP_QSTR_repr, (mp_obj_t)&mp_builtin_repr_obj },
    { MP_QSTR_sorted, (mp_obj_t)&mp_builtin_sorted_obj },
    { MP_QSTR_sum, (mp_obj_t)&mp_builtin_sum_obj },
    { MP_QSTR_str, (mp_obj_t)&mp_builtin_str_obj },
    { MP_QSTR_bytearray, (mp_obj_t)&mp_builtin_bytearray_obj },

    // built-in exceptions
    { MP_QSTR_BaseException, (mp_obj_t)&mp_type_BaseException },
    { MP_QSTR_AssertionError, (mp_obj_t)&mp_type_AssertionError },
    { MP_QSTR_AttributeError, (mp_obj_t)&mp_type_AttributeError },
    { MP_QSTR_ImportError, (mp_obj_t)&mp_type_ImportError },
    { MP_QSTR_IndentationError, (mp_obj_t)&mp_type_IndentationError },
    { MP_QSTR_IndexError, (mp_obj_t)&mp_type_IndexError },
    { MP_QSTR_KeyError, (mp_obj_t)&mp_type_KeyError },
    { MP_QSTR_NameError, (mp_obj_t)&mp_type_NameError },
    { MP_QSTR_SyntaxError, (mp_obj_t)&mp_type_SyntaxError },
    { MP_QSTR_TypeError, (mp_obj_t)&mp_type_TypeError },
    { MP_QSTR_ValueError, (mp_obj_t)&mp_type_ValueError },
    // Somehow CPython managed to have OverflowError not inherit from ValueError ;-/
    // TODO: For MICROPY_CPYTHON_COMPAT==0 use ValueError to avoid exc proliferation
    { MP_QSTR_OverflowError, (mp_obj_t)&mp_type_OverflowError },
    { MP_QSTR_OSError, (mp_obj_t)&mp_type_OSError },
    { MP_QSTR_NotImplementedError, (mp_obj_t)&mp_type_NotImplementedError },
    { MP_QSTR_StopIteration, (mp_obj_t)&mp_type_StopIteration },

    // Extra builtins as defined by a port
    MICROPY_EXTRA_BUILTINS

    { MP_QSTR_, MP_OBJ_NULL }, // end of list sentinel
};

168
// a good optimising compiler will inline this if necessary
169
STATIC void mp_map_add_qstr(mp_map_t *map, qstr qstr, mp_obj_t value) {
170
171
172
    mp_map_lookup(map, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = value;
}

173
// initialise the runtime: scope maps, builtins, core modules and the
// unique-code table; must be called once before any other rt_xxx function
void rt_init(void) {
    // locals = globals for outer module (see Objects/frameobject.c/PyFrame_New())
    map_locals = map_globals = mp_map_new(1);
    mp_map_add_qstr(map_globals, MP_QSTR___name__, MP_OBJ_NEW_QSTR(MP_QSTR___main__));

    // init built-in hash table
    mp_map_init(&map_builtins, 3);

    // init loaded modules table
    mp_map_init(&map_loaded_modules, 3);

    // built-in objects
    mp_map_add_qstr(&map_builtins, MP_QSTR_Ellipsis, mp_const_ellipsis);

    // pre-create the array module with its single type
    mp_obj_t m_array = mp_obj_new_module(MP_QSTR_array);
    rt_store_attr(m_array, MP_QSTR_array, (mp_obj_t)&array_type);

    // pre-create the collections module with namedtuple
    mp_obj_t m_collections = mp_obj_new_module(MP_QSTR_collections);
    rt_store_attr(m_collections, MP_QSTR_namedtuple, (mp_obj_t)&mp_namedtuple_obj);

#if MICROPY_CPYTHON_COMPAT
    // Precreate sys module, so "import sys" didn't throw exceptions.
    mp_obj_t m_sys = mp_obj_new_module(MP_QSTR_sys);
    // Avoid warning of unused var
    (void)m_sys;
#endif
    // init sys.path
    // for efficiency, left to platform-specific startup code
    //sys_path = mp_obj_new_list(0, NULL);
    //rt_store_attr(m_sys, MP_QSTR_path, sys_path);

    // we pre-import the micropython module
    // probably shouldn't do this, so we are compatible with CPython
    rt_store_name(MP_QSTR_micropython, (mp_obj_t)&mp_module_micropython);

    // TODO: wastes one mp_code_t structure in mem
    next_unique_code_id = 1; // 0 indicates "no code"
    unique_codes_alloc = 0;
    unique_codes = NULL;

#ifdef WRITE_CODE
    fp_write_code = fopen("out-code", "wb");
#endif
}

218
void rt_deinit(void) {
219
    m_del(mp_code_t, unique_codes, unique_codes_alloc);
220
221
222
    mp_map_free(map_globals);
    mp_map_deinit(&map_loaded_modules);
    mp_map_deinit(&map_builtins);
223
224
225
#ifdef WRITE_CODE
    if (fp_write_code != NULL) {
        fclose(fp_write_code);
Damien's avatar
Damien committed
226
    }
227
#endif
Damien's avatar
Damien committed
228
229
}

230
uint rt_get_unique_code_id(void) {
231
    return next_unique_code_id++;
Damien's avatar
Damien committed
232
233
}

234
STATIC void alloc_unique_codes(void) {
235
    if (next_unique_code_id > unique_codes_alloc) {
236
        DEBUG_printf("allocate more unique codes: " UINT_FMT " -> %u\n", unique_codes_alloc, next_unique_code_id);
237
238
        // increase size of unique_codes table
        unique_codes = m_renew(mp_code_t, unique_codes, unique_codes_alloc, next_unique_code_id);
239
        for (uint i = unique_codes_alloc; i < next_unique_code_id; i++) {
240
            unique_codes[i].kind = MP_CODE_NONE;
241
        }
242
        unique_codes_alloc = next_unique_code_id;
Damien's avatar
Damien committed
243
    }
244
245
}

246
// store byte code into the unique_codes slot for unique_code_id;
// called from the emitter after rt_get_unique_code_id reserved the id
void rt_assign_byte_code(uint unique_code_id, byte *code, uint len, int n_args, int n_locals, int n_stack, uint scope_flags, qstr *arg_names) {
    alloc_unique_codes();

    // the slot must exist and must not have been assigned already
    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_BYTE;
    unique_codes[unique_code_id].scope_flags = scope_flags;
    unique_codes[unique_code_id].n_args = n_args;
    // VM state = local variables plus evaluation stack
    unique_codes[unique_code_id].n_state = n_locals + n_stack;
    unique_codes[unique_code_id].u_byte.code = code;
    unique_codes[unique_code_id].u_byte.len = len;
    unique_codes[unique_code_id].arg_names = arg_names;

    //printf("byte code: %d bytes\n", len);

#ifdef DEBUG_PRINT
    DEBUG_printf("assign byte code: id=%d code=%p len=%u n_args=%d n_locals=%d n_stack=%d\n", unique_code_id, code, len, n_args, n_locals, n_stack);
    // hex dump of (at most) the first 128 bytes, 16 per row
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", code[i]);
    }
    DEBUG_printf("\n");
#if MICROPY_DEBUG_PRINTERS
    mp_byte_code_print(code, len);
#endif
#endif
}

275
// store compiled native code into the unique_codes slot for unique_code_id
void rt_assign_native_code(uint unique_code_id, void *fun, uint len, int n_args) {
    alloc_unique_codes();

    // the slot must exist and must not have been assigned already
    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_NATIVE;
    unique_codes[unique_code_id].scope_flags = 0;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = 0;
    unique_codes[unique_code_id].u_native.fun = fun;

    //printf("native code: %d bytes\n", len);

#ifdef DEBUG_PRINT
    DEBUG_printf("assign native code: id=%d fun=%p len=%u n_args=%d\n", unique_code_id, fun, len, n_args);
    byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
    // hex dump of (at most) the first 128 bytes, 16 per row
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", fun_data[i]);
    }
    DEBUG_printf("\n");

#ifdef WRITE_CODE
    // also dump the raw machine code to the out-code file
    if (fp_write_code != NULL) {
        fwrite(fun_data, len, 1, fp_write_code);
        fflush(fp_write_code);
    }
#endif
#endif
}

307
// store assembled inline-asm code into the unique_codes slot for unique_code_id
void rt_assign_inline_asm_code(uint unique_code_id, void *fun, uint len, int n_args) {
    alloc_unique_codes();

    // the slot must exist and must not have been assigned already
    assert(1 <= unique_code_id && unique_code_id < next_unique_code_id && unique_codes[unique_code_id].kind == MP_CODE_NONE);
    unique_codes[unique_code_id].kind = MP_CODE_INLINE_ASM;
    unique_codes[unique_code_id].scope_flags = 0;
    unique_codes[unique_code_id].n_args = n_args;
    unique_codes[unique_code_id].n_state = 0;
    unique_codes[unique_code_id].u_inline_asm.fun = fun;

#ifdef DEBUG_PRINT
    DEBUG_printf("assign inline asm code: id=%d fun=%p len=%u n_args=%d\n", unique_code_id, fun, len, n_args);
    byte *fun_data = (byte*)(((machine_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
    // hex dump of (at most) the first 128 bytes, 16 per row
    for (int i = 0; i < 128 && i < len; i++) {
        if (i > 0 && i % 16 == 0) {
            DEBUG_printf("\n");
        }
        DEBUG_printf(" %02x", fun_data[i]);
    }
    DEBUG_printf("\n");

#ifdef WRITE_CODE
    // also dump the raw machine code to the out-code file
    if (fp_write_code != NULL) {
        fwrite(fun_data, len, 1, fp_write_code);
    }
#endif
#endif
}

336
337
// return 1 if the object tests true per Python semantics, otherwise 0
int rt_is_true(mp_obj_t arg) {
    DEBUG_OP_printf("is true %p\n", arg);

    // fast paths for the interned singletons
    if (arg == mp_const_true) {
        return 1;
    }
    if (arg == mp_const_false || arg == mp_const_none) {
        return 0;
    }
    if (MP_OBJ_IS_SMALL_INT(arg)) {
        // zero is falsy; every other small int is truthy
        return MP_OBJ_SMALL_INT_VALUE(arg) != 0;
    }

    // give the type's bool unary op the first say
    mp_obj_type_t *type = mp_obj_get_type(arg);
    if (type->unary_op != NULL) {
        mp_obj_t res = type->unary_op(RT_UNARY_OP_BOOL, arg);
        if (res != MP_OBJ_NULL) {
            return res == mp_const_true;
        }
    }

    // fall back on length: truthy iff len != 0
    mp_obj_t len = mp_obj_len_maybe(arg);
    if (len != MP_OBJ_NULL) {
        return len != MP_OBJ_NEW_SMALL_INT(0);
    }

    // any other obj is true per Python semantics
    return 1;
}

// thin wrapper: append arg to the list self_in (forwards to mp_obj_list_append)
mp_obj_t rt_list_append(mp_obj_t self_in, mp_obj_t arg) {
    return mp_obj_list_append(self_in, arg);
}

// build a runtime object from a decimal numeric literal stored as a qstr
mp_obj_t rt_load_const_dec(qstr qstr) {
    DEBUG_OP_printf("load '%s'\n", qstr_str(qstr));
    uint len;
    const byte *s = qstr_data(qstr, &len);
    return mp_parse_num_decimal((const char*)s, len, true);
}

381
// load a string constant; qstrs are interned so the object is the qstr itself
mp_obj_t rt_load_const_str(qstr qstr) {
    DEBUG_OP_printf("load '%s'\n", qstr_str(qstr));
    return MP_OBJ_NEW_QSTR(qstr);
}

386
387
388
389
390
391
392
// build a bytes object from a bytes literal stored as a qstr
mp_obj_t rt_load_const_bytes(qstr qstr) {
    DEBUG_OP_printf("load b'%s'\n", qstr_str(qstr));
    uint len;
    const byte *data = qstr_data(qstr, &len);
    return mp_obj_new_bytes(data, len);
}

393
mp_obj_t rt_load_name(qstr qstr) {
    // logic: search locals, globals, builtins
    DEBUG_OP_printf("load name %s\n", qstr_str(qstr));
    mp_map_elem_t *e = mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (e == NULL) {
        // not a local; continue with the global/builtin search
        return rt_load_global(qstr);
    }
    return e->value;
}

404
mp_obj_t rt_load_global(qstr qstr) {
    // logic: search globals, builtins
    DEBUG_OP_printf("load global %s\n", qstr_str(qstr));
    mp_map_elem_t *e = mp_map_lookup(map_globals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (e != NULL) {
        return e->value;
    }
    e = mp_map_lookup(&map_builtins, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP);
    if (e != NULL) {
        return e->value;
    }
    // finally scan the ROM table of core builtins
    for (const mp_builtin_elem_t *b = &builtin_table[0]; b->qstr != MP_QSTR_; b++) {
        if (b->qstr == qstr) {
            return b->fun;
        }
    }
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_NameError, "name '%s' is not defined", qstr_str(qstr)));
    return MP_OBJ_NULL; // unreachable: nlr_jump does not return
}

422
mp_obj_t rt_load_build_class(void) {
Damien's avatar
Damien committed
423
    DEBUG_OP_printf("load_build_class\n");
424
    mp_map_elem_t *elem = mp_map_lookup(&map_builtins, MP_OBJ_NEW_QSTR(MP_QSTR___build_class__), MP_MAP_LOOKUP);
425
426
427
428
    if (elem != NULL) {
        return elem->value;
    } else {
        return (mp_obj_t)&mp_builtin___build_class___obj;
Damien's avatar
Damien committed
429
430
431
    }
}

432
433
// thin wrapper: read the value held by a closure cell
mp_obj_t rt_get_cell(mp_obj_t cell) {
    return mp_obj_cell_get(cell);
}

436
437
// thin wrapper: store a value into a closure cell
void rt_set_cell(mp_obj_t cell, mp_obj_t val) {
    mp_obj_cell_set(cell, val);
}

440
// bind obj to qstr in the current local scope (adds the entry if absent)
void rt_store_name(qstr qstr, mp_obj_t obj) {
    DEBUG_OP_printf("store name %s <- %p\n", qstr_str(qstr), obj);
    mp_map_lookup(map_locals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = obj;
}

445
// bind obj to qstr in the global (module) scope (adds the entry if absent)
void rt_store_global(qstr qstr, mp_obj_t obj) {
    DEBUG_OP_printf("store global %s <- %p\n", qstr_str(qstr), obj);
    mp_map_lookup(map_globals, MP_OBJ_NEW_QSTR(qstr), MP_MAP_LOOKUP_ADD_IF_NOT_FOUND)->value = obj;
}

450
// apply unary operator op (RT_UNARY_OP_*) to arg and return the result;
// raises TypeError via nlr_jump if the type does not support the op
mp_obj_t rt_unary_op(int op, mp_obj_t arg) {
    DEBUG_OP_printf("unary %d %p\n", op, arg);
    if (MP_OBJ_IS_SMALL_INT(arg)) {
        mp_small_int_t val = MP_OBJ_SMALL_INT_VALUE(arg);
        switch (op) {
            case RT_UNARY_OP_BOOL:
                return MP_BOOL(val != 0);
            case RT_UNARY_OP_POSITIVE:
                return arg;
            case RT_UNARY_OP_NEGATIVE:
                // check for overflow: -MP_SMALL_INT_MIN does not fit in a small int
                if (val == MP_SMALL_INT_MIN) {
                    return mp_obj_new_int(-val);
                } else {
                    return MP_OBJ_NEW_SMALL_INT(-val);
                }
            case RT_UNARY_OP_INVERT:
                return MP_OBJ_NEW_SMALL_INT(~val);
            default:
                assert(0);
                return arg;
        }
    } else {
        mp_obj_type_t *type = mp_obj_get_type(arg);
        if (type->unary_op != NULL) {
            mp_obj_t result = type->unary_op(op, arg);
            // compare against MP_OBJ_NULL (was bare NULL) for consistency with
            // the "op not supported" sentinel used in rt_is_true and rt_binary_op
            if (result != MP_OBJ_NULL) {
                return result;
            }
        }
        // TODO specify in error message what the operator is
        nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError, "bad operand type for unary operator: '%s'", mp_obj_get_type_str(arg)));
    }
}

486
mp_obj_t rt_binary_op(int op, mp_obj_t lhs, mp_obj_t rhs) {
Damien's avatar
Damien committed
487
    DEBUG_OP_printf("binary %d %p %p\n", op, lhs, rhs);
488
489
490
491
492
493
494
495
496
497

    // TODO correctly distinguish inplace operators for mutable objects
    // lookup logic that CPython uses for +=:
    //   check for implemented +=
    //   then check for implemented +
    //   then check for implemented seq.inplace_concat
    //   then check for implemented seq.concat
    //   then fail
    // note that list does not implement + or +=, so that inplace_concat is reached first for +=

498
499
    // deal with is
    if (op == RT_BINARY_OP_IS) {
500
501
502
        return MP_BOOL(lhs == rhs);
    }

503
    // deal with == and != for all types
504
    if (op == RT_BINARY_OP_EQUAL || op == RT_BINARY_OP_NOT_EQUAL) {
505
        if (mp_obj_equal(lhs, rhs)) {
506
            if (op == RT_BINARY_OP_EQUAL) {
507
508
509
510
511
                return mp_const_true;
            } else {
                return mp_const_false;
            }
        } else {
512
            if (op == RT_BINARY_OP_EQUAL) {
513
514
515
516
517
518
519
520
                return mp_const_false;
            } else {
                return mp_const_true;
            }
        }
    }

    // deal with exception_match for all types
521
    if (op == RT_BINARY_OP_EXCEPTION_MATCH) {
522
523
524
525
526
527
        // rhs must be issubclass(rhs, BaseException)
        if (mp_obj_is_exception_type(rhs)) {
            // if lhs is an instance of an exception, then extract and use its type
            if (mp_obj_is_exception_instance(lhs)) {
                lhs = mp_obj_get_type(lhs);
            }
528
            if (mp_obj_is_subclass_fast(lhs, rhs)) {
529
530
531
532
533
534
535
                return mp_const_true;
            } else {
                return mp_const_false;
            }
        }
    }

536
    if (MP_OBJ_IS_SMALL_INT(lhs)) {
537
        mp_small_int_t lhs_val = MP_OBJ_SMALL_INT_VALUE(lhs);
538
539
        if (MP_OBJ_IS_SMALL_INT(rhs)) {
            mp_small_int_t rhs_val = MP_OBJ_SMALL_INT_VALUE(rhs);
540
541
542
543
544
545
546
547
548
            // This is a binary operation: lhs_val op rhs_val
            // We need to be careful to handle overflow; see CERT INT32-C
            // Operations that can overflow:
            //      +       result always fits in machine_int_t, then handled by SMALL_INT check
            //      -       result always fits in machine_int_t, then handled by SMALL_INT check
            //      *       checked explicitly
            //      /       if lhs=MIN and rhs=-1; result always fits in machine_int_t, then handled by SMALL_INT check
            //      %       if lhs=MIN and rhs=-1; result always fits in machine_int_t, then handled by SMALL_INT check
            //      <<      checked explicitly
549
550
551
552
553
554
555
556
            switch (op) {
                case RT_BINARY_OP_OR:
                case RT_BINARY_OP_INPLACE_OR: lhs_val |= rhs_val; break;
                case RT_BINARY_OP_XOR:
                case RT_BINARY_OP_INPLACE_XOR: lhs_val ^= rhs_val; break;
                case RT_BINARY_OP_AND:
                case RT_BINARY_OP_INPLACE_AND: lhs_val &= rhs_val; break;
                case RT_BINARY_OP_LSHIFT:
557
558
559
560
561
562
563
564
565
566
567
568
569
570
                case RT_BINARY_OP_INPLACE_LSHIFT: {
                    if (rhs_val < 0) {
                        // negative shift not allowed
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative shift count"));
                    } else if (rhs_val >= BITS_PER_WORD || lhs_val > (MP_SMALL_INT_MAX >> rhs_val) || lhs_val < (MP_SMALL_INT_MIN >> rhs_val)) {
                        // left-shift will overflow, so use higher precision integer
                        lhs = mp_obj_new_int_from_ll(lhs_val);
                        goto generic_binary_op;
                    } else {
                        // use standard precision
                        lhs_val <<= rhs_val;
                    }
                    break;
                }
571
                case RT_BINARY_OP_RSHIFT:
572
573
574
575
576
577
578
579
580
                case RT_BINARY_OP_INPLACE_RSHIFT:
                    if (rhs_val < 0) {
                        // negative shift not allowed
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative shift count"));
                    } else {
                        // standard precision is enough for right-shift
                        lhs_val >>= rhs_val;
                    }
                    break;
581
582
583
584
585
                case RT_BINARY_OP_ADD:
                case RT_BINARY_OP_INPLACE_ADD: lhs_val += rhs_val; break;
                case RT_BINARY_OP_SUBTRACT:
                case RT_BINARY_OP_INPLACE_SUBTRACT: lhs_val -= rhs_val; break;
                case RT_BINARY_OP_MULTIPLY:
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
                case RT_BINARY_OP_INPLACE_MULTIPLY: {

                    // If long long type exists and is larger than machine_int_t, then
                    // we can use the following code to perform overflow-checked multiplication.
                    // Otherwise (eg in x64 case) we must use the branching code below.
                    #if 0
                    // compute result using long long precision
                    long long res = (long long)lhs_val * (long long)rhs_val;
                    if (res > MP_SMALL_INT_MAX || res < MP_SMALL_INT_MIN) {
                        // result overflowed SMALL_INT, so return higher precision integer
                        return mp_obj_new_int_from_ll(res);
                    } else {
                        // use standard precision
                        lhs_val = (mp_small_int_t)res;
                    }
                    #endif

                    if (lhs_val > 0) { // lhs_val is positive
                        if (rhs_val > 0) { // lhs_val and rhs_val are positive
                            if (lhs_val > (MP_SMALL_INT_MAX / rhs_val)) {
                                goto mul_overflow;
                            }
                        } else { // lhs_val positive, rhs_val nonpositive
                            if (rhs_val < (MP_SMALL_INT_MIN / lhs_val)) {
                                goto mul_overflow;
                            }
                        } // lhs_val positive, rhs_val nonpositive
                    } else { // lhs_val is nonpositive
                        if (rhs_val > 0) { // lhs_val is nonpositive, rhs_val is positive
                            if (lhs_val < (MP_SMALL_INT_MIN / rhs_val)) {
                                goto mul_overflow;
                            }
                        } else { // lhs_val and rhs_val are nonpositive
                            if (lhs_val != 0 && rhs_val < (MP_SMALL_INT_MAX / lhs_val)) {
                                goto mul_overflow;
                            }
                        } // End if lhs_val and rhs_val are nonpositive
                    } // End if lhs_val is nonpositive

                    // use standard precision
                    return MP_OBJ_NEW_SMALL_INT(lhs_val * rhs_val);

                mul_overflow:
                    // use higher precision
                    lhs = mp_obj_new_int_from_ll(lhs_val);
                    goto generic_binary_op;

                    break;
                }
635
636
                case RT_BINARY_OP_FLOOR_DIVIDE:
                case RT_BINARY_OP_INPLACE_FLOOR_DIVIDE: lhs_val /= rhs_val; break;
637
                #if MICROPY_ENABLE_FLOAT
638
639
                case RT_BINARY_OP_TRUE_DIVIDE:
                case RT_BINARY_OP_INPLACE_TRUE_DIVIDE: return mp_obj_new_float((mp_float_t)lhs_val / (mp_float_t)rhs_val);
640
                #endif
641
642
643
644
645
646
647

                // TODO implement modulo as specified by Python
                case RT_BINARY_OP_MODULO:
                case RT_BINARY_OP_INPLACE_MODULO: lhs_val %= rhs_val; break;

                case RT_BINARY_OP_POWER:
                case RT_BINARY_OP_INPLACE_POWER:
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
                    if (rhs_val < 0) {
                        #if MICROPY_ENABLE_FLOAT
                        lhs = mp_obj_new_float(lhs_val);
                        goto generic_binary_op;
                        #else
                        nlr_jump(mp_obj_new_exception_msg(&mp_type_ValueError, "negative power with no float support"));
                        #endif
                    } else {
                        // TODO check for overflow
                        machine_int_t ans = 1;
                        while (rhs_val > 0) {
                            if (rhs_val & 1) {
                                ans *= lhs_val;
                            }
                            lhs_val *= lhs_val;
                            rhs_val /= 2;
664
                        }
665
                        lhs_val = ans;
666
                    }
667
                    break;
668
669
670
671
                case RT_BINARY_OP_LESS: return MP_BOOL(lhs_val < rhs_val); break;
                case RT_BINARY_OP_MORE: return MP_BOOL(lhs_val > rhs_val); break;
                case RT_BINARY_OP_LESS_EQUAL: return MP_BOOL(lhs_val <= rhs_val); break;
                case RT_BINARY_OP_MORE_EQUAL: return MP_BOOL(lhs_val >= rhs_val); break;
672

673
674
                default: assert(0);
            }
675
676
            // TODO: We just should make mp_obj_new_int() inline and use that
            if (MP_OBJ_FITS_SMALL_INT(lhs_val)) {
677
                return MP_OBJ_NEW_SMALL_INT(lhs_val);
678
679
            } else {
                return mp_obj_new_int(lhs_val);
680
            }
681
#if MICROPY_ENABLE_FLOAT
682
        } else if (MP_OBJ_IS_TYPE(rhs, &mp_type_float)) {
683
            return mp_obj_float_binary_op(op, lhs_val, rhs);
684
        } else if (MP_OBJ_IS_TYPE(rhs, &mp_type_complex)) {
685
            return mp_obj_complex_binary_op(op, lhs_val, 0, rhs);
686
#endif
687
        }
688
    }
689

690
    /* deal with `in`
691
692
     *
     * NOTE `a in b` is `b.__contains__(a)`, hence why the generic dispatch
Damien George's avatar
Damien George committed
693
     * needs to go below with swapped arguments
694
     */
695
    if (op == RT_BINARY_OP_IN) {
696
697
698
        mp_obj_type_t *type = mp_obj_get_type(rhs);
        if (type->binary_op != NULL) {
            mp_obj_t res = type->binary_op(op, rhs, lhs);
Damien George's avatar
Damien George committed
699
            if (res != MP_OBJ_NULL) {
700
                return res;
John R. Lenton's avatar
John R. Lenton committed
701
            }
702
703
704
705
706
707
708
        }
        if (type->getiter != NULL) {
            /* second attempt, walk the iterator */
            mp_obj_t next = NULL;
            mp_obj_t iter = rt_getiter(rhs);
            while ((next = rt_iternext(iter)) != mp_const_stop_iteration) {
                if (mp_obj_equal(next, lhs)) {
709
                    return mp_const_true;
John R. Lenton's avatar
John R. Lenton committed
710
                }
711
            }
712
            return mp_const_false;
713
714
715
        }

        nlr_jump(mp_obj_new_exception_msg_varg(
716
                     &mp_type_TypeError, "'%s' object is not iterable",
717
718
719
720
                     mp_obj_get_type_str(rhs)));
        return mp_const_none;
    }

721
    // generic binary_op supplied by type
722
723
724
    mp_obj_type_t *type;
generic_binary_op:
    type = mp_obj_get_type(lhs);
725
726
727
728
    if (type->binary_op != NULL) {
        mp_obj_t result = type->binary_op(op, lhs, rhs);
        if (result != MP_OBJ_NULL) {
            return result;
Damien's avatar
Damien committed
729
730
        }
    }
731

732
733
    // TODO implement dispatch for reverse binary ops

John R. Lenton's avatar
John R. Lenton committed
734
    // TODO specify in error message what the operator is
735
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError,
John R. Lenton's avatar
John R. Lenton committed
736
737
        "unsupported operand types for binary operator: '%s', '%s'",
        mp_obj_get_type_str(lhs), mp_obj_get_type_str(rhs)));
738
    return mp_const_none;
Damien's avatar
Damien committed
739
740
}

741
// Build a callable object for the registered unique code block with the
// given id.  def_args holds default positional argument values (or
// MP_OBJ_NULL when there are none).  Returns mp_const_none for an id that
// is out of range of the unique_codes table.
mp_obj_t rt_make_function_from_id(int unique_code_id, mp_obj_t def_args) {
    DEBUG_OP_printf("make_function_from_id %d\n", unique_code_id);

    // reject ids outside the table of registered code blocks
    if (unique_code_id < 1 || unique_code_id >= next_unique_code_id) {
        return mp_const_none;
    }

    mp_code_t *code = &unique_codes[unique_code_id];
    mp_obj_t fun_obj;

    // create the concrete function object according to the code kind
    switch (code->kind) {
        case MP_CODE_BYTE:
            fun_obj = mp_obj_new_fun_bc(code->scope_flags, code->arg_names, code->n_args, def_args, code->n_state, code->u_byte.code);
            break;
        case MP_CODE_NATIVE:
            fun_obj = rt_make_function_n(code->n_args, code->u_native.fun);
            break;
        case MP_CODE_INLINE_ASM:
            fun_obj = mp_obj_new_fun_asm(code->n_args, code->u_inline_asm.fun);
            break;
        default:
            assert(0);
            fun_obj = mp_const_none;
    }

    // a generator function is wrapped so that calling it produces a generator
    if ((code->scope_flags & MP_SCOPE_FLAG_GENERATOR) != 0) {
        fun_obj = mp_obj_new_gen_wrap(fun_obj);
    }

    return fun_obj;
}

774
// Build a closure object: the function for unique_code_id (with no default
// args) bound to the tuple of closed-over variables.
mp_obj_t rt_make_closure_from_id(int unique_code_id, mp_obj_t closure_tuple) {
    DEBUG_OP_printf("make_closure_from_id %d\n", unique_code_id);
    // first construct the bare function object, then wrap it
    mp_obj_t fun = rt_make_function_from_id(unique_code_id, MP_OBJ_NULL);
    return mp_obj_new_closure(fun, closure_tuple);
}

782
// Call fun with no arguments.
mp_obj_t rt_call_function_0(mp_obj_t fun) {
    return rt_call_function_n_kw(fun, 0, 0, NULL);
}

786
// Call fun with exactly one positional argument.
mp_obj_t rt_call_function_1(mp_obj_t fun, mp_obj_t arg) {
    return rt_call_function_n_kw(fun, 1, 0, &arg);
}

790
791
// Call fun with exactly two positional arguments.
mp_obj_t rt_call_function_2(mp_obj_t fun, mp_obj_t arg1, mp_obj_t arg2) {
    mp_obj_t args[2] = { arg1, arg2 };
    return rt_call_function_n_kw(fun, 2, 0, args);
}

797
798
799
800
801
802
// wrapper that accepts n_args and n_kw packed into a single word, because
// the native emitter can only pass at most 3 arguments to a function
mp_obj_t rt_call_function_n_kw_for_native(mp_obj_t fun_in, uint n_args_kw, const mp_obj_t *args) {
    uint n_args = n_args_kw & 0xff;       // low byte: positional arg count
    uint n_kw = (n_args_kw >> 8) & 0xff;  // next byte: keyword arg count
    return rt_call_function_n_kw(fun_in, n_args, n_kw, args);
}

803
804
// Generic call entry point.
// args contains, eg: arg0  arg1  key0  value0  key1  value1
// Raises TypeError (via nlr_jump) when the object's type has no call slot.
mp_obj_t rt_call_function_n_kw(mp_obj_t fun_in, uint n_args, uint n_kw, const mp_obj_t *args) {
    // TODO improve this: fun object can specify its type and we parse here the arguments,
    // passing to the function arrays of fixed and keyword arguments

    DEBUG_OP_printf("calling function %p(n_args=%d, n_kw=%d, args=%p)\n", fun_in, n_args, n_kw, args);

    // dispatch through the type's call slot
    mp_obj_type_t *type = mp_obj_get_type(fun_in);
    if (type->call == NULL) {
        nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_TypeError, "'%s' object is not callable", mp_obj_get_type_str(fun_in)));
    }
    return type->call(fun_in, n_args, n_kw, args);
}

821
822
// args contains: fun  self/NULL  arg(0)  ...  arg(n_args-2)  arg(n_args-1)  kw_key(0)  kw_val(0)  ... kw_key(n_kw-1)  kw_val(n_kw-1)
// if n_args==0 and n_kw==0 then there are only fun and self/NULL
823
mp_obj_t rt_call_method_n_kw(uint n_args, uint n_kw, const mp_obj_t *args) {
824
825
826
    DEBUG_OP_printf("call method (fun=%p, self=%p, n_args=%u, n_kw=%u, args=%p)\n", args[0], args[1], n_args, n_kw, args);
    int adjust = (args[1] == NULL) ? 0 : 1;
    return rt_call_function_n_kw(args[0], n_args + adjust, n_kw, args + 2 - adjust);
827
828
}

829
// Construct a tuple object holding the given n_args items.
mp_obj_t rt_build_tuple(int n_args, mp_obj_t *items) {
    return mp_obj_new_tuple(n_args, items);
}

833
// Construct a list object holding the given n_args items.
mp_obj_t rt_build_list(int n_args, mp_obj_t *items) {
    return mp_obj_new_list(n_args, items);
}

837
838
// Construct a set object holding the given n_args items.
mp_obj_t rt_build_set(int n_args, mp_obj_t *items) {
    return mp_obj_new_set(n_args, items);
}

841
// Add item to set, returning the (mutated) set object.
mp_obj_t rt_store_set(mp_obj_t set, mp_obj_t item) {
    mp_obj_set_store(set, item);
    return set;
}

846
// unpacked items are stored in reverse order into the array pointed to by items
847
void rt_unpack_sequence(mp_obj_t seq_in, uint num, mp_obj_t *items) {
848
    uint seq_len;
849
850
851
852
853
854
    if (MP_OBJ_IS_TYPE(seq_in, &tuple_type) || MP_OBJ_IS_TYPE(seq_in, &list_type)) {
        mp_obj_t *seq_items;
        if (MP_OBJ_IS_TYPE(seq_in, &tuple_type)) {
            mp_obj_tuple_get(seq_in, &seq_len, &seq_items);
        } else {
            mp_obj_list_get(seq_in, &seq_len, &seq_items);
855
        }
856
        if (seq_len < num) {
857
            goto too_short;
858
        } else if (seq_len > num) {
859
            goto too_long;
860
        }
861
862
863
        for (uint i = 0; i < num; i++) {
            items[i] = seq_items[num - 1 - i];
        }
864
    } else {
865
866
867
868
869
870
871
872
873
874
875
876
        mp_obj_t iterable = rt_getiter(seq_in);

        for (seq_len = 0; seq_len < num; seq_len++) {
            mp_obj_t el = rt_iternext(iterable);
            if (el == mp_const_stop_iteration) {
                goto too_short;
            }
            items[num - 1 - seq_len] = el;
        }
        if (rt_iternext(iterable) != mp_const_stop_iteration) {
            goto too_long;
        }
877
    }
878
879
880
    return;

too_short:
881
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_ValueError, "need more than %d values to unpack", seq_len));
882
too_long:
883
    nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_ValueError, "too many values to unpack (expected %d)", num));
884
885
}

886
887
// Construct an empty dict object sized for n_args entries.
mp_obj_t rt_build_map(int n_args) {
    return mp_obj_new_dict(n_args);
}

890
891
892
// Store key/value into map, returning the map.
mp_obj_t rt_store_map(mp_obj_t map, mp_obj_t key, mp_obj_t value) {
    // map should always be a dict
    return mp_obj_dict_store(map, key, value);
}

895
// Load attribute `attr` from `base`.  Implemented on top of rt_load_method,
// which raises AttributeError (via nlr_jump) if nothing is found.
mp_obj_t rt_load_attr(mp_obj_t base, qstr attr) {
    DEBUG_OP_printf("load attr %p.%s\n", base, qstr_str(attr));
    // use load_method
    mp_obj_t dest[2];
    rt_load_method(base, attr, dest);
    if (dest[1] != MP_OBJ_NULL) {
        // load_method returned a method: build a bound method object
        return mp_obj_new_bound_meth(dest[0], dest[1]);
    }
    // load_method returned just a normal attribute
    return dest[0];
}

909
910
911
// no attribute found, returns:     dest[0] == MP_OBJ_NULL, dest[1] == MP_OBJ_NULL
// normal attribute found, returns: dest[0] == <attribute>, dest[1] == MP_OBJ_NULL
// method attribute found, returns: dest[0] == <method>,    dest[1] == <self>
912
STATIC void rt_load_method_maybe(mp_obj_t base, qstr attr, mp_obj_t *dest) {
913
914
915
916
917
918
919
920
921
922
923
924
925
    // clear output to indicate no attribute/method found yet
    dest[0] = MP_OBJ_NULL;
    dest[1] = MP_OBJ_NULL;

    // get the type
    mp_obj_type_t *type = mp_obj_get_type(base);

    // if this type can do its own load, then call it
    if (type->load_attr != NULL) {
        type->load_attr(base, attr, dest);
    }

    // if nothing found yet, look for built-in and generic names
926
    if (dest[0] == MP_OBJ_NULL) {
Damien George's avatar
Damien George committed
927
928
929
930
        if (attr == MP_QSTR___class__) {
            // a.__class__ is equivalent to type(a)
            dest[0] = type;
        } else if (attr == MP_QSTR___next__ && type->iternext != NULL) {
931
932
            dest[0] = (mp_obj_t)&mp_builtin_next_obj;
            dest[1] = base;
933
934
        } else if (type->load_attr == NULL) {
            // generic method lookup if type didn't provide a specific one
935
            // this is a lookup in the object (ie not class or type)
936
937
938
939
            const mp_method_t *meth = type->methods;
            if (meth != NULL) {
                for (; meth->name != NULL; meth++) {
                    if (strcmp(meth->name, qstr_str(attr)) == 0) {
940
941
942
943
                        // check if the methods are functions, static or class methods
                        // see http://docs.python.org/3.3/howto/descriptor.html
                        if (MP_OBJ_IS_TYPE(meth->fun, &mp_type_staticmethod)) {
                            // return just the function
944
                            dest[0] = ((mp_obj_static_class_method_t*)meth->fun)->fun;
945
946
                        } else if (MP_OBJ_IS_TYPE(meth->fun, &mp_type_classmethod)) {
                            // return a bound method, with self being the type of this object
947
                            dest[0] = ((mp_obj_static_class_method_t*)meth->fun)->fun;
948
                            dest[1] = mp_obj_get_type(base);
949
950
                        } else {
                            // return a bound method, with self being this object
951
952
                            dest[0] = (mp_obj_t)meth->fun;
                            dest[1] = base;
953
                        }
954
955
                        break;
                    }
956
                }
Damien's avatar
Damien committed
957
958
            }
        }
959
    }
960
961
962
963
964
965
}

void rt_load_method(mp_obj_t base, qstr attr, mp_obj_t *dest) {
    DEBUG_OP_printf("load method %p.%s\n", base, qstr_str(attr));

    rt_load_method_maybe(base, attr, dest);
966

967
    if (dest[0] == MP_OBJ_NULL) {
968
969
        // no attribute/method called attr
        // following CPython, we give a more detailed error message for type objects
970
971
        if (MP_OBJ_IS_TYPE(base, &mp_type_type)) {
            nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_AttributeError, "type object '%s' has no attribute '%s'", ((mp_obj_type_t*)base)->name, qstr_str(attr)));
972
        } else {
973
            nlr_jump(mp_obj_new_exception_msg_varg(&mp_type_AttributeError, "'%s' object has no attribute '%s'", mp_obj_get_type_str(base), qstr_str(attr)));