Commit b4b10fd3 authored by Damien George

py: Put all global state together in state structures.

This patch consolidates all global variables in py/ core into one place,
in a global structure.  Root pointers are all located together to make
GC tracing easier and more efficient.
parent ad2307c9
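
For orientation before the diff: a minimal sketch of the new state layout and the accessor macros it introduces, as implied by the hunks below. Only the fields actually visible in this diff (nlr_top, dict_locals, dict_globals, vm.stack_top, mp_optimise_value, the gc_* bookkeeping) are taken from the source; everything else is a labelled placeholder, and the real definitions live in py/mpstate.h.

    // Hedged sketch, not the real py/mpstate.h. Stand-in typedefs are used so
    // the snippet is self-contained; placeholder fields are marked as such.
    #include <stdint.h>

    typedef uintptr_t mp_uint_t;  // stand-in for MicroPython's machine word type
    typedef uint8_t byte;         // stand-in for MicroPython's byte typedef

    typedef struct _mp_state_mem_t {   // memory/GC state, accessed via MP_STATE_MEM(x)
        byte *gc_alloc_table_start;
        mp_uint_t gc_alloc_table_byte_len;
        mp_uint_t *gc_pool_start;
        mp_uint_t *gc_pool_end;
        // remaining GC bookkeeping (gc_sp, gc_stack, gc_lock_depth, ...) omitted
    } mp_state_mem_t;

    typedef struct _mp_state_vm_t {    // VM/runtime state, accessed via MP_STATE_VM(x)
        // Root pointers sit at the top of this struct so the GC can trace them
        // as one contiguous run of words; the real list is much longer.
        void *root_pointers[8];        // placeholder for the actual root pointers
        char *stack_top;               // ends the traced range (see gc_collect_start)
        mp_uint_t mp_optimise_value;   // non-pointer state follows the roots
    } mp_state_vm_t;

    typedef struct _mp_state_ctx_t {
        // These three are scanned first by gc_collect_start (see the gc.c hunk).
        void *nlr_top;
        void *dict_locals;
        void *dict_globals;
        mp_state_vm_t vm;
        mp_state_mem_t mem;
    } mp_state_ctx_t;

    extern mp_state_ctx_t mp_state_ctx;

    #define MP_STATE_VM(x)  (mp_state_ctx.vm.x)
    #define MP_STATE_MEM(x) (mp_state_ctx.mem.x)
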
......@@ -5,8 +5,8 @@
#include "py/nlr.h"
#include "py/parsehelper.h"
#include "py/compile.h"
#include "py/runtime0.h"
#include "py/runtime.h"
#include "py/stackctrl.h"
#include "py/repl.h"
#include "py/pfenv.h"
......@@ -48,6 +48,7 @@ void do_str(const char *src) {
}
int main(int argc, char **argv) {
mp_stack_set_limit(10240);
mp_init();
do_str("print('hello world!', list(x+1 for x in range(10)), end='eol\n')");
mp_deinit();
......
......@@ -32,6 +32,7 @@
#include "py/compile.h"
#include "py/runtime0.h"
#include "py/runtime.h"
#include "py/stackctrl.h"
#include "py/gc.h"
#include "pyexec.h"
#include "gccollect.h"
......@@ -39,7 +40,7 @@
void user_init(void) {
soft_reset:
//mp_stack_set_limit((char*)&_ram_end - (char*)&_heap_end - 1024);
mp_stack_set_limit(10240);
mp_hal_init();
gc_init(&_heap_start, &_heap_end);
gc_collect_init();
......
......@@ -90,7 +90,6 @@ extern const mp_obj_module_t mp_module_sys;
extern const mp_obj_module_t mp_module_gc;
extern const mp_obj_dict_t mp_module_builtins_globals;
extern mp_obj_dict_t *mp_module_builtins_override_dict;
struct _dummy_t;
extern struct _dummy_t mp_sys_stdin_obj;
......
......@@ -30,6 +30,7 @@
#include <string.h>
#include <assert.h>
#include "py/mpstate.h"
#include "py/emit.h"
#include "py/bc0.h"
......@@ -383,7 +384,7 @@ STATIC void emit_bc_adjust_stack_size(emit_t *emit, mp_int_t delta) {
STATIC void emit_bc_set_source_line(emit_t *emit, mp_uint_t source_line) {
//printf("source: line %d -> %d offset %d -> %d\n", emit->last_source_line, source_line, emit->last_source_line_offset, emit->bytecode_offset);
#if MICROPY_ENABLE_SOURCE_LINE
if (mp_optimise_value >= 3) {
if (MP_STATE_VM(mp_optimise_value) >= 3) {
// If we compile with -O3, don't store line numbers.
return;
}
......
......@@ -28,6 +28,7 @@
#include <stdio.h>
#include <string.h>
#include "py/mpstate.h"
#include "py/gc.h"
#include "py/obj.h"
#include "py/runtime.h"
......@@ -48,25 +49,6 @@
#define WORDS_PER_BLOCK (4)
#define BYTES_PER_BLOCK (WORDS_PER_BLOCK * BYTES_PER_WORD)
STATIC byte *gc_alloc_table_start;
STATIC mp_uint_t gc_alloc_table_byte_len;
#if MICROPY_ENABLE_FINALISER
STATIC byte *gc_finaliser_table_start;
#endif
// We initialise gc_pool_start to a dummy value so it stays out of the bss
// section. This makes sure we don't trace this pointer in a collect cycle.
// If we did trace it, it would make the first block of the heap always
// reachable, and hence we can never free that block.
STATIC mp_uint_t *gc_pool_start = (void*)4;
STATIC mp_uint_t *gc_pool_end;
STATIC int gc_stack_overflow;
STATIC mp_uint_t gc_stack[MICROPY_ALLOC_GC_STACK_SIZE];
STATIC mp_uint_t *gc_sp;
STATIC uint16_t gc_lock_depth;
uint16_t gc_auto_collect_enabled;
STATIC mp_uint_t gc_last_free_atb_index;
// ATB = allocation table byte
// 0b00 = FREE -- free block
// 0b01 = HEAD -- head of a chain of blocks
......@@ -90,15 +72,15 @@ STATIC mp_uint_t gc_last_free_atb_index;
#define ATB_3_IS_FREE(a) (((a) & ATB_MASK_3) == 0)
#define BLOCK_SHIFT(block) (2 * ((block) & (BLOCKS_PER_ATB - 1)))
#define ATB_GET_KIND(block) ((gc_alloc_table_start[(block) / BLOCKS_PER_ATB] >> BLOCK_SHIFT(block)) & 3)
#define ATB_ANY_TO_FREE(block) do { gc_alloc_table_start[(block) / BLOCKS_PER_ATB] &= (~(AT_MARK << BLOCK_SHIFT(block))); } while (0)
#define ATB_FREE_TO_HEAD(block) do { gc_alloc_table_start[(block) / BLOCKS_PER_ATB] |= (AT_HEAD << BLOCK_SHIFT(block)); } while (0)
#define ATB_FREE_TO_TAIL(block) do { gc_alloc_table_start[(block) / BLOCKS_PER_ATB] |= (AT_TAIL << BLOCK_SHIFT(block)); } while (0)
#define ATB_HEAD_TO_MARK(block) do { gc_alloc_table_start[(block) / BLOCKS_PER_ATB] |= (AT_MARK << BLOCK_SHIFT(block)); } while (0)
#define ATB_MARK_TO_HEAD(block) do { gc_alloc_table_start[(block) / BLOCKS_PER_ATB] &= (~(AT_TAIL << BLOCK_SHIFT(block))); } while (0)
#define BLOCK_FROM_PTR(ptr) (((ptr) - (mp_uint_t)gc_pool_start) / BYTES_PER_BLOCK)
#define PTR_FROM_BLOCK(block) (((block) * BYTES_PER_BLOCK + (mp_uint_t)gc_pool_start))
#define ATB_GET_KIND(block) ((MP_STATE_MEM(gc_alloc_table_start)[(block) / BLOCKS_PER_ATB] >> BLOCK_SHIFT(block)) & 3)
#define ATB_ANY_TO_FREE(block) do { MP_STATE_MEM(gc_alloc_table_start)[(block) / BLOCKS_PER_ATB] &= (~(AT_MARK << BLOCK_SHIFT(block))); } while (0)
#define ATB_FREE_TO_HEAD(block) do { MP_STATE_MEM(gc_alloc_table_start)[(block) / BLOCKS_PER_ATB] |= (AT_HEAD << BLOCK_SHIFT(block)); } while (0)
#define ATB_FREE_TO_TAIL(block) do { MP_STATE_MEM(gc_alloc_table_start)[(block) / BLOCKS_PER_ATB] |= (AT_TAIL << BLOCK_SHIFT(block)); } while (0)
#define ATB_HEAD_TO_MARK(block) do { MP_STATE_MEM(gc_alloc_table_start)[(block) / BLOCKS_PER_ATB] |= (AT_MARK << BLOCK_SHIFT(block)); } while (0)
#define ATB_MARK_TO_HEAD(block) do { MP_STATE_MEM(gc_alloc_table_start)[(block) / BLOCKS_PER_ATB] &= (~(AT_TAIL << BLOCK_SHIFT(block))); } while (0)
#define BLOCK_FROM_PTR(ptr) (((ptr) - (mp_uint_t)MP_STATE_MEM(gc_pool_start)) / BYTES_PER_BLOCK)
#define PTR_FROM_BLOCK(block) (((block) * BYTES_PER_BLOCK + (mp_uint_t)MP_STATE_MEM(gc_pool_start)))
#define ATB_FROM_BLOCK(bl) ((bl) / BLOCKS_PER_ATB)
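
To make the ATB macros concrete: one allocation table byte packs the 2-bit kind of four consecutive blocks, so reading a block's kind is a shift and a mask. A small standalone sketch follows; the AT_TAIL/AT_MARK values are assumed (the encoding comment is truncated in this view), and the arithmetic mirrors BLOCK_SHIFT and ATB_GET_KIND.

    #include <stdint.h>
    #include <stdio.h>

    #define BLOCKS_PER_ATB (4)
    #define AT_FREE (0)  // 0b00, from the comment above
    #define AT_HEAD (1)  // 0b01, from the comment above
    #define AT_TAIL (2)  // 0b10 (assumed)
    #define AT_MARK (3)  // 0b11 (assumed)

    int main(void) {
        uint8_t atb = 0;
        // mark block 0 as HEAD and blocks 1-2 as its TAIL, all within one ATB byte
        atb |= AT_HEAD << (2 * (0 & (BLOCKS_PER_ATB - 1)));
        atb |= AT_TAIL << (2 * (1 & (BLOCKS_PER_ATB - 1)));
        atb |= AT_TAIL << (2 * (2 & (BLOCKS_PER_ATB - 1)));
        // atb == 0x29 == 0b00101001: block 3 FREE, 2 TAIL, 1 TAIL, 0 HEAD
        for (int block = 0; block < BLOCKS_PER_ATB; block++) {
            int kind = (atb >> (2 * (block & (BLOCKS_PER_ATB - 1)))) & 3;  // ATB_GET_KIND
            printf("block %d -> kind %d\n", block, kind);
        }
        return 0;
    }
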
#if MICROPY_ENABLE_FINALISER
......@@ -107,9 +89,9 @@ STATIC mp_uint_t gc_last_free_atb_index;
#define BLOCKS_PER_FTB (8)
#define FTB_GET(block) ((gc_finaliser_table_start[(block) / BLOCKS_PER_FTB] >> ((block) & 7)) & 1)
#define FTB_SET(block) do { gc_finaliser_table_start[(block) / BLOCKS_PER_FTB] |= (1 << ((block) & 7)); } while (0)
#define FTB_CLEAR(block) do { gc_finaliser_table_start[(block) / BLOCKS_PER_FTB] &= (~(1 << ((block) & 7))); } while (0)
#define FTB_GET(block) ((MP_STATE_MEM(gc_finaliser_table_start)[(block) / BLOCKS_PER_FTB] >> ((block) & 7)) & 1)
#define FTB_SET(block) do { MP_STATE_MEM(gc_finaliser_table_start)[(block) / BLOCKS_PER_FTB] |= (1 << ((block) & 7)); } while (0)
#define FTB_CLEAR(block) do { MP_STATE_MEM(gc_finaliser_table_start)[(block) / BLOCKS_PER_FTB] &= (~(1 << ((block) & 7))); } while (0)
#endif
// TODO waste less memory; currently requires that all entries in alloc_table have a corresponding block in pool
......@@ -125,67 +107,67 @@ void gc_init(void *start, void *end) {
// => T = A * (1 + BLOCKS_PER_ATB / BLOCKS_PER_FTB + BLOCKS_PER_ATB * BYTES_PER_BLOCK)
mp_uint_t total_byte_len = (byte*)end - (byte*)start;
#if MICROPY_ENABLE_FINALISER
gc_alloc_table_byte_len = total_byte_len * BITS_PER_BYTE / (BITS_PER_BYTE + BITS_PER_BYTE * BLOCKS_PER_ATB / BLOCKS_PER_FTB + BITS_PER_BYTE * BLOCKS_PER_ATB * BYTES_PER_BLOCK);
MP_STATE_MEM(gc_alloc_table_byte_len) = total_byte_len * BITS_PER_BYTE / (BITS_PER_BYTE + BITS_PER_BYTE * BLOCKS_PER_ATB / BLOCKS_PER_FTB + BITS_PER_BYTE * BLOCKS_PER_ATB * BYTES_PER_BLOCK);
#else
gc_alloc_table_byte_len = total_byte_len / (1 + BITS_PER_BYTE / 2 * BYTES_PER_BLOCK);
MP_STATE_MEM(gc_alloc_table_byte_len) = total_byte_len / (1 + BITS_PER_BYTE / 2 * BYTES_PER_BLOCK);
#endif
gc_alloc_table_start = (byte*)start;
MP_STATE_MEM(gc_alloc_table_start) = (byte*)start;
#if MICROPY_ENABLE_FINALISER
mp_uint_t gc_finaliser_table_byte_len = (gc_alloc_table_byte_len * BLOCKS_PER_ATB + BLOCKS_PER_FTB - 1) / BLOCKS_PER_FTB;
gc_finaliser_table_start = gc_alloc_table_start + gc_alloc_table_byte_len;
mp_uint_t gc_finaliser_table_byte_len = (MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB + BLOCKS_PER_FTB - 1) / BLOCKS_PER_FTB;
MP_STATE_MEM(gc_finaliser_table_start) = MP_STATE_MEM(gc_alloc_table_start) + MP_STATE_MEM(gc_alloc_table_byte_len);
#endif
mp_uint_t gc_pool_block_len = gc_alloc_table_byte_len * BLOCKS_PER_ATB;
gc_pool_start = (mp_uint_t*)((byte*)end - gc_pool_block_len * BYTES_PER_BLOCK);
gc_pool_end = (mp_uint_t*)end;
mp_uint_t gc_pool_block_len = MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB;
MP_STATE_MEM(gc_pool_start) = (mp_uint_t*)((byte*)end - gc_pool_block_len * BYTES_PER_BLOCK);
MP_STATE_MEM(gc_pool_end) = (mp_uint_t*)end;
#if MICROPY_ENABLE_FINALISER
assert((byte*)gc_pool_start >= gc_finaliser_table_start + gc_finaliser_table_byte_len);
assert((byte*)MP_STATE_MEM(gc_pool_start) >= MP_STATE_MEM(gc_finaliser_table_start) + gc_finaliser_table_byte_len);
#endif
// clear ATBs
memset(gc_alloc_table_start, 0, gc_alloc_table_byte_len);
memset(MP_STATE_MEM(gc_alloc_table_start), 0, MP_STATE_MEM(gc_alloc_table_byte_len));
#if MICROPY_ENABLE_FINALISER
// clear FTBs
memset(gc_finaliser_table_start, 0, gc_finaliser_table_byte_len);
memset(MP_STATE_MEM(gc_finaliser_table_start), 0, gc_finaliser_table_byte_len);
#endif
// set last free ATB index to start of heap
gc_last_free_atb_index = 0;
MP_STATE_MEM(gc_last_free_atb_index) = 0;
// unlock the GC
gc_lock_depth = 0;
MP_STATE_MEM(gc_lock_depth) = 0;
// allow auto collection
gc_auto_collect_enabled = 1;
MP_STATE_MEM(gc_auto_collect_enabled) = 1;
DEBUG_printf("GC layout:\n");
DEBUG_printf(" alloc table at %p, length " UINT_FMT " bytes, " UINT_FMT " blocks\n", gc_alloc_table_start, gc_alloc_table_byte_len, gc_alloc_table_byte_len * BLOCKS_PER_ATB);
DEBUG_printf(" alloc table at %p, length " UINT_FMT " bytes, " UINT_FMT " blocks\n", MP_STATE_MEM(gc_alloc_table_start), MP_STATE_MEM(gc_alloc_table_byte_len), MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB);
#if MICROPY_ENABLE_FINALISER
DEBUG_printf(" finaliser table at %p, length " UINT_FMT " bytes, " UINT_FMT " blocks\n", gc_finaliser_table_start, gc_finaliser_table_byte_len, gc_finaliser_table_byte_len * BLOCKS_PER_FTB);
DEBUG_printf(" finaliser table at %p, length " UINT_FMT " bytes, " UINT_FMT " blocks\n", MP_STATE_MEM(gc_finaliser_table_start), gc_finaliser_table_byte_len, gc_finaliser_table_byte_len * BLOCKS_PER_FTB);
#endif
DEBUG_printf(" pool at %p, length " UINT_FMT " bytes, " UINT_FMT " blocks\n", gc_pool_start, gc_pool_block_len * BYTES_PER_BLOCK, gc_pool_block_len);
DEBUG_printf(" pool at %p, length " UINT_FMT " bytes, " UINT_FMT " blocks\n", MP_STATE_MEM(gc_pool_start), gc_pool_block_len * BYTES_PER_BLOCK, gc_pool_block_len);
}
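
For a feel of what the split computed in gc_init amounts to: a small standalone calculation, assuming a 32-bit port (BYTES_PER_WORD = 4, hence 16-byte blocks), MICROPY_ENABLE_FINALISER on, and an example 16 KiB heap. The integer arithmetic is the same as above; only the printout is added.

    #include <stdio.h>

    // Constants as in py/gc.c, assuming a 32-bit port (4-byte machine words).
    #define BITS_PER_BYTE   (8)
    #define BYTES_PER_WORD  (4)
    #define WORDS_PER_BLOCK (4)
    #define BYTES_PER_BLOCK (WORDS_PER_BLOCK * BYTES_PER_WORD) // 16
    #define BLOCKS_PER_ATB  (4)
    #define BLOCKS_PER_FTB  (8)

    int main(void) {
        unsigned long total = 16 * 1024; // example heap of 16 KiB
        // same integer arithmetic as gc_init with MICROPY_ENABLE_FINALISER
        unsigned long atb_len = total * BITS_PER_BYTE
            / (BITS_PER_BYTE + BITS_PER_BYTE * BLOCKS_PER_ATB / BLOCKS_PER_FTB
               + BITS_PER_BYTE * BLOCKS_PER_ATB * BYTES_PER_BLOCK);
        unsigned long blocks  = atb_len * BLOCKS_PER_ATB;
        unsigned long ftb_len = (blocks + BLOCKS_PER_FTB - 1) / BLOCKS_PER_FTB;
        unsigned long pool    = blocks * BYTES_PER_BLOCK;
        // 16384 -> atb 250 B, ftb 125 B, pool 16000 B (1000 blocks), 9 B slack
        printf("atb %lu B, ftb %lu B, pool %lu B (%lu blocks), slack %lu B\n",
               atb_len, ftb_len, pool, blocks, total - atb_len - ftb_len - pool);
        return 0;
    }
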
void gc_lock(void) {
gc_lock_depth++;
MP_STATE_MEM(gc_lock_depth)++;
}
void gc_unlock(void) {
gc_lock_depth--;
MP_STATE_MEM(gc_lock_depth)--;
}
bool gc_is_locked(void) {
return gc_lock_depth != 0;
return MP_STATE_MEM(gc_lock_depth) != 0;
}
#define VERIFY_PTR(ptr) ( \
(ptr & (BYTES_PER_BLOCK - 1)) == 0 /* must be aligned on a block */ \
&& ptr >= (mp_uint_t)gc_pool_start /* must be above start of pool */ \
&& ptr < (mp_uint_t)gc_pool_end /* must be below end of pool */ \
&& ptr >= (mp_uint_t)MP_STATE_MEM(gc_pool_start) /* must be above start of pool */ \
&& ptr < (mp_uint_t)MP_STATE_MEM(gc_pool_end) /* must be below end of pool */ \
)
#define VERIFY_MARK_AND_PUSH(ptr) \
......@@ -195,19 +177,19 @@ bool gc_is_locked(void) {
if (ATB_GET_KIND(_block) == AT_HEAD) { \
/* an unmarked head, mark it, and push it on gc stack */ \
ATB_HEAD_TO_MARK(_block); \
if (gc_sp < &gc_stack[MICROPY_ALLOC_GC_STACK_SIZE]) { \
*gc_sp++ = _block; \
if (MP_STATE_MEM(gc_sp) < &MP_STATE_MEM(gc_stack)[MICROPY_ALLOC_GC_STACK_SIZE]) { \
*MP_STATE_MEM(gc_sp)++ = _block; \
} else { \
gc_stack_overflow = 1; \
MP_STATE_MEM(gc_stack_overflow) = 1; \
} \
} \
} \
} while (0)
STATIC void gc_drain_stack(void) {
while (gc_sp > gc_stack) {
while (MP_STATE_MEM(gc_sp) > MP_STATE_MEM(gc_stack)) {
// pop the next block off the stack
mp_uint_t block = *--gc_sp;
mp_uint_t block = *--MP_STATE_MEM(gc_sp);
// work out number of consecutive blocks in the chain starting with this one
mp_uint_t n_blocks = 0;
......@@ -225,15 +207,15 @@ STATIC void gc_drain_stack(void) {
}
STATIC void gc_deal_with_stack_overflow(void) {
while (gc_stack_overflow) {
gc_stack_overflow = 0;
gc_sp = gc_stack;
while (MP_STATE_MEM(gc_stack_overflow)) {
MP_STATE_MEM(gc_stack_overflow) = 0;
MP_STATE_MEM(gc_sp) = MP_STATE_MEM(gc_stack);
// scan entire memory looking for blocks which have been marked but not their children
for (mp_uint_t block = 0; block < gc_alloc_table_byte_len * BLOCKS_PER_ATB; block++) {
for (mp_uint_t block = 0; block < MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB; block++) {
// trace (again) if mark bit set
if (ATB_GET_KIND(block) == AT_MARK) {
*gc_sp++ = block;
*MP_STATE_MEM(gc_sp)++ = block;
gc_drain_stack();
}
}
......@@ -250,7 +232,7 @@ STATIC void gc_sweep(void) {
#endif
// free unmarked heads and their tails
int free_tail = 0;
for (mp_uint_t block = 0; block < gc_alloc_table_byte_len * BLOCKS_PER_ATB; block++) {
for (mp_uint_t block = 0; block < MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB; block++) {
switch (ATB_GET_KIND(block)) {
case AT_HEAD:
#if MICROPY_ENABLE_FINALISER
......@@ -292,8 +274,13 @@ STATIC void gc_sweep(void) {
void gc_collect_start(void) {
gc_lock();
gc_stack_overflow = 0;
gc_sp = gc_stack;
MP_STATE_MEM(gc_stack_overflow) = 0;
MP_STATE_MEM(gc_sp) = MP_STATE_MEM(gc_stack);
// Trace root pointers. This relies on the root pointers being organised
// correctly in the mp_state_ctx structure. We scan nlr_top, dict_locals,
// dict_globals, then the root pointer section of mp_state_vm.
void **ptrs = (void**)(void*)&mp_state_ctx;
gc_collect_root(ptrs, offsetof(mp_state_ctx_t, vm.stack_top) / sizeof(mp_uint_t));
}
void gc_collect_root(void **ptrs, mp_uint_t len) {
......@@ -307,18 +294,18 @@ void gc_collect_root(void **ptrs, mp_uint_t len) {
void gc_collect_end(void) {
gc_deal_with_stack_overflow();
gc_sweep();
gc_last_free_atb_index = 0;
MP_STATE_MEM(gc_last_free_atb_index) = 0;
gc_unlock();
}
void gc_info(gc_info_t *info) {
info->total = (gc_pool_end - gc_pool_start) * sizeof(mp_uint_t);
info->total = (MP_STATE_MEM(gc_pool_end) - MP_STATE_MEM(gc_pool_start)) * sizeof(mp_uint_t);
info->used = 0;
info->free = 0;
info->num_1block = 0;
info->num_2block = 0;
info->max_block = 0;
for (mp_uint_t block = 0, len = 0; block < gc_alloc_table_byte_len * BLOCKS_PER_ATB; block++) {
for (mp_uint_t block = 0, len = 0; block < MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB; block++) {
mp_uint_t kind = ATB_GET_KIND(block);
if (kind == AT_FREE || kind == AT_HEAD) {
if (len == 1) {
......@@ -361,7 +348,7 @@ void *gc_alloc(mp_uint_t n_bytes, bool has_finaliser) {
DEBUG_printf("gc_alloc(" UINT_FMT " bytes -> " UINT_FMT " blocks)\n", n_bytes, n_blocks);
// check if GC is locked
if (gc_lock_depth > 0) {
if (MP_STATE_MEM(gc_lock_depth) > 0) {
return NULL;
}
......@@ -374,12 +361,12 @@ void *gc_alloc(mp_uint_t n_bytes, bool has_finaliser) {
mp_uint_t end_block;
mp_uint_t start_block;
mp_uint_t n_free = 0;
int collected = !gc_auto_collect_enabled;
int collected = !MP_STATE_MEM(gc_auto_collect_enabled);
for (;;) {
// look for a run of n_blocks available blocks
for (i = gc_last_free_atb_index; i < gc_alloc_table_byte_len; i++) {
byte a = gc_alloc_table_start[i];
for (i = MP_STATE_MEM(gc_last_free_atb_index); i < MP_STATE_MEM(gc_alloc_table_byte_len); i++) {
byte a = MP_STATE_MEM(gc_alloc_table_start)[i];
if (ATB_0_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 0; goto found; } } else { n_free = 0; }
if (ATB_1_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 1; goto found; } } else { n_free = 0; }
if (ATB_2_IS_FREE(a)) { if (++n_free >= n_blocks) { i = i * BLOCKS_PER_ATB + 2; goto found; } } else { n_free = 0; }
......@@ -407,7 +394,7 @@ found:
// before this one. Also, whenever we free or shrink a block we must check
// if this index needs adjusting (see gc_realloc and gc_free).
if (n_free == 1) {
gc_last_free_atb_index = (i + 1) / BLOCKS_PER_ATB;
MP_STATE_MEM(gc_last_free_atb_index) = (i + 1) / BLOCKS_PER_ATB;
}
// mark first block as used head
......@@ -420,7 +407,7 @@ found:
}
// get pointer to first block
void *ret_ptr = (void*)(gc_pool_start + start_block * WORDS_PER_BLOCK);
void *ret_ptr = (void*)(MP_STATE_MEM(gc_pool_start) + start_block * WORDS_PER_BLOCK);
DEBUG_printf("gc_alloc(%p)\n", ret_ptr);
// zero out the additional bytes of the newly allocated blocks
......@@ -458,7 +445,7 @@ void *gc_alloc_with_finaliser(mp_uint_t n_bytes) {
// force the freeing of a piece of memory
void gc_free(void *ptr_in) {
if (gc_lock_depth > 0) {
if (MP_STATE_MEM(gc_lock_depth) > 0) {
// TODO how to deal with this error?
return;
}
......@@ -470,8 +457,8 @@ void gc_free(void *ptr_in) {
mp_uint_t block = BLOCK_FROM_PTR(ptr);
if (ATB_GET_KIND(block) == AT_HEAD) {
// set the last_free pointer to this block if it's earlier in the heap
if (block / BLOCKS_PER_ATB < gc_last_free_atb_index) {
gc_last_free_atb_index = block / BLOCKS_PER_ATB;
if (block / BLOCKS_PER_ATB < MP_STATE_MEM(gc_last_free_atb_index)) {
MP_STATE_MEM(gc_last_free_atb_index) = block / BLOCKS_PER_ATB;
}
// free head and all of its tail blocks
......@@ -540,7 +527,7 @@ void *gc_realloc(void *ptr, mp_uint_t n_bytes) {
#else // Alternative gc_realloc impl
void *gc_realloc(void *ptr_in, mp_uint_t n_bytes) {
if (gc_lock_depth > 0) {
if (MP_STATE_MEM(gc_lock_depth) > 0) {
return NULL;
}
......@@ -581,7 +568,7 @@ void *gc_realloc(void *ptr_in, mp_uint_t n_bytes) {
// efficiently shrink it (see below for shrinking code).
mp_uint_t n_free = 0;
mp_uint_t n_blocks = 1; // counting HEAD block
mp_uint_t max_block = gc_alloc_table_byte_len * BLOCKS_PER_ATB;
mp_uint_t max_block = MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB;
for (mp_uint_t bl = block + n_blocks; bl < max_block; bl++) {
byte block_type = ATB_GET_KIND(bl);
if (block_type == AT_TAIL) {
......@@ -612,8 +599,8 @@ void *gc_realloc(void *ptr_in, mp_uint_t n_bytes) {
}
// set the last_free pointer to end of this block if it's earlier in the heap
if ((block + new_blocks) / BLOCKS_PER_ATB < gc_last_free_atb_index) {
gc_last_free_atb_index = (block + new_blocks) / BLOCKS_PER_ATB;
if ((block + new_blocks) / BLOCKS_PER_ATB < MP_STATE_MEM(gc_last_free_atb_index)) {
MP_STATE_MEM(gc_last_free_atb_index) = (block + new_blocks) / BLOCKS_PER_ATB;
}
#if EXTENSIVE_HEAP_PROFILING
......@@ -675,22 +662,22 @@ void gc_dump_alloc_table(void) {
#if !EXTENSIVE_HEAP_PROFILING
// When comparing heap output we don't want to print the starting
// pointer of the heap because it changes from run to run.
printf("GC memory layout; from %p:", gc_pool_start);
printf("GC memory layout; from %p:", MP_STATE_MEM(gc_pool_start));
#endif
for (mp_uint_t bl = 0; bl < gc_alloc_table_byte_len * BLOCKS_PER_ATB; bl++) {
for (mp_uint_t bl = 0; bl < MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB; bl++) {
if (bl % DUMP_BYTES_PER_LINE == 0) {
// a new line of blocks
{
// check if this line contains only free blocks
mp_uint_t bl2 = bl;
while (bl2 < gc_alloc_table_byte_len * BLOCKS_PER_ATB && ATB_GET_KIND(bl2) == AT_FREE) {
while (bl2 < MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB && ATB_GET_KIND(bl2) == AT_FREE) {
bl2++;
}
if (bl2 - bl >= 2 * DUMP_BYTES_PER_LINE) {
// there are at least 2 lines containing only free blocks, so abbreviate their printing
printf("\n (" UINT_FMT " lines all free)", (bl2 - bl) / DUMP_BYTES_PER_LINE);
bl = bl2 & (~(DUMP_BYTES_PER_LINE - 1));
if (bl >= gc_alloc_table_byte_len * BLOCKS_PER_ATB) {
if (bl >= MP_STATE_MEM(gc_alloc_table_byte_len) * BLOCKS_PER_ATB) {
// got to end of heap
break;
}
......@@ -736,7 +723,7 @@ void gc_dump_alloc_table(void) {
*/
/* this prints the uPy object type of the head block */
case AT_HEAD: {
mp_uint_t *ptr = gc_pool_start + bl * WORDS_PER_BLOCK;
mp_uint_t *ptr = MP_STATE_MEM(gc_pool_start) + bl * WORDS_PER_BLOCK;
if (*ptr == (mp_uint_t)&mp_type_tuple) { c = 'T'; }
else if (*ptr == (mp_uint_t)&mp_type_list) { c = 'L'; }
else if (*ptr == (mp_uint_t)&mp_type_dict) { c = 'D'; }
......
......@@ -39,11 +39,6 @@ void gc_lock(void);
void gc_unlock(void);
bool gc_is_locked(void);
// This variable controls auto garbage collection. If set to 0 then the
// GC won't automatically run when gc_alloc can't find enough blocks. But
// you can still allocate/free memory and also explicitly call gc_collect.
extern uint16_t gc_auto_collect_enabled;
// A given port must implement gc_collect by using the other collect functions.
void gc_collect(void);
void gc_collect_start(void);
......
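
Per the comment that remains in gc.h, each port still supplies its own gc_collect built from these primitives; what changes with this commit is that gc_collect_start already traces the root pointers packed in mp_state_ctx, so the port only has to add its registers and the C stack. A hedged sketch of the usual shape (gc_helper_get_regs_and_sp is a hypothetical helper, and the use of MP_STATE_VM(stack_top) assumes the port recorded the stack top, e.g. via py/stackctrl):

    #include "py/mpstate.h"
    #include "py/gc.h"

    // hypothetical helper: spill callee-saved registers into regs[] and return
    // the current stack pointer (each real port does this in its own way)
    extern mp_uint_t gc_helper_get_regs_and_sp(mp_uint_t *regs);

    void gc_collect(void) {
        gc_collect_start();  // traces nlr_top, dict_locals, dict_globals and the
                             // root pointer section of mp_state_vm (see gc.c above)
        mp_uint_t regs[8];
        mp_uint_t sp = gc_helper_get_regs_and_sp(regs);
        gc_collect_root((void**)regs, sizeof(regs) / sizeof(regs[0]));
        // scan the C stack from the current sp up to the recorded top of stack
        gc_collect_root((void**)sp,
            ((mp_uint_t)MP_STATE_VM(stack_top) - sp) / sizeof(mp_uint_t));
        gc_collect_end();
    }
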
......@@ -27,6 +27,7 @@
#include <stdio.h>
#include <assert.h>
#include "py/mpstate.h"
#include "py/lexer.h"
#define TAB_SIZE (8)
......@@ -34,8 +35,6 @@
// TODO seems that CPython allows NULL byte in the input stream
// don't know if that's intentional or not, but we don't allow it
mp_uint_t mp_optimise_value;
// TODO replace with a call to a standard function
STATIC bool str_strn_equal(const char *str, const char *strn, mp_uint_t len) {
mp_uint_t i = 0;
......@@ -662,7 +661,7 @@ STATIC void mp_lexer_next_token_into(mp_lexer_t *lex, bool first_token) {
if (str_strn_equal(tok_kw[i], lex->vstr.buf, lex->vstr.len)) {
if (i == MP_ARRAY_SIZE(tok_kw) - 1) {
// tok_kw[MP_ARRAY_SIZE(tok_kw) - 1] == "__debug__"
lex->tok_kind = (mp_optimise_value == 0 ? MP_TOKEN_KW_TRUE : MP_TOKEN_KW_FALSE);
lex->tok_kind = (MP_STATE_VM(mp_optimise_value) == 0 ? MP_TOKEN_KW_TRUE : MP_TOKEN_KW_FALSE);
} else {
lex->tok_kind = MP_TOKEN_KW_FALSE + i;
}
......
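
mp_optimise_value itself moves into the VM state, so a port that parses -O options now writes it through the accessor macro. A hedged sketch (the argv scanning is illustrative, not the unix port's actual option handling): any value above 0 makes __debug__ lex as the False keyword per the hunk above, and 3 or more also stops emitbc from storing source line numbers.

    #include <string.h>
    #include "py/mpstate.h"

    // Illustrative only: set the optimisation level from command line arguments.
    static void set_optimisation_from_args(int argc, char **argv) {
        for (int i = 1; i < argc; i++) {
            if (strncmp(argv[i], "-O", 2) == 0) {
                // treat bare "-O" as level 1, "-O3" as level 3, and so on
                MP_STATE_VM(mp_optimise_value) =
                    argv[i][2] ? (mp_uint_t)(argv[i][2] - '0') : 1;
            }
        }
    }
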
......@@ -192,6 +192,4 @@ typedef enum {
mp_import_stat_t mp_import_stat(const char *path);
mp_lexer_t *mp_lexer_new_from_file(const char *filename);
extern mp_uint_t mp_optimise_value;
#endif // __MICROPY_INCLUDED_PY_LEXER_H__
......@@ -30,6 +30,7 @@
#include "py/mpconfig.h"
#include "py/misc.h"
#include "py/mpstate.h"
#if 0 // print debugging info
#define DEBUG_printf DEBUG_printf
......@@ -38,11 +39,7 @@
#endif
#if MICROPY_MEM_STATS
STATIC size_t total_bytes_allocated = 0;
STATIC size_t current_bytes_allocated = 0;
STATIC size_t peak_bytes_allocated = 0;
#define UPDATE_PEAK() { if (current_bytes_allocated > peak_bytes_allocated) peak_bytes_allocated = current_bytes_allocated; }
#define UPDATE_PEAK() { if (MP_STATE_MEM(current_bytes_allocated) > MP_STATE_MEM(peak_bytes_allocated)) MP_STATE_MEM(peak_bytes_allocated) = MP_STATE_MEM(current_bytes_allocated); }
#endif
#if MICROPY_ENABLE_GC
......@@ -68,8 +65,8 @@ void *m_malloc(size_t num_bytes) {
return m_malloc_fail(num_bytes);
}
#if MICROPY_MEM_STATS
total_bytes_allocated += num_bytes;
current_bytes_allocated += num_bytes;
MP_STATE_MEM(total_bytes_allocated) += num_bytes;
MP_STATE_MEM(current_bytes_allocated) += num_bytes;
UPDATE_PEAK();
#endif
DEBUG_printf("malloc %d : %p\n", num_bytes, ptr);
......@@ -79,8 +76,8 @@ void *m_malloc(size_t num_bytes) {
void *m_malloc_maybe(size_t num_bytes) {
void *ptr = malloc(num_bytes);
#if MICROPY_MEM_STATS
total_bytes_allocated += num_bytes;
current_bytes_allocated += num_bytes;
MP_STATE_MEM(total_bytes_allocated) += num_bytes;
MP_STATE_MEM(current_bytes_allocated) += num_bytes;
UPDATE_PEAK();
#endif
DEBUG_printf("malloc %d : %p\n", num_bytes, ptr);
......@@ -94,8 +91,8 @@ void *m_malloc_with_finaliser(size_t num_bytes) {
return m_malloc_fail(num_bytes);
}
#if MICROPY_MEM_STATS
total_bytes_allocated += num_bytes;
current_bytes_allocated += num_bytes;
MP_STATE_MEM(total_bytes_allocated) += num_bytes;
MP_STATE_MEM(current_bytes_allocated) += num_bytes;
UPDATE_PEAK();
#endif
DEBUG_printf("malloc %d : %p\n", num_bytes, ptr);
......@@ -124,8 +121,8 @@ void *m_realloc(void *ptr, size_t old_num_bytes, size_t new_num_bytes) {
// allocated total. If we process only positive increments,
// we'll count 3K.
size_t diff = new_num_bytes - old_num_bytes;
total_bytes_allocated += diff;
current_bytes_allocated += diff;
MP_STATE_MEM(total_bytes_allocated) += diff;
MP_STATE_MEM(current_bytes_allocated) += diff;
UPDATE_PEAK();
#endif
DEBUG_printf("realloc %p, %d, %d : %p\n", ptr, old_num_bytes, new_num_bytes, new_ptr);
......@@ -143,8 +140,8 @@ void *m_realloc_maybe(void *ptr, size_t old_num_bytes, size_t new_num_bytes) {
// Also, don't count failed reallocs.
if (!(new_ptr == NULL && new_num_bytes != 0)) {
size_t diff = new_num_bytes - old_num_bytes;
total_bytes_allocated += diff;
current_bytes_allocated += diff;
MP_STATE_MEM(total_bytes_allocated) += diff;
MP_STATE_MEM(current_bytes_allocated) += diff;
UPDATE_PEAK();
}
#endif
......@@ -155,21 +152,21 @@ void *m_realloc_maybe(void *ptr, size_t old_num_bytes, size_t new_num_bytes) {
void m_free(void *ptr, size_t num_bytes) {
free(ptr);
#if MICROPY_MEM_STATS
current_bytes_allocated -= num_bytes;
MP_STATE_MEM(current_bytes_allocated) -= num_bytes;
#endif
DEBUG_printf("free %p, %d\n", ptr, num_bytes);
}
#if MICROPY_MEM_STATS
size_t m_get_total_bytes_allocated(void) {
return total_bytes_allocated;
return MP_STATE_MEM(total_bytes_allocated);
}
size_t m_get_current_bytes_allocated(void) {
return current_bytes_allocated;
return MP_STATE_MEM(current_bytes_allocated);
}
size_t m_get_peak_bytes_allocated(void) {
return peak_bytes_allocated;
return MP_STATE_MEM(peak_bytes_allocated);
}
#endif
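
With the counters now living in mp_state_mem_t, the m_get_* accessors above stay the public way to read them. A small hedged sketch of reporting the statistics (only meaningful when the port enables MICROPY_MEM_STATS; the function name here is illustrative):

    #include <stdio.h>
    #include "py/mpconfig.h"
    #include "py/misc.h"

    void print_mem_stats(void) {
        #if MICROPY_MEM_STATS
        printf("total bytes allocated:   %u\n", (unsigned)m_get_total_bytes_allocated());
        printf("current bytes allocated: %u\n", (unsigned)m_get_current_bytes_allocated());
        printf("peak bytes allocated:    %u\n", (unsigned)m_get_peak_bytes_allocated());
        #endif
    }
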
......@@ -24,6 +24,7 @@
* THE SOFTWARE.
*/
#include "py/mpstate.h"
#include "py/obj.h"
#include "py/gc.h"
......@@ -48,7 +49,7 @@ MP_DEFINE_CONST_FUN_OBJ_0(gc_collect_obj, py_gc_collect);
/// \function disable()
/// Disable the garbage collector.
STATIC mp_obj_t gc_disable(void) {
gc_auto_collect_enabled = 0;
MP_STATE_MEM(gc_auto_collect_enabled) = 0;
return mp_const_none;
}
MP_DEFINE_CONST_FUN_OBJ_0(gc_disable_obj, gc_disable);
......@@ -56,13 +57,13 @@ MP_DEFINE_CONST_FUN_OBJ_0(gc_disable_obj, gc_disable);
/// \function enable()
/// Enable the garbage collector.
STATIC mp_obj_t gc_enable(void) {
gc_auto_collect_enabled = 1;
MP_STATE_MEM(gc_auto_collect_enabled) = 1;
return mp_const_none;
}
MP_DEFINE_CONST_FUN_OBJ_0(gc_enable_obj, gc_enable);
STATIC mp_obj_t gc_isenabled(void) {
return MP_BOOL(gc_auto_collect_enabled);
return MP_BOOL(MP_STATE_MEM(gc_auto_collect_enabled));
}
MP_DEFINE_CONST_FUN_OBJ_0(gc_isenabled_obj, gc_isenabled);
......
......@@ -488,6 +488,11 @@ typedef double mp_float_t;
#define MICROPY_PORT_CONSTANTS
#endif
// Any root pointers for GC scanning - see mpstate.c
#ifndef MICROPY_PORT_ROOT_POINTERS
#define MICROPY_PORT_ROOT_POINTERS
#endif
/*****************************************************************************/
/* Miscellaneous settings */
......
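
The new MICROPY_PORT_ROOT_POINTERS hook lets a port splice its own heap pointers into the traced root pointer section of the state structure, so they are picked up by gc_collect_start without any extra port code. A hedged sketch of a port's mpconfigport.h using it (readline_hist is just an example field name):

    // In a port's mpconfigport.h; the macro body is pasted into the root pointer
    // section of the VM state (see py/mpstate.h), so anything declared here gets
    // traced by the GC. The field name is illustrative.
    #define MICROPY_PORT_ROOT_POINTERS \
        const char *readline_hist[8];

    // The port then reaches the field through the usual accessor, e.g.
    //     MP_STATE_VM(readline_hist)[0] = line;
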
/*
* This file is part of the Micro Python project, http://micropython.org/