plan
This commit modifies source/quickjs.c (590 changed lines).
@@ -275,11 +275,43 @@ typedef enum OPCodeEnum OPCodeEnum;
|
||||
/* Forward declarations for types defined later in this file. */
struct StoneArenaPage;
struct mist_text;

/* ============================================================
   Buddy Allocator for Actor Memory Blocks
   ============================================================ */

#define BUDDY_MIN_ORDER 16 /* 64KB minimum block */
#define BUDDY_MAX_ORDER 28 /* 256MB maximum */
#define BUDDY_LEVELS (BUDDY_MAX_ORDER - BUDDY_MIN_ORDER + 1)
#define BUDDY_POOL_SIZE (1ULL << BUDDY_MAX_ORDER)

/* Free-list node, stored in-place at the start of each free block.
   NOTE(review): because the header lives inside the block itself, it is
   overwritten by user data once the block is handed out — buddy_free must
   not trust a buddy's header unless that buddy is genuinely on a free
   list. Confirm coalescing cannot misread user bytes as a header. */
typedef struct BuddyBlock {
    struct BuddyBlock *next;  /* next block in the per-order free list */
    struct BuddyBlock *prev;  /* previous block in the per-order free list */
    uint8_t order; /* log2 of size */
    uint8_t is_free; /* nonzero while the block sits on a free list */
} BuddyBlock;

/* One buddy-allocator instance: a single contiguous pool carved into
   power-of-two blocks, with one free list per order. */
typedef struct BuddyAllocator {
    uint8_t *base; /* 256MB base address */
    size_t total_size; /* 256MB */
    BuddyBlock *free_lists[BUDDY_LEVELS]; /* indexed by order - BUDDY_MIN_ORDER */
    uint8_t initialized; /* lazy-init flag set by buddy_init */
} BuddyAllocator;

/* Forward declarations for buddy allocator functions */
static int buddy_init(BuddyAllocator *b);
static void *buddy_alloc(BuddyAllocator *b, size_t size);
static void buddy_free(BuddyAllocator *b, void *ptr, size_t size);
static void buddy_destroy(BuddyAllocator *b);
||||
struct JSRuntime {
|
||||
JSMallocFunctions mf;
|
||||
JSMallocState malloc_state;
|
||||
const char *rt_info;
|
||||
|
||||
/* Buddy allocator for actor memory blocks */
|
||||
BuddyAllocator buddy;
|
||||
|
||||
int class_count; /* size of class_array */
|
||||
JSClass *class_array;
|
||||
|
||||
@@ -693,6 +725,9 @@ typedef struct JSRecordEntry JSRecordEntry;
|
||||
/* Placeholder: these will be replaced with actual implementations after
|
||||
* JSRecord is defined */
|
||||
|
||||
/* Forward declaration for bump allocator */
|
||||
static void *ctx_alloc(JSContext *ctx, size_t size);
|
||||
|
||||
/* ============================================================
|
||||
Stone Arena Allocator
|
||||
============================================================ */
|
||||
@@ -871,6 +906,13 @@ struct JSContext {
|
||||
JSRuntime *rt;
|
||||
struct list_head link;
|
||||
|
||||
/* Actor memory block (bump allocation) */
|
||||
uint8_t *heap_base; /* start of current block */
|
||||
uint8_t *heap_free; /* bump pointer */
|
||||
uint8_t *heap_end; /* end of block */
|
||||
size_t current_block_size; /* current block size (64KB initially) */
|
||||
size_t next_block_size; /* doubles if <10% recovered after GC */
|
||||
|
||||
uint16_t binary_object_count;
|
||||
int binary_object_size;
|
||||
|
||||
@@ -1526,15 +1568,19 @@ static int rec_set_own (JSContext *ctx, JSRecord *rec, JSValue k,
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Allocate a new record with specified class_id (default JS_CLASS_OBJECT) */
|
||||
/* Allocate a new record with specified class_id (default JS_CLASS_OBJECT)
|
||||
Uses bump allocation from context heap. Tab is inline (flexible array member). */
|
||||
static JSRecord *js_new_record_class (JSContext *ctx, uint32_t initial_mask,
|
||||
JSClassID class_id) {
|
||||
JSRecord *rec;
|
||||
JSRuntime *rt = ctx->rt;
|
||||
|
||||
if (initial_mask == 0) initial_mask = JS_RECORD_INITIAL_MASK;
|
||||
|
||||
rec = js_mallocz (ctx, sizeof (JSRecord));
|
||||
/* Allocate record + inline tab in one bump allocation */
|
||||
size_t tab_size = sizeof(JSRecordEntry) * (initial_mask + 1);
|
||||
size_t total_size = sizeof(JSRecord) + tab_size;
|
||||
|
||||
JSRecord *rec = ctx_alloc(ctx, total_size);
|
||||
if (!rec) return NULL;
|
||||
|
||||
rec->header.ref_count = 1;
|
||||
@@ -1551,16 +1597,10 @@ static JSRecord *js_new_record_class (JSContext *ctx, uint32_t initial_mask,
|
||||
rec->tmp_mark = 0;
|
||||
rec->u.opaque = NULL;
|
||||
|
||||
rec->tab = js_mallocz (ctx, sizeof (JSRecordEntry) * (initial_mask + 1));
|
||||
if (!rec->tab) {
|
||||
js_free (ctx, rec);
|
||||
return NULL;
|
||||
}
|
||||
/* Tab is inline right after the struct */
|
||||
rec->tab = (JSRecordEntry *)((uint8_t *)rec + sizeof(JSRecord));
|
||||
rec_tab_init (rec->tab, initial_mask);
|
||||
|
||||
/* Add to GC list */
|
||||
list_add_tail (&rec->header.link, &rt->gc_obj_list);
|
||||
|
||||
return rec;
|
||||
}
|
||||
|
||||
@@ -2239,6 +2279,409 @@ static inline BOOL js_check_stack_overflow (JSRuntime *rt,
|
||||
}
|
||||
#endif
|
||||
|
||||
/* ============================================================
|
||||
Buddy Allocator Implementation
|
||||
============================================================ */
|
||||
|
||||
/* Get order (log2) for a given size, rounding up to minimum */
|
||||
static int buddy_get_order(size_t size) {
|
||||
int order = BUDDY_MIN_ORDER;
|
||||
size_t block_size = 1ULL << BUDDY_MIN_ORDER;
|
||||
while (block_size < size && order < BUDDY_MAX_ORDER) {
|
||||
order++;
|
||||
block_size <<= 1;
|
||||
}
|
||||
return order;
|
||||
}
|
||||
|
||||
/* Get offset of block from base */
|
||||
static size_t buddy_block_offset(BuddyAllocator *b, void *ptr) {
|
||||
return (uint8_t *)ptr - b->base;
|
||||
}
|
||||
|
||||
/* Get buddy address for a block at given offset and order */
|
||||
static void *buddy_get_buddy(BuddyAllocator *b, void *ptr, int order) {
|
||||
size_t offset = buddy_block_offset(b, ptr);
|
||||
size_t buddy_offset = offset ^ (1ULL << order);
|
||||
return b->base + buddy_offset;
|
||||
}
|
||||
|
||||
/* Unlink 'block' from its doubly-linked free list and clear its links.
   NOTE(review): this does NOT update the free-list head pointer in the
   allocator, so it is only safe for blocks that are not the list head.
   The call sites visible in this file (buddy_alloc, buddy_free) unlink
   manually instead of calling this — confirm before adding new callers. */
static void buddy_list_remove(BuddyBlock *block) {
    if (block->prev) block->prev->next = block->next;
    if (block->next) block->next->prev = block->prev;
    block->next = NULL;
    block->prev = NULL;
}
|
||||
|
||||
/* Add block to front of free list */
|
||||
static void buddy_list_add(BuddyAllocator *b, BuddyBlock *block, int order) {
|
||||
int level = order - BUDDY_MIN_ORDER;
|
||||
block->next = b->free_lists[level];
|
||||
block->prev = NULL;
|
||||
if (b->free_lists[level]) {
|
||||
b->free_lists[level]->prev = block;
|
||||
}
|
||||
b->free_lists[level] = block;
|
||||
block->order = order;
|
||||
block->is_free = 1;
|
||||
}
|
||||
|
||||
/* Initialize buddy allocator with 256MB pool */
|
||||
static int buddy_init(BuddyAllocator *b) {
|
||||
if (b->initialized) return 0;
|
||||
|
||||
/* Allocate the pool (using system malloc, not js_malloc) */
|
||||
b->base = (uint8_t *)malloc(BUDDY_POOL_SIZE);
|
||||
if (!b->base) return -1;
|
||||
|
||||
b->total_size = BUDDY_POOL_SIZE;
|
||||
|
||||
/* Initialize free lists */
|
||||
for (int i = 0; i < BUDDY_LEVELS; i++) {
|
||||
b->free_lists[i] = NULL;
|
||||
}
|
||||
|
||||
/* Add entire pool as one free block at max order */
|
||||
BuddyBlock *block = (BuddyBlock *)b->base;
|
||||
buddy_list_add(b, block, BUDDY_MAX_ORDER);
|
||||
|
||||
b->initialized = 1;
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Allocate a block of at least 'size' bytes */
|
||||
static void *buddy_alloc(BuddyAllocator *b, size_t size) {
|
||||
if (!b->initialized) {
|
||||
if (buddy_init(b) < 0) return NULL;
|
||||
}
|
||||
|
||||
int order = buddy_get_order(size);
|
||||
if (order > BUDDY_MAX_ORDER) return NULL;
|
||||
|
||||
/* Find smallest available block that fits */
|
||||
int level = order - BUDDY_MIN_ORDER;
|
||||
int found_level = -1;
|
||||
for (int i = level; i < BUDDY_LEVELS; i++) {
|
||||
if (b->free_lists[i]) {
|
||||
found_level = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (found_level < 0) return NULL; /* Out of memory */
|
||||
|
||||
/* Remove block from free list */
|
||||
BuddyBlock *block = b->free_lists[found_level];
|
||||
if (block->prev) {
|
||||
block->prev->next = block->next;
|
||||
} else {
|
||||
b->free_lists[found_level] = block->next;
|
||||
}
|
||||
if (block->next) block->next->prev = NULL;
|
||||
|
||||
/* Split block down to required order */
|
||||
int current_order = found_level + BUDDY_MIN_ORDER;
|
||||
while (current_order > order) {
|
||||
current_order--;
|
||||
/* Create buddy block in upper half */
|
||||
BuddyBlock *buddy = (BuddyBlock *)((uint8_t *)block + (1ULL << current_order));
|
||||
buddy_list_add(b, buddy, current_order);
|
||||
}
|
||||
|
||||
block->order = order;
|
||||
block->is_free = 0;
|
||||
return block;
|
||||
}
|
||||
|
||||
/* Free a block */
|
||||
static void buddy_free(BuddyAllocator *b, void *ptr, size_t size) {
|
||||
if (!ptr || !b->initialized) return;
|
||||
|
||||
int order = buddy_get_order(size);
|
||||
BuddyBlock *block = (BuddyBlock *)ptr;
|
||||
|
||||
/* Try to coalesce with buddy */
|
||||
while (order < BUDDY_MAX_ORDER) {
|
||||
BuddyBlock *buddy = buddy_get_buddy(b, block, order);
|
||||
|
||||
/* Check if buddy is free and same order */
|
||||
if (!buddy->is_free || buddy->order != order) break;
|
||||
|
||||
/* Remove buddy from free list */
|
||||
int level = order - BUDDY_MIN_ORDER;
|
||||
if (buddy->prev) {
|
||||
buddy->prev->next = buddy->next;
|
||||
} else {
|
||||
b->free_lists[level] = buddy->next;
|
||||
}
|
||||
if (buddy->next) buddy->next->prev = NULL;
|
||||
|
||||
/* Coalesce: use lower address as merged block */
|
||||
if ((uint8_t *)buddy < (uint8_t *)block) {
|
||||
block = buddy;
|
||||
}
|
||||
order++;
|
||||
}
|
||||
|
||||
/* Add merged block to free list */
|
||||
buddy_list_add(b, block, order);
|
||||
}
|
||||
|
||||
/* Destroy buddy allocator and free pool */
|
||||
static void buddy_destroy(BuddyAllocator *b) {
|
||||
if (!b->initialized) return;
|
||||
|
||||
free(b->base);
|
||||
b->base = NULL;
|
||||
b->initialized = 0;
|
||||
for (int i = 0; i < BUDDY_LEVELS; i++) {
|
||||
b->free_lists[i] = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
/* ============================================================
   Bump Allocator and Cheney GC
   ============================================================ */

/* Forward declarations for GC helpers.
   to_free/to_end describe the to-space bump region during collection. */
static int ctx_gc(JSContext *ctx);
static JSValue gc_copy_value(JSContext *ctx, JSValue v, uint8_t **to_free, uint8_t *to_end);
static void gc_scan_object(JSContext *ctx, void *ptr, uint8_t **to_free, uint8_t *to_end);
static size_t gc_object_size(void *ptr);
||||
/* Check if pointer is in stone arena (not subject to GC) */
|
||||
static int is_stone_ptr(JSContext *ctx, void *ptr) {
|
||||
StoneArenaPage *page = ctx->st_pages;
|
||||
while (page) {
|
||||
if ((uint8_t *)ptr >= page->data &&
|
||||
(uint8_t *)ptr < page->data + STONE_PAGE_SIZE) {
|
||||
return 1;
|
||||
}
|
||||
page = page->next;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Bump allocator - allocate from current heap block */
|
||||
static void *ctx_alloc(JSContext *ctx, size_t size) {
|
||||
size = (size + 7) & ~7; /* 8-byte align */
|
||||
|
||||
if (ctx->heap_free + size > ctx->heap_end) {
|
||||
/* Block full - trigger GC */
|
||||
if (ctx_gc(ctx) < 0) return NULL;
|
||||
if (ctx->heap_free + size > ctx->heap_end) {
|
||||
return NULL; /* Still OOM after GC */
|
||||
}
|
||||
}
|
||||
|
||||
void *ptr = ctx->heap_free;
|
||||
ctx->heap_free += size;
|
||||
memset(ptr, 0, size);
|
||||
return ptr;
|
||||
}
|
||||
|
||||
/* Size in bytes of a bump-heap object, derived from its mist header.
   The Cheney scan in ctx_gc uses this to step from one copied object to
   the next, so a wrong size here desynchronizes the whole scan. */
static size_t gc_object_size(void *ptr) {
    /* All mist objects have header at offset 8 (after JSGCObjectHeader) */
    objhdr_t hdr = *((objhdr_t *)((char *)ptr + 8));
    uint8_t type = objhdr_type(hdr);
    uint64_t cap = objhdr_cap56(hdr);

    switch (type) {
    case OBJ_ARRAY: {
        /* JSArray + inline values array. Cap is element capacity. */
        size_t values_size = sizeof(JSValue) * cap;
        return sizeof(JSArray) + values_size;
    }
    case OBJ_TEXT: {
        /* JSString: header + pad + hdr + length + packed chars */
        size_t word_count = (cap + 1) / 2;
        return sizeof(JSString) + word_count * sizeof(uint64_t);
    }
    case OBJ_RECORD: {
        /* JSRecord + inline tab. Cap is mask, so tab size is mask+1 entries. */
        size_t tab_size = sizeof(JSRecordEntry) * (cap + 1);
        return sizeof(JSRecord) + tab_size;
    }
    case OBJ_FUNCTION:
    case OBJ_CODE:
    case OBJ_FRAME:
    case OBJ_BLOB:
    default:
        /* Conservative estimate for unknown types.
           NOTE(review): if any of these types is ever bump-allocated with
           an actual size other than 64, the Cheney scan will advance by
           the wrong amount and misread the next object — confirm these
           types never land on the bump heap, or give them real sizes. */
        return 64;
    }
}
|
||||
|
||||
/* Copy a single value into to-space if it is a movable from-space heap
   pointer, returning the (possibly relocated) value. Immediates,
   stone-arena data, and allocations outside the bump heap pass through
   unchanged. Installs a forwarding header in the old location so later
   references resolve to the same copy. */
static JSValue gc_copy_value(JSContext *ctx, JSValue v, uint8_t **to_free, uint8_t *to_end) {
    if (!JS_IsPtr(v)) return v; /* Immediate value - no copy needed */

    void *ptr = JS_VALUE_GET_PTR(v);
    if (is_stone_ptr(ctx, ptr)) return v; /* Stone memory - don't copy */

    /* Check if pointer is in current heap (not external allocation).
       During GC, heap_base/heap_end still describe from-space (ctx_gc
       updates them only after the scan), so values already relocated to
       to-space also take this branch and are kept as-is. */
    if ((uint8_t *)ptr < ctx->heap_base || (uint8_t *)ptr >= ctx->heap_end) {
        /* External allocation (using js_malloc) - keep reference */
        return v;
    }

    /* Get object header (at offset 8 after JSGCObjectHeader) */
    objhdr_t *hdr_ptr = (objhdr_t *)((char *)ptr + 8);
    objhdr_t hdr = *hdr_ptr;

    /* Already forwarded? */
    if (objhdr_type(hdr) == OBJ_FORWARD) {
        /* Extract forwarding address from cap56 field */
        void *new_ptr = (void *)(uintptr_t)objhdr_cap56(hdr);
        return JS_MKPTR(JS_TAG_PTR, new_ptr);
    }

    /* Copy object to new space.
       NOTE(review): 'size' is not rounded up to 8 here, while ctx_alloc
       8-aligns every allocation — confirm gc_object_size always returns
       8-aligned sizes, or to-space objects end up misaligned.
       NOTE(review): the returned value always carries JS_TAG_PTR even if
       'v' had a different tag (JS_NewArrayLen creates JS_TAG_OBJECT
       values) — verify these tags are interchangeable here. */
    size_t size = gc_object_size(ptr);
    if (*to_free + size > to_end) {
        /* Should not happen if we sized new block correctly */
        return v;
    }

    void *new_ptr = *to_free;
    memcpy(new_ptr, ptr, size);
    *to_free += size;

    /* Install forwarding pointer in old location */
    *hdr_ptr = objhdr_make((uint64_t)(uintptr_t)new_ptr, OBJ_FORWARD, 0, 0, 0, 0);

    return JS_MKPTR(JS_TAG_PTR, new_ptr);
}
|
||||
|
||||
/* Cheney scan step: visit one copied (to-space) object and relocate
   every heap reference it holds, growing to-space via *to_free.
   NOTE(review): after gc_copy_value memcpy'd the struct into to-space,
   arr->values / rec->tab still hold the FROM-SPACE addresses of their
   inline storage — the writes below go through those stale pointers.
   If values/tab are always inline (as JS_NewArrayLen and the record
   allocator suggest), they should be rebased to the new struct address
   before scanning — confirm and fix. */
static void gc_scan_object(JSContext *ctx, void *ptr, uint8_t **to_free, uint8_t *to_end) {
    /* Mist header lives at offset 8 (after JSGCObjectHeader). */
    objhdr_t hdr = *((objhdr_t *)((char *)ptr + 8));
    uint8_t type = objhdr_type(hdr);

    switch (type) {
    case OBJ_ARRAY: {
        JSArray *arr = (JSArray *)ptr;
        /* Only 'len' live slots are scanned, not the full capacity. */
        for (uint32_t i = 0; i < arr->len; i++) {
            arr->values[i] = gc_copy_value(ctx, arr->values[i], to_free, to_end);
        }
        break;
    }
    case OBJ_RECORD: {
        JSRecord *rec = (JSRecord *)ptr;
        /* Copy prototype */
        if (rec->proto) {
            JSValue proto_val = JS_MKPTR(JS_TAG_PTR, rec->proto);
            proto_val = gc_copy_value(ctx, proto_val, to_free, to_end);
            rec->proto = (JSRecord *)JS_VALUE_GET_PTR(proto_val);
        }
        /* Copy table entries (cap56 holds the hash mask; table has mask+1 slots). */
        uint32_t mask = (uint32_t)objhdr_cap56(rec->mist_hdr);
        for (uint32_t i = 0; i <= mask; i++) {
            JSValue k = rec->tab[i].key;
            if (!rec_key_is_empty(k) && !rec_key_is_tomb(k)) {
                rec->tab[i].key = gc_copy_value(ctx, k, to_free, to_end);
                rec->tab[i].val = gc_copy_value(ctx, rec->tab[i].val, to_free, to_end);
            }
        }
        break;
    }
    case OBJ_TEXT:
    case OBJ_BLOB:
    case OBJ_CODE:
        /* No internal references to scan */
        break;
    default:
        break;
    }
}
|
||||
|
||||
/* Cheney copying GC: allocate a to-space block from the runtime's buddy
   allocator, copy all reachable objects into it, free the old block, and
   swap the context's bump-heap pointers to the new block. Doubles the
   next block size when a collection recovers less than 10% of the heap.
   Returns 0 on success, -1 if no to-space block can be obtained. */
static int ctx_gc(JSContext *ctx) {
    JSRuntime *rt = ctx->rt;
    size_t old_used = ctx->heap_free - ctx->heap_base;

    /* Request new block from runtime (preferred size first, then the
       current size as a fallback). */
    size_t new_size = ctx->next_block_size;
    uint8_t *new_block = buddy_alloc(&rt->buddy, new_size);
    if (!new_block) {
        /* Try with same size */
        new_size = ctx->current_block_size;
        new_block = buddy_alloc(&rt->buddy, new_size);
        if (!new_block) return -1;
    }

    uint8_t *to_base = new_block;
    uint8_t *to_free = new_block;
    uint8_t *to_end = new_block + new_size;

    /* Copy roots: global object, class prototypes, exception, etc.
       heap_base/heap_end intentionally still describe from-space here;
       gc_copy_value relies on that to classify pointers. */
    ctx->global_obj = gc_copy_value(ctx, ctx->global_obj, &to_free, to_end);
    ctx->global_var_obj = gc_copy_value(ctx, ctx->global_var_obj, &to_free, to_end);
    ctx->function_proto = gc_copy_value(ctx, ctx->function_proto, &to_free, to_end);
    ctx->array_ctor = gc_copy_value(ctx, ctx->array_ctor, &to_free, to_end);
    ctx->regexp_ctor = gc_copy_value(ctx, ctx->regexp_ctor, &to_free, to_end);
    ctx->throw_type_error = gc_copy_value(ctx, ctx->throw_type_error, &to_free, to_end);
    ctx->eval_obj = gc_copy_value(ctx, ctx->eval_obj, &to_free, to_end);
    ctx->array_proto_values = gc_copy_value(ctx, ctx->array_proto_values, &to_free, to_end);

    for (int i = 0; i < JS_NATIVE_ERROR_COUNT; i++) {
        ctx->native_error_proto[i] = gc_copy_value(ctx, ctx->native_error_proto[i], &to_free, to_end);
    }

    /* Copy class prototypes */
    for (int i = 0; i < rt->class_count; i++) {
        ctx->class_proto[i] = gc_copy_value(ctx, ctx->class_proto[i], &to_free, to_end);
    }

    /* Copy value stack */
    for (int i = 0; i < ctx->value_stack_top; i++) {
        ctx->value_stack[i] = gc_copy_value(ctx, ctx->value_stack[i], &to_free, to_end);
    }

    /* Copy frame stack references.
       NOTE(review): this loop is inclusive (<= frame_stack_top) while the
       value-stack loop is exclusive — confirm the two tops use different
       conventions (index-of-top vs. count) and this is intentional. */
    for (int i = 0; i <= ctx->frame_stack_top; i++) {
        struct VMFrame *frame = &ctx->frame_stack[i];
        frame->cur_func = gc_copy_value(ctx, frame->cur_func, &to_free, to_end);
        frame->this_obj = gc_copy_value(ctx, frame->this_obj, &to_free, to_end);
    }

    /* Cheney scan: scan copied objects to find more references.
       Relies on gc_object_size exactly matching each object's copied
       extent — see the NOTE on its default case. */
    uint8_t *scan = to_base;
    while (scan < to_free) {
        gc_scan_object(ctx, scan, &to_free, to_end);
        scan += gc_object_size(scan);
    }

    /* Return old block to buddy allocator */
    buddy_free(&rt->buddy, ctx->heap_base, ctx->current_block_size);

    /* Update context with new block */
    size_t new_used = to_free - to_base;
    size_t recovered = old_used > new_used ? old_used - new_used : 0;

    ctx->heap_base = to_base;
    ctx->heap_free = to_free;
    ctx->heap_end = to_end;
    ctx->current_block_size = new_size;

    /* If <10% recovered, double next block size for future allocations
       (capped at the buddy pool's maximum block size). */
    if (old_used > 0 && recovered < old_used / 10) {
        size_t doubled = new_size * 2;
        if (doubled <= (1ULL << BUDDY_MAX_ORDER)) {
            ctx->next_block_size = doubled;
        }
    }

#ifdef DUMP_GC
    printf("GC: old_used=%zu new_used=%zu recovered=%zu new_block_size=%zu\n",
           old_used, new_used, recovered, new_size);
#endif

    return 0;
}
|
||||
|
||||
JSRuntime *JS_NewRuntime2 (const JSMallocFunctions *mf, void *opaque) {
|
||||
JSRuntime *rt;
|
||||
JSMallocState ms;
|
||||
@@ -2393,45 +2836,44 @@ int JS_GetStripInfo (JSRuntime *rt) { return rt->strip_flags; }
|
||||
|
||||
/* atom_get_free/is_free/set_free removed */
|
||||
|
||||
/* Note: the string contents are uninitialized */
|
||||
static JSString *js_alloc_string_rt (JSRuntime *rt, int max_len) {
|
||||
/* Allocate a string using bump allocation from context heap.
|
||||
Note: the string contents are uninitialized */
|
||||
static JSString *js_alloc_string (JSContext *ctx, int max_len) {
|
||||
JSString *str;
|
||||
size_t size;
|
||||
/* Allocate packed UTF-32: 2 chars per 64-bit word.
|
||||
Calculate number of 64-bit words needed: (max_len + 1) / 2 */
|
||||
/* Allocate packed UTF-32: 2 chars per 64-bit word. */
|
||||
size_t data_words = (max_len + 1) / 2;
|
||||
/* Add explicit null termination capacity just in case?
|
||||
Let's allocate one extra word if odd to be safe?
|
||||
(max_len + 1)/2 covers max_len chars.
|
||||
If max_len=1, data_words=1. u[0] holds char 0 and 1.
|
||||
*/
|
||||
size = sizeof (JSString) + data_words * sizeof (uint64_t);
|
||||
size_t size = sizeof(JSString) + data_words * sizeof(uint64_t);
|
||||
|
||||
str = ctx_alloc(ctx, size);
|
||||
if (unlikely(!str)) {
|
||||
JS_ThrowOutOfMemory(ctx);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
str = js_malloc_rt (rt, size);
|
||||
if (unlikely (!str)) return NULL;
|
||||
str->header.ref_count = 1;
|
||||
str->pad = 0;
|
||||
/* Initialize objhdr_t with OBJ_TEXT type so JS_IsString can detect it */
|
||||
str->hdr = objhdr_make (max_len, OBJ_TEXT, false, false, false, false);
|
||||
str->hdr = objhdr_make(max_len, OBJ_TEXT, false, false, false, false);
|
||||
str->len = max_len;
|
||||
// Initialize content to 0?
|
||||
// memset(str->u, 0, data_words * sizeof(uint64_t));
|
||||
// Should usually be initialized by caller.
|
||||
|
||||
#ifdef DUMP_LEAKS
|
||||
list_add_tail (&str->link, &rt->string_list);
|
||||
#endif
|
||||
return str;
|
||||
}
|
||||
|
||||
static JSString *js_alloc_string (JSContext *ctx, int max_len) {
|
||||
JSString *p;
|
||||
p = js_alloc_string_rt (ctx->rt, max_len);
|
||||
if (unlikely (!p)) {
|
||||
JS_ThrowOutOfMemory (ctx);
|
||||
return NULL;
|
||||
}
|
||||
return p;
|
||||
/* Legacy function for runtime-level string allocation (for stone arena use) */
|
||||
static JSString *js_alloc_string_rt (JSRuntime *rt, int max_len) {
|
||||
/* For stone arena strings, use js_malloc_rt since they're not GC'd */
|
||||
size_t data_words = (max_len + 1) / 2;
|
||||
size_t size = sizeof(JSString) + data_words * sizeof(uint64_t);
|
||||
|
||||
JSString *str = js_malloc_rt(rt, size);
|
||||
if (unlikely(!str)) return NULL;
|
||||
|
||||
str->header.ref_count = 1;
|
||||
str->pad = 0;
|
||||
str->hdr = objhdr_make(max_len, OBJ_TEXT, false, false, false, false);
|
||||
str->len = max_len;
|
||||
|
||||
return str;
|
||||
}
|
||||
|
||||
/* same as JS_FreeValueRT() but faster */
|
||||
@@ -2550,6 +2992,9 @@ void JS_FreeRuntime (JSRuntime *rt) {
|
||||
/* Stone text tables and arena are now per-context, freed in JS_FreeContext
|
||||
*/
|
||||
|
||||
/* Destroy buddy allocator */
|
||||
buddy_destroy(&rt->buddy);
|
||||
|
||||
{
|
||||
JSMallocState ms = rt->malloc_state;
|
||||
rt->mf.js_free (&ms, rt);
|
||||
@@ -2618,6 +3063,22 @@ JSContext *JS_NewContextRaw (JSRuntime *rt) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
/* Allocate initial heap block for bump allocation */
|
||||
ctx->current_block_size = 1ULL << BUDDY_MIN_ORDER; /* 64KB */
|
||||
ctx->next_block_size = ctx->current_block_size;
|
||||
ctx->heap_base = buddy_alloc(&rt->buddy, ctx->current_block_size);
|
||||
if (!ctx->heap_base) {
|
||||
js_free_rt (rt, ctx->st_text_hash);
|
||||
js_free_rt (rt, ctx->st_text_array);
|
||||
js_free_rt (rt, ctx->value_stack);
|
||||
js_free_rt (rt, ctx->frame_stack);
|
||||
js_free_rt (rt, ctx->class_proto);
|
||||
js_free_rt (rt, ctx);
|
||||
return NULL;
|
||||
}
|
||||
ctx->heap_free = ctx->heap_base;
|
||||
ctx->heap_end = ctx->heap_base + ctx->current_block_size;
|
||||
|
||||
JS_AddIntrinsicBasicObjects (ctx);
|
||||
rt->js = ctx;
|
||||
return ctx;
|
||||
@@ -2744,6 +3205,14 @@ void JS_FreeContext (JSContext *ctx) {
|
||||
js_free_rt (rt, ctx->st_text_hash);
|
||||
js_free_rt (rt, ctx->st_text_array);
|
||||
|
||||
/* Free heap block back to buddy allocator */
|
||||
if (ctx->heap_base) {
|
||||
buddy_free(&rt->buddy, ctx->heap_base, ctx->current_block_size);
|
||||
ctx->heap_base = NULL;
|
||||
ctx->heap_free = NULL;
|
||||
ctx->heap_end = NULL;
|
||||
}
|
||||
|
||||
list_del (&ctx->link);
|
||||
remove_gc_object (&ctx->header);
|
||||
js_free_rt (ctx->rt, ctx);
|
||||
@@ -3623,33 +4092,34 @@ JSValue JS_NewObjectProto (JSContext *ctx, JSValue proto) {
|
||||
return JS_NewObjectProtoClass (ctx, proto, JS_CLASS_OBJECT);
|
||||
}
|
||||
|
||||
/* Create an intrinsic array with specified capacity */
|
||||
/* Create an intrinsic array with specified capacity
|
||||
Uses bump allocation - values are inline after the JSArray struct */
|
||||
JSValue JS_NewArrayLen (JSContext *ctx, uint32_t len) {
|
||||
JSRuntime *rt = ctx->rt;
|
||||
JSArray *arr;
|
||||
uint32_t cap;
|
||||
|
||||
arr = js_mallocz (ctx, sizeof (JSArray));
|
||||
if (!arr) return JS_EXCEPTION;
|
||||
arr->header.ref_count = 1;
|
||||
arr->len = len;
|
||||
cap = len > 0 ? len : JS_ARRAY_INITIAL_SIZE;
|
||||
arr->mist_hdr = objhdr_make (cap, OBJ_ARRAY, false, false, false, false);
|
||||
if (cap > 0) {
|
||||
arr->values = js_mallocz (ctx, sizeof (JSValue) * cap);
|
||||
if (!arr->values) {
|
||||
js_free (ctx, arr);
|
||||
return JS_EXCEPTION;
|
||||
}
|
||||
/* Initialize all values to null */
|
||||
for (uint32_t i = 0; i < len; i++) {
|
||||
arr->values[i] = JS_NULL;
|
||||
}
|
||||
} else {
|
||||
arr->values = NULL;
|
||||
size_t values_size = sizeof(JSValue) * cap;
|
||||
size_t total_size = sizeof(JSArray) + values_size;
|
||||
|
||||
arr = ctx_alloc(ctx, total_size);
|
||||
if (!arr) return JS_EXCEPTION;
|
||||
|
||||
arr->header.ref_count = 1;
|
||||
arr->header.gc_obj_type = JS_GC_OBJ_TYPE_ARRAY;
|
||||
arr->len = len;
|
||||
arr->mist_hdr = objhdr_make(cap, OBJ_ARRAY, false, false, false, false);
|
||||
arr->free_mark = 0;
|
||||
|
||||
/* Values are inline right after the struct */
|
||||
arr->values = (JSValue *)((uint8_t *)arr + sizeof(JSArray));
|
||||
|
||||
/* Initialize all values to null */
|
||||
for (uint32_t i = 0; i < cap; i++) {
|
||||
arr->values[i] = JS_NULL;
|
||||
}
|
||||
add_gc_object (rt, &arr->header, JS_GC_OBJ_TYPE_ARRAY);
|
||||
return JS_MKPTR (JS_TAG_OBJECT, arr);
|
||||
|
||||
return JS_MKPTR(JS_TAG_OBJECT, arr);
|
||||
}
|
||||
|
||||
/* Convenience wrapper: create an empty array. */
JSValue JS_NewArray(JSContext *ctx) {
    return JS_NewArrayLen(ctx, 0);
}
|
||||
|
||||
Reference in New Issue
Block a user