This commit is contained in:
2026-02-02 06:15:10 -06:00
parent b23b918f97
commit c356fe462d
2 changed files with 198 additions and 28 deletions

View File

@@ -226,7 +226,7 @@ static inline JS_BOOL JS_VALUE_IS_NUMBER (JSValue v) {
/* Forward declarations for memory functions (now declared in quickjs.h) */
/* js_realloc2 is internal only */
static void *js_realloc2 (JSContext *ctx, void **pptr, size_t size, size_t *pslack);
static void *js_realloc2 (JSContext *ctx, void *ptr, size_t size, size_t *pslack);
/* Forward declaration for string_get */
static inline int string_get (const JSText *p, int idx);
@@ -874,6 +874,165 @@ void JS_DeleteGCRef (JSContext *ctx, JSGCRef *ref) {
}
}
/* ============================================================
Stone Arena Functions
============================================================ */
/* Stone page for large allocations.
   Pages form a singly linked list headed at ctx->st_pages; each page
   backs exactly one oversized st_alloc() request and is released only
   by st_free_all(). */
typedef struct StonePage {
struct StonePage *next; /* next page in the ctx->st_pages list */
size_t size; /* usable size of data[] in bytes (already aligned by st_alloc) */
uint8_t data[]; /* C99 flexible array member: the allocation itself */
} StonePage;
/* Initial stone text intern hash table size.  Must be a power of two:
   st_text_resize derives its linear-probe mask as (size - 1). */
#define ST_TEXT_INITIAL_SIZE 256
/* Allocate from stone arena (permanent, immutable memory).
 *
 * Rounds `bytes` up to `align` (which must be a nonzero power of two),
 * then serves the request from the bump region [stone_free, stone_end)
 * when it fits.  Oversized (or arena-less) requests fall back to a
 * dedicated StonePage from the runtime allocator, linked into
 * ctx->st_pages so st_free_all() can release them.
 *
 * Returns NULL on allocation failure or size_t overflow.  Stone memory
 * is never freed individually.
 * NOTE(review): assumes stone_base/stone_free start suitably aligned
 * for every `align` passed here - confirm at arena creation. */
static void *st_alloc (JSContext *ctx, size_t bytes, size_t align) {
    JSRuntime *rt = ctx->rt;
    /* The mask trick below requires a nonzero power-of-two alignment */
    assert (align != 0 && (align & (align - 1)) == 0);
    /* Guard the round-up against size_t wraparound */
    if (bytes > SIZE_MAX - (align - 1))
        return NULL;
    /* Align the request */
    bytes = (bytes + align - 1) & ~(align - 1);
    /* Fast path: bump-allocate from the stone arena.  Compare against
       the remaining space rather than forming stone_free + bytes, which
       could overflow past the end of the address space. */
    if (ctx->stone_base && bytes <= (size_t)(ctx->stone_end - ctx->stone_free)) {
        void *ptr = ctx->stone_free;
        ctx->stone_free += bytes;
        return ptr;
    }
    /* Slow path: no stone arena, or not enough space - give this
       request its own page.  Guard the header addition against wrap. */
    if (bytes > SIZE_MAX - sizeof (StonePage))
        return NULL;
    size_t page_size = sizeof (StonePage) + bytes;
    StonePage *page = rt->mf.js_malloc (&rt->malloc_state, page_size);
    if (!page) return NULL;
    page->next = ctx->st_pages;
    page->size = bytes;
    ctx->st_pages = page;
    return page->data;
}
/* Release every page on the stone page list and reset the list head.
   Called at context teardown; individual pages are never freed. */
static void st_free_all (JSContext *ctx) {
    JSRuntime *rt = ctx->rt;
    StonePage *next;
    for (StonePage *p = ctx->st_pages; p != NULL; p = next) {
        next = p->next; /* read before the node is released */
        rt->mf.js_free (&rt->malloc_state, p);
    }
    ctx->st_pages = NULL;
}
/* Resize (or initially allocate) the stone text intern hash table.
 *
 * The table is open-addressed with linear probing; st_text_size is
 * always a power of two, so (size - 1) works as the probe mask.
 * st_text_hash[slot] holds a 1-based id into st_text_array (0 marks an
 * empty slot), which is why the array carries one extra element.
 * Grows by doubling; ctx->st_text_resize caches the 75% load-factor
 * threshold at which the next grow should trigger.
 *
 * Returns 0 on success, -1 on allocation failure; on failure the old
 * tables are left fully intact. */
static int st_text_resize (JSContext *ctx) {
JSRuntime *rt = ctx->rt;
uint32_t new_size, new_resize;
uint32_t *new_hash;
JSText **new_array;
if (ctx->st_text_size == 0) {
/* Initial allocation */
new_size = ST_TEXT_INITIAL_SIZE;
} else {
/* Double the size */
new_size = ctx->st_text_size * 2;
}
new_resize = new_size * 3 / 4; /* 75% load factor */
/* Allocate new hash table (use runtime malloc, not bump allocator) */
new_hash = rt->mf.js_malloc (&rt->malloc_state, new_size * sizeof (uint32_t));
if (!new_hash) return -1;
memset (new_hash, 0, new_size * sizeof (uint32_t));
/* Allocate new text array (one extra for 1-based indexing) */
new_array = rt->mf.js_malloc (&rt->malloc_state, (new_size + 1) * sizeof (JSText *));
if (!new_array) {
/* Keep the old tables valid on this failure path */
rt->mf.js_free (&rt->malloc_state, new_hash);
return -1;
}
memset (new_array, 0, (new_size + 1) * sizeof (JSText *));
/* Rehash existing entries; ids are assumed dense in [1, st_text_count] */
if (ctx->st_text_count > 0) {
uint32_t mask = new_size - 1;
for (uint32_t id = 1; id <= ctx->st_text_count; id++) {
JSText *text = ctx->st_text_array[id];
new_array[id] = text;
/* Compute hash and find slot.  Linear probing: the freshly doubled
   table is under 75% full, so an empty slot is guaranteed and the
   loop terminates. */
uint64_t hash = get_text_hash (text);
uint32_t slot = hash & mask;
while (new_hash[slot] != 0)
slot = (slot + 1) & mask;
new_hash[slot] = id;
}
}
/* Free old tables */
if (ctx->st_text_hash) rt->mf.js_free (&rt->malloc_state, ctx->st_text_hash);
if (ctx->st_text_array) rt->mf.js_free (&rt->malloc_state, ctx->st_text_array);
ctx->st_text_hash = new_hash;
ctx->st_text_array = new_array;
ctx->st_text_size = new_size;
ctx->st_text_resize = new_resize;
return 0;
}
/* Realloc with slack reporting (for bump allocator).
 *
 * There is no in-place grow under the bump allocator: a fresh block of
 * `size` bytes is allocated and the old contents copied.  The old block
 * is not freed here; its memory is reclaimed by the copying GC.
 * *pslack is always set to 0 (no over-allocation is reported; pslack
 * must be non-NULL).  Returns NULL on allocation failure, leaving the
 * old block untouched.
 *
 * NOTE(review): the memcpy below copies `size` bytes - the NEW size -
 * out of the old allocation.  When the caller is growing the block
 * (the usual case, see js_realloc_array), this reads past the end of
 * the old allocation, which is undefined behavior.  It should copy the
 * old block's size, but that size is not tracked here; confirm the
 * "caller ensures safety" claim or add old-size bookkeeping. */
static void *js_realloc2 (JSContext *ctx, void *ptr, size_t size, size_t *pslack) {
void *new_ptr;
/* Align size to 8 bytes */
size = (size + 7) & ~7;
if (!ptr) {
/* New allocation */
new_ptr = js_malloc (ctx, size);
if (!new_ptr) return NULL;
*pslack = 0;
return new_ptr;
}
/* Bump allocator: allocate new space and copy.
   For simplicity, we allocate new space and copy. */
new_ptr = js_malloc (ctx, size);
if (!new_ptr) return NULL;
/* Copy old data (caller ensures safety) */
memcpy (new_ptr, ptr, size);
*pslack = 0;
return new_ptr;
}
/* ============================================================
GC Public API
============================================================ */
/* Forward declaration for ctx_gc */
static int ctx_gc (JSContext *ctx);
/* Run the collector on the runtime's context.  Does nothing when the
   runtime has no context attached. */
void JS_RunGC (JSRuntime *rt) {
    JSContext *ctx = rt->js;
    if (ctx == NULL)
        return;
    ctx_gc (ctx);
}
/* JS_MarkValue: kept for API compatibility only.  The copying GC
   discovers live objects by tracing from roots, so explicit per-value
   marking is unnecessary and this function intentionally does nothing. */
void JS_MarkValue (JSRuntime *rt, JSValue val, JS_MarkFunc *mark_func) {
    /* all parameters deliberately unused */
    (void)rt; (void)val; (void)mark_func;
}
/* Helper to check if a pointer is in stone memory */
static inline int is_stone_ptr (JSContext *ctx, void *ptr) {
return (uint8_t *)ptr >= ctx->stone_base && (uint8_t *)ptr < ctx->stone_end;
@@ -1507,7 +1666,11 @@ static JSValue js_new_function (JSContext *ctx, JSFunctionKind kind);
static void mark_function_children (JSRuntime *rt, JSFunction *func, JS_MarkFunc *mark_func);
static void mark_function_children_decref (JSRuntime *rt, JSFunction *func);
static void gc_decref_child (JSRuntime *rt, JSGCObjectHeader *p);
/* Legacy GC decref hook - intentionally a no-op under the copying GC;
   the signature is kept so existing call sites compile unchanged. */
static void gc_decref_child (JSRuntime *rt, JSGCObjectHeader *p) {
    (void)rt; (void)p; /* deliberately unused */
}
/* Forward declarations for intrinsics (now declared in quickjs.h) */
@@ -1688,7 +1851,7 @@ static no_inline int js_realloc_array (JSContext *ctx, void **parray, int elem_s
void *new_array;
/* XXX: potential arithmetic overflow */
new_size = max_int (req_size, *psize * 3 / 2);
new_array = js_realloc2 (ctx, parray, new_size * elem_size, &slack);
new_array = js_realloc2 (ctx, *parray, new_size * elem_size, &slack);
if (!new_array) return -1;
new_size += slack / elem_size;
*psize = new_size;
@@ -2161,8 +2324,8 @@ static int ctx_gc (JSContext *ctx) {
ctx->heap_end = to_end;
ctx->current_block_size = new_size;
/* If <10% recovered, double next block size for future allocations */
if (old_used > 0 && recovered < old_used / 10) {
/* If <20% recovered, double next block size for future allocations */
if (old_used > 0 && recovered < old_used / 5) {
size_t doubled = new_size * 2;
if (doubled <= (1ULL << BUDDY_MAX_ORDER)) {
ctx->next_block_size = doubled;
@@ -2349,9 +2512,20 @@ static JSText *js_alloc_string (JSContext *ctx, int max_len) {
return str;
}
static inline void JS_MarkValueEdgeEx (JSRuntime *rt, JSValue val, JSGCObjectHeader *parent, const char *edge, uint32_t atom, int32_t prop_index);
/* Legacy edge-marking hook - a no-op with the copying GC, which traces
   whole object graphs from roots instead of marking individual edges.
   Retained so callers need no changes. */
static inline void JS_MarkValueEdgeEx (JSRuntime *rt, JSValue val, JSGCObjectHeader *parent, const char *edge, uint32_t atom, int32_t prop_index) {
    /* every parameter is deliberately unused */
    (void)rt; (void)val; (void)parent;
    (void)edge; (void)atom; (void)prop_index;
}
/* Legacy edge-marking wrapper - effectively a no-op with the copying GC. */
static inline void JS_MarkValueEdge (JSRuntime *rt, JSValue val, JSGCObjectHeader *parent, const char *edge) {
/* NOTE(review): this span appears to hold both the old body (the call)
   and its replacement (the (void) casts) from a diff.  The callee is
   itself a no-op so behavior is unchanged either way, but one of the
   two forms should be removed. */
JS_MarkValueEdgeEx (rt, val, parent, edge, 0, -1);
(void)rt;
(void)val;
(void)parent;
(void)edge;
}
void JS_SetRuntimeInfo (JSRuntime *rt, const char *s) {
@@ -3100,7 +3274,9 @@ fail:
}
/* Release a C string previously returned to the caller. */
void JS_FreeCString (JSContext *ctx, const char *ptr) {
/* NOTE(review): this js_free call contradicts the comment below - with
   the copying GC the string memory should be reclaimed by the GC and
   this function should be a pure no-op.  This span appears to contain
   both the old and new bodies from a diff; confirm which is intended. */
js_free (ctx, (void *)ptr);
/* With copying GC, no explicit freeing needed */
(void)ctx;
(void)ptr;
}
/* return < 0, 0 or > 0 */
@@ -3445,18 +3621,12 @@ JSValue JS_NewCFunctionData (JSContext *ctx, JSCFunctionData *func, int length,
/* free_property is defined earlier as a stub since shapes are removed */
/* Release a variable reference.
   NOTE(review): this span appears to contain BOTH the old refcounting
   body and its copying-GC replacement from a diff - as written the
   braces do not balance.  Only the final form should remain: unlink
   from var_ref_link while still attached; the memory itself is
   reclaimed by the copying GC. */
static void free_var_ref (JSRuntime *rt, JSVarRef *var_ref) {
if (var_ref) {
assert (var_ref->header.ref_count > 0);
if (--var_ref->header.ref_count == 0) {
if (var_ref->is_detached) {
JS_FreeValueRT (rt, var_ref->value);
} else {
list_del (&var_ref->var_ref_link); /* still on the stack */
}
remove_gc_object (&var_ref->header);
js_free_rt (rt, var_ref);
}
/* With copying GC, var_ref lifetime is managed by the GC.
   We still do bookkeeping for the var_ref_link list. */
if (var_ref && !var_ref->is_detached) {
list_del (&var_ref->var_ref_link);
}
/* The actual memory is reclaimed by the copying GC */
}
/* Finalizer for JS_CLASS_VAR_REF_OBJECT - frees the stored var_ref */
@@ -6820,14 +6990,14 @@ static JSVarRef *get_var_ref (JSContext *ctx, JSStackFrame *sf, int var_idx, BOO
list_for_each (el, &sf->var_ref_list) {
var_ref = list_entry (el, JSVarRef, var_ref_link);
if (var_ref->pvalue == pvalue) {
var_ref->header.ref_count++;
/* With copying GC, no need to increment ref_count */
return var_ref;
}
}
/* create a new one */
var_ref = js_malloc (ctx, sizeof (JSVarRef));
if (!var_ref) return NULL;
var_ref->header.ref_count = 1;
/* ref_count not needed with copying GC */
add_gc_object (ctx->rt, &var_ref->header, JS_GC_OBJ_TYPE_VAR_REF);
var_ref->is_detached = FALSE;
list_add_tail (&var_ref->var_ref_link, &sf->var_ref_list);
@@ -6856,7 +7026,7 @@ static JSValue js_closure2 (JSContext *ctx, JSValue func_obj, JSFunctionBytecode
if (!var_ref) goto fail;
} else {
var_ref = cur_var_refs[cv->var_idx];
var_ref->header.ref_count++;
/* No ref_count increment needed with copying GC */
}
var_refs[i] = var_ref;
}
@@ -7930,7 +8100,7 @@ restart:
if (opcode == OP_make_var_ref_ref) {
var_ref = var_refs[idx];
var_ref->header.ref_count++;
/* No ref_count increment needed with copying GC */
} else {
var_ref = get_var_ref (ctx, sf, idx, opcode == OP_make_arg_ref);
if (!var_ref) goto exception;
@@ -19601,11 +19771,10 @@ int JS_GetLength (JSContext *ctx, JSValue obj, int64_t *pres) {
}
/* Release a call-argument value array.
   NOTE(review): this span contains both the old explicit-free body and
   its copying-GC no-op replacement from a diff; as written it still
   frees each value and the array, contradicting the comment below.
   Only one of the two forms should remain. */
static void free_arg_list (JSContext *ctx, JSValue *tab, uint32_t len) {
uint32_t i;
for (i = 0; i < len; i++) {
JS_FreeValue (ctx, tab[i]);
}
js_free (ctx, tab);
/* With copying GC, no explicit freeing needed - GC handles it */
(void)ctx;
(void)tab;
(void)len;
}
/* XXX: should use ValueArray */

View File

@@ -398,6 +398,7 @@ void JS_FreeRuntime (JSRuntime *rt);
void *JS_GetRuntimeOpaque (JSRuntime *rt);
void JS_SetRuntimeOpaque (JSRuntime *rt, void *opaque);
typedef void JS_MarkFunc (JSRuntime *rt, JSGCObjectHeader *gp);
/* JS_MarkValue is a no-op with copying GC (values are traced from roots) */
void JS_MarkValue (JSRuntime *rt, JSValue val, JS_MarkFunc *mark_func);
void JS_RunGC (JSRuntime *rt);
JS_BOOL JS_IsLiveObject (JSRuntime *rt, JSValue obj);