Merge branch 'mach_memory' into mcode_streamline

This commit is contained in:
2026-02-13 05:58:21 -06:00
9 changed files with 12961 additions and 12368 deletions

View File

@@ -113,6 +113,19 @@ var v = a[] // pop: v is 3, a is [1, 2]
- `packages/` — core packages
- `Makefile` — build system (`make` to rebuild, `make bootstrap` for first build)
## Testing
After any C runtime changes, run all three test suites before considering the work done:
```
make # rebuild
./cell --dev vm_suite # VM-level tests (641 tests)
./cell --dev test suite # language-level tests (493 tests)
./cell --dev fuzz # fuzzer (100 iterations)
```
All three must pass with 0 failures.
## Documentation
The `docs/` folder is the single source of truth. The website at `website/` mounts it via Hugo. Key files:

View File

@@ -1425,7 +1425,9 @@ var parse = function(tokens, src, filename, tokenizer) {
vars: [],
in_loop: opts.in_loop == true,
function_nr: fn_nr,
is_function_scope: opts.is_func == true
is_function_scope: opts.is_func == true,
func_node: null,
has_inner_func: false
}
}
@@ -1478,6 +1480,15 @@ var parse = function(tokens, src, filename, tokenizer) {
return false
}
// Walk the scope chain outward and return the nearest enclosing
// function scope, or null when no function scope encloses `scope`.
var sem_find_func_scope = function(scope) {
var cursor = scope
for (;;) {
if (cursor == null) return null
if (cursor.is_function_scope) return cursor
cursor = cursor.parent
}
}
// Register `name` as a used intrinsic, skipping duplicates.
// NOTE(review): relies on `find`, `push`, and the `intrinsics` list from
// the enclosing parse closure — not visible in this hunk; confirm in file.
var sem_add_intrinsic = function(name) {
if (find(intrinsics, name) == null) push(intrinsics, name)
}
@@ -1675,6 +1686,7 @@ var parse = function(tokens, src, filename, tokenizer) {
var pname = null
var def_val = null
var sr = null
var enclosing = null
if (_assign_kinds[kind] == true) {
sem_check_assign_target(scope, expr.left)
@@ -1776,9 +1788,12 @@ var parse = function(tokens, src, filename, tokenizer) {
}
if (kind == "function") {
enclosing = sem_find_func_scope(scope)
if (enclosing != null) enclosing.has_inner_func = true
fn_nr_val = expr.function_nr
if (fn_nr_val == null) fn_nr_val = scope.function_nr
fn_scope = make_scope(scope, fn_nr_val, {is_func: true})
fn_scope.func_node = expr
expr.outer = scope.function_nr
i = 0
while (i < length(expr.list)) {
@@ -1859,6 +1874,8 @@ var parse = function(tokens, src, filename, tokenizer) {
var pname = null
var def_val = null
var sr = null
var enclosing = null
var func_scope = null
var tt = null
if (kind == "var_list") {
@@ -1981,7 +1998,26 @@ var parse = function(tokens, src, filename, tokenizer) {
return null
}
if (kind == "return" || kind == "go") {
if (kind == "go") {
sem_check_expr(scope, stmt.expression)
if (stmt.expression == null || stmt.expression.kind != "(") {
sem_error(stmt, "'go' must be followed by a function call")
} else {
func_scope = sem_find_func_scope(scope)
if (func_scope != null && func_scope.func_node != null) {
if (func_scope.func_node.disruption != null) {
sem_error(stmt, "cannot use 'go' in a function with a disruption clause")
}
if (func_scope.has_inner_func) {
sem_error(stmt, "cannot use 'go' in a function that defines inner functions")
}
}
stmt.tail = true
}
return null
}
if (kind == "return") {
sem_check_expr(scope, stmt.expression)
if (stmt.expression != null && stmt.expression.kind == "(") {
stmt.tail = true
@@ -2022,11 +2058,14 @@ var parse = function(tokens, src, filename, tokenizer) {
}
if (kind == "function") {
enclosing = sem_find_func_scope(scope)
if (enclosing != null) enclosing.has_inner_func = true
name = stmt.name
if (name != null) sem_add_var(scope, name, {make: "function", fn_nr: scope.function_nr})
fn_nr_val = stmt.function_nr
if (fn_nr_val == null) fn_nr_val = scope.function_nr
fn_scope = make_scope(scope, fn_nr_val, {is_func: true})
fn_scope.func_node = stmt
stmt.outer = scope.function_nr
i = 0
while (i < length(stmt.list)) {

File diff suppressed because it is too large Load Diff

View File

@@ -650,7 +650,8 @@ var qbe_emit = function(ir, qbe) {
continue
}
if (op == "goinvoke") {
emit(` call $cell_rt_goinvoke(l %ctx, l ${s(a1)})`)
emit(` %_goret =l call $cell_rt_goinvoke(l %ctx, l ${s(a1)})`)
emit(` ret %_goret`)
continue
}

File diff suppressed because it is too large Load Diff

View File

@@ -1702,17 +1702,68 @@ JSValue JS_CallRegisterVM(JSContext *ctx, JSCodeRegister *code,
break;
}
case MACH_GOINVOKE: {
/* Async invoke: call and discard result */
/* Tail call: replace current frame with callee */
JSFrameRegister *fr = (JSFrameRegister *)JS_VALUE_GET_PTR(frame->slots[a]);
int nr = (int)objhdr_cap56(fr->hdr);
int c_argc = (nr >= 2) ? nr - 2 : 0;
ctx->reg_current_frame = frame_ref.val;
ctx->current_register_pc = pc > 0 ? pc - 1 : 0;
JSValue ret = JS_Call(ctx, fr->function, fr->slots[0],
c_argc, &fr->slots[1]);
frame = (JSFrameRegister *)JS_VALUE_GET_PTR(frame_ref.val);
ctx->reg_current_frame = JS_NULL;
if (JS_IsException(ret)) goto disrupt;
JSValue fn_val = fr->function;
JSFunction *fn = JS_VALUE_GET_FUNCTION(fn_val);
if (fn->kind == JS_FUNC_KIND_REGISTER) {
/* Register function: tail call by replacing current frame */
JSCodeRegister *fn_code = fn->u.reg.code;
JSFrameRegister *new_frame = alloc_frame_register(ctx, fn_code->nr_slots);
if (!new_frame) {
frame = (JSFrameRegister *)JS_VALUE_GET_PTR(frame_ref.val);
goto disrupt;
}
frame = (JSFrameRegister *)JS_VALUE_GET_PTR(frame_ref.val);
fr = (JSFrameRegister *)JS_VALUE_GET_PTR(frame->slots[a]);
fn_val = fr->function;
fn = JS_VALUE_GET_FUNCTION(fn_val);
fn_code = fn->u.reg.code;
new_frame->function = fn_val;
/* Copy this + args from call frame to new frame */
int copy_count = (c_argc < fn_code->arity) ? c_argc : fn_code->arity;
new_frame->slots[0] = fr->slots[0]; /* this */
for (int i = 0; i < copy_count; i++)
new_frame->slots[1 + i] = fr->slots[1 + i];
/* Tail call: callee returns to OUR caller, not to us */
new_frame->caller = frame->caller;
frame->caller = JS_NULL; /* detach current frame */
/* Switch to callee */
frame = new_frame;
frame_ref.val = JS_MKPTR(frame);
code = fn_code;
env = fn->u.reg.env_record;
pc = code->entry_point;
} else {
/* C/bytecode function: call it, then return result to our caller */
ctx->reg_current_frame = frame_ref.val;
ctx->current_register_pc = pc > 0 ? pc - 1 : 0;
JSValue ret;
if (fn->kind == JS_FUNC_KIND_C)
ret = js_call_c_function(ctx, fn_val, fr->slots[0], c_argc, &fr->slots[1]);
else
ret = JS_CallInternal(ctx, fn_val, fr->slots[0], c_argc, &fr->slots[1], 0);
frame = (JSFrameRegister *)JS_VALUE_GET_PTR(frame_ref.val);
ctx->reg_current_frame = JS_NULL;
if (JS_IsException(ret)) goto disrupt;
/* Tail-return: act like MACH_RETURN with the result */
result = ret;
if (JS_IsNull(frame->caller)) goto done;
JSFrameRegister *caller = (JSFrameRegister *)JS_VALUE_GET_PTR(frame->caller);
frame->caller = JS_NULL;
frame = caller;
frame_ref.val = JS_MKPTR(frame);
int ret_info = JS_VALUE_GET_INT(frame->address);
JSFunction *ret_fn = JS_VALUE_GET_FUNCTION(frame->function);
code = ret_fn->u.reg.code;
env = ret_fn->u.reg.env_record;
pc = ret_info >> 16;
int ret_slot = ret_info & 0xFFFF;
if (ret_slot != 0xFFFF) frame->slots[ret_slot] = ret;
}
break;
}

View File

@@ -351,8 +351,8 @@ JSValue cell_rt_goframe(JSContext *ctx, JSValue fn, int64_t nargs) {
return cell_rt_frame(ctx, fn, nargs);
}
void cell_rt_goinvoke(JSContext *ctx, JSValue frame_val) {
cell_rt_invoke(ctx, frame_val);
JSValue cell_rt_goinvoke(JSContext *ctx, JSValue frame_val) {
return cell_rt_invoke(ctx, frame_val);
}
/* --- Array push/pop --- */

View File

@@ -1081,18 +1081,18 @@ struct JSContext {
size_t current_block_size; /* current block size (64KB initially) */
size_t next_block_size; /* doubles if <10% recovered after GC */
/* Stone arena - permanent immutable allocations */
uint8_t *stone_base; /* stone arena base */
uint8_t *stone_free; /* stone arena bump pointer */
uint8_t *stone_end; /* stone arena end */
/* Constant text pool — compilation constants */
uint8_t *ct_base; /* pool base */
uint8_t *ct_free; /* pool bump pointer */
uint8_t *ct_end; /* pool end */
/* Stone text intern table */
void *st_pages; /* stone page list for large allocations */
uint32_t *st_text_hash; /* hash table (slot -> id) */
JSText **st_text_array; /* array of JSText pointers indexed by id */
uint32_t st_text_size; /* hash table size (power of 2) */
uint32_t st_text_count; /* number of interned texts */
uint32_t st_text_resize; /* threshold for resize */
/* Constant text intern table */
void *ct_pages; /* page list for large allocations */
uint32_t *ct_hash; /* hash table (slot -> id) */
JSText **ct_array; /* array of JSText pointers indexed by id */
uint32_t ct_size; /* hash table size (power of 2) */
uint32_t ct_count; /* number of interned texts */
uint32_t ct_resize_threshold; /* threshold for resize */
uint16_t binary_object_count;
int binary_object_size;
@@ -1161,22 +1161,22 @@ static inline const char *JS_KeyGetStr (JSContext *ctx, char *buf, size_t buf_si
/* ============================================================
Stone Arena Functions
Constant Text Pool Functions
============================================================ */
/* Stone page for large allocations */
typedef struct StonePage {
struct StonePage *next;
/* Constant text page for large allocations */
typedef struct CTPage {
struct CTPage *next;
size_t size;
uint8_t data[];
} StonePage;
} CTPage;
/* Initial stone text table size */
#define ST_TEXT_INITIAL_SIZE 256
/* Initial constant text table size */
#define CT_INITIAL_SIZE 256
/* Allocate from stone arena (permanent, immutable memory) */
/* Allocate from constant text pool */
/* Resize the stone text intern hash table */
/* Resize the constant text intern hash table */
/* Realloc with slack reporting (for bump allocator)
WARNING: This function is NOT GC-safe! The caller must protect the source
@@ -1192,9 +1192,9 @@ static int ctx_gc (JSContext *ctx, int allow_grow, size_t alloc_size);
/* JS_MarkValue - mark a value during GC traversal.
With copying GC, this is a no-op as we discover live objects by tracing. */
/* Helper to check if a pointer is in stone memory */
static inline int is_stone_ptr (JSContext *ctx, void *ptr) {
return (uint8_t *)ptr >= ctx->stone_base && (uint8_t *)ptr < ctx->stone_end;
/* Helper to check if a pointer is in constant text pool memory */
static inline int is_ct_ptr (JSContext *ctx, void *ptr) {
return (uint8_t *)ptr >= ctx->ct_base && (uint8_t *)ptr < ctx->ct_end;
}
/* Intern a UTF-32 string as a stone text, returning a JSValue string */
@@ -1554,9 +1554,9 @@ JSText *pretext_concat_value (JSContext *ctx, JSText *s, JSValue v);
JSValue js_new_blob (JSContext *ctx, blob *b);
/* Functions from header region (defined in runtime.c) */
void *js_realloc (JSContext *ctx, void *ptr, size_t size);
void *st_alloc (JSContext *ctx, size_t bytes, size_t align);
void st_free_all (JSContext *ctx);
int st_text_resize (JSContext *ctx);
void *ct_alloc (JSContext *ctx, size_t bytes, size_t align);
void ct_free_all (JSContext *ctx);
int ct_resize (JSContext *ctx);
JSValue intern_text_to_value (JSContext *ctx, const uint32_t *utf32, uint32_t len);
JSValue js_key_new (JSContext *ctx, const char *str);
JSValue js_key_new_len (JSContext *ctx, const char *str, size_t len);

View File

@@ -134,53 +134,53 @@ void JS_DeleteGCRef (JSContext *ctx, JSGCRef *ref) {
}
}
void *st_alloc (JSContext *ctx, size_t bytes, size_t align) {
void *ct_alloc (JSContext *ctx, size_t bytes, size_t align) {
/* Align the request */
bytes = (bytes + align - 1) & ~(align - 1);
/* Check if we have space in the stone arena */
if (ctx->stone_base && ctx->stone_free + bytes <= ctx->stone_end) {
void *ptr = ctx->stone_free;
ctx->stone_free += bytes;
/* Check if we have space in the constant text pool */
if (ctx->ct_base && ctx->ct_free + bytes <= ctx->ct_end) {
void *ptr = ctx->ct_free;
ctx->ct_free += bytes;
return ptr;
}
/* No stone arena or not enough space - allocate a page */
size_t page_size = sizeof (StonePage) + bytes;
StonePage *page = malloc (page_size);
/* No pool space - allocate a page */
size_t page_size = sizeof (CTPage) + bytes;
CTPage *page = malloc (page_size);
if (!page) return NULL;
page->next = ctx->st_pages;
page->next = ctx->ct_pages;
page->size = bytes;
ctx->st_pages = page;
ctx->ct_pages = page;
return page->data;
}
/* Free all stone arena pages */
void st_free_all (JSContext *ctx) {
StonePage *page = ctx->st_pages;
/* Free all constant text pool pages */
void ct_free_all (JSContext *ctx) {
CTPage *page = ctx->ct_pages;
while (page) {
StonePage *next = page->next;
CTPage *next = page->next;
free (page);
page = next;
}
ctx->st_pages = NULL;
ctx->ct_pages = NULL;
}
int st_text_resize (JSContext *ctx) {
uint32_t new_size, new_resize;
int ct_resize (JSContext *ctx) {
uint32_t new_size, new_threshold;
uint32_t *new_hash;
JSText **new_array;
if (ctx->st_text_size == 0) {
if (ctx->ct_size == 0) {
/* Initial allocation */
new_size = ST_TEXT_INITIAL_SIZE;
new_size = CT_INITIAL_SIZE;
} else {
/* Double the size */
new_size = ctx->st_text_size * 2;
new_size = ctx->ct_size * 2;
}
new_resize = new_size * 3 / 4; /* 75% load factor */
new_threshold = new_size * 3 / 4; /* 75% load factor */
/* Allocate new hash table (use runtime malloc, not bump allocator) */
new_hash = js_malloc_rt (new_size * sizeof (uint32_t));
@@ -196,10 +196,10 @@ int st_text_resize (JSContext *ctx) {
memset (new_array, 0, (new_size + 1) * sizeof (JSText *));
/* Rehash existing entries */
if (ctx->st_text_count > 0) {
if (ctx->ct_count > 0) {
uint32_t mask = new_size - 1;
for (uint32_t id = 1; id <= ctx->st_text_count; id++) {
JSText *text = ctx->st_text_array[id];
for (uint32_t id = 1; id <= ctx->ct_count; id++) {
JSText *text = ctx->ct_array[id];
new_array[id] = text;
/* Compute hash and find slot */
@@ -212,13 +212,13 @@ int st_text_resize (JSContext *ctx) {
}
/* Free old tables */
if (ctx->st_text_hash) js_free_rt (ctx->st_text_hash);
if (ctx->st_text_array) js_free_rt (ctx->st_text_array);
if (ctx->ct_hash) js_free_rt (ctx->ct_hash);
if (ctx->ct_array) js_free_rt (ctx->ct_array);
ctx->st_text_hash = new_hash;
ctx->st_text_array = new_array;
ctx->st_text_size = new_size;
ctx->st_text_resize = new_resize;
ctx->ct_hash = new_hash;
ctx->ct_array = new_array;
ctx->ct_size = new_size;
ctx->ct_resize_threshold = new_threshold;
return 0;
}
@@ -251,12 +251,12 @@ JSValue intern_text_to_value (JSContext *ctx, const uint32_t *utf32, uint32_t le
uint64_t hash = fash64_hash_words (packed, word_count, len);
/* Look up in hash table */
uint32_t mask = ctx->st_text_size - 1;
uint32_t mask = ctx->ct_size - 1;
uint32_t slot = hash & mask;
while (ctx->st_text_hash[slot] != 0) {
uint32_t id = ctx->st_text_hash[slot];
JSText *existing = ctx->st_text_array[id];
while (ctx->ct_hash[slot] != 0) {
uint32_t id = ctx->ct_hash[slot];
JSText *existing = ctx->ct_array[id];
if (text_equal (existing, packed, len)) {
/* Found existing entry */
return JS_MKPTR (existing);
@@ -265,18 +265,18 @@ JSValue intern_text_to_value (JSContext *ctx, const uint32_t *utf32, uint32_t le
}
/* Not found - create new entry */
if (ctx->st_text_count >= ctx->st_text_resize) {
if (st_text_resize (ctx) < 0) return JS_NULL; /* OOM */
if (ctx->ct_count >= ctx->ct_resize_threshold) {
if (ct_resize (ctx) < 0) return JS_NULL; /* OOM */
/* Recompute slot after resize */
mask = ctx->st_text_size - 1;
mask = ctx->ct_size - 1;
slot = hash & mask;
while (ctx->st_text_hash[slot] != 0)
while (ctx->ct_hash[slot] != 0)
slot = (slot + 1) & mask;
}
/* Allocate JSText in stone arena */
/* Allocate JSText in constant text pool */
size_t text_size = sizeof (JSText) + word_count * sizeof (uint64_t);
JSText *text = st_alloc (ctx, text_size, 8);
JSText *text = ct_alloc (ctx, text_size, 8);
if (!text) return JS_NULL; /* OOM */
/* Initialize the text */
@@ -285,9 +285,9 @@ JSValue intern_text_to_value (JSContext *ctx, const uint32_t *utf32, uint32_t le
memcpy (text->packed, packed, word_count * sizeof (uint64_t));
/* Add to intern table */
uint32_t new_id = ++ctx->st_text_count;
ctx->st_text_hash[slot] = new_id;
ctx->st_text_array[new_id] = text;
uint32_t new_id = ++ctx->ct_count;
ctx->ct_hash[slot] = new_id;
ctx->ct_array[new_id] = text;
return JS_MKPTR (text);
}
@@ -875,22 +875,11 @@ PPretext *ppretext_append_int (PPretext *p, int n) {
}
/* Convert a JSValue string to a property key.
For immediates, returns the value as-is (can be used directly as keys).
For heap strings, returns interned version. */
Returns the value as-is for immediates and heap texts.
No allocation — cannot trigger GC. */
JSValue js_key_from_string (JSContext *ctx, JSValue val) {
if (MIST_IsImmediateASCII (val)) {
return val; /* Immediates can be used directly as keys */
}
if (JS_IsText (val)) {
JSText *p = JS_VALUE_GET_TEXT (val);
int64_t len = JSText_len (p); /* Use JSText_len which checks header for stoned text */
/* Extract UTF-32 characters and intern */
uint32_t *utf32_buf = alloca (len * sizeof (uint32_t));
for (int64_t i = 0; i < len; i++) {
utf32_buf[i] = string_get (p, i);
}
return intern_text_to_value (ctx, utf32_buf, len);
}
if (MIST_IsImmediateASCII (val)) return val;
if (JS_IsText (val)) return val;
return JS_NULL;
}
@@ -1019,7 +1008,7 @@ JSValue gc_copy_value (JSContext *ctx, JSValue v, uint8_t *from_base, uint8_t *f
for (;;) {
void *ptr = JS_VALUE_GET_PTR (v);
if (is_stone_ptr (ctx, ptr)) return v;
if (is_ct_ptr (ctx, ptr)) return v;
if (!ptr_in_range (ptr, from_base, from_end)) return v;
@@ -1198,7 +1187,7 @@ int ctx_gc (JSContext *ctx, int allow_grow, size_t alloc_size) {
void *gptr = JS_VALUE_GET_PTR(ctx->global_obj);
printf(" ptr=%p in_from=%d is_stone=%d\n", gptr,
((uint8_t*)gptr >= from_base && (uint8_t*)gptr < from_end),
is_stone_ptr(ctx, gptr));
is_ct_ptr(ctx, gptr));
fflush(stdout);
}
#endif
@@ -1414,14 +1403,14 @@ JSContext *JS_NewContextRawWithHeapSize (JSRuntime *rt, size_t heap_size) {
/* Initialize per-context execution state (moved from JSRuntime) */
ctx->current_exception = JS_NULL;
/* Initialize stone text intern table */
ctx->st_pages = NULL;
ctx->st_text_array = NULL;
ctx->st_text_hash = NULL;
ctx->st_text_count = 0;
ctx->st_text_size = 0;
ctx->st_text_resize = 0;
if (st_text_resize (ctx) < 0) {
/* Initialize constant text intern table */
ctx->ct_pages = NULL;
ctx->ct_array = NULL;
ctx->ct_hash = NULL;
ctx->ct_count = 0;
ctx->ct_size = 0;
ctx->ct_resize_threshold = 0;
if (ct_resize (ctx) < 0) {
js_free_rt (ctx->class_array);
js_free_rt (ctx->class_proto);
js_free_rt (ctx);
@@ -1433,8 +1422,8 @@ JSContext *JS_NewContextRawWithHeapSize (JSRuntime *rt, size_t heap_size) {
ctx->next_block_size = ctx->current_block_size;
ctx->heap_base = heap_block_alloc (rt, ctx->current_block_size);
if (!ctx->heap_base) {
js_free_rt (ctx->st_text_hash);
js_free_rt (ctx->st_text_array);
js_free_rt (ctx->ct_hash);
js_free_rt (ctx->ct_array);
js_free_rt (ctx->class_array);
js_free_rt (ctx->class_proto);
js_free_rt (ctx);
@@ -1501,10 +1490,10 @@ void JS_FreeContext (JSContext *ctx) {
js_free_rt (ctx->class_array);
js_free_rt (ctx->class_proto);
/* Free stone arena and intern table */
st_free_all (ctx);
js_free_rt (ctx->st_text_hash);
js_free_rt (ctx->st_text_array);
/* Free constant text pool and intern table */
ct_free_all (ctx);
js_free_rt (ctx->ct_hash);
js_free_rt (ctx->ct_array);
/* Free heap block */
if (ctx->heap_base) {
@@ -2734,10 +2723,10 @@ JSValue JS_GetPropertyValue (JSContext *ctx, JSValue this_obj, JSValue prop) {
if (JS_IsArray (this_obj)) {
return JS_NULL;
}
/* Create an interned key from the string */
/* Use text directly as key */
JSValue key = js_key_from_string (ctx, prop);
ret = JS_GetProperty (ctx, this_obj, key);
/* key is interned or immediate, no need to free */
/* key is the original text or immediate */
return ret;
}
@@ -2919,7 +2908,7 @@ JSValue JS_GetPropertyKey (JSContext *ctx, JSValue this_obj, JSValue key) {
return rec_get (ctx, rec, key);
}
/* For string keys, create an interned key and use JS_GetProperty */
/* For string keys, use text directly as key */
if (JS_IsText (key)) {
JSValue prop_key = js_key_from_string (ctx, key);
return JS_GetProperty (ctx, this_obj, prop_key);
@@ -2946,7 +2935,7 @@ int JS_SetPropertyKey (JSContext *ctx, JSValue this_obj, JSValue key, JSValue va
return rec_set_own (ctx, rec, key, val);
}
/* For string keys, create an interned key */
/* For string keys, use text directly as key */
if (JS_IsText (key)) {
JSValue prop_key = js_key_from_string (ctx, key);
return JS_SetPropertyInternal (ctx, this_obj, prop_key, val);
@@ -2956,8 +2945,8 @@ int JS_SetPropertyKey (JSContext *ctx, JSValue this_obj, JSValue key, JSValue va
return JS_SetPropertyInternal (ctx, this_obj, key, val);
}
/* GC-SAFE for record keys (no allocations).
String keys call js_key_from_string then JS_HasProperty which re-chases. */
/* GC-SAFE: no allocations.
String keys pass through js_key_from_string (no interning). */
int JS_HasPropertyKey (JSContext *ctx, JSValue obj, JSValue key) {
if (JS_IsRecord (key)) {
if (!JS_IsRecord (obj)) return FALSE;
@@ -2970,7 +2959,7 @@ int JS_HasPropertyKey (JSContext *ctx, JSValue obj, JSValue key) {
return FALSE;
}
/* For string keys, create an interned key */
/* For string keys, use text directly as key */
if (JS_IsText (key)) {
JSValue prop_key = js_key_from_string (ctx, key);
return JS_HasProperty (ctx, obj, prop_key);
@@ -2998,7 +2987,7 @@ int JS_DeletePropertyKey (JSContext *ctx, JSValue obj, JSValue key) {
return TRUE;
}
/* For string keys, create an interned key */
/* For string keys, use text directly as key */
if (JS_IsText (key)) {
JSValue prop_key = js_key_from_string (ctx, key);
return JS_DeleteProperty (ctx, obj, prop_key);
@@ -8281,7 +8270,7 @@ static JSValue js_cell_array_sort (JSContext *ctx, JSValue this_val, int argc, J
} else if (JS_VALUE_GET_TAG (argv[1]) == JS_TAG_STRING
|| JS_VALUE_GET_TAG (argv[1]) == JS_TAG_STRING_IMM) {
JSValue prop_key = js_key_from_string (ctx, argv[1]);
/* Re-read items[i] after allocation (js_key_from_string can trigger GC) */
/* Re-read items[i] (js_key_from_string no longer allocates, but re-read is harmless) */
arr = JS_VALUE_GET_ARRAY (arr_ref.val);
items[i] = arr->values[i];
key = JS_GetProperty (ctx, items[i], prop_key);
@@ -8564,7 +8553,7 @@ static JSValue js_cell_object (JSContext *ctx, JSValue this_val, int argc, JSVal
if (i >= (int)keys->len) break;
JSValue key = keys->values[i];
if (JS_IsText (key)) {
/* Use JSValue key directly - create interned key */
/* Use text directly as key */
JSValue prop_key = js_key_from_string (ctx, key);
JSValue val;
if (argc < 2 || JS_IsNull (func_ref.val)) {