Rename stone pool to constant pool — a more appropriate name

This commit is contained in:
2026-02-13 05:17:22 -06:00
parent 1df6553577
commit b4d42fb83d
4 changed files with 3116 additions and 3114 deletions

View File

@@ -113,6 +113,19 @@ var v = a[] // pop: v is 3, a is [1, 2]
- `packages/` — core packages
- `Makefile` — build system (`make` to rebuild, `make bootstrap` for first build)
## Testing
After any C runtime changes, run all three test suites before considering the work done:
```
make # rebuild
./cell --dev vm_suite # VM-level tests (641 tests)
./cell --dev test_suite # language-level tests (493 tests)
./cell --dev fuzz # fuzzer (100 iterations)
```
All three must pass with 0 failures.
## Documentation
The `docs/` folder is the single source of truth. The website at `website/` mounts it via Hugo. Key files:

File diff suppressed because it is too large Load Diff

View File

@@ -1081,18 +1081,18 @@ struct JSContext {
size_t current_block_size; /* current block size (64KB initially) */
size_t next_block_size; /* doubles if <10% recovered after GC */
/* Stone arena - permanent immutable allocations */
uint8_t *stone_base; /* stone arena base */
uint8_t *stone_free; /* stone arena bump pointer */
uint8_t *stone_end; /* stone arena end */
/* Constant text pool — compilation constants */
uint8_t *ct_base; /* pool base */
uint8_t *ct_free; /* pool bump pointer */
uint8_t *ct_end; /* pool end */
/* Stone text intern table */
void *st_pages; /* stone page list for large allocations */
uint32_t *st_text_hash; /* hash table (slot -> id) */
JSText **st_text_array; /* array of JSText pointers indexed by id */
uint32_t st_text_size; /* hash table size (power of 2) */
uint32_t st_text_count; /* number of interned texts */
uint32_t st_text_resize; /* threshold for resize */
/* Constant text intern table */
void *ct_pages; /* page list for large allocations */
uint32_t *ct_hash; /* hash table (slot -> id) */
JSText **ct_array; /* array of JSText pointers indexed by id */
uint32_t ct_size; /* hash table size (power of 2) */
uint32_t ct_count; /* number of interned texts */
uint32_t ct_resize_threshold; /* threshold for resize */
uint16_t binary_object_count;
int binary_object_size;
@@ -1161,22 +1161,22 @@ static inline const char *JS_KeyGetStr (JSContext *ctx, char *buf, size_t buf_si
/* ============================================================
Stone Arena Functions
Constant Text Pool Functions
============================================================ */
/* Stone page for large allocations */
typedef struct StonePage {
struct StonePage *next;
/* Constant text page for large allocations */
typedef struct CTPage {
struct CTPage *next;
size_t size;
uint8_t data[];
} StonePage;
} CTPage;
/* Initial stone text table size */
#define ST_TEXT_INITIAL_SIZE 256
/* Initial constant text table size */
#define CT_INITIAL_SIZE 256
/* Allocate from stone arena (permanent, immutable memory) */
/* Allocate from constant text pool */
/* Resize the stone text intern hash table */
/* Resize the constant text intern hash table */
/* Realloc with slack reporting (for bump allocator)
WARNING: This function is NOT GC-safe! The caller must protect the source
@@ -1192,9 +1192,9 @@ static int ctx_gc (JSContext *ctx, int allow_grow, size_t alloc_size);
/* JS_MarkValue - mark a value during GC traversal.
With copying GC, this is a no-op as we discover live objects by tracing. */
/* Helper to check if a pointer is in stone memory */
static inline int is_stone_ptr (JSContext *ctx, void *ptr) {
return (uint8_t *)ptr >= ctx->stone_base && (uint8_t *)ptr < ctx->stone_end;
/* Helper to check if a pointer is in constant text pool memory */
static inline int is_ct_ptr (JSContext *ctx, void *ptr) {
return (uint8_t *)ptr >= ctx->ct_base && (uint8_t *)ptr < ctx->ct_end;
}
/* Intern a UTF-32 string as a stone text, returning a JSValue string */
@@ -1554,9 +1554,9 @@ JSText *pretext_concat_value (JSContext *ctx, JSText *s, JSValue v);
JSValue js_new_blob (JSContext *ctx, blob *b);
/* Functions from header region (defined in runtime.c) */
void *js_realloc (JSContext *ctx, void *ptr, size_t size);
void *st_alloc (JSContext *ctx, size_t bytes, size_t align);
void st_free_all (JSContext *ctx);
int st_text_resize (JSContext *ctx);
void *ct_alloc (JSContext *ctx, size_t bytes, size_t align);
void ct_free_all (JSContext *ctx);
int ct_resize (JSContext *ctx);
JSValue intern_text_to_value (JSContext *ctx, const uint32_t *utf32, uint32_t len);
JSValue js_key_new (JSContext *ctx, const char *str);
JSValue js_key_new_len (JSContext *ctx, const char *str, size_t len);

View File

@@ -134,53 +134,53 @@ void JS_DeleteGCRef (JSContext *ctx, JSGCRef *ref) {
}
}
void *st_alloc (JSContext *ctx, size_t bytes, size_t align) {
void *ct_alloc (JSContext *ctx, size_t bytes, size_t align) {
/* Align the request */
bytes = (bytes + align - 1) & ~(align - 1);
/* Check if we have space in the stone arena */
if (ctx->stone_base && ctx->stone_free + bytes <= ctx->stone_end) {
void *ptr = ctx->stone_free;
ctx->stone_free += bytes;
/* Check if we have space in the constant text pool */
if (ctx->ct_base && ctx->ct_free + bytes <= ctx->ct_end) {
void *ptr = ctx->ct_free;
ctx->ct_free += bytes;
return ptr;
}
/* No stone arena or not enough space - allocate a page */
size_t page_size = sizeof (StonePage) + bytes;
StonePage *page = malloc (page_size);
/* No pool space - allocate a page */
size_t page_size = sizeof (CTPage) + bytes;
CTPage *page = malloc (page_size);
if (!page) return NULL;
page->next = ctx->st_pages;
page->next = ctx->ct_pages;
page->size = bytes;
ctx->st_pages = page;
ctx->ct_pages = page;
return page->data;
}
/* Free all stone arena pages */
void st_free_all (JSContext *ctx) {
StonePage *page = ctx->st_pages;
/* Free all constant text pool pages */
void ct_free_all (JSContext *ctx) {
CTPage *page = ctx->ct_pages;
while (page) {
StonePage *next = page->next;
CTPage *next = page->next;
free (page);
page = next;
}
ctx->st_pages = NULL;
ctx->ct_pages = NULL;
}
int st_text_resize (JSContext *ctx) {
uint32_t new_size, new_resize;
int ct_resize (JSContext *ctx) {
uint32_t new_size, new_threshold;
uint32_t *new_hash;
JSText **new_array;
if (ctx->st_text_size == 0) {
if (ctx->ct_size == 0) {
/* Initial allocation */
new_size = ST_TEXT_INITIAL_SIZE;
new_size = CT_INITIAL_SIZE;
} else {
/* Double the size */
new_size = ctx->st_text_size * 2;
new_size = ctx->ct_size * 2;
}
new_resize = new_size * 3 / 4; /* 75% load factor */
new_threshold = new_size * 3 / 4; /* 75% load factor */
/* Allocate new hash table (use runtime malloc, not bump allocator) */
new_hash = js_malloc_rt (new_size * sizeof (uint32_t));
@@ -196,10 +196,10 @@ int st_text_resize (JSContext *ctx) {
memset (new_array, 0, (new_size + 1) * sizeof (JSText *));
/* Rehash existing entries */
if (ctx->st_text_count > 0) {
if (ctx->ct_count > 0) {
uint32_t mask = new_size - 1;
for (uint32_t id = 1; id <= ctx->st_text_count; id++) {
JSText *text = ctx->st_text_array[id];
for (uint32_t id = 1; id <= ctx->ct_count; id++) {
JSText *text = ctx->ct_array[id];
new_array[id] = text;
/* Compute hash and find slot */
@@ -212,13 +212,13 @@ int st_text_resize (JSContext *ctx) {
}
/* Free old tables */
if (ctx->st_text_hash) js_free_rt (ctx->st_text_hash);
if (ctx->st_text_array) js_free_rt (ctx->st_text_array);
if (ctx->ct_hash) js_free_rt (ctx->ct_hash);
if (ctx->ct_array) js_free_rt (ctx->ct_array);
ctx->st_text_hash = new_hash;
ctx->st_text_array = new_array;
ctx->st_text_size = new_size;
ctx->st_text_resize = new_resize;
ctx->ct_hash = new_hash;
ctx->ct_array = new_array;
ctx->ct_size = new_size;
ctx->ct_resize_threshold = new_threshold;
return 0;
}
@@ -251,12 +251,12 @@ JSValue intern_text_to_value (JSContext *ctx, const uint32_t *utf32, uint32_t le
uint64_t hash = fash64_hash_words (packed, word_count, len);
/* Look up in hash table */
uint32_t mask = ctx->st_text_size - 1;
uint32_t mask = ctx->ct_size - 1;
uint32_t slot = hash & mask;
while (ctx->st_text_hash[slot] != 0) {
uint32_t id = ctx->st_text_hash[slot];
JSText *existing = ctx->st_text_array[id];
while (ctx->ct_hash[slot] != 0) {
uint32_t id = ctx->ct_hash[slot];
JSText *existing = ctx->ct_array[id];
if (text_equal (existing, packed, len)) {
/* Found existing entry */
return JS_MKPTR (existing);
@@ -265,18 +265,18 @@ JSValue intern_text_to_value (JSContext *ctx, const uint32_t *utf32, uint32_t le
}
/* Not found - create new entry */
if (ctx->st_text_count >= ctx->st_text_resize) {
if (st_text_resize (ctx) < 0) return JS_NULL; /* OOM */
if (ctx->ct_count >= ctx->ct_resize_threshold) {
if (ct_resize (ctx) < 0) return JS_NULL; /* OOM */
/* Recompute slot after resize */
mask = ctx->st_text_size - 1;
mask = ctx->ct_size - 1;
slot = hash & mask;
while (ctx->st_text_hash[slot] != 0)
while (ctx->ct_hash[slot] != 0)
slot = (slot + 1) & mask;
}
/* Allocate JSText in stone arena */
/* Allocate JSText in constant text pool */
size_t text_size = sizeof (JSText) + word_count * sizeof (uint64_t);
JSText *text = st_alloc (ctx, text_size, 8);
JSText *text = ct_alloc (ctx, text_size, 8);
if (!text) return JS_NULL; /* OOM */
/* Initialize the text */
@@ -285,9 +285,9 @@ JSValue intern_text_to_value (JSContext *ctx, const uint32_t *utf32, uint32_t le
memcpy (text->packed, packed, word_count * sizeof (uint64_t));
/* Add to intern table */
uint32_t new_id = ++ctx->st_text_count;
ctx->st_text_hash[slot] = new_id;
ctx->st_text_array[new_id] = text;
uint32_t new_id = ++ctx->ct_count;
ctx->ct_hash[slot] = new_id;
ctx->ct_array[new_id] = text;
return JS_MKPTR (text);
}
@@ -875,22 +875,11 @@ PPretext *ppretext_append_int (PPretext *p, int n) {
}
/* Convert a JSValue string to a property key.
For immediates, returns the value as-is (can be used directly as keys).
For heap strings, returns interned version. */
Returns the value as-is for immediates and heap texts.
No allocation — cannot trigger GC. */
JSValue js_key_from_string (JSContext *ctx, JSValue val) {
if (MIST_IsImmediateASCII (val)) {
return val; /* Immediates can be used directly as keys */
}
if (JS_IsText (val)) {
JSText *p = JS_VALUE_GET_TEXT (val);
int64_t len = JSText_len (p); /* Use JSText_len which checks header for stoned text */
/* Extract UTF-32 characters and intern */
uint32_t *utf32_buf = alloca (len * sizeof (uint32_t));
for (int64_t i = 0; i < len; i++) {
utf32_buf[i] = string_get (p, i);
}
return intern_text_to_value (ctx, utf32_buf, len);
}
if (MIST_IsImmediateASCII (val)) return val;
if (JS_IsText (val)) return val;
return JS_NULL;
}
@@ -1019,7 +1008,7 @@ JSValue gc_copy_value (JSContext *ctx, JSValue v, uint8_t *from_base, uint8_t *f
for (;;) {
void *ptr = JS_VALUE_GET_PTR (v);
if (is_stone_ptr (ctx, ptr)) return v;
if (is_ct_ptr (ctx, ptr)) return v;
if (!ptr_in_range (ptr, from_base, from_end)) return v;
@@ -1198,7 +1187,7 @@ int ctx_gc (JSContext *ctx, int allow_grow, size_t alloc_size) {
void *gptr = JS_VALUE_GET_PTR(ctx->global_obj);
printf(" ptr=%p in_from=%d is_stone=%d\n", gptr,
((uint8_t*)gptr >= from_base && (uint8_t*)gptr < from_end),
is_stone_ptr(ctx, gptr));
is_ct_ptr(ctx, gptr));
fflush(stdout);
}
#endif
@@ -1414,14 +1403,14 @@ JSContext *JS_NewContextRawWithHeapSize (JSRuntime *rt, size_t heap_size) {
/* Initialize per-context execution state (moved from JSRuntime) */
ctx->current_exception = JS_NULL;
/* Initialize stone text intern table */
ctx->st_pages = NULL;
ctx->st_text_array = NULL;
ctx->st_text_hash = NULL;
ctx->st_text_count = 0;
ctx->st_text_size = 0;
ctx->st_text_resize = 0;
if (st_text_resize (ctx) < 0) {
/* Initialize constant text intern table */
ctx->ct_pages = NULL;
ctx->ct_array = NULL;
ctx->ct_hash = NULL;
ctx->ct_count = 0;
ctx->ct_size = 0;
ctx->ct_resize_threshold = 0;
if (ct_resize (ctx) < 0) {
js_free_rt (ctx->class_array);
js_free_rt (ctx->class_proto);
js_free_rt (ctx);
@@ -1433,8 +1422,8 @@ JSContext *JS_NewContextRawWithHeapSize (JSRuntime *rt, size_t heap_size) {
ctx->next_block_size = ctx->current_block_size;
ctx->heap_base = heap_block_alloc (rt, ctx->current_block_size);
if (!ctx->heap_base) {
js_free_rt (ctx->st_text_hash);
js_free_rt (ctx->st_text_array);
js_free_rt (ctx->ct_hash);
js_free_rt (ctx->ct_array);
js_free_rt (ctx->class_array);
js_free_rt (ctx->class_proto);
js_free_rt (ctx);
@@ -1501,10 +1490,10 @@ void JS_FreeContext (JSContext *ctx) {
js_free_rt (ctx->class_array);
js_free_rt (ctx->class_proto);
/* Free stone arena and intern table */
st_free_all (ctx);
js_free_rt (ctx->st_text_hash);
js_free_rt (ctx->st_text_array);
/* Free constant text pool and intern table */
ct_free_all (ctx);
js_free_rt (ctx->ct_hash);
js_free_rt (ctx->ct_array);
/* Free heap block */
if (ctx->heap_base) {
@@ -2734,10 +2723,10 @@ JSValue JS_GetPropertyValue (JSContext *ctx, JSValue this_obj, JSValue prop) {
if (JS_IsArray (this_obj)) {
return JS_NULL;
}
/* Create an interned key from the string */
/* Use text directly as key */
JSValue key = js_key_from_string (ctx, prop);
ret = JS_GetProperty (ctx, this_obj, key);
/* key is interned or immediate, no need to free */
/* key is the original text or immediate */
return ret;
}
@@ -2919,7 +2908,7 @@ JSValue JS_GetPropertyKey (JSContext *ctx, JSValue this_obj, JSValue key) {
return rec_get (ctx, rec, key);
}
/* For string keys, create an interned key and use JS_GetProperty */
/* For string keys, use text directly as key */
if (JS_IsText (key)) {
JSValue prop_key = js_key_from_string (ctx, key);
return JS_GetProperty (ctx, this_obj, prop_key);
@@ -2946,7 +2935,7 @@ int JS_SetPropertyKey (JSContext *ctx, JSValue this_obj, JSValue key, JSValue va
return rec_set_own (ctx, rec, key, val);
}
/* For string keys, create an interned key */
/* For string keys, use text directly as key */
if (JS_IsText (key)) {
JSValue prop_key = js_key_from_string (ctx, key);
return JS_SetPropertyInternal (ctx, this_obj, prop_key, val);
@@ -2956,8 +2945,8 @@ int JS_SetPropertyKey (JSContext *ctx, JSValue this_obj, JSValue key, JSValue va
return JS_SetPropertyInternal (ctx, this_obj, key, val);
}
/* GC-SAFE for record keys (no allocations).
String keys call js_key_from_string then JS_HasProperty which re-chases. */
/* GC-SAFE: no allocations.
String keys pass through js_key_from_string (no interning). */
int JS_HasPropertyKey (JSContext *ctx, JSValue obj, JSValue key) {
if (JS_IsRecord (key)) {
if (!JS_IsRecord (obj)) return FALSE;
@@ -2970,7 +2959,7 @@ int JS_HasPropertyKey (JSContext *ctx, JSValue obj, JSValue key) {
return FALSE;
}
/* For string keys, create an interned key */
/* For string keys, use text directly as key */
if (JS_IsText (key)) {
JSValue prop_key = js_key_from_string (ctx, key);
return JS_HasProperty (ctx, obj, prop_key);
@@ -2998,7 +2987,7 @@ int JS_DeletePropertyKey (JSContext *ctx, JSValue obj, JSValue key) {
return TRUE;
}
/* For string keys, create an interned key */
/* For string keys, use text directly as key */
if (JS_IsText (key)) {
JSValue prop_key = js_key_from_string (ctx, key);
return JS_DeleteProperty (ctx, obj, prop_key);
@@ -8281,7 +8270,7 @@ static JSValue js_cell_array_sort (JSContext *ctx, JSValue this_val, int argc, J
} else if (JS_VALUE_GET_TAG (argv[1]) == JS_TAG_STRING
|| JS_VALUE_GET_TAG (argv[1]) == JS_TAG_STRING_IMM) {
JSValue prop_key = js_key_from_string (ctx, argv[1]);
/* Re-read items[i] after allocation (js_key_from_string can trigger GC) */
/* Re-read items[i] (js_key_from_string no longer allocates, but re-read is harmless) */
arr = JS_VALUE_GET_ARRAY (arr_ref.val);
items[i] = arr->values[i];
key = JS_GetProperty (ctx, items[i], prop_key);
@@ -8564,7 +8553,7 @@ static JSValue js_cell_object (JSContext *ctx, JSValue this_val, int argc, JSVal
if (i >= (int)keys->len) break;
JSValue key = keys->values[i];
if (JS_IsText (key)) {
/* Use JSValue key directly - create interned key */
/* Use text directly as key */
JSValue prop_key = js_key_from_string (ctx, key);
JSValue val;
if (argc < 2 || JS_IsNull (func_ref.val)) {