plan

gc_plan.md
@@ -11,13 +11,18 @@ no cycle detection - just bump allocation and copying live objects when memory f
```
JSRuntime (256 MB pool)
├── Buddy allocator for block management
├── Class definitions (shared across contexts)
└── JSContext #1 (actor)
    ├── Current block (64KB initially)
    ├── heap_base: start of block
    ├── heap_free: bump pointer
    ├── heap_free: bump pointer (total used = heap_free - heap_base)
    ├── Text interning table (per-context, rebuilt on GC)
    └── On memory pressure: request new block, copy live objects, return old block
```

**Key principle**: Each JSContext (actor) owns its own memory. Nothing is stored in JSRuntime
except the buddy allocator and class definitions. There is no runtime-level string arena.

## Memory Model (from docs/memory.md)

### Object Header (objhdr_t - 64 bits)
@@ -25,19 +30,62 @@ JSRuntime (256 MB pool)
[56 bits: capacity] [1 bit: R flag] [3 bits: reserved] [1 bit: stone] [3 bits: type]
```

All heap objects start with just `objhdr_t`. No `JSGCObjectHeader`, no ref counts.
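
A minimal sketch of the accessors this plan relies on (`objhdr_type`, `objhdr_s`, `objhdr_cap56`, `objhdr_make`), assuming the type field sits in the low 3 bits and capacity in the top 56; the exact bit positions and the mapping of `objhdr_make`'s four boolean arguments are assumptions, not taken from docs/memory.md:

```c
#include <stdint.h>
#include <stdbool.h>

typedef uint64_t objhdr_t;

static inline uint8_t  objhdr_type(objhdr_t h)  { return (uint8_t)(h & 0x7); }   /* low 3 bits */
static inline bool     objhdr_s(objhdr_t h)     { return (h >> 3) & 1; }         /* stone bit */
static inline bool     objhdr_r(objhdr_t h)     { return (h >> 7) & 1; }         /* R flag */
static inline uint64_t objhdr_cap56(objhdr_t h) { return h >> 8; }               /* 56-bit capacity */

/* The order of the four booleans beyond (cap, type) is a guess:
   stone, R, and two of the reserved bits. */
static inline objhdr_t objhdr_make(uint64_t cap, uint8_t type,
                                   bool stone, bool r, bool res1, bool res2) {
    return (cap << 8)
         | ((uint64_t)r     << 7)
         | ((uint64_t)res2  << 5)
         | ((uint64_t)res1  << 4)
         | ((uint64_t)stone << 3)
         | (type & 0x7);
}
```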

### Object Types
- 0: OBJ_ARRAY - Header, Length, Elements[]
- 1: OBJ_BLOB - Header, Length (bits), BitWords[]
- 2: OBJ_TEXT - Header, Length/Hash, PackedChars[]
- 2: OBJ_TEXT - Header, Length/Hash, PackedChars[] (see Text section below)
- 3: OBJ_RECORD - Header, Prototype, Length, Key/Value pairs
- 4: OBJ_FUNCTION - Header, Code, Outer (always stone, 3 words)
- 5: OBJ_CODE - Header, Arity, Size, ClosureSize, Entry, Disruption
- 5: OBJ_CODE - Header, Arity, Size, ClosureSize, Entry, Disruption (in context memory)
- 6: OBJ_FRAME - Header, Function, Caller, ReturnAddr, Slots[]
- 7: OBJ_FORWARD - Forwarding pointer (used during GC)

### Text (Type 2) - Two Modes

Text has two forms depending on the stone bit:

**Pretext (stone=0)**: Mutable intermediate representation
- `capacity` = max chars it can hold
- `length` word = actual number of characters
- Used during string building/concatenation

**Text (stone=1)**: Immutable user-facing string
- `capacity` = length (they're equal for stoned text)
- `length` word = hash (for record key lookup)
- All text keys in records must be stoned
- Text literals are stoned and interned

```c
typedef struct {
    objhdr_t hdr;         /* type=OBJ_TEXT, cap=capacity, s=stone bit */
    uint64_t len_or_hash; /* length if pretext (s=0), hash if text (s=1) */
    uint64_t packed[];    /* 2 UTF32 chars per word */
} JSText;
```
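
To illustrate the packed layout (two UTF-32 code points per 64-bit word), a sketch of character access; the helper names and the even-index-in-low-half ordering are assumptions:

```c
#include <stdint.h>

/* Read character i: even indices in the low 32 bits of a word,
   odd indices in the high 32 bits (assumed order). */
static inline uint32_t text_get_char(const uint64_t *packed, uint64_t i) {
    uint64_t word = packed[i / 2];
    return (uint32_t)((i & 1) ? (word >> 32) : (word & 0xFFFFFFFFu));
}

static inline void text_set_char(uint64_t *packed, uint64_t i, uint32_t c) {
    uint64_t *w = &packed[i / 2];
    if (i & 1)
        *w = (*w & 0x00000000FFFFFFFFull) | ((uint64_t)c << 32);
    else
        *w = (*w & 0xFFFFFFFF00000000ull) | c;
}
```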

**DELETE JSString** - just use JSText (currently named mist_text).

### Text Interning (Per-Context)

Each context maintains its own text interning table:
- Texts used as record keys are stoned and interned
- Text literals are stoned and interned
- During GC, a new interning table is built as live objects are copied
- This prevents the table from becoming a graveyard

```c
/* In JSContext */
JSText **text_intern_array;  /* indexed by ID */
uint32_t *text_intern_hash;  /* hash table mapping to IDs */
uint32_t text_intern_count;  /* number of interned texts */
uint32_t text_intern_size;   /* hash table size */
```
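
A sketch of how a lookup over these fields could work, assuming open addressing with linear probing, a power-of-two `text_intern_size`, and `text_intern_hash` storing ID+1 with 0 meaning empty; `text_hash` and `text_equal` are assumed helpers, and the real probing scheme is not specified by this plan:

```c
/* Return an existing interned text equal to `t`, or NULL if not present. */
static JSText *intern_lookup(JSContext *ctx, const JSText *t) {
    uint32_t mask = ctx->text_intern_size - 1;
    uint32_t slot = text_hash(t) & mask;
    for (;;) {
        uint32_t id_plus1 = ctx->text_intern_hash[slot];
        if (id_plus1 == 0)
            return NULL;                              /* empty slot: not interned */
        JSText *cand = ctx->text_intern_array[id_plus1 - 1];
        if (text_equal(cand, t))
            return cand;
        slot = (slot + 1) & mask;                     /* linear probing */
    }
}
```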

---

## Phase 1: Add Buddy Allocator to JSRuntime
## Phase 1: Add Buddy Allocator to JSRuntime [DONE]

### File: source/quickjs.c

@@ -65,12 +113,15 @@ typedef struct BuddyAllocator {
```c
struct JSRuntime {
    BuddyAllocator buddy;
    /* ... keep: class_count, class_array, context_list ... */
    int class_count;
    JSClass *class_array;
    struct list_head context_list;
    /* REMOVE: gc_obj_list, gc_zero_ref_count_list, gc_phase, malloc_gc_threshold */
    /* REMOVE: malloc functions, malloc_state - contexts use bump allocation */
};
```

**1.3 Implement buddy functions**
**1.3 Implement buddy functions** [DONE]
- `buddy_init(BuddyAllocator *b)` - allocate 256MB, initialize free lists
- `buddy_alloc(BuddyAllocator *b, size_t size)` - allocate block of given size
- `buddy_free(BuddyAllocator *b, void *ptr, size_t size)` - return block
@@ -78,7 +129,7 @@ struct JSRuntime {

---

## Phase 2: Restructure JSContext for Bump Allocation
## Phase 2: Restructure JSContext for Bump Allocation [DONE]

### File: source/quickjs.c

@@ -88,7 +139,7 @@ struct JSContext {
    JSRuntime *rt;
    struct list_head link;

    /* Actor memory block */
    /* Actor memory block - bump allocation */
    uint8_t *heap_base; /* start of current block */
    uint8_t *heap_free; /* bump pointer */
    uint8_t *heap_end;  /* end of block */
@@ -108,19 +159,22 @@ struct JSContext {
    JSValue *class_proto;
    JSValue current_exception;

    /* Stone arena (immutable interned strings) */
    struct StoneArenaPage *st_pages;
    /* ... stone interning fields ... */
    /* Text interning (per-context, rebuilt on GC) */
    JSText **text_intern_array;
    uint32_t *text_intern_hash;
    uint32_t text_intern_count;
    uint32_t text_intern_size;

    /* Other context state */
    uint16_t class_count;
    int interrupt_counter;
    void *user_opaque;
    /* REMOVE: JSGCObjectHeader header at start */

    /* REMOVE: JSGCObjectHeader header */
};
```

**2.2 Implement bump allocator**
**2.2 Implement bump allocator** [DONE]
```c
static void *ctx_alloc(JSContext *ctx, size_t size) {
    size = (size + 7) & ~7; /* 8-byte align */
@@ -150,44 +204,44 @@ typedef struct {
    objhdr_t hdr; /* type=OBJ_ARRAY, cap=element_capacity */
    uint64_t len;
    JSValue elem[];
} MistArray;
} JSArray;

/* Text */
/* Text (replaces both mist_text and JSString) */
typedef struct {
    objhdr_t hdr;         /* type=OBJ_TEXT, cap=char_capacity, s=stone bit */
    uint64_t len_or_hash; /* len if pretext, hash if stoned */
    objhdr_t hdr;         /* type=OBJ_TEXT, cap=capacity, s=stone bit */
    uint64_t len_or_hash; /* len if pretext (s=0), hash if text (s=1) */
    uint64_t packed[];    /* 2 UTF32 chars per word */
} MistText;
} JSText;

/* Record (object) */
typedef struct MistRecord {
typedef struct JSRecord {
    objhdr_t hdr; /* type=OBJ_RECORD, cap=mask, s=stone bit */
    struct MistRecord *proto;
    struct JSRecord *proto;
    uint64_t len;
    uint64_t tombs;
    uint16_t class_id;
    uint16_t _pad;
    uint32_t rec_id;  /* for record-as-key hashing */
    JSValue slots[];  /* key[0], val[0], key[1], val[1], ... */
} MistRecord;
} JSRecord;

/* Function */
typedef struct {
    objhdr_t hdr;  /* type=OBJ_FUNCTION, always stone */
    JSValue code;  /* pointer to MistCode */
    JSValue outer; /* pointer to MistFrame */
} MistFunction;
    JSValue code;  /* pointer to code object */
    JSValue outer; /* pointer to enclosing frame */
} JSFunction;

/* Frame */
typedef struct {
    objhdr_t hdr;     /* type=OBJ_FRAME, cap=slot_count */
    JSValue function; /* MistFunction */
    JSValue caller;   /* MistFrame or null */
    JSValue function; /* JSFunction */
    JSValue caller;   /* JSFrame or null */
    uint64_t return_addr;
    JSValue slots[];  /* args, locals, temporaries */
} MistFrame;
} JSFrame;

/* Code (always in stone/immutable memory) */
/* Code (in context memory, always stone) */
typedef struct {
    objhdr_t hdr; /* type=OBJ_CODE, always stone */
    uint32_t arity;
@@ -196,20 +250,24 @@ typedef struct {
    uint64_t entry_point;
    uint64_t disruption_point;
    uint8_t bytecode[];
} MistCode;
} JSCode;
```
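
Given these layouts, the copying GC can derive an object's byte size from its header alone. A sketch of that computation for the target layout (no `JSGCObjectHeader` prefix), relying on the struct definitions above; the blob, function, and frame sizing here is an assumption:

```c
static size_t obj_size_from_hdr(const void *ptr) {
    objhdr_t hdr = *(const objhdr_t *)ptr;
    uint64_t cap = objhdr_cap56(hdr);
    switch (objhdr_type(hdr)) {
    case OBJ_ARRAY:    return sizeof(JSArray)  + cap * sizeof(JSValue);            /* cap = element capacity */
    case OBJ_TEXT:     return sizeof(JSText)   + ((cap + 1) / 2) * sizeof(uint64_t); /* 2 chars per word */
    case OBJ_RECORD:   return sizeof(JSRecord) + (cap + 1) * 2 * sizeof(JSValue);  /* cap = mask */
    case OBJ_FUNCTION: return sizeof(JSFunction);                                  /* fixed-size, always stone */
    case OBJ_FRAME:    return sizeof(JSFrame)  + cap * sizeof(JSValue);            /* cap = slot count */
    case OBJ_BLOB:     return sizeof(objhdr_t) + sizeof(uint64_t)
                              + ((cap + 63) / 64) * sizeof(uint64_t);              /* cap in bits (assumed) */
    default:           return 0; /* OBJ_CODE is stone; OBJ_FORWARD is never scanned for size */
    }
}
```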

### 3.2 Delete JSGCObjectHeader usage
### 3.2 Delete legacy types

Remove from:
- JSRecord, JSArray, JSFunction - remove `JSGCObjectHeader header` field
- All `p->header.ref_count`, `p->header.gc_obj_type`, `p->header.mark` accesses
**DELETE:**
- `JSString` struct - use `JSText` instead
- `JSGCObjectHeader` struct
- `mist_text` - rename to `JSText`
- All `p->header.ref_count` accesses
- All `p->header.gc_obj_type` accesses
- `JS_GC_OBJ_TYPE_*` enum values
- `add_gc_object()`, `remove_gc_object()` functions
- `gc_obj_list`, `gc_zero_ref_count_list` in JSRuntime
- `js_alloc_string_rt()` - no runtime-level string allocation

---

## Phase 4: Implement Cheney Copying GC
## Phase 4: Implement Cheney Copying GC [DONE - needs update]

### 4.1 Core GC function

@@ -226,6 +284,10 @@ static int ctx_gc(JSContext *ctx) {
    uint8_t *to_free = new_block;
    uint8_t *to_end = new_block + new_size;

    /* Reset text interning table (will be rebuilt during copy) */
    ctx->text_intern_count = 0;
    memset(ctx->text_intern_hash, 0, ctx->text_intern_size * sizeof(uint32_t));

    /* Copy roots */
    ctx->global_obj = gc_copy_value(ctx, ctx->global_obj, &to_free, to_end);
    ctx->current_exception = gc_copy_value(ctx, ctx->current_exception, &to_free, to_end);
@@ -238,7 +300,7 @@ static int ctx_gc(JSContext *ctx) {
        ctx->value_stack[i] = gc_copy_value(ctx, ctx->value_stack[i], &to_free, to_end);
    }

    /* Scan copied objects (Cheney scan pointer) */
    /* Cheney scan: process copied objects */
    uint8_t *scan = to_base;
    while (scan < to_free) {
        gc_scan_object(ctx, scan, &to_free, to_end);
@@ -273,25 +335,27 @@ static JSValue gc_copy_value(JSContext *ctx, JSValue v, uint8_t **to_free, uint8
    if (!JS_IsPtr(v)) return v; /* immediate value */

    void *ptr = JS_VALUE_GET_PTR(v);
    if (is_stone_ptr(ptr)) return v; /* stone memory, don't copy */

    objhdr_t hdr = *(objhdr_t *)ptr;

    /* Already forwarded? */
    if (objhdr_type(hdr) == OBJ_FORWARD) {
        return JS_MKPTR(JS_TAG_PTR, (void *)(hdr >> 3)); /* extract forwarding address */
        /* Extract forwarding address from cap56 field */
        return JS_MKPTR(JS_TAG_PTR, (void *)(uintptr_t)objhdr_cap56(hdr));
    }

    /* Copy object */
    size_t size = gc_object_size(ptr);
    if (*to_free + size > to_end) abort(); /* shouldn't happen */

    void *new_ptr = *to_free;
    memcpy(new_ptr, ptr, size);
    *to_free += size;

    /* Install forwarding pointer */
    *(objhdr_t *)ptr = ((objhdr_t)(uintptr_t)new_ptr << 3) | OBJ_FORWARD;
    /* Install forwarding pointer in old location */
    *(objhdr_t *)ptr = objhdr_make((uint64_t)(uintptr_t)new_ptr, OBJ_FORWARD, 0, 0, 0, 0);

    /* If it's a stoned text, re-intern it */
    if (objhdr_type(hdr) == OBJ_TEXT && objhdr_s(hdr)) {
        gc_intern_text(ctx, (JSText *)new_ptr);
    }

    return JS_MKPTR(JS_TAG_PTR, new_ptr);
}
@@ -300,15 +364,19 @@ static void gc_scan_object(JSContext *ctx, void *ptr, uint8_t **to_free, uint8_t
    objhdr_t hdr = *(objhdr_t *)ptr;
    switch (objhdr_type(hdr)) {
    case OBJ_ARRAY: {
        MistArray *arr = ptr;
        JSArray *arr = ptr;
        for (uint64_t i = 0; i < arr->len; i++) {
            arr->elem[i] = gc_copy_value(ctx, arr->elem[i], to_free, to_end);
        }
        break;
    }
    case OBJ_RECORD: {
        MistRecord *rec = ptr;
        rec->proto = gc_copy_value(ctx, rec->proto, to_free, to_end);
        JSRecord *rec = ptr;
        if (rec->proto) {
            JSValue pv = JS_MKPTR(JS_TAG_PTR, rec->proto);
            pv = gc_copy_value(ctx, pv, to_free, to_end);
            rec->proto = (JSRecord *)JS_VALUE_GET_PTR(pv);
        }
        uint64_t mask = objhdr_cap56(hdr);
        for (uint64_t i = 0; i <= mask; i++) {
            rec->slots[i*2] = gc_copy_value(ctx, rec->slots[i*2], to_free, to_end);
@@ -317,13 +385,13 @@ static void gc_scan_object(JSContext *ctx, void *ptr, uint8_t **to_free, uint8_t
        break;
    }
    case OBJ_FUNCTION: {
        MistFunction *fn = ptr;
        JSFunction *fn = ptr;
        fn->code = gc_copy_value(ctx, fn->code, to_free, to_end);
        fn->outer = gc_copy_value(ctx, fn->outer, to_free, to_end);
        break;
    }
    case OBJ_FRAME: {
        MistFrame *fr = ptr;
        JSFrame *fr = ptr;
        fr->function = gc_copy_value(ctx, fr->function, to_free, to_end);
        fr->caller = gc_copy_value(ctx, fr->caller, to_free, to_end);
        uint64_t cap = objhdr_cap56(hdr);
@@ -358,39 +426,91 @@ static void gc_scan_object(JSContext *ctx, void *ptr, uint8_t **to_free, uint8_t
- `mark_children`, `gc_mark` (the old marking functions)
- `JSGCPhaseEnum`, `gc_phase` field in JSRuntime
- `gc_obj_list`, `gc_zero_ref_count_list` in JSRuntime
- `JSRefCountHeader` struct
- `js_alloc_string_rt` - no runtime string allocation
- Stone arena in runtime (`st_pages`, etc.) - each context has its own

**Update:**
- `JS_FreeValue` - no ref counting, just mark for GC or no-op
- `JS_DupValue` - no ref counting, just return value
- `__JS_FreeValueRT` - simplified, no ref count checks
- `JS_FreeValue` - becomes no-op (GC handles everything)
- `JS_DupValue` - becomes no-op (just return value)
- `__JS_FreeValueRT` - remove entirely

---

## Phase 6: Update Allocation Sites
## Phase 6: Update Allocation Sites [IN PROGRESS]

### 6.1 Replace js_malloc with ctx_alloc

All object allocations change from:
```c
JSRecord *rec = js_mallocz(ctx, sizeof(JSRecord));
rec->tab = js_mallocz(ctx, sizeof(JSRecordEntry) * size);
```
to:
```c
MistRecord *rec = ctx_alloc(ctx, sizeof(MistRecord) + (mask+1) * 2 * sizeof(JSValue));
size_t total = sizeof(JSRecord) + (mask+1) * 2 * sizeof(JSValue);
JSRecord *rec = ctx_alloc(ctx, total);
rec->hdr = objhdr_make(mask, OBJ_RECORD, false, false, false, false);
/* slots are inline after the struct */
```

### 6.2 Update object creation functions

- `JS_NewObject` - use ctx_alloc, set hdr
- `JS_NewArray` - use ctx_alloc, set hdr
- `JS_NewStringLen` - use ctx_alloc for heap strings
- `JS_NewObject` - use ctx_alloc, set hdr, inline slots
- `JS_NewArray` - use ctx_alloc, set hdr, inline elements
- `JS_NewStringLen` - use ctx_alloc, create JSText
- `js_create_function` - use ctx_alloc
- String concatenation, array push, etc.

### 6.3 Delete js_malloc/js_free usage for heap objects

Keep `js_malloc_rt` only for:
- Class arrays (in runtime)
- VM stacks (external to GC'd heap)
- Temporary C allocations

---

## Phase 7: Update Type Checks
## Phase 7: Delete JSString, Use JSText

### 7.1 Replace JSString with JSText

```c
/* OLD - DELETE */
struct JSString {
    JSRefCountHeader header;
    uint32_t pad;
    objhdr_t hdr;
    int64_t len;
    uint64_t u[];
};

/* NEW - Single text type */
typedef struct {
    objhdr_t hdr;         /* type=OBJ_TEXT, cap=capacity, s=stone */
    uint64_t len_or_hash; /* length if s=0, hash if s=1 */
    uint64_t packed[];    /* 2 UTF32 chars per word */
} JSText;
```

### 7.2 Update string functions

- `js_alloc_string` -> allocate JSText via ctx_alloc
- Remove `js_alloc_string_rt` entirely
- `js_free_string` -> no-op (GC handles it)
- Update all JSString* to JSText*

### 7.3 Text stoning

When a text is used as a record key or becomes a literal (a sketch follows this list):
1. Set stone bit: `hdr = objhdr_set_s(hdr, true)`
2. Compute hash and store in len_or_hash
3. Set capacity = length
4. Add to context's intern table
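
A sketch of that stoning step, assuming a `text_hash_chars` helper and an `intern_add` routine for the per-context table (both hypothetical names), and assuming the first boolean argument of `objhdr_make` is the stone bit:

```c
/* Turn a mutable pretext into an immutable, interned text (in place). */
static JSText *text_stone(JSContext *ctx, JSText *t) {
    if (objhdr_s(t->hdr))
        return t;                                        /* already stone */
    uint64_t len = t->len_or_hash;                       /* pretext: word holds the length */
    t->hdr = objhdr_make(len, OBJ_TEXT, true, false, false, false); /* capacity = length, stone = 1 */
    t->len_or_hash = text_hash_chars(t->packed, len);    /* word now holds the hash */
    intern_add(ctx, t);                                  /* add to the per-context intern table */
    return t;
}
```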

---

## Phase 8: Update Type Checks

Replace `JSGCObjectHeader.gc_obj_type` checks with `objhdr_type`:
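
The call sites themselves fall outside the hunks shown here; a representative before/after sketch, assuming the unified header field is named `hdr` as in the Phase 3 layouts:

```c
/* Before: dispatch on the legacy GC header */
if (p->header.gc_obj_type == JS_GC_OBJ_TYPE_ARRAY) { /* ... */ }

/* After: dispatch on the unified 64-bit object header */
if (objhdr_type(p->hdr) == OBJ_ARRAY) { /* ... */ }
```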

@@ -410,11 +530,11 @@ Update helper functions:

---

## Phase 8: Handle C Opaque Objects
## Phase 9: Handle C Opaque Objects

Per docs/memory.md, C opaque objects need special handling:

**8.1 Track live opaque objects**
**9.1 Track live opaque objects**
```c
typedef struct {
    void *opaque;
@@ -428,8 +548,8 @@ int opaque_ref_count;
int opaque_ref_capacity;
```

**8.2 During GC**
1. When copying a MistRecord with opaque data, mark it alive
**9.2 During GC**
1. When copying a JSRecord with opaque data, mark it alive in opaque_refs
2. After GC, iterate opaque_refs and call finalizer for those with `alive=0`
3. Clear all alive flags for next cycle (see the sketch below)
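
A sketch of steps 2 and 3 as a post-GC sweep; the `OpaqueRef` entry layout (the `alive` flag and per-entry `finalizer`) is assumed, since the struct definition is cut off by the hunk above:

```c
/* After the copy phase: finalize opaque data whose owning record died. */
static void gc_sweep_opaque(JSContext *ctx) {
    int live = 0;
    for (int i = 0; i < ctx->opaque_ref_count; i++) {
        OpaqueRef *ref = &ctx->opaque_refs[i];       /* OpaqueRef is a hypothetical name */
        if (ref->alive) {
            ref->alive = 0;                          /* step 3: reset flag for the next cycle */
            ctx->opaque_refs[live++] = *ref;         /* keep, compacting the array */
        } else if (ref->finalizer) {
            ref->finalizer(ctx, ref->opaque);        /* step 2: owning record was not copied */
        }
    }
    ctx->opaque_ref_count = live;
}
```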

@@ -438,13 +558,14 @@ int opaque_ref_capacity;

## File Changes Summary

### source/quickjs.c
- Remove ~500 lines: RC_TRACE, gc_decref, gc_free_cycles, JSGCObjectHeader usage
- Add ~300 lines: buddy allocator, Cheney GC, new object layouts
- Modify ~200 lines: allocation sites, type checks
- Remove ~800 lines: RC_TRACE, gc_decref, gc_free_cycles, JSGCObjectHeader, JSString, runtime string arena
- Add ~300 lines: buddy allocator, Cheney GC, JSText
- Modify ~400 lines: allocation sites, type checks, string handling

### source/quickjs.h
- Remove: JSGCObjectHeader from public API
- Update: JS_FreeValue, JS_DupValue to be no-ops or trivial
- Remove: JSGCObjectHeader, JSRefCountHeader from public API
- Remove: JS_MarkFunc, JS_MarkValue
- Update: JS_FreeValue, JS_DupValue to be no-ops

---

@@ -459,20 +580,23 @@ int opaque_ref_capacity;

## Dependencies / Order of Work

1. Phase 1 (Buddy) - independent, implement first
2. Phase 2 (JSContext) - depends on Phase 1
3. Phase 3 (Headers) - major refactor, careful testing needed
4. Phase 4 (Cheney GC) - depends on Phases 1-3
5. Phase 5 (Remove old GC) - after Phase 4 works
6. Phase 6 (Allocation sites) - incremental, with Phase 3
7. Phase 7 (Type checks) - with Phase 3
8. Phase 8 (Opaque) - last, once basic GC works
1. Phase 1 (Buddy) - DONE
2. Phase 2 (JSContext bump alloc) - DONE
3. Phase 5 (Remove old GC) - Do this early to reduce conflicts
4. Phase 7 (Delete JSString) - Major cleanup
5. Phase 3 (Unify headers) - Depends on 5, 7
6. Phase 6 (Allocation sites) - Incremental, with Phase 3
7. Phase 4 (Cheney GC) - After Phases 3, 6
8. Phase 8 (Type checks) - With Phase 3
9. Phase 9 (Opaque) - Last, once basic GC works

---

## Notes

- Stone arena (immutable interned strings) remains unchanged - not subject to GC
- OBJ_CODE lives in stone memory, never copied
- Each context owns its memory - no runtime-level allocation except buddy blocks
- Text interning is per-context, rebuilt during GC
- OBJ_CODE lives in context memory (always stone)
- Frames use caller=null to signal returnable (can be shrunk during GC)
- Forward pointer type (7) used during GC to mark copied objects
- `heap_free - heap_base` = total memory used by context

source/quickjs.c
@@ -275,11 +275,43 @@ typedef enum OPCodeEnum OPCodeEnum;
struct StoneArenaPage;
struct mist_text;

/* ============================================================
   Buddy Allocator for Actor Memory Blocks
   ============================================================ */

#define BUDDY_MIN_ORDER 16 /* 64KB minimum block */
#define BUDDY_MAX_ORDER 28 /* 256MB maximum */
#define BUDDY_LEVELS (BUDDY_MAX_ORDER - BUDDY_MIN_ORDER + 1)
#define BUDDY_POOL_SIZE (1ULL << BUDDY_MAX_ORDER)

typedef struct BuddyBlock {
    struct BuddyBlock *next;
    struct BuddyBlock *prev;
    uint8_t order; /* log2 of size */
    uint8_t is_free;
} BuddyBlock;

typedef struct BuddyAllocator {
    uint8_t *base;     /* 256MB base address */
    size_t total_size; /* 256MB */
    BuddyBlock *free_lists[BUDDY_LEVELS];
    uint8_t initialized;
} BuddyAllocator;

/* Forward declarations for buddy allocator functions */
static int buddy_init(BuddyAllocator *b);
static void *buddy_alloc(BuddyAllocator *b, size_t size);
static void buddy_free(BuddyAllocator *b, void *ptr, size_t size);
static void buddy_destroy(BuddyAllocator *b);

struct JSRuntime {
    JSMallocFunctions mf;
    JSMallocState malloc_state;
    const char *rt_info;

    /* Buddy allocator for actor memory blocks */
    BuddyAllocator buddy;

    int class_count; /* size of class_array */
    JSClass *class_array;

@@ -693,6 +725,9 @@ typedef struct JSRecordEntry JSRecordEntry;
/* Placeholder: these will be replaced with actual implementations after
 * JSRecord is defined */

/* Forward declaration for bump allocator */
static void *ctx_alloc(JSContext *ctx, size_t size);

/* ============================================================
   Stone Arena Allocator
   ============================================================ */
@@ -871,6 +906,13 @@ struct JSContext {
    JSRuntime *rt;
    struct list_head link;

    /* Actor memory block (bump allocation) */
    uint8_t *heap_base;        /* start of current block */
    uint8_t *heap_free;        /* bump pointer */
    uint8_t *heap_end;         /* end of block */
    size_t current_block_size; /* current block size (64KB initially) */
    size_t next_block_size;    /* doubles if <10% recovered after GC */

    uint16_t binary_object_count;
    int binary_object_size;

@@ -1526,15 +1568,19 @@ static int rec_set_own (JSContext *ctx, JSRecord *rec, JSValue k,
    return 0;
}

/* Allocate a new record with specified class_id (default JS_CLASS_OBJECT) */
/* Allocate a new record with specified class_id (default JS_CLASS_OBJECT)
   Uses bump allocation from context heap. Tab is inline (flexible array member). */
static JSRecord *js_new_record_class (JSContext *ctx, uint32_t initial_mask,
                                      JSClassID class_id) {
    JSRecord *rec;
    JSRuntime *rt = ctx->rt;

    if (initial_mask == 0) initial_mask = JS_RECORD_INITIAL_MASK;

    rec = js_mallocz (ctx, sizeof (JSRecord));
    /* Allocate record + inline tab in one bump allocation */
    size_t tab_size = sizeof(JSRecordEntry) * (initial_mask + 1);
    size_t total_size = sizeof(JSRecord) + tab_size;

    JSRecord *rec = ctx_alloc(ctx, total_size);
    if (!rec) return NULL;

    rec->header.ref_count = 1;
@@ -1551,16 +1597,10 @@ static JSRecord *js_new_record_class (JSContext *ctx, uint32_t initial_mask,
    rec->tmp_mark = 0;
    rec->u.opaque = NULL;

    rec->tab = js_mallocz (ctx, sizeof (JSRecordEntry) * (initial_mask + 1));
    if (!rec->tab) {
        js_free (ctx, rec);
        return NULL;
    }
    /* Tab is inline right after the struct */
    rec->tab = (JSRecordEntry *)((uint8_t *)rec + sizeof(JSRecord));
    rec_tab_init (rec->tab, initial_mask);

    /* Add to GC list */
    list_add_tail (&rec->header.link, &rt->gc_obj_list);

    return rec;
}

@@ -2239,6 +2279,409 @@ static inline BOOL js_check_stack_overflow (JSRuntime *rt,
}
#endif

/* ============================================================
   Buddy Allocator Implementation
   ============================================================ */

/* Get order (log2) for a given size, rounding up to minimum */
static int buddy_get_order(size_t size) {
    int order = BUDDY_MIN_ORDER;
    size_t block_size = 1ULL << BUDDY_MIN_ORDER;
    while (block_size < size && order < BUDDY_MAX_ORDER) {
        order++;
        block_size <<= 1;
    }
    return order;
}

/* Get offset of block from base */
static size_t buddy_block_offset(BuddyAllocator *b, void *ptr) {
    return (uint8_t *)ptr - b->base;
}

/* Get buddy address for a block at given offset and order */
static void *buddy_get_buddy(BuddyAllocator *b, void *ptr, int order) {
    size_t offset = buddy_block_offset(b, ptr);
    size_t buddy_offset = offset ^ (1ULL << order);
    return b->base + buddy_offset;
}

/* Remove block from its free list */
static void buddy_list_remove(BuddyBlock *block) {
    if (block->prev) block->prev->next = block->next;
    if (block->next) block->next->prev = block->prev;
    block->next = NULL;
    block->prev = NULL;
}

/* Add block to front of free list */
static void buddy_list_add(BuddyAllocator *b, BuddyBlock *block, int order) {
    int level = order - BUDDY_MIN_ORDER;
    block->next = b->free_lists[level];
    block->prev = NULL;
    if (b->free_lists[level]) {
        b->free_lists[level]->prev = block;
    }
    b->free_lists[level] = block;
    block->order = order;
    block->is_free = 1;
}

/* Initialize buddy allocator with 256MB pool */
static int buddy_init(BuddyAllocator *b) {
    if (b->initialized) return 0;

    /* Allocate the pool (using system malloc, not js_malloc) */
    b->base = (uint8_t *)malloc(BUDDY_POOL_SIZE);
    if (!b->base) return -1;

    b->total_size = BUDDY_POOL_SIZE;

    /* Initialize free lists */
    for (int i = 0; i < BUDDY_LEVELS; i++) {
        b->free_lists[i] = NULL;
    }

    /* Add entire pool as one free block at max order */
    BuddyBlock *block = (BuddyBlock *)b->base;
    buddy_list_add(b, block, BUDDY_MAX_ORDER);

    b->initialized = 1;
    return 0;
}

/* Allocate a block of at least 'size' bytes */
static void *buddy_alloc(BuddyAllocator *b, size_t size) {
    if (!b->initialized) {
        if (buddy_init(b) < 0) return NULL;
    }

    int order = buddy_get_order(size);
    if (order > BUDDY_MAX_ORDER) return NULL;

    /* Find smallest available block that fits */
    int level = order - BUDDY_MIN_ORDER;
    int found_level = -1;
    for (int i = level; i < BUDDY_LEVELS; i++) {
        if (b->free_lists[i]) {
            found_level = i;
            break;
        }
    }

    if (found_level < 0) return NULL; /* Out of memory */

    /* Remove block from free list */
    BuddyBlock *block = b->free_lists[found_level];
    if (block->prev) {
        block->prev->next = block->next;
    } else {
        b->free_lists[found_level] = block->next;
    }
    if (block->next) block->next->prev = NULL;

    /* Split block down to required order */
    int current_order = found_level + BUDDY_MIN_ORDER;
    while (current_order > order) {
        current_order--;
        /* Create buddy block in upper half */
        BuddyBlock *buddy = (BuddyBlock *)((uint8_t *)block + (1ULL << current_order));
        buddy_list_add(b, buddy, current_order);
    }

    block->order = order;
    block->is_free = 0;
    return block;
}

/* Free a block */
static void buddy_free(BuddyAllocator *b, void *ptr, size_t size) {
    if (!ptr || !b->initialized) return;

    int order = buddy_get_order(size);
    BuddyBlock *block = (BuddyBlock *)ptr;

    /* Try to coalesce with buddy */
    while (order < BUDDY_MAX_ORDER) {
        BuddyBlock *buddy = buddy_get_buddy(b, block, order);

        /* Check if buddy is free and same order */
        if (!buddy->is_free || buddy->order != order) break;

        /* Remove buddy from free list */
        int level = order - BUDDY_MIN_ORDER;
        if (buddy->prev) {
            buddy->prev->next = buddy->next;
        } else {
            b->free_lists[level] = buddy->next;
        }
        if (buddy->next) buddy->next->prev = NULL;

        /* Coalesce: use lower address as merged block */
        if ((uint8_t *)buddy < (uint8_t *)block) {
            block = buddy;
        }
        order++;
    }

    /* Add merged block to free list */
    buddy_list_add(b, block, order);
}

/* Destroy buddy allocator and free pool */
static void buddy_destroy(BuddyAllocator *b) {
    if (!b->initialized) return;

    free(b->base);
    b->base = NULL;
    b->initialized = 0;
    for (int i = 0; i < BUDDY_LEVELS; i++) {
        b->free_lists[i] = NULL;
    }
}

/* ============================================================
   Bump Allocator and Cheney GC
   ============================================================ */

/* Forward declarations for GC helpers */
static int ctx_gc(JSContext *ctx);
static JSValue gc_copy_value(JSContext *ctx, JSValue v, uint8_t **to_free, uint8_t *to_end);
static void gc_scan_object(JSContext *ctx, void *ptr, uint8_t **to_free, uint8_t *to_end);
static size_t gc_object_size(void *ptr);

/* Check if pointer is in stone arena (not subject to GC) */
static int is_stone_ptr(JSContext *ctx, void *ptr) {
    StoneArenaPage *page = ctx->st_pages;
    while (page) {
        if ((uint8_t *)ptr >= page->data &&
            (uint8_t *)ptr < page->data + STONE_PAGE_SIZE) {
            return 1;
        }
        page = page->next;
    }
    return 0;
}

/* Bump allocator - allocate from current heap block */
static void *ctx_alloc(JSContext *ctx, size_t size) {
    size = (size + 7) & ~7; /* 8-byte align */

    if (ctx->heap_free + size > ctx->heap_end) {
        /* Block full - trigger GC */
        if (ctx_gc(ctx) < 0) return NULL;
        if (ctx->heap_free + size > ctx->heap_end) {
            return NULL; /* Still OOM after GC */
        }
    }

    void *ptr = ctx->heap_free;
    ctx->heap_free += size;
    memset(ptr, 0, size);
    return ptr;
}

/* Get size of a heap object based on its type */
static size_t gc_object_size(void *ptr) {
    /* All mist objects have header at offset 8 (after JSGCObjectHeader) */
    objhdr_t hdr = *((objhdr_t *)((char *)ptr + 8));
    uint8_t type = objhdr_type(hdr);
    uint64_t cap = objhdr_cap56(hdr);

    switch (type) {
    case OBJ_ARRAY: {
        /* JSArray + inline values array. Cap is element capacity. */
        size_t values_size = sizeof(JSValue) * cap;
        return sizeof(JSArray) + values_size;
    }
    case OBJ_TEXT: {
        /* JSString: header + pad + hdr + length + packed chars */
        size_t word_count = (cap + 1) / 2;
        return sizeof(JSString) + word_count * sizeof(uint64_t);
    }
    case OBJ_RECORD: {
        /* JSRecord + inline tab. Cap is mask, so tab size is mask+1 entries. */
        size_t tab_size = sizeof(JSRecordEntry) * (cap + 1);
        return sizeof(JSRecord) + tab_size;
    }
    case OBJ_FUNCTION:
    case OBJ_CODE:
    case OBJ_FRAME:
    case OBJ_BLOB:
    default:
        /* Conservative estimate for unknown types */
        return 64;
    }
}

/* Copy a single value, returning the new value with updated pointer if needed */
static JSValue gc_copy_value(JSContext *ctx, JSValue v, uint8_t **to_free, uint8_t *to_end) {
    if (!JS_IsPtr(v)) return v; /* Immediate value - no copy needed */

    void *ptr = JS_VALUE_GET_PTR(v);
    if (is_stone_ptr(ctx, ptr)) return v; /* Stone memory - don't copy */

    /* Check if pointer is in current heap (not external allocation) */
    if ((uint8_t *)ptr < ctx->heap_base || (uint8_t *)ptr >= ctx->heap_end) {
        /* External allocation (using js_malloc) - keep reference */
        return v;
    }

    /* Get object header (at offset 8 after JSGCObjectHeader) */
    objhdr_t *hdr_ptr = (objhdr_t *)((char *)ptr + 8);
    objhdr_t hdr = *hdr_ptr;

    /* Already forwarded? */
    if (objhdr_type(hdr) == OBJ_FORWARD) {
        /* Extract forwarding address from cap56 field */
        void *new_ptr = (void *)(uintptr_t)objhdr_cap56(hdr);
        return JS_MKPTR(JS_TAG_PTR, new_ptr);
    }

    /* Copy object to new space */
    size_t size = gc_object_size(ptr);
    if (*to_free + size > to_end) {
        /* Should not happen if we sized new block correctly */
        return v;
    }

    void *new_ptr = *to_free;
    memcpy(new_ptr, ptr, size);
    *to_free += size;

    /* Install forwarding pointer in old location */
    *hdr_ptr = objhdr_make((uint64_t)(uintptr_t)new_ptr, OBJ_FORWARD, 0, 0, 0, 0);

    return JS_MKPTR(JS_TAG_PTR, new_ptr);
}

/* Scan a copied object and update its internal references */
static void gc_scan_object(JSContext *ctx, void *ptr, uint8_t **to_free, uint8_t *to_end) {
    objhdr_t hdr = *((objhdr_t *)((char *)ptr + 8));
    uint8_t type = objhdr_type(hdr);

    switch (type) {
    case OBJ_ARRAY: {
        JSArray *arr = (JSArray *)ptr;
        for (uint32_t i = 0; i < arr->len; i++) {
            arr->values[i] = gc_copy_value(ctx, arr->values[i], to_free, to_end);
        }
        break;
    }
    case OBJ_RECORD: {
        JSRecord *rec = (JSRecord *)ptr;
        /* Copy prototype */
        if (rec->proto) {
            JSValue proto_val = JS_MKPTR(JS_TAG_PTR, rec->proto);
            proto_val = gc_copy_value(ctx, proto_val, to_free, to_end);
            rec->proto = (JSRecord *)JS_VALUE_GET_PTR(proto_val);
        }
        /* Copy table entries */
        uint32_t mask = (uint32_t)objhdr_cap56(rec->mist_hdr);
        for (uint32_t i = 0; i <= mask; i++) {
            JSValue k = rec->tab[i].key;
            if (!rec_key_is_empty(k) && !rec_key_is_tomb(k)) {
                rec->tab[i].key = gc_copy_value(ctx, k, to_free, to_end);
                rec->tab[i].val = gc_copy_value(ctx, rec->tab[i].val, to_free, to_end);
            }
        }
        break;
    }
    case OBJ_TEXT:
    case OBJ_BLOB:
    case OBJ_CODE:
        /* No internal references to scan */
        break;
    default:
        break;
    }
}

/* Cheney copying GC - collect garbage and compact live objects */
static int ctx_gc(JSContext *ctx) {
    JSRuntime *rt = ctx->rt;
    size_t old_used = ctx->heap_free - ctx->heap_base;

    /* Request new block from runtime */
    size_t new_size = ctx->next_block_size;
    uint8_t *new_block = buddy_alloc(&rt->buddy, new_size);
    if (!new_block) {
        /* Try with same size */
        new_size = ctx->current_block_size;
        new_block = buddy_alloc(&rt->buddy, new_size);
        if (!new_block) return -1;
    }

    uint8_t *to_base = new_block;
    uint8_t *to_free = new_block;
    uint8_t *to_end = new_block + new_size;

    /* Copy roots: global object, class prototypes, exception, etc. */
    ctx->global_obj = gc_copy_value(ctx, ctx->global_obj, &to_free, to_end);
    ctx->global_var_obj = gc_copy_value(ctx, ctx->global_var_obj, &to_free, to_end);
    ctx->function_proto = gc_copy_value(ctx, ctx->function_proto, &to_free, to_end);
    ctx->array_ctor = gc_copy_value(ctx, ctx->array_ctor, &to_free, to_end);
    ctx->regexp_ctor = gc_copy_value(ctx, ctx->regexp_ctor, &to_free, to_end);
    ctx->throw_type_error = gc_copy_value(ctx, ctx->throw_type_error, &to_free, to_end);
    ctx->eval_obj = gc_copy_value(ctx, ctx->eval_obj, &to_free, to_end);
    ctx->array_proto_values = gc_copy_value(ctx, ctx->array_proto_values, &to_free, to_end);

    for (int i = 0; i < JS_NATIVE_ERROR_COUNT; i++) {
        ctx->native_error_proto[i] = gc_copy_value(ctx, ctx->native_error_proto[i], &to_free, to_end);
    }

    /* Copy class prototypes */
    for (int i = 0; i < rt->class_count; i++) {
        ctx->class_proto[i] = gc_copy_value(ctx, ctx->class_proto[i], &to_free, to_end);
    }

    /* Copy value stack */
    for (int i = 0; i < ctx->value_stack_top; i++) {
        ctx->value_stack[i] = gc_copy_value(ctx, ctx->value_stack[i], &to_free, to_end);
    }

    /* Copy frame stack references */
    for (int i = 0; i <= ctx->frame_stack_top; i++) {
        struct VMFrame *frame = &ctx->frame_stack[i];
        frame->cur_func = gc_copy_value(ctx, frame->cur_func, &to_free, to_end);
        frame->this_obj = gc_copy_value(ctx, frame->this_obj, &to_free, to_end);
    }

    /* Cheney scan: scan copied objects to find more references */
    uint8_t *scan = to_base;
    while (scan < to_free) {
        gc_scan_object(ctx, scan, &to_free, to_end);
        scan += gc_object_size(scan);
    }

    /* Return old block to buddy allocator */
    buddy_free(&rt->buddy, ctx->heap_base, ctx->current_block_size);

    /* Update context with new block */
    size_t new_used = to_free - to_base;
    size_t recovered = old_used > new_used ? old_used - new_used : 0;

    ctx->heap_base = to_base;
    ctx->heap_free = to_free;
    ctx->heap_end = to_end;
    ctx->current_block_size = new_size;

    /* If <10% recovered, double next block size for future allocations */
    if (old_used > 0 && recovered < old_used / 10) {
        size_t doubled = new_size * 2;
        if (doubled <= (1ULL << BUDDY_MAX_ORDER)) {
            ctx->next_block_size = doubled;
        }
    }

#ifdef DUMP_GC
    printf("GC: old_used=%zu new_used=%zu recovered=%zu new_block_size=%zu\n",
           old_used, new_used, recovered, new_size);
#endif

    return 0;
}

JSRuntime *JS_NewRuntime2 (const JSMallocFunctions *mf, void *opaque) {
    JSRuntime *rt;
    JSMallocState ms;
@@ -2393,45 +2836,44 @@ int JS_GetStripInfo (JSRuntime *rt) { return rt->strip_flags; }

/* atom_get_free/is_free/set_free removed */

/* Note: the string contents are uninitialized */
static JSString *js_alloc_string_rt (JSRuntime *rt, int max_len) {
/* Allocate a string using bump allocation from context heap.
   Note: the string contents are uninitialized */
static JSString *js_alloc_string (JSContext *ctx, int max_len) {
    JSString *str;
    size_t size;
    /* Allocate packed UTF-32: 2 chars per 64-bit word.
       Calculate number of 64-bit words needed: (max_len + 1) / 2 */
    /* Allocate packed UTF-32: 2 chars per 64-bit word. */
    size_t data_words = (max_len + 1) / 2;
    /* Add explicit null termination capacity just in case?
       Let's allocate one extra word if odd to be safe?
       (max_len + 1)/2 covers max_len chars.
       If max_len=1, data_words=1. u[0] holds char 0 and 1.
    */
    size = sizeof (JSString) + data_words * sizeof (uint64_t);
    size_t size = sizeof(JSString) + data_words * sizeof(uint64_t);

    str = ctx_alloc(ctx, size);
    if (unlikely(!str)) {
        JS_ThrowOutOfMemory(ctx);
        return NULL;
    }

    str = js_malloc_rt (rt, size);
    if (unlikely (!str)) return NULL;
    str->header.ref_count = 1;
    str->pad = 0;
    /* Initialize objhdr_t with OBJ_TEXT type so JS_IsString can detect it */
    str->hdr = objhdr_make (max_len, OBJ_TEXT, false, false, false, false);
    str->hdr = objhdr_make(max_len, OBJ_TEXT, false, false, false, false);
    str->len = max_len;
    // Initialize content to 0?
    // memset(str->u, 0, data_words * sizeof(uint64_t));
    // Should usually be initialized by caller.

#ifdef DUMP_LEAKS
    list_add_tail (&str->link, &rt->string_list);
#endif
    return str;
}

static JSString *js_alloc_string (JSContext *ctx, int max_len) {
    JSString *p;
    p = js_alloc_string_rt (ctx->rt, max_len);
    if (unlikely (!p)) {
        JS_ThrowOutOfMemory (ctx);
        return NULL;
    }
    return p;
/* Legacy function for runtime-level string allocation (for stone arena use) */
static JSString *js_alloc_string_rt (JSRuntime *rt, int max_len) {
    /* For stone arena strings, use js_malloc_rt since they're not GC'd */
    size_t data_words = (max_len + 1) / 2;
    size_t size = sizeof(JSString) + data_words * sizeof(uint64_t);

    JSString *str = js_malloc_rt(rt, size);
    if (unlikely(!str)) return NULL;

    str->header.ref_count = 1;
    str->pad = 0;
    str->hdr = objhdr_make(max_len, OBJ_TEXT, false, false, false, false);
    str->len = max_len;

    return str;
}

/* same as JS_FreeValueRT() but faster */
@@ -2550,6 +2992,9 @@ void JS_FreeRuntime (JSRuntime *rt) {
    /* Stone text tables and arena are now per-context, freed in JS_FreeContext
     */

    /* Destroy buddy allocator */
    buddy_destroy(&rt->buddy);

    {
        JSMallocState ms = rt->malloc_state;
        rt->mf.js_free (&ms, rt);
@@ -2618,6 +3063,22 @@ JSContext *JS_NewContextRaw (JSRuntime *rt) {
        return NULL;
    }

    /* Allocate initial heap block for bump allocation */
    ctx->current_block_size = 1ULL << BUDDY_MIN_ORDER; /* 64KB */
    ctx->next_block_size = ctx->current_block_size;
    ctx->heap_base = buddy_alloc(&rt->buddy, ctx->current_block_size);
    if (!ctx->heap_base) {
        js_free_rt (rt, ctx->st_text_hash);
        js_free_rt (rt, ctx->st_text_array);
        js_free_rt (rt, ctx->value_stack);
        js_free_rt (rt, ctx->frame_stack);
        js_free_rt (rt, ctx->class_proto);
        js_free_rt (rt, ctx);
        return NULL;
    }
    ctx->heap_free = ctx->heap_base;
    ctx->heap_end = ctx->heap_base + ctx->current_block_size;

    JS_AddIntrinsicBasicObjects (ctx);
    rt->js = ctx;
    return ctx;
@@ -2744,6 +3205,14 @@ void JS_FreeContext (JSContext *ctx) {
    js_free_rt (rt, ctx->st_text_hash);
    js_free_rt (rt, ctx->st_text_array);

    /* Free heap block back to buddy allocator */
    if (ctx->heap_base) {
        buddy_free(&rt->buddy, ctx->heap_base, ctx->current_block_size);
        ctx->heap_base = NULL;
        ctx->heap_free = NULL;
        ctx->heap_end = NULL;
    }

    list_del (&ctx->link);
    remove_gc_object (&ctx->header);
    js_free_rt (ctx->rt, ctx);
@@ -3623,33 +4092,34 @@ JSValue JS_NewObjectProto (JSContext *ctx, JSValue proto) {
    return JS_NewObjectProtoClass (ctx, proto, JS_CLASS_OBJECT);
}

/* Create an intrinsic array with specified capacity */
/* Create an intrinsic array with specified capacity
   Uses bump allocation - values are inline after the JSArray struct */
JSValue JS_NewArrayLen (JSContext *ctx, uint32_t len) {
    JSRuntime *rt = ctx->rt;
    JSArray *arr;
    uint32_t cap;

    arr = js_mallocz (ctx, sizeof (JSArray));
    if (!arr) return JS_EXCEPTION;
    arr->header.ref_count = 1;
    arr->len = len;
    cap = len > 0 ? len : JS_ARRAY_INITIAL_SIZE;
    arr->mist_hdr = objhdr_make (cap, OBJ_ARRAY, false, false, false, false);
    if (cap > 0) {
        arr->values = js_mallocz (ctx, sizeof (JSValue) * cap);
        if (!arr->values) {
            js_free (ctx, arr);
            return JS_EXCEPTION;
        }
        /* Initialize all values to null */
        for (uint32_t i = 0; i < len; i++) {
            arr->values[i] = JS_NULL;
        }
    } else {
        arr->values = NULL;
    size_t values_size = sizeof(JSValue) * cap;
    size_t total_size = sizeof(JSArray) + values_size;

    arr = ctx_alloc(ctx, total_size);
    if (!arr) return JS_EXCEPTION;

    arr->header.ref_count = 1;
    arr->header.gc_obj_type = JS_GC_OBJ_TYPE_ARRAY;
    arr->len = len;
    arr->mist_hdr = objhdr_make(cap, OBJ_ARRAY, false, false, false, false);
    arr->free_mark = 0;

    /* Values are inline right after the struct */
    arr->values = (JSValue *)((uint8_t *)arr + sizeof(JSArray));

    /* Initialize all values to null */
    for (uint32_t i = 0; i < cap; i++) {
        arr->values[i] = JS_NULL;
    }
    add_gc_object (rt, &arr->header, JS_GC_OBJ_TYPE_ARRAY);
    return JS_MKPTR (JS_TAG_OBJECT, arr);

    return JS_MKPTR(JS_TAG_OBJECT, arr);
}

JSValue JS_NewArray (JSContext *ctx) { return JS_NewArrayLen (ctx, 0); }