Merge branch 'heap_blob'

This commit is contained in:
2026-02-17 15:59:38 -06:00
3 changed files with 837 additions and 237 deletions

View File

@@ -898,12 +898,12 @@ typedef struct JSArray {
JSValue values[]; /* inline flexible array member */
} JSArray;
/* JSBlob — not allocated on GC heap (blobs use JSRecord + opaque).
Struct kept for reference; gc_object_size/gc_scan_object do not handle OBJ_BLOB. */
/* JSBlob — inline bit data on the GC heap.
cap56 = capacity in bits, S bit = stone (immutable). */
typedef struct JSBlob {
objhdr_t mist_hdr;
word_t length;
uint8_t bits[];
objhdr_t mist_hdr; /* type=OBJ_BLOB, cap56=capacity_bits, S=stone */
word_t length; /* used bits */
word_t bits[]; /* inline bit data, ceil(cap56/64) words */
} JSBlob;
typedef struct JSText {

File diff suppressed because it is too large. Load Diff

View File

@@ -5100,6 +5100,380 @@ run("json roundtrip preserves types", function() {
assert_eq(decoded.sub.a, 1, "sub-object preserved")
})
// ============================================================================
// BLOB - GC HEAP INTEGRATION
// ============================================================================
// --- Blob construction, single-bit writes, and stone (freeze) ---
run("blob basic create and stone", function() {
    var buf = blob()
    if (!is_blob(buf)) fail("empty blob is not a blob")
    buf.write_bit(true)
    buf.write_bit(false)
    buf.write_bit(true)
    stone(buf)
    var n = length(buf)
    if (n != 3) fail("blob length should be 3, got " + text(n))
    if (!buf.read_logical(0)) fail("bit 0 should be true")
    if (buf.read_logical(1)) fail("bit 1 should be false")
    if (!buf.read_logical(2)) fail("bit 2 should be true")
})
// --- Two 64-bit numbers round-trip at bit offsets 0 and 64 ---
run("blob write_number read_number", function() {
    var buf = blob()
    buf.write_number(3.14)
    buf.write_number(2.718)
    stone(buf)
    if (128 != length(buf)) fail("expected 128 bits")
    var first = buf.read_number(0)
    var second = buf.read_number(64)
    if (first != 3.14) fail("pi read back wrong: " + text(first))
    if (second != 2.718) fail("e read back wrong: " + text(second))
})
// --- Signed fixed-width integers round-trip, including a negative value ---
run("blob write_fit read_fit", function() {
    var buf = blob()
    buf.write_fit(42, 8)
    buf.write_fit(-7, 8)
    stone(buf)
    if (42 != buf.read_fit(0, 8)) fail("fit 42 failed")
    if (-7 != buf.read_fit(8, 8)) fail("fit -7 failed")
})
// --- Text written into a blob reads back identically ---
run("blob write_text read_text", function() {
    var store = blob()
    store.write_text("hello world")
    stone(store)
    var out = store.read_text(0)
    if (out != "hello world") fail("text roundtrip failed: " + out)
})
// --- blob(text) constructor: 3 chars -> 24 bits, and text(blob) inverts it ---
run("blob from text", function() {
    var made = blob("abc")
    stone(made)
    if (24 != length(made)) fail("blob from text length wrong")
    var back = text(made)
    if (back != "abc") fail("blob to text failed: " + back)
})
// --- blob(src, offset, count) copies a bit range out of another blob ---
run("blob copy slice", function() {
    var base = blob()
    base.write_fit(100, 16)
    base.write_fit(200, 16)
    base.write_fit(300, 16)
    stone(base)
    var part = blob(base, 16, 32)
    stone(part)
    if (part.read_fit(0, 16) != 200) fail("blob slice failed")
})
// --- write_blob appends one blob's bits after the destination's own data ---
run("blob write_blob", function() {
    var src = blob()
    src.write_fit(1, 8)
    src.write_fit(2, 8)
    var dst = blob()
    dst.write_fit(3, 8)
    dst.write_blob(src)
    stone(dst)
    if (3 != dst.read_fit(0, 8)) fail("first byte wrong")
    if (1 != dst.read_fit(8, 8)) fail("second byte wrong")
    if (2 != dst.read_fit(16, 8)) fail("third byte wrong")
})
// --- write_pad(8) zero-fills up to the next 8-bit boundary; pad? verifies it ---
run("blob write_pad pad?", function() {
    var buf = blob()
    buf.write_fit(7, 4)
    buf.write_pad(8)
    stone(buf)
    var n = length(buf)
    if (n != 8) fail("pad didn't align to 8, got " + text(n))
    if (!buf["pad?"](4, 8)) fail("pad? should be true")
})
// --- shorthand writers: w16 (16 bits) + w32 (32 bits) + wf (32 bits) = 80 ---
run("blob w16 w32 wf", function() {
    var buf = blob()
    buf.w16(1000)
    buf.w32(100000)
    buf.wf(1.5)
    stone(buf)
    var n = length(buf)
    if (n != 80) fail("expected 80 bits, got " + text(n))
})
// --- blobs are their own category: is_data must reject them ---
run("blob is_data false for blob", function() {
    var probe = blob()
    if (is_data(probe)) fail("blob should not be is_data")
})
// --- text(blob, "h") renders bytes as hex: "AB" -> 0x41 0x42 -> "4142" ---
run("blob text hex format", function() {
    var buf = blob("AB")
    stone(buf)
    var encoded = text(buf, "h")
    if (encoded != "4142") fail("hex encoding wrong: " + encoded)
})
// --- text(blob, "b") renders individual bits in write order ---
run("blob text binary format", function() {
    var buf = blob()
    buf.write_bit(true)
    buf.write_bit(false)
    buf.write_bit(true)
    buf.write_bit(true)
    stone(buf)
    var encoded = text(buf, "b")
    if (encoded != "1011") fail("binary encoding wrong: " + encoded)
})
// --- blob(n) reserves n bits of capacity but starts with length 0 ---
run("blob(capacity) preallocates", function() {
    var buf = blob(1024)
    var k = 0
    if (!is_blob(buf)) fail("capacity blob not a blob")
    if (0 != length(buf)) fail("capacity blob should start empty")
    for (k = 0; k < 128; k = k + 1) {
        buf.write_fit(k, 8)
    }
    if (1024 != length(buf)) fail("after fill length wrong")
})
// --- blob(n, bool) creates n bits all set to the given value ---
run("blob(length, bool) fill", function() {
    var ones = blob(16, true)
    var k = 0
    stone(ones)
    if (16 != length(ones)) fail("filled blob length wrong")
    for (k = 0; k < 16; k = k + 1) {
        if (!ones.read_logical(k)) fail("bit " + text(k) + " should be true")
    }
    var zeros = blob(8, false)
    stone(zeros)
    for (k = 0; k < 8; k = k + 1) {
        if (zeros.read_logical(k)) fail("zero bit " + text(k) + " should be false")
    }
})
// --- GC stress tests: verify blobs survive collection and don't corrupt ---
// Pattern used throughout: write known data into a blob, allocate lots of
// short-lived objects to create heap churn (intended to trigger at least one
// collection — assumes the allocation volume is enough; tune if GC thresholds
// change), then re-read the blob and compare against the known values.
run("gc blob survives collection", function() {
var b = blob()
var garbage = null
var i = 0
var v1 = null
var t = null
b.write_number(123.456)
b.write_text("test data")
// Trigger GC pressure by allocating many objects
for (i = 0; i < 500; i = i + 1) {
garbage = {a: i, b: text(i), c: [i, i+1, i+2]}
}
// blob should still be valid after GC
b.write_number(789.012)
stone(b)
v1 = b.read_number(0)
if (v1 != 123.456) fail("blob data corrupted after gc: " + text(v1))
// the text was written immediately after a 64-bit number, so it begins at bit 64
t = b.read_text(64)
if (t != "test data") fail("blob text corrupted after gc: " + t)
})
// Interleaves blob appends with garbage allocation so the blob is repeatedly
// grown (and possibly reallocated/moved) across collection cycles; afterwards
// every 16-bit slot must still hold its index.
run("gc blob growth across collections", function() {
var b = blob()
var i = 0
var junk = null
var v = null
for (i = 0; i < 200; i = i + 1) {
b.write_fit(i, 16)
junk = [text(i), {v: i}, text(i) + "_end"]
}
stone(b)
for (i = 0; i < 200; i = i + 1) {
v = b.read_fit(i * 16, 16)
if (v != i) fail("blob growth gc: slot " + text(i) + " = " + text(v))
}
})
// Keeps 100 stoned blobs live simultaneously (each tagged with values derived
// from its index: i*7 and i*13), churns the heap, then re-checks every blob.
// Catches cross-blob corruption and mis-scanned references after collection.
run("gc many blobs alive simultaneously", function() {
var blobs = []
var i = 0
var b = null
var trash = null
var v1 = null
var v2 = null
for (i = 0; i < 100; i = i + 1) {
b = blob()
b.write_fit(i * 7, 16)
b.write_fit(i * 13, 16)
stone(b)
blobs[i] = b
}
// heap churn between fill and verify
for (i = 0; i < 200; i = i + 1) {
trash = {x: text(i), y: [i]}
}
for (i = 0; i < 100; i = i + 1) {
v1 = blobs[i].read_fit(0, 16)
v2 = blobs[i].read_fit(16, 16)
if (v1 != i * 7) fail("multi blob " + text(i) + " v1 = " + text(v1))
if (v2 != i * 13) fail("multi blob " + text(i) + " v2 = " + text(v2))
}
})
// Pairs each blob with an ordinary record in the same container, churns the
// heap, then verifies both halves: blob bits must not bleed into neighboring
// objects and vice versa (e.g. a bad gc_object_size walking past a blob).
run("gc blob not polluting other objects", function() {
var results = []
var i = 0
var b = null
var obj = null
var tmp = null
var entry = null
var bv = null
var bt = null
for (i = 0; i < 50; i = i + 1) {
b = blob()
b.write_fit(i, 16)
b.write_text("item" + text(i))
stone(b)
obj = {index: i, name: "obj" + text(i)}
results[i] = {blob_val: b, obj_val: obj}
}
// heap churn between fill and verify
for (i = 0; i < 300; i = i + 1) {
tmp = {a: text(i), b: [i, i]}
}
for (i = 0; i < 50; i = i + 1) {
entry = results[i]
bv = entry.blob_val.read_fit(0, 16)
if (bv != i) fail("pollute test blob " + text(i) + " = " + text(bv))
// the text starts at bit 16, right after the 16-bit fit
bt = entry.blob_val.read_text(16)
if (bt != "item" + text(i)) fail("pollute test text " + text(i))
if (entry.obj_val.index != i) fail("pollute test obj index " + text(i))
if (entry.obj_val.name != "obj" + text(i)) fail("pollute test obj name " + text(i))
}
})
run("gc dead blobs are collected", function() {
// Verify that dead blobs don't cause leaks by checking heap stays bounded.
// We do two phases: allocate a batch, check heap, allocate another, check again.
// If blobs leaked, the second batch would cause unbounded growth.
// NOTE(review): assumes gc_stats() reports a heap_size field in comparable
// units across calls — confirm against the gc_stats implementation.
var i = 0
var b = null
var junk = null
var stats1 = null
var stats2 = null
// initial call discards warm-up state so the two measured phases are comparable
gc_stats()
for (i = 0; i < 200; i = i + 1) {
b = blob(1024)
b.write_fit(i, 16)
junk = {a: text(i), b: [i]}
}
stats1 = gc_stats()
// Second identical batch — should reuse collected space
for (i = 0; i < 200; i = i + 1) {
b = blob(1024)
b.write_fit(i, 16)
junk = {a: text(i), b: [i]}
}
stats2 = gc_stats()
// Heap should not have grown more than 4x between phases
// (some growth is normal from doubling, but not unbounded)
if (stats2.heap_size > stats1.heap_size * 4) {
fail("heap grew too much: " + text(stats1.heap_size) + " -> " + text(stats2.heap_size))
}
})
// write_blob copies from a live source blob into a growing destination; both
// blobs must remain intact even if the copy path allocates (dst growth) and
// a collection moves either object mid-test.
run("gc blob write_blob both survive", function() {
var src = blob()
var i = 0
var v = null
var dst = null
for (i = 0; i < 100; i = i + 1) {
src.write_fit(i, 16)
}
dst = blob()
dst.write_fit(99, 16)
dst.write_blob(src)
stone(dst)
if (dst.read_fit(0, 16) != 99) fail("dst first word wrong")
// appended data starts after dst's own leading 16-bit value
for (i = 0; i < 100; i = i + 1) {
v = dst.read_fit(16 + i * 16, 16)
if (v != i) fail("write_blob gc: word " + text(i) + " = " + text(v))
}
})
// read_blob allocates a fresh blob per call; creating 50 slices from one big
// stoned blob interleaves those allocations, then each slice is verified so
// a collection during slicing cannot have corrupted source or slices.
run("gc blob read_blob across allocation", function() {
var big = blob()
var i = 0
var slices = []
var s = null
var v = null
for (i = 0; i < 200; i = i + 1) {
big.write_fit(i, 16)
}
stone(big)
for (i = 0; i < 50; i = i + 1) {
s = big.read_blob(i * 16, (i + 1) * 16)
stone(s)
slices[i] = s
}
for (i = 0; i < 50; i = i + 1) {
// slice i was cut starting at bit i*16, so its first 16-bit value is i
v = slices[i].read_fit(0, 16)
if (v != i) fail("read_blob gc slice " + text(i) + " = " + text(v))
}
})
// blob(n, function) fill form: only the resulting length is checked here.
run("gc blob with random fill", function() {
var b = blob(256, function() { return 42 })
stone(b)
if (length(b) != 256) fail("random fill length wrong")
})
// Blobs stored as record values must be traced through the record by the GC;
// junk allocations between inserts provoke collections while the record grows.
run("gc blob in record values", function() {
var rec = {}
var i = 0
var b = null
var junk = null
var v = null
for (i = 0; i < 50; i = i + 1) {
b = blob()
b.write_fit(i * 3, 16)
stone(b)
rec["k" + text(i)] = b
junk = {x: text(i), y: [i, i+1]}
}
for (i = 0; i < 50; i = i + 1) {
v = rec["k" + text(i)].read_fit(0, 16)
if (v != i * 3) fail("blob in record " + text(i) + " = " + text(v))
}
})
// Blobs stored as array elements must be traced through the array; text
// garbage is churned after the fill so verification happens post-collection.
run("gc blob in array elements", function() {
var arr = []
var i = 0
var b = null
var garbage = null
var v = null
for (i = 0; i < 100; i = i + 1) {
b = blob()
b.write_number(i * 1.5)
stone(b)
arr[i] = b
}
// heap churn between fill and verify
for (i = 0; i < 500; i = i + 1) {
garbage = text(i) + text(i)
}
for (i = 0; i < 100; i = i + 1) {
v = arr[i].read_number(0)
// i * 1.5 is exactly representable, so equality comparison is safe here
if (v != i * 1.5) fail("blob in array " + text(i) + " = " + text(v))
}
})
// Grows one blob through many reallocations (500 byte-sized appends), then
// churns the heap before stoning — exercising the path where a reference to
// a moved blob must be followed (forwarding) rather than reading stale bits.
run("gc blob forward pointer chase", function() {
var b = blob()
var i = 0
var junk = null
var v = null
for (i = 0; i < 500; i = i + 1) {
b.write_fit(i % 128, 8)
}
for (i = 0; i < 300; i = i + 1) {
junk = {a: [i, text(i)], b: text(i) + "!"}
}
stone(b)
for (i = 0; i < 500; i = i + 1) {
v = b.read_fit(i * 8, 8)
if (v != i % 128) fail("fwd chase " + text(i) + " = " + text(v))
}
})
// ============================================================================
// SUMMARY
// ============================================================================