New way to track actor bad-memory accesses

This commit is contained in:
2026-02-13 09:03:33 -06:00
parent 1ba060668e
commit 291304f75d
3 changed files with 139 additions and 50 deletions

66
source/buddy_debug.c Normal file
View File

@@ -0,0 +1,66 @@
/* buddy_debug.c — ASCII visualization for buddy allocator
Included from runtime.c only when DUMP_BUDDY is defined. */
/* Dump a one-line ASCII picture of the buddy pool's occupancy to stderr.
   op:    label for the triggering operation (e.g. "alloc", "free")
   block: the block just operated on -- only its offset/size are reported
   order: log2 of that block's size in bytes
   Each of the 64 bar characters condenses num_slots/64 min-order slots by
   majority vote: '.' = mostly free, '#' = mostly allocated.
   NOTE(review): when num_slots is not a multiple of 64, the trailing
   remainder slots are not rendered or counted in the free total. */
static void buddy_dump(BuddyPool *pool, const char *op,
uint8_t *block, uint8_t order) {
if (!pool || !pool->base) return;
int levels = pool->max_order - BUDDY_MIN_ORDER + 1;
/* Bitmap: one byte per min-block slot (0 = allocated, 1 = free) */
size_t num_slots = pool->total_size >> BUDDY_MIN_ORDER;
if (num_slots == 0) return;
/* Heap-allocate the bitmap: pool sizes vary, so the previous unbounded
   alloca() here risked overflowing the stack on large pools. calloc
   also replaces the separate memset. */
uint8_t *bitmap = calloc(num_slots, 1);
if (!bitmap) return; /* debug-only path: silently skip the dump on OOM */
/* Walk all free lists and mark free slots */
for (int i = 0; i < levels; i++) {
for (BuddyBlock *p = pool->free_lists[i]; p; p = p->next) {
size_t off = (uint8_t *)p - pool->base;
size_t slot = off >> BUDDY_MIN_ORDER;
size_t count = 1ULL << i; /* number of min-block slots in this block */
for (size_t s = 0; s < count && (slot + s) < num_slots; s++)
bitmap[slot + s] = 1;
}
}
/* Render 64-char ASCII bar */
size_t slots_per_char = num_slots / 64;
if (slots_per_char == 0) slots_per_char = 1;
char bar[65];
size_t total_free_slots = 0;
for (int c = 0; c < 64; c++) {
size_t base_slot = c * slots_per_char;
size_t free_count = 0;
for (size_t s = 0; s < slots_per_char && (base_slot + s) < num_slots; s++) {
if (bitmap[base_slot + s]) free_count++;
}
total_free_slots += free_count;
/* Majority vote: if more than half are free, show free */
bar[c] = (free_count > slots_per_char / 2) ? '.' : '#';
}
bar[64] = '\0';
size_t blk_offset = block - pool->base;
size_t blk_size = 1ULL << order;
size_t total_free = total_free_slots << BUDDY_MIN_ORDER;
size_t total_alloc = pool->total_size - total_free;
fprintf(stderr, "buddy %s: pool %zuKB order %u (%zuKB) @ +%zuKB allocs=%u\n",
op, pool->total_size / 1024, order, blk_size / 1024,
blk_offset / 1024, pool->alloc_count);
fprintf(stderr, " [%s]\n", bar);
fprintf(stderr, " alloc: %zuKB free: %zuKB total: %zuKB\n",
total_alloc / 1024, total_free / 1024, pool->total_size / 1024);
/* Print free list population */
fprintf(stderr, " free lists:");
for (int i = 0; i < levels; i++) {
int count = 0;
for (BuddyBlock *p = pool->free_lists[i]; p; p = p->next)
count++;
if (count > 0)
fprintf(stderr, " o%d:%d", i + BUDDY_MIN_ORDER, count);
}
fprintf(stderr, "\n");
free(bitmap);
}

View File

@@ -98,9 +98,9 @@
#include <stdlib.h>
#include <sys/mman.h>
#include <unistd.h>
#define POISON_HEAP
/* POISON_HEAP: Use ASan's memory poisoning to detect stale pointer access */
#ifdef POISON_HEAP
/* HEAP_CHECK: validate heap pointers at JS_VALUE_GET_* macros */
// #define HEAP_CHECK
#if defined(__has_feature)
#if __has_feature(address_sanitizer)
#define HAVE_ASAN 1
@@ -109,22 +109,6 @@
#define HAVE_ASAN 1
#endif
#ifdef HAVE_ASAN
#include <sanitizer/asan_interface.h>
#define gc_poison_region(addr, size) __asan_poison_memory_region((addr), (size))
#define gc_unpoison_region(addr, size) __asan_unpoison_memory_region((addr), (size))
#else
/* Fallback: no-op when not building with ASan */
#define gc_poison_region(addr, size) ((void)0)
#define gc_unpoison_region(addr, size) ((void)0)
#endif
static inline size_t poison_page_align(size_t size) {
size_t ps = (size_t)sysconf(_SC_PAGESIZE);
return (size + ps - 1) & ~(ps - 1);
}
#endif /* POISON_HEAP */
#ifdef HAVE_ASAN
static struct JSContext *__asan_js_ctx;
#endif
@@ -303,14 +287,27 @@ typedef enum JSErrorEnum {
/* Forward declaration for bytecode freeing */
#define JS_VALUE_GET_BLOB(v) ((JSBlob *)JS_VALUE_GET_PTR (v))
#define JS_VALUE_GET_CODE(v) (JS_VALUE_GET_PTR (v))
#ifdef HEAP_CHECK
void heap_check_fail(void *ptr, struct JSContext *ctx);
#define JS_VALUE_GET_ARRAY(v) ((JSArray *)heap_check_chase(ctx, v))
#define JS_VALUE_GET_OBJ(v) ((JSRecord *)heap_check_chase(ctx, v))
#define JS_VALUE_GET_TEXT(v) ((JSText *)heap_check_chase(ctx, v))
#define JS_VALUE_GET_FUNCTION(v) ((JSFunction *)heap_check_chase(ctx, v))
#define JS_VALUE_GET_FRAME(v) ((JSFrame *)heap_check_chase(ctx, v))
#define JS_VALUE_GET_STRING(v) ((JSText *)heap_check_chase(ctx, v))
#define JS_VALUE_GET_RECORD(v) ((JSRecord *)heap_check_chase(ctx, v))
#else
#define JS_VALUE_GET_ARRAY(v) ((JSArray *)chase (v))
#define JS_VALUE_GET_OBJ(v) ((JSRecord *)chase (v))
#define JS_VALUE_GET_TEXT(v) ((JSText *)chase (v))
#define JS_VALUE_GET_BLOB(v) ((JSBlob *)JS_VALUE_GET_PTR (v))
#define JS_VALUE_GET_FUNCTION(v) ((JSFunction *)chase (v))
#define JS_VALUE_GET_FRAME(v) ((JSFrame *)chase (v))
#define JS_VALUE_GET_CODE(v) (JS_VALUE_GET_PTR (v))
#define JS_VALUE_GET_STRING(v) ((JSText *)chase (v))
#define JS_VALUE_GET_RECORD(v) ((JSRecord *)chase (v))
#endif
/* Compatibility: JS_TAG_STRING is an alias for text type checks */
#define JS_TAG_STRING JS_TAG_STRING_IMM
@@ -1219,6 +1216,17 @@ static inline int is_ct_ptr (JSContext *ctx, void *ptr) {
return (uint8_t *)ptr >= ctx->ct_base && (uint8_t *)ptr < ctx->ct_end;
}
#ifdef HEAP_CHECK
/* HEAP_CHECK wrapper around chase(): verify that the chased pointer lands
   inside either the bump-allocation heap [heap_base, heap_free) or the
   compile-time pool [ct_base, ct_end) before handing it back. Any
   out-of-range pointer aborts via heap_check_fail(). */
static inline objhdr_t *heap_check_chase(JSContext *ctx, JSValue v) {
objhdr_t *oh = chase(v);
uint8_t *addr = (uint8_t *)oh;
int in_heap = addr >= ctx->heap_base && addr < ctx->heap_free;
int in_ct_pool = addr >= ctx->ct_base && addr < ctx->ct_end;
if (!in_heap && !in_ct_pool)
heap_check_fail(oh, ctx);
return oh;
}
#endif
/* Intern a UTF-32 string as a stone text, returning a JSValue string */
/* Create a stoned, interned key from a UTF-8 C string.
@@ -1252,8 +1260,6 @@ typedef struct JSRegExp {
#define obj_is_stone(rec) objhdr_s ((rec)->mist_hdr)
#define obj_set_stone(rec) ((rec)->mist_hdr = objhdr_set_s ((rec)->mist_hdr, true))
#define JS_VALUE_GET_RECORD(v) ((JSRecord *)chase (v))
/* Get prototype from object (the cast to JSRecord is safe for any object
 * type that shares JSRecord's layout for the proto field) */
#define JS_OBJ_GET_PROTO(p) (JS_IsNull(((JSRecord *)(p))->proto) ? NULL : (JSRecord *)JS_VALUE_GET_PTR(((JSRecord *)(p))->proto))

View File

@@ -33,6 +33,50 @@
#include "buddy_debug.c"
#endif
#ifdef HEAP_CHECK
/* Report an out-of-range heap pointer and abort.
   ptr: the offending (already-chased) pointer
   ctx: context whose heap/ct ranges and current frame are reported
   Prints the valid address ranges plus a best-effort JS stack trace
   walked from ctx->reg_current_frame, then abort()s. Never returns.
   Called from heap_check_chase() when HEAP_CHECK is enabled.
   Fix: removed the local `uint8_t *p`, which was assigned but never used. */
void heap_check_fail(void *ptr, JSContext *ctx) {
fprintf(stderr, "\n=== HEAP_CHECK: invalid heap pointer ===\n");
fprintf(stderr, " pointer: %p\n", ptr);
fprintf(stderr, " heap: [%p, %p)\n",
(void *)ctx->heap_base, (void *)ctx->heap_free);
fprintf(stderr, " ct_pool: [%p, %p)\n",
(void *)ctx->ct_base, (void *)ctx->ct_end);
if (!JS_IsNull(ctx->reg_current_frame)) {
fprintf(stderr, " stack trace:\n");
JSFrame *frame = (JSFrame *)JS_VALUE_GET_PTR(ctx->reg_current_frame);
/* Innermost frame uses the live pc; outer frames recover theirs from
   frame->address below. */
uint32_t pc = ctx->current_register_pc;
int first = 1;
while (frame) {
/* Sanity-check the frame's function header before dereferencing it
   further -- the heap may already be corrupt at this point. */
objhdr_t hdr = *(objhdr_t *)JS_VALUE_GET_PTR(frame->function);
if (objhdr_type(hdr) != OBJ_FUNCTION) break;
JSFunction *fn = (JSFunction *)JS_VALUE_GET_PTR(frame->function);
const char *name = NULL, *file = NULL;
uint16_t line = 0;
if (fn->kind == JS_FUNC_KIND_REGISTER && fn->u.reg.code) {
JSCodeRegister *code = fn->u.reg.code;
file = code->filename_cstr;
name = code->name_cstr;
/* Outer frames: the return pc is packed in the high 16 bits of
   frame->address -- presumably set at call time; TODO confirm. */
if (!first)
pc = (uint32_t)(JS_VALUE_GET_INT(frame->address) >> 16);
if (code->line_table && pc < code->instr_count)
line = code->line_table[pc].line;
}
fprintf(stderr, " %s (%s:%u)\n",
name ? name : "<anonymous>",
file ? file : "<unknown>", line);
if (JS_IsNull(frame->caller)) break;
frame = (JSFrame *)JS_VALUE_GET_PTR(frame->caller);
first = 0;
pc = 0;
}
}
fprintf(stderr, "=======================================\n");
fflush(stderr);
abort();
}
#endif
static inline JS_BOOL JS_IsInteger (JSValue v) {
if (JS_VALUE_GET_TAG(v) == JS_TAG_INT) return true;
if (JS_VALUE_GET_TAG(v) != JS_TAG_SHORT_FLOAT) return false;
@@ -1176,32 +1220,14 @@ static size_t buddy_max_block(BuddyAllocator *b) {
/* ============================================================
Heap block allocation wrappers
In POISON_HEAP mode, use malloc so poisoned memory stays poisoned.
Otherwise use buddy allocator for efficiency.
============================================================ */
/* Allocate one heap block of `size` bytes.
   POISON_HEAP build: page-align the request and mmap a brand-new region,
   so virtual addresses are never recycled. Returns NULL on failure.
   Normal build: delegate to the runtime's buddy allocator. */
static void *heap_block_alloc(JSRuntime *rt, size_t size) {
#ifdef POISON_HEAP
(void)rt;
size = poison_page_align(size);
void *region = mmap(NULL, size, PROT_READ | PROT_WRITE,
MAP_ANON | MAP_PRIVATE, -1, 0);
if (region == MAP_FAILED)
return NULL;
return region;
#else
return buddy_alloc(&rt->buddy, size);
#endif
}
/* Release one heap block previously returned by heap_block_alloc().
   POISON_HEAP build: the region is ASan-poisoned so any later access
   traps, but it is deliberately never munmap'd (see comment below).
   Normal build: return the block to the buddy allocator. */
static void heap_block_free(JSRuntime *rt, void *ptr, size_t size) {
#ifdef POISON_HEAP
(void)rt;
/* mmap'd memory is intentionally never munmap'd so virtual addresses
are never reused (preventing stale pointer aliasing). Pages stay
resident because chase() reads forwarding pointers from old blocks. */
gc_poison_region(ptr, size);
#else
buddy_free(&rt->buddy, ptr, size);
#endif
}
/* ============================================================
@@ -1428,16 +1454,10 @@ int ctx_gc (JSContext *ctx, int allow_grow, size_t alloc_size) {
while (new_size < alloc_size && new_size < buddy_max_block(&ctx->rt->buddy))
new_size *= 2;
}
#ifdef POISON_HEAP
new_size = poison_page_align(new_size);
#endif
uint8_t *new_block = heap_block_alloc (rt, new_size);
if (!new_block) {
/* Try with same size */
new_size = ctx->current_block_size;
#ifdef POISON_HEAP
new_size = poison_page_align(new_size);
#endif
new_block = heap_block_alloc (rt, new_size);
if (!new_block) return -1;
}
@@ -1731,9 +1751,6 @@ JSContext *JS_NewContextRawWithHeapSize (JSRuntime *rt, size_t heap_size) {
}
/* Allocate initial heap block for bump allocation */
#ifdef POISON_HEAP
heap_size = poison_page_align(heap_size);
#endif
ctx->current_block_size = heap_size;
ctx->next_block_size = ctx->current_block_size;
ctx->heap_base = heap_block_alloc (rt, ctx->current_block_size);