New way to track bad memory accesses in actors

This commit is contained in:
2026-02-13 09:03:33 -06:00
parent 1ba060668e
commit 291304f75d
3 changed files with 139 additions and 50 deletions

View File

@@ -33,6 +33,50 @@
#include "buddy_debug.c"
#endif
#ifdef HEAP_CHECK
/* Diagnostic abort path for HEAP_CHECK builds.
 *
 * Called when `ptr` fails the heap-pointer validity check.  Prints the
 * offending pointer, the bounds of the bump heap and of the compile-time
 * pool, and — when a register frame is active — a best-effort script stack
 * trace, then abort()s.  Never returns.
 *
 * Fix: removed the unused local `uint8_t *p` (it was assigned from `ptr`
 * and never referenced; all output uses `ptr` directly).
 */
void heap_check_fail(void *ptr, JSContext *ctx) {
    fprintf(stderr, "\n=== HEAP_CHECK: invalid heap pointer ===\n");
    fprintf(stderr, " pointer: %p\n", ptr);
    fprintf(stderr, " heap: [%p, %p)\n",
            (void *)ctx->heap_base, (void *)ctx->heap_free);
    fprintf(stderr, " ct_pool: [%p, %p)\n",
            (void *)ctx->ct_base, (void *)ctx->ct_end);
    if (!JS_IsNull(ctx->reg_current_frame)) {
        fprintf(stderr, " stack trace:\n");
        JSFrame *frame = (JSFrame *)JS_VALUE_GET_PTR(ctx->reg_current_frame);
        /* Innermost frame uses the live pc; outer frames recover theirs
         * from the saved return address below. */
        uint32_t pc = ctx->current_register_pc;
        int first = 1;
        while (frame) {
            /* Bail out if the frame's function slot does not actually hold
             * a function object (corrupt or foreign frame). */
            objhdr_t hdr = *(objhdr_t *)JS_VALUE_GET_PTR(frame->function);
            if (objhdr_type(hdr) != OBJ_FUNCTION) break;
            JSFunction *fn = (JSFunction *)JS_VALUE_GET_PTR(frame->function);
            const char *name = NULL, *file = NULL;
            uint16_t line = 0;
            if (fn->kind == JS_FUNC_KIND_REGISTER && fn->u.reg.code) {
                JSCodeRegister *code = fn->u.reg.code;
                file = code->filename_cstr;
                name = code->name_cstr;
                /* NOTE(review): for non-innermost frames the pc is taken
                 * from the high 16 bits of the saved return address —
                 * assumes that encoding; confirm against the call site. */
                if (!first)
                    pc = (uint32_t)(JS_VALUE_GET_INT(frame->address) >> 16);
                if (code->line_table && pc < code->instr_count)
                    line = code->line_table[pc].line;
            }
            fprintf(stderr, " %s (%s:%u)\n",
                    name ? name : "<anonymous>",
                    file ? file : "<unknown>", line);
            if (JS_IsNull(frame->caller)) break;
            frame = (JSFrame *)JS_VALUE_GET_PTR(frame->caller);
            first = 0;
            pc = 0; /* frames without register code report line 0 */
        }
    }
    fprintf(stderr, "=======================================\n");
    fflush(stderr);
    abort();
}
#endif
static inline JS_BOOL JS_IsInteger (JSValue v) {
if (JS_VALUE_GET_TAG(v) == JS_TAG_INT) return true;
if (JS_VALUE_GET_TAG(v) != JS_TAG_SHORT_FLOAT) return false;
@@ -1176,32 +1220,14 @@ static size_t buddy_max_block(BuddyAllocator *b) {
/* ============================================================
Heap block allocation wrappers
In POISON_HEAP mode, use malloc so poisoned memory stays poisoned.
Otherwise use buddy allocator for efficiency.
============================================================ */
/* Obtain a raw heap block of at least `size` bytes.
 * Normal builds delegate to the buddy allocator; POISON_HEAP builds map
 * fresh pages instead, so poisoned memory stays poisoned.
 * Returns NULL on failure. */
static void *heap_block_alloc(JSRuntime *rt, size_t size) {
#ifndef POISON_HEAP
    return buddy_alloc(&rt->buddy, size);
#else
    (void)rt;
    size_t mapped_size = poison_page_align(size);
    void *block = mmap(NULL, mapped_size, PROT_READ | PROT_WRITE,
                       MAP_ANON | MAP_PRIVATE, -1, 0);
    if (block == MAP_FAILED)
        return NULL;
    return block;
#endif
}
/* Release a heap block previously obtained from heap_block_alloc().
 * Normal builds return it to the buddy allocator.  POISON_HEAP builds
 * never munmap: keeping the virtual range mapped (and poisoned) means
 * addresses are never reused, so stale pointers cannot alias a new
 * block, and chase() can still read forwarding pointers from old ones. */
static void heap_block_free(JSRuntime *rt, void *ptr, size_t size) {
#ifndef POISON_HEAP
    buddy_free(&rt->buddy, ptr, size);
#else
    (void)rt;
    gc_poison_region(ptr, size);
#endif
}
/* ============================================================
@@ -1428,16 +1454,10 @@ int ctx_gc (JSContext *ctx, int allow_grow, size_t alloc_size) {
while (new_size < alloc_size && new_size < buddy_max_block(&ctx->rt->buddy))
new_size *= 2;
}
#ifdef POISON_HEAP
new_size = poison_page_align(new_size);
#endif
uint8_t *new_block = heap_block_alloc (rt, new_size);
if (!new_block) {
/* Try with same size */
new_size = ctx->current_block_size;
#ifdef POISON_HEAP
new_size = poison_page_align(new_size);
#endif
new_block = heap_block_alloc (rt, new_size);
if (!new_block) return -1;
}
@@ -1731,9 +1751,6 @@ JSContext *JS_NewContextRawWithHeapSize (JSRuntime *rt, size_t heap_size) {
}
/* Allocate initial heap block for bump allocation */
#ifdef POISON_HEAP
heap_size = poison_page_align(heap_size);
#endif
ctx->current_block_size = heap_size;
ctx->next_block_size = ctx->current_block_size;
ctx->heap_base = heap_block_alloc (rt, ctx->current_block_size);