Merge branch 'native_boot'

This commit is contained in:
2026-02-23 19:07:07 -06:00
20 changed files with 38708 additions and 38130 deletions

160
audit.ce
View File

@@ -4,88 +4,146 @@
// cell audit Audit all packages
// cell audit <locator> Audit specific package
// cell audit . Audit current directory package
// cell audit --function-hoist [<locator>] Report function hoisting usage
//
// Compiles every script in the package(s) to check for errors.
// Continues past failures and reports all issues at the end.
var shop = use('internal/shop')
var pkg = use('package')
var fd = use('fd')
// CLI state: optional package locator and the --function-hoist flag.
var target_package = null
var function_hoist = false
var i = 0
// run: entry point. Compiles every script in the selected package(s),
// collects failures and unresolved use() calls, and prints a summary.
// NOTE(review): this region appears to contain BOTH the old and new sides of a
// merged diff (many blocks occur twice, without +/- markers). The duplicated
// spans are flagged below — confirm against the real file before relying on
// control flow here.
var run = function() {
var packages = null
var tokenize_mod = null
var parse_mod = null
var hoist_files = 0
var hoist_refs = 0
var total_ok = 0
var total_errors = 0
var total_scripts = 0
var all_failures = []
var all_unresolved = []
var summary = null
// Parse CLI arguments: --help, --function-hoist, or a package locator.
for (i = 0; i < length(args); i++) {
if (args[i] == '--help' || args[i] == '-h') {
// NOTE(review): two Usage lines below look like merged old/new diff lines;
// presumably only the --function-hoist variant belongs in the new version.
log.console("Usage: cell audit [<locator>]")
log.console("Usage: cell audit [--function-hoist] [<locator>]")
log.console("")
log.console("Test-compile all .ce and .cm scripts in package(s).")
log.console("Reports all errors without stopping at the first failure.")
log.console("")
log.console("Flags:")
log.console(" --function-hoist Report files that rely on function hoisting")
return
} else if (args[i] == '--function-hoist') {
function_hoist = true
} else if (!starts_with(args[i], '-')) {
target_package = args[i]
}
}
// Resolve local paths
if (target_package) {
target_package = shop.resolve_locator(target_package)
}
// NOTE(review): duplicate of the resolve block immediately above (diff merge artifact).
// Resolve local paths
if (target_package) {
target_package = shop.resolve_locator(target_package)
}
// NOTE(review): these re-declarations shadow/duplicate the declarations at the
// top of run() — almost certainly the old side of the diff.
var packages = null
var total_ok = 0
var total_errors = 0
var total_scripts = 0
var all_failures = []
var all_unresolved = []
// Select target packages: the single resolved locator, or every known package.
if (target_package) {
packages = [target_package]
} else {
packages = shop.list_packages()
}
// NOTE(review): duplicate of the package-selection block above (diff merge artifact).
if (target_package) {
packages = [target_package]
} else {
packages = shop.list_packages()
}
// --function-hoist mode: tokenize+parse each script and report forward
// references to hoisted functions (via ast._hoisted_fns), then return.
if (function_hoist) {
tokenize_mod = use('tokenize')
parse_mod = use('parse')
// NOTE(review): arrfor(packages, ...) is opened twice here and the inner body
// mixes build_package_scripts bookkeeping (old side) with the hoist scan
// (new side); brace structure below does not look self-consistent. Verify
// against the real post-merge file.
arrfor(packages, function(p) {
var scripts = shop.get_package_scripts(p)
if (length(scripts) == 0) return
arrfor(packages, function(p) {
var scripts = shop.get_package_scripts(p)
var pkg_dir = shop.get_package_dir(p)
if (length(scripts) == 0) return
log.console("Auditing " + p + " (" + text(length(scripts)) + " scripts)...")
var result = shop.build_package_scripts(p)
total_ok = total_ok + result.ok
total_errors = total_errors + length(result.errors)
total_scripts = total_scripts + result.total
arrfor(scripts, function(script) {
var src_path = pkg_dir + '/' + script
var src = null
var tok_result = null
var ast = null
// scan: parse one script and report any hoisted-function forward references.
var scan = function() {
if (!fd.is_file(src_path)) return
src = text(fd.slurp(src_path))
tok_result = tokenize_mod(src, script)
ast = parse_mod(tok_result.tokens, src, script, tokenize_mod)
if (ast._hoisted_fns != null && length(ast._hoisted_fns) > 0) {
log.console(p + '/' + script + ":")
hoist_files = hoist_files + 1
arrfor(ast._hoisted_fns, function(ref) {
var msg = " " + ref.name
if (ref.line != null) msg = msg + " (ref line " + text(ref.line)
if (ref.decl_line != null) msg = msg + ", declared line " + text(ref.decl_line)
if (ref.line != null) msg = msg + ")"
log.console(msg)
hoist_refs = hoist_refs + 1
})
}
} disruption {
// skip files that fail to parse
}
scan()
})
})
arrfor(result.errors, function(e) {
push(all_failures, p + ": " + e)
log.console("")
log.console("Summary: " + text(hoist_files) + " files with function hoisting, " + text(hoist_refs) + " total forward references")
return
}
// Default mode: test-compile every script in each package and record
// failures plus unresolved use() calls.
arrfor(packages, function(p) {
var scripts = shop.get_package_scripts(p)
var result = null
var resolution = null
if (length(scripts) == 0) return
log.console("Auditing " + p + " (" + text(length(scripts)) + " scripts)...")
result = shop.build_package_scripts(p)
total_ok = total_ok + result.ok
total_errors = total_errors + length(result.errors)
total_scripts = total_scripts + result.total
arrfor(result.errors, function(e) {
push(all_failures, p + ": " + e)
})
// Check use() resolution
resolution = shop.audit_use_resolution(p)
arrfor(resolution.unresolved, function(u) {
push(all_unresolved, p + '/' + u.script + ": use('" + u.module + "') cannot be resolved")
})
})
// NOTE(review): duplicate resolution block — here 'p' is outside the arrfor
// callback scope, so this is clearly the old side of the diff.
// Check use() resolution
var resolution = shop.audit_use_resolution(p)
arrfor(resolution.unresolved, function(u) {
push(all_unresolved, p + '/' + u.script + ": use('" + u.module + "') cannot be resolved")
})
})
log.console("")
// Report all failed scripts, if any.
if (length(all_failures) > 0) {
log.console("Failed scripts:")
arrfor(all_failures, function(f) {
log.console(" " + f)
})
log.console("")
}
// NOTE(review): duplicate of the failed-scripts report above (diff merge artifact).
if (length(all_failures) > 0) {
log.console("Failed scripts:")
arrfor(all_failures, function(f) {
log.console(" " + f)
})
log.console("")
}
// Report all unresolved use() targets, if any.
if (length(all_unresolved) > 0) {
log.console("Unresolved modules:")
arrfor(all_unresolved, function(u) {
log.console(" " + u)
})
log.console("")
}
// NOTE(review): duplicate of the unresolved-modules report above (diff merge artifact).
if (length(all_unresolved) > 0) {
log.console("Unresolved modules:")
arrfor(all_unresolved, function(u) {
log.console(" " + u)
})
log.console("")
}
// Final one-line summary: ok/total, failure count, unresolved count.
var summary = "Audit complete: " + text(total_ok) + "/" + text(total_scripts) + " scripts compiled"
if (total_errors > 0) summary = summary + ", " + text(total_errors) + " failed"
if (length(all_unresolved) > 0) summary = summary + ", " + text(length(all_unresolved)) + " unresolved use() calls"
log.console(summary)
// NOTE(review): duplicate summary block (diff merge artifact) — the 'var summary'
// version above vs this assignment version; only one belongs.
summary = "Audit complete: " + text(total_ok) + "/" + text(total_scripts) + " scripts compiled"
if (total_errors > 0) summary = summary + ", " + text(total_errors) + " failed"
if (length(all_unresolved) > 0) summary = summary + ", " + text(length(all_unresolved)) + " unresolved use() calls"
log.console(summary)
}
run()

223
boot.ce Normal file
View File

@@ -0,0 +1,223 @@
// cell boot [--native] <program> - Pre-compile all module dependencies in parallel
//
// Discovers all transitive module dependencies for a program,
// checks which are not yet cached, and compiles uncached ones
// in parallel using worker actors composed via parallel() requestors.
//
// Also used as a child actor by engine.cm for auto-boot.
var shop = use('internal/shop')
var fd = use('fd')
// Shared state, filled in either by the child-actor message or by CLI args.
var is_native = false
var target_prog = null
var target_pkg = null
var i = 0
// Child actor mode: receive message from engine.cm
var _child_mode = false
var run_boot = null
// Message contract (set by engine.cm): {program, package, native}.
$receiver(function(msg) {
_child_mode = true
is_native = msg.native || false
target_prog = msg.program
target_pkg = msg.package
run_boot()
})
// CLI mode: parse arguments
// NOTE(review): in child-actor mode 'args' is presumably empty so this whole
// branch is skipped — confirm that engine.cm spawns boot without CLI args.
if (args && length(args) > 0) {
for (i = 0; i < length(args); i = i + 1) {
if (args[i] == '--native') {
is_native = true
} else if (args[i] == '--help' || args[i] == '-h') {
log.console("Usage: cell boot [--native] <program>")
log.console("")
log.console("Pre-compile all module dependencies for a program.")
log.console("Uncached modules are compiled in parallel.")
$stop()
} else if (!starts_with(args[i], '-')) {
target_prog = args[i]
}
}
// Args were given but none named a program: report and stop the actor.
if (!target_prog) {
log.error("boot: no program specified")
$stop()
}
}
// Discover all transitive module dependencies for a file
//
// Walks use() imports recursively starting at file_path, returning
//   {scripts: [{path, package}, ...], c_packages: [name, ...]}
// where scripts are .cm modules reached and c_packages are native packages.
function discover_deps(file_path) {
var visited = {}
var scripts = []
var c_packages = {}
// trace: depth-first walk of one file's imports; 'visited' prevents cycles.
function trace(fp) {
if (visited[fp]) return
visited[fp] = true
var fi = shop.file_info(fp)
var file_pkg = fi.package
var idx = null
var j = 0
var imp = null
var mod_path = null
var rinfo = null
// record this script (skip the root program itself)
if (ends_with(fp, '.cm')) {
scripts[] = {path: fp, package: file_pkg}
}
var _trace = function() {
idx = shop.index_file(fp)
if (!idx || !idx.imports) return
j = 0
while (j < length(idx.imports)) {
imp = idx.imports[j]
mod_path = imp.module_path
rinfo = shop.resolve_import_info(mod_path, file_pkg)
if (rinfo) {
if (rinfo.type == 'script' && rinfo.resolved_path) {
// Script dependency: recurse into it.
trace(rinfo.resolved_path)
} else if (rinfo.type == 'native' && rinfo.package) {
// Native dependency: record its C package (deduped via object keys).
c_packages[rinfo.package] = true
}
}
j = j + 1
}
// NOTE(review): indexing failures are silently swallowed here — a file whose
// imports can't be read is treated as having none. Confirm this best-effort
// behavior is intended for boot.
} disruption {}
_trace()
}
trace(file_path)
return {scripts: scripts, c_packages: array(c_packages)}
}
// Filter out already-cached modules
//
// Takes the result of discover_deps() and returns the flat work list of
// items still needing compilation, each tagged with a 'type' the
// compile_worker actor understands ('script' | 'native_script' | 'c_package').
function filter_uncached(deps) {
var uncached = []
var j = 0
var s = null
j = 0
while (j < length(deps.scripts)) {
s = deps.scripts[j]
if (is_native) {
// Native mode: check the native dylib cache instead of the bytecode cache.
if (!shop.is_native_cached(s.path, s.package)) {
uncached[] = {type: 'native_script', path: s.path, package: s.package}
}
} else {
if (!shop.is_cached(s.path)) {
uncached[] = {type: 'script', path: s.path, package: s.package}
}
}
j = j + 1
}
// C packages always included — build_dynamic handles its own caching
// ('core' is excluded, presumably because it is always built — confirm).
j = 0
while (j < length(deps.c_packages)) {
if (deps.c_packages[j] != 'core') {
uncached[] = {type: 'c_package', package: deps.c_packages[j]}
}
j = j + 1
}
return uncached
}
// item_name: human-readable label for a work item — its script path when
// present, otherwise its package name.
function item_name(item) {
if (item.path) return item.path
return item.package
}
// Create a requestor that spawns a compile_worker actor for one item
//
// Returns a requestor function(callback, value). On start it spawns a
// 'compile_worker' actor, hands it the item on greet, and reports:
//   stop    -> success: callback(name)
//   disrupt -> failure: callback(null, {message})
// The requestor's return value is a cancel function that stops the worker.
function make_compile_requestor(item) {
var worker = null
var name = item_name(item)
return function(callback, value) {
log.console('boot: spawning worker for ' + name)
$start(function(event) {
if (event.type == 'greet') {
worker = event.actor
// Forward the work item to the freshly spawned worker.
send(event.actor, {
type: item.type,
path: item.path,
package: item.package
})
}
if (event.type == 'stop') {
// NOTE(review): the worker replies with {ok: true/false} before stopping,
// but only the stop/disrupt events are observed here — a worker that
// reports ok:false and then stops cleanly is counted as success. Confirm
// this is intended.
callback(name)
}
if (event.type == 'disrupt') {
log.error('boot: worker failed for ' + name)
callback(null, {message: 'compile failed: ' + name})
}
}, 'compile_worker')
// Cancellation: stop the worker actor if it was already greeted.
return function cancel(reason) {
if (worker) $stop(worker)
}
}
}
// run_boot: resolve the program, discover its deps, and compile the uncached
// ones in parallel via compile_worker actors. Always stops this actor when done.
run_boot = function() {
var prog_path = null
var prog_info = null
var deps = null
var uncached = null
var requestors = null
var p = null
// Resolve the program path
if (target_prog) {
p = target_prog
// Strip a trailing '.ce' so both 'foo' and 'foo.ce' resolve the same way.
if (ends_with(p, '.ce')) p = text(p, 0, -3)
// resolve_program may not exist on older shops — guard before calling.
prog_info = shop.resolve_program ? shop.resolve_program(p, target_pkg) : null
if (prog_info) {
prog_path = prog_info.path
if (!target_pkg && prog_info.pkg) target_pkg = prog_info.pkg
} else {
// Fallback: treat the argument as a literal path relative to cwd.
prog_path = p + '.ce'
if (!fd.is_file(prog_path)) {
prog_path = null
}
}
}
if (!prog_path || !fd.is_file(prog_path)) {
log.error('boot: could not find program: ' + text(target_prog || ''))
$stop()
return
}
// Discover all transitive deps
deps = discover_deps(prog_path)
uncached = filter_uncached(deps)
if (length(uncached) == 0) {
log.console('boot: all modules cached')
$stop()
return
}
// Compile uncached modules in parallel using worker actors
log.console('boot: compiling ' + text(length(uncached)) + ' modules...')
requestors = array(uncached, make_compile_requestor)
parallel(requestors)(function(results, reason) {
if (reason) {
log.error('boot: ' + (reason.message || text(reason)))
} else {
log.console('boot: compiled ' + text(length(results)) + ' modules')
}
$stop()
}, null)
}
// CLI mode: start immediately
// (In child-actor mode run_boot is triggered by the $receiver message instead.)
if (!_child_mode && target_prog) {
run_boot()
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

36
compile_worker.ce Normal file
View File

@@ -0,0 +1,36 @@
// compile_worker - Worker actor that compiles a single module and replies
//
// Receives a message with:
//   {type: 'script', path, package}        — bytecode compile
//   {type: 'native_script', path, package} — native compile
//   {type: 'c_package', package}           — C package build
//
// Replies with {ok: true/false, path} and stops. On failure the reply also
// carries an 'error' field with a short description.
var shop = use('internal/shop')
var build = use('build')
$receiver(function(msg) {
// Human-readable identity of the work item: script path or package name.
var name = msg.path || msg.package
var _work = function() {
if (msg.type == 'script') {
log.console('compile_worker: compiling ' + name)
shop.precompile(msg.path, msg.package)
} else if (msg.type == 'native_script') {
log.console('compile_worker: native compiling ' + name)
build.compile_native(msg.path, null, null, msg.package)
} else if (msg.type == 'c_package') {
log.console('compile_worker: building package ' + name)
build.build_dynamic(msg.package, null, null, null)
}
log.console('compile_worker: done ' + name)
send(msg, {ok: true, path: name})
} disruption {
log.error('compile_worker: failed ' + name)
// Fix: include 'path' in the failure reply so it matches the documented
// reply shape ({ok, path}) and the success branch above — previously the
// sender could not tell WHICH item failed from the reply alone.
send(msg, {ok: false, path: name, error: 'compile failed'})
}
_work()
$stop()
})
// Watchdog: stop this actor after a delay so a stuck or orphaned compile
// cannot leak the worker forever (timeout units per $delay's contract — confirm).
var _t = $delay($stop, 120)

View File

@@ -362,6 +362,7 @@ var fold = function(ast) {
var fold_expr = null
var fold_stmt = null
var fold_stmts = null
var fold_fn = null
fold_expr = function(expr, fn_nr) {
if (expr == null) return null
@@ -592,8 +593,6 @@ var fold = function(ast) {
return expr
}
var fold_fn = null
fold_stmt = function(stmt, fn_nr) {
if (stmt == null) return null
var k = stmt.kind

View File

@@ -35,13 +35,16 @@ var packages_path = shop_path ? shop_path + '/packages' : null
// Self-sufficient initialization: content-addressed cache
var use_cache = {}
// Save blob intrinsic before var blob = use_core('blob') shadows it.
var _make_blob = (function() { return blob })()
function content_hash(content) {
var data = content
if (!is_blob(data)) data = stone(blob(text(data)))
if (!is_blob(data)) data = stone(_make_blob(text(data)))
return text(crypto.blake2(data), 'h')
}
function cache_path(hash) {
function pipeline_cache_path(hash) {
if (!shop_path) return null
return shop_path + '/build/' + hash
}
@@ -81,7 +84,7 @@ function detect_cc() {
function native_dylib_cache_path(src, target) {
var native_key = src + '\n' + target + '\nnative\n'
var full_key = native_key + '\nnative'
return cache_path(content_hash(full_key))
return pipeline_cache_path(content_hash(full_key))
}
var _engine_host_target = null
@@ -140,9 +143,11 @@ function load_pipeline_module(name, env) {
if (fd.is_file(source_path)) {
if (!source_blob) source_blob = fd.slurp(source_path)
hash = content_hash(source_blob)
cached = cache_path(hash)
if (cached && fd.is_file(cached))
cached = pipeline_cache_path(hash)
if (cached && fd.is_file(cached)) {
log.system('engine: pipeline ' + name + ' (cached)')
return mach_load(fd.slurp(cached), env)
}
// Cache miss: compile from source using boot seed pipeline
mcode_path = core_path + '/boot/' + name + '.cm.mcode'
@@ -164,7 +169,8 @@ function load_pipeline_module(name, env) {
compiled = boot_sl(compiled)
mcode_json = json.encode(compiled)
mach_blob = mach_compile_mcode_bin(name, mcode_json)
if (cached) {
log.system('engine: pipeline ' + name + ' (compiled)')
if (!native_mode && cached) {
ensure_build_dir()
fd.slurpwrite(cached, mach_blob)
}
@@ -215,6 +221,31 @@ if (native_mode) {
use_cache['core/qbe_emit'] = _qbe_emit_mod
}
var compiler_fingerprint = (function() {
var files = [
"tokenize", "parse", "fold", "mcode", "streamline",
"qbe", "qbe_emit", "ir_stats"
]
var combined = ""
var i = 0
var path = null
while (i < length(files)) {
path = core_path + '/' + files[i] + '.cm'
if (fd.is_file(path))
combined = combined + text(fd.slurp(path))
i = i + 1
}
return content_hash(stone(blob(combined)))
})()
function module_cache_path(content, salt) {
if (!shop_path) return null
var s = salt || 'mach'
return shop_path + '/build/' + content_hash(
stone(_make_blob(text(content) + '\n' + s + '\n' + compiler_fingerprint))
)
}
// analyze: tokenize + parse + fold, check for errors
function analyze(src, filename) {
var tok_result = tokenize_mod(src, filename)
@@ -605,7 +636,6 @@ function use_core(path) {
arrfor(array(core_extras), function(k) { env[k] = core_extras[k] })
env = stone(env)
var hash = null
var cached_path = null
var mach_blob = null
var source_blob = null
@@ -644,14 +674,15 @@ function use_core(path) {
// Bytecode path (fallback or non-native mode)
_load_mod = function() {
if (!source_blob) source_blob = fd.slurp(file_path)
hash = content_hash(source_blob)
cached_path = cache_path(hash)
cached_path = module_cache_path(source_blob, 'mach')
if (cached_path && fd.is_file(cached_path)) {
log.system('engine: cache hit for core/' + path)
result = mach_load(fd.slurp(cached_path), env)
} else {
script = text(source_blob)
ast = analyze(script, file_path)
mach_blob = compile_to_blob('core:' + path, ast)
log.system('engine: compiled core/' + path)
if (!native_mode && cached_path) {
ensure_build_dir()
fd.slurpwrite(cached_path, mach_blob)
@@ -748,8 +779,10 @@ function actor_die(err)
if (underlings) {
unders = array(underlings)
arrfor(unders, function(id, index) {
log.console(`calling on ${id} to disrupt too`)
$_.stop(create_actor({id}))
if (!is_null(underlings[id])) {
log.system(`stopping underling ${id}`)
$_.stop(create_actor({id}))
}
})
}
@@ -801,7 +834,8 @@ core_extras.actor_api = $_
core_extras.log = log
core_extras.runtime_env = runtime_env
core_extras.content_hash = content_hash
core_extras.cache_path = cache_path
core_extras.cache_path = module_cache_path
core_extras.compiler_fingerprint = compiler_fingerprint
core_extras.ensure_build_dir = ensure_build_dir
core_extras.compile_to_blob = compile_to_blob
core_extras.native_mode = native_mode
@@ -1150,6 +1184,7 @@ var id_address = {}
var peer_queue = {}
var portal = null
var portal_fn = null
var enet = use_core('enet')
function peer_connection(peer) {
return {
@@ -1276,6 +1311,7 @@ $_.start = function start(cb, program) {
root_id: root ? root[ACTORDATA].id : null,
program,
native_mode: native_mode,
no_warn: _no_warn,
}
greeters[id] = cb
push(message_queue, { startup })
@@ -1321,7 +1357,7 @@ $_.couple = function couple(actor) {
if (actor == $_.self) return // can't couple to self
couplings[actor[ACTORDATA].id] = true
sys_msg(actor, {kind:'couple', from_id: _cell.id})
log.system(`coupled to ${actor}`)
log.system(`coupled to ${actor[ACTORDATA].id}`)
}
function actor_prep(actor, send) {
@@ -1385,7 +1421,7 @@ function actor_send(actor, message) {
}
return
}
log.system(`Unable to send message to actor ${actor[ACTORDATA]}`)
log.system(`Unable to send message to actor ${actor[ACTORDATA].id}`)
}
function send_messages() {
@@ -1547,7 +1583,7 @@ function handle_sysym(msg)
}
greeter(greet_msg)
}
if (msg.message.type == 'disrupt')
if (msg.message.type == 'disrupt' || msg.message.type == 'stop')
delete underlings[from_id]
} else if (msg.kind == 'contact') {
if (portal_fn) {
@@ -1756,52 +1792,81 @@ $_.clock(_ => {
env.log = log
env = stone(env)
var native_build = null
var native_dylib_path = null
var native_handle = null
var native_basename = null
var native_sym = null
// --- run_program: execute the resolved program ---
function run_program() {
var native_build = null
var native_dylib_path = null
var native_handle = null
var native_basename = null
var native_sym = null
// Native execution path: compile to dylib and run
if (native_mode) {
native_build = use_core('build')
native_dylib_path = native_build.compile_native(prog_path, null, null, pkg)
native_handle = os.dylib_open(native_dylib_path)
native_basename = file_info.name ? file_info.name + (file_info.is_actor ? '.ce' : '.cm') : fd.basename(prog_path)
native_sym = pkg ? shop.c_symbol_for_file(pkg, native_basename) : null
if (native_sym)
os.native_module_load_named(native_handle, native_sym, env)
else
os.native_module_load(native_handle, env)
return
}
var source_blob = fd.slurp(prog_path)
var hash = content_hash(source_blob)
var cached_path = cache_path(hash)
var val = null
var script = null
var ast = null
var mach_blob = null
var _compile = function() {
if (cached_path && fd.is_file(cached_path)) {
val = mach_load(fd.slurp(cached_path), env)
} else {
script = text(source_blob)
ast = analyze(script, prog_path)
mach_blob = compile_user_blob(prog, ast, pkg)
if (cached_path) {
ensure_build_dir()
fd.slurpwrite(cached_path, mach_blob)
}
val = mach_load(mach_blob, env)
// Native execution path: compile to dylib and run
if (native_mode) {
native_build = use_core('build')
native_dylib_path = native_build.compile_native(prog_path, null, null, pkg)
native_handle = os.dylib_open(native_dylib_path)
native_basename = file_info.name ? file_info.name + (file_info.is_actor ? '.ce' : '.cm') : fd.basename(prog_path)
native_sym = pkg ? shop.c_symbol_for_file(pkg, native_basename) : null
if (native_sym)
os.native_module_load_named(native_handle, native_sym, env)
else
os.native_module_load(native_handle, env)
return
}
var source_blob = fd.slurp(prog_path)
var _cached_path = module_cache_path(source_blob, 'mach')
var val = null
var script = null
var ast = null
var mach_blob = null
var _compile = function() {
if (_cached_path && fd.is_file(_cached_path)) {
val = mach_load(fd.slurp(_cached_path), env)
} else {
script = text(source_blob)
ast = analyze(script, prog_path)
mach_blob = compile_user_blob(prog, ast, pkg)
if (_cached_path) {
ensure_build_dir()
fd.slurpwrite(_cached_path, mach_blob)
}
val = mach_load(mach_blob, env)
}
} disruption {
os.exit(1)
}
_compile()
if (val) {
log.error('Program must not return anything')
disrupt
}
} disruption {
os.exit(1)
}
_compile()
if (val) {
log.error('Program must not return anything')
disrupt
// --- Auto-boot: pre-compile uncached deps before running ---
// Only auto-boot for the root program (not child actors, not boot itself).
// Delegates all discovery + compilation to boot.ce (separate actor/memory).
var _is_root_actor = !_cell.args.overling_id
var _skip_boot = !_is_root_actor || prog == 'boot' || prog == 'compile_worker'
if (_skip_boot) {
run_program()
} else {
$_.start(function(event) {
if (event.type == 'greet') {
send(event.actor, {
program: prog,
package: pkg,
native: native_mode
})
}
if (event.type == 'stop') {
run_program()
}
if (event.type == 'disrupt') {
// Boot failed, run program anyway
run_program()
}
}, 'boot')
}
})

View File

@@ -21,25 +21,6 @@ var my$_ = actor_api
var core = "core"
// Compiler fingerprint: hash of all compiler source files so that any compiler
// change invalidates the entire build cache. Folded into hash_path().
var compiler_fingerprint = (function() {
var files = [
"tokenize", "parse", "fold", "mcode", "streamline",
"qbe", "qbe_emit", "ir_stats"
]
var combined = ""
var i = 0
var path = null
while (i < length(files)) {
path = core_path + '/' + files[i] + '.cm'
if (fd.is_file(path))
combined = combined + text(fd.slurp(path))
i = i + 1
}
return content_hash(stone(blob(combined)))
})()
// Make a package name safe for use in C identifiers.
// Replaces /, ., -, @ with _ so the result is a valid C identifier fragment.
function safe_c_name(name) {
@@ -48,21 +29,18 @@ function safe_c_name(name) {
function pull_from_cache(content)
{
var path = hash_path(content)
if (fd.is_file(path))
var path = cache_path(content)
if (fd.is_file(path)) {
log.system('shop: cache hit')
return fd.slurp(path)
}
}
function put_into_cache(content, obj)
{
var path = hash_path(content)
var path = cache_path(content)
fd.slurpwrite(path, obj)
}
function hash_path(content, salt)
{
var s = salt || 'mach'
return global_shop_path + '/build/' + content_hash(stone(blob(text(content) + '\n' + s + '\n' + compiler_fingerprint)))
log.system('shop: cached')
}
var Shop = {}
@@ -818,7 +796,7 @@ function resolve_mod_fn(path, pkg) {
// Check for cached mcode in content-addressed store
if (policy.allow_compile) {
cached_mcode_path = hash_path(content_key, 'mcode')
cached_mcode_path = cache_path(content_key, 'mcode')
if (fd.is_file(cached_mcode_path)) {
mcode_json = text(fd.slurp(cached_mcode_path))
compiled = mach_compile_mcode_bin(path, mcode_json)
@@ -877,7 +855,7 @@ function resolve_mod_fn_bytecode(path, pkg) {
if (cached) return cached
// Check for cached mcode
cached_mcode_path = hash_path(content_key, 'mcode')
cached_mcode_path = cache_path(content_key, 'mcode')
if (fd.is_file(cached_mcode_path)) {
mcode_json = text(fd.slurp(cached_mcode_path))
compiled = mach_compile_mcode_bin(path, mcode_json)
@@ -896,7 +874,7 @@ function resolve_mod_fn_bytecode(path, pkg) {
mcode_json = shop_json.encode(optimized)
fd.ensure_dir(global_shop_path + '/build')
fd.slurpwrite(hash_path(content_key, 'mcode'), stone(blob(mcode_json)))
fd.slurpwrite(cache_path(content_key, 'mcode'), stone(blob(mcode_json)))
compiled = mach_compile_mcode_bin(path, mcode_json)
put_into_cache(content_key, compiled)
@@ -2170,6 +2148,7 @@ Shop.get_lib_dir = function() {
Shop.ensure_dir = fd.ensure_dir
Shop.install_zip = install_zip
Shop.ensure_package_dylibs = ensure_package_dylibs
Shop.resolve_path = resolve_path
Shop.get_local_dir = function() {
return global_shop_path + "/local"
@@ -2236,7 +2215,7 @@ Shop.load_as_mach = function(path, pkg) {
// Try cached mcode -> compile to mach
if (!compiled) {
cached_mcode_path = hash_path(content_key, 'mcode')
cached_mcode_path = cache_path(content_key, 'mcode')
if (fd.is_file(cached_mcode_path)) {
mcode_json = text(fd.slurp(cached_mcode_path))
compiled = mach_compile_mcode_bin(file_path, mcode_json)
@@ -2256,7 +2235,7 @@ Shop.load_as_mach = function(path, pkg) {
ir = _mcode_mod(ast)
optimized = _streamline_mod(ir)
mcode_json = shop_json.encode(optimized)
cached_mcode_path = hash_path(content_key, 'mcode')
cached_mcode_path = cache_path(content_key, 'mcode')
fd.ensure_dir(global_shop_path + '/build')
fd.slurpwrite(cached_mcode_path, stone(blob(mcode_json)))
compiled = mach_compile_mcode_bin(file_path, mcode_json)
@@ -2309,6 +2288,34 @@ Shop.load_as_dylib = function(path, pkg) {
return os.native_module_load_named(result._handle, result._sym, env)
}
// Check if a .cm file has a cached bytecode artifact (mach or mcode)
Shop.is_cached = function(path) {
if (!fd.is_file(path)) return false
var content_key = stone(blob(text(fd.slurp(path))))
if (fd.is_file(cache_path(content_key, 'mach'))) return true
if (fd.is_file(cache_path(content_key, 'mcode'))) return true
return false
}
// Check if a .cm file has a cached native dylib artifact
Shop.is_native_cached = function(path, pkg) {
var build_mod = use_cache['core/build']
if (!build_mod || !fd.is_file(path)) return false
var src = text(fd.slurp(path))
var host = detect_host_target()
if (!host) return false
var san_flags = build_mod.native_sanitize_flags ? build_mod.native_sanitize_flags() : ''
var native_key = build_mod.native_cache_content ?
build_mod.native_cache_content(src, host, san_flags) :
(src + '\n' + host)
return fd.is_file(build_mod.cache_path(native_key, build_mod.SALT_NATIVE))
}
// Compile + cache a module without executing it
Shop.precompile = function(path, pkg) {
resolve_mod_fn(path, pkg)
}
Shop.audit_packages = function() {
var packages = Shop.list_packages()

View File

@@ -362,6 +362,8 @@ var mcode = function(ast) {
s_slot_types[text(dest)] = s_slot_types[text(src)]
}
var emit_numeric_binop = null
// emit_add_decomposed: emit type-dispatched add (text → concat, num → add)
// reads _bp_dest, _bp_left, _bp_right, _bp_ln, _bp_rn from closure
var emit_add_decomposed = function() {
@@ -421,7 +423,7 @@ var mcode = function(ast) {
// emit_numeric_binop: emit type-guarded numeric binary op
// reads _bp_dest, _bp_left, _bp_right, _bp_ln, _bp_rn from closure
var emit_numeric_binop = function(op_str) {
emit_numeric_binop = function(op_str) {
var left_known = is_known_number(_bp_ln) || slot_is_num(_bp_left)
var right_known = is_known_number(_bp_rn) || slot_is_num(_bp_right)
var t0 = null

View File

@@ -1420,6 +1420,7 @@ var parse = function(tokens, src, filename, tokenizer) {
var sem_errors = []
var scopes_array = []
var intrinsics = []
var hoisted_fn_refs = []
var sem_error = function(node, msg) {
var err = {message: msg}
@@ -1441,14 +1442,17 @@ var parse = function(tokens, src, filename, tokenizer) {
}
var sem_add_var = function(scope, name, make_opts) {
push(scope.vars, {
var entry = {
name: name,
is_const: make_opts.is_const == true,
make: make_opts.make,
function_nr: make_opts.fn_nr,
nr_uses: 0,
closure: 0
})
}
if (make_opts.reached == false) entry.reached = false
if (make_opts.decl_line != null) entry.decl_line = make_opts.decl_line
push(scope.vars, entry)
}
var sem_lookup_var = function(scope, name) {
@@ -1567,39 +1571,17 @@ var parse = function(tokens, src, filename, tokenizer) {
var sem_check_expr = null
var sem_check_stmt = null
var sem_predeclare_vars = function(scope, stmts) {
var sem_predeclare_fns = function(scope, stmts) {
var i = 0
var stmt = null
var kind = null
var name = null
var item = null
var ik = null
var j = 0
while (i < length(stmts)) {
stmt = stmts[i]
kind = stmt.kind
if (kind == "function") {
if (stmt.kind == "function") {
name = stmt.name
if (name != null && sem_find_var(scope, name) == null) {
sem_add_var(scope, name, {make: "function", fn_nr: scope.function_nr})
}
} else if (kind == "var") {
name = stmt.left.name
if (name != null && sem_find_var(scope, name) == null) {
sem_add_var(scope, name, {make: "var", fn_nr: scope.function_nr})
}
} else if (kind == "var_list") {
j = 0
while (j < length(stmt.list)) {
item = stmt.list[j]
ik = item.kind
if (ik == "var") {
name = item.left.name
if (name != null && sem_find_var(scope, name) == null) {
sem_add_var(scope, name, {make: "var", fn_nr: scope.function_nr})
}
}
j = j + 1
sem_add_var(scope, name, {make: "function", fn_nr: scope.function_nr,
decl_line: stmt.from_row != null ? stmt.from_row + 1 : null, reached: false})
}
}
i = i + 1
@@ -1831,7 +1813,7 @@ var parse = function(tokens, src, filename, tokenizer) {
i = i + 1
}
if (expr.statements != null) {
sem_predeclare_vars(fn_scope, expr.statements)
sem_predeclare_fns(fn_scope, expr.statements)
i = 0
while (i < length(expr.statements)) {
sem_check_stmt(fn_scope, expr.statements[i])
@@ -1875,6 +1857,11 @@ var parse = function(tokens, src, filename, tokenizer) {
expr.function_nr = r.def_function_nr
r.v.nr_uses = r.v.nr_uses + 1
if (r.level > 0) r.v.closure = 1
if (r.v.reached == false && r.v.decl_line != null && expr.from_row != null && expr.from_row + 1 < r.v.decl_line) {
push(hoisted_fn_refs, {name: name, line: expr.from_row + 1,
col: expr.from_column != null ? expr.from_column + 1 : null,
decl_line: r.v.decl_line})
}
} else {
expr.level = -1
expr.intrinsic = true
@@ -2088,7 +2075,14 @@ var parse = function(tokens, src, filename, tokenizer) {
enclosing = sem_find_func_scope(scope)
if (enclosing != null) enclosing.has_inner_func = true
name = stmt.name
if (name != null && sem_find_var(scope, name) == null) sem_add_var(scope, name, {make: "function", fn_nr: scope.function_nr})
if (name != null) {
existing = sem_find_var(scope, name)
if (existing != null) {
existing.reached = true
} else {
sem_add_var(scope, name, {make: "function", fn_nr: scope.function_nr})
}
}
fn_nr_val = stmt.function_nr
if (fn_nr_val == null) fn_nr_val = scope.function_nr
fn_scope = make_scope(scope, fn_nr_val, {is_func: true})
@@ -2102,7 +2096,7 @@ var parse = function(tokens, src, filename, tokenizer) {
if (def_val != null) sem_check_expr(fn_scope, def_val)
i = i + 1
}
sem_predeclare_vars(fn_scope, stmt.statements)
sem_predeclare_fns(fn_scope, stmt.statements)
i = 0
while (i < length(stmt.statements)) {
sem_check_stmt(fn_scope, stmt.statements[i])
@@ -2124,6 +2118,7 @@ var parse = function(tokens, src, filename, tokenizer) {
}
var semantic_check = function(ast) {
hoisted_fn_refs = []
var global_scope = make_scope(null, 0, {is_func: true})
var i = 0
var stmt = null
@@ -2134,7 +2129,11 @@ var parse = function(tokens, src, filename, tokenizer) {
i = 0
while (i < length(ast.functions)) {
name = ast.functions[i].name
if (name != null) sem_add_var(global_scope, name, {make: "function", fn_nr: 0})
if (name != null) {
sem_add_var(global_scope, name, {make: "function", fn_nr: 0,
decl_line: ast.functions[i].from_row != null ? ast.functions[i].from_row + 1 : null,
reached: false})
}
i = i + 1
}
@@ -2161,6 +2160,7 @@ var parse = function(tokens, src, filename, tokenizer) {
ast.scopes = scopes_array
ast.intrinsics = intrinsics
if (length(hoisted_fn_refs) > 0) ast._hoisted_fns = hoisted_fn_refs
if (length(sem_errors) > 0) {
ast.errors = sem_errors
}

View File

@@ -321,7 +321,7 @@ void script_startup(JSContext *js)
JS_SetGCScanExternal(js, actor_gc_scan);
/* Set per-actor heap memory limit */
js->actor_label = js->name; /* may be NULL; updated when name is set */
JS_SetHeapMemoryLimit(js, ACTOR_MEMORY_LIMIT);
JS_FreeValue(js, js_core_blob_use(js));

View File

@@ -871,7 +871,7 @@ typedef struct letter {
#define ACTOR_FAST_TIMER_NS (10ULL * 1000000)
#define ACTOR_SLOW_TIMER_NS (60000ULL * 1000000)
#define ACTOR_SLOW_STRIKES_MAX 3
#define ACTOR_MEMORY_LIMIT (16ULL * 1024 * 1024)
#define ACTOR_MEMORY_LIMIT (1024ULL * 1024 * 1024)
struct JSContext {
JSRuntime *rt;
@@ -948,6 +948,7 @@ struct JSContext {
uint32_t suspended_pc; /* saved PC for resume */
int vm_call_depth; /* 0 = pure bytecode, >0 = C frames on stack */
size_t heap_memory_limit; /* 0 = no limit, else max heap bytes */
const char *actor_label; /* human-readable label for OOM diagnostics */
JSValue current_exception;

View File

@@ -84,6 +84,7 @@ JSC_CCALL(actor_disrupt,
JSC_SCALL(actor_setname,
js->name = strdup(str);
js->actor_label = js->name;
)
JSC_CCALL(actor_on_exception,

View File

@@ -2044,6 +2044,7 @@ JSContext *JS_NewContextRawWithHeapSize (JSRuntime *rt, size_t heap_size) {
ctx->suspended_pc = 0;
ctx->vm_call_depth = 0;
ctx->heap_memory_limit = 0;
ctx->actor_label = NULL;
JS_AddGCRef(ctx, &ctx->suspended_frame_ref);
ctx->suspended_frame_ref.val = JS_NULL;
@@ -3283,7 +3284,20 @@ JS_RaiseDisrupt (JSContext *ctx, const char *fmt, ...) {
/* Log to "memory" channel + disrupt. Skips JS callback (can't allocate). */
JSValue JS_RaiseOOM (JSContext *ctx) {
fprintf (stderr, "out of memory\n");
size_t used = (size_t)((uint8_t *)ctx->heap_free - (uint8_t *)ctx->heap_base);
size_t block = ctx->current_block_size;
size_t limit = ctx->heap_memory_limit;
const char *label = ctx->actor_label;
if (limit > 0) {
fprintf(stderr, "out of memory: heap %zuKB / %zuKB block, limit %zuMB",
used / 1024, block / 1024, limit / (1024 * 1024));
} else {
fprintf(stderr, "out of memory: heap %zuKB / %zuKB block, no limit",
used / 1024, block / 1024);
}
if (label)
fprintf(stderr, " [%s]", label);
fprintf(stderr, "\n");
ctx->current_exception = JS_TRUE;
return JS_EXCEPTION;
}

View File

@@ -141,6 +141,9 @@ var streamline = function(ir, log) {
return T_UNKNOWN
}
var slot_is = null
var write_rules = null
// track_types reuses write_rules table; move handled specially
// Ops safe to narrow from T_NUM to T_INT when both operands are T_INT.
// Excludes divide (int/int can produce float) and pow (int**neg produces float).
@@ -192,7 +195,7 @@ var streamline = function(ir, log) {
return null
}
var slot_is = function(slot_types, slot, typ) {
slot_is = function(slot_types, slot, typ) {
var known = slot_types[slot]
if (known == null) {
return false
@@ -360,7 +363,7 @@ var streamline = function(ir, log) {
// across label join points.
// Uses data-driven dispatch: each rule is [dest_pos, type].
// =========================================================
var write_rules = {
write_rules = {
int: [1, T_INT], true: [1, T_BOOL], false: [1, T_BOOL],
null: [1, T_NULL], access: [1, null],
array: [1, T_ARRAY], record: [1, T_RECORD],
@@ -1247,6 +1250,13 @@ var streamline = function(ir, log) {
return null
}
var slot_idx_special = null
var get_slot_refs = null
var slot_def_special = null
var slot_use_special = null
var get_slot_defs = null
var get_slot_uses = null
// =========================================================
// Pass: eliminate_moves — copy propagation + self-move nop
// Tracks move chains within basic blocks, substitutes read
@@ -1794,7 +1804,7 @@ var streamline = function(ir, log) {
// =========================================================
// Which instruction positions hold slot references (special cases)
var slot_idx_special = {
slot_idx_special = {
get: [1], put: [1],
access: [1], int: [1], function: [1], regexp: [1],
true: [1], false: [1], null: [1],
@@ -1810,7 +1820,7 @@ var streamline = function(ir, log) {
stone_text: [1]
}
var get_slot_refs = function(instr) {
get_slot_refs = function(instr) {
var special = slot_idx_special[instr[0]]
var result = null
var j = 0
@@ -1827,7 +1837,7 @@ var streamline = function(ir, log) {
}
// DEF/USE classification: which instruction positions are definitions vs uses
var slot_def_special = {
slot_def_special = {
get: [1], put: [], access: [1], int: [1], function: [1], regexp: [1],
true: [1], false: [1], null: [1], record: [1], array: [1],
invoke: [2], tail_invoke: [2], goinvoke: [],
@@ -1841,7 +1851,7 @@ var streamline = function(ir, log) {
return: [], disrupt: []
}
var slot_use_special = {
slot_use_special = {
get: [], put: [1], access: [], int: [], function: [], regexp: [],
true: [], false: [], null: [], record: [], array: [],
invoke: [1], tail_invoke: [1], goinvoke: [1],
@@ -1855,13 +1865,13 @@ var streamline = function(ir, log) {
return: [1], disrupt: []
}
var get_slot_defs = function(instr) {
get_slot_defs = function(instr) {
var special = slot_def_special[instr[0]]
if (special != null) return special
return [1]
}
var get_slot_uses = function(instr) {
get_slot_uses = function(instr) {
var special = slot_use_special[instr[0]]
var result = null
var j = 0

View File

@@ -456,11 +456,12 @@ return {
},
test_mutual_recursion: function() {
var isOdd = null
var isEven = function(n) {
if (n == 0) return true
return isOdd(n - 1)
}
var isOdd = function(n) {
isOdd = function(n) {
if (n == 0) return false
return isEven(n - 1)
}

View File

@@ -450,11 +450,12 @@ run("simple recursion", function() {
})
run("mutual recursion", function() {
var isOdd = null
var isEven = function(n) {
if (n == 0) return true
return isOdd(n - 1)
}
var isOdd = function(n) {
isOdd = function(n) {
if (n == 0) return false
return isEven(n - 1)
}
@@ -1741,6 +1742,19 @@ run("variable shadowing nested", function() {
if (fn1() != 50) fail("nested shadowing failed")
})
run("var no longer hoisted", function() {
// length is an intrinsic. Without var hoisting, it should
// resolve to the intrinsic until the var declaration is reached.
var fn = function() {
var before = length([1, 2, 3])
var length = 999
return [before, length]
}
var result = fn()
if (result[0] != 3) fail("expected intrinsic length([1,2,3]) == 3, got " + text(result[0]))
if (result[1] != 999) fail("expected local length to be 999, got " + text(result[1]))
})
// ============================================================================
// FUNCTION ARITY
// ============================================================================