diff --git a/audit.ce b/audit.ce index 33a5d9f3..1dea25d0 100644 --- a/audit.ce +++ b/audit.ce @@ -37,6 +37,7 @@ var total_ok = 0 var total_errors = 0 var total_scripts = 0 var all_failures = [] +var all_unresolved = [] if (target_package) { packages = [target_package] @@ -57,6 +58,12 @@ arrfor(packages, function(p) { arrfor(result.errors, function(e) { push(all_failures, p + ": " + e) }) + + // Check use() resolution + var resolution = shop.audit_use_resolution(p) + arrfor(resolution.unresolved, function(u) { + push(all_unresolved, p + '/' + u.script + ": use('" + u.module + "') cannot be resolved") + }) }) log.console("") @@ -68,7 +75,18 @@ if (length(all_failures) > 0) { log.console("") } -log.console("Audit complete: " + text(total_ok) + "/" + text(total_scripts) + " scripts compiled" + (total_errors > 0 ? ", " + text(total_errors) + " failed" : "")) +if (length(all_unresolved) > 0) { + log.console("Unresolved modules:") + arrfor(all_unresolved, function(u) { + log.console(" " + u) + }) + log.console("") +} + +var summary = "Audit complete: " + text(total_ok) + "/" + text(total_scripts) + " scripts compiled" +if (total_errors > 0) summary = summary + ", " + text(total_errors) + " failed" +if (length(all_unresolved) > 0) summary = summary + ", " + text(length(all_unresolved)) + " unresolved use() calls" +log.console(summary) } run() diff --git a/build.ce b/build.ce index 3f7aebc4..1358d016 100644 --- a/build.ce +++ b/build.ce @@ -100,7 +100,7 @@ if (target_package) { // Build single package log.console('Building ' + target_package + '...') _build = function() { - lib = build.build_dynamic(target_package, target, buildtype, {verbose: verbose}) + lib = build.build_dynamic(target_package, target, buildtype, {verbose: verbose, force: force_rebuild}) if (lib) { log.console(`Built ${text(length(lib))} module(s)`) } @@ -112,7 +112,7 @@ if (target_package) { } else { // Build all packages log.console('Building all packages...') - results = 
build.build_all_dynamic(target, buildtype, {verbose: verbose}) + results = build.build_all_dynamic(target, buildtype, {verbose: verbose, force: force_rebuild}) success = 0 failed = 0 diff --git a/build.cm b/build.cm index 277b6e95..e9fd4ab9 100644 --- a/build.cm +++ b/build.cm @@ -13,9 +13,36 @@ var os = use('internal/os') var toolchains = use('toolchains') var shop = use('internal/shop') var pkg_tools = use('package') +var json = use('json') var Build = {} +// ============================================================================ +// Per-run memoization caches (reset when process exits) +// ============================================================================ + +var _stat_done = {} +var _stat_fp = {} +var _read_cache = {} + +function memo_stat(path) { + var st = null + if (!_stat_done[path]) { + _stat_done[path] = true + st = fd.stat(path) + if (st.mtime != null) + _stat_fp[path] = {m: st.mtime, s: st.size} + } + return _stat_fp[path] +} + +function memo_read(path) { + if (_read_cache[path] != null) return _read_cache[path] + if (!memo_stat(path)) return null + _read_cache[path] = text(fd.slurp(path)) + return _read_cache[path] +} + // ============================================================================ // Sigil replacement // ============================================================================ @@ -100,6 +127,8 @@ var SALT_MACH = 'mach' // mach bytecode blob var SALT_MCODE = 'mcode' // mcode IR (JSON) var SALT_DEPS = 'deps' // cached cc -MM dependency list var SALT_FAIL = 'fail' // cached compilation failure +var SALT_BMFST = 'bmfst' // stat-based build manifest (object level) +var SALT_BMFST_DL = 'bmfst_dl' // stat-based build manifest (dylib level) function cache_path(content, salt) { return get_build_dir() + '/' + content_hash(content + '\n' + salt) @@ -120,6 +149,91 @@ function get_build_dir() { Build.ensure_dir = fd.ensure_dir +// ============================================================================ +// Stat-based build 
manifest (zero-read warm cache) +// ============================================================================ + +function bmfst_path(cmd_str, src_path) { + return cache_path(cmd_str + '\n' + src_path, SALT_BMFST) +} + +function bmfst_probe(cmd_str, src_path) { + var mf_path = bmfst_path(cmd_str, src_path) + if (!fd.is_file(mf_path)) return null + var mf = json.decode(text(fd.slurp(mf_path))) + if (!mf || !mf.d || !mf.o) return null + if (!fd.is_file(mf.o)) return null + var ok = true + arrfor(mf.d, function(entry) { + if (!ok) return + var st = memo_stat(entry.p) + if (!st || st.m != entry.m || st.s != entry.s) + ok = false + }) + if (!ok) return null + return mf.o +} + +function bmfst_save(cmd_str, src_path, deps, obj_path) { + var entries = [] + arrfor(deps, function(dep_path) { + var st = memo_stat(dep_path) + if (st) + push(entries, {p: dep_path, m: st.m, s: st.s}) + }) + var mf = {o: obj_path, d: entries} + var mf_path = bmfst_path(cmd_str, src_path) + fd.slurpwrite(mf_path, stone(blob(json.encode(mf)))) +} + +// Dylib-level stat manifest — keyed on compile cmd + link info + src path. +// All key inputs are available without reading any files. 
+ +function bmfst_dl_key(setup, link_info) { + var parts = [setup.cmd_str, setup.src_path] + push(parts, 'target:' + text(link_info.target)) + push(parts, 'cc:' + text(link_info.cc)) + arrfor(link_info.extra_objects, function(obj) { + if (obj != null) push(parts, 'extra:' + text(obj)) + }) + arrfor(link_info.ldflags, function(flag) { + push(parts, 'ldflag:' + text(flag)) + }) + arrfor(link_info.target_ldflags, function(flag) { + push(parts, 'target_ldflag:' + text(flag)) + }) + return text(parts, '\n') +} + +function bmfst_dl_probe(setup, link_info) { + var mf_path = cache_path(bmfst_dl_key(setup, link_info), SALT_BMFST_DL) + if (!fd.is_file(mf_path)) return null + var mf = json.decode(text(fd.slurp(mf_path))) + if (!mf || !mf.d || !mf.dylib) return null + if (!fd.is_file(mf.dylib)) return null + var ok = true + arrfor(mf.d, function(entry) { + if (!ok) return + var st = memo_stat(entry.p) + if (!st || st.m != entry.m || st.s != entry.s) + ok = false + }) + if (!ok) return null + return mf.dylib +} + +function bmfst_dl_save(setup, link_info, deps, dylib_path) { + var entries = [] + arrfor(deps, function(dep_path) { + var st = memo_stat(dep_path) + if (st) + push(entries, {p: dep_path, m: st.m, s: st.s}) + }) + var mf = {dylib: dylib_path, d: entries} + var mf_path = cache_path(bmfst_dl_key(setup, link_info), SALT_BMFST_DL) + fd.slurpwrite(mf_path, stone(blob(json.encode(mf)))) +} + // ============================================================================ // Dependency scanning helpers // ============================================================================ @@ -158,8 +272,9 @@ function get_c_deps(cc, flags, src_path) { function hash_all_deps(cmd_str, deps) { var parts = [cmd_str] arrfor(deps, function(dep_path) { - if (fd.is_file(dep_path)) - push(parts, dep_path + '\n' + text(fd.slurp(dep_path))) + var content = memo_read(dep_path) + if (content != null) + push(parts, dep_path + '\n' + content) else push(parts, dep_path + '\n') }) @@ -170,32 +285,23 @@ 
function hash_all_deps(cmd_str, deps) { // Compilation // ============================================================================ -// Compile a single C file for a package -// Returns the object file path (content-addressed in .cell/build) -Build.compile_file = function(pkg, file, target, opts) { +// Build the compile command string and common flags for a C file. +// Returns {cmd_str, src_path, cc, common_flags, pkg_dir} or null if source missing. +function compile_setup(pkg, file, target, opts) { var _opts = opts || {} var _buildtype = _opts.buildtype || 'release' var pkg_dir = shop.get_package_dir(pkg) var src_path = pkg_dir + '/' + file var core_dir = null - if (!fd.is_file(src_path)) { - log.error('Source file not found: ' + src_path) - return null - } + if (!fd.is_file(src_path)) return null - // Use pre-fetched cflags if provided, otherwise fetch them var cflags = _opts.cflags || replace_sigils_array(pkg_tools.get_flags(pkg, 'CFLAGS', target), pkg_dir) var target_cflags = toolchains[target].c_args || [] var cc = toolchains[target].c - - // Symbol name for this file var sym_name = shop.c_symbol_for_file(pkg, file) - - // Build common flags (shared between dep scan and compilation) var common_flags = [] - // Add buildtype-specific flags if (_buildtype == 'release') { common_flags = array(common_flags, ['-O3', '-DNDEBUG']) } else if (_buildtype == 'debug') { @@ -207,18 +313,15 @@ Build.compile_file = function(pkg, file, target, opts) { push(common_flags, '-DCELL_USE_NAME=' + sym_name) push(common_flags, '-I"' + pkg_dir + '"') - // Auto-discover include/ directory if (fd.is_dir(pkg_dir + '/include')) { push(common_flags, '-I"' + pkg_dir + '/include"') } - // External packages need core's source dir for cell.h, quickjs.h, blob.h if (pkg != 'core') { core_dir = shop.get_package_dir('core') push(common_flags, '-I"' + core_dir + '/source"') } - // Add package CFLAGS (resolve relative -I paths) arrfor(cflags, function(flag) { var f = flag var ipath = null @@ 
-231,82 +334,138 @@ Build.compile_file = function(pkg, file, target, opts) { push(common_flags, f) }) - // Add target CFLAGS arrfor(target_cflags, function(flag) { push(common_flags, flag) }) - // Build full compilation command var cmd_parts = [cc, '-c', '-fPIC'] cmd_parts = array(cmd_parts, common_flags) push(cmd_parts, '"' + src_path + '"') - var cmd_str = text(cmd_parts, ' ') - - if (_opts.verbose) { - log.build('[verbose] CFLAGS: ' + text(cflags, ' ')) - log.build('[verbose] compile: ' + cmd_str) + return { + cmd_str: text(cmd_parts, ' '), + src_path: src_path, + cc: cc, + common_flags: common_flags, + pkg_dir: pkg_dir } +} - // Two-level cache: quick hash for deps file, full hash for object - var file_content = fd.slurp(src_path) - var quick_content = cmd_str + '\n' + text(file_content) - var deps_path = cache_path(quick_content, SALT_DEPS) +// Probe for the full content key (source + all deps + compile flags). +// Returns {full_content, deps, fail} or null if deps not cached yet (cold path). 
+function probe_source_key(setup, file) { + var file_content = memo_read(setup.src_path) + var quick_content = setup.cmd_str + '\n' + file_content var fail_path = cache_path(quick_content, SALT_FAIL) - - var build_dir = get_build_dir() - fd.ensure_dir(build_dir) - - // Check for cached failure (skip files that previously failed to compile) - if (fd.is_file(fail_path)) { - if (_opts.verbose) log.build('[verbose] skipping ' + file + ' (cached failure)') - log.shop('skip ' + file + ' (cached failure)') - return null - } - + var deps_path = cache_path(quick_content, SALT_DEPS) var deps = null var full_content = null - var obj_path = null - // Warm path: read cached dep list, verify by hashing all deps + if (fd.is_file(fail_path)) return {fail: true, fail_path: fail_path} + if (fd.is_file(deps_path)) { deps = filter(array(text(fd.slurp(deps_path)), '\n'), function(p) { return length(p) > 0 }) - full_content = hash_all_deps(cmd_str, deps) + full_content = hash_all_deps(setup.cmd_str, deps) + return {full_content: full_content, deps: deps} + } + + return null +} + +// Compile a single C file for a package +// Returns the object file path (content-addressed in .cell/build) +Build.compile_file = function(pkg, file, target, opts) { + var _opts = opts || {} + var setup = compile_setup(pkg, file, target, _opts) + if (!setup) { + log.error('Source file not found: ' + shop.get_package_dir(pkg) + '/' + file) + return null + } + + if (_opts.verbose) { + log.build('[verbose] compile: ' + setup.cmd_str) + } + + // Layer 2: stat-based manifest probe (zero file reads on warm cache) + var mf_obj = null + if (!_opts.force) { + mf_obj = bmfst_probe(setup.cmd_str, setup.src_path) + if (mf_obj) { + if (_opts.verbose) log.build('[verbose] manifest hit: ' + file) + log.shop('manifest hit ' + file) + return mf_obj + } + } + + var build_dir = get_build_dir() + fd.ensure_dir(build_dir) + + var probe = probe_source_key(setup, file) + var _fail_msg = null + + // Check for cached failure + if 
(probe && probe.fail) { + _fail_msg = probe.fail_path ? text(fd.slurp(probe.fail_path)) : null + log.shop('skip ' + file + ' (cached failure)') + if (_fail_msg) log.console(file + ':\n' + _fail_msg) + return null + } + + var full_content = null + var deps = null + var obj_path = null + + // Warm path: deps cached, check object + if (probe && probe.full_content) { + full_content = probe.full_content obj_path = cache_path(full_content, SALT_OBJ) if (fd.is_file(obj_path)) { if (_opts.verbose) log.build('[verbose] cache hit: ' + file) log.shop('cache hit ' + file) + bmfst_save(setup.cmd_str, setup.src_path, probe.deps, obj_path) return obj_path } log.shop('cache stale ' + file + ' (header changed)') + deps = probe.deps } - // Cold path: run cc -MM to discover deps - log.shop('dep scan ' + file) - deps = get_c_deps(cc, common_flags, src_path) - full_content = hash_all_deps(cmd_str, deps) - obj_path = cache_path(full_content, SALT_OBJ) + var file_content = null + var quick_content = null + var err_path = null + var full_cmd = null + var err_text = null + var missing = null + var err_lines = null + var first_err = null + var ret = null - // Check if object exists (might exist from previous build with same deps) - if (fd.is_file(obj_path)) { - fd.slurpwrite(deps_path, stone(blob(text(deps, '\n')))) - if (_opts.verbose) log.build('[verbose] cache hit: ' + file + ' (after dep scan)') - log.shop('cache hit ' + file + ' (after dep scan)') - return obj_path + // Cold path: run cc -MM to discover deps + if (!deps) { + log.shop('dep scan ' + file) + deps = get_c_deps(setup.cc, setup.common_flags, setup.src_path) + full_content = hash_all_deps(setup.cmd_str, deps) + obj_path = cache_path(full_content, SALT_OBJ) + + // Check if object exists (might exist from previous build with same deps) + if (fd.is_file(obj_path)) { + file_content = memo_read(setup.src_path) + quick_content = setup.cmd_str + '\n' + file_content + fd.slurpwrite(cache_path(quick_content, SALT_DEPS), 
stone(blob(text(deps, '\n')))) + if (_opts.verbose) log.build('[verbose] cache hit: ' + file + ' (after dep scan)') + log.shop('cache hit ' + file + ' (after dep scan)') + bmfst_save(setup.cmd_str, setup.src_path, deps, obj_path) + return obj_path + } } // Compile log.shop('compiling ' + file) log.console('Compiling ' + file) - var err_path = '/tmp/cell_build_err_' + content_hash(src_path) + '.log' - var full_cmd = cmd_str + ' -o "' + obj_path + '" 2>"' + err_path + '"' - var err_text = null - var missing = null - var err_lines = null - var first_err = null - var ret = os.system(full_cmd) + err_path = '/tmp/cell_build_err_' + content_hash(setup.src_path) + '.log' + full_cmd = setup.cmd_str + ' -o "' + obj_path + '" 2>"' + err_path + '"' + ret = os.system(full_cmd) if (ret != 0) { if (fd.is_file(err_path)) { err_text = text(fd.slurp(err_path)) @@ -324,13 +483,17 @@ Build.compile_file = function(pkg, file, target, opts) { if (err_text) log.error(err_text) else log.error('Command: ' + full_cmd) } - // Cache the failure so we don't retry on every build - fd.slurpwrite(fail_path, stone(blob(err_text || 'compilation failed'))) + file_content = memo_read(setup.src_path) + quick_content = setup.cmd_str + '\n' + file_content + fd.slurpwrite(cache_path(quick_content, SALT_FAIL), stone(blob(err_text || 'compilation failed'))) return null } // Save deps for future warm-path lookups - fd.slurpwrite(deps_path, stone(blob(text(deps, '\n')))) + file_content = memo_read(setup.src_path) + quick_content = setup.cmd_str + '\n' + file_content + fd.slurpwrite(cache_path(quick_content, SALT_DEPS), stone(blob(text(deps, '\n')))) + bmfst_save(setup.cmd_str, setup.src_path, deps, obj_path) return obj_path } @@ -358,46 +521,42 @@ Build.build_package = function(pkg, target, exclude_main, buildtype) { // Dynamic library building // ============================================================================ -// Compute link content string from all inputs that affect the dylib output -function 
compute_link_content(objects, ldflags, target_ldflags, opts) { - // Sort objects for deterministic hash - var sorted_objects = sort(objects) - - // Build a string representing all link inputs - var parts = [] - push(parts, 'target:' + text(opts.target)) - push(parts, 'cc:' + text(opts.cc)) - arrfor(sorted_objects, function(obj) { - // Object paths are content-addressed, so the path itself is the hash - push(parts, 'obj:' + text(obj)) +// Compute dylib content key from source content key + link info +// link_opts: {extra_objects, ldflags, target_ldflags, target, cc} +function compute_dylib_content(full_content, link_opts) { + var parts = [full_content] + push(parts, 'target:' + text(link_opts.target)) + push(parts, 'cc:' + text(link_opts.cc)) + arrfor(link_opts.extra_objects, function(obj) { + if (obj != null) push(parts, 'extra:' + text(obj)) }) - arrfor(ldflags, function(flag) { + arrfor(link_opts.ldflags, function(flag) { push(parts, 'ldflag:' + text(flag)) }) - arrfor(target_ldflags, function(flag) { + arrfor(link_opts.target_ldflags, function(flag) { push(parts, 'target_ldflag:' + text(flag)) }) - return text(parts, '\n') } // Build a per-module dynamic library for a single C file -// Returns the content-addressed dylib path in .cell/build/..dylib +// Returns the content-addressed dylib path in .cell/build/ +// Checks dylib cache first; only compiles the object if the dylib is stale. 
Build.build_module_dylib = function(pkg, file, target, opts) { var _opts = opts || {} var _target = target || Build.detect_host_target() var _buildtype = _opts.buildtype || 'release' var _extra = _opts.extra_objects || [] - var obj = Build.compile_file(pkg, file, _target, {buildtype: _buildtype, cflags: _opts.cflags}) - if (!obj) return null + + var setup = compile_setup(pkg, file, _target, {buildtype: _buildtype, cflags: _opts.cflags}) + if (!setup) return null var tc = toolchains[_target] var cc = tc.cpp || tc.c var local_dir = get_local_dir() - var pkg_dir = shop.get_package_dir(pkg) // Get link flags - var ldflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'LDFLAGS', _target), pkg_dir) + var ldflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'LDFLAGS', _target), setup.pkg_dir) var target_ldflags = tc.c_link_args || [] var resolved_ldflags = [] arrfor(ldflags, function(flag) { @@ -405,72 +564,149 @@ Build.build_module_dylib = function(pkg, file, target, opts) { var lpath = null if (starts_with(f, '-L') && !starts_with(f, '-L/')) { lpath = text(f, 2) - if (!starts_with(lpath, pkg_dir)) { - f = '-L"' + pkg_dir + '/' + lpath + '"' + if (!starts_with(lpath, setup.pkg_dir)) { + f = '-L"' + setup.pkg_dir + '/' + lpath + '"' } } push(resolved_ldflags, f) }) - // Content-addressed output: hash of (all objects + link flags + target) - var all_objects = [obj] - all_objects = array(all_objects, _extra) - var link_content = compute_link_content(all_objects, resolved_ldflags, target_ldflags, {target: _target, cc: cc}) var build_dir = get_build_dir() fd.ensure_dir(build_dir) - var dylib_path = cache_path(link_content, SALT_DYLIB) + + var link_info = {extra_objects: _extra, ldflags: resolved_ldflags, target_ldflags: target_ldflags, target: _target, cc: cc} + + // Stat-based dylib manifest — zero file reads on warm cache + var mf_dylib = null + if (!_opts.force) { + mf_dylib = bmfst_dl_probe(setup, link_info) + if (mf_dylib) { + if (_opts.verbose) log.build('[verbose] 
manifest hit: ' + file) + log.shop('manifest hit ' + file) + return mf_dylib + } + } + + // Probe source key — check dylib cache before compiling + var probe = probe_source_key(setup, file) + var dylib_path = null + var dylib_content = null + var obj = null + var obj_path = null var cmd_parts = null var cmd_str = null var ret = null + var post_probe = null + var fallback_probe = null + var _fail_msg2 = null + + if (probe && probe.fail) { + _fail_msg2 = probe.fail_path ? text(fd.slurp(probe.fail_path)) : null + log.shop('skip ' + file + ' (cached failure)') + if (_fail_msg2) log.console(file + ':\n' + _fail_msg2) + return null + } + + if (probe && probe.full_content) { + dylib_content = compute_dylib_content(probe.full_content, link_info) + dylib_path = cache_path(dylib_content, SALT_DYLIB) + if (!_opts.force && fd.is_file(dylib_path)) { + log.shop('cache hit ' + file) + bmfst_dl_save(setup, link_info, probe.deps, dylib_path) + return dylib_path + } + + // Dylib stale but object might be cached — check before compiling + obj_path = cache_path(probe.full_content, SALT_OBJ) + if (fd.is_file(obj_path)) { + obj = obj_path + } + } + + // Object not cached — compile it + if (!obj) { + obj = Build.compile_file(pkg, file, _target, {buildtype: _buildtype, cflags: _opts.cflags, force: _opts.force}) + if (!obj) return null + + // Recompute dylib key with the now-known source key + if (!dylib_path) { + post_probe = probe_source_key(setup, file) + if (post_probe && post_probe.full_content) { + dylib_content = compute_dylib_content(post_probe.full_content, link_info) + dylib_path = cache_path(dylib_content, SALT_DYLIB) + if (fd.is_file(dylib_path)) { + bmfst_dl_save(setup, link_info, post_probe.deps, dylib_path) + return dylib_path + } + } + } + } + + // Need dylib_path for output + if (!dylib_path) { + // Fallback: probe should succeed now since compile_file cached deps + fallback_probe = probe_source_key(setup, file) + if (fallback_probe && fallback_probe.full_content) { + 
dylib_content = compute_dylib_content(fallback_probe.full_content, link_info) + dylib_path = cache_path(dylib_content, SALT_DYLIB) + } else { + log.error('Cannot compute dylib key for ' + file) + return null + } + } if (_opts.verbose) { log.build('[verbose] LDFLAGS: ' + text(resolved_ldflags, ' ')) } - if (!fd.is_file(dylib_path)) { - cmd_parts = [cc, '-shared', '-fPIC'] + // Link + cmd_parts = [cc, '-shared', '-fPIC'] - if (tc.system == 'darwin') { - cmd_parts = array(cmd_parts, [ - '-undefined', 'dynamic_lookup', - '-Wl,-dead_strip', - '-Wl,-rpath,@loader_path/../local', - '-Wl,-rpath,' + local_dir - ]) - } else if (tc.system == 'linux') { - cmd_parts = array(cmd_parts, [ - '-Wl,--allow-shlib-undefined', - '-Wl,--gc-sections', - '-Wl,-rpath,$ORIGIN/../local', - '-Wl,-rpath,' + local_dir - ]) - } else if (tc.system == 'windows') { - push(cmd_parts, '-Wl,--allow-shlib-undefined') - } - - push(cmd_parts, '-L"' + local_dir + '"') - push(cmd_parts, '"' + text(obj) + '"') - arrfor(_extra, function(extra_obj) { - if (extra_obj != null) push(cmd_parts, '"' + text(extra_obj) + '"') - }) - cmd_parts = array(cmd_parts, resolved_ldflags) - cmd_parts = array(cmd_parts, target_ldflags) - push(cmd_parts, '-o') - push(cmd_parts, '"' + dylib_path + '"') - - cmd_str = text(cmd_parts, ' ') - if (_opts.verbose) log.build('[verbose] link: ' + cmd_str) - log.shop('linking ' + file) - log.console('Linking module ' + file + ' -> ' + fd.basename(dylib_path)) - ret = os.system(cmd_str) - if (ret != 0) { - log.error('Linking failed: ' + file) - return null - } - } else { - log.shop('link cache hit ' + file) + if (tc.system == 'darwin') { + cmd_parts = array(cmd_parts, [ + '-undefined', 'dynamic_lookup', + '-Wl,-dead_strip', + '-Wl,-rpath,@loader_path/../local', + '-Wl,-rpath,' + local_dir + ]) + } else if (tc.system == 'linux') { + cmd_parts = array(cmd_parts, [ + '-Wl,--allow-shlib-undefined', + '-Wl,--gc-sections', + '-Wl,-rpath,$ORIGIN/../local', + '-Wl,-rpath,' + local_dir + ]) + } 
else if (tc.system == 'windows') { + push(cmd_parts, '-Wl,--allow-shlib-undefined') } + push(cmd_parts, '-L"' + local_dir + '"') + push(cmd_parts, '"' + text(obj) + '"') + arrfor(_extra, function(extra_obj) { + if (extra_obj != null) push(cmd_parts, '"' + text(extra_obj) + '"') + }) + cmd_parts = array(cmd_parts, resolved_ldflags) + cmd_parts = array(cmd_parts, target_ldflags) + push(cmd_parts, '-o') + push(cmd_parts, '"' + dylib_path + '"') + + cmd_str = text(cmd_parts, ' ') + if (_opts.verbose) log.build('[verbose] link: ' + cmd_str) + log.shop('linking ' + file) + log.console('Linking module ' + file + ' -> ' + fd.basename(dylib_path)) + ret = os.system(cmd_str) + if (ret != 0) { + log.error('Linking failed: ' + file) + return null + } + + // Save dylib manifest for future stat-based probes + var mf_deps = null + if (fallback_probe && fallback_probe.deps) mf_deps = fallback_probe.deps + if (post_probe && post_probe.deps) mf_deps = post_probe.deps + if (probe && probe.deps) mf_deps = probe.deps + if (mf_deps) bmfst_dl_save(setup, link_info, mf_deps, dylib_path) + return dylib_path } @@ -493,21 +729,20 @@ Build.build_dynamic = function(pkg, target, buildtype, opts) { var support_objects = [] if (pkg != 'core') { arrfor(sources, function(src_file) { - var obj = Build.compile_file(pkg, src_file, _target, {buildtype: _buildtype, cflags: cached_cflags, verbose: _opts.verbose}) + var obj = Build.compile_file(pkg, src_file, _target, {buildtype: _buildtype, cflags: cached_cflags, verbose: _opts.verbose, force: _opts.force}) if (obj != null) push(support_objects, obj) }) } arrfor(c_files, function(file) { var sym_name = shop.c_symbol_for_file(pkg, file) - var dylib = Build.build_module_dylib(pkg, file, _target, {buildtype: _buildtype, extra_objects: support_objects, cflags: cached_cflags, verbose: _opts.verbose}) + var dylib = Build.build_module_dylib(pkg, file, _target, {buildtype: _buildtype, extra_objects: support_objects, cflags: cached_cflags, verbose: _opts.verbose, 
force: _opts.force}) if (dylib) { push(results, {file: file, symbol: sym_name, dylib: dylib}) } }) // Write manifest so runtime can find dylibs without the build module - var json = use('json') var mpath = manifest_path(pkg) fd.slurpwrite(mpath, stone(blob(json.encode(results)))) @@ -819,7 +1054,6 @@ Build.compile_cm_to_mach = function(src_path) { if (!fd.is_file(src_path)) { log.error('Source file not found: ' + src_path); disrupt } - var json = use('json') var optimized = shop.compile_file(src_path) return mach_compile_mcode_bin(src_path, json.encode(optimized)) } @@ -829,7 +1063,6 @@ Build.compile_cm_to_mach = function(src_path) { // output: path to write the generated .c file Build.generate_module_table = function(modules, output) { var lines = [] - var json = use('json') push(lines, '/* Generated module table — do not edit */') push(lines, '#include ') push(lines, '#include ') @@ -935,6 +1168,8 @@ Build.SALT_MACH = SALT_MACH Build.SALT_MCODE = SALT_MCODE Build.SALT_DEPS = SALT_DEPS Build.SALT_FAIL = SALT_FAIL +Build.SALT_BMFST = SALT_BMFST +Build.SALT_BMFST_DL = SALT_BMFST_DL Build.cache_path = cache_path Build.manifest_path = manifest_path Build.native_sanitize_flags = native_sanitize_flags diff --git a/fold.cm b/fold.cm index a686c844..044a2609 100644 --- a/fold.cm +++ b/fold.cm @@ -4,6 +4,7 @@ var fold = function(ast) { var scopes = ast.scopes var nr_scopes = length(scopes) + ast._diagnostics = [] var type_tag_map = { array: "array", record: "record", text: "text", @@ -72,6 +73,7 @@ var fold = function(ast) { if (k == "record") { i = 0 while (i < length(expr.list)) { + if (expr.list[i].computed && !is_pure(expr.list[i].left)) return false if (!is_pure(expr.list[i].right)) return false i = i + 1 } @@ -285,6 +287,7 @@ var fold = function(ast) { if (k == "record") { i = 0 while (i < length(expr.list)) { + if (expr.list[i].computed) pre_scan_expr_fns(expr.list[i].left) pre_scan_expr_fns(expr.list[i].right) i = i + 1 } @@ -411,6 +414,9 @@ var fold = 
function(ast) { } else if (k == "record") { i = 0 while (i < length(expr.list)) { + if (expr.list[i].computed) { + expr.list[i].left = fold_expr(expr.list[i].left, fn_nr) + } expr.list[i].right = fold_expr(expr.list[i].right, fn_nr) i = i + 1 } @@ -701,8 +707,30 @@ var fold = function(ast) { name = stmt.left.name if (name != null) { sv = scope_var(fn_nr, name) - if (sv != null && sv.nr_uses == 0 && is_pure(stmt.right)) { - stmt.dead = true + if (sv != null && sv.nr_uses == 0) { + if (is_pure(stmt.right)) stmt.dead = true + if (stmt.right != null && stmt.right.kind == "(" && stmt.right.expression != null && stmt.right.expression.name == "use") { + push(ast._diagnostics, { + severity: "warning", + line: stmt.left.from_row + 1, + col: stmt.left.from_column + 1, + message: `unused import '${name}'` + }) + } else if (stmt.kind == "def") { + push(ast._diagnostics, { + severity: "warning", + line: stmt.left.from_row + 1, + col: stmt.left.from_column + 1, + message: `unused constant '${name}'` + }) + } else { + push(ast._diagnostics, { + severity: "warning", + line: stmt.left.from_row + 1, + col: stmt.left.from_column + 1, + message: `unused variable '${name}'` + }) + } } } } @@ -715,6 +743,12 @@ var fold = function(ast) { sv = scope_var(fn_nr, stmt.name) if (sv != null && sv.nr_uses == 0) { stmt.dead = true + push(ast._diagnostics, { + severity: "warning", + line: stmt.from_row + 1, + col: stmt.from_column + 1, + message: `unused function '${stmt.name}'` + }) } } if (stmt.dead != true) push(out, stmt) @@ -831,6 +865,7 @@ var fold = function(ast) { if (k == "record") { i = 0 while (i < length(expr.list)) { + if (expr.list[i].computed) walk_expr_for_fns(expr.list[i].left) walk_expr_for_fns(expr.list[i].right) i = i + 1 } @@ -920,6 +955,7 @@ var fold = function(ast) { if (k == "record") { i = 0 while (i < length(expr.list)) { + if (expr.list[i].computed) collect_expr_intrinsics(expr.list[i].left) collect_expr_intrinsics(expr.list[i].right) i = i + 1 } @@ -1028,9 +1064,22 @@ 
var fold = function(ast) { // Remove dead top-level functions var live_fns = [] var fn = null + var fn_sv = null fi = 0 while (fi < length(ast.functions)) { fn = ast.functions[fi] + if (fn.name != null) { + fn_sv = scope_var(0, fn.name) + if (fn_sv != null && fn_sv.nr_uses == 0) { + fn.dead = true + push(ast._diagnostics, { + severity: "warning", + line: fn.from_row + 1, + col: fn.from_column + 1, + message: `unused function '${fn.name}'` + }) + } + } if (fn.dead != true) { push(live_fns, fn) } diff --git a/internal/engine.cm b/internal/engine.cm index ff384479..009172b6 100644 --- a/internal/engine.cm +++ b/internal/engine.cm @@ -158,6 +158,7 @@ function analyze(src, filename) { var line = null var col = null var has_errors = _ast.errors != null && length(_ast.errors) > 0 + var folded = null if (has_errors) { while (_i < length(_ast.errors)) { e = _ast.errors[_i] @@ -176,7 +177,17 @@ function analyze(src, filename) { } disrupt } - return fold_mod(_ast) + folded = fold_mod(_ast) + if (!_no_warn && folded._diagnostics != null && length(folded._diagnostics) > 0) { + _i = 0 + while (_i < length(folded._diagnostics)) { + e = folded._diagnostics[_i] + os.print(`${filename}:${text(e.line)}:${text(e.col)}: ${e.severity}: ${e.message}\n`) + _i = _i + 1 + } + } + folded._diagnostics = null + return folded } // Lazy-loaded verify_ir module (loaded on first use) diff --git a/internal/shop.cm b/internal/shop.cm index 4a9c5eb6..78065c96 100644 --- a/internal/shop.cm +++ b/internal/shop.cm @@ -48,6 +48,9 @@ function hash_path(content, salt) var Shop = {} +// Stack tracking the chain of use() calls for error reporting +var use_stack = [] + var SCOPE_LOCAL = 0 var SCOPE_PACKAGE = 1 var SCOPE_CORE = 2 @@ -69,11 +72,19 @@ // Get the core directory (in the global shop) var core_package = 'core' +var _core_realpath = null Shop.get_core_dir = function() { return get_packages_dir() + '/' + core_package } +function is_core_dir(dir) { + if (dir == 'core')
return true + if (_core_realpath == null) + _core_realpath = fd.realpath(Shop.get_core_dir()) || false + return _core_realpath && dir == _core_realpath +} + // Get the reports directory (in the global shop) Shop.get_reports_dir = function() { return global_shop_path + '/reports' @@ -182,7 +193,7 @@ function abs_path_to_package(package_dir) return cfg.package } - return null + return package_dir } // given a file, find the absolute path, package name, and import name @@ -815,9 +826,9 @@ function resolve_path(path, ctx) ctx_path = ctx_dir + '/' + path if (fd.is_file(ctx_path)) { - is_core = (ctx == 'core') || (ctx_dir == Shop.get_core_dir()) + is_core = (ctx == 'core') || is_core_dir(ctx_dir) scope = is_core ? SCOPE_CORE : SCOPE_LOCAL - return {path: ctx_path, scope: scope, pkg: ctx} + return {path: ctx_path, scope: scope, pkg: is_core ? 'core' : ctx} } if (is_internal_path(path)) @@ -949,9 +960,11 @@ function try_dylib_symbol(sym, pkg, file_stem) { var c_file = file_stem + '.c' var cpp_file = file_stem + '.cpp' - var entry = find(dylibs, function(r) { + var idx = find(dylibs, function(r) { return r.file == c_file || r.file == cpp_file }) + if (idx == null) return null + var entry = dylibs[idx] if (!entry || !entry.dylib) return null var handle = open_dls[entry.dylib] @@ -959,8 +972,8 @@ function try_dylib_symbol(sym, pkg, file_stem) { handle = os.dylib_open(entry.dylib) if (handle) open_dls[entry.dylib] = handle } - if (!handle) return null - if (!os.dylib_has_symbol(handle, sym)) return null + if (!handle) { log.shop(`try_dylib: no handle for ${entry.dylib}`); return null } + if (!os.dylib_has_symbol(handle, sym)) { log.shop(`try_dylib: no symbol ${sym} in ${entry.dylib}`); return null } log.shop('resolved ' + sym + ' from build cache') return function() { return os.dylib_symbol(handle, sym) } @@ -968,7 +981,8 @@ function try_dylib_symbol(sym, pkg, file_stem) { // Resolve a C symbol by searching: // At each scope: check build-cache dylib first, then internal 
(static) -function resolve_c_symbol(path, package_context) { +function resolve_c_symbol(path, _pkg_ctx) { + var package_context = is_core_dir(_pkg_ctx) ? 'core' : _pkg_ctx var explicit = split_explicit_package_import(path) var sym = null var loader = null @@ -1279,7 +1293,8 @@ function get_module(path, package_context) { return execute_module(info) } -Shop.use = function use(path, package_context) { +Shop.use = function use(path, _pkg_ctx) { + var package_context = is_core_dir(_pkg_ctx) ? 'core' : _pkg_ctx // Check for embedded module (static builds) var embed_key = 'embedded:' + path var embedded = null @@ -1299,6 +1314,8 @@ Shop.use = function use(path, package_context) { var info = resolve_module_info(path, package_context) var _ctx_dir2 = null var _alias2 = null + var _use_entry = path + ' (package: ' + package_context + ')' + var _chain = null if (!info) { log.shop(`Module '${path}' could not be found in package '${package_context}'`) _ctx_dir2 = package_context ? (starts_with(package_context, '/') ? 
package_context : get_packages_dir() + '/' + fd.safe_package_path(package_context)) : null @@ -1309,13 +1326,32 @@ Shop.use = function use(path, package_context) { _alias2 = pkg_tools.split_alias(package_context, path) if (_alias2 == null && search(path, '/') != null) log.shop(`Alias '${array(path, '/')[0]}' could not be resolved in package '${package_context}'`) + if (length(use_stack) > 0) { + _chain = 'use() chain:' + arrfor(use_stack, function(frame) { _chain = _chain + '\n -> ' + frame }) + _chain = _chain + '\n -> ' + path + ' [NOT FOUND]' + log.error(_chain) + } disrupt } if (use_cache[info.cache_key]) return use_cache[info.cache_key] - use_cache[info.cache_key] = execute_module(info) - return use_cache[info.cache_key] + + push(use_stack, _use_entry) + var _use_result = null + var _use_ok = false + var _load = function() { + _use_result = execute_module(info) + _use_ok = true + } disruption { + pop(use_stack) + disrupt + } + _load() + pop(use_stack) + use_cache[info.cache_key] = _use_result + return _use_result } // Resolve a use() module path to a filesystem path without compiling. @@ -1854,6 +1890,30 @@ function get_package_scripts(package) return scripts } +// Extract use() call arguments from source text. +// Returns an array of literal string arguments found in use('...') calls. 
+function extract_use_calls(source) { + var uses = [] + var idx = 0 + var start = 0 + var end = 0 + var arg = null + idx = search(source, "use(") + while (idx != null) { + start = idx + 5 + end = search(text(source, start), "'") + if (end == null) end = search(text(source, start), '"') + if (end != null) { + arg = text(source, start, start + end) + push(uses, arg) + } + end = search(text(source, idx + 4), "use(") + if (end != null) idx = idx + 4 + end + else break + } + return uses +} + Shop.build_package_scripts = function(package) { // compiles all .ce and .cm files in a package @@ -1877,6 +1937,63 @@ Shop.build_package_scripts = function(package) return {ok: ok, errors: errors, total: length(scripts)} } +// Check if all use() calls in a package's scripts can be resolved. +// Returns {ok, unresolved: [{script, module}], total} +Shop.audit_use_resolution = function(package) { + var scripts = get_package_scripts(package) + var pkg_dir = starts_with(package, '/') ?
package : get_package_abs_dir(package) + var unresolved = [] + var checked = 0 + var src = null + var content = null + var uses = null + var info = null + + arrfor(scripts, function(script) { + var _check = function() { + src = pkg_dir + '/' + script + if (!fd.is_file(src)) return + content = text(fd.slurp(src)) + if (!content || length(content) == 0) return + + // Simple regex-free extraction: find use(' and use(" patterns + uses = [] + var pos = 0 + var rest = content + var ui = null + var quote = null + var end = null + var arg = null + while (length(rest) > 0) { + ui = search(rest, "use(") + if (ui == null) break + rest = text(rest, ui + 4) + if (length(rest) == 0) break + quote = text(rest, 0, 1) + if (quote != "'" && quote != '"') continue + rest = text(rest, 1) + end = search(rest, quote) + if (end == null) continue + arg = text(rest, 0, end) + if (length(arg) > 0) push(uses, arg) + rest = text(rest, end + 1) + } + + arrfor(uses, function(mod) { + var _resolve = function() { + info = resolve_module_info(mod, package) + if (!info) push(unresolved, {script: script, module: mod}) + } disruption {} + _resolve() + }) + checked = checked + 1 + } disruption {} + _check() + }) + + return {ok: checked, unresolved: unresolved, total: length(scripts)} +} + Shop.get_package_scripts = get_package_scripts Shop.list_packages = function() diff --git a/link.cm b/link.cm index 53b04296..c592d351 100644 --- a/link.cm +++ b/link.cm @@ -86,7 +86,7 @@ Link.add = function(canonical, target, shop) { } } else { // Remote package target - ensure it's installed - shop.get(target) + shop.sync(target) } var links = Link.load() @@ -115,8 +115,7 @@ Link.add = function(canonical, target, shop) { } // Install the dependency if not already in shop var _get_dep = function() { - shop.get(dep_locator) - shop.extract(dep_locator) + shop.sync(dep_locator) } disruption { log.build(` Warning: Could not install dependency ${dep_locator}`) } @@ -233,8 +232,7 @@ Link.sync_all = function(shop) { } // 
Install the dependency if not already in shop var _get = function() { - shop.get(dep_locator) - shop.extract(dep_locator) + shop.sync(dep_locator) } disruption { // Silently continue - dependency may already be installed } diff --git a/parse.cm b/parse.cm index e5fc3d40..01e8f577 100644 --- a/parse.cm +++ b/parse.cm @@ -364,6 +364,7 @@ var parse = function(tokens, src, filename, tokenizer) { advance() left = parse_assign_expr() pair.left = left + pair.computed = true if (tok.kind == "]") advance() else parse_error(tok, "expected ']' after computed property") } else { @@ -1797,6 +1798,9 @@ var parse = function(tokens, src, filename, tokenizer) { prop = expr.list[i] val = prop.right sem_check_expr(scope, val) + if (prop.computed) { + sem_check_expr(scope, prop.left) + } i = i + 1 } return null diff --git a/source/scheduler.c b/source/scheduler.c index c9c08a1f..97b4736f 100644 --- a/source/scheduler.c +++ b/source/scheduler.c @@ -330,6 +330,35 @@ void actor_free(cell_rt *actor) } lockless_shdel(actors, actor->id); + // Purge any pending timers referencing this actor from the timer heap + // to prevent the timer thread from accessing freed memory. 
+ { + pthread_mutex_lock(&engine.lock); + int n = arrlen(timer_heap); + int w = 0; + for (int r = 0; r < n; r++) { + if (timer_heap[r].actor != actor) + timer_heap[w++] = timer_heap[r]; + } + arrsetlen(timer_heap, w); + for (int i = w / 2 - 1; i >= 0; i--) { + int j = i; + while (1) { + int left = 2 * j + 1, right = 2 * j + 2, smallest = j; + if (left < w && timer_heap[left].execute_at_ns < timer_heap[smallest].execute_at_ns) + smallest = left; + if (right < w && timer_heap[right].execute_at_ns < timer_heap[smallest].execute_at_ns) + smallest = right; + if (smallest == j) break; + timer_node tmp = timer_heap[j]; + timer_heap[j] = timer_heap[smallest]; + timer_heap[smallest] = tmp; + j = smallest; + } + } + pthread_mutex_unlock(&engine.lock); + } + // Do not go forward with actor destruction until the actor is completely free pthread_mutex_lock(actor->msg_mutex); pthread_mutex_lock(actor->mutex);