// Snapshot metadata (from repository listing): cell/build.cm,
// 2026-02-20 18:09:19 -06:00, 1174 lines, 38 KiB, listed as "Plaintext".

// build.cm - Simplified build utilities for Cell
//
// Key functions:
// Build.compile_file(pkg, file, target) - Compile a C file, returns object path
// Build.build_package(pkg, target) - Build all C files for a package
// Build.build_dynamic(pkg, target) - Build dynamic library for a package
// Build.build_static(packages, target, output) - Build static binary
var fd = use('fd')
var crypto = use('internal/crypto')
var blob = use('blob')
var os = use('internal/os')
var toolchains = use('toolchains')
var shop = use('internal/shop')
var pkg_tools = use('package')
var json = use('json')
var Build = {}
// ============================================================================
// Per-run memoization caches (reset when process exits)
// ============================================================================
var _stat_done = {} // path -> true once fd.stat has been attempted (even if it failed)
var _stat_fp = {} // path -> {m: mtime, s: size} fingerprint; absent when stat failed
var _read_cache = {} // path -> full file contents as text
// Stat a file at most once per run and return its {m: mtime, s: size}
// fingerprint. A failed stat (no usable mtime) is also cached negatively:
// the path will not be re-stat'ed for the rest of this process, and the
// function returns nothing for it.
function memo_stat(path) {
  var st = null
  if (!_stat_done[path]) {
    _stat_done[path] = true
    st = fd.stat(path)
    // Only record a fingerprint when stat produced a usable mtime.
    if (st.mtime != null)
      _stat_fp[path] = {m: st.mtime, s: st.size}
  }
  return _stat_fp[path]
}
// Read a file's full contents as text, memoized per run.
// Returns null when the file cannot be stat'ed (and therefore not read).
function memo_read(path) {
  var cached = _read_cache[path]
  if (cached != null) return cached
  if (!memo_stat(path)) return null
  var contents = text(fd.slurp(path))
  _read_cache[path] = contents
  return contents
}
// ============================================================================
// Sigil replacement
// ============================================================================
// Directory holding prebuilt libraries, as reported by the shop module.
function get_local_dir() {
  return shop.get_local_dir()
}
// Expand build sigils in a single string.
//   $LOCAL   -> absolute path to the .cell/local directory
//   $PACKAGE -> the package directory (only when pkg_dir is given)
function replace_sigils(str, pkg_dir) {
  var local_abs = fd.realpath('.') + '/' + get_local_dir()
  var out = replace(str, '$LOCAL', local_abs)
  if (pkg_dir) out = replace(out, '$PACKAGE', pkg_dir)
  return out
}
// Expand build sigils in every flag of an array; returns a new array.
function replace_sigils_array(flags, pkg_dir) {
  var expanded = []
  arrfor(flags, function(f) {
    push(expanded, replace_sigils(f, pkg_dir))
  })
  return expanded
}
Build.get_local_dir = get_local_dir
// ============================================================================
// Toolchain helpers
// ============================================================================
// All known cross-compilation targets (the keys of the toolchains table).
Build.list_targets = function() {
  return array(toolchains)
}
// True when a toolchain definition exists for the given target name.
Build.has_target = function(target) {
  return toolchains[target] != null
}
// Guess the toolchain target matching the machine we are running on.
// Falls back to arm64 when os.arch is unavailable; returns null for
// platforms with no known toolchain mapping.
Build.detect_host_target = function() {
  var platform = os.platform()
  var arch = os.arch ? os.arch() : 'arm64'
  var is_x86 = (arch == 'x86_64')
  if (platform == 'macOS' || platform == 'darwin')
    return is_x86 ? 'macos_x86_64' : 'macos_arm64'
  if (platform == 'Linux' || platform == 'linux')
    return is_x86 ? 'linux' : 'linux_arm64'
  if (platform == 'Windows' || platform == 'windows')
    return 'windows'
  return null
}
// ============================================================================
// Content-addressed build cache
// ============================================================================
// Hex-encoded 32-byte blake2 hash of a string.
function content_hash(str) {
  return text(crypto.blake2(stone(blob(str)), 32), 'h')
}
// AOT AddressSanitizer is opted into by creating .cell/asan_aot in the
// package root. Returns the extra compiler flags (with a leading space),
// or an empty string when disabled.
function native_sanitize_flags() {
  if (!fd.is_file('.cell/asan_aot')) return ''
  return ' -fsanitize=address -fno-omit-frame-pointer'
}
// ============================================================================
// Cache key salts — canonical registry
// Every artifact type has a unique salt so hash collisions between types
// are impossible, and no file extensions are needed in build/.
// (cache_path appends the salt to the key content before hashing.)
// ============================================================================
var SALT_OBJ = 'obj' // compiled C object file
var SALT_DYLIB = 'dylib' // linked dynamic library
var SALT_NATIVE = 'native' // native-compiled .cm dylib
var SALT_MACH = 'mach' // mach bytecode blob
var SALT_MCODE = 'mcode' // mcode IR (JSON)
var SALT_DEPS = 'deps' // cached cc -MM dependency list
var SALT_FAIL = 'fail' // cached compilation failure
var SALT_BMFST = 'bmfst' // stat-based build manifest (object level)
var SALT_BMFST_DL = 'bmfst_dl' // stat-based build manifest (dylib level)
// Content-addressed path in the build dir. The salt namespaces artifact
// types so identical content of different kinds can never collide.
function cache_path(content, salt) {
  var key = content + '\n' + salt
  return get_build_dir() + '/' + content_hash(key)
}
// Deterministic manifest path for a package's built dylibs.
function manifest_path(pkg) {
  var key = pkg + '\n' + 'manifest'
  return get_build_dir() + '/' + content_hash(key)
}
// Cache-key material for a native-compiled .cm module: source text,
// target name, a literal 'native' discriminator, and the sanitizer flags.
function native_cache_content(src, target, san_flags) {
  var san = san_flags || ''
  return src + '\n' + target + '\nnative\n' + san
}
// Build output directory, as reported by the shop module.
function get_build_dir() {
  return shop.get_build_dir()
}
// Re-export the directory-creation helper for callers of Build.
Build.ensure_dir = fd.ensure_dir
// ============================================================================
// Stat-based build manifest (zero-read warm cache)
// ============================================================================
// Manifest location for a (compile command, source path) pair.
function bmfst_path(cmd_str, src_path) {
  var key = cmd_str + '\n' + src_path
  return cache_path(key, SALT_BMFST)
}
// Probe the object-level stat manifest. Returns the cached object path when
// every recorded dependency still matches its saved mtime/size fingerprint,
// otherwise null.
function bmfst_probe(cmd_str, src_path) {
  var mf_path = bmfst_path(cmd_str, src_path)
  if (!fd.is_file(mf_path)) return null
  var mf = json.decode(text(fd.slurp(mf_path)))
  if (!mf || !mf.d || !mf.o) return null
  if (!fd.is_file(mf.o)) return null
  var fresh = true
  arrfor(mf.d, function(entry) {
    if (!fresh) return
    var fp = memo_stat(entry.p)
    if (!fp || fp.m != entry.m || fp.s != entry.s)
      fresh = false
  })
  return fresh ? mf.o : null
}
// Record the object path plus a stat fingerprint for every dependency so a
// later run can validate the build without reading any file contents.
function bmfst_save(cmd_str, src_path, deps, obj_path) {
  var fingerprints = []
  arrfor(deps, function(dep_path) {
    var fp = memo_stat(dep_path)
    if (fp)
      push(fingerprints, {p: dep_path, m: fp.m, s: fp.s})
  })
  var mf_path = bmfst_path(cmd_str, src_path)
  fd.slurpwrite(mf_path, stone(blob(json.encode({o: obj_path, d: fingerprints}))))
}
// Dylib-level stat manifest key — compile cmd + source path + link inputs.
// Every component is available without reading any file, which is what
// makes the dylib manifest a zero-read probe.
function bmfst_dl_key(setup, link_info) {
  var parts = [
    setup.cmd_str,
    setup.src_path,
    'target:' + text(link_info.target),
    'cc:' + text(link_info.cc)
  ]
  arrfor(link_info.extra_objects, function(obj) {
    if (obj != null) push(parts, 'extra:' + text(obj))
  })
  arrfor(link_info.ldflags, function(flag) {
    push(parts, 'ldflag:' + text(flag))
  })
  arrfor(link_info.target_ldflags, function(flag) {
    push(parts, 'target_ldflag:' + text(flag))
  })
  return text(parts, '\n')
}
// Probe the dylib-level stat manifest. Returns the cached dylib path when
// every recorded dependency fingerprint still matches, otherwise null.
function bmfst_dl_probe(setup, link_info) {
  var mf_path = cache_path(bmfst_dl_key(setup, link_info), SALT_BMFST_DL)
  if (!fd.is_file(mf_path)) return null
  var mf = json.decode(text(fd.slurp(mf_path)))
  if (!mf || !mf.d || !mf.dylib) return null
  if (!fd.is_file(mf.dylib)) return null
  var fresh = true
  arrfor(mf.d, function(entry) {
    if (!fresh) return
    var fp = memo_stat(entry.p)
    if (!fp || fp.m != entry.m || fp.s != entry.s)
      fresh = false
  })
  return fresh ? mf.dylib : null
}
// Record the dylib path plus dependency stat fingerprints so future runs
// can validate the link result without reading any file contents.
function bmfst_dl_save(setup, link_info, deps, dylib_path) {
  var fingerprints = []
  arrfor(deps, function(dep_path) {
    var fp = memo_stat(dep_path)
    if (fp)
      push(fingerprints, {p: dep_path, m: fp.m, s: fp.s})
  })
  var mf_path = cache_path(bmfst_dl_key(setup, link_info), SALT_BMFST_DL)
  fd.slurpwrite(mf_path, stone(blob(json.encode({dylib: dylib_path, d: fingerprints}))))
}
// ============================================================================
// Dependency scanning helpers
// ============================================================================
// Parse make-style dependency output:
//   foo.o: foo.c header1.h \
//     header2.h
// Returns array of dependency file paths (skips the "foo.o:" target).
function parse_makefile_deps(dep_text) {
  // Join backslash-newline continuations into one logical line.
  // NOTE(review): if replace() only rewrites the FIRST regex match in this
  // runtime, later continuation backslashes survive and end up as stray
  // "\" entries in the dep list (the \s+ split below handles the newlines
  // either way) — confirm replace() is replace-all for regex patterns.
  var joined = replace(dep_text, /\\\n\s*/, ' ')
  var colon_pos = search(joined, ':')
  if (colon_pos == null) return []
  // Everything after the first ':' is the whitespace-separated dep list.
  var rest = trim(text(joined, colon_pos + 1))
  var parts = filter(array(rest, /\s+/), function(p) {
    return length(p) > 0
  })
  return parts
}
// Run the compiler's preprocessor to get the dependency list:
//   -MM  emit a make rule listing user headers (system headers excluded)
//   -MG  keep going when a header is missing (listed as generated)
//   -MF  write the rule to dep_file instead of stdout
// Returns array of dependency file paths; on any failure falls back to
// [src_path] so at least the source itself keys the cache.
function get_c_deps(cc, flags, src_path) {
  // Temp path is keyed only by the source path hash.
  // NOTE(review): two concurrent builds of the same file would race on
  // this temp file, and it is never cleaned up — confirm acceptable.
  var dep_file = '/tmp/cell_deps_' + content_hash(src_path) + '.d'
  var dep_cmd = [cc, '-MM', '-MG', '-MF', '"' + dep_file + '"']
  dep_cmd = array(dep_cmd, flags)
  push(dep_cmd, '"' + src_path + '"')
  // Scanner diagnostics are discarded; a nonzero exit uses the fallback.
  var ret = os.system(text(dep_cmd, ' ') + ' 2>/dev/null')
  if (ret != 0) return [src_path]
  if (!fd.is_file(dep_file)) return [src_path]
  var dep_text = text(fd.slurp(dep_file))
  return parse_makefile_deps(dep_text)
}
// Build the full content key for an object file: the compile command plus
// the path and contents of every dependency. A missing dependency hashes
// as a '<missing>' sentinel so its later appearance invalidates the key.
function hash_all_deps(cmd_str, deps) {
  var pieces = [cmd_str]
  arrfor(deps, function(dep_path) {
    var body = memo_read(dep_path)
    if (body == null)
      push(pieces, dep_path + '\n<missing>')
    else
      push(pieces, dep_path + '\n' + body)
  })
  return text(pieces, '\n')
}
// ============================================================================
// Compilation
// ============================================================================
// Build the compile command string and common flags for a C file.
// Returns {cmd_str, src_path, cc, common_flags, pkg_dir} or null if the
// source file does not exist.
// opts: {buildtype: release|debug|minsize, cflags: pre-fetched flag array}
function compile_setup(pkg, file, target, opts) {
  var _opts = opts || {}
  var _buildtype = _opts.buildtype || 'release'
  var pkg_dir = shop.get_package_dir(pkg)
  var src_path = pkg_dir + '/' + file
  var core_dir = null
  if (!fd.is_file(src_path)) return null
  // Callers may pre-fetch cflags once per package (build_package does) to
  // avoid re-reading package metadata for every file.
  var cflags = _opts.cflags || replace_sigils_array(pkg_tools.get_flags(pkg, 'CFLAGS', target), pkg_dir)
  var target_cflags = toolchains[target].c_args || []
  var cc = toolchains[target].c
  var sym_name = shop.c_symbol_for_file(pkg, file)
  var common_flags = []
  // Optimization profile; an unknown buildtype gets no optimization flags.
  if (_buildtype == 'release') {
    common_flags = array(common_flags, ['-O3', '-DNDEBUG'])
  } else if (_buildtype == 'debug') {
    common_flags = array(common_flags, ['-O2', '-g'])
  } else if (_buildtype == 'minsize') {
    common_flags = array(common_flags, ['-Os', '-DNDEBUG'])
  }
  push(common_flags, '-DCELL_USE_NAME=' + sym_name)
  push(common_flags, '-I"' + pkg_dir + '"')
  if (fd.is_dir(pkg_dir + '/include')) {
    push(common_flags, '-I"' + pkg_dir + '/include"')
  }
  // Every non-core package can include core's headers.
  if (pkg != 'core') {
    core_dir = shop.get_package_dir('core')
    push(common_flags, '-I"' + core_dir + '/source"')
  }
  // Rewrite relative -I paths to be package-root relative (and quoted);
  // absolute (-I/...) and already-package-rooted paths pass through.
  arrfor(cflags, function(flag) {
    var f = flag
    var ipath = null
    if (starts_with(f, '-I') && !starts_with(f, '-I/')) {
      ipath = text(f, 2)
      if (!starts_with(ipath, pkg_dir)) {
        f = '-I"' + pkg_dir + '/' + ipath + '"'
      }
    }
    push(common_flags, f)
  })
  arrfor(target_cflags, function(flag) {
    push(common_flags, flag)
  })
  // cmd_str deliberately omits the -o output path so it can serve as a
  // stable cache key; compile_file appends -o when actually compiling.
  var cmd_parts = [cc, '-c', '-fPIC']
  cmd_parts = array(cmd_parts, common_flags)
  push(cmd_parts, '"' + src_path + '"')
  return {
    cmd_str: text(cmd_parts, ' '),
    src_path: src_path,
    cc: cc,
    common_flags: common_flags,
    pkg_dir: pkg_dir
  }
}
// Probe for the full content key (source + all deps + compile flags).
// Returns {fail: true} when a failure is cached for this cmd+source,
// {full_content, deps} when the dependency list is cached, or null on the
// cold path (deps not discovered yet).
function probe_source_key(setup, file) {
  var src_text = memo_read(setup.src_path)
  var quick = setup.cmd_str + '\n' + src_text
  if (fd.is_file(cache_path(quick, SALT_FAIL))) return {fail: true}
  var deps_path = cache_path(quick, SALT_DEPS)
  if (!fd.is_file(deps_path)) return null
  var deps = filter(array(text(fd.slurp(deps_path)), '\n'), function(p) {
    return length(p) > 0
  })
  var full_content = hash_all_deps(setup.cmd_str, deps)
  return {full_content: full_content, deps: deps}
}
// Compile a single C file for a package.
// Returns the object file path (content-addressed in .cell/build), or null
// on failure (failures are cached so the same cmd+source is not retried).
// opts: {buildtype, cflags, verbose, force}
//
// Cache layers, fastest first:
//   1. stat manifest (bmfst_probe)         — zero file reads on warm cache
//   2. cached dep list + content hash      — reads source and headers
//   3. cc -MM dep scan, then content hash  — cold path
Build.compile_file = function(pkg, file, target, opts) {
  var _opts = opts || {}
  var setup = compile_setup(pkg, file, target, _opts)
  if (!setup) {
    log.error('Source file not found: ' + shop.get_package_dir(pkg) + '/' + file)
    return null
  }
  if (_opts.verbose) {
    log.build('[verbose] compile: ' + setup.cmd_str)
  }
  // Layer 1: stat-based manifest probe (skipped when force is set)
  var mf_obj = null
  if (!_opts.force) {
    mf_obj = bmfst_probe(setup.cmd_str, setup.src_path)
    if (mf_obj) {
      if (_opts.verbose) log.build('[verbose] manifest hit: ' + file)
      log.shop('manifest hit ' + file)
      return mf_obj
    }
  }
  var build_dir = get_build_dir()
  fd.ensure_dir(build_dir)
  var probe = probe_source_key(setup, file)
  // A cached failure short-circuits; the fail key covers cmd + source
  // content, so editing the source clears it.
  if (probe && probe.fail) {
    if (_opts.verbose) log.build('[verbose] skipping ' + file + ' (cached failure)')
    log.shop('skip ' + file + ' (cached failure)')
    return null
  }
  var full_content = null
  var deps = null
  var obj_path = null
  // Layer 2: deps cached — hash source+headers and look up the object.
  if (probe && probe.full_content) {
    full_content = probe.full_content
    obj_path = cache_path(full_content, SALT_OBJ)
    if (fd.is_file(obj_path)) {
      if (_opts.verbose) log.build('[verbose] cache hit: ' + file)
      log.shop('cache hit ' + file)
      // Refresh the stat manifest so the next run takes layer 1.
      bmfst_save(setup.cmd_str, setup.src_path, probe.deps, obj_path)
      return obj_path
    }
    log.shop('cache stale ' + file + ' (header changed)')
    deps = probe.deps
  }
  var file_content = null
  var quick_content = null
  var err_path = null
  var full_cmd = null
  var err_text = null
  var missing = null
  var err_lines = null
  var first_err = null
  var ret = null
  // Layer 3 (cold path): run cc -MM to discover deps
  if (!deps) {
    log.shop('dep scan ' + file)
    deps = get_c_deps(setup.cc, setup.common_flags, setup.src_path)
    full_content = hash_all_deps(setup.cmd_str, deps)
    obj_path = cache_path(full_content, SALT_OBJ)
    // Check if object exists (might exist from previous build with same deps)
    if (fd.is_file(obj_path)) {
      file_content = memo_read(setup.src_path)
      quick_content = setup.cmd_str + '\n' + file_content
      fd.slurpwrite(cache_path(quick_content, SALT_DEPS), stone(blob(text(deps, '\n'))))
      if (_opts.verbose) log.build('[verbose] cache hit: ' + file + ' (after dep scan)')
      log.shop('cache hit ' + file + ' (after dep scan)')
      bmfst_save(setup.cmd_str, setup.src_path, deps, obj_path)
      return obj_path
    }
  }
  // Compile for real; stderr goes to a temp log keyed by the source path.
  log.shop('compiling ' + file)
  log.console('Compiling ' + file)
  err_path = '/tmp/cell_build_err_' + content_hash(setup.src_path) + '.log'
  full_cmd = setup.cmd_str + ' -o "' + obj_path + '" 2>"' + err_path + '"'
  ret = os.system(full_cmd)
  if (ret != 0) {
    if (fd.is_file(err_path)) {
      err_text = text(fd.slurp(err_path))
    }
    // Detect "header not found" so we can hint at a missing SDK.
    // NOTE(review): the char class [''] repeats the same ASCII apostrophe —
    // probably curly quotes mangled in transit; verify the pattern still
    // matches real compiler output.
    if (err_text) {
      missing = search(err_text, /fatal error: [''].*[''] file not found/)
      if (missing == null) missing = search(err_text, /fatal error: .*: No such file or directory/)
    }
    if (missing != null) {
      err_lines = array(err_text, "\n")
      first_err = length(err_lines) > 0 ? err_lines[0] : err_text
      log.error(file + ': ' + first_err + ' (SDK not installed?)')
    } else {
      log.error('Compilation failed: ' + file)
      if (err_text) log.error(err_text)
      else log.error('Command: ' + full_cmd)
    }
    // Cache the failure keyed on cmd + source content so edits retry.
    file_content = memo_read(setup.src_path)
    quick_content = setup.cmd_str + '\n' + file_content
    fd.slurpwrite(cache_path(quick_content, SALT_FAIL), stone(blob(err_text || 'compilation failed')))
    return null
  }
  // Success: save deps and the stat manifest for future warm-path lookups.
  file_content = memo_read(setup.src_path)
  quick_content = setup.cmd_str + '\n' + file_content
  fd.slurpwrite(cache_path(quick_content, SALT_DEPS), stone(blob(text(deps, '\n'))))
  bmfst_save(setup.cmd_str, setup.src_path, deps, obj_path)
  return obj_path
}
// Build all C files for a package.
// Returns one entry per C file, in get_c_files order.
// NOTE(review): entries are null for files that failed to compile —
// compile_file returns null on failure and the result is pushed
// unconditionally here — so callers must check for nulls (see build_static).
Build.build_package = function(pkg, target, exclude_main, buildtype) {
  var _target = target || Build.detect_host_target()
  var _buildtype = buildtype || 'release'
  var c_files = pkg_tools.get_c_files(pkg, _target, exclude_main)
  var objects = []
  // Pre-fetch cflags once (avoids re-reading package metadata per file)
  var pkg_dir = shop.get_package_dir(pkg)
  var cached_cflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'CFLAGS', _target), pkg_dir)
  arrfor(c_files, function(file) {
    var obj = Build.compile_file(pkg, file, _target, {buildtype: _buildtype, cflags: cached_cflags})
    push(objects, obj)
  })
  return objects
}
// ============================================================================
// Dynamic library building
// ============================================================================
// Compute a dylib content key from the source content key plus everything
// that influences the link step.
// link_opts: {extra_objects, ldflags, target_ldflags, target, cc}
function compute_dylib_content(full_content, link_opts) {
  var parts = [
    full_content,
    'target:' + text(link_opts.target),
    'cc:' + text(link_opts.cc)
  ]
  arrfor(link_opts.extra_objects, function(obj) {
    if (obj != null) push(parts, 'extra:' + text(obj))
  })
  arrfor(link_opts.ldflags, function(flag) {
    push(parts, 'ldflag:' + text(flag))
  })
  arrfor(link_opts.target_ldflags, function(flag) {
    push(parts, 'target_ldflag:' + text(flag))
  })
  return text(parts, '\n')
}
// Build a per-module dynamic library for a single C file.
// Returns the content-addressed dylib path in .cell/build/<hash>, or null
// on failure. Checks the dylib cache first; only compiles the object if
// the dylib is stale.
// opts: {buildtype, cflags, extra_objects, verbose, force}
Build.build_module_dylib = function(pkg, file, target, opts) {
  var _opts = opts || {}
  var _target = target || Build.detect_host_target()
  var _buildtype = _opts.buildtype || 'release'
  var _extra = _opts.extra_objects || []
  var setup = compile_setup(pkg, file, _target, {buildtype: _buildtype, cflags: _opts.cflags})
  if (!setup) return null
  var tc = toolchains[_target]
  // Use the C++ driver for linking when the toolchain provides one.
  var cc = tc.cpp || tc.c
  var local_dir = get_local_dir()
  // Get link flags
  var ldflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'LDFLAGS', _target), setup.pkg_dir)
  var target_ldflags = tc.c_link_args || []
  // Rewrite relative -L paths to be package-root relative (and quoted),
  // mirroring the -I handling in compile_setup.
  var resolved_ldflags = []
  arrfor(ldflags, function(flag) {
    var f = flag
    var lpath = null
    if (starts_with(f, '-L') && !starts_with(f, '-L/')) {
      lpath = text(f, 2)
      if (!starts_with(lpath, setup.pkg_dir)) {
        f = '-L"' + setup.pkg_dir + '/' + lpath + '"'
      }
    }
    push(resolved_ldflags, f)
  })
  var build_dir = get_build_dir()
  fd.ensure_dir(build_dir)
  var link_info = {extra_objects: _extra, ldflags: resolved_ldflags, target_ldflags: target_ldflags, target: _target, cc: cc}
  // Layer 1: stat-based dylib manifest — zero file reads on warm cache
  var mf_dylib = null
  if (!_opts.force) {
    mf_dylib = bmfst_dl_probe(setup, link_info)
    if (mf_dylib) {
      if (_opts.verbose) log.build('[verbose] manifest hit: ' + file)
      log.shop('manifest hit ' + file)
      return mf_dylib
    }
  }
  // Layer 2: probe source key — check dylib cache before compiling
  var probe = probe_source_key(setup, file)
  var dylib_path = null
  var dylib_content = null
  var obj = null
  var obj_path = null
  var cmd_parts = null
  var cmd_str = null
  var ret = null
  var post_probe = null
  var fallback_probe = null
  if (probe && probe.fail) {
    log.shop('skip ' + file + ' (cached failure)')
    return null
  }
  if (probe && probe.full_content) {
    dylib_content = compute_dylib_content(probe.full_content, link_info)
    dylib_path = cache_path(dylib_content, SALT_DYLIB)
    if (!_opts.force && fd.is_file(dylib_path)) {
      log.shop('cache hit ' + file)
      bmfst_dl_save(setup, link_info, probe.deps, dylib_path)
      return dylib_path
    }
    // Dylib stale but object might be cached — check before compiling
    obj_path = cache_path(probe.full_content, SALT_OBJ)
    if (fd.is_file(obj_path)) {
      obj = obj_path
    }
  }
  // Object not cached — compile it
  if (!obj) {
    obj = Build.compile_file(pkg, file, _target, {buildtype: _buildtype, cflags: _opts.cflags, force: _opts.force})
    if (!obj) return null
    // Recompute the dylib key with the now-known source key; compile_file
    // has just cached the dep list, so this probe should succeed.
    if (!dylib_path) {
      post_probe = probe_source_key(setup, file)
      if (post_probe && post_probe.full_content) {
        dylib_content = compute_dylib_content(post_probe.full_content, link_info)
        dylib_path = cache_path(dylib_content, SALT_DYLIB)
        if (fd.is_file(dylib_path)) {
          bmfst_dl_save(setup, link_info, post_probe.deps, dylib_path)
          return dylib_path
        }
      }
    }
  }
  // Need dylib_path for output
  if (!dylib_path) {
    // Fallback: probe should succeed now since compile_file cached deps
    fallback_probe = probe_source_key(setup, file)
    if (fallback_probe && fallback_probe.full_content) {
      dylib_content = compute_dylib_content(fallback_probe.full_content, link_info)
      dylib_path = cache_path(dylib_content, SALT_DYLIB)
    } else {
      log.error('Cannot compute dylib key for ' + file)
      return null
    }
  }
  if (_opts.verbose) {
    log.build('[verbose] LDFLAGS: ' + text(resolved_ldflags, ' '))
  }
  // Link. Undefined symbols are permitted (resolved against the host
  // process at load time); rpaths cover both installed and in-tree layouts.
  cmd_parts = [cc, '-shared', '-fPIC']
  if (tc.system == 'darwin') {
    cmd_parts = array(cmd_parts, [
      '-undefined', 'dynamic_lookup',
      '-Wl,-dead_strip',
      '-Wl,-rpath,@loader_path/../local',
      '-Wl,-rpath,' + local_dir
    ])
  } else if (tc.system == 'linux') {
    cmd_parts = array(cmd_parts, [
      '-Wl,--allow-shlib-undefined',
      '-Wl,--gc-sections',
      '-Wl,-rpath,$ORIGIN/../local',
      '-Wl,-rpath,' + local_dir
    ])
  } else if (tc.system == 'windows') {
    push(cmd_parts, '-Wl,--allow-shlib-undefined')
  }
  push(cmd_parts, '-L"' + local_dir + '"')
  push(cmd_parts, '"' + text(obj) + '"')
  arrfor(_extra, function(extra_obj) {
    if (extra_obj != null) push(cmd_parts, '"' + text(extra_obj) + '"')
  })
  cmd_parts = array(cmd_parts, resolved_ldflags)
  cmd_parts = array(cmd_parts, target_ldflags)
  push(cmd_parts, '-o')
  push(cmd_parts, '"' + dylib_path + '"')
  cmd_str = text(cmd_parts, ' ')
  if (_opts.verbose) log.build('[verbose] link: ' + cmd_str)
  log.shop('linking ' + file)
  log.console('Linking module ' + file + ' -> ' + fd.basename(dylib_path))
  ret = os.system(cmd_str)
  if (ret != 0) {
    log.error('Linking failed: ' + file)
    return null
  }
  // Save the dylib manifest for future stat-based probes. Later
  // assignments win, so probe's deps (the earliest successful probe)
  // take priority over the post-compile reprobes.
  var mf_deps = null
  if (fallback_probe && fallback_probe.deps) mf_deps = fallback_probe.deps
  if (post_probe && post_probe.deps) mf_deps = post_probe.deps
  if (probe && probe.deps) mf_deps = probe.deps
  if (mf_deps) bmfst_dl_save(setup, link_info, mf_deps, dylib_path)
  return dylib_path
}
// Build a dynamic library for a package (one dylib per C file).
// Returns array of {file, symbol, dylib} for each module that built, and
// writes a manifest mapping symbols to dylib paths.
Build.build_dynamic = function(pkg, target, buildtype, opts) {
  var _target = target || Build.detect_host_target()
  var _buildtype = buildtype || 'release'
  var _opts = opts || {}
  var c_files = pkg_tools.get_c_files(pkg, _target, true)
  // Pre-fetch cflags once to avoid repeated TOML reads
  var pkg_dir = shop.get_package_dir(pkg)
  var cached_cflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'CFLAGS', _target), pkg_dir)
  // Support sources are compiled once and linked into every module dylib.
  var sources = pkg_tools.get_sources(pkg)
  var support_objects = []
  if (pkg != 'core') {
    arrfor(sources, function(src_file) {
      var obj = Build.compile_file(pkg, src_file, _target, {buildtype: _buildtype, cflags: cached_cflags, verbose: _opts.verbose, force: _opts.force})
      if (obj != null) push(support_objects, obj)
    })
  }
  var results = []
  arrfor(c_files, function(file) {
    var sym_name = shop.c_symbol_for_file(pkg, file)
    var dylib = Build.build_module_dylib(pkg, file, _target, {buildtype: _buildtype, extra_objects: support_objects, cflags: cached_cflags, verbose: _opts.verbose, force: _opts.force})
    if (dylib)
      push(results, {file: file, symbol: sym_name, dylib: dylib})
  })
  // Persist the mapping so the runtime can find dylibs without this module.
  fd.slurpwrite(manifest_path(pkg), stone(blob(json.encode(results))))
  return results
}
// ============================================================================
// Static binary building
// ============================================================================
// Build a static binary from multiple packages.
// packages: array of package names (core keeps its main.c; others drop theirs)
// target: toolchain name (defaults to the host target)
// output: output binary path (gains .exe for windows targets)
// buildtype: release|debug|minsize (defaults to release)
// Returns the final binary path; disrupts on compile or link failure.
Build.build_static = function(packages, target, output, buildtype) {
  var _target = target || Build.detect_host_target()
  var _buildtype = buildtype || 'release'
  var all_objects = []
  var all_ldflags = []
  var seen_flags = {}
  var failed = 0
  // Compile all packages
  arrfor(packages, function(pkg) {
    var is_core = (pkg == 'core')
    // For core, include main.c; for others, exclude it
    var objects = Build.build_package(pkg, _target, !is_core, _buildtype)
    arrfor(objects, function(obj) {
      // build_package returns null entries for files that failed to
      // compile; previously these were stringified into the link command
      // as the literal path "null". Count them and abort before linking.
      if (obj != null) push(all_objects, obj)
      else failed = failed + 1
    })
    // Collect LDFLAGS (with sigil replacement)
    var pkg_dir = shop.get_package_dir(pkg)
    var ldflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'LDFLAGS', _target), pkg_dir)
    // Deduplicate based on the entire LDFLAGS string for this package
    var ldflags_key = pkg + ':' + text(ldflags, ' ')
    if (!seen_flags[ldflags_key]) {
      seen_flags[ldflags_key] = true
      arrfor(ldflags, function(flag) {
        var f = flag
        var lpath = null
        // Rewrite relative -L paths to package-root relative, as elsewhere.
        if (starts_with(f, '-L') && !starts_with(f, '-L/')) {
          lpath = text(f, 2)
          if (!starts_with(lpath, pkg_dir)) {
            f = '-L"' + pkg_dir + '/' + lpath + '"'
          }
        }
        push(all_ldflags, f)
      })
    }
  })
  if (failed > 0) {
    log.error('Compilation failed for ' + text(failed) + ' file(s); not linking'); disrupt
  }
  if (length(all_objects) == 0) {
    log.error('No object files to link'); disrupt
  }
  // Link
  var cc = toolchains[_target].c
  var target_ldflags = toolchains[_target].c_link_args || []
  var exe_ext = toolchains[_target].system == 'windows' ? '.exe' : ''
  var out_path = output
  if (!ends_with(out_path, exe_ext) && exe_ext) {
    out_path = out_path + exe_ext
  }
  var cmd_parts = [cc]
  arrfor(all_objects, function(obj) {
    push(cmd_parts, '"' + obj + '"')
  })
  arrfor(all_ldflags, function(flag) {
    push(cmd_parts, flag)
  })
  arrfor(target_ldflags, function(flag) {
    push(cmd_parts, flag)
  })
  push(cmd_parts, '-o', '"' + out_path + '"')
  var cmd_str = text(cmd_parts, ' ')
  log.console('Linking ' + out_path)
  var ret = os.system(cmd_str)
  if (ret != 0) {
    log.error('Linking failed: ' + cmd_str); disrupt
  }
  log.console('Built ' + out_path)
  return out_path
}
// ============================================================================
// Native .cm compilation (source → mcode → QBE IL → .o → .dylib)
// ============================================================================
// Lower QBE IL to assembly and assemble it into one object file.
// (Despite the surrounding "batched/parallel" vocabulary, this single
// variant runs QBE once over the whole IL and returns a one-element array
// of .o paths — the array shape its callers iterate over.)
// il_parts: {data, helpers (optional), functions} — IL text fragments
// cc: C compiler path, used as the assembler driver
// tmp_prefix: prefix for temp files (e.g. /tmp/cell_native_<hash>)
// extra_flags: appended verbatim to the assemble command; note the leading
//              space is already supplied by native_sanitize_flags()
function compile_native_single(il_parts, cc, tmp_prefix, extra_flags) {
  var _extra = extra_flags || ''
  // Concatenate data section, optional helper functions, then all functions.
  var helpers_il = (il_parts.helpers && length(il_parts.helpers) > 0)
    ? text(il_parts.helpers, "\n") : ""
  var all_fns = text(il_parts.functions, "\n")
  var full_il = il_parts.data + "\n\n" + helpers_il + "\n\n" + all_fns
  var asm_text = os.qbe(full_il)
  var s_path = tmp_prefix + '.s'
  var o_path = tmp_prefix + '.o'
  var rc = null
  fd.slurpwrite(s_path, stone(blob(asm_text)))
  rc = os.system(cc + _extra + ' -c ' + s_path + ' -o ' + o_path)
  if (rc != 0) {
    log.error('Assembly failed'); disrupt
  }
  return [o_path]
}
// Post-process QBE IL: QBE rejects unreachable instructions that follow a
// terminator without a label, so synthesize a fresh @_dead_N label after
// each ret/jmp when the next line would otherwise be a plain instruction.
function qbe_insert_dead_labels(il_text) {
  var out = []
  var next_id = 0
  var pending = false
  arrfor(array(il_text, "\n"), function(line) {
    var t = trim(line)
    // Labels, closing braces, and blank lines already break the flow.
    var is_boundary = starts_with(t, '@') || starts_with(t, '}') || length(t) == 0
    if (pending && !is_boundary) {
      push(out, "@_dead_" + text(next_id))
      next_id = next_id + 1
      pending = false
    }
    if (is_boundary)
      pending = false
    if (starts_with(t, 'ret ') || starts_with(t, 'jmp '))
      pending = true
    push(out, line)
  })
  return text(out, "\n")
}
// Compile a .cm source file to a native .dylib via QBE.
// Returns the content-addressed dylib path; disrupts on any failure.
// The cache key depends only on source text + target + sanitizer flags,
// so the cache is checked BEFORE running the expensive compile pipeline
// (previously shop.compile_file and qbe_emit ran even on a warm cache).
Build.compile_native = function(src_path, target, buildtype, pkg) {
  var _target = target || Build.detect_host_target()
  var _buildtype = buildtype || 'release'
  if (!fd.is_file(src_path)) {
    log.error('Source file not found: ' + src_path); disrupt
  }
  var tc = toolchains[_target]
  var cc = tc.c
  var san_flags = native_sanitize_flags()
  var san_suffix = length(san_flags) > 0 ? '_asan' : ''
  // Early cache check — all key inputs are available without compiling.
  var src = text(fd.slurp(src_path))
  var native_key = native_cache_content(src, _target, san_flags)
  var build_dir = get_build_dir()
  fd.ensure_dir(build_dir)
  var dylib_path = cache_path(native_key, SALT_NATIVE)
  if (fd.is_file(dylib_path))
    return dylib_path
  // Step 1: Compile through pipeline
  var optimized = shop.compile_file(src_path)
  var qbe_macros = use('qbe')
  var qbe_emit = use('qbe_emit')
  // Step 2: Generate QBE IL (symbol name only when building for a package)
  var sym_name = null
  if (pkg) {
    sym_name = shop.c_symbol_for_file(pkg, fd.basename(src_path))
  }
  var il_parts = qbe_emit(optimized, qbe_macros, sym_name)
  // Step 3: QBE -> asm -> object file(s)
  var tmp = '/tmp/cell_native_' + content_hash(native_key)
  var rt_o_path = '/tmp/cell_qbe_rt' + san_suffix + '.o'
  var o_paths = compile_native_single(il_parts, cc, tmp, san_flags)
  // Compile QBE runtime stubs if needed (cached in /tmp per sanitizer mode)
  var rc = null
  var qbe_rt_path = null
  if (!fd.is_file(rt_o_path)) {
    qbe_rt_path = shop.get_package_dir('core') + '/src/qbe_rt.c'
    rc = os.system(cc + san_flags + ' -c ' + qbe_rt_path + ' -o ' + rt_o_path + ' -fPIC')
    if (rc != 0) {
      log.error('QBE runtime stubs compilation failed'); disrupt
    }
  }
  // Link dylib; undefined symbols resolve against the host at load time
  var link_cmd = cc + san_flags + ' -shared -fPIC'
  if (tc.system == 'darwin') {
    link_cmd = link_cmd + ' -undefined dynamic_lookup'
  } else if (tc.system == 'linux') {
    link_cmd = link_cmd + ' -Wl,--allow-shlib-undefined'
  }
  var oi = 0
  while (oi < length(o_paths)) {
    link_cmd = link_cmd + ' ' + o_paths[oi]
    oi = oi + 1
  }
  link_cmd = link_cmd + ' ' + rt_o_path + ' -o ' + dylib_path
  rc = os.system(link_cmd)
  if (rc != 0) {
    log.error('Linking native dylib failed for: ' + src_path); disrupt
  }
  log.console('Built native: ' + fd.basename(dylib_path))
  return dylib_path
}
// Compile pre-compiled mcode IR to a native .dylib via QBE.
// Use this when the caller already has the optimized IR (avoids calling mcode
// twice and hitting module-level state pollution).
// opts: {target, buildtype, pkg}
// The cache key depends only on source text + target + sanitizer flags, so
// the cache is checked BEFORE emitting QBE IL (previously qbe_emit ran even
// on a warm cache).
Build.compile_native_ir = function(optimized, src_path, opts) {
  var _target = (opts && opts.target) || Build.detect_host_target()
  var _buildtype = (opts && opts.buildtype) || 'release'
  var pkg = opts && opts.pkg
  var tc = toolchains[_target]
  var cc = tc.c
  var san_flags = native_sanitize_flags()
  var san_suffix = length(san_flags) > 0 ? '_asan' : ''
  // Early cache check — all key inputs are available without IL emission.
  var src = text(fd.slurp(src_path))
  var native_key = native_cache_content(src, _target, san_flags)
  var build_dir = get_build_dir()
  fd.ensure_dir(build_dir)
  var dylib_path = cache_path(native_key, SALT_NATIVE)
  if (fd.is_file(dylib_path))
    return dylib_path
  // Generate QBE IL (symbol name only when building for a package)
  var qbe_macros = use('qbe')
  var qbe_emit = use('qbe_emit')
  var sym_name = null
  if (pkg) {
    sym_name = shop.c_symbol_for_file(pkg, fd.basename(src_path))
  }
  var il_parts = qbe_emit(optimized, qbe_macros, sym_name)
  // Compile and assemble
  var tmp = '/tmp/cell_native_' + content_hash(native_key)
  var rt_o_path = '/tmp/cell_qbe_rt' + san_suffix + '.o'
  var o_paths = compile_native_single(il_parts, cc, tmp, san_flags)
  // Compile QBE runtime stubs if needed (cached in /tmp per sanitizer mode)
  var rc = null
  var qbe_rt_path = null
  if (!fd.is_file(rt_o_path)) {
    qbe_rt_path = shop.get_package_dir('core') + '/src/qbe_rt.c'
    rc = os.system(cc + san_flags + ' -c ' + qbe_rt_path + ' -o ' + rt_o_path + ' -fPIC')
    if (rc != 0) {
      log.error('QBE runtime stubs compilation failed'); disrupt
    }
  }
  // Link dylib; undefined symbols resolve against the host at load time
  var link_cmd = cc + san_flags + ' -shared -fPIC'
  if (tc.system == 'darwin') {
    link_cmd = link_cmd + ' -undefined dynamic_lookup'
  } else if (tc.system == 'linux') {
    link_cmd = link_cmd + ' -Wl,--allow-shlib-undefined'
  }
  var oi = 0
  while (oi < length(o_paths)) {
    link_cmd = link_cmd + ' ' + o_paths[oi]
    oi = oi + 1
  }
  link_cmd = link_cmd + ' ' + rt_o_path + ' -o ' + dylib_path
  rc = os.system(link_cmd)
  if (rc != 0) {
    log.error('Linking native dylib failed for: ' + src_path); disrupt
  }
  log.console('Built native: ' + fd.basename(dylib_path))
  return dylib_path
}
// ============================================================================
// Module table generation (for static builds)
// ============================================================================
// Compile a .cm module to a mach bytecode blob.
// Returns the raw mach bytes as a blob; disrupts if the source is missing.
// NOTE(review): mach_compile_mcode_bin is not defined in this file —
// presumably a runtime builtin; confirm before relying on it elsewhere.
Build.compile_cm_to_mach = function(src_path) {
  if (!fd.is_file(src_path)) {
    log.error('Source file not found: ' + src_path); disrupt
  }
  // Run the standard compile pipeline, then hand the serialized IR to the
  // mach bytecode compiler.
  var optimized = shop.compile_file(src_path)
  return mach_compile_mcode_bin(src_path, json.encode(optimized))
}
// Generate a module_table.c file that embeds mach bytecode for .cm modules.
// modules: array of {name, src_path} — name is the module name, src_path is
//          the .cm file
// output: path to write the generated .c file
// Returns the output path. Each module becomes a static byte array plus an
// entry in a strcmp-based lookup function the runtime links against.
Build.generate_module_table = function(modules, output) {
  var lines = []
  push(lines, '/* Generated module table — do not edit */')
  push(lines, '#include <stddef.h>')
  push(lines, '#include <string.h>')
  push(lines, '')
  push(lines, 'struct cell_embedded_entry {')
  push(lines, ' const char *name;')
  push(lines, ' const unsigned char *data;')
  push(lines, ' size_t size;')
  push(lines, '};')
  push(lines, '')
  var entries = []
  arrfor(modules, function(mod) {
    // Sanitize the module name into a valid C identifier suffix.
    var safe = replace(replace(replace(mod.name, '/', '_'), '.', '_'), '-', '_')
    var mach = Build.compile_cm_to_mach(mod.src_path)
    var bytes = array(mach)
    // Emit the bytecode as a hex byte-array initializer.
    var hex = []
    arrfor(bytes, function(b) {
      push(hex, '0x' + text(b, 'h2'))
    })
    push(lines, 'static const unsigned char mod_' + safe + '_data[] = {')
    push(lines, ' ' + text(hex, ', '))
    push(lines, '};')
    push(lines, '')
    push(entries, safe)
    log.console('Embedded: ' + mod.name + ' (' + text(length(bytes)) + ' bytes)')
  })
  // Lookup function: linear strcmp scan over the embedded modules.
  push(lines, 'const struct cell_embedded_entry *cell_embedded_module_lookup(const char *name) {')
  arrfor(modules, function(mod, i) {
    var safe = entries[i]
    push(lines, ' if (strcmp(name, "' + mod.name + '") == 0) {')
    push(lines, ' static const struct cell_embedded_entry e = {"' + mod.name + '", mod_' + safe + '_data, sizeof(mod_' + safe + '_data)};')
    push(lines, ' return &e;')
    push(lines, ' }')
  })
  push(lines, ' return (void *)0;')
  push(lines, '}')
  var c_text = text(lines, '\n')
  fd.slurpwrite(output, stone(blob(c_text)))
  log.console('Generated ' + output)
  return output
}
// ============================================================================
// Convenience functions
// ============================================================================
// Build dynamic libraries for all installed packages, core first (other
// packages include core headers via compile_setup).
// Returns array of {package, modules}; prints a per-package build report.
Build.build_all_dynamic = function(target, buildtype, opts) {
  var _target = target || Build.detect_host_target()
  var _buildtype = buildtype || 'release'
  var _opts = opts || {}
  var packages = shop.list_packages()
  var results = []
  var core_mods = null
  var total_files = 0
  var total_ok = 0
  var total_fail = 0
  // Build core first
  if (find(packages, function(p) { return p == 'core' }) != null) {
    core_mods = Build.build_dynamic('core', _target, _buildtype, _opts)
    push(results, {package: 'core', modules: core_mods})
  }
  // Build other packages
  arrfor(packages, function(pkg) {
    if (pkg == 'core') return
    var pkg_mods = Build.build_dynamic(pkg, _target, _buildtype, _opts)
    push(results, {package: pkg, modules: pkg_mods})
  })
  // Print build report. Failures are inferred per package as
  // (C file count) - (modules that produced a dylib).
  log.build('--- Build Report ---')
  arrfor(results, function(r) {
    var pkg_dir = shop.get_package_dir(r.package)
    var c_files = pkg_tools.get_c_files(r.package, _target, true)
    var file_count = length(c_files)
    var ok_count = length(r.modules)
    var fail_count = file_count - ok_count
    total_files = total_files + file_count
    total_ok = total_ok + ok_count
    total_fail = total_fail + fail_count
    // Skip packages with no C files entirely.
    if (file_count == 0) return
    var status = fail_count == 0 ? 'OK' : `${ok_count}/${file_count}`
    log.build(` ${r.package}: ${status}`)
  })
  log.build(`Total: ${total_ok}/${total_files} compiled, ${total_fail} failed`)
  log.build('--------------------')
  return results
}
// ============================================================================
// Module exports
// ============================================================================
// Export salt constants and cache_path for shop.cm and others
Build.SALT_OBJ = SALT_OBJ
Build.SALT_DYLIB = SALT_DYLIB
Build.SALT_NATIVE = SALT_NATIVE
Build.SALT_MACH = SALT_MACH
Build.SALT_MCODE = SALT_MCODE
Build.SALT_DEPS = SALT_DEPS
Build.SALT_FAIL = SALT_FAIL
Build.SALT_BMFST = SALT_BMFST
Build.SALT_BMFST_DL = SALT_BMFST_DL
Build.cache_path = cache_path
Build.manifest_path = manifest_path
Build.native_sanitize_flags = native_sanitize_flags
Build.native_cache_content = native_cache_content
// The Build table is this module's value.
return Build