// (paste residue from a file-viewer UI, commented out so the module parses)
// Files
// cell/build.cm
//
// 956 lines
// 31 KiB
// Plaintext
// build.cm - Simplified build utilities for Cell
//
// Key functions:
// Build.compile_file(pkg, file, target) - Compile a C file, returns object path
// Build.build_package(pkg, target) - Build all C files for a package
// Build.build_dynamic(pkg, target) - Build dynamic library for a package
// Build.build_static(packages, target, output) - Build static binary
var fd = use('fd')
var crypto = use('crypto')
var blob = use('blob')
var os = use('os')
var toolchains = use('toolchains')
var shop = use('internal/shop')
var pkg_tools = use('package')
var Build = {}
// ============================================================================
// Sigil replacement
// ============================================================================
// Get the local directory for prebuilt libraries (delegates to the shop module)
function get_local_dir() {
    return shop.get_local_dir()
}
// Replace sigils in a string
// Supports: $LOCAL -> absolute path to .cell/local, $PACKAGE -> package dir (if provided)
// NOTE(review): assumes replace() substitutes every occurrence of the sigil —
// confirm against the runtime's replace() semantics.
function replace_sigils(str, pkg_dir) {
    // $LOCAL is rooted at the current working directory at call time
    var local = fd.realpath('.') + '/' + get_local_dir()
    var r = replace(str, '$LOCAL', local)
    if (pkg_dir) r = replace(r, '$PACKAGE', pkg_dir)
    return r
}
// Apply sigil expansion to every flag in a list; returns a new array and
// leaves the input untouched.
function replace_sigils_array(flags, pkg_dir) {
    var expanded = []
    arrfor(flags, function(f) {
        var e = replace_sigils(f, pkg_dir)
        push(expanded, e)
    })
    return expanded
}
// Public alias so callers outside this module can resolve the prebuilt-library dir
Build.get_local_dir = get_local_dir
// ============================================================================
// Toolchain helpers
// ============================================================================
// List the names of all known toolchain targets
// (array(obj) presumably yields the object's keys — verify against runtime docs)
Build.list_targets = function() {
    return array(toolchains)
}
// True when a toolchain definition exists for the given target name.
Build.has_target = function(target) {
    var tc = toolchains[target]
    return tc != null
}
// Map the host OS/arch onto one of our toolchain target names.
// Defaults arch to 'arm64' when the runtime has no os.arch().
// Returns null when the platform is unrecognized.
Build.detect_host_target = function() {
    var platform = os.platform()
    var arch = os.arch ? os.arch() : 'arm64'
    var is_x86 = arch == 'x86_64'
    if (platform == 'macOS' || platform == 'darwin') {
        if (is_x86) return 'macos_x86_64'
        return 'macos_arm64'
    }
    if (platform == 'Linux' || platform == 'linux') {
        if (is_x86) return 'linux'
        return 'linux_arm64'
    }
    if (platform == 'Windows' || platform == 'windows') {
        return 'windows'
    }
    return null
}
// ============================================================================
// Content-addressed build cache
// ============================================================================
// 32-byte BLAKE2 hash of a string, hex-encoded — the cache-key primitive
function content_hash(str) {
    var bb = stone(blob(str))
    return text(crypto.blake2(bb, 32), 'h')
}
// Enable AOT ASan by creating .cell/asan_aot in the package root.
// Returns the extra compiler flags (with a leading space) or '' when disabled.
function native_sanitize_flags() {
    if (!fd.is_file('.cell/asan_aot')) return ''
    return ' -fsanitize=address -fno-omit-frame-pointer'
}
// ============================================================================
// Cache key salts — canonical registry
// Every artifact type has a unique salt so hash collisions between types
// are impossible, and no file extensions are needed in build/.
// ============================================================================
// NOTE: these salt strings are part of the on-disk cache key; changing one
// invalidates every previously cached artifact of that type.
var SALT_OBJ = 'obj' // compiled C object file
var SALT_DYLIB = 'dylib' // linked dynamic library
var SALT_NATIVE = 'native' // native-compiled .cm dylib
var SALT_MACH = 'mach' // mach bytecode blob
var SALT_MCODE = 'mcode' // mcode IR (JSON)
var SALT_DEPS = 'deps' // cached cc -MM dependency list
var SALT_FAIL = 'fail' // cached compilation failure
// Content-addressed path in the build dir; the salt keeps artifact types disjoint
function cache_path(content, salt) {
    return get_build_dir() + '/' + content_hash(content + '\n' + salt)
}
// Deterministic manifest path for a package's built dylibs
// (keyed only by package name, so the path is stable across builds)
function manifest_path(pkg) {
    return get_build_dir() + '/' + content_hash(pkg + '\n' + 'manifest')
}
// Cache-key content for native compilation: source text, target, and
// sanitizer flags each invalidate the cached native dylib
function native_cache_content(src, target, san_flags) {
    return src + '\n' + target + '\nnative\n' + (san_flags || '')
}
// Build artifact directory (delegates to the shop module)
function get_build_dir() {
    return shop.get_build_dir()
}
// Create a directory path recursively (mkdir -p equivalent).
// Walks each path component left to right, creating the ones that do not exist.
function ensure_dir(path) {
    // Fast path: already a directory
    if (fd.stat(path).isDirectory) return
    var parts = array(path, '/')
    // Preserve a leading '/' for absolute paths; relative paths start empty
    var current = starts_with(path, '/') ? '/' : ''
    var i = 0
    for (i = 0; i < length(parts); i++) {
        if (parts[i] == '') continue
        current += parts[i] + '/'
        if (!fd.stat(current).isDirectory) fd.mkdir(current)
    }
}
Build.ensure_dir = ensure_dir
// ============================================================================
// Dependency scanning helpers
// ============================================================================
// Parse make-style dependency output:
// foo.o: foo.c header1.h \
// header2.h
// Returns array of dependency file paths (skips the target before the colon)
function parse_makefile_deps(dep_text) {
    // Join backslash-continued lines into one logical line.
    // NOTE(review): assumes replace() with a regex replaces every occurrence —
    // confirm against the runtime's replace() semantics.
    var joined = replace(dep_text, /\\\n\s*/, ' ')
    var colon_pos = search(joined, ':')
    if (colon_pos == null) return []
    // Everything after the colon is the whitespace-separated dependency list
    var rest = trim(text(joined, colon_pos + 1))
    var parts = filter(array(rest, /\s+/), function(p) {
        return length(p) > 0
    })
    return parts
}
// Run cc -MM to get the preprocessor dependency list for src_path.
// -MG keeps the scan alive when generated headers are missing; -MF writes the
// make-style output to a temp .d file keyed by the source path's hash.
// Returns array of dependency file paths; falls back to [src_path] when the
// scan fails so the cache key still covers at least the source itself.
function get_c_deps(cc, flags, src_path) {
    var dep_file = '/tmp/cell_deps_' + content_hash(src_path) + '.d'
    var dep_cmd = [cc, '-MM', '-MG', '-MF', '"' + dep_file + '"']
    dep_cmd = array(dep_cmd, flags)
    push(dep_cmd, '"' + src_path + '"')
    // Compiler warnings/errors from the scan are deliberately discarded
    var ret = os.system(text(dep_cmd, ' ') + ' 2>/dev/null')
    if (ret != 0) return [src_path]
    if (!fd.is_file(dep_file)) return [src_path]
    var dep_text = text(fd.slurp(dep_file))
    return parse_makefile_deps(dep_text)
}
// Build the full content key for an object file: the compile command plus the
// path and contents of every dependency file. A missing dependency hashes as
// a distinct marker, so its later appearance invalidates the cache.
function hash_all_deps(cmd_str, deps) {
    var pieces = [cmd_str]
    arrfor(deps, function(dep) {
        var entry = dep + '\n<missing>'
        if (fd.is_file(dep)) entry = dep + '\n' + text(fd.slurp(dep))
        push(pieces, entry)
    })
    return text(pieces, '\n')
}
// ============================================================================
// Compilation
// ============================================================================
// Compile a single C file for a package
// Returns the object file path (content-addressed in .cell/build), or null on
// failure (missing source, cached failure, or compiler error).
// opts: {buildtype: 'release'|'debug'|'minsize', cflags: pre-fetched CFLAGS
// array, verbose: print commands}
Build.compile_file = function(pkg, file, target, opts) {
    var _opts = opts || {}
    var _buildtype = _opts.buildtype || 'release'
    var pkg_dir = shop.get_package_dir(pkg)
    var src_path = pkg_dir + '/' + file
    var core_dir = null
    if (!fd.is_file(src_path)) {
        print('Source file not found: ' + src_path)
        return null
    }
    // Use pre-fetched cflags if provided, otherwise fetch them
    var cflags = _opts.cflags || replace_sigils_array(pkg_tools.get_flags(pkg, 'CFLAGS', target), pkg_dir)
    var target_cflags = toolchains[target].c_args || []
    var cc = toolchains[target].c
    // Symbol name for this file
    var sym_name = shop.c_symbol_for_file(pkg, file)
    // Build common flags (shared between dep scan and compilation)
    var common_flags = []
    // Add buildtype-specific flags
    if (_buildtype == 'release') {
        common_flags = array(common_flags, ['-O3', '-DNDEBUG'])
    } else if (_buildtype == 'debug') {
        common_flags = array(common_flags, ['-O2', '-g'])
    } else if (_buildtype == 'minsize') {
        common_flags = array(common_flags, ['-Os', '-DNDEBUG'])
    }
    push(common_flags, '-DCELL_USE_NAME=' + sym_name)
    push(common_flags, '-I"' + pkg_dir + '"')
    // Auto-discover include/ directory
    if (fd.is_dir(pkg_dir + '/include')) {
        push(common_flags, '-I"' + pkg_dir + '/include"')
    }
    // External packages need core's source dir for cell.h, quickjs.h, blob.h
    if (pkg != 'core') {
        core_dir = shop.get_package_dir('core')
        push(common_flags, '-I"' + core_dir + '/source"')
    }
    // Add package CFLAGS (resolve relative -I paths against the package dir)
    arrfor(cflags, function(flag) {
        var f = flag
        var ipath = null
        if (starts_with(f, '-I') && !starts_with(f, '-I/')) {
            ipath = text(f, 2)
            if (!starts_with(ipath, pkg_dir)) {
                f = '-I"' + pkg_dir + '/' + ipath + '"'
            }
        }
        push(common_flags, f)
    })
    // Add target CFLAGS
    arrfor(target_cflags, function(flag) {
        push(common_flags, flag)
    })
    // Build full compilation command
    var cmd_parts = [cc, '-c', '-fPIC']
    cmd_parts = array(cmd_parts, common_flags)
    push(cmd_parts, '"' + src_path + '"')
    var cmd_str = text(cmd_parts, ' ')
    if (_opts.verbose) {
        print('[verbose] CFLAGS: ' + text(cflags, ' '))
        print('[verbose] compile: ' + cmd_str)
    }
    // Two-level cache: the quick hash (command + source text) keys the cached
    // dep list and the cached-failure marker; the full hash (command + every
    // dependency file's contents) keys the object file itself
    var file_content = fd.slurp(src_path)
    var quick_content = cmd_str + '\n' + text(file_content)
    var deps_path = cache_path(quick_content, SALT_DEPS)
    var fail_path = cache_path(quick_content, SALT_FAIL)
    var build_dir = get_build_dir()
    ensure_dir(build_dir)
    // Check for cached failure (skip files that previously failed to compile)
    if (fd.is_file(fail_path)) {
        if (_opts.verbose) print('[verbose] skipping ' + file + ' (cached failure)')
        log.shop('skip ' + file + ' (cached failure)')
        return null
    }
    var deps = null
    var full_content = null
    var obj_path = null
    // Warm path: read cached dep list, verify by hashing all deps
    if (fd.is_file(deps_path)) {
        deps = filter(array(text(fd.slurp(deps_path)), '\n'), function(p) {
            return length(p) > 0
        })
        full_content = hash_all_deps(cmd_str, deps)
        obj_path = cache_path(full_content, SALT_OBJ)
        if (fd.is_file(obj_path)) {
            if (_opts.verbose) print('[verbose] cache hit: ' + file)
            log.shop('cache hit ' + file)
            return obj_path
        }
        log.shop('cache stale ' + file + ' (header changed)')
    }
    // Cold path: run cc -MM to discover deps
    log.shop('dep scan ' + file)
    deps = get_c_deps(cc, common_flags, src_path)
    full_content = hash_all_deps(cmd_str, deps)
    obj_path = cache_path(full_content, SALT_OBJ)
    // Check if object exists (might exist from previous build with same deps)
    if (fd.is_file(obj_path)) {
        fd.slurpwrite(deps_path, stone(blob(text(deps, '\n'))))
        if (_opts.verbose) print('[verbose] cache hit: ' + file + ' (after dep scan)')
        log.shop('cache hit ' + file + ' (after dep scan)')
        return obj_path
    }
    // Compile
    log.shop('compiling ' + file)
    log.console('Compiling ' + file)
    var err_path = '/tmp/cell_build_err_' + content_hash(src_path) + '.log'
    var full_cmd = cmd_str + ' -o "' + obj_path + '" 2>"' + err_path + '"'
    var err_text = null
    var missing = null
    var err_lines = null
    var first_err = null
    var ret = os.system(full_cmd)
    if (ret != 0) {
        if (fd.is_file(err_path)) {
            err_text = text(fd.slurp(err_path))
        }
        // Detect "header not found" (clang and gcc spellings) so we can print
        // a short SDK hint instead of the full compiler output
        if (err_text) {
            missing = search(err_text, /fatal error: [''].*[''] file not found/)
            if (missing == null) missing = search(err_text, /fatal error: .*: No such file or directory/)
        }
        if (missing != null) {
            err_lines = array(err_text, "\n")
            first_err = length(err_lines) > 0 ? err_lines[0] : err_text
            print(file + ': ' + first_err + ' (SDK not installed?)')
        } else {
            print('Compilation failed: ' + file)
            if (err_text) print(err_text)
            else print('Command: ' + full_cmd)
        }
        // Cache the failure so we don't retry on every build
        fd.slurpwrite(fail_path, stone(blob(err_text || 'compilation failed')))
        return null
    }
    // Save deps for future warm-path lookups
    fd.slurpwrite(deps_path, stone(blob(text(deps, '\n'))))
    return obj_path
}
// Build all C files for a package
// target defaults to the host target; buildtype defaults to 'release'.
// Returns array of object file paths for files that compiled successfully.
// Fix: previously a null was pushed for every failed compilation; downstream
// link steps (Build.build_static) then stringified it as the bogus path
// "null" in the link command. Failures are reported by compile_file and are
// now skipped here, matching the pattern build_dynamic already uses.
Build.build_package = function(pkg, target, exclude_main, buildtype) {
    var _target = target || Build.detect_host_target()
    var _buildtype = buildtype || 'release'
    var c_files = pkg_tools.get_c_files(pkg, _target, exclude_main)
    var objects = []
    // Pre-fetch cflags once so compile_file does not re-read package metadata per file
    var pkg_dir = shop.get_package_dir(pkg)
    var cached_cflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'CFLAGS', _target), pkg_dir)
    arrfor(c_files, function(file) {
        var obj = Build.compile_file(pkg, file, _target, {buildtype: _buildtype, cflags: cached_cflags})
        if (obj != null) push(objects, obj)
    })
    return objects
}
// ============================================================================
// Dynamic library building
// ============================================================================
// Compute the deterministic link-content string from everything that affects
// the dylib output: target, compiler, sorted object paths (objects are
// content-addressed, so the path stands in for the object's hash), and both
// flag lists.
function compute_link_content(objects, ldflags, target_ldflags, opts) {
    var parts = ['target:' + text(opts.target), 'cc:' + text(opts.cc)]
    // Sort objects so the hash does not depend on compile order
    arrfor(sort(objects), function(obj) {
        push(parts, 'obj:' + text(obj))
    })
    arrfor(ldflags, function(f) {
        push(parts, 'ldflag:' + text(f))
    })
    arrfor(target_ldflags, function(f) {
        push(parts, 'target_ldflag:' + text(f))
    })
    return text(parts, '\n')
}
// Build a per-module dynamic library for a single C file
// Returns the content-addressed dylib path in .cell/build/<hash>.<target>.dylib,
// or null when compilation or linking fails.
// opts: {buildtype, extra_objects: support .o files linked into the dylib,
// cflags, verbose}
Build.build_module_dylib = function(pkg, file, target, opts) {
    var _opts = opts || {}
    var _target = target || Build.detect_host_target()
    var _buildtype = _opts.buildtype || 'release'
    var _extra = _opts.extra_objects || []
    var obj = Build.compile_file(pkg, file, _target, {buildtype: _buildtype, cflags: _opts.cflags})
    if (!obj) return null
    var tc = toolchains[_target]
    // Use the C++ driver for linking when the toolchain provides one
    var cc = tc.cpp || tc.c
    var local_dir = get_local_dir()
    var pkg_dir = shop.get_package_dir(pkg)
    // Get link flags
    var ldflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'LDFLAGS', _target), pkg_dir)
    var target_ldflags = tc.c_link_args || []
    var resolved_ldflags = []
    // Resolve relative -L paths against the package dir
    arrfor(ldflags, function(flag) {
        var f = flag
        var lpath = null
        if (starts_with(f, '-L') && !starts_with(f, '-L/')) {
            lpath = text(f, 2)
            if (!starts_with(lpath, pkg_dir)) {
                f = '-L"' + pkg_dir + '/' + lpath + '"'
            }
        }
        push(resolved_ldflags, f)
    })
    // Content-addressed output: hash of (all objects + link flags + target)
    var all_objects = [obj]
    all_objects = array(all_objects, _extra)
    var link_content = compute_link_content(all_objects, resolved_ldflags, target_ldflags, {target: _target, cc: cc})
    var build_dir = get_build_dir()
    ensure_dir(build_dir)
    var dylib_path = cache_path(link_content, SALT_DYLIB)
    var cmd_parts = null
    var cmd_str = null
    var ret = null
    if (_opts.verbose) {
        print('[verbose] LDFLAGS: ' + text(resolved_ldflags, ' '))
    }
    if (!fd.is_file(dylib_path)) {
        cmd_parts = [cc, '-shared', '-fPIC']
        if (tc.system == 'darwin') {
            // Undefined symbols resolve against the host process at load time;
            // rpaths let the dylib find prebuilt libs beside the binary and in local_dir
            cmd_parts = array(cmd_parts, [
            '-undefined', 'dynamic_lookup',
            '-Wl,-dead_strip',
            '-Wl,-rpath,@loader_path/../local',
            '-Wl,-rpath,' + local_dir
            ])
        } else if (tc.system == 'linux') {
            cmd_parts = array(cmd_parts, [
            '-Wl,--allow-shlib-undefined',
            '-Wl,--gc-sections',
            '-Wl,-rpath,$ORIGIN/../local',
            '-Wl,-rpath,' + local_dir
            ])
        } else if (tc.system == 'windows') {
            push(cmd_parts, '-Wl,--allow-shlib-undefined')
        }
        push(cmd_parts, '-L"' + local_dir + '"')
        push(cmd_parts, '"' + text(obj) + '"')
        // Support objects (shared package sources) are linked into every module
        arrfor(_extra, function(extra_obj) {
            if (extra_obj != null) push(cmd_parts, '"' + text(extra_obj) + '"')
        })
        cmd_parts = array(cmd_parts, resolved_ldflags)
        cmd_parts = array(cmd_parts, target_ldflags)
        push(cmd_parts, '-o')
        push(cmd_parts, '"' + dylib_path + '"')
        cmd_str = text(cmd_parts, ' ')
        if (_opts.verbose) print('[verbose] link: ' + cmd_str)
        log.shop('linking ' + file)
        log.console('Linking module ' + file + ' -> ' + fd.basename(dylib_path))
        ret = os.system(cmd_str)
        if (ret != 0) {
            print('Linking failed: ' + file)
            return null
        }
    } else {
        log.shop('link cache hit ' + file)
    }
    return dylib_path
}
// Build a dynamic library for a package (one dylib per C file)
// Returns array of {file, symbol, dylib} for each module that linked;
// failed modules are omitted from the result and the manifest.
// Also writes a manifest mapping symbols to dylib paths.
Build.build_dynamic = function(pkg, target, buildtype, opts) {
    var _target = target || Build.detect_host_target()
    var _buildtype = buildtype || 'release'
    var _opts = opts || {}
    var c_files = pkg_tools.get_c_files(pkg, _target, true)
    var results = []
    // Pre-fetch cflags once to avoid repeated TOML reads
    var pkg_dir = shop.get_package_dir(pkg)
    var cached_cflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'CFLAGS', _target), pkg_dir)
    // Compile support sources to cached objects (linked into every module dylib)
    var sources = pkg_tools.get_sources(pkg)
    var support_objects = []
    if (pkg != 'core') {
        arrfor(sources, function(src_file) {
            var obj = Build.compile_file(pkg, src_file, _target, {buildtype: _buildtype, cflags: cached_cflags, verbose: _opts.verbose})
            if (obj != null) push(support_objects, obj)
        })
    }
    arrfor(c_files, function(file) {
        var sym_name = shop.c_symbol_for_file(pkg, file)
        var dylib = Build.build_module_dylib(pkg, file, _target, {buildtype: _buildtype, extra_objects: support_objects, cflags: cached_cflags, verbose: _opts.verbose})
        if (dylib) {
            push(results, {file: file, symbol: sym_name, dylib: dylib})
        }
    })
    // Write manifest so runtime can find dylibs without the build module
    var json = use('json')
    var mpath = manifest_path(pkg)
    fd.slurpwrite(mpath, stone(blob(json.encode(results))))
    return results
}
// ============================================================================
// Static binary building
// ============================================================================
// Build a static binary from multiple packages
// packages: array of package names
// target: toolchain key (defaults to the host target)
// output: output binary path (".exe" appended for Windows targets if missing)
// buildtype: 'release' | 'debug' | 'minsize' (defaults to 'release')
// Returns the final binary path; disrupts when there is nothing to link or
// the link step fails.
Build.build_static = function(packages, target, output, buildtype) {
    var _target = target || Build.detect_host_target()
    var _buildtype = buildtype || 'release'
    var all_objects = []
    var all_ldflags = []
    var seen_flags = {}
    // Compile all packages
    arrfor(packages, function(pkg) {
        var is_core = (pkg == 'core')
        // For core, include main.c; for others, exclude it
        var objects = Build.build_package(pkg, _target, !is_core, _buildtype)
        arrfor(objects, function(obj) {
            // Guard against nulls from failed compilations, which would
            // otherwise be stringified as the bogus path "null" in the link
            // command below
            if (obj != null) push(all_objects, obj)
        })
        // Collect LDFLAGS (with sigil replacement)
        var pkg_dir = shop.get_package_dir(pkg)
        var ldflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'LDFLAGS', _target), pkg_dir)
        // Deduplicate based on the entire LDFLAGS string for this package
        var ldflags_key = pkg + ':' + text(ldflags, ' ')
        if (!seen_flags[ldflags_key]) {
            seen_flags[ldflags_key] = true
            arrfor(ldflags, function(flag) {
                var f = flag
                var lpath = null
                // Resolve relative -L paths against the package dir
                if (starts_with(f, '-L') && !starts_with(f, '-L/')) {
                    lpath = text(f, 2)
                    if (!starts_with(lpath, pkg_dir)) {
                        f = '-L"' + pkg_dir + '/' + lpath + '"'
                    }
                }
                push(all_ldflags, f)
            })
        }
    })
    if (length(all_objects) == 0) {
        print('No object files to link'); disrupt
    }
    // Link
    var cc = toolchains[_target].c
    var target_ldflags = toolchains[_target].c_link_args || []
    var exe_ext = toolchains[_target].system == 'windows' ? '.exe' : ''
    var out_path = output
    if (!ends_with(out_path, exe_ext) && exe_ext) {
        out_path = out_path + exe_ext
    }
    var cmd_parts = [cc]
    arrfor(all_objects, function(obj) {
        push(cmd_parts, '"' + obj + '"')
    })
    arrfor(all_ldflags, function(flag) {
        push(cmd_parts, flag)
    })
    arrfor(target_ldflags, function(flag) {
        push(cmd_parts, flag)
    })
    push(cmd_parts, '-o', '"' + out_path + '"')
    var cmd_str = text(cmd_parts, ' ')
    log.console('Linking ' + out_path)
    var ret = os.system(cmd_str)
    if (ret != 0) {
        print('Linking failed: ' + cmd_str); disrupt
    }
    log.console('Built ' + out_path)
    return out_path
}
// ============================================================================
// Native .cm compilation (source → mcode → QBE IL → .o → .dylib)
// ============================================================================
// Run QBE over the full IL (data + helpers + functions) and assemble the
// result into a single object file.
// NOTE(review): despite the original "batched/parallel" description, this is
// a single-shot implementation; the array return shape keeps callers ready
// for a future batched version.
// il_parts: {data: text, helpers: [text, ...]?, functions: [text, ...]}
// cc: C compiler path
// tmp_prefix: prefix for temp files (e.g. /tmp/cell_native_<hash>)
// extra_flags: extra cc flags (e.g. sanitizers); expected to start with ' '
// Returns an array with the one .o path. Disrupts on assembly failure.
function compile_native_single(il_parts, cc, tmp_prefix, extra_flags) {
    var _extra = extra_flags || ''
    var helpers_il = (il_parts.helpers && length(il_parts.helpers) > 0)
    ? text(il_parts.helpers, "\n") : ""
    var all_fns = text(il_parts.functions, "\n")
    var full_il = il_parts.data + "\n\n" + helpers_il + "\n\n" + all_fns
    var asm_text = os.qbe(full_il)
    var s_path = tmp_prefix + '.s'
    var o_path = tmp_prefix + '.o'
    var rc = null
    fd.slurpwrite(s_path, stone(blob(asm_text)))
    rc = os.system(cc + _extra + ' -c ' + s_path + ' -o ' + o_path)
    if (rc != 0) {
        print('Assembly failed'); disrupt
    }
    return [o_path]
}
// Post-process QBE IL: insert dead labels after ret/jmp (QBE requires every
// basic block to begin with a label, even unreachable ones).
function qbe_insert_dead_labels(il_text) {
    var lines = array(il_text, "\n")
    var result = []
    var dead_id = 0
    // True when the previous instruction was a terminator and the next
    // instruction line needs a synthetic label inserted before it
    var need_label = false
    var i = 0
    var line = null
    var trimmed = null
    while (i < length(lines)) {
        line = lines[i]
        trimmed = trim(line)
        // Emit a fresh unreachable label before instruction text following a terminator
        if (need_label && !starts_with(trimmed, '@') && !starts_with(trimmed, '}') && length(trimmed) > 0) {
            push(result, "@_dead_" + text(dead_id))
            dead_id = dead_id + 1
            need_label = false
        }
        // An explicit label, a closing brace, or a blank line also satisfies QBE
        if (starts_with(trimmed, '@') || starts_with(trimmed, '}') || length(trimmed) == 0) {
            need_label = false
        }
        // A terminator ends the block; whatever follows must start a new one
        if (starts_with(trimmed, 'ret ') || starts_with(trimmed, 'jmp ')) {
            need_label = true
        }
        push(result, line)
        i = i + 1
    }
    return text(result, "\n")
}
// ----------------------------------------------------------------------------
// Native backend helpers — shared by compile_native and compile_native_ir,
// whose tails were previously ~55 duplicated lines.
// ----------------------------------------------------------------------------
// Emit QBE IL for optimized mcode IR. sym_name is only derived when a package
// context is known (it names the exported entry symbol).
function native_emit_il(optimized, src_path, pkg) {
    var qbe_macros = use('qbe')
    var qbe_emit = use('qbe_emit')
    var sym_name = null
    if (pkg) {
        sym_name = shop.c_symbol_for_file(pkg, fd.basename(src_path))
    }
    return qbe_emit(optimized, qbe_macros, sym_name)
}
// Shared native backend: hash the source for the cache key, check the dylib
// cache, run QBE + the assembler, build the runtime stubs if needed, and link.
// Returns the content-addressed dylib path. Disrupts on toolchain failure.
function native_emit_and_link(il_parts, src_path, _target) {
    var tc = toolchains[_target]
    var cc = tc.c
    var san_flags = native_sanitize_flags()
    var san_suffix = length(san_flags) > 0 ? '_asan' : ''
    var qbe_rt_path = null
    var rc = null
    // Content hash for cache key: source text + target + sanitizer flags
    var src = text(fd.slurp(src_path))
    var native_key = native_cache_content(src, _target, san_flags)
    var build_dir = get_build_dir()
    ensure_dir(build_dir)
    var dylib_path = cache_path(native_key, SALT_NATIVE)
    if (fd.is_file(dylib_path))
        return dylib_path
    // Compile and assemble
    var tmp = '/tmp/cell_native_' + content_hash(native_key)
    var rt_o_path = '/tmp/cell_qbe_rt' + san_suffix + '.o'
    var o_paths = compile_native_single(il_parts, cc, tmp, san_flags)
    // Compile QBE runtime stubs if needed (cached in /tmp per sanitizer mode)
    if (!fd.is_file(rt_o_path)) {
        qbe_rt_path = shop.get_package_dir('core') + '/qbe_rt.c'
        rc = os.system(cc + san_flags + ' -c ' + qbe_rt_path + ' -o ' + rt_o_path + ' -fPIC')
        if (rc != 0) {
            print('QBE runtime stubs compilation failed'); disrupt
        }
    }
    // Link dylib; undefined symbols resolve against the host process at load time
    var link_cmd = cc + san_flags + ' -shared -fPIC'
    if (tc.system == 'darwin') {
        link_cmd = link_cmd + ' -undefined dynamic_lookup'
    } else if (tc.system == 'linux') {
        link_cmd = link_cmd + ' -Wl,--allow-shlib-undefined'
    }
    var oi = 0
    while (oi < length(o_paths)) {
        link_cmd = link_cmd + ' ' + o_paths[oi]
        oi = oi + 1
    }
    link_cmd = link_cmd + ' ' + rt_o_path + ' -o ' + dylib_path
    rc = os.system(link_cmd)
    if (rc != 0) {
        print('Linking native dylib failed for: ' + src_path); disrupt
    }
    log.console('Built native: ' + fd.basename(dylib_path))
    return dylib_path
}
// Compile a .cm source file to a native .dylib via QBE
// (source → mcode → QBE IL → .o → .dylib).
// buildtype is accepted for interface compatibility but does not currently
// affect native codegen. Returns the content-addressed dylib path; disrupts
// when the source is missing or the toolchain fails.
Build.compile_native = function(src_path, target, buildtype, pkg) {
    var _target = target || Build.detect_host_target()
    if (!fd.is_file(src_path)) {
        print('Source file not found: ' + src_path); disrupt
    }
    // Step 1: compile through the mcode pipeline; Step 2: emit IL and link
    var optimized = shop.compile_file(src_path)
    var il_parts = native_emit_il(optimized, src_path, pkg)
    return native_emit_and_link(il_parts, src_path, _target)
}
// Compile pre-compiled mcode IR to a native .dylib via QBE.
// Use this when the caller already has the optimized IR (avoids calling mcode
// twice and hitting module-level state pollution).
// opts: {target, buildtype (unused by native codegen), pkg}
Build.compile_native_ir = function(optimized, src_path, opts) {
    var _target = (opts && opts.target) || Build.detect_host_target()
    var pkg = opts && opts.pkg
    var il_parts = native_emit_il(optimized, src_path, pkg)
    return native_emit_and_link(il_parts, src_path, _target)
}
// ============================================================================
// Module table generation (for static builds)
// ============================================================================
// Compile a .cm module to mach bytecode blob
// Returns the raw mach bytes as a blob. Disrupts when the source is missing.
Build.compile_cm_to_mach = function(src_path) {
    if (!fd.is_file(src_path)) {
        print('Source file not found: ' + src_path); disrupt
    }
    var json = use('json')
    // Run the full compile pipeline, then hand the optimized mcode IR
    // (as JSON) to the mach binary encoder
    var optimized = shop.compile_file(src_path)
    return mach_compile_mcode_bin(src_path, json.encode(optimized))
}
// Generate a module_table.c file that embeds mach bytecode for .cm modules
// modules: array of {name, src_path} — name is the module name, src_path is the .cm file
// output: path to write the generated .c file
// Returns the output path.
Build.generate_module_table = function(modules, output) {
    var lines = []
    var json = use('json')
    push(lines, '/* Generated module table — do not edit */')
    push(lines, '#include <stddef.h>')
    push(lines, '#include <string.h>')
    push(lines, '')
    push(lines, 'struct cell_embedded_entry {')
    push(lines, ' const char *name;')
    push(lines, ' const unsigned char *data;')
    push(lines, ' size_t size;')
    push(lines, '};')
    push(lines, '')
    var entries = []
    arrfor(modules, function(mod) {
        // Sanitize the module name into a valid C identifier
        var safe = replace(replace(replace(mod.name, '/', '_'), '.', '_'), '-', '_')
        var mach = Build.compile_cm_to_mach(mod.src_path)
        var bytes = array(mach)
        var hex = []
        // Emit each byte as a 0xNN literal
        arrfor(bytes, function(b) {
            push(hex, '0x' + text(b, 'h2'))
        })
        push(lines, 'static const unsigned char mod_' + safe + '_data[] = {')
        push(lines, ' ' + text(hex, ', '))
        push(lines, '};')
        push(lines, '')
        push(entries, safe)
        log.console('Embedded: ' + mod.name + ' (' + text(length(bytes)) + ' bytes)')
    })
    // Lookup function: linear strcmp scan over all embedded modules
    push(lines, 'const struct cell_embedded_entry *cell_embedded_module_lookup(const char *name) {')
    arrfor(modules, function(mod, i) {
        var safe = entries[i]
        push(lines, ' if (strcmp(name, "' + mod.name + '") == 0) {')
        push(lines, ' static const struct cell_embedded_entry e = {"' + mod.name + '", mod_' + safe + '_data, sizeof(mod_' + safe + '_data)};')
        push(lines, ' return &e;')
        push(lines, ' }')
    })
    push(lines, ' return (void *)0;')
    push(lines, '}')
    var c_text = text(lines, '\n')
    fd.slurpwrite(output, stone(blob(c_text)))
    log.console('Generated ' + output)
    return output
}
// ============================================================================
// Convenience functions
// ============================================================================
// Build dynamic libraries for all installed packages.
// Builds core first, then the remaining packages, and prints a per-package
// build report. Returns array of {package, modules}.
Build.build_all_dynamic = function(target, buildtype, opts) {
    var _target = target || Build.detect_host_target()
    var _buildtype = buildtype || 'release'
    var _opts = opts || {}
    var packages = shop.list_packages()
    var results = []
    var core_mods = null
    var total_files = 0
    var total_ok = 0
    var total_fail = 0
    // Build core first
    if (find(packages, function(p) { return p == 'core' }) != null) {
        core_mods = Build.build_dynamic('core', _target, _buildtype, _opts)
        push(results, {package: 'core', modules: core_mods})
    }
    // Build other packages
    arrfor(packages, function(pkg) {
        if (pkg == 'core') return
        var pkg_mods = Build.build_dynamic(pkg, _target, _buildtype, _opts)
        push(results, {package: pkg, modules: pkg_mods})
    })
    // Print build report
    print('\n--- Build Report ---')
    arrfor(results, function(r) {
        var pkg_dir = shop.get_package_dir(r.package)
        var c_files = pkg_tools.get_c_files(r.package, _target, true)
        var file_count = length(c_files)
        // build_dynamic omits failed modules, so the difference is the failure count
        var ok_count = length(r.modules)
        var fail_count = file_count - ok_count
        total_files = total_files + file_count
        total_ok = total_ok + ok_count
        total_fail = total_fail + fail_count
        if (file_count == 0) return
        var status = fail_count == 0 ? 'OK' : `${ok_count}/${file_count}`
        print(` ${r.package}: ${status}`)
    })
    print(`Total: ${total_ok}/${total_files} compiled, ${total_fail} failed`)
    print('--------------------\n')
    return results
}
// Export salt constants and cache_path for shop.cm and others
Build.SALT_OBJ = SALT_OBJ
Build.SALT_DYLIB = SALT_DYLIB
Build.SALT_NATIVE = SALT_NATIVE
Build.SALT_MACH = SALT_MACH
Build.SALT_MCODE = SALT_MCODE
Build.SALT_DEPS = SALT_DEPS
Build.SALT_FAIL = SALT_FAIL
// Cache-path helpers shared with the runtime side
Build.cache_path = cache_path
Build.manifest_path = manifest_path
// Native AOT helpers (sanitizer flags feed into the native cache key)
Build.native_sanitize_flags = native_sanitize_flags
Build.native_cache_content = native_cache_content
return Build