// build.cm - Simplified build utilities for Cell
|
|
//
|
|
// Key functions:
|
|
// Build.compile_file(pkg, file, target) - Compile a C file, returns object path
|
|
// Build.build_package(pkg, target) - Build all C files for a package
|
|
// Build.build_dynamic(pkg, target) - Build dynamic library for a package
|
|
// Build.build_static(packages, target, output) - Build static binary
|
|
|
|
var fd = use('fd')
|
|
var crypto = use('crypto')
|
|
var blob = use('blob')
|
|
var os = use('os')
|
|
var toolchains = use('toolchains')
|
|
var shop = use('internal/shop')
|
|
var pkg_tools = use('package')
|
|
|
|
var Build = {}
|
|
|
|
// ============================================================================
|
|
// Sigil replacement
|
|
// ============================================================================
|
|
|
|
// Get the local directory for prebuilt libraries
|
|
// Return the project-relative directory for prebuilt libraries (.cell/local).
// Thin wrapper over the shop module so callers here don't depend on it directly.
function get_local_dir() {
  return shop.get_local_dir()
}
|
|
|
|
// Replace sigils in a string
|
|
// Supports: $LOCAL -> absolute path to .cell/local, $PACKAGE -> package dir (if provided)
|
|
// Expand build sigils in a single flag string.
//   $LOCAL   -> absolute path of the .cell/local directory
//   $PACKAGE -> the package directory (only substituted when pkg_dir is given)
function replace_sigils(str, pkg_dir) {
  var local_path = fd.realpath('.') + '/' + get_local_dir()
  var expanded = replace(str, '$LOCAL', local_path)
  if (!pkg_dir) return expanded
  return replace(expanded, '$PACKAGE', pkg_dir)
}
|
|
|
|
// Replace sigils in an array of flags
|
|
// Expand build sigils ($LOCAL, $PACKAGE) in every flag of an array.
// Returns a new array; the input is not modified.
function replace_sigils_array(flags, pkg_dir) {
  var expanded = []
  arrfor(flags, function(f) {
    push(expanded, replace_sigils(f, pkg_dir))
  })
  return expanded
}
|
|
|
|
Build.get_local_dir = get_local_dir
|
|
|
|
// ============================================================================
|
|
// Toolchain helpers
|
|
// ============================================================================
|
|
|
|
// List the names of all known build targets.
// NOTE(review): relies on array(object) yielding the toolchains table's keys —
// confirm against the array() builtin's object overload.
Build.list_targets = function() {
  return array(toolchains)
}
|
|
|
|
// True when a toolchain entry exists for the given target name.
Build.has_target = function(target) {
  var tc = toolchains[target]
  return tc != null
}
|
|
|
|
// Detect the toolchain target matching the host machine.
// Falls back to 'arm64' when the runtime has no os.arch().
// Returns null for platforms with no known target.
Build.detect_host_target = function() {
  var platform = os.platform()
  var arch = os.arch ? os.arch() : 'arm64'
  var is_x86 = arch == 'x86_64'

  if (platform == 'macOS' || platform == 'darwin') return is_x86 ? 'macos_x86_64' : 'macos_arm64'
  if (platform == 'Linux' || platform == 'linux') return is_x86 ? 'linux' : 'linux_arm64'
  if (platform == 'Windows' || platform == 'windows') return 'windows'
  return null
}
|
|
|
|
// ============================================================================
|
|
// Content-addressed build cache
|
|
// ============================================================================
|
|
|
|
// Hash a string with BLAKE2 (32-byte digest) and return it as text.
// Used as the content-addressed cache key for objects and dylibs.
// NOTE(review): 'h' presumably selects hex encoding — confirm against text().
function content_hash(str) {
  var bb = stone(blob(str))
  return text(crypto.blake2(bb, 32), 'h')
}
|
|
|
|
// Return the build-cache directory (where content-addressed artifacts live).
function get_build_dir() {
  return shop.get_build_dir()
}
|
|
|
|
// Create a directory path recursively (mkdir -p style).
// Walks the path one component at a time, creating each missing level.
// NOTE(review): assumes fd.stat() on a nonexistent path returns a value whose
// .isDirectory is falsy rather than disrupting — confirm against the fd module.
function ensure_dir(path) {
  if (fd.stat(path).isDirectory) return
  // Split into path components; empty components (from '//' or a leading '/')
  // are skipped in the loop below.
  var parts = array(path, '/')
  // Preserve a leading '/' so absolute paths stay absolute.
  var current = starts_with(path, '/') ? '/' : ''
  var i = 0
  for (i = 0; i < length(parts); i++) {
    if (parts[i] == '') continue
    current += parts[i] + '/'
    if (!fd.stat(current).isDirectory) fd.mkdir(current)
  }
}
|
|
|
|
Build.ensure_dir = ensure_dir
|
|
|
|
// ============================================================================
|
|
// Compilation
|
|
// ============================================================================
|
|
|
|
// Compile a single C file for a package
|
|
// Returns the object file path (content-addressed in .cell/build)
|
|
// Compile a single C file for a package.
// pkg:    package name; file: path relative to the package dir
// target: toolchain key into the toolchains table
// opts:   optional {buildtype: 'release'|'debug'|'minsize', cflags: [...]}
// Returns the content-addressed object file path in the build dir, or null on
// failure (missing source or compiler error — a message is printed either way).
Build.compile_file = function(pkg, file, target, opts) {
  var _opts = opts || {}
  var _buildtype = _opts.buildtype || 'release'
  var pkg_dir = shop.get_package_dir(pkg)
  var src_path = pkg_dir + '/' + file
  var core_dir = null

  if (!fd.is_file(src_path)) {
    print('Source file not found: ' + src_path)
    return null
  }

  // Use pre-fetched cflags if provided (callers batch this to avoid repeated
  // package-metadata reads), otherwise fetch and sigil-expand them here.
  var cflags = _opts.cflags || replace_sigils_array(pkg_tools.get_flags(pkg, 'CFLAGS', target), pkg_dir)
  var target_cflags = toolchains[target].c_args || []
  var cc = toolchains[target].c

  // Symbol name for this file (used for -DCELL_USE_NAME below)
  var sym_name = shop.c_symbol_for_file(pkg, file)

  // Build command; -fPIC so the object can go into a shared library later.
  var cmd_parts = [cc, '-c', '-fPIC']

  // Buildtype-specific optimization/debug flags
  if (_buildtype == 'release') {
    cmd_parts = array(cmd_parts, ['-O3', '-DNDEBUG'])
  } else if (_buildtype == 'debug') {
    cmd_parts = array(cmd_parts, ['-O2', '-g'])
  } else if (_buildtype == 'minsize') {
    cmd_parts = array(cmd_parts, ['-Os', '-DNDEBUG'])
  }

  push(cmd_parts, '-DCELL_USE_NAME=' + sym_name)
  push(cmd_parts, '-I"' + pkg_dir + '"')

  // Auto-discover an include/ directory inside the package
  if (fd.is_dir(pkg_dir + '/include')) {
    push(cmd_parts, '-I"' + pkg_dir + '/include"')
  }

  // External packages need core's source dir for cell.h, quickjs.h, blob.h
  if (pkg != 'core') {
    core_dir = shop.get_package_dir('core')
    push(cmd_parts, '-I"' + core_dir + '/source"')
  }

  // Add package CFLAGS; relative -I paths are resolved against the package dir
  arrfor(cflags, function(flag) {
    var f = flag
    var ipath = null
    if (starts_with(f, '-I') && !starts_with(f, '-I/')) {
      ipath = text(f, 2)
      if (!starts_with(ipath, pkg_dir)) {
        f = '-I"' + pkg_dir + '/' + ipath + '"'
      }
    }
    push(cmd_parts, f)
  })

  // Add target-specific CFLAGS from the toolchain definition
  arrfor(target_cflags, function(flag) {
    push(cmd_parts, flag)
  })

  push(cmd_parts, '"' + src_path + '"')

  var cmd_str = text(cmd_parts, ' ')

  // Cache key: full compile command + file content, so either change recompiles
  var file_content = fd.slurp(src_path)
  var hash_input = cmd_str + '\n' + text(file_content)
  var hash = content_hash(hash_input)

  var build_dir = get_build_dir()
  ensure_dir(build_dir)
  var obj_path = build_dir + '/' + hash

  // Cache hit: the object was already compiled with identical inputs
  if (fd.is_file(obj_path)) {
    return obj_path
  }

  // Compile — capture stderr to a temp log so we can distinguish a
  // missing-header failure (usually "SDK not installed") from a real error.
  var err_path = '/tmp/cell_build_err_' + hash + '.log'
  var full_cmd = cmd_str + ' -o "' + obj_path + '" 2>"' + err_path + '"'
  var err_text = null
  var missing = null
  var err_lines = null
  var first_err = null
  log.console('Compiling ' + file)
  var ret = os.system(full_cmd)
  if (ret != 0) {
    if (fd.is_file(err_path)) {
      err_text = text(fd.slurp(err_path))
    }
    if (err_text) {
      // clang: "fatal error: 'foo.h' file not found"
      missing = search(err_text, /fatal error: [''].*[''] file not found/)
      // gcc: "fatal error: foo.h: No such file or directory"
      if (missing == null) missing = search(err_text, /fatal error: .*: No such file or directory/)
    }
    if (missing != null) {
      // Missing header — report just the first diagnostic line with a hint
      err_lines = array(err_text, "\n")
      first_err = length(err_lines) > 0 ? err_lines[0] : err_text
      print(file + ': ' + first_err + ' (SDK not installed?)')
    } else {
      print('Compilation failed: ' + file)
      if (err_text) print(err_text)
      else print('Command: ' + full_cmd)
    }
    return null
  }

  return obj_path
}
|
|
|
|
// Build all C files for a package
|
|
// Returns array of object file paths
|
|
// Build all C files for a package.
// pkg:          package name
// target:       toolchain key (defaults to the detected host target)
// exclude_main: when truthy, skip the package's main entry file
// buildtype:    'release' (default), 'debug', or 'minsize'
// Returns an array of object file paths for the files that compiled
// successfully; failed files are reported by compile_file and omitted.
Build.build_package = function(pkg, target, exclude_main, buildtype) {
  var _target = target || Build.detect_host_target()
  var _buildtype = buildtype || 'release'
  var c_files = pkg_tools.get_c_files(pkg, _target, exclude_main)
  var objects = []

  // Pre-fetch cflags once so each compile_file call skips the metadata read
  var pkg_dir = shop.get_package_dir(pkg)
  var cached_cflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'CFLAGS', _target), pkg_dir)

  arrfor(c_files, function(file) {
    var obj = Build.compile_file(pkg, file, _target, {buildtype: _buildtype, cflags: cached_cflags})
    // compile_file returns null on failure; don't propagate nulls into the
    // object list, or later link commands would receive the literal path "null"
    if (obj) push(objects, obj)
  })

  return objects
}
|
|
|
|
// ============================================================================
|
|
// Dynamic library building
|
|
// ============================================================================
|
|
|
|
// Compute link key from all inputs that affect the dylib output
|
|
// Compute the content-addressed link key for a dylib from everything that can
// affect its bytes: target, compiler, object set, and both flag lists.
// Object paths are themselves content hashes, so listing the paths suffices.
function compute_link_key(objects, ldflags, target_ldflags, opts) {
  var inputs = ['target:' + opts.target, 'cc:' + opts.cc]

  // Sort the objects so the key is independent of compile order
  arrfor(sort(objects), function(obj_path) {
    push(inputs, 'obj:' + obj_path)
  })
  arrfor(ldflags, function(f) {
    push(inputs, 'ldflag:' + f)
  })
  arrfor(target_ldflags, function(f) {
    push(inputs, 'target_ldflag:' + f)
  })

  return content_hash(text(inputs, '\n'))
}
|
|
|
|
// Build a per-module dynamic library for a single C file
|
|
// Returns the content-addressed dylib path in .cell/build/<hash>.<target>.dylib
|
|
// Build a per-module dynamic library for a single C file.
// pkg/file/target as in compile_file; opts may carry buildtype, pre-fetched
// cflags, and extra_objects (support objects linked into every module).
// Returns the content-addressed dylib path in the build dir, or null when
// either compilation or linking fails. Also installs a copy under
// lib/<pkg>/<stem>.<ext> so the loader can find it by module name.
Build.build_module_dylib = function(pkg, file, target, opts) {
  var _opts = opts || {}
  var _target = target || Build.detect_host_target()
  var _buildtype = _opts.buildtype || 'release'
  var _extra = _opts.extra_objects || []
  var obj = Build.compile_file(pkg, file, _target, {buildtype: _buildtype, cflags: _opts.cflags})
  if (!obj) return null

  var tc = toolchains[_target]
  // Platform-specific shared-library extension
  var dylib_ext = tc.system == 'windows' ? '.dll' : (tc.system == 'darwin' ? '.dylib' : '.so')
  // Prefer the C++ driver when the toolchain has one (links libstdc++ deps)
  var cc = tc.cpp || tc.c
  var local_dir = get_local_dir()
  var pkg_dir = shop.get_package_dir(pkg)

  // Get link flags; relative -L paths are resolved against the package dir
  var ldflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'LDFLAGS', _target), pkg_dir)
  var target_ldflags = tc.c_link_args || []
  var resolved_ldflags = []
  arrfor(ldflags, function(flag) {
    var f = flag
    var lpath = null
    if (starts_with(f, '-L') && !starts_with(f, '-L/')) {
      lpath = text(f, 2)
      if (!starts_with(lpath, pkg_dir)) {
        f = '-L"' + pkg_dir + '/' + lpath + '"'
      }
    }
    push(resolved_ldflags, f)
  })

  // Content-addressed output: hash of (all objects + link flags + target)
  var all_objects = [obj]
  all_objects = array(all_objects, _extra)
  var link_key = compute_link_key(all_objects, resolved_ldflags, target_ldflags, {target: _target, cc: cc})
  var build_dir = get_build_dir()
  ensure_dir(build_dir)
  var dylib_path = build_dir + '/' + link_key + '.' + _target + dylib_ext
  var cmd_parts = null
  var cmd_str = null
  var ret = null

  // Only relink when the content-addressed output doesn't exist yet
  if (!fd.is_file(dylib_path)) {
    cmd_parts = [cc, '-shared', '-fPIC']

    if (tc.system == 'darwin') {
      // dynamic_lookup: symbols from the host binary resolve at load time;
      // rpaths cover both the installed layout (../local) and the build layout
      cmd_parts = array(cmd_parts, [
        '-undefined', 'dynamic_lookup',
        '-Wl,-dead_strip',
        '-Wl,-rpath,@loader_path/../local',
        '-Wl,-rpath,' + local_dir
      ])
    } else if (tc.system == 'linux') {
      // Same idea with GNU ld spellings ($ORIGIN instead of @loader_path)
      cmd_parts = array(cmd_parts, [
        '-Wl,--allow-shlib-undefined',
        '-Wl,--gc-sections',
        '-Wl,-rpath,$ORIGIN/../local',
        '-Wl,-rpath,' + local_dir
      ])
    } else if (tc.system == 'windows') {
      push(cmd_parts, '-Wl,--allow-shlib-undefined')
    }

    push(cmd_parts, '-L"' + local_dir + '"')
    push(cmd_parts, '"' + obj + '"')
    arrfor(_extra, function(extra_obj) {
      push(cmd_parts, '"' + extra_obj + '"')
    })
    cmd_parts = array(cmd_parts, resolved_ldflags)
    cmd_parts = array(cmd_parts, target_ldflags)
    push(cmd_parts, '-o')
    push(cmd_parts, '"' + dylib_path + '"')

    cmd_str = text(cmd_parts, ' ')
    log.console('Linking module ' + file + ' -> ' + fd.basename(dylib_path))
    ret = os.system(cmd_str)
    if (ret != 0) {
      print('Linking failed: ' + file)
      return null
    }
  }

  // Always install to deterministic lib/<pkg>/<stem>.dylib —
  // strip the .c/.cpp extension so the loader can find it by module name
  var file_stem = file
  if (ends_with(file_stem, '.cpp')) file_stem = text(file_stem, 0, -4)
  else if (ends_with(file_stem, '.c')) file_stem = text(file_stem, 0, -2)
  var install_dir = shop.get_lib_dir() + '/' + shop.lib_name_for_package(pkg)
  // Mirror any subdirectory structure of the source file under the lib dir
  var stem_dir = fd.dirname(file_stem)
  if (stem_dir && stem_dir != '.') {
    install_dir = install_dir + '/' + stem_dir
  }
  ensure_dir(install_dir)
  var install_path = shop.get_lib_dir() + '/' + shop.lib_name_for_package(pkg) + '/' + file_stem + dylib_ext
  fd.slurpwrite(install_path, fd.slurp(dylib_path))

  return dylib_path
}
|
|
|
|
// Build a dynamic library for a package (one dylib per C file)
|
|
// Returns array of {file, symbol, dylib} for each module
|
|
// Also writes a manifest mapping symbols to dylib paths
|
|
// Build a dynamic library for a package (one dylib per C file).
// pkg:       package name
// target:    toolchain key (defaults to detected host target)
// buildtype: 'release' (default), 'debug', or 'minsize'
// Returns an array of {file, symbol, dylib} for each module that built;
// failed modules are reported by the compile/link helpers and omitted.
Build.build_dynamic = function(pkg, target, buildtype) {
  var _target = target || Build.detect_host_target()
  var _buildtype = buildtype || 'release'
  var c_files = pkg_tools.get_c_files(pkg, _target, true)
  var results = []

  // Pre-fetch cflags once to avoid repeated TOML reads
  var pkg_dir = shop.get_package_dir(pkg)
  var cached_cflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'CFLAGS', _target), pkg_dir)

  // Compile support sources to cached objects (linked into every module dylib)
  var sources = pkg_tools.get_sources(pkg)
  var support_objects = []
  arrfor(sources, function(src_file) {
    var obj = Build.compile_file(pkg, src_file, _target, {buildtype: _buildtype, cflags: cached_cflags})
    // compile_file returns null on failure; skip it rather than passing the
    // literal path "null" into every module's link command
    if (obj) push(support_objects, obj)
  })

  arrfor(c_files, function(file) {
    var sym_name = shop.c_symbol_for_file(pkg, file)
    var dylib = Build.build_module_dylib(pkg, file, _target, {buildtype: _buildtype, extra_objects: support_objects, cflags: cached_cflags})
    if (dylib) {
      push(results, {file: file, symbol: sym_name, dylib: dylib})
    }
  })

  return results
}
|
|
|
|
// ============================================================================
|
|
// Static binary building
|
|
// ============================================================================
|
|
|
|
// Build a static binary from multiple packages
|
|
// packages: array of package names
|
|
// output: output binary path
|
|
// Build a static binary from multiple packages.
// packages: array of package names ('core' contributes main.c)
// target:   toolchain key (defaults to detected host target)
// output:   output binary path (.exe appended on Windows if absent)
// buildtype: 'release' (default), 'debug', or 'minsize'
// Returns the output path; disrupts when nothing compiled or linking fails.
Build.build_static = function(packages, target, output, buildtype) {
  var _target = target || Build.detect_host_target()
  var _buildtype = buildtype || 'release'
  var all_objects = []
  var all_ldflags = []
  var seen_flags = {}

  // Compile all packages and gather objects + link flags
  arrfor(packages, function(pkg) {
    var is_core = (pkg == 'core')

    // For core, include main.c; for others, exclude it
    var objects = Build.build_package(pkg, _target, !is_core, _buildtype)

    arrfor(objects, function(obj) {
      // Guard against null entries from failed compiles so the literal
      // string "null" never reaches the link command line
      if (obj) push(all_objects, obj)
    })

    // Collect LDFLAGS (with sigil replacement)
    var pkg_dir = shop.get_package_dir(pkg)
    var ldflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'LDFLAGS', _target), pkg_dir)

    // Deduplicate based on the entire LDFLAGS string for this package
    var ldflags_key = pkg + ':' + text(ldflags, ' ')
    if (!seen_flags[ldflags_key]) {
      seen_flags[ldflags_key] = true
      arrfor(ldflags, function(flag) {
        var f = flag
        var lpath = null
        // Resolve relative -L paths against the package directory
        if (starts_with(f, '-L') && !starts_with(f, '-L/')) {
          lpath = text(f, 2)
          if (!starts_with(lpath, pkg_dir)) {
            f = '-L"' + pkg_dir + '/' + lpath + '"'
          }
        }
        push(all_ldflags, f)
      })
    }
  })

  if (length(all_objects) == 0) {
    print('No object files to link'); disrupt
  }

  // Link
  var cc = toolchains[_target].c
  var target_ldflags = toolchains[_target].c_link_args || []
  var exe_ext = toolchains[_target].system == 'windows' ? '.exe' : ''

  var out_path = output
  if (!ends_with(out_path, exe_ext) && exe_ext) {
    out_path = out_path + exe_ext
  }

  var cmd_parts = [cc]

  arrfor(all_objects, function(obj) {
    push(cmd_parts, '"' + obj + '"')
  })

  arrfor(all_ldflags, function(flag) {
    push(cmd_parts, flag)
  })

  arrfor(target_ldflags, function(flag) {
    push(cmd_parts, flag)
  })

  push(cmd_parts, '-o', '"' + out_path + '"')

  var cmd_str = text(cmd_parts, ' ')

  log.console('Linking ' + out_path)
  var ret = os.system(cmd_str)
  if (ret != 0) {
    print('Linking failed: ' + cmd_str); disrupt
  }

  log.console('Built ' + out_path)
  return out_path
}
|
|
|
|
// ============================================================================
|
|
// Native .cm compilation (source → mcode → QBE IL → .o → .dylib)
|
|
// ============================================================================
|
|
|
|
// Batched native compilation: split functions into batches, run QBE on each,
|
|
// assemble in parallel, return array of .o paths.
|
|
// il_parts: {data: text, functions: [text, ...]}
|
|
// cc: C compiler path
|
|
// tmp_prefix: prefix for temp files (e.g. /tmp/cell_native_<hash>)
|
|
// Batched native compilation: split functions into batches, run QBE on each,
// assemble the resulting .s files in parallel, return array of .o paths.
// il_parts:   {data: text, functions: [text, ...], helpers?: [text, ...]}
// cc:         C compiler path (used here only as the assembler driver)
// tmp_prefix: prefix for temp files (e.g. /tmp/cell_native_<hash>)
// Disrupts when any assembly step fails.
function compile_native_batched(il_parts, cc, tmp_prefix) {
  var nfuncs = length(il_parts.functions)
  var nbatch = 8
  var o_paths = []
  var s_paths = []
  var asm_cmds = []
  var batch_fns = null
  var batch_il = null
  var asm_text = null
  var s_path = null
  var o_path = null
  var end = 0
  var bi = 0
  var fi = 0
  var ai = 0
  var rc = null
  var parallel_cmd = null
  var helpers_il = (il_parts.helpers && length(il_parts.helpers) > 0)
  ? text(il_parts.helpers, "\n") : ""
  var prefix = null

  // Clamp batch count: never more batches than functions, never fewer than 1
  if (nfuncs < nbatch) nbatch = nfuncs
  if (nbatch < 1) nbatch = 1

  // Generate .s files: run QBE on each batch.
  // fi is NOT reset between batches — each batch continues from where the
  // previous cutoff left off, so every function is emitted exactly once.
  // NOTE(review): the cutoff nfuncs * (bi + 1) / nbatch assumes numeric
  // division yields monotonically increasing bounds — confirm Cell's
  // division semantics (float vs integer) don't drop the last function.
  while (bi < nbatch) {
    batch_fns = []
    end = nfuncs * (bi + 1) / nbatch
    while (fi < end) {
      batch_fns[] = il_parts.functions[fi]
      fi = fi + 1
    }
    // Batch 0 includes helper functions; others reference them as external symbols
    prefix = (bi == 0 && helpers_il != "") ? helpers_il + "\n\n" : ""
    batch_il = il_parts.data + "\n\n" + prefix + text(batch_fns, "\n")
    asm_text = os.qbe(batch_il)
    s_path = tmp_prefix + '_b' + text(bi) + '.s'
    o_path = tmp_prefix + '_b' + text(bi) + '.o'
    fd.slurpwrite(s_path, stone(blob(asm_text)))
    s_paths[] = s_path
    o_paths[] = o_path
    bi = bi + 1
  }

  // Assemble all batches in parallel via one shell line: "cmd1 & cmd2 & wait".
  // NOTE(review): with most shells, `wait` does not propagate individual
  // assembler exit codes, so a failed batch may go undetected here and only
  // surface at link time — confirm the shell used by os.system.
  while (ai < length(s_paths)) {
    asm_cmds[] = cc + ' -c ' + s_paths[ai] + ' -o ' + o_paths[ai]
    ai = ai + 1
  }
  parallel_cmd = text(asm_cmds, ' & ') + ' & wait'
  rc = os.system(parallel_cmd)
  if (rc != 0) {
    print('Parallel assembly failed'); disrupt
  }

  return o_paths
}
|
|
|
|
// Post-process QBE IL: insert dead labels after ret/jmp (QBE requirement)
|
|
// Post-process QBE IL: insert a fresh dead label after each ret/jmp when the
// next real instruction would otherwise be unreachable without a block label
// (QBE requires every instruction to belong to a labeled block).
// Labels, closing braces, and blank lines cancel the pending insertion.
function qbe_insert_dead_labels(il_text) {
  var out = []
  var next_id = 0
  var pending = false
  var src_lines = array(il_text, "\n")
  var n = length(src_lines)
  var idx = 0
  var raw = null
  var t = null
  for (idx = 0; idx < n; idx++) {
    raw = src_lines[idx]
    t = trim(raw)
    // Emit the pending label only right before a real instruction — never
    // before an existing label, a closing brace, or a blank line.
    if (pending && !starts_with(t, '@') && !starts_with(t, '}') && length(t) > 0) {
      push(out, "@_dead_" + text(next_id))
      next_id = next_id + 1
      pending = false
    }
    if (starts_with(t, '@') || starts_with(t, '}') || length(t) == 0) {
      pending = false
    }
    if (starts_with(t, 'ret ') || starts_with(t, 'jmp ')) {
      pending = true
    }
    push(out, raw)
  }
  return text(out, "\n")
}
|
|
|
|
// Compile a .cm source file to a native .dylib via QBE
|
|
// Returns the content-addressed dylib path
|
|
// Compile a .cm source file to a native .dylib via the full pipeline
// (tokenize -> parse -> fold -> mcode -> streamline -> QBE IL -> .o -> dylib).
// src_path:  path to the .cm file (disrupts when missing)
// target:    toolchain key (defaults to detected host target)
// buildtype: accepted for API symmetry; not referenced after assignment here
// pkg:       optional package name — enables symbol naming and installation
//            under lib/<pkg>/<basename>.<ext>
// Returns the content-addressed dylib path.
Build.compile_native = function(src_path, target, buildtype, pkg) {
  var _target = target || Build.detect_host_target()
  var _buildtype = buildtype || 'release'
  var qbe_rt_path = null
  var native_stem = null
  var native_install_dir = null
  var native_install_path = null

  if (!fd.is_file(src_path)) {
    print('Source file not found: ' + src_path); disrupt
  }

  var tc = toolchains[_target]
  var dylib_ext = tc.system == 'windows' ? '.dll' : (tc.system == 'darwin' ? '.dylib' : '.so')
  var cc = tc.c

  // Step 1: Read source and compile through the front-end pipeline
  var content = fd.slurp(src_path)
  var src = text(content)
  var tokenize = use('tokenize')
  var parse = use('parse')
  var fold = use('fold')
  var mcode_mod = use('mcode')
  var streamline_mod = use('streamline')
  var qbe_macros = use('qbe')
  var qbe_emit = use('qbe_emit')

  var tok_result = tokenize(src, src_path)
  var ast = parse(tok_result.tokens, src, src_path, tokenize)
  var folded = fold(ast)
  var compiled = mcode_mod(folded)
  var optimized = streamline_mod(compiled)

  // Step 2: Generate QBE IL (with a package-qualified entry symbol if pkg given)
  var sym_name = null
  if (pkg) {
    sym_name = shop.c_symbol_for_file(pkg, fd.basename(src_path))
  }
  var il_parts = qbe_emit(optimized, qbe_macros, sym_name)

  // Cache key: source text + target. NOTE(review): buildtype is not part of
  // the key, so release/debug builds of the same source share a cache slot —
  // confirm that is intended.
  var hash = content_hash(src + '\n' + _target + '\nnative')
  var build_dir = get_build_dir()
  ensure_dir(build_dir)

  var dylib_path = build_dir + '/' + hash + '.' + _target + dylib_ext
  if (fd.is_file(dylib_path))
  return dylib_path

  // Compile and assemble via batched parallel pipeline
  var tmp = '/tmp/cell_native_' + hash
  var rt_o_path = '/tmp/cell_qbe_rt.o'

  var o_paths = compile_native_batched(il_parts, cc, tmp)

  // Compile QBE runtime stubs once; the object is shared across builds.
  // NOTE(review): /tmp/cell_qbe_rt.o is not keyed by target or compiler —
  // a stale object from another toolchain could be reused; confirm.
  var rc = null
  if (!fd.is_file(rt_o_path)) {
    qbe_rt_path = shop.get_package_dir('core') + '/qbe_rt.c'
    rc = os.system(cc + ' -c ' + qbe_rt_path + ' -o ' + rt_o_path + ' -fPIC')
    if (rc != 0) {
      print('QBE runtime stubs compilation failed'); disrupt
    }
  }

  // Link dylib — undefined symbols resolve against the host at load time
  var link_cmd = cc + ' -shared -fPIC'
  if (tc.system == 'darwin') {
    link_cmd = link_cmd + ' -undefined dynamic_lookup'
  } else if (tc.system == 'linux') {
    link_cmd = link_cmd + ' -Wl,--allow-shlib-undefined'
  }
  var oi = 0
  while (oi < length(o_paths)) {
    link_cmd = link_cmd + ' ' + o_paths[oi]
    oi = oi + 1
  }
  link_cmd = link_cmd + ' ' + rt_o_path + ' -o ' + dylib_path

  rc = os.system(link_cmd)
  if (rc != 0) {
    print('Linking native dylib failed for: ' + src_path); disrupt
  }

  log.console('Built native: ' + fd.basename(dylib_path))

  // Install to deterministic lib/<pkg>/<stem>.dylib
  if (pkg) {
    native_stem = fd.basename(src_path)
    native_install_dir = shop.get_lib_dir() + '/' + shop.lib_name_for_package(pkg)
    ensure_dir(native_install_dir)
    native_install_path = native_install_dir + '/' + native_stem + dylib_ext
    fd.slurpwrite(native_install_path, fd.slurp(dylib_path))
  }

  return dylib_path
}
|
|
|
|
// Compile pre-compiled mcode IR to a native .dylib via QBE.
|
|
// Use this when the caller already has the optimized IR (avoids calling mcode
|
|
// twice and hitting module-level state pollution).
|
|
// Compile pre-compiled mcode IR to a native .dylib via QBE.
// Use this when the caller already has the optimized IR (avoids calling mcode
// twice and hitting module-level state pollution).
// optimized: the streamlined mcode IR
// src_path:  original .cm path (used for the cache key and install name)
// opts:      optional {target, buildtype, pkg}
// Returns the content-addressed dylib path.
Build.compile_native_ir = function(optimized, src_path, opts) {
  var _target = (opts && opts.target) || Build.detect_host_target()
  var _buildtype = (opts && opts.buildtype) || 'release'
  var pkg = opts && opts.pkg
  var qbe_rt_path = null
  var native_stem = null
  var native_install_dir = null
  var native_install_path = null

  var tc = toolchains[_target]
  var dylib_ext = tc.system == 'windows' ? '.dll' : (tc.system == 'darwin' ? '.dylib' : '.so')
  var cc = tc.c

  var qbe_macros = use('qbe')
  var qbe_emit = use('qbe_emit')

  // Package-qualified entry symbol when a package is given
  var sym_name = null
  if (pkg) {
    sym_name = shop.c_symbol_for_file(pkg, fd.basename(src_path))
  }
  var il_parts = qbe_emit(optimized, qbe_macros, sym_name)

  // Cache key hashes the on-disk source, NOT the supplied IR.
  // NOTE(review): if a caller passes IR that differs from what src_path
  // currently contains, the cache could serve a stale dylib — confirm callers
  // always derive the IR from the same file contents.
  var src = text(fd.slurp(src_path))
  var hash = content_hash(src + '\n' + _target + '\nnative')
  var build_dir = get_build_dir()
  ensure_dir(build_dir)

  var dylib_path = build_dir + '/' + hash + '.' + _target + dylib_ext
  if (fd.is_file(dylib_path))
  return dylib_path

  // Compile and assemble via batched parallel pipeline
  var tmp = '/tmp/cell_native_' + hash
  var rt_o_path = '/tmp/cell_qbe_rt.o'

  var o_paths = compile_native_batched(il_parts, cc, tmp)

  // Compile QBE runtime stubs if needed (shared across builds in /tmp)
  var rc = null
  if (!fd.is_file(rt_o_path)) {
    qbe_rt_path = shop.get_package_dir('core') + '/qbe_rt.c'
    rc = os.system(cc + ' -c ' + qbe_rt_path + ' -o ' + rt_o_path + ' -fPIC')
    if (rc != 0) {
      print('QBE runtime stubs compilation failed'); disrupt
    }
  }

  // Link dylib — undefined symbols resolve against the host at load time
  var link_cmd = cc + ' -shared -fPIC'
  if (tc.system == 'darwin') {
    link_cmd = link_cmd + ' -undefined dynamic_lookup'
  } else if (tc.system == 'linux') {
    link_cmd = link_cmd + ' -Wl,--allow-shlib-undefined'
  }
  var oi = 0
  while (oi < length(o_paths)) {
    link_cmd = link_cmd + ' ' + o_paths[oi]
    oi = oi + 1
  }
  link_cmd = link_cmd + ' ' + rt_o_path + ' -o ' + dylib_path

  rc = os.system(link_cmd)
  if (rc != 0) {
    print('Linking native dylib failed for: ' + src_path); disrupt
  }

  log.console('Built native: ' + fd.basename(dylib_path))

  // Install to deterministic lib/<pkg>/<stem>.dylib so the loader finds it
  if (pkg) {
    native_stem = fd.basename(src_path)
    native_install_dir = shop.get_lib_dir() + '/' + shop.lib_name_for_package(pkg)
    ensure_dir(native_install_dir)
    native_install_path = native_install_dir + '/' + native_stem + dylib_ext
    fd.slurpwrite(native_install_path, fd.slurp(dylib_path))
  }

  return dylib_path
}
|
|
|
|
// ============================================================================
|
|
// Module table generation (for static builds)
|
|
// ============================================================================
|
|
|
|
// Compile a .cm module to mach bytecode blob
|
|
// Returns the raw mach bytes as a blob
|
|
// Compile a .cm module to mach bytecode.
// Runs the full front-end pipeline, then hands the JSON-encoded IR to
// mach_compile_mcode_bin (presumably a runtime builtin — not defined in this
// file). Returns the raw mach bytes; disrupts when src_path is missing.
Build.compile_cm_to_mach = function(src_path) {
  if (!fd.is_file(src_path)) {
    print('Source file not found: ' + src_path); disrupt
  }
  var src = text(fd.slurp(src_path))
  var tokenize = use('tokenize')
  var parse = use('parse')
  var fold = use('fold')
  var mcode_mod = use('mcode')
  var streamline_mod = use('streamline')
  var json = use('json')

  // tokenize -> parse -> fold -> mcode -> streamline, then encode to mach
  var tok_result = tokenize(src, src_path)
  var ast = parse(tok_result.tokens, src, src_path, tokenize)
  var folded = fold(ast)
  var compiled = mcode_mod(folded)
  var optimized = streamline_mod(compiled)
  return mach_compile_mcode_bin(src_path, json.encode(optimized))
}
|
|
|
|
// Generate a module_table.c file that embeds mach bytecode for .cm modules
|
|
// modules: array of {name, src_path} — name is the module name, src_path is the .cm file
|
|
// output: path to write the generated .c file
|
|
// Generate a module_table.c file that embeds mach bytecode for .cm modules.
// modules: array of {name, src_path} — name is the module name, src_path is
//          the .cm file to compile and embed
// output:  path to write the generated .c file
// Returns the output path. The generated file defines one byte array per
// module plus cell_embedded_module_lookup(name) for the static runtime.
Build.generate_module_table = function(modules, output) {
  var lines = []
  // NOTE(review): json is required but not referenced below — confirm unused.
  var json = use('json')
  push(lines, '/* Generated module table — do not edit */')
  push(lines, '#include <stddef.h>')
  push(lines, '#include <string.h>')
  push(lines, '')
  push(lines, 'struct cell_embedded_entry {')
  push(lines, ' const char *name;')
  push(lines, ' const unsigned char *data;')
  push(lines, ' size_t size;')
  push(lines, '};')
  push(lines, '')

  // One C byte-array definition per module; entries records the sanitized
  // identifiers in module order for the lookup function below.
  var entries = []
  arrfor(modules, function(mod) {
    // Sanitize the module name into a valid C identifier suffix
    var safe = replace(replace(replace(mod.name, '/', '_'), '.', '_'), '-', '_')
    var mach = Build.compile_cm_to_mach(mod.src_path)
    var bytes = array(mach)
    var hex = []
    arrfor(bytes, function(b) {
      // 'h2' presumably formats each byte as two hex digits — confirm text()
      push(hex, '0x' + text(b, 'h2'))
    })
    push(lines, 'static const unsigned char mod_' + safe + '_data[] = {')
    push(lines, ' ' + text(hex, ', '))
    push(lines, '};')
    push(lines, '')
    push(entries, safe)
    log.console('Embedded: ' + mod.name + ' (' + text(length(bytes)) + ' bytes)')
  })

  // Lookup function: linear strcmp scan over all embedded modules.
  // NOTE(review): relies on arrfor passing the element index as a second
  // callback argument — confirm against the arrfor builtin.
  push(lines, 'const struct cell_embedded_entry *cell_embedded_module_lookup(const char *name) {')
  arrfor(modules, function(mod, i) {
    var safe = entries[i]
    push(lines, ' if (strcmp(name, "' + mod.name + '") == 0) {')
    push(lines, ' static const struct cell_embedded_entry e = {"' + mod.name + '", mod_' + safe + '_data, sizeof(mod_' + safe + '_data)};')
    push(lines, ' return &e;')
    push(lines, ' }')
  })
  push(lines, ' return (void *)0;')
  push(lines, '}')

  var c_text = text(lines, '\n')
  fd.slurpwrite(output, stone(blob(c_text)))
  log.console('Generated ' + output)
  return output
}
|
|
|
|
// ============================================================================
|
|
// Convenience functions
|
|
// ============================================================================
|
|
|
|
// Build dynamic libraries for all installed packages
|
|
// Build dynamic libraries for all installed packages, core first.
// target/buildtype default as elsewhere. Prints a per-package build report
// (ok count vs. file count) and returns an array of {package, modules}
// where modules is build_dynamic's result for that package.
Build.build_all_dynamic = function(target, buildtype) {
  var _target = target || Build.detect_host_target()
  var _buildtype = buildtype || 'release'

  var packages = shop.list_packages()
  var results = []
  var core_mods = null
  var total_files = 0
  var total_ok = 0
  var total_fail = 0

  // Build core first — other packages depend on its headers/symbols
  if (find(packages, function(p) { return p == 'core' }) != null) {
    core_mods = Build.build_dynamic('core', _target, _buildtype)
    push(results, {package: 'core', modules: core_mods})
  }

  // Build remaining packages in list order
  arrfor(packages, function(pkg) {
    if (pkg == 'core') return
    var pkg_mods = Build.build_dynamic(pkg, _target, _buildtype)
    push(results, {package: pkg, modules: pkg_mods})
  })

  // Print build report: per package, successes vs. total C files.
  // Failure count is inferred by comparing the module count against a fresh
  // get_c_files listing for the same package/target.
  print('\n--- Build Report ---')
  arrfor(results, function(r) {
    // NOTE(review): pkg_dir is computed but unused here — confirm.
    var pkg_dir = shop.get_package_dir(r.package)
    var c_files = pkg_tools.get_c_files(r.package, _target, true)
    var file_count = length(c_files)
    var ok_count = length(r.modules)
    var fail_count = file_count - ok_count
    total_files = total_files + file_count
    total_ok = total_ok + ok_count
    total_fail = total_fail + fail_count
    if (file_count == 0) return
    var status = fail_count == 0 ? 'OK' : `${ok_count}/${file_count}`
    print(` ${r.package}: ${status}`)
  })
  print(`Total: ${total_ok}/${total_files} compiled, ${total_fail} failed`)
  print('--------------------\n')

  return results
}
|
|
|
|
return Build
|