// NOTE(review): extraction banner ("2246 lines / 62 KiB / Plaintext") removed — it was
// viewer metadata, not source code.
var toml = use('toml')
|
|
|
|
var json = use('json')
|
|
var fd = use('fd')
|
|
var http = use('http')
|
|
var miniz = use('miniz')
|
|
var time = use('time')
|
|
var crypto = use('internal/crypto')
|
|
var blob = use('blob')
|
|
|
|
var pkg_tools = use('package')
|
|
var os = use('internal/os')
|
|
var link = use('link')
|
|
|
|
// These come from env (via core_extras in engine.cm):
// analyze, run_ast_fn, core_json, use_cache, core_path, shop_path, actor_api,
// runtime_env, content_hash, cache_path, ensure_build_dir

// Local alias for the env-provided JSON codec.
var shop_json = core_json

// Root directory of the global shop (holds packages/, cache/, build/, reports/).
var global_shop_path = shop_path

// Actor capability table; inject_env() copies entries from here as $-prefixed vars.
var my$_ = actor_api

// Canonical name of the core package.
var core = "core"
|
|
|
|
// Compiler fingerprint: hash of all compiler source files so that any compiler
// change invalidates the entire build cache. Folded into hash_path().
var compiler_fingerprint = (function() {
    var sources = [
        "tokenize", "parse", "fold", "mcode", "streamline",
        "qbe", "qbe_emit", "ir_stats"
    ]
    var acc = ""
    var n = 0
    var src_path = null
    for (n = 0; n < length(sources); n++) {
        src_path = core_path + '/' + sources[n] + '.cm'
        // Missing stages are simply skipped; the hash covers whatever exists.
        if (fd.is_file(src_path))
            acc = acc + text(fd.slurp(src_path))
    }
    return content_hash(stone(blob(acc)))
})()
|
|
|
|
// Make a package name safe for use in C identifiers.
// Replaces /, ., -, @ with _ so the result is a valid C identifier fragment.
function safe_c_name(name) {
    var specials = ['/', '.', '-', '@']
    var out = name
    var k = 0
    for (k = 0; k < length(specials); k++)
        out = replace(out, specials[k], '_')
    return out
}
|
|
|
|
// Look up a previously compiled artifact by its source content.
// Returns the cached blob, or nothing when there is no entry
// (falls off the end; callers treat a falsy result as a miss).
function pull_from_cache(content)
{
    var path = hash_path(content)
    if (fd.is_file(path))
        return fd.slurp(path)
}
|
|
|
|
// Store compiled artifact `obj` under the content-addressed path for `content`.
// hash_path() folds in the compiler fingerprint, so a compiler change writes
// to fresh locations instead of clobbering old entries.
function put_into_cache(content, obj)
{
    var path = hash_path(content)
    fd.slurpwrite(path, obj)
}
|
|
|
|
// Content-addressed build path for `content`, namespaced by `salt`
// (defaults to 'mach'). The compiler fingerprint is folded into the hash so
// any compiler change invalidates every cached artifact at once.
function hash_path(content, salt)
{
    var s = salt || 'mach'
    return global_shop_path + '/build/' + content_hash(stone(blob(text(content) + '\n' + s + '\n' + compiler_fingerprint)))
}
|
|
|
|
// Public namespace for the shop (package/module resolution) API.
var Shop = {}

// Stack tracking the chain of use() calls for error reporting
var use_stack = []

// Resolution scopes, from most to least specific.
var SCOPE_LOCAL = 0    // found inside the requesting package
var SCOPE_PACKAGE = 1  // found in another installed package
var SCOPE_CORE = 2     // found in the core package

// Source-file extensions: modules (.cm) and actors (.ce).
var MOD_EXT = '.cm'
var ACTOR_EXT = '.ce'

// NOTE(review): never reassigned anywhere in this file — presumably other
// platforms are handled elsewhere; confirm before relying on it off macOS.
var dylib_ext = '.dylib' // Default extension
|
|
|
|
|
|
// Absolute directory of an installed package in the global shop.
// NOTE(review): unlike other lookups in this file (e.g. resolve_path),
// `name` is NOT passed through fd.safe_package_path here — confirm callers
// never pass untrusted names, or sanitize for consistency.
Shop.get_package_dir = function(name) {
    return global_shop_path + '/packages/' + name
}
|
|
|
|
// Get the packages directory (in the global shop)
function get_packages_dir() {
    var dir = global_shop_path + '/packages'
    return dir
}
|
|
|
|
// Get the core directory (in the global shop)
var core_package = 'core'

// Memoized realpath of the core dir; false marks a failed lookup so it is
// never retried (see is_core_dir).
var _core_realpath = null

Shop.get_core_dir = function() {
    return get_packages_dir() + '/' + core_package
}
|
|
|
|
// True when `dir` names the core package — either the literal string 'core'
// or a path resolving to the installed core directory's realpath.
function is_core_dir(dir) {
    if (dir == 'core') return true
    if (_core_realpath == null) {
        // Resolve once; false marks a failed lookup so we never retry.
        _core_realpath = fd.realpath(Shop.get_core_dir()) || false
    }
    if (!_core_realpath) return _core_realpath
    return dir == _core_realpath
}
|
|
|
|
// Get the reports directory (in the global shop)
Shop.get_reports_dir = function() {
    var reports = global_shop_path + '/reports'
    return reports
}
|
|
|
|
// First path segment of a multi-segment import (the package alias),
// or null for a bare single-segment name.
function get_import_package(name) {
    var segments = array(name, '/')
    if (length(segments) < 2)
        return null
    return segments[0]
}
|
|
|
|
// Truthy when `path` is a package-private 'internal/...' module.
// Note: returns `path` itself (falsy) when path is empty/null, not false.
function is_internal_path(path)
{
    return path && starts_with(path, 'internal/')
}
|
|
|
|
// Split an explicit package import like 'host.tld/owner/repo/mod' into
// {package, path}. A path "looks explicit" when it is absolute or its first
// segment contains a dot (reads like a hostname). Scans package-prefix
// candidates from longest to shortest, accepting the first that either has a
// cell.toml on disk or appears in lock.toml. Returns null when the path is
// not explicit or no prefix resolves.
function split_explicit_package_import(path)
{
    if (!path) return null
    var parts = array(path, '/')

    if (length(parts) < 2) return null

    var looks_explicit = starts_with(path, '/') || (parts[0] && search(parts[0], '.') != null)
    if (!looks_explicit) return null

    // Find the longest prefix that is an installed package
    var i = 0
    var pkg_candidate = null
    var mod_path = null
    var candidate_dir = null
    for (i = length(parts) - 1; i >= 1; i--) {
        pkg_candidate = text(array(parts, 0, i), '/')
        mod_path = text(array(parts, i), '/')
        // A candidate with no module remainder is useless; try a shorter prefix.
        if (!mod_path || length(mod_path) == 0) continue

        // Installed on disk (has a package manifest)?
        candidate_dir = get_packages_dir() + '/' + fd.safe_package_path(pkg_candidate)
        if (fd.is_file(candidate_dir + '/cell.toml'))
            return {package: pkg_candidate, path: mod_path}

        // Known to the shop via lock.toml even if not unpacked yet?
        if (package_in_shop(pkg_candidate))
            return {package: pkg_candidate, path: mod_path}
    }

    return null
}
|
|
|
|
function package_in_shop(package) {
|
|
var lock = Shop.load_lock()
|
|
return package in lock
|
|
}
|
|
|
|
// Map a package DIRECTORY to its canonical package NAME.
// Resolution order (first hit wins):
//   1. the core dir itself, or core's symlink target  -> 'core'
//   2. a subdir of the shop packages dir (relative or absolute) -> relative name
//   3. the origin name of a link target (linked local packages)
//   4. a name already present in lock.toml
//   5. the `package` field in the dir's own cell.toml
//   6. fallback: the directory path itself
// Disrupts when the directory has no cell.toml at all.
function abs_path_to_package(package_dir)
{
    if (!fd.is_file(package_dir + '/cell.toml')) {
        log.error('Not a valid package directory (no cell.toml): ' + package_dir)
        disrupt
    }

    var packages_prefix = get_packages_dir() + '/'
    var packages_prefix_abs = fd.realpath(get_packages_dir())
    if (packages_prefix_abs) packages_prefix_abs = packages_prefix_abs + '/'
    var core_dir = packages_prefix + core_package

    // Check if this is the core package directory (or its symlink target)
    if (package_dir == core_dir) {
        return 'core'
    }
    // Also check if core_dir is a symlink pointing to package_dir
    var core_target = null
    if (fd.is_link(core_dir)) {
        core_target = fd.readlink(core_dir)
        if (core_target == package_dir || fd.realpath(core_dir) == package_dir) {
            return 'core'
        }
    }

    // Shop subdirectory: strip the prefix to get the package name.
    if (starts_with(package_dir, packages_prefix))
        return text(package_dir, length(packages_prefix))

    // Also try absolute path comparison (package_dir may be absolute, packages_prefix relative)
    if (packages_prefix_abs && starts_with(package_dir, packages_prefix_abs))
        return text(package_dir, length(packages_prefix_abs))

    // Check if this local path is the target of a link
    // If so, return the canonical package name (link origin) instead
    var link_origin = link.get_origin(package_dir)
    if (link_origin) {
        return link_origin
    }

    // in this case, the dir is the package
    if (package_in_shop(package_dir))
        return package_dir

    // For local directories (e.g., linked targets), read the package name from cell.toml
    var _toml_path = package_dir + '/cell.toml'
    var content = null
    var cfg = null
    if (fd.is_file(_toml_path)) {
        content = text(fd.slurp(_toml_path))
        cfg = toml.decode(content)
        if (cfg.package)
            return cfg.package
    }

    // Last resort: the directory path stands in for the name.
    return package_dir
}
|
|
|
|
// given a file, find the absolute path, package name, and import name
// Returns {path, is_module, is_actor, package, name}; package/name stay null
// when the file is not inside a recognizable package directory.
Shop.file_info = function(file) {
    var is_mod = ends_with(file, MOD_EXT)
    var is_act = !is_mod && ends_with(file, ACTOR_EXT)
    var info = {
        path: file,
        is_module: is_mod,
        is_actor: is_act,
        package: null,
        name: null
    }

    // Find package directory and determine package name
    var pkg_dir = pkg_tools.find_package_dir(file)
    var start = 0
    var ext_len = 0
    if (pkg_dir) {
        info.package = abs_path_to_package(pkg_dir)

        // Import name: path relative to the package dir, extension stripped.
        start = length(pkg_dir) + 1
        if (is_act) ext_len = length(ACTOR_EXT)
        else if (is_mod) ext_len = length(MOD_EXT)

        if (ext_len > 0)
            info.name = text(file, start, length(file) - ext_len)
        else
            info.name = text(file, start)
    }

    return info
}
|
|
|
|
// Module path with the leading package segment removed,
// or null when there is no package segment.
function get_import_name(path)
{
    var segs = array(path, '/')
    if (length(segs) >= 2)
        return text(array(segs, 1), '/')
    return null
}
|
|
|
|
// Given a path like 'prosperon/sprite' and a package context,
// resolve the alias 'prosperon' to its canonical package name
function get_aliased_package(path, package_context) {
    if (!package_context) return null
    var hit = pkg_tools.split_alias(package_context, path)
    return hit ? hit.package : null
}
|
|
|
|
// Same as get_aliased_package but just returns the package for the alias part
// (a dummy module segment is appended so split_alias has a path to work on).
function get_canonical_package(alias, package_context) {
    if (!package_context) return null
    var hit = pkg_tools.split_alias(package_context, alias + '/dummy')
    return hit ? hit.package : null
}
|
|
|
|
// Resolve a locator string to its canonical form
// Handles '.', './', '../', and existing directory paths
Shop.resolve_locator = function(locator) {
    var is_relative = locator == '.' || starts_with(locator, './') || starts_with(locator, '../')
    var abs = null
    if (is_relative || fd.is_dir(locator)) {
        abs = fd.realpath(locator)
        if (abs) return abs
    }
    // Not a filesystem locator (or realpath failed): leave it untouched.
    return locator
}
|
|
|
|
// Per-package download/cache directory; '/' and '@' are flattened to '_'.
function package_cache_path(pkg)
{
    var flat = replace(replace(pkg, '/', '_'), '@', '_')
    return global_shop_path + '/cache/' + flat
}
|
|
|
|
// Load lock.toml configuration (from global shop)
// Memoized in _lock. A missing or empty file yields {} WITHOUT caching, so a
// lock written later in the process is still picked up on the next call.
var _lock = null

Shop.load_lock = function() {
    if (_lock)
        return _lock

    var path = global_shop_path + '/lock.toml'

    if (!fd.is_file(path))
        return {}

    var content = text(fd.slurp(path))
    if (!length(content)) return {}

    _lock = toml.decode(content)

    return _lock
}
|
|
|
|
// Save lock.toml configuration (to global shop)
// Also refreshes the in-memory memo so load_lock() stays consistent.
Shop.save_lock = function(lock) {
    var lock_path = global_shop_path + '/lock.toml'
    var encoded = toml.encode(lock)
    fd.slurpwrite(lock_path, stone(blob(encoded)))
    _lock = lock
}
|
|
|
|
|
|
// Shop configuration (shop.toml) with policy flags
var _shop_config = null

// Policy defaults used when shop.toml is absent or missing keys.
var _default_policy = {
    allow_dylib: true,   // load pre-built native dylibs
    allow_static: true,  // resolve statically linked internal symbols
    allow_mach: true,    // use cached .mach bytecode blobs
    allow_compile: true, // compile sources on the fly
    native: false        // prefer native compilation over mach
}
|
|
|
|
// Load (and memoize) the shop configuration from shop.toml.
// Falls back to the default policy when there is no shop path, the file is
// missing (in which case defaults are persisted for the user to edit), or
// the file is empty. Any policy keys the file omits are backfilled from
// _default_policy. Fix: the default-config literal was built in three
// separate places; it is now produced by one local helper.
Shop.load_config = function() {
    if (_shop_config) return _shop_config

    // Fresh default config (copied so later edits don't mutate the template).
    var make_default = function() { return {policy: object(_default_policy)} }

    if (!global_shop_path) {
        _shop_config = make_default()
        return _shop_config
    }

    var path = global_shop_path + '/shop.toml'
    if (!fd.is_file(path)) {
        // First run: write the defaults out so the user has a file to edit.
        _shop_config = make_default()
        fd.slurpwrite(path, stone(blob(toml.encode(_shop_config))))
        return _shop_config
    }

    var content = text(fd.slurp(path))
    if (!length(content)) {
        _shop_config = make_default()
        return _shop_config
    }

    _shop_config = toml.decode(content)
    if (!_shop_config.policy) _shop_config.policy = {}

    // Backfill any policy keys the file omits with the defaults.
    var keys = array(_default_policy)
    var i = 0
    for (i = 0; i < length(keys); i++) {
        if (_shop_config.policy[keys[i]] == null)
            _shop_config.policy[keys[i]] = _default_policy[keys[i]]
    }
    return _shop_config
}
|
|
|
|
// Effective policy for this process.
// NOTE(review): when native_mode is set this mutates the cached config's
// policy in place, making the flag sticky for all later callers — looks
// intentional, confirm.
function get_policy() {
    var config = Shop.load_config()
    if (native_mode) config.policy.native = true
    return config.policy
}
|
|
|
|
// Get information about how to resolve a package
// Local packages always start with /
// Remote packages must be exactly host/owner/repo (3 components)
Shop.resolve_package_info = function(pkg) {
    if (starts_with(pkg, '/')) return 'local'
    var segs = array(pkg, '/')
    if (length(segs) != 3) return null
    if (search(segs[0], 'gitea') == null) return null
    return 'gitea'
}
|
|
|
|
// Reject invalid package names; logs an error and disrupts on failure,
// returns silently when the name is acceptable.
Shop.verify_package_name = function(pkg) {
    if (!pkg) {
        log.error("Empty package name")
        disrupt
    }
    if (pkg == 'local') {
        log.error("local is not a valid package name")
        disrupt
    }
    if (pkg == 'core') {
        log.error("core is not a valid package name")
        disrupt
    }
    // A URL scheme was pasted in; suggest the part after '://'.
    if (search(pkg, '://') != null) {
        log.error(`Invalid package name: ${pkg}; did you mean ${array(pkg, '://')[1]}?`)
        disrupt
    }
}
|
|
|
|
// Convert module package to download URL
// Only gitea-style host/owner/repo packages are downloadable; anything else
// yields null.
Shop.get_download_url = function(pkg, commit_hash) {
    if (Shop.resolve_package_info(pkg) != 'gitea')
        return null

    var segs = array(pkg, '/')
    var host = segs[0]
    var owner = segs[1]
    var repo = segs[2]
    return 'https://' + host + '/' + owner + '/' + repo + '/archive/' + commit_hash + '.zip'
}
|
|
|
|
// Get the API URL for checking remote git commits
// Returns the gitea branches endpoint (trailing slash: caller appends the
// branch name), or null for non-gitea packages.
Shop.get_api_url = function(pkg) {
    if (Shop.resolve_package_info(pkg) != 'gitea')
        return null

    var segs = array(pkg, '/')
    return 'https://' + segs[0] + '/api/v1/repos/' + segs[1] + '/' + segs[2] + '/branches/'
}
|
|
|
|
// Extract commit hash from API response
// Decodes the JSON payload, then for gitea responses takes the first branch
// entry (when an array) and returns its commit id. Null otherwise.
Shop.extract_commit_hash = function(pkg, response) {
    if (!response) return null

    var kind = Shop.resolve_package_info(pkg)

    var payload = json.decode(response)

    if (kind != 'gitea') return null

    if (is_array(payload))
        payload = payload[0]
    return payload.commit && payload.commit.id
}
|
|
|
|
// Dylib handles already opened this process, keyed by dylib path.
var open_dls = {}
var package_dylibs = {} // pkg -> [{file, symbol, dylib}, ...]
|
// Host target detection for native dylib resolution
// Returns a target id like 'macos_arm64' / 'linux' / 'windows',
// or null for unrecognized platforms.
function detect_host_target() {
    var platform = os.platform()
    // Older runtimes may lack os.arch; default matches original behavior.
    var arch = os.arch ? os.arch() : 'arm64'
    var is_x86 = arch == 'x86_64'

    if (platform == 'macOS' || platform == 'darwin')
        return is_x86 ? 'macos_x86_64' : 'macos_arm64'
    if (platform == 'Linux' || platform == 'linux')
        return is_x86 ? 'linux' : 'linux_arm64'
    if (platform == 'Windows' || platform == 'windows')
        return 'windows'
    return null
}
|
|
|
|
// Cached at load time. NOTE(review): try_native_mod_dylib re-detects instead
// of reading this — harmless duplication, but confirm which is canonical.
var host_target = detect_host_target()
|
|
|
|
// Check for a native .cm dylib in the build cache
// Returns a native descriptor {_native, _handle, _sym}, or null if no native dylib exists
// Pure cache probe: never compiles. Requires core/build to already be loaded
// so the cache key can mirror exactly what the builder produced.
function try_native_mod_dylib(pkg, stem) {
    var build_mod = use_cache['core/build']
    if (!build_mod) return null

    var src_path = get_packages_dir() + '/' + fd.safe_package_path(pkg) + '/' + stem
    if (!fd.is_file(src_path)) return null

    var src = text(fd.slurp(src_path))
    var host = detect_host_target()
    if (!host) return null

    // Cache key must match build.cm: include sanitizer flags when the build
    // module exposes them, otherwise fall back to src+host (older builds).
    var san_flags = build_mod.native_sanitize_flags ? build_mod.native_sanitize_flags() : ''
    var native_key = build_mod.native_cache_content ?
        build_mod.native_cache_content(src, host, san_flags) :
        (src + '\n' + host)
    var build_path = build_mod.cache_path(native_key, build_mod.SALT_NATIVE)
    if (!fd.is_file(build_path)) return null

    log.shop('native dylib cache hit: ' + stem)
    var handle = os.dylib_open(build_path)
    if (!handle) return null
    var sym = Shop.c_symbol_for_file(pkg, stem)
    return {_native: true, _handle: handle, _sym: sym}
}
|
|
|
|
// Default capabilities injected into scripts
// These map to $_ properties in engine.cm
// (consumed by script_inject_for / inject_env below).
var SHOP_DEFAULT_INJECT = ['$self', '$overling', '$clock', '$delay', '$start', '$receiver', '$contact', '$portal', '$time_limit', '$couple', '$stop', '$unneeded', '$connection', '$fd']
|
|
|
|
// Decide what a given module is allowed to see.
// This is the capability gate - tweak as needed.
// Returns a fresh copy of the default list so callers may mutate it freely.
Shop.script_inject_for = function(file_info) {
    if (!file_info) return []

    // For now, grant everything to all scripts
    // Later this can be tuned per package/script
    return array(SHOP_DEFAULT_INJECT)
}
|
|
|
|
// Get capabilities for a script path (public API)
Shop.get_script_capabilities = function(path) {
    return Shop.script_inject_for(Shop.file_info(path))
}
|
|
|
|
// Build the env object for a module, with runtime fns and $-prefixed capabilities.
// Matches engine.cm's approach: env properties become free variables in the module.
function inject_env(inject) {
    var env = {}
    if (runtime_env) {
        arrfor(array(runtime_env), function(k) { env[k] = runtime_env[k] })
    }

    // Capability names may arrive with or without the leading '$';
    // normalize, then store under the $-prefixed key.
    arrfor(inject, function(entry) {
        var key = entry
        if (key && key[0] == '$') key = text(key, 1)
        if (key == 'fd') env['$fd'] = fd
        else env['$' + key] = my$_[key]
    })
    return env
}
|
|
|
|
// --- Pipeline API ---
// Lazy-loaded pipeline modules from use_cache (no re-entrancy risk).
var _tokenize_mod = null
var _parse_mod = null
var _fold_mod = null
var _mcode_mod = null
var _streamline_mod = null
var _index_mod = null

// Per-stage memo tables, keyed by the content hash of the source file.
var _token_cache = {}
var _ast_cache = {}
var _analyze_cache = {}
var _compile_cache = {}
var _index_cache = {}
var _summary_cache = {}
|
|
// Lazy getters for the pipeline stages. Each memoizes its module from
// use_cache (preferring the 'core/' key). get_index and get_analyze may
// additionally fall back to Shop.use, which is safe for those two.
var get_tokenize = function() {
    if (_tokenize_mod == null)
        _tokenize_mod = use_cache['core/tokenize'] || use_cache['tokenize']
    return _tokenize_mod
}
var get_parse = function() {
    if (_parse_mod == null)
        _parse_mod = use_cache['core/parse'] || use_cache['parse']
    return _parse_mod
}
var get_fold = function() {
    if (_fold_mod == null)
        _fold_mod = use_cache['core/fold'] || use_cache['fold']
    return _fold_mod
}
var get_mcode = function() {
    if (_mcode_mod == null)
        _mcode_mod = use_cache['core/mcode'] || use_cache['mcode']
    return _mcode_mod
}
var get_streamline = function() {
    if (_streamline_mod == null)
        _streamline_mod = use_cache['core/streamline'] || use_cache['streamline']
    return _streamline_mod
}
var get_index = function() {
    if (_index_mod == null) {
        _index_mod = use_cache['core/index'] || use_cache['index']
        if (!_index_mod) _index_mod = Shop.use('index', 'core')
    }
    return _index_mod
}
var _analyze_mod = null
var get_analyze = function() {
    if (_analyze_mod == null) {
        _analyze_mod = use_cache['core/analyze'] || use_cache['analyze']
        if (!_analyze_mod) _analyze_mod = Shop.use('analyze', 'core')
    }
    return _analyze_mod
}
|
|
|
|
// Tokenize a source file, memoized by content hash.
Shop.tokenize_file = function(path) {
    var source = text(fd.slurp(path))
    var key = content_hash(stone(blob(source)))
    var hit = _token_cache[key]
    if (hit) return hit
    var tokens = get_tokenize()(source, path)
    _token_cache[key] = tokens
    return tokens
}
|
|
|
|
// Parse a source file into an AST, memoized by content hash.
Shop.parse_file = function(path) {
    var source = text(fd.slurp(path))
    var key = content_hash(stone(blob(source)))
    var hit = _ast_cache[key]
    if (hit) return hit
    var tok = Shop.tokenize_file(path)
    var tree = get_parse()(tok.tokens, source, path, get_tokenize())
    _ast_cache[key] = tree
    return tree
}
|
|
|
|
// Parse then constant-fold a source file, memoized by content hash.
Shop.analyze_file = function(path) {
    var source = text(fd.slurp(path))
    var key = content_hash(stone(blob(source)))
    var hit = _analyze_cache[key]
    if (hit) return hit
    var tree = Shop.parse_file(path)
    var folded = get_fold()(tree)
    _analyze_cache[key] = folded
    return folded
}
|
|
|
|
// Resolve import paths on an index in-place.
// For each import, try package-scoped resolution first; fall back to a
// sibling .cm file next to `fname`. Imports that resolve get a
// resolved_path property; others are left untouched.
// Fix: fd.realpath(fname) was computed twice per call (and fd.dirname of it
// once per unresolved import) — both are now hoisted.
Shop.resolve_imports = function(idx_obj, fname) {
    var real = fd.realpath(fname)
    var ctx = Shop.file_info(real).package
    var dir = fd.dirname(real)
    var ri = 0
    var rp = null
    var lp = null
    for (ri = 0; ri < length(idx_obj.imports); ri++) {
        rp = Shop.resolve_use_path(idx_obj.imports[ri].module_path, ctx)
        if (rp == null) {
            // Fall back to a module file in the same directory as fname.
            lp = dir + '/' + idx_obj.imports[ri].module_path + '.cm'
            if (fd.is_file(lp)) {
                rp = lp
            }
        }
        if (rp != null) {
            idx_obj.imports[ri].resolved_path = rp
        }
    }
}
|
|
|
|
// Build (and memoize) the symbol/import index for a source file.
// Imports are resolved in-place on the index before caching.
Shop.index_file = function(path) {
    var src = text(fd.slurp(path))
    var key = content_hash(stone(blob(src)))
    if (_index_cache[key]) return _index_cache[key]
    var tok = Shop.tokenize_file(path)
    // NOTE(review): parses fresh rather than via Shop.parse_file's AST cache —
    // possibly to avoid sharing an AST object; confirm before unifying.
    var ast = get_parse()(tok.tokens, src, path, get_tokenize())
    var idx = get_index().index_ast(ast, tok.tokens, path)
    Shop.resolve_imports(idx, path)
    _index_cache[key] = idx
    return idx
}
|
|
|
|
// Produce (and memoize) the module summary derived from the file's index.
Shop.summary_file = function(path) {
    var source = text(fd.slurp(path))
    var key = content_hash(stone(blob(source)))
    var hit = _summary_cache[key]
    if (hit) return hit
    var summary = get_analyze().module_summary(Shop.index_file(path))
    _summary_cache[key] = summary
    return summary
}
|
|
|
|
// Lower an analyzed file to mcode IR.
// Deliberately not memoized here: compile_file() caches the streamlined result.
Shop.mcode_file = function(path) {
    var folded = Shop.analyze_file(path)
    return get_mcode()(folded)
}
|
|
|
|
// Full compile of a file to streamlined mcode, memoized by content hash.
Shop.compile_file = function(path) {
    var source = text(fd.slurp(path))
    var key = content_hash(stone(blob(source)))
    var hit = _compile_cache[key]
    if (hit) return hit
    var lowered = Shop.mcode_file(path)
    var optimized = get_streamline()(lowered)
    _compile_cache[key] = optimized
    return optimized
}
|
|
|
|
Shop.all_script_paths = function() {
|
|
var packages = Shop.list_packages()
|
|
var result = []
|
|
var i = 0
|
|
var j = 0
|
|
var scripts = null
|
|
var pkg_dir = null
|
|
var has_core = false
|
|
for (i = 0; i < length(packages); i++) {
|
|
if (packages[i] == 'core') has_core = true
|
|
}
|
|
if (!has_core) {
|
|
packages = array(packages, ['core'])
|
|
}
|
|
for (i = 0; i < length(packages); i++) {
|
|
pkg_dir = starts_with(packages[i], '/') ? packages[i] : get_packages_dir() + '/' + fd.safe_package_path(packages[i])
|
|
scripts = get_package_scripts(packages[i])
|
|
for (j = 0; j < length(scripts); j++) {
|
|
result[] = {
|
|
package: packages[i],
|
|
rel_path: scripts[j],
|
|
full_path: pkg_dir + '/' + scripts[j]
|
|
}
|
|
}
|
|
}
|
|
return result
|
|
}
|
|
|
|
// Lazy-loaded compiler modules for on-the-fly compilation.
// Fix: duplicate module-level re-declarations of _mcode_mod and
// _streamline_mod removed — both are already declared (null) alongside the
// pipeline getters earlier in this file, and resolve_mod_fn lazily fills
// them from use_cache on first use.
|
|
|
// Compile a module and return its bytecode blob.
// The bytecode is cached on disk by content hash.
// Resolution cascade, gated by policy flags (first hit wins):
//   1. pre-built native dylib in the build cache        (allow_dylib)
//   2. on-demand native compile via core/build          (native + allow_compile)
//   3. cached .mach blob keyed by source content        (allow_mach)
//   4. cached architecture-independent mcode, re-bound  (allow_compile)
//   5. full pipeline compile                            (allow_compile)
// Disrupts when nothing applies. May return either a raw mach blob or a
// native descriptor {_native, _handle, _sym}; callers handle both shapes.
function resolve_mod_fn(path, pkg) {
    if (!fd.is_file(path)) { log.error(`path ${path} is not a file`); disrupt }

    var content = text(fd.slurp(path))
    if (length(content) == 0) { log.error(`${path}: empty file`); disrupt }
    var content_key = stone(blob(content))
    var native_result = null
    var cached = null
    var ast = null
    var compiled = null
    var ir = null
    var optimized = null
    var mcode_json = null
    var cached_mcode_path = null
    var _pkg_dir = null
    var _stem = null
    var policy = null
    var build_mod = null
    var dylib_path = null
    var handle = null
    var sym = null

    policy = get_policy()

    // Compute _pkg_dir and _stem early so all paths can use them
    // (_stem is the path relative to the package dir, extension included).
    if (pkg) {
        _pkg_dir = get_packages_dir() + '/' + fd.safe_package_path(pkg)
        if (starts_with(path, _pkg_dir + '/')) {
            _stem = text(path, length(_pkg_dir) + 1)
        }
    }

    // Check for native .cm dylib at deterministic path first
    if (policy.allow_dylib && pkg && _stem) {
        native_result = try_native_mod_dylib(pkg, _stem)
        if (native_result != null) return native_result
    }

    // Native compilation path: compile to native dylib instead of mach
    if (policy.native && policy.allow_compile) {
        build_mod = use_cache['core/build']
        if (build_mod) {
            dylib_path = build_mod.compile_native(path, null, null, pkg)
            if (dylib_path) {
                handle = os.dylib_open(dylib_path)
                if (handle) {
                    // Symbol only known when the file sits inside a package dir.
                    sym = pkg && _stem ? Shop.c_symbol_for_file(pkg, _stem) : null
                    return {_native: true, _handle: handle, _sym: sym}
                }
            }
        }
    }

    // Check cache for pre-compiled .mach blob
    if (policy.allow_mach) {
        cached = pull_from_cache(content_key)
        if (cached) {
            return cached
        }
    }

    // Check for cached mcode in content-addressed store
    // (mcode is architecture-independent; only the mach binding is redone).
    if (policy.allow_compile) {
        cached_mcode_path = hash_path(content_key, 'mcode')
        if (fd.is_file(cached_mcode_path)) {
            mcode_json = text(fd.slurp(cached_mcode_path))
            compiled = mach_compile_mcode_bin(path, mcode_json)
            put_into_cache(content_key, compiled)
            return compiled
        }
    }

    // Compile via full pipeline: analyze → mcode → streamline → serialize
    // Load compiler modules from use_cache directly (NOT via Shop.use, which
    // would re-enter resolve_locator → resolve_mod_fn → infinite recursion)
    if (policy.allow_compile) {
        if (!_mcode_mod) _mcode_mod = use_cache['core/mcode'] || use_cache['mcode']
        if (!_streamline_mod) _streamline_mod = use_cache['core/streamline'] || use_cache['streamline']
        if (!_mcode_mod || !_streamline_mod) {
            log.error(`error: compiler modules not loaded (mcode=${_mcode_mod != null}, streamline=${_streamline_mod != null})`)
            disrupt
        }
        ast = analyze(content, path)
        ir = _mcode_mod(ast)
        optimized = _streamline_mod(ir)
        mcode_json = shop_json.encode(optimized)

        // Cache mcode (architecture-independent) in content-addressed store
        fd.ensure_dir(global_shop_path + '/build')
        fd.slurpwrite(cached_mcode_path, stone(blob(mcode_json)))

        // Cache mach blob
        compiled = mach_compile_mcode_bin(path, mcode_json)
        put_into_cache(content_key, compiled)

        return compiled
    }

    log.error(`Module ${path} could not be loaded: no artifact found or all methods blocked by policy`)
    disrupt
}
|
|
|
|
// given a path and a package context
// return module info about where it was found
// Resolve a module path to {path, scope, pkg} without compiling.
// Resolution order (first hit wins):
//   1. explicit package import (host-style path), unless it targets another
//      package's internal/ modules
//   2. no context: core only
//   3. the context package's own directory
//   4. (internal/ paths stop here — private to the owning package)
//   5. alias declared by the context package
//   6. literal path under the shop packages dir
//   7. longest lock.toml package-name prefix of the path
//   8. core fallback
// Returns null when nothing matches.
function resolve_path(path, ctx)
{
    var explicit = split_explicit_package_import(path)
    var explicit_path = null
    var core_dir = null
    var core_file_path = null
    var is_core = null
    var scope = null
    var alias_path = null
    var ctx_dir = null
    var ctx_path = null
    var alias = null
    var package_path = null
    var lock = null
    var best_pkg = null
    var best_remainder = null
    var shop_dir = null
    var shop_file = null

    // internal/ modules are private: a foreign package may not import them.
    if (explicit) {
        if (is_internal_path(explicit.path) && ctx && explicit.package != ctx)
            explicit = null
    }
    if (explicit) {
        explicit_path = get_packages_dir() + '/' + fd.safe_package_path(explicit.package) + '/' + explicit.path
        if (fd.is_file(explicit_path))
            return {path: explicit_path, scope: SCOPE_PACKAGE, pkg: explicit.package}
    }

    // Without a package context only core is searched.
    if (!ctx) {
        core_dir = Shop.get_core_dir()
        core_file_path = core_dir + '/' + path
        if (fd.is_file(core_file_path))
            return {path: core_file_path, scope: SCOPE_CORE, pkg: 'core'}
        return null
    }

    // Context may be an absolute dir (local package) or a shop package name.
    if (starts_with(ctx, '/'))
        ctx_dir = ctx
    else
        ctx_dir = get_packages_dir() + '/' + fd.safe_package_path(ctx)
    ctx_path = ctx_dir + '/' + path

    if (fd.is_file(ctx_path)) {
        is_core = (ctx == 'core') || is_core_dir(ctx_dir)
        scope = is_core ? SCOPE_CORE : SCOPE_LOCAL
        return {path: ctx_path, scope: scope, pkg: is_core ? 'core' : ctx}
    }

    // internal/ paths never escape the owning package.
    if (is_internal_path(path))
        return null

    alias = pkg_tools.split_alias(ctx, path)
    if (alias) {
        alias_path = get_packages_dir() + '/' + fd.safe_package_path(alias.package) + '/' + alias.path
        if (fd.is_file(alias_path))
            return {path: alias_path, scope: SCOPE_PACKAGE, pkg: ctx}
    }

    package_path = get_packages_dir() + '/' + fd.safe_package_path(path)
    if (fd.is_file(package_path))
        return {path: package_path, scope: SCOPE_PACKAGE, pkg: ctx}

    // Shop package scanning: longest prefix match against lock.toml entries
    lock = Shop.load_lock()
    best_pkg = null
    best_remainder = null
    arrfor(array(lock), function(pkg_name) {
        if (starts_with(path, pkg_name + '/')) {
            if (!best_pkg || length(pkg_name) > length(best_pkg)) {
                best_pkg = pkg_name
                best_remainder = text(path, length(pkg_name) + 1)
            }
        }
    })
    if (best_pkg && best_remainder) {
        shop_dir = get_packages_dir() + '/' + fd.safe_package_path(best_pkg)
        shop_file = shop_dir + '/' + best_remainder
        if (fd.is_file(shop_file))
            return {path: shop_file, scope: SCOPE_PACKAGE, pkg: best_pkg}
    }

    // Final fallback: core.
    core_dir = Shop.get_core_dir()
    core_file_path = core_dir + '/' + path
    if (fd.is_file(core_file_path))
        return {path: core_file_path, scope: SCOPE_CORE, pkg: 'core'}

    return null
}
|
|
|
|
// Resolve `path` in package context `ctx`, then compile/load its module fn.
// Returns {path, scope, symbol} or null when the path does not resolve.
function resolve_locator(path, ctx)
{
    var info = resolve_path(path, ctx)
    if (!info) return null
    return {path: info.path, scope: info.scope, symbol: resolve_mod_fn(info.path, info.pkg)}
}
|
|
|
|
// Generate symbol name for a C module file
// e.g., make_c_symbol('core', 'math') -> 'js_core_math_use'
function make_c_symbol(pkg, file) {
    return 'js_' + safe_c_name(pkg) + '_' + safe_c_name(file) + '_use'
}
|
|
|
|
// Compute the manifest path for a package (must match build.cm's manifest_path)
function dylib_manifest_path(pkg) {
    var digest = content_hash(stone(blob(pkg + '\n' + 'manifest')))
    return global_shop_path + '/build/' + digest
}
|
|
|
|
// Read a pre-built dylib manifest for a package.
// Returns the array of {file, symbol, dylib} or null.
function read_dylib_manifest(pkg) {
    var mpath = dylib_manifest_path(pkg)
    if (!fd.is_file(mpath)) return null
    var raw = text(fd.slurp(mpath))
    if (!raw || length(raw) == 0) return null
    return json.decode(raw)
}
|
|
|
|
// Ensure all C modules for a package are built and loaded.
// Returns the array of {file, symbol, dylib} results, cached per package.
// core is always empty (its modules are linked in); when the build module
// is available we build on demand, otherwise we fall back to a manifest
// written by a previous cell build. Returns null when neither works.
function ensure_package_dylibs(pkg) {
    if (package_dylibs[pkg] != null) return package_dylibs[pkg]
    if (pkg == 'core') {
        package_dylibs[pkg] = []
        return []
    }

    var results = null
    var build_mod = use_cache['core/build']
    var target = null
    var c_files = null

    if (build_mod) {
        target = detect_host_target()
        // Unknown host: cannot build, and deliberately NOT cached so a
        // later call may retry.
        if (!target) return null

        c_files = pkg_tools.get_c_files(pkg, target, true)
        if (!c_files || length(c_files) == 0) {
            package_dylibs[pkg] = []
            return []
        }

        log.shop('ensuring C modules for ' + pkg)
        results = build_mod.build_dynamic(pkg, target, 'release', {})
    } else {
        // No build module at runtime — read manifest from cell build
        results = read_dylib_manifest(pkg)
        if (!results) return null
        log.shop('loaded manifest for ' + pkg + ' (' + text(length(results)) + ' modules)')
    }

    if (results == null) results = []
    package_dylibs[pkg] = results

    // Preload all sibling dylibs with RTLD_LAZY|RTLD_GLOBAL
    arrfor(results, function(r) {
        var handle = null
        if (r.dylib && !open_dls[r.dylib]) {
            handle = os.dylib_preload(r.dylib)
            if (handle) open_dls[r.dylib] = handle
        }
    })

    return results
}
|
|
|
|
// Try to resolve a C symbol by building the package on demand
// Returns a loader function or null
// Matches the manifest entry whose source file is `file_stem`.c or .cpp,
// opens (or reuses) its dylib, and verifies the symbol exists before
// handing back a lazy loader closure.
function try_dylib_symbol(sym, pkg, file_stem) {
    var dylibs = ensure_package_dylibs(pkg)
    if (!dylibs || length(dylibs) == 0) return null

    var c_file = file_stem + '.c'
    var cpp_file = file_stem + '.cpp'
    var idx = find(dylibs, function(r) {
        return r.file == c_file || r.file == cpp_file
    })
    if (idx == null) return null
    var entry = dylibs[idx]
    if (!entry || !entry.dylib) return null

    // Reuse an already-open handle when possible; cache new ones.
    var handle = open_dls[entry.dylib]
    if (!handle) {
        handle = os.dylib_open(entry.dylib)
        if (handle) open_dls[entry.dylib] = handle
    }
    if (!handle) { log.shop(`try_dylib: no handle for ${entry.dylib}`); return null }
    if (!os.dylib_has_symbol(handle, sym)) { log.shop(`try_dylib: no symbol ${sym} in ${entry.dylib}`); return null }

    log.shop('resolved ' + sym + ' from build cache')
    return function() { return os.dylib_symbol(handle, sym) }
}
|
|
|
|
// Resolve a C symbol by searching:
|
|
// At each scope: check build-cache dylib first, then internal (static)
|
|
// Resolve a C symbol for a use() path, searching in priority order:
//   explicit package import -> own package -> aliased package -> core.
// At every scope the build-cache dylib is consulted before the statically
// linked (internal) symbol table, gated by the active policy flags
// (policy.allow_dylib / policy.allow_static).
// Returns {symbol: loader_fn, scope, package?, path} or null.
function resolve_c_symbol(path, _pkg_ctx) {
    var package_context = is_core_dir(_pkg_ctx) ? 'core' : _pkg_ctx
    var explicit = split_explicit_package_import(path)
    var sym = null
    var loader = null
    var _path = null
    var core_sym = null
    var canon_pkg = null
    var mod_name = null
    var file_stem = null
    var policy = null

    policy = get_policy()

    // internal/ paths are private to their package: drop an explicit import
    // that targets another package's internals.
    if (explicit) {
        if (is_internal_path(explicit.path) && package_context && explicit.package != package_context)
            explicit = null
    }
    if (explicit) {
        sym = make_c_symbol(explicit.package, explicit.path)
        // NOTE(review): replace() strips the first '.c' occurrence anywhere in
        // the path, not just a trailing extension — confirm intended.
        file_stem = replace(explicit.path, '.c', '')

        // Check build-cache dylib first
        if (policy.allow_dylib) {
            loader = try_dylib_symbol(sym, explicit.package, file_stem)
            if (loader) {
                return {
                    symbol: loader,
                    scope: SCOPE_PACKAGE,
                    package: explicit.package,
                    path: sym
                }
            }
        }

        // Then check internal/static
        if (policy.allow_static && os.internal_exists(sym)) {
            return {
                symbol: function() { return os.load_internal(sym) },
                scope: SCOPE_PACKAGE,
                package: explicit.package,
                path: sym
            }
        }
    }

    // If no package context, only check core
    if (!package_context || package_context == 'core') {
        core_sym = make_c_symbol('core', path)

        // Check build-cache dylib first for core
        if (policy.allow_dylib) {
            loader = try_dylib_symbol(core_sym, 'core', path)
            if (loader) {
                return {
                    symbol: loader,
                    scope: SCOPE_CORE,
                    path: core_sym
                }
            }
        }

        if (policy.allow_static && os.internal_exists(core_sym)) {
            return {
                symbol: function() { return os.load_internal(core_sym) },
                scope: SCOPE_CORE,
                path: core_sym
            }
        }
        return null
    }

    // 1. Check own package (build-cache dylib first, then internal)
    sym = make_c_symbol(package_context, path)

    if (policy.allow_dylib) {
        loader = try_dylib_symbol(sym, package_context, path)
        if (loader) {
            return {
                symbol: loader,
                scope: SCOPE_LOCAL,
                path: sym
            }
        }
    }

    if (policy.allow_static && os.internal_exists(sym)) {
        return {
            symbol: function() { return os.load_internal(sym) },
            scope: SCOPE_LOCAL,
            path: sym
        }
    }

    // internal/ paths never escalate past the owning package.
    if (is_internal_path(path))
        return null

    // 2. Check aliased package imports (e.g. 'prosperon/sprite')
    var pkg_alias = get_import_package(path)
    if (pkg_alias) {
        canon_pkg = get_aliased_package(path, package_context)
        if (canon_pkg) {
            mod_name = get_import_name(path)
            sym = make_c_symbol(canon_pkg, mod_name)

            if (policy.allow_dylib) {
                loader = try_dylib_symbol(sym, canon_pkg, mod_name)
                if (loader) {
                    return {
                        symbol: loader,
                        scope: SCOPE_PACKAGE,
                        package: canon_pkg,
                        path: sym
                    }
                }
            }

            if (policy.allow_static && os.internal_exists(sym)) {
                return {
                    symbol: function() { return os.load_internal(sym) },
                    scope: SCOPE_PACKAGE,
                    package: canon_pkg,
                    path: sym
                }
            }
        }
    }

    // 3. Check core (build-cache dylib first, then internal)
    core_sym = make_c_symbol('core', path)

    if (policy.allow_dylib) {
        loader = try_dylib_symbol(core_sym, 'core', path)
        if (loader) {
            return {
                symbol: loader,
                scope: SCOPE_CORE,
                path: core_sym
            }
        }
    }

    if (policy.allow_static && os.internal_exists(core_sym)) {
        return {
            symbol: function() { return os.load_internal(core_sym) },
            scope: SCOPE_CORE,
            path: core_sym
        }
    }

    return null
}
|
|
|
|
// Cache for resolved module info.
// Keyed by "<package_context>:<path>" (empty context -> ":<path>").
// Entries are the info objects built by resolve_module_info; individual
// entries are invalidated (set to null) by Shop.module_reload.
var module_info_cache = {}
|
|
|
|
// Resolve a use() path within a package context to a module descriptor:
//   {cache_key, c_resolve, mod_resolve, min_scope}
// cache_key canonicalizes the module so the same file reached via different
// aliases shares one use_cache slot. Returns null when neither a C symbol
// nor a .cm script can be found (negative results are NOT cached).
function resolve_module_info(path, package_context) {
    var lookup_key = package_context ? package_context + ':' + path : ':' + path

    if (module_info_cache[lookup_key])
        return module_info_cache[lookup_key]

    // Scope 999 is the "not found" sentinel; real scopes are < 900.
    var c_resolve = resolve_c_symbol(path, package_context) || {scope:999}
    var mod_resolve = resolve_locator(path + '.cm', package_context) || {scope:999}
    var min_scope = min(c_resolve.scope, mod_resolve.scope)

    if (min_scope == 999)
        return null

    var cache_key = null
    var real_path = null
    var real_info = null
    var pkg_alias = null
    var canon_pkg = null
    var mod_name = null

    // Prefer deriving the key from the script's real on-disk location, so
    // symlinked/aliased paths collapse to one canonical "package/name" key.
    if (mod_resolve.scope == SCOPE_CORE) {
        cache_key = 'core/' + path
    } else if (mod_resolve.scope < 900 && mod_resolve.path) {
        real_path = fd.realpath(mod_resolve.path)
        if (real_path) {
            real_info = Shop.file_info(real_path)
            if (real_info.package && real_info.name)
                cache_key = real_info.package + '/' + real_info.name
            else
                cache_key = real_path
        }
    }

    // Fallback: derive the key from the resolution scope alone
    // (e.g. C-only modules with no .cm file on disk).
    if (!cache_key) {
        if (min_scope == SCOPE_CORE)
            cache_key = 'core/' + path
        else if (min_scope == SCOPE_LOCAL && package_context)
            cache_key = package_context + '/' + path
        else if (min_scope == SCOPE_PACKAGE) {
            pkg_alias = get_import_package(path)
            if (pkg_alias) {
                canon_pkg = get_canonical_package(pkg_alias, package_context)
                if (canon_pkg) {
                    mod_name = get_import_name(path)
                    cache_key = canon_pkg + '/' + mod_name
                } else
                    cache_key = path
            } else
                cache_key = path
        } else
            cache_key = path
    }

    var info = {
        cache_key: cache_key,
        c_resolve: c_resolve,
        mod_resolve: mod_resolve,
        min_scope: min_scope
    }

    module_info_cache[lookup_key] = info
    return info
}
|
|
|
|
// Map a use() path + package context to its canonical use_cache key,
// or null when the module cannot be resolved at all.
function get_module_cache_key(path, package_context) {
    var resolved = resolve_module_info(path, package_context)
    if (!resolved) return null
    return resolved.cache_key
}
|
|
|
|
// True when the module already has a live entry in use_cache.
Shop.is_loaded = function is_loaded(path, package_context) {
    var key = get_module_cache_key(path, package_context)
    if (use_cache[key] == null) return false
    return true
}
|
|
|
|
// Create a use function bound to a specific package context
|
|
// Build a use() closure permanently bound to one package context,
// so modules always resolve imports relative to their own package.
function make_use_fn(pkg) {
    var bound_use = function(path) {
        return Shop.use(path, pkg)
    }
    return bound_use
}
|
|
|
|
// Call a C module loader and execute the entrypoint
|
|
// Invoke the resolver's loader thunk and return the C module's value.
function call_c_module(c_resolve) {
    var mod = c_resolve.symbol()
    // if (is_function(mod))
    // return mod()
    // NOTE(review): the commented lines above suggest function-valued modules
    // were once auto-invoked; currently the raw value is returned unchanged.
    return mod
}
|
|
|
|
// Execute a resolved module descriptor (from resolve_module_info).
// Prefers the script resolution (native dylib descriptor or compiled mach
// bytecode); falls back to a pure C module; disrupts when neither resolver
// produced a result or the module evaluates to null.
// Returns the module's value.
function execute_module(info)
{
    var c_resolve = info.c_resolve
    var mod_resolve = info.mod_resolve

    var used = null
    var file_info = null
    var inject = null
    var env = null
    var pkg = null

    if (mod_resolve.scope < 900) {
        // Build env with runtime fns, capabilities, and a use() bound to the
        // module's own package. (Shared by both the native and bytecode paths.)
        file_info = Shop.file_info(mod_resolve.path)
        inject = Shop.script_inject_for(file_info)
        env = inject_env(inject)
        pkg = file_info.package
        env.use = make_use_fn(pkg)
        env = stone(env)

        // A native dylib resolution is a descriptor carrying _handle and _sym.
        if (is_object(mod_resolve.symbol) && mod_resolve.symbol._native) {
            used = os.native_module_load_named(
                mod_resolve.symbol._handle, mod_resolve.symbol._sym, env)
        } else {
            // Load compiled bytecode with env
            used = mach_load(mod_resolve.symbol, env)
        }
    } else if (c_resolve.scope < 900) {
        // C only
        used = call_c_module(c_resolve)
    } else {
        log.shop(`Module could not be found (c_resolve scope=${info.c_resolve.scope}, mod_resolve scope=${info.mod_resolve.scope}, cache_key=${info.cache_key})`); disrupt
    }

    // Fixed: previously interpolated the info object itself, which prints an
    // unreadable value; report the module's cache_key instead.
    if (!used) { log.error(`Module ${info.cache_key} returned null`); disrupt }

    return used
}
|
|
|
|
// Resolve and execute a module without consulting or populating use_cache.
// On failure, emits the same diagnostics as Shop.use (uncompiled C source,
// unresolvable alias) and disrupts.
function get_module(path, package_context) {
    var info = resolve_module_info(path, package_context)
    var _ctx_dir = null
    var _alias = null

    if (!info) {
        log.shop(`Module '${path}' could not be found in package '${package_context}'`)
        // Absolute contexts are used as-is; otherwise map through the packages dir.
        _ctx_dir = package_context ? (starts_with(package_context, '/') ? package_context : get_packages_dir() + '/' + fd.safe_package_path(package_context)) : null
        if (_ctx_dir) {
            // Hint when a C source exists but was never built into a dylib.
            if (fd.is_file(_ctx_dir + '/' + path + '.c') || fd.is_file(_ctx_dir + '/' + path + '.cpp'))
                log.shop(`C source exists at ${_ctx_dir}/${path}.c but was not compiled - run 'cell build'`)
        }
        _alias = pkg_tools.split_alias(package_context, path)
        if (_alias == null && search(path, '/') != null)
            log.shop(`Alias '${array(path, '/')[0]}' could not be resolved in package '${package_context}'`)
        disrupt
    }

    return execute_module(info)
}
|
|
|
|
// Public module loader. Resolution order: embedded modules (static builds),
// then the regular resolver with per-cache_key memoization in use_cache.
// Maintains use_stack so failures can print the full use() chain.
// Disrupts on a non-text path or an unresolvable module.
Shop.use = function use(path, _pkg_ctx) {
    if (!is_text(path)) {
        log.error("use() expects a text module path, but received a non-text value")
        disrupt
    }
    var package_context = is_core_dir(_pkg_ctx) ? 'core' : _pkg_ctx
    // Check for embedded module (static builds)
    // NOTE(review): the embedded cache key is path-only — two packages using
    // the same path share one embedded entry; confirm this is intended.
    var embed_key = 'embedded:' + path
    var embedded = null
    var embed_env = null
    if (use_cache[embed_key]) return use_cache[embed_key]
    if (os.embedded_module) {
        embedded = os.embedded_module(path)
        if (embedded) {
            embed_env = inject_env(SHOP_DEFAULT_INJECT)
            embed_env.use = make_use_fn(package_context)
            embed_env = stone(embed_env)
            use_cache[embed_key] = mach_load(embedded, embed_env)
            return use_cache[embed_key]
        }
    }

    var info = resolve_module_info(path, package_context)
    var _ctx_dir2 = null
    var _alias2 = null
    var _use_entry = path + ' (package: ' + package_context + ')'
    var _chain = null
    if (!info) {
        // Diagnostics: uncompiled C source, broken alias, and the use() chain.
        log.shop(`Module '${path}' could not be found in package '${package_context}'`)
        _ctx_dir2 = package_context ? (starts_with(package_context, '/') ? package_context : get_packages_dir() + '/' + fd.safe_package_path(package_context)) : null
        if (_ctx_dir2) {
            if (fd.is_file(_ctx_dir2 + '/' + path + '.c') || fd.is_file(_ctx_dir2 + '/' + path + '.cpp'))
                log.shop(`C source exists at ${_ctx_dir2}/${path}.c but was not compiled - run 'cell build'`)
        }
        _alias2 = pkg_tools.split_alias(package_context, path)
        if (_alias2 == null && search(path, '/') != null)
            log.shop(`Alias '${array(path, '/')[0]}' could not be resolved in package '${package_context}'`)
        if (length(use_stack) > 0) {
            _chain = 'use() chain:'
            arrfor(use_stack, function(frame) { _chain = _chain + '\n -> ' + frame })
            _chain = _chain + '\n -> ' + path + ' [NOT FOUND]'
            log.error(_chain)
        }
        disrupt
    }

    if (use_cache[info.cache_key])
        return use_cache[info.cache_key]

    push(use_stack, _use_entry)
    var _use_result = null
    var _use_ok = false
    // Pop the stack frame even when execution disrupts, then re-disrupt.
    var _load = function() {
        _use_result = execute_module(info)
        _use_ok = true
    } disruption {
        pop(use_stack)
        disrupt
    }
    _load()
    pop(use_stack)
    use_cache[info.cache_key] = _use_result
    return _use_result
}
|
|
|
|
// Resolve a use() module path to a filesystem path without compiling.
|
|
// Returns the absolute path string, or null if not found.
|
|
// Resolve a use() module path to its filesystem path without compiling.
// Returns the absolute path string, or null when the module is not found.
Shop.resolve_use_path = function(path, ctx) {
    var resolved = resolve_path(path + '.cm', ctx)
    return resolved ? resolved.path : null
}
|
|
|
|
// Resolve a program (.ce) path using the unified resolver.
|
|
// Returns {path, scope, pkg} or null.
|
|
// If the path looks like a remote package locator and is not found locally,
|
|
// attempts to auto-fetch and install it.
|
|
// Resolve a program (.ce) path using the unified resolver.
// Returns {path, scope, pkg} or null. When the program is not found locally
// and looks like a remote package locator, auto-fetches + installs the
// package (and its dependencies) and retries once.
Shop.resolve_program = function(prog, package_context) {
    var info = resolve_path(prog + '.ce', package_context)
    if (info) return info

    // Find best matching package from lock or infer from path
    var lock = Shop.load_lock()
    var best_pkg = null
    var best_remainder = null
    var parts = array(prog, '/')
    var candidate = null
    var pkg_info = null
    var _auto = null
    // Longest-prefix match against locked package names.
    arrfor(array(lock), function(pkg_name) {
        if (starts_with(prog, pkg_name + '/')) {
            if (!best_pkg || length(pkg_name) > length(best_pkg)) {
                best_pkg = pkg_name
                best_remainder = text(prog, length(pkg_name) + 1)
            }
        }
    })

    // If not in lock, try gitea-style 3-component package (host/owner/repo)
    if (!best_pkg && length(parts) > 3) {
        candidate = text(array(parts, 0, 3), '/')
        pkg_info = Shop.resolve_package_info(candidate)
        if (pkg_info && pkg_info != 'local') {
            best_pkg = candidate
            best_remainder = text(array(parts, 3), '/')
        }
    }

    if (!best_pkg || !best_remainder) return null

    // Auto-install the package and all its dependencies
    log.console('fetching ' + best_pkg + '...')
    // Swallow install failure and report "not found" via the null return.
    _auto = function() {
        Shop.sync_with_deps(best_pkg)
    } disruption {
        return null
    }
    _auto()

    info = resolve_path(prog + '.ce', package_context)
    return info
}
|
|
|
|
// Resolve a use() module path to {resolved_path, package, type} without compiling.
|
|
// type is 'script', 'native', or null. Checks .cm files, C symbols, and aliases.
|
|
// Resolve a use() module path to {resolved_path, package, type} without
// compiling. Scripts (.cm) take precedence over native C symbols; returns
// null when neither form resolves.
Shop.resolve_import_info = function(path, ctx) {
    var script = resolve_path(path + '.cm', ctx)
    if (script)
        return {resolved_path: script.path, package: script.pkg, type: 'script'}

    var native = resolve_c_symbol(path, ctx)
    if (!native || native.scope >= 900)
        return null

    // C resolutions may omit the package; infer it from the scope.
    var owner = native.package
    if (!owner)
        owner = (native.scope == SCOPE_CORE) ? 'core' : ctx
    return {resolved_path: null, package: owner, type: 'native'}
}
|
|
|
|
// Get cache path for a package and commit
|
|
// Filesystem path of the cached zip for a package at a given commit.
// '@' and '/' in the package name are flattened to '_' for the filename.
function get_cache_path(pkg, commit) {
    var safe_name = replace(replace(pkg, '@', '_'), '/', '_')
    return global_shop_path + '/cache/' + safe_name + '_' + commit + '.zip'
}
|
|
|
|
function get_package_abs_dir(package)
|
|
{
|
|
return get_packages_dir() + '/' + fd.safe_package_path(package)
|
|
}
|
|
|
|
// Fetch the latest commit hash from remote for a package
|
|
// Returns null for local packages or if fetch fails
|
|
// Fetch the latest commit hash from remote for a package.
// Returns null for local packages (no API URL) or when the HTTP fetch /
// hash extraction disrupts.
function fetch_remote_hash(pkg) {
    var api_url = Shop.get_api_url(pkg)

    if (!api_url) return null

    // Network errors are converted into a null result rather than propagated.
    var _fetch_hash = function() {
        var resp = http.fetch(api_url)
        return Shop.extract_commit_hash(pkg, text(resp))
    } disruption {
        return null
    }
    return _fetch_hash()
}
|
|
|
|
// Download a zip for a package at a specific commit and cache it
|
|
// Returns the zip blob or null on failure
|
|
// Download a zip for a package at a specific commit and cache it on disk.
// Returns the zip blob or null on failure (missing URL or fetch/write error).
function download_zip(pkg, commit_hash) {
    var cache_path = get_cache_path(pkg, commit_hash)
    fd.ensure_dir(global_shop_path + '/cache')

    var download_url = Shop.get_download_url(pkg, commit_hash)
    if (!download_url) {
        log.error("Could not determine download URL for " + pkg)
        return null
    }

    // Fetch + cache; any disruption (network, disk) yields null.
    var _download = function() {
        var zip_blob = http.fetch(download_url)
        fd.slurpwrite(cache_path, zip_blob)
        return zip_blob
    } disruption {
        return null
    }
    return _download()
}
|
|
|
|
// Get zip from cache, returns null if not cached
|
|
// Read a package zip from the on-disk cache; null when not cached.
function get_cached_zip(pkg, commit_hash) {
    var zip_path = get_cache_path(pkg, commit_hash)
    if (!fd.is_file(zip_path))
        return null
    return fd.slurp(zip_path)
}
|
|
|
|
// Fetch: Ensure the zip on disk matches what's in the lock file
|
|
// For local packages, this is a no-op
|
|
// For remote packages, downloads the zip if not present or hash mismatch
|
|
// Returns: { status: 'local'|'cached'|'downloaded'|'error', message: string }
|
|
// Fetch: Ensure the zip on disk matches what's in the lock file.
// Local packages: no-op. Remote packages: verify the cached zip's blake2
// hash against the lock entry, re-downloading on mismatch or absence.
// Returns { status: 'local'|'cached'|'downloaded'|'error', message? }.
Shop.fetch = function(pkg) {
    var lock = Shop.load_lock()
    var lock_entry = lock[pkg]
    var info = Shop.resolve_package_info(pkg)

    if (info == 'local') {
        return { status: 'local' }
    }

    // No lock entry - can't fetch without knowing what commit
    if (!lock_entry || !lock_entry.commit) {
        return { status: 'error', message: "No lock entry for " + pkg + " - run update first" }
    }

    var commit = lock_entry.commit
    var expected_hash = lock_entry.zip_hash

    // Check if we have the zip cached
    var zip_blob = get_cached_zip(pkg, commit)

    var actual_hash = null
    if (zip_blob) {
        // If we have a hash on record, verify it
        if (expected_hash) {
            actual_hash = text(crypto.blake2(zip_blob), 'h')
            if (actual_hash == expected_hash) {
                return { status: 'cached' }
            }
            log.shop("Zip hash mismatch for " + pkg + ", re-fetching...")
        } else {
            // No hash stored yet - compute and store it (first run after lock
            // entries were created without hashes).
            actual_hash = text(crypto.blake2(zip_blob), 'h')
            lock_entry.zip_hash = actual_hash
            Shop.save_lock(lock)
            return { status: 'cached' }
        }
    }

    // Download the zip
    var new_zip = download_zip(pkg, commit)
    if (!new_zip) {
        return { status: 'error', message: "Failed to download " + pkg }
    }

    // Store the hash
    var new_hash = text(crypto.blake2(new_zip), 'h')
    lock_entry.zip_hash = new_hash
    Shop.save_lock(lock)

    return { status: 'downloaded' }
}
|
|
|
|
// Extract: Extract a package to its target directory
|
|
// For linked packages, creates a symlink to the link target
|
|
// For local packages, creates a symlink to the local path
|
|
// For remote packages, extracts from the provided zip blob
|
|
// Returns true on success
|
|
// Extract: Extract a package to its target directory.
// Linked packages: symlink to the link target. Local packages: symlink to
// the local path. Remote packages: unzip the cached/downloaded archive and
// stamp .cell_commit so re-extraction is skipped at the same commit.
// Returns true on success; disrupts when no zip is available.
Shop.extract = function(pkg) {
    var target_dir = get_package_abs_dir(pkg)

    // Check if this package is linked
    var link_target = link.get_target(pkg)
    if (link_target) {
        // Use the link - create symlink to link target
        link.sync_one(pkg, link_target)
        return true
    }

    var info = Shop.resolve_package_info(pkg)

    if (info == 'local') {
        // Replace whatever is at the target with a symlink to the local path.
        if (fd.is_link(target_dir))
            fd.unlink(target_dir)
        if (fd.is_dir(target_dir))
            fd.rmdir(target_dir)
        fd.symlink(pkg, target_dir)

        return true
    }

    // Check if already extracted at correct commit
    var lock = Shop.load_lock()
    var lock_entry = lock[pkg]
    var extracted_commit_file = null
    var extracted_commit = null
    if (lock_entry && lock_entry.commit) {
        extracted_commit_file = target_dir + '/.cell_commit'
        if (fd.is_file(extracted_commit_file)) {
            extracted_commit = trim(text(fd.slurp(extracted_commit_file)))
            if (extracted_commit == lock_entry.commit) {
                // Already extracted at this commit, skip
                return true
            }
        }
    }

    var zip_blob = get_package_zip(pkg)

    if (!zip_blob) {
        log.error("No zip blob available for " + pkg)
        disrupt
    }

    // Extract zip for remote package
    install_zip(zip_blob, target_dir)

    // Write marker file with the extracted commit
    if (lock_entry && lock_entry.commit) {
        fd.slurpwrite(target_dir + '/.cell_commit', stone(blob(lock_entry.commit)))
    }

    return true
}
|
|
|
|
// Obtain the zip blob for a locked package: cache first, then download.
// Returns null when the package has no lock entry with a commit.
function get_package_zip(pkg)
{
    var lock = Shop.load_lock()
    var entry = lock[pkg]

    if (!entry || !entry.commit)
        return null

    var hit = get_cached_zip(pkg, entry.commit)
    if (hit)
        return hit

    // Cache miss: fetch from the remote.
    return download_zip(pkg, entry.commit)
}
|
|
|
|
// Update: Check for new version, update lock, fetch and extract
|
|
// Returns the new lock entry if updated, null if already up to date or failed
|
|
// Update: Check for a new version of a package and refresh the lock file.
// Local packages always get a fresh {type:'local'} lock entry.
// Remote packages compare the locked commit against the remote HEAD.
// Returns the new lock entry, or null when up to date / invalid / unreachable.
Shop.update = function(pkg) {
    Shop.verify_package_name(pkg)
    var lock = Shop.load_lock()
    var lock_entry = lock[pkg]
    var info = Shop.resolve_package_info(pkg)

    if (!info) {
        log.error("Not a valid package locator: " + pkg)
        return null
    }

    log.shop(`checking ${pkg}`)

    var new_entry = null
    if (info == 'local') {
        // Check if local path exists
        if (!fd.is_dir(pkg)) {
            log.shop(` Local path does not exist: ${pkg}`)
            return null
        }
        // Local packages always get a lock entry
        new_entry = {
            type: 'local',
            updated: time.number()
        }
        lock[pkg] = new_entry
        Shop.save_lock(lock)
        return new_entry
    }

    var local_commit = lock_entry ? lock_entry.commit : null
    var remote_commit = fetch_remote_hash(pkg)

    log.shop(`local commit: ${local_commit}`)
    log.shop(`remote commit: ${remote_commit}`)

    if (!remote_commit) {
        log.error("Could not resolve commit for " + pkg)
        return null
    }

    // Already at the remote commit: nothing to do.
    if (local_commit == remote_commit)
        return null

    new_entry = {
        type: info,
        commit: remote_commit,
        updated: time.number()
    }

    lock[pkg] = new_entry
    Shop.save_lock(lock)

    return new_entry
}
|
|
|
|
// Sync a package: ensure it's in lock, fetched, extracted, and compiled
|
|
// opts.refresh - check remote for updates even if lock entry exists
|
|
// opts.no_build - skip C module build step
|
|
// opts.target - explicit build target (auto-detected if not provided)
|
|
// opts.buildtype - 'release'|'debug'|'minsize' (default 'release')
|
|
// Sync one package through the full pipeline: lock -> fetch -> extract ->
// compile scripts -> build C modules. See the option comments above.
Shop.sync = function(pkg, opts) {
    var lock = Shop.load_lock()
    var info = Shop.resolve_package_info(pkg)
    var build_mod = null
    var target = null
    var _build_c = null

    // Step 1: Ensure lock entry (update if refresh or not in lock)
    if ((opts && opts.refresh) || !lock[pkg])
        Shop.update(pkg)

    // Step 2: Fetch zip (no-op for local packages)
    if (info && info != 'local')
        Shop.fetch(pkg)

    // Step 3: Extract to packages dir
    Shop.extract(pkg)

    // Step 4: Compile scripts
    Shop.build_package_scripts(pkg)

    // Step 5: Build C modules
    if (!opts || !opts.no_build) {
        // Requires the core/build module to already be loaded; silently
        // skipped otherwise.
        build_mod = use_cache['core/build']
        if (build_mod) {
            target = (opts && opts.target) ? opts.target : build_mod.detect_host_target()
            _build_c = function() {
                build_mod.build_dynamic(pkg, target, (opts && opts.buildtype) ? opts.buildtype : 'release')
            } disruption {
                // Not all packages have C code
            }
            _build_c()
        }
    }
}
|
|
|
|
// Sync a package and all its dependencies (BFS)
|
|
// Sync a package and all its transitive dependencies (BFS over the
// dependency graph, deduplicated via `visited`). Dependency-file read
// failures are treated as "no dependencies".
Shop.sync_with_deps = function(pkg, opts) {
    var visited = {}
    var queue = [pkg]
    var qi = 0
    var current = null
    var deps = null
    var dep_locator = null
    var _read_deps = null

    while (qi < length(queue)) {
        current = queue[qi]
        qi = qi + 1
        if (visited[current]) continue
        visited[current] = true

        Shop.sync(current, opts)

        _read_deps = function() {
            deps = pkg_tools.dependencies(current)
        } disruption {
            deps = null
        }
        _read_deps()

        if (deps) {
            // NOTE(review): dep_locator is a single variable shared across the
            // arrfor callback's iterations (declared in the enclosing scope) —
            // fine for this sequential loop, but fragile if made async.
            arrfor(array(deps), function(alias) {
                dep_locator = deps[alias]
                if (!visited[dep_locator])
                    push(queue, dep_locator)
            })
        }
    }
}
|
|
|
|
// Unpack a package zip into target_dir, replacing any existing symlink or
// directory. The archive's single top-level folder is stripped: entries
// without a '/' (or with nothing after it) are skipped, and the remainder
// after the first '/' becomes the path under target_dir.
function install_zip(zip_blob, target_dir) {
    var zip = miniz.read(zip_blob)
    if (!zip) { log.error("Failed to read zip archive"); disrupt }

    if (fd.is_link(target_dir)) fd.unlink(target_dir)
    if (fd.is_dir(target_dir)) fd.rmdir(target_dir, 1)

    log.shop("Extracting to " + target_dir)
    fd.ensure_dir(target_dir)

    var count = zip.count()
    // Memoize directory creation so we don't hit the filesystem per entry.
    var created_dirs = {}

    var i = 0
    var filename = null
    var slash_pos = null
    var rel_path = null
    var full_path = null
    var dir_path = null
    var file_data = null
    for (i = 0; i < count; i++) {
        if (zip.is_directory(i)) continue
        filename = zip.get_filename(i)
        slash_pos = search(filename, '/')
        if (slash_pos == null) continue
        if (slash_pos + 1 >= length(filename)) continue
        rel_path = text(filename, slash_pos + 1)
        full_path = target_dir + '/' + rel_path
        dir_path = fd.dirname(full_path)

        if (!created_dirs[dir_path]) {
            fd.ensure_dir(dir_path)
            created_dirs[dir_path] = true
        }
        file_data = zip.slurp(filename)

        // Freeze the blob before writing it out.
        stone(file_data)

        fd.slurpwrite(full_path, file_data)
    }
}
|
|
|
|
// High-level: Remove a package from the shop
|
|
// High-level: Remove a package from the shop — drops its lock entry,
// deletes its symlink or directory, and invalidates the dylib cache.
// Always returns true.
Shop.remove = function(pkg) {
    // Remove from lock
    var lock = Shop.load_lock()
    if (lock[pkg]) {
        delete lock[pkg]
        Shop.save_lock(lock)
    }

    // Remove package symlink/directory
    var pkg_dir = get_packages_dir() + '/' + fd.safe_package_path(pkg)
    if (fd.is_link(pkg_dir)) {
        fd.unlink(pkg_dir)
    } else if (fd.is_dir(pkg_dir)) {
        // Recursive removal (second argument enables recursion).
        fd.rmdir(pkg_dir, 1)
    }

    // Invalidate package dylib cache
    package_dylibs[pkg] = null

    log.console("Removed " + pkg)
    return true
}
|
|
|
|
// Compile a module
|
|
// List all files in a package
|
|
|
|
// Hot-reload the module backing a changed file; ignores non-module files.
Shop.file_reload = function(file)
{
    var meta = Shop.file_info(file)
    if (!meta.is_module) return
    Shop.module_reload(meta.name, meta.package)
}
|
|
|
|
// Hot-reload a loaded module: invalidate its resolution caches, re-execute
// it, and patch the NEW exports into the OLD module object in place so
// existing references see the updated members. No-op if not loaded.
Shop.module_reload = function(path, package) {
    if (!Shop.is_loaded(path,package)) return

    // Clear the module info cache for this path
    var lookup_key = package ? package + ':' + path : ':' + path
    module_info_cache[lookup_key] = null

    // Invalidate package dylib cache so next resolve triggers rebuild
    if (package) {
        package_dylibs[package] = null
    }

    var info = resolve_module_info(path, package)
    if (!info) return

    var cache_key = info.cache_key
    var old = use_cache[cache_key]
    // Temporarily clear so get_module performs a fresh load.
    use_cache[cache_key] = null

    var newmod = get_module(path, package)
    use_cache[cache_key] = newmod

    if (old && is_object(old) && is_object(newmod)) {
        // Copy new members over, then null out members that disappeared.
        // NOTE(review): removed keys are set to null rather than deleted —
        // they remain enumerable on the old object; confirm intended.
        arrfor(array(newmod), function(k) { old[k] = newmod[k] })
        arrfor(array(old), function(k) {
            if (!(k in newmod)) old[k] = null
        })
        // Keep the original object identity in the cache.
        use_cache[cache_key] = old
    }
}
|
|
|
|
function get_package_scripts(package)
|
|
{
|
|
var files = pkg_tools.list_files(package)
|
|
var scripts = []
|
|
|
|
var i = 0
|
|
var file = null
|
|
for (i = 0; i < length(files); i++) {
|
|
file = files[i]
|
|
if (ends_with(file, '.cm') || ends_with(file, '.ce')) {
|
|
push(scripts, file)
|
|
}
|
|
}
|
|
|
|
return scripts
|
|
}
|
|
|
|
// Extract use() call arguments from source text.
|
|
// Returns an array of literal string arguments found in use('...') calls.
|
|
// Extract use() call arguments from source text.
// Returns an array of literal string arguments found in use('...') / use("...")
// calls. Non-literal calls (e.g. use(var)) are skipped.
//
// Fixed: the previous scanner assumed a quote character without checking it,
// searched for the closing quote of the WRONG kind, and advanced its index
// with `idx + (source.length - (source.length - idx))` — which is just
// `2 * idx` — so it walked the source incorrectly. This version uses the
// same consume-as-you-go scan as Shop.audit_use_resolution.
function extract_use_calls(source) {
    var uses = []
    var rest = source
    var ui = null
    var quote = null
    var end = null
    var arg = null
    while (length(rest) > 0) {
        ui = search(rest, "use(")
        if (ui == null) break
        // Consume everything up to and including "use(".
        rest = text(rest, ui + 4)
        if (length(rest) == 0) break
        quote = text(rest, 0, 1)
        // Only string-literal arguments count; otherwise keep scanning.
        if (quote != "'" && quote != '"') continue
        rest = text(rest, 1)
        end = search(rest, quote)
        // Unterminated literal: nothing more to extract.
        if (end == null) break
        arg = text(rest, 0, end)
        if (length(arg) > 0) push(uses, arg)
        rest = text(rest, end + 1)
    }
    return uses
}
|
|
|
|
// Compile every .ce and .cm file in a package via resolve_mod_fn.
// Continues past individual failures; returns {ok, errors, total} where
// errors lists the scripts that failed to compile.
Shop.build_package_scripts = function(package)
{
    // compiles all .ce and .cm files in a package
    // continues past failures and returns results
    var scripts = get_package_scripts(package)
    // Absolute contexts are used directly; names map through the packages dir.
    var pkg_dir = starts_with(package, '/') ? package : get_package_abs_dir(package)
    var errors = []
    var ok = 0

    arrfor(scripts, function(script, i) {
        var _try = function() {
            resolve_mod_fn(pkg_dir + '/' + script, package)
            ok = ok + 1
        } disruption {
            push(errors, script)
            log.console(" compile error: " + package + '/' + script)
        }
        _try()
    })

    return {ok: ok, errors: errors, total: length(scripts)}
}
|
|
|
|
// Check if all use() calls in a package's scripts can be resolved.
|
|
// Returns {ok, unresolved: [{script, module}], total}
|
|
// Check if all use() calls in a package's scripts can be resolved.
// Scans each script's text for use('...')/use("...") literals and attempts
// resolution in the package's context; per-script and per-module failures
// are swallowed so the audit always completes.
// Returns {ok: scripts_checked, unresolved: [{script, module}], total}.
Shop.audit_use_resolution = function(package) {
    var scripts = get_package_scripts(package)
    var pkg_dir = starts_with(package, '/') ? package : get_package_abs_dir(package)
    var unresolved = []
    var checked = 0
    var src = null
    var content = null
    var uses = null
    var info = null

    arrfor(scripts, function(script) {
        var _check = function() {
            src = pkg_dir + '/' + script
            if (!fd.is_file(src)) return
            content = text(fd.slurp(src))
            if (!content || length(content) == 0) return

            // Simple regex-free extraction: find use(' and use(" patterns
            uses = []
            var pos = 0
            var rest = content
            var ui = null
            var quote = null
            var end = null
            var arg = null
            // Consume the text left-to-right: cut at each "use(", require a
            // quote, then cut at the matching closing quote.
            while (length(rest) > 0) {
                ui = search(rest, "use(")
                if (ui == null) break
                rest = text(rest, ui + 4)
                if (length(rest) == 0) break
                quote = text(rest, 0, 1)
                if (quote != "'" && quote != '"') continue
                rest = text(rest, 1)
                end = search(rest, quote)
                if (end == null) continue
                arg = text(rest, 0, end)
                if (length(arg) > 0) push(uses, arg)
                rest = text(rest, end + 1)
            }

            arrfor(uses, function(mod) {
                var _resolve = function() {
                    info = resolve_module_info(mod, package)
                    if (!info) push(unresolved, {script: script, module: mod})
                } disruption {}
                _resolve()
            })
            checked = checked + 1
        } disruption {}
        _check()
    })

    return {ok: checked, unresolved: unresolved, total: length(scripts)}
}
|
|
|
|
// Re-export the script lister on the public Shop API surface.
Shop.get_package_scripts = get_package_scripts
|
|
|
|
// All package names currently recorded in the lock file.
Shop.list_packages = function()
{
    var lock = Shop.load_lock()
    var names = array(lock)
    return names
}
|
|
|
|
// Get the lib directory for dynamic libraries
|
|
// Get the lib directory for dynamic libraries
Shop.get_lib_dir = function() {
    var lib_dir = global_shop_path + '/lib'
    return lib_dir
}
|
|
|
|
// Re-export filesystem/zip/dylib helpers on the public Shop API surface.
Shop.ensure_dir = fd.ensure_dir
Shop.install_zip = install_zip
Shop.ensure_package_dylibs = ensure_package_dylibs
|
|
|
|
// Directory for locally developed packages under the shop root.
Shop.get_local_dir = function() {
    var local_dir = global_shop_path + '/local'
    return local_dir
}
|
|
|
|
// Get the build cache directory
|
|
// Get the build cache directory
Shop.get_build_dir = function() {
    var build_dir = global_shop_path + '/build'
    return build_dir
}
|
|
|
|
// Get the absolute path for a package
|
|
// Get the absolute path for a package.
// Delegates to get_package_abs_dir so the packages-dir join logic lives in
// exactly one place (previously this duplicated the same expression inline).
Shop.get_package_dir = function(pkg) {
    return get_package_abs_dir(pkg)
}
|
|
|
|
// Generate C symbol name for a file within a package
|
|
// e.g., c_symbol_for_file('gitea.pockle.world/john/prosperon', 'sprite.c')
|
|
// -> 'js_gitea_pockle_world_john_prosperon_sprite_use'
|
|
// Generate the C symbol name for a file within a package.
// e.g. c_symbol_for_file('gitea.pockle.world/john/prosperon', 'sprite.c')
// -> 'js_gitea_pockle_world_john_prosperon_sprite_use'
// Programs (.ce) get the '_program' suffix; everything else gets '_use'.
Shop.c_symbol_for_file = function(pkg, file) {
    var suffix = '_use'
    if (ends_with(file, '.ce'))
        suffix = '_program'
    return 'js_' + safe_c_name(pkg) + '_' + safe_c_name(fd.stem(file)) + suffix
}
|
|
|
|
// Generate C symbol prefix for a package
|
|
// e.g., c_symbol_prefix('core') -> 'js_core_'
|
|
// Generate the C symbol prefix for a package.
// e.g. c_symbol_prefix('core') -> 'js_core_'
Shop.c_symbol_prefix = function(pkg) {
    var safe = safe_c_name(pkg)
    return 'js_' + safe + '_'
}
|
|
|
|
// Get the library directory name for a package
|
|
// Library directory name for a package (filesystem-safe package path).
Shop.lib_name_for_package = function(pkg) {
    var dir_name = fd.safe_package_path(pkg)
    return dir_name
}
|
|
|
|
// Load a module explicitly as mach bytecode, bypassing dylib resolution.
|
|
// Returns the loaded module value. Disrupts if the module cannot be found.
|
|
// Load a module explicitly as mach bytecode, bypassing dylib resolution.
// Three-tier cache keyed by content hash: (1) compiled mach blob,
// (2) cached mcode JSON recompiled to mach, (3) full compile from source
// (analyze -> mcode -> streamline), caching both intermediate and final
// artifacts. Returns the loaded module value; disrupts if not found or the
// compiler modules are not loaded.
Shop.load_as_mach = function(path, pkg) {
    var locator = resolve_locator(path + '.cm', pkg)
    var file_path = null
    var content = null
    var content_key = null
    var cached = null
    var cached_mcode_path = null
    var mcode_json = null
    var compiled = null
    var ast = null
    var ir = null
    var optimized = null
    var file_info = null
    var inject = null
    var env = null

    if (!locator) { log.error('Module ' + path + ' not found'); disrupt }

    file_path = locator.path
    content = text(fd.slurp(file_path))
    // Frozen blob of the source text; used as the cache hash key.
    content_key = stone(blob(content))

    // Try cached mach blob
    cached = pull_from_cache(content_key)
    if (cached) compiled = cached

    // Try cached mcode -> compile to mach
    if (!compiled) {
        cached_mcode_path = hash_path(content_key, 'mcode')
        if (fd.is_file(cached_mcode_path)) {
            mcode_json = text(fd.slurp(cached_mcode_path))
            compiled = mach_compile_mcode_bin(file_path, mcode_json)
            put_into_cache(content_key, compiled)
        }
    }

    // Full compile from source
    if (!compiled) {
        // Compiler modules must already be resident in use_cache.
        if (!_mcode_mod) _mcode_mod = use_cache['core/mcode'] || use_cache['mcode']
        if (!_streamline_mod) _streamline_mod = use_cache['core/streamline'] || use_cache['streamline']
        if (!_mcode_mod || !_streamline_mod) {
            log.error('error: compiler modules not loaded')
            disrupt
        }
        ast = analyze(content, file_path)
        ir = _mcode_mod(ast)
        optimized = _streamline_mod(ir)
        mcode_json = shop_json.encode(optimized)
        cached_mcode_path = hash_path(content_key, 'mcode')
        fd.ensure_dir(global_shop_path + '/build')
        fd.slurpwrite(cached_mcode_path, stone(blob(mcode_json)))
        compiled = mach_compile_mcode_bin(file_path, mcode_json)
        put_into_cache(content_key, compiled)
    }

    // Load the mach blob with proper env
    file_info = Shop.file_info(file_path)
    inject = Shop.script_inject_for(file_info)
    env = inject_env(inject)
    env.use = make_use_fn(file_info.package)
    env = stone(env)
    return mach_load(compiled, env)
}
|
|
|
|
// Load a module explicitly as a native dylib, bypassing mach resolution.
|
|
// Returns the loaded module value, or null if no dylib exists.
|
|
Shop.load_as_dylib = function(path, pkg) {
    var locator = resolve_locator(path + '.cm', pkg)
    if (!locator) { log.error('Module ' + path + ' not found'); disrupt }

    var file_path = locator.path

    // Determine the owning package when the caller did not supply one.
    var file_info = null
    var owner = pkg
    if (!owner) {
        file_info = Shop.file_info(file_path)
        owner = file_info.package
    }
    if (!owner) return null

    // Only files living under the package directory can have a dylib.
    var pkg_dir = get_packages_dir() + '/' + fd.safe_package_path(owner)
    if (!starts_with(file_path, pkg_dir + '/')) return null

    // Path relative to the package root, used to locate the native module.
    var stem = text(file_path, length(pkg_dir) + 1)
    var native = try_native_mod_dylib(owner, stem)
    if (!native) return null

    // Assemble the injection env and hand off to the native loader.
    if (!file_info) file_info = Shop.file_info(file_path)
    var env = inject_env(Shop.script_inject_for(file_info))
    env.use = make_use_fn(owner)
    return os.native_module_load_named(native._handle, native._sym, stone(env))
}
|
|
|
|
Shop.audit_packages = function() {
    // Collect installed packages that are neither a local directory nor
    // resolvable to a remote hash. 'core' is always considered healthy.
    var packages = Shop.list_packages()
    var missing = []
    var i = 0
    var name = null

    while (i < length(packages)) {
        name = packages[i]
        if (name != 'core') {
            if (!fd.is_dir(name)) {
                if (!fetch_remote_hash(name))
                    push(missing, name)
            }
        }
        i = i + 1
    }

    return missing
}
|
|
|
|
// Parse a package locator and return info about it
|
|
// Returns { path: canonical_path, name: package_name, type: 'local'|'gitea'|null }
|
|
Shop.parse_package = function(locator) {
    if (!locator) return null

    // Drop any '@version' suffix before resolving.
    var base = locator
    if (search(locator, '@') != null)
        base = array(locator, '@')[0]

    var kind = Shop.resolve_package_info(base)
    if (!kind) return null

    // The package name is the last path component.
    var segments = array(base, '/')
    return {
        path: base,
        name: segments[length(segments) - 1],
        type: kind
    }
}
|
|
|
|
Shop.use_native = function(path, package_context) {
    // Canonicalize relative paths before touching the filesystem.
    var abs_path = path
    if (!starts_with(path, '/'))
        abs_path = fd.realpath(path)
    if (!fd.is_file(abs_path)) { log.error('File not found: ' + path); disrupt }

    var file_info = Shop.file_info(abs_path)
    var owner = file_info.package || package_context

    // Package-owned files get a predictable C entry symbol.
    var entry_sym = null
    if (owner)
        entry_sym = Shop.c_symbol_for_file(owner, fd.basename(abs_path))

    // Compile the source into a dylib via the core build module.
    var build = Shop.use('build', 'core')
    var dylib_path = build.compile_native(abs_path, null, null, owner)

    var handle = os.dylib_open(dylib_path)
    if (!handle) { log.error('Failed to open native dylib: ' + dylib_path); disrupt }

    // Runtime env: injected capabilities plus a package-scoped use().
    var env = inject_env(Shop.script_inject_for(file_info))
    env.use = make_use_fn(owner)
    env = stone(env)

    if (entry_sym)
        return os.native_module_load_named(handle, entry_sym, env)
    return os.native_module_load(handle, env)
}
|
|
|
|
return Shop
|