// cell/internal/shop.cm
// (viewer metadata from the original paste: 1905 lines, 51 KiB, shown as plaintext)

var toml = use('toml')
var json = use('json')
var fd = use('fd')
var http = use('http')
var miniz = use('miniz')
var time = use('time')
var crypto = use('crypto')
var blob = use('blob')
var pkg_tools = use('package')
var os = use('os')
var link = use('link')
// These come from env (via core_extras in engine.cm):
// analyze, run_ast_fn, core_json, use_cache, shop_path, actor_api, runtime_env,
// content_hash, cache_path, ensure_build_dir
// Local aliases for the env-provided globals used throughout this module.
var shop_json = core_json
var global_shop_path = shop_path
var my$_ = actor_api
// NOTE(review): `core` appears unused in this file; the canonical core
// package name lives in core_package below — confirm before removing.
var core = "core"
// Make a package name safe for use in C identifiers.
// Replaces /, ., -, @ with _ so the result is a valid C identifier fragment.
function safe_c_name(name) {
  var out = name
  var specials = ['/', '.', '-', '@']
  var i = 0
  for (i = 0; i < length(specials); i++) {
    out = replace(out, specials[i], '_')
  }
  return out
}
// Return the content-addressed build artifact for `content`, or nothing on a miss.
function pull_from_cache(content)
{
  var cache_file = hash_path(content)
  if (!fd.is_file(cache_file))
    return
  return fd.slurp(cache_file)
}
// Store `obj` in the content-addressed build cache keyed by `content`.
function put_into_cache(content, obj)
{
  fd.slurpwrite(hash_path(content), obj)
}
// Create `path` and any missing parent directories (mkdir -p semantics).
// NOTE(review): assumes fd.stat on a nonexistent path returns an object with
// a falsy isDirectory rather than disrupting — confirm against fd's contract.
function ensure_dir(path) {
if (fd.stat(path).isDirectory) return
// array(path, '/') splits the path into its components.
var parts = array(path, '/')
// Preserve a leading '/' so absolute paths rebuild as absolute.
var current = starts_with(path, '/') ? '/' : ''
var i = 0
for (i = 0; i < length(parts); i++) {
if (parts[i] == '') continue
current = current + parts[i] + '/'
if (!fd.stat(current).isDirectory) {
fd.mkdir(current)
}
}
}
// Content-addressed path inside the global shop's build directory.
function hash_path(content)
{
  return global_shop_path + '/build/' + content_hash(content)
}
var Shop = {}
// Resolution scopes, ordered by priority: lower numbers win when a module
// resolves in more than one place (see resolve_module_info).
var SCOPE_LOCAL = 0
var SCOPE_PACKAGE = 1
var SCOPE_CORE = 2
// Source extensions: .cm for modules, .ce for actors.
var MOD_EXT = '.cm'
var ACTOR_EXT = '.ce'
var dylib_ext = '.dylib' // Default extension
// Absolute directory of an installed package in the global shop.
Shop.get_package_dir = function(name) {
  var base = global_shop_path + '/packages/'
  return base + name
}
// Get the packages directory (in the global shop)
function get_packages_dir() {
  var root = global_shop_path
  return root + '/packages'
}
// Get the core directory (in the global shop)
var core_package = 'core'
Shop.get_core_dir = function() {
  var packages = get_packages_dir()
  return packages + '/' + core_package
}
// Get the links file path (in the global shop)
function get_links_path() {
  var root = global_shop_path
  return root + '/link.toml'
}
// Get the reports directory (in the global shop)
Shop.get_reports_dir = function() {
  var root = global_shop_path
  return root + '/reports'
}
// If `name` contains a '/', its first segment names a package; otherwise null.
function get_import_package(name) {
  var segments = array(name, '/')
  if (length(segments) < 2)
    return null
  return segments[0]
}
// Truthy only for paths under a package's private internal/ tree.
// Note: returns `path` itself (not false) when path is falsy — callers only
// use this in boolean position, so the distinction is not observable.
function is_internal_path(path)
{
return path && starts_with(path, 'internal/')
}
// Split an explicit package import like 'host.tld/user/repo/mod' into
// {package, path}. A path "looks explicit" if it is absolute or its first
// segment contains a dot (e.g. a hostname). Returns null when the path does
// not look explicit or no prefix resolves to a known package.
function split_explicit_package_import(path)
{
if (!path) return null
var parts = array(path, '/')
if (length(parts) < 2) return null
var looks_explicit = starts_with(path, '/') || (parts[0] && search(parts[0], '.') != null)
if (!looks_explicit) return null
// Find the longest prefix that is an installed package
var i = 0
var pkg_candidate = null
var mod_path = null
var candidate_dir = null
// Walk prefixes longest-first: parts[0..i) is the candidate package,
// parts[i..] the module path inside it.
for (i = length(parts) - 1; i >= 1; i--) {
pkg_candidate = text(array(parts, 0, i), '/')
mod_path = text(array(parts, i), '/')
if (!mod_path || length(mod_path) == 0) continue
candidate_dir = get_packages_dir() + '/' + safe_package_path(pkg_candidate)
// Accept the candidate if it is installed on disk, present in lock.toml,
// or at least resolvable (local path / known remote host).
if (fd.is_file(candidate_dir + '/cell.toml'))
return {package: pkg_candidate, path: mod_path}
if (package_in_shop(pkg_candidate))
return {package: pkg_candidate, path: mod_path}
if (Shop.resolve_package_info(pkg_candidate))
return {package: pkg_candidate, path: mod_path}
}
return null
}
function package_in_shop(package) {
var lock = Shop.load_lock()
return package in lock
}
// Map an absolute package directory back to its canonical package name.
// Disrupts if the directory has no cell.toml. Resolution order:
// core dir (or its symlink) -> shop packages prefix -> link origin ->
// lock.toml entry -> package name declared in cell.toml -> null.
function abs_path_to_package(package_dir)
{
if (!fd.is_file(package_dir + '/cell.toml')) {
print('Not a valid package directory (no cell.toml): ' + package_dir)
disrupt
}
var packages_prefix = get_packages_dir() + '/'
var core_dir = packages_prefix + core_package
// Check if this is the core package directory (or its symlink target)
if (package_dir == core_dir) {
return 'core'
}
// Also check if core_dir is a symlink pointing to package_dir
var core_target = null
if (fd.is_link(core_dir)) {
core_target = fd.readlink(core_dir)
if (core_target == package_dir || fd.realpath(core_dir) == package_dir) {
return 'core'
}
}
// Inside the shop: the name is the path relative to packages/.
if (starts_with(package_dir, packages_prefix))
return text(package_dir, length(packages_prefix))
// Check if this local path is the target of a link
// If so, return the canonical package name (link origin) instead
var link_origin = link.get_origin(package_dir)
if (link_origin) {
return link_origin
}
// in this case, the dir is the package
if (package_in_shop(package_dir))
return package_dir
// For local directories (e.g., linked targets), read the package name from cell.toml
var _toml_path = package_dir + '/cell.toml'
var content = null
var cfg = null
if (fd.is_file(_toml_path)) {
content = text(fd.slurp(_toml_path))
cfg = toml.decode(content)
if (cfg.package)
return cfg.package
}
return null
}
// given a file, find the absolute path, package name, and import name
// Returns {path, is_module, is_actor, package, name}; package/name stay null
// when no enclosing package directory is found.
Shop.file_info = function(file) {
var info = {
path: file,
is_module: false,
is_actor: false,
package: null,
name: null
}
// Classify by extension: .cm module, .ce actor, anything else is neither.
if (ends_with(file, MOD_EXT))
info.is_module = true
else if (ends_with(file, ACTOR_EXT))
info.is_actor = true
// Find package directory and determine package name
var pkg_dir = pkg_tools.find_package_dir(file)
if (pkg_dir) {
info.package = abs_path_to_package(pkg_dir)
// name = path relative to the package dir, with the extension stripped
// for modules/actors (text(s, start, end) slices the string).
if (info.is_actor)
info.name = text(file, length(pkg_dir) + 1, length(file) - length(ACTOR_EXT))
else if (info.is_module)
info.name = text(file, length(pkg_dir) + 1, length(file) - length(MOD_EXT))
else
info.name = text(file, length(pkg_dir) + 1)
}
return info
}
// Drop the leading package segment: 'pkg/a/b' -> 'a/b'; null when no '/'.
function get_import_name(path)
{
  var segments = array(path, '/')
  if (length(segments) < 2)
    return null
  return text(array(segments, 1), '/')
}
// Given a path like 'prosperon/sprite' and a package context,
// resolve the alias 'prosperon' to its canonical package name
function get_aliased_package(path, package_context) {
  if (!package_context)
    return null
  var hit = pkg_tools.split_alias(package_context, path)
  return hit ? hit.package : null
}
// Same as get_aliased_package but just returns the package for the alias part
function get_canonical_package(alias, package_context) {
  if (!package_context)
    return null
  var hit = pkg_tools.split_alias(package_context, alias + '/dummy')
  return hit ? hit.package : null
}
// return the safe path for the package
// guaranteed to be validated
// NOTE(review): for relative names only '@' is flattened — '/' is kept so a
// name like 'host/user/repo' nests as subdirectories under packages/.
// Confirm this asymmetry with absolute paths is intended.
function safe_package_path(pkg)
{
// For absolute paths, replace / with _ to create a valid directory name
// Also replace @ with _
if (pkg && starts_with(pkg, '/'))
return replace(replace(pkg, '/', '_'), '@', '_')
return replace(pkg, '@', '_')
}
// Cache path stem for a package, with '/' and '@' flattened to '_'.
function package_cache_path(pkg)
{
  var flat = replace(replace(pkg, '/', '_'), '@', '_')
  return global_shop_path + '/cache/' + flat
}
// Load lock.toml configuration (from global shop)
// Decoded once and memoized in _lock; missing or empty files yield {}.
var _lock = null
Shop.load_lock = function() {
  if (_lock)
    return _lock
  var lock_path = global_shop_path + '/lock.toml'
  if (!fd.is_file(lock_path))
    return {}
  var raw = text(fd.slurp(lock_path))
  if (!length(raw))
    return {}
  _lock = toml.decode(raw)
  return _lock
}
// Save lock.toml configuration (to global shop)
// Also refreshes the in-memory _lock cache so a subsequent Shop.load_lock()
// (and therefore package_in_shop) sees the data just written instead of a
// stale earlier copy.
Shop.save_lock = function(lock) {
var path = global_shop_path + '/lock.toml'
_lock = lock
fd.slurpwrite(path, stone(blob(toml.encode(lock))));
}
// Shop configuration (shop.toml) with policy flags
var _shop_config = null
// Defaults used when shop.toml is missing, empty, or omits a policy key.
// Flags gate how modules may be loaded: dylibs, static internals, cached
// .mach blobs, on-the-fly compilation, and native (dylib) compilation.
var _default_policy = {
allow_dylib: true,
allow_static: true,
allow_mach: true,
allow_compile: true,
native: false
}
// Load (and memoize) shop.toml. If the file is missing it is created with
// defaults; if empty, defaults are used without writing. Decoded configs
// have any missing policy keys back-filled from _default_policy.
Shop.load_config = function() {
if (_shop_config) return _shop_config
if (!global_shop_path) {
_shop_config = {policy: object(_default_policy)}
return _shop_config
}
var path = global_shop_path + '/shop.toml'
if (!fd.is_file(path)) {
_shop_config = {policy: object(_default_policy)}
fd.slurpwrite(path, stone(blob(toml.encode(_shop_config))))
return _shop_config
}
var content = text(fd.slurp(path))
if (!length(content)) {
_shop_config = {policy: object(_default_policy)}
return _shop_config
}
_shop_config = toml.decode(content)
if (!_shop_config.policy) _shop_config.policy = {}
// array(obj) yields the keys of _default_policy; fill in any not set.
var keys = array(_default_policy)
var i = 0
for (i = 0; i < length(keys); i++) {
if (_shop_config.policy[keys[i]] == null)
_shop_config.policy[keys[i]] = _default_policy[keys[i]]
}
return _shop_config
}
// Effective policy for module loading.
// NOTE(review): native_mode is not declared in this file and is not in the
// documented env list at the top — presumably another env-provided global;
// confirm. Setting policy.native here mutates the cached config, so once
// enabled it stays enabled for the process.
function get_policy() {
var config = Shop.load_config()
if (native_mode) config.policy.native = true
return config.policy
}
// Get information about how to resolve a package
// Local packages always start with /
Shop.resolve_package_info = function(pkg) {
  if (starts_with(pkg, '/'))
    return 'local'
  // Any name mentioning 'gitea' is treated as a gitea-hosted remote.
  if (search(pkg, 'gitea') != null)
    return 'gitea'
  return null
}
// Verify if a package name is valid and return status
// Disrupts (with a printed reason) on empty or reserved names, or when the
// caller passed a full URL instead of a bare package name.
Shop.verify_package_name = function(pkg) {
  if (!pkg) {
    print("Empty package name")
    disrupt
  }
  if (pkg == 'local') {
    print("local is not a valid package name")
    disrupt
  }
  if (pkg == 'core') {
    print("core is not a valid package name")
    disrupt
  }
  if (search(pkg, '://') != null) {
    print(`Invalid package name: ${pkg}; did you mean ${array(pkg, '://')[1]}?`)
    disrupt
  }
}
// Convert module package to download URL
// Only gitea-style names (host/user/repo) are downloadable; others yield null.
Shop.get_download_url = function(pkg, commit_hash) {
  if (Shop.resolve_package_info(pkg) != 'gitea')
    return null
  var segs = array(pkg, '/')
  var host = segs[0]
  var user = segs[1]
  var repo = segs[2]
  return 'https://' + host + '/' + user + '/' + repo + '/archive/' + commit_hash + '.zip'
}
// Get the API URL for checking remote git commits
// Returns the gitea branches endpoint for host/user/repo names, else null.
Shop.get_api_url = function(pkg) {
  if (Shop.resolve_package_info(pkg) != 'gitea')
    return null
  var segs = array(pkg, '/')
  var host = segs[0]
  var user = segs[1]
  var repo = segs[2]
  return 'https://' + host + '/api/v1/repos/' + user + '/' + repo + '/branches/'
}
// Extract commit hash from API response
// For gitea: the branches endpoint returns an array; take the first entry's
// commit id. Non-gitea packages yield null.
Shop.extract_commit_hash = function(pkg, response) {
  if (!response)
    return null
  var kind = Shop.resolve_package_info(pkg)
  var payload = json.decode(response)
  if (kind != 'gitea')
    return null
  if (is_array(payload))
    payload = payload[0]
  return payload.commit && payload.commit.id
}
var open_dls = {}
// Host target detection for native dylib resolution
// Maps os.platform()/os.arch() onto the build target names used by the
// native pipeline; defaults to arm64 when os.arch is unavailable.
function detect_host_target() {
  var platform = os.platform()
  var arch = os.arch ? os.arch() : 'arm64'
  var is_x86 = (arch == 'x86_64')
  if (platform == 'macOS' || platform == 'darwin')
    return is_x86 ? 'macos_x86_64' : 'macos_arm64'
  if (platform == 'Linux' || platform == 'linux')
    return is_x86 ? 'linux' : 'linux_arm64'
  if (platform == 'Windows' || platform == 'windows')
    return 'windows'
  return null
}
var host_target = detect_host_target()
// Check for a native .cm dylib at the deterministic lib path
// Returns a native descriptor {_native, _handle, _sym}, or null if no native dylib exists
// Also checks staleness: if source has changed, the content-addressed build artifact
// won't exist for the new hash, so the installed dylib is treated as stale.
function try_native_mod_dylib(pkg, stem) {
var dylib_path = get_dylib_path(pkg, stem)
var src_path = null
var src = null
var host = null
var hash = null
var tc_ext = null
var build_path = null
var handle = null
var sym = null
if (!fd.is_file(dylib_path)) return null
// Staleness check: verify the content-addressed build artifact exists
src_path = get_packages_dir() + '/' + safe_package_path(pkg) + '/' + stem
if (fd.is_file(src_path)) {
src = text(fd.slurp(src_path))
// The hash salts the source with the host target and the literal
// 'native' tag, matching the artifact name the native build produces.
host = detect_host_target()
hash = content_hash(src + '\n' + host + '\nnative')
tc_ext = dylib_ext
build_path = global_shop_path + '/build/' + hash + '.' + host + tc_ext
if (!fd.is_file(build_path)) return null
}
handle = os.dylib_open(dylib_path)
if (!handle) return null
// Entry symbol follows the js_<pkg>_<file>_use convention.
sym = Shop.c_symbol_for_file(pkg, stem)
return {_native: true, _handle: handle, _sym: sym}
}
// Default capabilities injected into scripts
// These map to $_ properties in engine.cm
var SHOP_DEFAULT_INJECT = ['$self', '$overling', '$clock', '$delay', '$start', '$receiver', '$contact', '$portal', '$time_limit', '$couple', '$stop', '$unneeded', '$connection', '$fd']
// Decide what a given module is allowed to see.
// This is the capability gate - tweak as needed.
// Returns a fresh copy of the default list so callers cannot mutate it.
Shop.script_inject_for = function(file_info) {
if (!file_info) return []
// For now, grant everything to all scripts
// Later this can be tuned per package/script
return array(SHOP_DEFAULT_INJECT)
}
// Get capabilities for a script path (public API)
Shop.get_script_capabilities = function(path) {
  return Shop.script_inject_for(Shop.file_info(path))
}
// Build the env object for a module, with runtime fns and $-prefixed capabilities.
// Matches engine.cm's approach: env properties become free variables in the module.
function inject_env(inject) {
var env = {}
// Copy all runtime_env entries in first (arrfor iterates the key array).
if (runtime_env) {
arrfor(array(runtime_env), function(k) { env[k] = runtime_env[k] })
}
// Add capability injections with $ prefix
var i = 0
var inj = null
var key = null
for (i = 0; i < length(inject); i++) {
inj = inject[i]
key = inj
// Accept entries with or without a leading '$'; normalize to the bare name.
if (key && key[0] == '$') key = text(key, 1)
// fd is provided directly; everything else comes off the actor API (my$_).
if (key == 'fd') env['$fd'] = fd
else env['$' + key] = my$_[key]
}
return env
}
// --- Pipeline API ---
// Lazy-loaded pipeline modules from use_cache (no re-entrancy risk).
var _tokenize_mod = null
var _parse_mod = null
var _fold_mod = null
var _mcode_mod = null
var _streamline_mod = null
var _index_mod = null
// Per-stage result caches keyed by content hash of the source text.
var _token_cache = {}
var _ast_cache = {}
var _analyze_cache = {}
var _compile_cache = {}
var _index_cache = {}
// Each getter prefers the namespaced 'core/<mod>' cache entry and falls
// back to the bare name for older cache layouts.
var get_tokenize = function() {
if (!_tokenize_mod) _tokenize_mod = use_cache['core/tokenize'] || use_cache['tokenize']
return _tokenize_mod
}
var get_parse = function() {
if (!_parse_mod) _parse_mod = use_cache['core/parse'] || use_cache['parse']
return _parse_mod
}
var get_fold = function() {
if (!_fold_mod) _fold_mod = use_cache['core/fold'] || use_cache['fold']
return _fold_mod
}
var get_mcode = function() {
if (!_mcode_mod) _mcode_mod = use_cache['core/mcode'] || use_cache['mcode']
return _mcode_mod
}
var get_streamline = function() {
if (!_streamline_mod) _streamline_mod = use_cache['core/streamline'] || use_cache['streamline']
return _streamline_mod
}
// index alone may be loaded on demand via Shop.use — unlike the compiler
// stages it is not needed during resolve_mod_fn, so no re-entrancy risk.
var get_index = function() {
if (!_index_mod) {
_index_mod = use_cache['core/index'] || use_cache['index']
if (!_index_mod) _index_mod = Shop.use('index', 'core')
}
return _index_mod
}
// Tokenize a source file, memoized by content hash of its text.
Shop.tokenize_file = function(path) {
  var src = text(fd.slurp(path))
  var cache_key = content_hash(stone(blob(src)))
  var hit = _token_cache[cache_key]
  if (hit)
    return hit
  var toks = get_tokenize()(src, path)
  _token_cache[cache_key] = toks
  return toks
}
// Parse a source file to an AST, memoized by content hash of its text.
Shop.parse_file = function(path) {
  var src = text(fd.slurp(path))
  var cache_key = content_hash(stone(blob(src)))
  var hit = _ast_cache[cache_key]
  if (hit)
    return hit
  var toks = Shop.tokenize_file(path)
  var tree = get_parse()(toks.tokens, src, path, get_tokenize())
  _ast_cache[cache_key] = tree
  return tree
}
// Parse then constant-fold a source file, memoized by content hash.
Shop.analyze_file = function(path) {
  var src = text(fd.slurp(path))
  var cache_key = content_hash(stone(blob(src)))
  var hit = _analyze_cache[cache_key]
  if (hit)
    return hit
  var folded = get_fold()(Shop.parse_file(path))
  _analyze_cache[cache_key] = folded
  return folded
}
// Resolve import paths on an index in-place.
// For each import, first try the shop's use-path resolution in the file's
// package context; failing that, try a sibling file next to fname. Imports
// that resolve get a resolved_path property; others are left untouched.
Shop.resolve_imports = function(idx_obj, fname) {
var fi = Shop.file_info(fd.realpath(fname))
var ctx = fi.package
var ri = 0
var rp = null
var lp = null
while (ri < length(idx_obj.imports)) {
rp = Shop.resolve_use_path(idx_obj.imports[ri].module_path, ctx)
if (rp == null) {
// Fall back to a relative .cm file in the importing file's directory.
lp = fd.dirname(fd.realpath(fname)) + '/' + idx_obj.imports[ri].module_path + '.cm'
if (fd.is_file(lp)) {
rp = lp
}
}
if (rp != null) {
idx_obj.imports[ri].resolved_path = rp
}
ri = ri + 1
}
}
// Index a source file (memoized by content hash) and resolve its imports.
Shop.index_file = function(path) {
var src = text(fd.slurp(path))
var key = content_hash(stone(blob(src)))
if (_index_cache[key]) return _index_cache[key]
var tok = Shop.tokenize_file(path)
// Reuse the shared AST cache rather than re-parsing the token stream here;
// Shop.parse_file tokenizes through the same _token_cache, so this is the
// identical parse with memoization instead of duplicated work.
var ast = Shop.parse_file(path)
var idx = get_index().index_ast(ast, tok.tokens, path)
Shop.resolve_imports(idx, path)
_index_cache[key] = idx
return idx
}
// Lower a source file's folded AST to mcode IR.
Shop.mcode_file = function(path) {
  return get_mcode()(Shop.analyze_file(path))
}
// Fully compile a source file (mcode + streamline), memoized by content hash.
Shop.compile_file = function(path) {
  var src = text(fd.slurp(path))
  var cache_key = content_hash(stone(blob(src)))
  var hit = _compile_cache[cache_key]
  if (hit)
    return hit
  var optimized = get_streamline()(Shop.mcode_file(path))
  _compile_cache[cache_key] = optimized
  return optimized
}
// Enumerate every script in every installed package (core always included).
// Returns [{package, rel_path, full_path}, ...].
// NOTE(review): relies on get_package_scripts / Shop.list_packages defined
// elsewhere in this file.
Shop.all_script_paths = function() {
var packages = Shop.list_packages()
var result = []
var i = 0
var j = 0
var scripts = null
var pkg_dir = null
var has_core = false
for (i = 0; i < length(packages); i++) {
if (packages[i] == 'core') has_core = true
}
// array(a, b) concatenates: ensure core is always scanned.
if (!has_core) {
packages = array(packages, ['core'])
}
for (i = 0; i < length(packages); i++) {
// Absolute package names are used as directories directly; shop packages
// live under packages/ with their name flattened.
pkg_dir = starts_with(packages[i], '/') ? packages[i] : get_packages_dir() + '/' + safe_package_path(packages[i])
scripts = get_package_scripts(packages[i])
for (j = 0; j < length(scripts); j++) {
result[] = {
package: packages[i],
rel_path: scripts[j],
full_path: pkg_dir + '/' + scripts[j]
}
}
}
return result
}
// Lazy-loaded compiler modules for on-the-fly compilation
// (_mcode_mod and _streamline_mod are declared once with the other pipeline
// caches above; the duplicate `var` declarations that used to sit here only
// re-initialized those caches to null at load time.)
// Compile a module and return its bytecode blob.
// The bytecode is cached on disk by content hash.
// Resolution order, each step gated by policy flags:
//   1. installed native dylib (allow_dylib)
//   2. fresh native compile via core/build (native + allow_compile)
//   3. cached .mach blob (allow_mach)
//   4. cached mcode JSON re-assembled to mach (allow_compile)
//   5. full pipeline compile: analyze -> mcode -> streamline (allow_compile)
// Returns either a mach blob or a native descriptor {_native, _handle, _sym}.
function resolve_mod_fn(path, pkg) {
if (!fd.is_file(path)) { print(`path ${path} is not a file`); disrupt }
var content = text(fd.slurp(path))
if (length(content) == 0) { print(`${path}: empty file`); disrupt }
var content_key = stone(blob(content))
var native_result = null
var cached = null
var ast = null
var compiled = null
var mach_path = null
var mach_blob = null
var mcode_path = null
var ir = null
var optimized = null
var mcode_json = null
var cached_mcode_path = null
var _pkg_dir = null
var _stem = null
var policy = null
var build_mod = null
var dylib_path = null
var handle = null
var sym = null
policy = get_policy()
// Compute _pkg_dir and _stem early so all paths can use them
if (pkg) {
_pkg_dir = get_packages_dir() + '/' + safe_package_path(pkg)
if (starts_with(path, _pkg_dir + '/')) {
_stem = text(path, length(_pkg_dir) + 1)
}
}
// Check for native .cm dylib at deterministic path first
if (policy.allow_dylib && pkg && _stem) {
native_result = try_native_mod_dylib(pkg, _stem)
if (native_result != null) return native_result
}
// Native compilation path: compile to native dylib instead of mach
if (policy.native && policy.allow_compile) {
build_mod = use_cache['core/build']
if (build_mod) {
dylib_path = build_mod.compile_native(path, null, null, pkg)
if (dylib_path) {
handle = os.dylib_open(dylib_path)
if (handle) {
sym = pkg && _stem ? Shop.c_symbol_for_file(pkg, _stem) : null
return {_native: true, _handle: handle, _sym: sym}
}
}
}
}
// Check cache for pre-compiled .mach blob
if (policy.allow_mach) {
cached = pull_from_cache(content_key)
if (cached) {
return cached
}
}
// Check for cached mcode in content-addressed store (salted hash to distinguish from mach)
if (policy.allow_compile) {
cached_mcode_path = global_shop_path + '/build/' + content_hash(stone(blob(text(content_key) + "\nmcode")))
if (fd.is_file(cached_mcode_path)) {
mcode_json = text(fd.slurp(cached_mcode_path))
compiled = mach_compile_mcode_bin(path, mcode_json)
put_into_cache(content_key, compiled)
return compiled
}
}
// Compile via full pipeline: analyze → mcode → streamline → serialize
// Load compiler modules from use_cache directly (NOT via Shop.use, which
// would re-enter resolve_locator → resolve_mod_fn → infinite recursion)
if (policy.allow_compile) {
if (!_mcode_mod) _mcode_mod = use_cache['core/mcode'] || use_cache['mcode']
if (!_streamline_mod) _streamline_mod = use_cache['core/streamline'] || use_cache['streamline']
if (!_mcode_mod || !_streamline_mod) {
print(`error: compiler modules not loaded (mcode=${_mcode_mod != null}, streamline=${_streamline_mod != null})`)
disrupt
}
ast = analyze(content, path)
ir = _mcode_mod(ast)
optimized = _streamline_mod(ir)
mcode_json = shop_json.encode(optimized)
// Cache mcode (architecture-independent) in content-addressed store
ensure_dir(global_shop_path + '/build')
fd.slurpwrite(cached_mcode_path, stone(blob(mcode_json)))
// Cache mach blob
compiled = mach_compile_mcode_bin(path, mcode_json)
put_into_cache(content_key, compiled)
return compiled
}
print(`Module ${path} could not be loaded: no artifact found or all methods blocked by policy`)
disrupt
}
// given a path and a package context
// return module info about where it was found
// Resolve a module path to {path, scope, pkg} without compiling.
// Search order: explicit host/user/repo import -> context package dir ->
// alias declared in the context's cell.toml -> bare packages/ path -> core.
// internal/ paths never escape their own package.
function resolve_path(path, ctx)
{
var explicit = split_explicit_package_import(path)
var explicit_path = null
var core_dir = null
var core_file_path = null
var is_core = null
var scope = null
var alias_path = null
var ctx_dir = null
var ctx_path = null
var alias = null
var package_path = null
// An explicit import of another package's internal/ file is rejected.
if (explicit) {
if (is_internal_path(explicit.path) && ctx && explicit.package != ctx)
explicit = null
}
if (explicit) {
explicit_path = get_packages_dir() + '/' + safe_package_path(explicit.package) + '/' + explicit.path
if (fd.is_file(explicit_path))
return {path: explicit_path, scope: SCOPE_PACKAGE, pkg: explicit.package}
}
// No context: only core is searchable.
if (!ctx) {
core_dir = Shop.get_core_dir()
core_file_path = core_dir + '/' + path
if (fd.is_file(core_file_path))
return {path: core_file_path, scope: SCOPE_CORE, pkg: 'core'}
return null
}
// Context may be an absolute directory or an installed package name.
if (starts_with(ctx, '/'))
ctx_dir = ctx
else
ctx_dir = get_packages_dir() + '/' + safe_package_path(ctx)
ctx_path = ctx_dir + '/' + path
if (fd.is_file(ctx_path)) {
is_core = (ctx == 'core') || (ctx_dir == Shop.get_core_dir())
scope = is_core ? SCOPE_CORE : SCOPE_LOCAL
return {path: ctx_path, scope: scope, pkg: ctx}
}
// internal/ files not found in the own package are not searched elsewhere.
if (is_internal_path(path))
return null
alias = pkg_tools.split_alias(ctx, path)
if (alias) {
alias_path = get_packages_dir() + '/' + safe_package_path(alias.package) + '/' + alias.path
if (fd.is_file(alias_path))
return {path: alias_path, scope: SCOPE_PACKAGE, pkg: ctx}
}
package_path = get_packages_dir() + '/' + safe_package_path(path)
if (fd.is_file(package_path))
return {path: package_path, scope: SCOPE_PACKAGE, pkg: ctx}
core_dir = Shop.get_core_dir()
core_file_path = core_dir + '/' + path
if (fd.is_file(core_file_path))
return {path: core_file_path, scope: SCOPE_CORE, pkg: 'core'}
return null
}
// Resolve a module path and compile it, yielding {path, scope, symbol}.
function resolve_locator(path, ctx)
{
  var found = resolve_path(path, ctx)
  if (!found)
    return null
  return {
    path: found.path,
    scope: found.scope,
    symbol: resolve_mod_fn(found.path, found.pkg)
  }
}
// Generate symbol name for a C module file
// e.g., make_c_symbol('core', 'math') -> 'js_core_math_use'
function make_c_symbol(pkg, file) {
  return 'js_' + safe_c_name(pkg) + '_' + safe_c_name(file) + '_use'
}
// Get the deterministic dylib path for a module in lib/<pkg>/<stem>.dylib
function get_dylib_path(pkg, stem) {
  var lib_dir = global_shop_path + '/lib/' + safe_package_path(pkg)
  return lib_dir + '/' + stem + dylib_ext
}
// Get the deterministic mach path for a module in lib/<pkg>/<stem>.mach
function get_mach_path(pkg, stem) {
  var lib_dir = global_shop_path + '/lib/' + safe_package_path(pkg)
  return lib_dir + '/' + stem + '.mach'
}
// Open a per-module dylib and return the dlopen handle
// Handles are memoized in open_dls; missing files yield null.
function open_module_dylib(dylib_path) {
  var cached = open_dls[dylib_path]
  if (cached)
    return cached
  if (!fd.is_file(dylib_path))
    return null
  var handle = os.dylib_open(dylib_path)
  open_dls[dylib_path] = handle
  return handle
}
// Try to resolve a C symbol from the deterministic dylib path
// Returns a loader function or null
function try_dylib_symbol(sym, pkg, file_stem) {
  var handle = open_module_dylib(get_dylib_path(pkg, file_stem))
  if (!handle)
    return null
  if (!os.dylib_has_symbol(handle, sym))
    return null
  return function() { return os.dylib_symbol(handle, sym) }
}
// Resolve a C symbol by searching:
// At each scope: check lib/ dylib first, then internal (static)
// Search order: explicit package -> (core-only when no context) ->
// own package -> aliased package -> core. Returns
// {symbol: loaderFn, scope, path, package?} or null.
function resolve_c_symbol(path, package_context) {
var explicit = split_explicit_package_import(path)
var sym = null
var loader = null
var _path = null
var core_sym = null
var canon_pkg = null
var mod_name = null
var file_stem = null
var policy = null
policy = get_policy()
// Explicit imports of another package's internal/ files are rejected.
if (explicit) {
if (is_internal_path(explicit.path) && package_context && explicit.package != package_context)
explicit = null
}
if (explicit) {
sym = make_c_symbol(explicit.package, explicit.path)
// NOTE(review): replace() here strips the first '.c' occurrence anywhere
// in the path, not just a trailing extension — confirm that is intended.
file_stem = replace(explicit.path, '.c', '')
// Check lib/ dylib first
if (policy.allow_dylib) {
loader = try_dylib_symbol(sym, explicit.package, file_stem)
if (loader) {
return {
symbol: loader,
scope: SCOPE_PACKAGE,
package: explicit.package,
path: sym
}
}
}
// Then check internal/static
if (policy.allow_static && os.internal_exists(sym)) {
return {
symbol: function() { return os.load_internal(sym) },
scope: SCOPE_PACKAGE,
package: explicit.package,
path: sym
}
}
}
// If no package context, only check core
if (!package_context || package_context == 'core') {
core_sym = make_c_symbol('core', path)
// Check lib/ dylib first for core
if (policy.allow_dylib) {
loader = try_dylib_symbol(core_sym, 'core', path)
if (loader) {
return {
symbol: loader,
scope: SCOPE_CORE,
path: core_sym
}
}
}
if (policy.allow_static && os.internal_exists(core_sym)) {
return {
symbol: function() { return os.load_internal(core_sym) },
scope: SCOPE_CORE,
path: core_sym
}
}
return null
}
// 1. Check own package (dylib first, then internal)
sym = make_c_symbol(package_context, path)
if (policy.allow_dylib) {
loader = try_dylib_symbol(sym, package_context, path)
if (loader) {
return {
symbol: loader,
scope: SCOPE_LOCAL,
path: sym
}
}
}
if (policy.allow_static && os.internal_exists(sym)) {
return {
symbol: function() { return os.load_internal(sym) },
scope: SCOPE_LOCAL,
path: sym
}
}
// internal/ files not found in the own package are not searched elsewhere.
if (is_internal_path(path))
return null
// 2. Check aliased package imports (e.g. 'prosperon/sprite')
var pkg_alias = get_import_package(path)
if (pkg_alias) {
canon_pkg = get_aliased_package(path, package_context)
if (canon_pkg) {
mod_name = get_import_name(path)
sym = make_c_symbol(canon_pkg, mod_name)
if (policy.allow_dylib) {
loader = try_dylib_symbol(sym, canon_pkg, mod_name)
if (loader) {
return {
symbol: loader,
scope: SCOPE_PACKAGE,
package: canon_pkg,
path: sym
}
}
}
if (policy.allow_static && os.internal_exists(sym)) {
return {
symbol: function() { return os.load_internal(sym) },
scope: SCOPE_PACKAGE,
package: canon_pkg,
path: sym
}
}
}
}
// 3. Check core (dylib first, then internal)
core_sym = make_c_symbol('core', path)
if (policy.allow_dylib) {
loader = try_dylib_symbol(core_sym, 'core', path)
if (loader) {
return {
symbol: loader,
scope: SCOPE_CORE,
path: core_sym
}
}
}
if (policy.allow_static && os.internal_exists(core_sym)) {
return {
symbol: function() { return os.load_internal(core_sym) },
scope: SCOPE_CORE,
path: core_sym
}
}
return null
}
// Cache for resolved module info
var module_info_cache = {}
// Resolve a module both as a C symbol and as a .cm module, pick the lower
// (higher-priority) scope, and derive a canonical use_cache key so the same
// module reached through different spellings shares one cache slot.
// Returns {cache_key, c_resolve, mod_resolve, min_scope} or null.
// Note: misses are not cached — only successful resolutions are memoized.
function resolve_module_info(path, package_context) {
var lookup_key = package_context ? package_context + ':' + path : ':' + path
if (module_info_cache[lookup_key])
return module_info_cache[lookup_key]
// Scope 999 is a sentinel for "not found on this track".
var c_resolve = resolve_c_symbol(path, package_context) || {scope:999}
var mod_resolve = resolve_locator(path + '.cm', package_context) || {scope:999}
var min_scope = min(c_resolve.scope, mod_resolve.scope)
if (min_scope == 999)
return null
var cache_key = null
var real_path = null
var real_info = null
var pkg_alias = null
var canon_pkg = null
var mod_name = null
// Prefer a canonical key derived from the resolved file's real location.
if (mod_resolve.scope == SCOPE_CORE) {
cache_key = 'core/' + path
} else if (mod_resolve.scope < 900 && mod_resolve.path) {
real_path = fd.realpath(mod_resolve.path)
if (real_path) {
real_info = Shop.file_info(real_path)
if (real_info.package && real_info.name)
cache_key = real_info.package + '/' + real_info.name
else
cache_key = real_path
}
}
// Fallback keys when only the C track resolved or file_info was incomplete.
if (!cache_key) {
if (min_scope == SCOPE_CORE)
cache_key = 'core/' + path
else if (min_scope == SCOPE_LOCAL && package_context)
cache_key = package_context + '/' + path
else if (min_scope == SCOPE_PACKAGE) {
pkg_alias = get_import_package(path)
if (pkg_alias) {
canon_pkg = get_canonical_package(pkg_alias, package_context)
if (canon_pkg) {
mod_name = get_import_name(path)
cache_key = canon_pkg + '/' + mod_name
} else
cache_key = path
} else
cache_key = path
} else
cache_key = path
}
var info = {
cache_key: cache_key,
c_resolve: c_resolve,
mod_resolve: mod_resolve,
min_scope: min_scope
}
module_info_cache[lookup_key] = info
return info
}
// Canonical use_cache key for a module path, or null when unresolvable.
function get_module_cache_key(path, package_context) {
  var info = resolve_module_info(path, package_context)
  if (!info)
    return null
  return info.cache_key
}
// True when the module already has a value in use_cache.
Shop.is_loaded = function is_loaded(path, package_context) {
  return use_cache[get_module_cache_key(path, package_context)] != null
}
// Create a use function bound to a specific package context
function make_use_fn(pkg) {
  return function(mod_path) {
    return Shop.use(mod_path, pkg)
  }
}
// Call a C module loader and hand back the module value it produces.
function call_c_module(c_resolve) {
  return c_resolve.symbol()
}
// Execute a resolved module (from resolve_module_info) and return its value.
// .cm modules win over C symbols when both resolved; native dylib modules
// are loaded via os.native_module_load_named, bytecode via mach_load, and
// both get a stone'd env with capabilities plus a package-bound use().
function execute_module(info)
{
var c_resolve = info.c_resolve
var mod_resolve = info.mod_resolve
var used = null
var file_info = null
var inject = null
var env = null
var pkg = null
if (mod_resolve.scope < 900) {
// Check if native dylib was resolved (descriptor with _handle and _sym)
if (is_object(mod_resolve.symbol) && mod_resolve.symbol._native) {
file_info = Shop.file_info(mod_resolve.path)
inject = Shop.script_inject_for(file_info)
env = inject_env(inject)
pkg = file_info.package
env.use = make_use_fn(pkg)
env = stone(env)
used = os.native_module_load_named(
mod_resolve.symbol._handle, mod_resolve.symbol._sym, env)
} else {
// Build env with runtime fns, capabilities, and use function
file_info = Shop.file_info(mod_resolve.path)
inject = Shop.script_inject_for(file_info)
env = inject_env(inject)
pkg = file_info.package
env.use = make_use_fn(pkg)
env = stone(env)
// Load compiled bytecode with env
used = mach_load(mod_resolve.symbol, env)
}
} else if (c_resolve.scope < 900) {
// C only
used = call_c_module(c_resolve)
} else {
print(`Module ${info.path} could not be found`); disrupt
}
// A module evaluating to null is treated as a load failure.
if (!used) { print(`Module ${info} returned null`); disrupt }
return used
}
// Resolve and execute a module, disrupting when it cannot be found.
function get_module(path, package_context) {
  var info = resolve_module_info(path, package_context)
  if (!info) {
    print(`Module ${path} could not be found in ${package_context}`)
    disrupt
  }
  return execute_module(info)
}
// Public module loader: embedded modules (static builds) first, then the
// normal resolve/execute path; results are memoized in use_cache.
// NOTE(review): embedded modules are keyed by path alone, ignoring
// package_context — confirm embedded names are globally unique.
Shop.use = function use(path, package_context) {
// Check for embedded module (static builds)
var embed_key = 'embedded:' + path
var embedded = null
var embed_env = null
if (use_cache[embed_key]) return use_cache[embed_key]
if (os.embedded_module) {
embedded = os.embedded_module(path)
if (embedded) {
// Embedded blobs get the full default capability set.
embed_env = inject_env(SHOP_DEFAULT_INJECT)
embed_env.use = make_use_fn(package_context)
embed_env = stone(embed_env)
use_cache[embed_key] = mach_load(embedded, embed_env)
return use_cache[embed_key]
}
}
var info = resolve_module_info(path, package_context)
if (!info) { print(`Module ${path} could not be found in ${package_context}`); disrupt }
if (use_cache[info.cache_key])
return use_cache[info.cache_key]
use_cache[info.cache_key] = execute_module(info)
return use_cache[info.cache_key]
}
Shop.resolve_locator = resolve_locator
// Resolve a use() module path to a filesystem path without compiling.
// Returns the absolute path string, or null if not found.
Shop.resolve_use_path = function(path, ctx) {
  var found = resolve_path(path + '.cm', ctx)
  return found ? found.path : null
}
// Get cache path for a package and commit.
// Delegates name flattening to package_cache_path so the two cache-path
// helpers cannot drift apart. (Both map '/' and '@' to '_'; the replacement
// order differed in the old inline version but the result is identical.)
function get_cache_path(pkg, commit) {
return package_cache_path(pkg) + '_' + commit + '.zip'
}
function get_package_abs_dir(package)
{
return get_packages_dir() + '/' + safe_package_path(package)
}
// Fetch the latest commit hash from remote for a package
// Returns null for local packages or if fetch fails
// The inner function's disruption clause converts any network/parse
// failure into a null result (best-effort by design).
function fetch_remote_hash(pkg) {
var api_url = Shop.get_api_url(pkg)
if (!api_url) return null
var _fetch_hash = function() {
var resp = http.fetch(api_url)
return Shop.extract_commit_hash(pkg, text(resp))
} disruption {
return null
}
return _fetch_hash()
}
// Download a zip for a package at a specific commit and store it in the
// cache directory. Returns the zip blob, or null on failure.
function download_zip(pkg, commit_hash) {
ensure_dir(global_shop_path + '/cache')
var cache_path = get_cache_path(pkg, commit_hash)
var download_url = Shop.get_download_url(pkg, commit_hash)
if (!download_url) {
log.error("Could not determine download URL for " + pkg)
return null
}
var attempt = function() {
var payload = http.fetch(download_url)
fd.slurpwrite(cache_path, payload)
return payload
} disruption {
// Download or write failure: caller treats null as "not available".
return null
}
return attempt()
}
// Read a previously downloaded zip from the cache; null when absent.
function get_cached_zip(pkg, commit_hash) {
var path = get_cache_path(pkg, commit_hash)
if (!fd.is_file(path)) return null
return fd.slurp(path)
}
// Fetch: Ensure the zip on disk matches what's in the lock file
// For local packages, this is a no-op
// For remote packages, downloads the zip if not present or hash mismatch
// Returns: { status: 'local'|'cached'|'downloaded'|'error', message: string }
Shop.fetch = function(pkg) {
var lock = Shop.load_lock()
var lock_entry = lock[pkg]
var info = Shop.resolve_package_info(pkg)
if (info == 'local') {
return { status: 'local' }
}
// No lock entry - can't fetch without knowing what commit
if (!lock_entry || !lock_entry.commit) {
return { status: 'error', message: "No lock entry for " + pkg + " - run update first" }
}
var commit = lock_entry.commit
var expected_hash = lock_entry.zip_hash
// Check if we have the zip cached
var zip_blob = get_cached_zip(pkg, commit)
var actual_hash = null
if (zip_blob) {
// If we have a hash on record, verify it
if (expected_hash) {
// blake2 digest rendered as hex ('h') must match the locked hash.
actual_hash = text(crypto.blake2(zip_blob), 'h')
if (actual_hash == expected_hash) {
return { status: 'cached' }
}
// Corrupt or stale cached zip: fall through to re-download below.
log.console("Zip hash mismatch for " + pkg + ", re-fetching...")
} else {
// No hash stored yet - compute and store it
actual_hash = text(crypto.blake2(zip_blob), 'h')
lock_entry.zip_hash = actual_hash
Shop.save_lock(lock)
return { status: 'cached' }
}
}
// Download the zip
var new_zip = download_zip(pkg, commit)
if (!new_zip) {
return { status: 'error', message: "Failed to download " + pkg }
}
// Store the hash
var new_hash = text(crypto.blake2(new_zip), 'h')
lock_entry.zip_hash = new_hash
Shop.save_lock(lock)
return { status: 'downloaded' }
}
// Extract: Extract a package to its target directory
// For linked packages, creates a symlink to the link target
// For local packages, creates a symlink to the local path
// For remote packages, extracts from the provided zip blob
// Returns true on success
Shop.extract = function(pkg) {
var target_dir = get_package_abs_dir(pkg)
// Check if this package is linked
var link_target = link.get_target(pkg)
if (link_target) {
// Use the link - create symlink to link target
link.sync_one(pkg, link_target)
return true
}
var info = Shop.resolve_package_info(pkg)
if (info == 'local') {
// Replace whatever occupies target_dir with a symlink to the local path.
if (fd.is_link(target_dir))
fd.unlink(target_dir)
// Recursive remove (flag 1), consistent with install_zip/Shop.remove:
// a previously extracted package directory is non-empty, so a plain
// rmdir(target_dir) would fail here.
if (fd.is_dir(target_dir))
fd.rmdir(target_dir, 1)
fd.symlink(pkg, target_dir)
return true
}
// Check if already extracted at correct commit
var lock = Shop.load_lock()
var lock_entry = lock[pkg]
var extracted_commit_file = null
var extracted_commit = null
if (lock_entry && lock_entry.commit) {
// The .cell_commit marker records which commit is currently on disk.
extracted_commit_file = target_dir + '/.cell_commit'
if (fd.is_file(extracted_commit_file)) {
extracted_commit = trim(text(fd.slurp(extracted_commit_file)))
if (extracted_commit == lock_entry.commit) {
// Already extracted at this commit, skip
return true
}
}
}
var zip_blob = get_package_zip(pkg)
if (!zip_blob) {
print("No zip blob available for " + pkg)
disrupt
}
// Extract zip for remote package
install_zip(zip_blob, target_dir)
// Write marker file with the extracted commit
if (lock_entry && lock_entry.commit) {
fd.slurpwrite(target_dir + '/.cell_commit', stone(blob(lock_entry.commit)))
}
return true
}
// Obtain the zip blob for a locked package: cache first, then network.
// Returns null when the package has no lock entry or no locked commit.
function get_package_zip(pkg)
{
var lock = Shop.load_lock()
var entry = lock[pkg]
if (!entry || !entry.commit)
return null
var cached = get_cached_zip(pkg, entry.commit)
if (cached)
return cached
// Cache miss: fetch the archive for the locked commit.
return download_zip(pkg, entry.commit)
}
// Update: Check for new version, update lock, fetch and extract
// Returns the new lock entry if updated, null if already up to date or failed
Shop.update = function(pkg) {
var lock = Shop.load_lock()
var prev = lock[pkg]
var kind = Shop.resolve_package_info(pkg)
log.console(`checking ${pkg}`)
var entry = null
if (kind == 'local') {
// A local package must exist on disk; it always gets a lock entry.
if (!fd.is_dir(pkg)) {
log.console(` Local path does not exist: ${pkg}`)
return null
}
entry = {
type: 'local',
updated: time.number()
}
lock[pkg] = entry
Shop.save_lock(lock)
return entry
}
var local_commit = prev ? prev.commit : null
var remote_commit = fetch_remote_hash(pkg)
log.console(`local commit: ${local_commit}`)
log.console(`remote commit: ${remote_commit}`)
if (!remote_commit) {
log.error("Could not resolve commit for " + pkg)
return null
}
// Up to date: nothing to record.
if (local_commit == remote_commit)
return null
entry = {
type: kind,
commit: remote_commit,
updated: time.number()
}
lock[pkg] = entry
Shop.save_lock(lock)
return entry
}
// Extract a package zip archive into target_dir, stripping the first path
// component of every entry (the archive's top-level folder).
function install_zip(zip_blob, target_dir) {
var zip = miniz.read(zip_blob)
if (!zip) { print("Failed to read zip archive"); disrupt }
// Clear out whatever currently occupies the target path.
if (fd.is_link(target_dir)) fd.unlink(target_dir)
if (fd.is_dir(target_dir)) fd.rmdir(target_dir, 1)
log.console("Extracting to " + target_dir)
ensure_dir(target_dir)
var count = zip.count()
// Memoize directories already created to avoid repeated ensure_dir calls.
var created_dirs = {}
var i = 0
var filename = null
var slash_pos = null
var rel_path = null
var full_path = null
var dir_path = null
var file_data = null
for (i = 0; i < count; i++) {
if (zip.is_directory(i)) continue
filename = zip.get_filename(i)
// Entries without any '/' sit outside the top-level folder and are
// skipped; so are entries that end right after the first slash.
slash_pos = search(filename, '/')
if (slash_pos == null) continue
if (slash_pos + 1 >= length(filename)) continue
// Path relative to the stripped top-level directory.
rel_path = text(filename, slash_pos + 1)
full_path = target_dir + '/' + rel_path
dir_path = fd.dirname(full_path)
if (!created_dirs[dir_path]) {
ensure_dir(dir_path)
created_dirs[dir_path] = true
}
file_data = zip.slurp(filename)
stone(file_data)
fd.slurpwrite(full_path, file_data)
}
}
// High-level: purge a package from the shop — its lock entry, its
// extracted files (or symlink), and any built dynamic libraries.
Shop.remove = function(pkg) {
var lock = Shop.load_lock()
if (lock[pkg]) {
delete lock[pkg]
Shop.save_lock(lock)
}
var safe = safe_package_path(pkg)
// Remove package symlink/directory
var pkg_dir = get_packages_dir() + '/' + safe
if (fd.is_link(pkg_dir)) {
fd.unlink(pkg_dir)
} else if (fd.is_dir(pkg_dir)) {
fd.rmdir(pkg_dir, 1)
}
// Remove built dylibs
var lib_dir = global_shop_path + '/lib/' + safe
if (fd.is_dir(lib_dir)) {
fd.rmdir(lib_dir, 1)
}
log.console("Removed " + pkg)
return true
}
// Ensure a package has a lock entry, creating one (resolving the remote
// commit when the package is not local) if absent. Disrupts on an invalid
// package or an unresolvable commit. Returns the lock entry.
Shop.get = function(pkg) {
var lock = Shop.load_lock()
var info = null
var commit = null
if (!lock[pkg]) {
info = Shop.resolve_package_info(pkg)
if (!info) {
print("Invalid package: " + pkg); disrupt
}
commit = null
if (info != 'local') {
commit = fetch_remote_hash(pkg)
if (!commit) {
print("Could not resolve commit for " + pkg); disrupt
}
}
lock[pkg] = {
type: info,
commit: commit,
updated: time.number()
}
Shop.save_lock(lock)
}
// Return the (possibly just-created) entry so callers can inspect it;
// previously this function returned nothing.
return lock[pkg]
}
// Reload hooks: invalidate cached modules when their source files change.
// Hot-reload the module that a changed source file belongs to.
// Non-module files are ignored.
Shop.file_reload = function(file)
{
var info = Shop.file_info(file)
if (!info.is_module) return
Shop.module_reload(info.name, info.package)
}
// Hot-reload a single module identified by (path, package).
// No-op unless the module is already loaded. When old and new module
// values are both objects, the old object is patched in place so that
// existing references held by other modules observe the update.
Shop.module_reload = function(path, package) {
if (!Shop.is_loaded(path,package)) return
// Clear the module info cache for this path
var lookup_key = package ? package + ':' + path : ':' + path
module_info_cache[lookup_key] = null
// Close old dylib handle if any
var old_dylib_path = null
if (package) {
old_dylib_path = get_dylib_path(package, path)
if (open_dls[old_dylib_path]) {
os.dylib_close(open_dls[old_dylib_path])
open_dls[old_dylib_path] = null
}
}
var info = resolve_module_info(path, package)
if (!info) return
var cache_key = info.cache_key
var old = use_cache[cache_key]
// Drop the cached value so get_module re-executes from source.
use_cache[cache_key] = null
var newmod = get_module(path, package)
use_cache[cache_key] = newmod
if (old && is_object(old) && is_object(newmod)) {
// In-place patch: copy new members over, null out vanished ones, then
// keep the OLD object identity in the cache.
arrfor(array(newmod), function(k) { old[k] = newmod[k] })
arrfor(array(old), function(k) {
if (!(k in newmod)) old[k] = null
})
use_cache[cache_key] = old
}
}
// Collect all script sources (.cm modules and .ce actors) in a package.
function get_package_scripts(package)
{
var scripts = []
arrfor(pkg_tools.list_files(package), function(file, i) {
if (ends_with(file, '.cm') || ends_with(file, '.ce'))
push(scripts, file)
})
return scripts
}
Shop.build_package_scripts = function(package)
{
// Compile every .cm/.ce file in a package, tolerating individual
// failures, and report how many succeeded.
var scripts = get_package_scripts(package)
var pkg_dir = starts_with(package, '/') ? package : get_package_abs_dir(package)
var failed = []
var built = 0
arrfor(scripts, function(script, i) {
var compile_one = function() {
resolve_mod_fn(pkg_dir + '/' + script, package)
built = built + 1
} disruption {
// Record the failing script and keep going.
push(failed, script)
}
compile_one()
})
return {ok: built, errors: failed, total: length(scripts)}
}
Shop.get_package_scripts = get_package_scripts
// List all package names recorded in the lock file.
Shop.list_packages = function()
{
var lock = Shop.load_lock()
return array(lock)
}
// Get the lib directory for dynamic libraries
Shop.get_lib_dir = function() {
return global_shop_path + '/lib'
}
Shop.ensure_dir = ensure_dir
// Directory for machine-local data under the shop root.
Shop.get_local_dir = function() {
return global_shop_path + "/local"
}
// Get the build cache directory
Shop.get_build_dir = function() {
return global_shop_path + '/build'
}
// Get the absolute path for a package
// NOTE(review): this redefines the earlier Shop.get_package_dir (which used
// the raw package name); this later definition with safe_package_path wins.
Shop.get_package_dir = function(pkg) {
return get_packages_dir() + '/' + safe_package_path(pkg)
}
// Generate C symbol name for a file within a package
// e.g., c_symbol_for_file('gitea.pockle.world/john/prosperon', 'sprite.c')
// -> 'js_gitea_pockle_world_john_prosperon_sprite_use'
Shop.c_symbol_for_file = function(pkg, file) {
// Actor sources (.ce) export a _program entry; modules export _use.
var is_actor = ends_with(file, '.ce')
var suffix = is_actor ? '_program' : '_use'
return 'js_' + safe_c_name(pkg) + '_' + safe_c_name(fd.stem(file)) + suffix
}
// Generate C symbol prefix for a package
// e.g., c_symbol_prefix('core') -> 'js_core_'
Shop.c_symbol_prefix = function(pkg) {
return 'js_' + safe_c_name(pkg) + '_'
}
// Get the library directory name for a package
Shop.lib_name_for_package = function(pkg) {
return safe_package_path(pkg)
}
// Get the deterministic dylib path for a module (public API)
Shop.get_dylib_path = function(pkg, stem) {
return get_dylib_path(pkg, stem)
}
// Get the deterministic mach path for a module (public API)
Shop.get_mach_path = function(pkg, stem) {
return get_mach_path(pkg, stem)
}
// Load a module explicitly as mach bytecode, bypassing dylib resolution.
// Returns the loaded module value. Disrupts if the module cannot be found.
// Resolution order for the compiled blob:
//   1. installed .mach in lib/ for the owning package
//   2. content-addressed mach blob in the build cache
//   3. cached mcode IR, compiled down to mach
//   4. full compile from source (analyze -> mcode -> streamline -> mach)
Shop.load_as_mach = function(path, pkg) {
var locator = resolve_locator(path + '.cm', pkg)
var file_path = null
var content = null
var content_key = null
var cached = null
var cached_mcode_path = null
var mcode_json = null
var compiled = null
var ast = null
var ir = null
var optimized = null
var pkg_dir = null
var stem = null
var mach_path = null
var file_info = null
var inject = null
var env = null
if (!locator) { print('Module ' + path + ' not found'); disrupt }
file_path = locator.path
content = text(fd.slurp(file_path))
// Frozen blob of the source text; keys the content-addressed caches.
content_key = stone(blob(content))
// Try installed .mach in lib/
if (pkg) {
pkg_dir = get_packages_dir() + '/' + safe_package_path(pkg)
if (starts_with(file_path, pkg_dir + '/')) {
// stem = path of the file relative to the package directory.
stem = text(file_path, length(pkg_dir) + 1)
mach_path = get_mach_path(pkg, stem)
if (fd.is_file(mach_path)) {
compiled = fd.slurp(mach_path)
}
}
}
// Try cached mach blob
if (!compiled) {
cached = pull_from_cache(content_key)
if (cached) compiled = cached
}
// Try cached mcode -> compile to mach
if (!compiled) {
// The mcode cache entry is keyed by source content plus an "\nmcode" tag.
cached_mcode_path = global_shop_path + '/build/' + content_hash(stone(blob(text(content_key) + "\nmcode")))
if (fd.is_file(cached_mcode_path)) {
mcode_json = text(fd.slurp(cached_mcode_path))
compiled = mach_compile_mcode_bin(file_path, mcode_json)
put_into_cache(content_key, compiled)
}
}
// Full compile from source
if (!compiled) {
// Compiler stages are modules themselves and must already be loaded.
if (!_mcode_mod) _mcode_mod = use_cache['core/mcode'] || use_cache['mcode']
if (!_streamline_mod) _streamline_mod = use_cache['core/streamline'] || use_cache['streamline']
if (!_mcode_mod || !_streamline_mod) {
print('error: compiler modules not loaded')
disrupt
}
ast = analyze(content, file_path)
ir = _mcode_mod(ast)
optimized = _streamline_mod(ir)
mcode_json = shop_json.encode(optimized)
// Persist the intermediate mcode so future loads can skip analysis.
cached_mcode_path = global_shop_path + '/build/' + content_hash(stone(blob(text(content_key) + "\nmcode")))
ensure_dir(global_shop_path + '/build')
fd.slurpwrite(cached_mcode_path, stone(blob(mcode_json)))
compiled = mach_compile_mcode_bin(file_path, mcode_json)
put_into_cache(content_key, compiled)
}
// Load the mach blob with proper env
file_info = Shop.file_info(file_path)
inject = Shop.script_inject_for(file_info)
env = inject_env(inject)
env.use = make_use_fn(file_info.package)
env = stone(env)
return mach_load(compiled, env)
}
// Load a module explicitly as a native dylib, bypassing mach resolution.
// Returns the loaded module value, or null if no dylib exists.
Shop.load_as_dylib = function(path, pkg) {
var locator = resolve_locator(path + '.cm', pkg)
var file_path = null
var file_info = null
var pkg_dir = null
var stem = null
var result = null
var real_pkg = pkg
var inject = null
var env = null
if (!locator) { print('Module ' + path + ' not found'); disrupt }
file_path = locator.path
// Infer the owning package from the file's location when not supplied.
if (!real_pkg) {
file_info = Shop.file_info(file_path)
real_pkg = file_info.package
}
// Dylibs only exist for files inside an installed package directory.
if (!real_pkg) return null
pkg_dir = get_packages_dir() + '/' + safe_package_path(real_pkg)
if (!starts_with(file_path, pkg_dir + '/')) return null
// stem = path of the file relative to the package directory.
stem = text(file_path, length(pkg_dir) + 1)
result = try_native_mod_dylib(real_pkg, stem)
if (!result) return null
// Build env and load the native module
if (!file_info) file_info = Shop.file_info(file_path)
inject = Shop.script_inject_for(file_info)
env = inject_env(inject)
env.use = make_use_fn(real_pkg)
env = stone(env)
return os.native_module_load_named(result._handle, result._sym, env)
}
// Audit: list lock-file packages that are neither core, nor present as a
// local directory, nor resolvable on their remote.
Shop.audit_packages = function() {
var bad = []
arrfor(Shop.list_packages(), function(pkg, i) {
if (pkg == 'core') return
if (fd.is_dir(pkg)) return
if (fetch_remote_hash(pkg)) return
push(bad, pkg)
})
return bad
}
// Parse a package locator and return info about it
// Returns { path: canonical_path, name: package_name, type: 'local'|'gitea'|null }
Shop.parse_package = function(locator) {
if (!locator) return null
// Drop any '@version' suffix before resolving.
var clean = locator
if (search(locator, '@') != null)
clean = array(locator, '@')[0]
var info = Shop.resolve_package_info(clean)
if (!info) return null
// The package name is the final path component.
var parts = array(clean, '/')
return {
path: clean,
name: parts[length(parts) - 1],
type: info
}
}
// Compile and load a native (C) source file as a module.
// Resolves the owning package from the file's location (falling back to
// package_context), builds a dylib via the core 'build' module, and loads
// it with an injected, frozen environment.
Shop.use_native = function(path, package_context) {
var src_path = path
// Normalize relative paths to absolute before file checks.
if (!starts_with(path, '/'))
src_path = fd.realpath(path)
if (!fd.is_file(src_path)) { print('File not found: ' + path); disrupt }
var file_info = Shop.file_info(src_path)
var pkg = file_info.package || package_context
var sym_name = null
// With a known package we can compute the exact exported entry symbol.
if (pkg)
sym_name = Shop.c_symbol_for_file(pkg, fd.basename(src_path))
var build = Shop.use('build', 'core')
var dylib_path = build.compile_native(src_path, null, null, pkg)
var handle = os.dylib_open(dylib_path)
if (!handle) { print('Failed to open native dylib: ' + dylib_path); disrupt }
// Build env with runtime functions and capabilities
var inject = Shop.script_inject_for(file_info)
var env = inject_env(inject)
env.use = make_use_fn(pkg)
env = stone(env)
if (sym_name)
return os.native_module_load_named(handle, sym_name, env)
// No package context: fall back to the dylib's default entry point.
return os.native_module_load(handle, env)
}
return Shop