update local dirs

This commit is contained in:
2025-12-04 14:56:11 -06:00
parent 20fa012b57
commit 22d3adca93
3 changed files with 189 additions and 291 deletions

View File

@@ -584,7 +584,7 @@ function turn(msg)
send_messages()
}
log.console(`is main ${cell.args.main}`)
log.console(`FIXME: need to get main from config, not just set to true`)
actor_mod.register_actor(cell.id, turn, true, config.system.ar_timer)
if (config.system.actor_memory)

View File

@@ -617,9 +617,28 @@ JSC_CCALL(fd_is_link,
if (!path) return JS_EXCEPTION;
struct stat st;
if (lstat(path, &st) == 0 && S_ISLNK(st.st_mode))
return JS_NewBool(js, 1);
return JS_NewBool(js, 0);
int is_link = (lstat(path, &st) == 0 && S_ISLNK(st.st_mode));
JS_FreeCString(js, path);
return JS_NewBool(js, is_link);
)
/* fd.readlink(path): return the target of the symbolic link `path` as a JS
 * string. Throws an InternalError on Windows (no readlink(2) there) or when
 * readlink(2) fails; the C string borrowed from argv[0] is freed on every
 * path through the function. */
JSC_CCALL(fd_readlink,
const char *path = JS_ToCString(js, argv[0]);
if (!path) return JS_EXCEPTION;
#ifdef _WIN32
JS_FreeCString(js, path);
return JS_ThrowInternalError(js, "readlink not supported on Windows");
#else
char buf[PATH_MAX];
/* readlink(2) does not NUL-terminate; reserve one byte for the '\0' below */
ssize_t len = readlink(path, buf, sizeof(buf) - 1);
JS_FreeCString(js, path);
if (len < 0) {
return JS_ThrowInternalError(js, "readlink failed: %s", strerror(errno));
}
buf[len] = '\0';
return JS_NewString(js, buf);
#endif
)
static const JSCFunctionListEntry js_fd_funcs[] = {
@@ -646,6 +665,7 @@ static const JSCFunctionListEntry js_fd_funcs[] = {
MIST_FUNC_DEF(fd, enumerate, 2),
MIST_FUNC_DEF(fd, symlink, 2),
MIST_FUNC_DEF(fd, realpath, 1),
MIST_FUNC_DEF(fd, readlink, 1),
};
JSValue js_fd_use(JSContext *js) {

View File

@@ -247,6 +247,12 @@ Shop.parse_package = function(pkg) {
version = versionParts[1]
}
// Handle absolute paths (local modules)
// /User/john/mod -> User/john/mod
if (path.startsWith('/')) {
path = path.substring(1)
}
// Extract name (last part of path)
var name = path.split('/').pop()
@@ -257,6 +263,17 @@ Shop.parse_package = function(pkg) {
}
}
// Get information about how to resolve a package.
// Returns { type: 'local', path } for absolute filesystem paths,
// { type: 'gitea' } for gitea-hosted packages, { type: 'unknown' } otherwise.
Shop.resolve_package_info = function(pkg) {
  // Absolute paths are local modules; check this first so a local path
  // that happens to contain "gitea." is still treated as local.
  if (pkg.startsWith('/')) return { type: 'local', path: pkg }
  return pkg.includes('gitea.') ? { type: 'gitea' } : { type: 'unknown' }
}
// Verify if a package name is valid and return status
Shop.verify_package_name = function(pkg) {
if (!pkg) throw new Error("Empty package name")
@@ -269,6 +286,9 @@ Shop.verify_package_name = function(pkg) {
// Convert module package to download URL
Shop.get_download_url = function(pkg, commit_hash) {
var info = Shop.resolve_package_info(pkg)
if (info.type == 'local') return null
var parsed = Shop.parse_package(pkg)
if (!parsed) return null
@@ -308,6 +328,9 @@ Shop.remove_dependency = function(alias) {
// Get the API URL for checking remote git commits
Shop.get_api_url = function(pkg) {
var info = Shop.resolve_package_info(pkg)
if (info.type == 'local') return null
var parsed = Shop.parse_package(pkg)
if (!parsed) return null
@@ -610,6 +633,7 @@ function resolve_c_symbol(path, package_context)
var pkg_safe = canon_pkg.replace(/\//g, '_').replace(/-/g, '_').replace(/\./g, '_')
var sym_name = `js_${pkg_safe}_${mod_sym}_use`
log.console(`looking for ${sym_name}`)
if (fd.is_file(dl_path)) {
if (!open_dls[dl_path]) open_dls[dl_path] = os.dylib_open(dl_path)
@@ -650,6 +674,8 @@ Shop.use = function(path, package_context) {
var c_resolve = resolve_c_symbol(path, package_context) || {scope:999}
var mod_resolve = resolve_locator(path, '.cm', package_context) || {scope:999}
log.console(`c_resolve: ${json.encode(c_resolve)}, mod_resolve: ${json.encode(mod_resolve)} for ${path} in package ${package_context}`)
var min_scope = Math.min(c_resolve.scope, mod_resolve.scope)
if (min_scope == 999)
@@ -750,135 +776,19 @@ function verify_zip_contents(zip, target_dir) {
return true
}
// Install from a raw package (not from config)
// pkg: package identifier string (see Shop.parse_package)
// locked_hash: commit hash pinned by the lock file, or null to resolve it remotely
// expected_zip_hash: blake2 hex digest the cached zip must match, or null to skip that check
// Returns { commit, package, path, zip_hash } on success, null on failure.
// NOTE(review): throws (rather than returning null) if the zip archive is unreadable.
function install_from_package(pkg, locked_hash, expected_zip_hash) {
var parsed = Shop.parse_package(pkg)
var target_dir = '.cell/modules/' + parsed.path
// 1. Get Commit Hash - use locked hash if provided, otherwise fetch
var commit_hash = locked_hash
if (!commit_hash) {
var api_url = Shop.get_api_url(pkg)
if (api_url) {
try {
var resp = http.fetch(api_url)
var resp_text = text(resp)
commit_hash = Shop.extract_commit_hash(pkg, resp_text)
} catch (e) {
// Best-effort: fall through to the !commit_hash error below
log.console("Warning: Failed to fetch API info: " + e)
}
}
} else {
log.console("Using locked commit: " + commit_hash)
}
if (!commit_hash) {
log.error("Could not determine commit hash for " + pkg)
return null
}
// 2. Check Cache / Download Zip
var cache_path = get_cache_path(pkg, commit_hash)
var zip_blob = null
var zip_hash = null
var use_cache = false
if (fd.is_file(cache_path)) {
log.console("Found cached zip: " + cache_path)
try {
var cached = fd.slurp(cache_path)
// Verify the cached zip against the hash recorded in the lock file;
// on mismatch, fall through to a fresh download below.
var computed_hash = text(crypto.blake2(cached), 'h')
if (expected_zip_hash && computed_hash != expected_zip_hash) {
log.console("Cache hash mismatch. Expected: " + expected_zip_hash + ", Got: " + computed_hash)
log.console("Redownloading...")
} else {
zip_blob = cached
zip_hash = computed_hash
use_cache = true
}
} catch (e) {
log.error("Failed to read cache: " + e)
}
}
if (!use_cache) {
var download_url = Shop.get_download_url(pkg, commit_hash)
if (!download_url) {
log.error("Could not determine download URL for " + pkg)
return null
}
log.console("Downloading from " + download_url)
try {
zip_blob = http.fetch(download_url)
zip_hash = text(crypto.blake2(zip_blob), 'h')
// Save to cache
ensure_dir(cache_path.substring(0, cache_path.lastIndexOf('/')))
fd.slurpwrite(cache_path, zip_blob)
log.console("Cached to " + cache_path)
} catch (e) {
log.error(e)
return null
}
}
// 3. Verify and Unpack
var zip = miniz.read(zip_blob)
if (!zip) throw new Error("Failed to read zip archive")
// A freshly downloaded zip is always unpacked; a cached one only when the
// on-disk installation is missing or fails verification.
var needs_unpack = !use_cache
// If using cache, verify existing installation strictly
if (use_cache && fd.is_dir(target_dir)) {
if (!verify_zip_contents(zip, target_dir)) {
log.console("Verification failed for " + pkg + ". Reinstalling...")
needs_unpack = true
}
} else if (use_cache && !fd.is_dir(target_dir)) {
needs_unpack = true
}
if (needs_unpack) {
if (fd.is_dir(target_dir)) {
log.console("Clearing module directory for fresh install...")
rm_recursive(target_dir)
}
log.console("Unpacking to " + target_dir)
ensure_dir(target_dir)
var count = zip.count()
for (var i = 0; i < count; i++) {
if (zip.is_directory(i)) continue
var filename = zip.get_filename(i)
log.console(filename)
var parts = filename.split('/')
// Strip the top-level directory the archive wraps its contents in;
// entries without a subdirectory component are skipped.
if (parts.length > 1) {
parts.shift()
var rel_path = parts.join('/')
var full_path = target_dir + '/' + rel_path
var dir_path = full_path.substring(0, full_path.lastIndexOf('/'))
ensure_dir(dir_path)
var content = zip.slurp(filename)
fd.slurpwrite(full_path, content)
}
}
} else {
log.console("Verified existing installation.")
}
return { commit: commit_hash, package: pkg, path: parsed.path, zip_hash: zip_hash }
}
// High-level: Add a package, install it, and install all transitive dependencies
// Like `bun add` or `npm install <pkg>`
Shop.get = function(pkg, alias) {
Shop.init()
if (fd.is_dir(pkg)) {
log.console("Found directory: " + pkg)
pkg = fd.realpath(pkg)
log.console("Resolved to: " + pkg)
}
var info = Shop.resolve_package_info(pkg)
if (info.type == 'unknown') {
log.error("Could not resolve package: " + pkg)
return false
}
var parsed = Shop.parse_package(pkg)
if (!alias) alias = parsed.name
@@ -889,129 +799,159 @@ Shop.get = function(pkg, alias) {
if (!config.dependencies) config.dependencies = {}
config.dependencies[alias] = pkg
Shop.save_config(config)
// Install the package and dependencies
var queue = [pkg]
var processed = {}
var lock = Shop.load_lock(null)
while (queue.length > 0) {
var current_pkg = queue.shift()
if (processed[current_pkg]) continue
processed[current_pkg] = true
log.console("Installing " + current_pkg + "...")
var lock_info = lock[current_pkg] || lock[Shop.parse_package(current_pkg).name]
var locked_hash = lock_info ? lock_info.commit : null
var zip_hash = lock_info ? lock_info.zip_hash : null
var result = install_from_package(current_pkg, locked_hash, zip_hash)
if (result) {
lock[current_pkg] = {
package: current_pkg,
commit: result.commit,
zip_hash: result.zip_hash,
updated: time.number()
}
// Read package config to find dependencies
var parsed = Shop.parse_package(current_pkg)
var pkg_config = Shop.load_config(parsed.path)
if (pkg_config && pkg_config.dependencies) {
for (var k in pkg_config.dependencies) {
var dep_pkg = pkg_config.dependencies[k]
if (!processed[dep_pkg]) {
queue.push(dep_pkg)
}
}
}
} else {
if (current_pkg == pkg) {
log.console("Failed to install requested package " + alias)
return false
} else {
log.console("Failed to install dependency " + current_pkg)
}
}
}
Shop.save_lock(lock)
log.console("Done.")
return true
}
// TODO: not implemented -- intended to fetch a package without installing it.
Shop.fetch = function(package)
{
}
// High-level: Update a specific package
// Like `bun update <pkg>`
// Resolves the package (local symlink vs. remote zip), performs the update,
// and records the result in the lock file.
// Returns true on success, false on failure.
Shop.update = function(pkg) {
  var config = Shop.load_config()
  var lock = Shop.load_lock()
  var parsed = Shop.parse_package(pkg)
  var info = Shop.resolve_package_info(pkg)
  var target_dir = '.cell/modules/' + parsed.path
  // If this package is replaced, update the replacement instead.
  // (Fix: the original was missing the return and the closing brace here,
  // leaving the function unbalanced.)
  if (config.replace && config.replace[pkg]) {
    log.console("Skipping update for replaced package " + pkg)
    return Shop.update(config.replace[pkg])
  }
  // Local packages are symlinked; remote packages are downloaded/unpacked.
  var result = info.type == 'local'
    ? update_local(pkg, info, target_dir)
    : update_remote(pkg, info, target_dir, lock[pkg])
  if (!result) {
    log.error("Failed to update " + parsed.path)
    return false
  }
  // Record the new state in the lock file
  lock[pkg] = {
    package: pkg,
    commit: result.commit,
    zip_hash: result.zip_hash,
    updated: time.number()
  }
  Shop.save_lock(lock)
  log.console("Updated " + parsed.path + ".")
  return true
}
// Ensure target_dir is a symlink pointing at the local source directory
// info.path. Returns a sentinel lock record
// { commit: "local", package, zip_hash: "local" } on success, null on failure.
function update_local(pkg, info, target_dir) {
// Already linked to the right place? Nothing to do. Otherwise remove the
// stale link so it can be recreated below.
if (fd.is_link(target_dir)) {
if (fd.readlink(target_dir) == info.path)
return { commit: "local", package: pkg, zip_hash: "local" }
else
fd.unlink(target_dir)
}
var parent_dir = target_dir.substring(0, target_dir.lastIndexOf('/'))
ensure_dir(parent_dir)
// NOTE(review): fd.rmdir presumably fails on a non-empty directory --
// confirm whether a previously unpacked module should use rm_recursive here.
if (fd.is_dir(target_dir)) fd.rmdir(target_dir)
try {
fd.symlink(info.path, target_dir)
log.console("Linked " + target_dir + " -> " + info.path)
return { commit: "local", package: pkg, zip_hash: "local" }
} catch(e) {
log.error("Failed to create symlink: " + e)
return null
}
}
// Update a remotely-hosted package in place.
// lock_info: the existing lock record for pkg (or undefined/null).
// Returns the (possibly unchanged) lock-style record
// { commit, package, zip_hash } on success, null on failure.
function update_remote(pkg, info, target_dir, lock_info) {
  var have = lock_info ? lock_info.commit : null
  var remote = fetch_remote_hash(pkg)
  // With neither a remote hash nor a locked one there is nothing to install
  if (!remote && !have) {
    log.error("Could not resolve commit for " + pkg)
    return null
  }
  // Prefer the freshly fetched hash; fall back to the locked one offline
  var want = remote || have
  if (have == want) {
    log.console(pkg + " is already up to date.")
    return lock_info
  }
  log.console(have
    ? "Updating " + pkg + " " + have.substring(0,8) + " -> " + want.substring(0,8)
    : "Installing " + pkg + "...")
  var blob = get_or_download_zip(pkg, want)
  if (!blob) return null
  var digest = text(crypto.blake2(blob), 'h')
  install_zip(blob, target_dir)
  return { commit: want, package: pkg, zip_hash: digest }
}
// Fetch the latest remote commit hash for pkg via its API URL.
// Returns the hash string, or null when the package has no API URL or the
// remote check fails (best-effort: failures are logged, not thrown).
// (Fix: the original was missing its closing brace, kept a dead
// `remote_hash` local, and re-tested `api_url` after already returning
// early on the falsy case.)
function fetch_remote_hash(pkg) {
  var api_url = Shop.get_api_url(pkg)
  if (!api_url) return null
  try {
    var resp = http.fetch(api_url)
    return Shop.extract_commit_hash(pkg, text(resp))
  } catch (e) {
    log.console("Warning: Could not check for updates for " + pkg)
    return null
  }
}
// Return the zip blob for pkg at commit_hash, serving from the on-disk cache
// when possible and downloading (then caching) otherwise.
// Returns the blob on success, null on failure.
// (Fix: the original had leftover diff fragments interleaved, referencing
// out-of-scope names such as api_url, remote_hash, local_hash and alias.)
function get_or_download_zip(pkg, commit_hash) {
  var cache_path = get_cache_path(pkg, commit_hash)
  // Cache hit: reuse the previously downloaded zip
  if (fd.is_file(cache_path)) {
    log.console("Found cached zip: " + cache_path)
    try {
      return fd.slurp(cache_path)
    } catch (e) {
      // Unreadable cache entry: fall through to a fresh download
      log.error("Failed to read cache: " + e)
    }
  }
  var download_url = Shop.get_download_url(pkg, commit_hash)
  if (!download_url) {
    log.error("Could not determine download URL for " + pkg)
    return null
  }
  log.console("Downloading from " + download_url)
  try {
    var zip_blob = http.fetch(download_url)
    // Cache the download for next time
    ensure_dir(cache_path.substring(0, cache_path.lastIndexOf('/')))
    fd.slurpwrite(cache_path, zip_blob)
    log.console("Cached to " + cache_path)
    return zip_blob
  } catch (e) {
    log.error(e)
    return null
  }
}
// Unpack zip_blob into target_dir, clearing any existing installation first.
// The archive's single top-level directory is stripped from every entry;
// entries with no subdirectory component are skipped.
// Throws when the blob is not a readable zip archive.
// (Fix: the original had leftover diff fragments from the old Shop.update
// body interleaved, referencing out-of-scope names such as local_hash,
// alias and lock.)
function install_zip(zip_blob, target_dir) {
  var zip = miniz.read(zip_blob)
  if (!zip) throw new Error("Failed to read zip archive")
  // Fresh install: wipe whatever is currently at target_dir
  if (fd.is_dir(target_dir)) rm_recursive(target_dir)
  log.console("Unpacking to " + target_dir)
  ensure_dir(target_dir)
  var count = zip.count()
  for (var i = 0; i < count; i++) {
    if (zip.is_directory(i)) continue
    var filename = zip.get_filename(i)
    var parts = filename.split('/')
    if (parts.length <= 1) continue
    // Drop the archive's wrapping top-level directory
    parts.shift()
    var rel_path = parts.join('/')
    var full_path = target_dir + '/' + rel_path
    var dir_path = full_path.substring(0, full_path.lastIndexOf('/'))
    ensure_dir(dir_path)
    var content = zip.slurp(filename)
    fd.slurpwrite(full_path, content)
  }
}
// High-level: Remove a package and clean up
@@ -1078,65 +1018,6 @@ Shop.remove_replacement = function(alias) {
return true
}
// Install all dependencies from config (like `bun install`)
// Breadth-first walk over config.dependencies and each installed package's
// own dependencies, installing every package once and updating the lock
// file with the resulting commit and zip hashes.
// Returns true (individual install failures are skipped, not fatal).
Shop.install_all = function() {
Shop.init()
var config = Shop.load_config()
if (!config || !config.dependencies) {
log.console("No dependencies to install.")
return true
}
var lock = Shop.load_lock(null)
var queue = []
var processed = {}
for (var alias in config.dependencies) {
queue.push(config.dependencies[alias])
}
while (queue.length > 0) {
var pkg = queue.shift()
// Each package is installed at most once per run
if (processed[pkg]) continue
processed[pkg] = true
log.console("Installing " + pkg + "...")
var lock_info = lock[pkg] || lock[Shop.parse_package(pkg).name] // Fallback to old format check
var locked_hash = lock_info ? lock_info.commit : null
var zip_hash = lock_info ? lock_info.zip_hash : null
var result = install_from_package(pkg, locked_hash, zip_hash)
if (result) {
lock[pkg] = {
package: pkg,
commit: result.commit,
zip_hash: result.zip_hash,
updated: time.number()
}
// Read package config to find dependencies
var parsed = Shop.parse_package(pkg)
var pkg_config = Shop.load_config(parsed.path)
if (pkg_config && pkg_config.dependencies) {
for (var k in pkg_config.dependencies) {
var dep_pkg = pkg_config.dependencies[k]
if (!processed[dep_pkg]) {
queue.push(dep_pkg)
}
}
}
}
}
Shop.save_lock(lock)
log.console("Done.")
return true
}
// Compile a module
// List all files in a package
Shop.list_files = function(pkg) {
@@ -1298,9 +1179,6 @@ Shop.build_package = function(package)
if (platform == 'macOS') link_flags = '-shared -undefined dynamic_lookup'
var ldflags = get_flags(config, platform, 'LDFLAGS')
log.console(platform)
log.console(ldflags)
log.console(json.encode(config))
if (ldflags != '') link_flags += ' ' + ldflags
var temp_lib = 'cellmod_temp' + dylib_ext