fix fetch

2026-01-09 11:49:22 -06:00
parent 86a70bce3a
commit f16586eaa2
6 changed files with 130 additions and 68 deletions

fd.c
View File

@@ -559,7 +559,7 @@ JSC_CCALL(fd_slurpwrite,
   size_t len;
   const char *data = js_get_blob_data(js, &len, argv[1]);
-  if (!data)
+  if (!data && len > 0)
     return JS_EXCEPTION;
   const char *str = JS_ToCString(js, argv[0]);
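The guard distinguishes "not a blob" from "empty blob": js_get_blob_data (fixed at the bottom of this commit) now always reports a length, so a NULL data pointer with zero length means an empty blob and falls through to write a zero-byte file. A sketch of the intended scripting-side behavior; the empty-blob construction here is an assumption, not confirmed API:

    var empty = new blob('')                 // assumed: zero-length blob with NULL data
    fd.slurpwrite('out/empty.bin', empty)    // should now write an empty file instead of throwing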

View File

@@ -45,42 +45,45 @@ if (target_pkg) {
   packages_to_fetch = all_packages
 }
-log.console("Fetching " + text(packages_to_fetch.length) + " package(s)...")
+var remote_count = 0
+for (var pkg of packages_to_fetch) {
+  var entry = lock[pkg]
+  if (pkg != 'core' && (!entry || entry.type != 'local'))
+    remote_count++
+}
-var success_count = 0
-var skip_count = 0
+if (remote_count > 0)
+  log.console(`Fetching ${text(remote_count)} remote package(s)...`)
+var downloaded_count = 0
+var cached_count = 0
 var fail_count = 0
 for (var pkg of packages_to_fetch) {
-  var entry = lock[pkg]
-  // Skip local packages
-  if (entry && entry.type == 'local') {
-    skip_count++
-    continue
-  }
-  // Skip core (handled separately)
-  if (pkg == 'core') {
-    skip_count++
-    continue
-  }
+  if (pkg == 'core') continue
   var result = shop.fetch(pkg)
-  if (result) {
-    if (result.zip_blob) {
-      log.console("Fetched: " + pkg)
-      success_count++
-    } else {
-      skip_count++
-    }
-  } else {
-    log.error("Failed to fetch: " + pkg)
+  if (result.status == 'local') {
+    // Local packages are just symlinks, nothing to fetch
+    continue
+  } else if (result.status == 'cached') {
+    cached_count++
+  } else if (result.status == 'downloaded') {
+    log.console(" Downloaded: " + pkg)
+    downloaded_count++
+  } else if (result.status == 'error') {
+    log.error(" Failed: " + pkg + (result.message ? " - " + result.message : ""))
     fail_count++
   }
 }
 log.console("")
-log.console("Fetch complete: " + text(success_count) + " fetched, " + text(skip_count) + " skipped, " + text(fail_count) + " failed")
+var parts = []
+if (downloaded_count > 0) parts.push(`${text(downloaded_count)} downloaded`)
+if (cached_count > 0) parts.push(`${text(cached_count)} cached`)
+if (fail_count > 0) parts.push(`${text(fail_count)} failed`)
+if (parts.length == 0) parts.push("nothing to fetch")
+log.console("Fetch complete: " + parts.join(", "))
 $stop()
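A worked example of the new summary line: with downloaded_count = 2, cached_count = 5, and fail_count = 0, parts becomes ["2 downloaded", "5 cached"] and the script prints "Fetch complete: 2 downloaded, 5 cached"; when every package is local or core, parts stays empty and it prints "Fetch complete: nothing to fetch".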

View File

@@ -101,7 +101,18 @@ function gather_packages(pkg_locator) {
   var lock = shop.load_lock()
   if (!lock[pkg_locator]) {
     if (!dry_run) {
-      shop.update(pkg_locator)
+      var update_result = shop.update(pkg_locator)
+      if (update_result) {
+        shop.extract(pkg_locator)
+      } else {
+        // Update failed - package might not be fetchable
+        log.console("Warning: Could not fetch " + pkg_locator)
+        return
+      }
     }
+  } else {
+    // Package is in lock, ensure it's extracted
+    if (!dry_run) {
+      shop.extract(pkg_locator)
+    }
   }
@@ -116,7 +127,7 @@ function gather_packages(pkg_locator) {
   } catch (e) {
     // Package might not have dependencies or cell.toml issue
     if (!dry_run) {
-      log.console("Warning: Could not read dependencies for " + pkg_locator + ": " + e.message)
+      log.console(`Warning: Could not read dependencies for ${pkg_locator}: ${e.message}`)
     }
   }
 }
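The shape of this fix, resolve via update only when the package is missing from the lock and extract either way, reads as a small "ensure" step. A minimal sketch using only calls that appear in this commit (the helper name is mine, not the repo's):

    function ensure_package(pkg) {
      var lock = shop.load_lock()
      if (!lock[pkg] && !shop.update(pkg))
        return false               // no lock entry and no resolvable remote commit
      return shop.extract(pkg)     // cheap once the .cell_commit marker matches
    }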

View File

@@ -869,15 +869,12 @@ function download_zip(pkg, commit_hash) {
     return null
   }
   log.console("Downloading from " + download_url)
   try {
     var zip_blob = http.fetch(download_url)
-    log.console(`putting to ${cache_path}`)
     fd.slurpwrite(cache_path, zip_blob)
     log.console("Cached to " + cache_path)
     return zip_blob
   } catch (e) {
-    log.error(e)
+    log.error("Download failed for " + pkg + ": " + e)
     return null
   }
 }
@@ -892,19 +889,22 @@ function get_cached_zip(pkg, commit_hash) {
 }
 // Fetch: Ensure the zip on disk matches what's in the lock file
-// For local packages, this is a no-op (returns true)
+// For local packages, this is a no-op
 // For remote packages, downloads the zip if not present or hash mismatch
-// Returns true on success
+// Returns: { status: 'local'|'cached'|'downloaded'|'error', message: string }
 Shop.fetch = function(pkg) {
   var lock = Shop.load_lock()
   var lock_entry = lock[pkg]
   var info = Shop.resolve_package_info(pkg)
-  if (info == 'local') return null
+  if (info == 'local') {
+    return { status: 'local' }
+  }
   // No lock entry - can't fetch without knowing what commit
-  if (!lock_entry || !lock_entry.commit)
-    throw new Error("No lock entry for " + pkg + " - run update first")
+  if (!lock_entry || !lock_entry.commit) {
+    return { status: 'error', message: "No lock entry for " + pkg + " - run update first" }
+  }
   var commit = lock_entry.commit
   var expected_hash = lock_entry.zip_hash
@@ -913,18 +913,34 @@ Shop.fetch = function(pkg) {
   var zip_blob = get_cached_zip(pkg, commit)
   if (zip_blob) {
-    // Verify hash matches
+    // If we have a hash on record, verify it
+    if (expected_hash) {
       var actual_hash = text(crypto.blake2(zip_blob), 'h')
-      if (actual_hash == expected_hash)
-        return true
+      if (actual_hash == expected_hash) {
+        return { status: 'cached' }
+      }
       log.console("Zip hash mismatch for " + pkg + ", re-fetching...")
+    } else {
+      // No hash stored yet - compute and store it
+      var actual_hash = text(crypto.blake2(zip_blob), 'h')
+      lock_entry.zip_hash = actual_hash
+      Shop.save_lock(lock)
+      return { status: 'cached' }
+    }
   }
   // Download the zip
-  download_zip(pkg, commit)
+  var new_zip = download_zip(pkg, commit)
+  if (!new_zip) {
+    return { status: 'error', message: "Failed to download " + pkg }
+  }
-  return true
+  // Store the hash
+  var new_hash = text(crypto.blake2(new_zip), 'h')
+  lock_entry.zip_hash = new_hash
+  Shop.save_lock(lock)
+  return { status: 'downloaded' }
 }

 // Extract: Extract a package to its target directory
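A minimal caller against the new contract, mirroring the console command earlier in this commit:

    var r = shop.fetch(pkg)
    if (r.status == 'error')
      log.error(r.message)                 // covers missing lock entry and failed download
    else if (r.status != 'local')
      log.console(pkg + ': ' + r.status)   // 'cached' or 'downloaded'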
@@ -955,6 +971,20 @@ Shop.extract = function(pkg) {
     return true
   }
+  // Check if already extracted at correct commit
+  var lock = Shop.load_lock()
+  var lock_entry = lock[pkg]
+  if (lock_entry && lock_entry.commit) {
+    var extracted_commit_file = target_dir + '/.cell_commit'
+    if (fd.is_file(extracted_commit_file)) {
+      var extracted_commit = text(fd.slurp(extracted_commit_file)).trim()
+      if (extracted_commit == lock_entry.commit) {
+        // Already extracted at this commit, skip
+        return true
+      }
+    }
+  }
   var zip_blob = get_package_zip(pkg)
   if (!zip_blob)
@@ -962,6 +992,12 @@
   // Extract zip for remote package
   install_zip(zip_blob, target_dir)
+  // Write marker file with the extracted commit
+  if (lock_entry && lock_entry.commit) {
+    fd.slurpwrite(target_dir + '/.cell_commit', stone(new blob(lock_entry.commit)))
+  }
   return true
 }
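The marker round-trip in isolation, using only calls that appear above; dir and commit stand in for target_dir and lock_entry.commit:

    var marker = dir + '/.cell_commit'
    fd.slurpwrite(marker, stone(new blob(commit)))            // written after a successful extract
    var up_to_date = fd.is_file(marker) &&
                     text(fd.slurp(marker)).trim() == commit  // checked before the next extract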
@@ -1015,14 +1051,14 @@ Shop.update = function(pkg) {
   log.console(`local commit: ${local_commit}`)
   log.console(`remote commit: ${remote_commit}`)
-  if (local_commit == remote_commit)
-    return null
   if (!remote_commit) {
     log.error("Could not resolve commit for " + pkg)
     return null
   }
+  if (local_commit == remote_commit)
+    return null
   var new_entry = {
     type: info,
     commit: remote_commit,
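Why the reorder matters: when the remote cannot be resolved, remote_commit is undefined, and if local_commit is also undefined (no prior lock entry) the old order satisfied local_commit == remote_commit first and returned null silently, masking the failure. Checking !remote_commit first surfaces the error before the up-to-date short-circuit can swallow it.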
@@ -1046,6 +1082,7 @@ function install_zip(zip_blob, target_dir) {
   ensure_dir(target_dir)
   var count = zip.count()
+  var created_dirs = {}
   for (var i = 0; i < count; i++) {
     if (zip.is_directory(i)) continue
@@ -1058,8 +1095,15 @@
     var full_path = target_dir + '/' + rel_path
     var dir_path = full_path.substring(0, full_path.lastIndexOf('/'))
+    if (!created_dirs[dir_path]) {
       ensure_dir(dir_path)
-      fd.slurpwrite(full_path, zip.slurp(filename))
+      created_dirs[dir_path] = true
+    }
+    var file_data = zip.slurp(filename)
+    stone(file_data)
+    fd.slurpwrite(full_path, file_data)
   }
 }
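Two independent changes land in this loop: ensure_dir is memoized so it runs once per directory rather than once per file, and the file bytes pass through stone() before the write, matching how the .cell_commit marker is written above (that stone seals the blob is an inference from its other uses here). The memo pattern in isolation, with a helper name of my own:

    var created_dirs = {}
    function ensure_dir_once(dir) {
      if (created_dirs[dir]) return
      ensure_dir(dir)              // helper assumed from this file
      created_dirs[dir] = true
    }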

View File

@@ -30,9 +30,13 @@ package.load_config = function(name)
 {
   var config_path = get_path(name) + '/cell.toml'
   if (!fd.is_file(config_path))
-    throw new Error(`${config_path} isn't a path`)
+    throw new Error(`${config_path} does not exist`)
-  return toml.decode(text(fd.slurp(config_path)))
+  var content = text(fd.slurp(config_path))
+  if (!content || content.trim().length == 0)
+    return {}
+  return toml.decode(content)
 }

 package.save_config = function(name, config)
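The effect: a cell.toml that exists but is empty now loads as an empty config instead of tripping the TOML decoder. Callers can default the tables they need; the package name below is a placeholder:

    var cfg = package.load_config('some_pkg')   // placeholder name
    var deps = cfg.deps || {}                   // an empty cell.toml now means no deps, not an error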

View File

@@ -39142,6 +39142,7 @@ JSValue js_new_blob_stoned_copy(JSContext *js, void *data, size_t bytes)
 void *js_get_blob_data(JSContext *js, size_t *size, JSValue v)
 {
   blob *b = js_get_blob(js, v);
+  *size = b ? (b->length + 7) / 8 : 0;
   if (!b) {
     JS_ThrowReferenceError(js, "get_blob_data: not called on a blob");
     return NULL;
@@ -39157,7 +39158,6 @@ void *js_get_blob_data(JSContext *js, size_t *size, JSValue v)
     return NULL;
   }
-  *size = (b->length + 7) / 8;
   return b->data;
 }
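With the assignment hoisted above every early return (and guarded, since b can be NULL), callers such as fd_slurpwrite always receive a defined length, zero for a missing or empty blob. The (b->length + 7) / 8 rounding also implies blob lengths are counted in bits: a 12-bit blob reports (12 + 7) / 8 = 2 bytes, and an empty one reports 0.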