diff --git a/fd.c b/fd.c
index 5396a832..3709f897 100644
--- a/fd.c
+++ b/fd.c
@@ -559,7 +559,7 @@ JSC_CCALL(fd_slurpwrite,
   size_t len;
   const char *data = js_get_blob_data(js, &len, argv[1]);
 
-  if (!data)
+  if (!data && len > 0)
     return JS_EXCEPTION;
 
   const char *str = JS_ToCString(js, argv[0]);
diff --git a/fetch.ce b/fetch.ce
index e20823cf..ce33eefe 100644
--- a/fetch.ce
+++ b/fetch.ce
@@ -45,42 +45,45 @@ if (target_pkg) {
   packages_to_fetch = all_packages
 }
 
-log.console("Fetching " + text(packages_to_fetch.length) + " package(s)...")
+var remote_count = 0
+for (var pkg of packages_to_fetch) {
+  var entry = lock[pkg]
+  if (pkg != 'core' && (!entry || entry.type != 'local'))
+    remote_count++
+}
 
-var success_count = 0
-var skip_count = 0
+if (remote_count > 0)
+  log.console(`Fetching ${text(remote_count)} remote package(s)...`)
+
+var downloaded_count = 0
+var cached_count = 0
 var fail_count = 0
 
 for (var pkg of packages_to_fetch) {
-  var entry = lock[pkg]
-
-  // Skip local packages
-  if (entry && entry.type == 'local') {
-    skip_count++
-    continue
-  }
-  // Skip core (handled separately)
-  if (pkg == 'core') {
-    skip_count++
-    continue
-  }
-
+  if (pkg == 'core') continue
+
   var result = shop.fetch(pkg)
-  if (result) {
-    if (result.zip_blob) {
-      log.console("Fetched: " + pkg)
-      success_count++
-    } else {
-      skip_count++
-    }
-  } else {
-    log.error("Failed to fetch: " + pkg)
+  if (result.status == 'local') {
+    // Local packages are just symlinks, nothing to fetch
+    continue
+  } else if (result.status == 'cached') {
+    cached_count++
+  } else if (result.status == 'downloaded') {
+    log.console("  Downloaded: " + pkg)
+    downloaded_count++
+  } else if (result.status == 'error') {
+    log.error("  Failed: " + pkg + (result.message ? " - " + result.message : ""))
     fail_count++
   }
 }
 
 log.console("")
-log.console("Fetch complete: " + text(success_count) + " fetched, " + text(skip_count) + " skipped, " + text(fail_count) + " failed")
+var parts = []
+if (downloaded_count > 0) parts.push(`${text(downloaded_count)} downloaded`)
+if (cached_count > 0) parts.push(`${text(cached_count)} cached`)
+if (fail_count > 0) parts.push(`${text(fail_count)} failed`)
+if (parts.length == 0) parts.push("nothing to fetch")
+log.console("Fetch complete: " + parts.join(", "))
 
 $stop()
 
diff --git a/install.ce b/install.ce
index 734e13d7..e700612d 100644
--- a/install.ce
+++ b/install.ce
@@ -101,7 +101,18 @@ function gather_packages(pkg_locator) {
   var lock = shop.load_lock()
   if (!lock[pkg_locator]) {
     if (!dry_run) {
-      shop.update(pkg_locator)
+      var update_result = shop.update(pkg_locator)
+      if (update_result) {
+        shop.extract(pkg_locator)
+      } else {
+        // Update failed - package might not be fetchable
+        log.console("Warning: Could not fetch " + pkg_locator)
+        return
+      }
+    }
+  } else {
+    // Package is in lock, ensure it's extracted
+    if (!dry_run) {
       shop.extract(pkg_locator)
     }
   }
@@ -116,7 +127,7 @@ function gather_packages(pkg_locator) {
   } catch (e) {
     // Package might not have dependencies or cell.toml issue
     if (!dry_run) {
-      log.console("Warning: Could not read dependencies for " + pkg_locator + ": " + e.message)
+      log.console(`Warning: Could not read dependencies for ${pkg_locator}: ${e.message}`)
     }
   }
 }
diff --git a/internal/shop.cm b/internal/shop.cm
index 099f404c..5f615412 100644
--- a/internal/shop.cm
+++ b/internal/shop.cm
@@ -862,22 +862,19 @@ function fetch_remote_hash(pkg) {
 // Returns the zip blob or null on failure
 function download_zip(pkg, commit_hash) {
   var cache_path = get_cache_path(pkg, commit_hash)
-
+
   var download_url = Shop.get_download_url(pkg, commit_hash)
   if (!download_url) {
     log.error("Could not determine download URL for " + pkg)
     return null
   }
-  log.console("Downloading from " + download_url)
 
   try {
     var zip_blob = http.fetch(download_url)
-    log.console(`putting to ${cache_path}`)
     fd.slurpwrite(cache_path, zip_blob)
-    log.console("Cached to " + cache_path)
     return zip_blob
   } catch (e) {
-    log.error(e)
+    log.error("Download failed for " + pkg + ": " + e)
     return null
   }
 }
@@ -892,39 +889,58 @@ function get_cached_zip(pkg, commit_hash) {
   return null
 }
 
 // Fetch: Ensure the zip on disk matches what's in the lock file
-// For local packages, this is a no-op (returns true)
+// For local packages, this is a no-op
 // For remote packages, downloads the zip if not present or hash mismatch
-// Returns true on success
+// Returns: { status: 'local'|'cached'|'downloaded'|'error', message?: string }
 Shop.fetch = function(pkg) {
   var lock = Shop.load_lock()
   var lock_entry = lock[pkg]
   var info = Shop.resolve_package_info(pkg)
 
-  if (info == 'local') return null
+  if (info == 'local') {
+    return { status: 'local' }
+  }
 
   // No lock entry - can't fetch without knowing what commit
-  if (!lock_entry || !lock_entry.commit)
-    throw new Error("No lock entry for " + pkg + " - run update first")
-
+  if (!lock_entry || !lock_entry.commit) {
+    return { status: 'error', message: "No lock entry for " + pkg + " - run update first" }
+  }
+
   var commit = lock_entry.commit
   var expected_hash = lock_entry.zip_hash
-
+
   // Check if we have the zip cached
   var zip_blob = get_cached_zip(pkg, commit)
-
+
   if (zip_blob) {
-    // Verify hash matches
-    var actual_hash = text(crypto.blake2(zip_blob), 'h')
-    if (actual_hash == expected_hash)
-      return true
-
-    log.console("Zip hash mismatch for " + pkg + ", re-fetching...")
+    // If we have a hash on record, verify it
+    if (expected_hash) {
+      var actual_hash = text(crypto.blake2(zip_blob), 'h')
+      if (actual_hash == expected_hash) {
+        return { status: 'cached' }
+      }
+      log.console("Zip hash mismatch for " + pkg + ", re-fetching...")
+    } else {
+      // No hash stored yet - compute and store it
+      var actual_hash = text(crypto.blake2(zip_blob), 'h')
+      lock_entry.zip_hash = actual_hash
+      Shop.save_lock(lock)
+      return { status: 'cached' }
+    }
   }
-
-  // Download the zip
-  download_zip(pkg, commit)
-  return true
+
+  // Download the zip
+  var new_zip = download_zip(pkg, commit)
+  if (!new_zip) {
+    return { status: 'error', message: "Failed to download " + pkg }
+  }
+
+  // Store the hash
+  var new_hash = text(crypto.blake2(new_zip), 'h')
+  lock_entry.zip_hash = new_hash
+  Shop.save_lock(lock)
+
+  return { status: 'downloaded' }
 }
 
 // Extract: Extract a package to its target directory
@@ -934,7 +950,7 @@ Shop.fetch = function(pkg) {
 // Returns true on success
 Shop.extract = function(pkg) {
   var target_dir = get_package_abs_dir(pkg)
-
+
   // Check if this package is linked
   var link_target = link.get_target(pkg)
   if (link_target) {
@@ -942,7 +958,7 @@ Shop.extract = function(pkg) {
     link.sync_one(pkg, link_target)
     return true
   }
-
+
   var info = Shop.resolve_package_info(pkg)
 
   if (info == 'local') {
@@ -955,6 +971,20 @@ Shop.extract = function(pkg) {
     return true
   }
 
+  // Check if already extracted at the correct commit
+  var lock = Shop.load_lock()
+  var lock_entry = lock[pkg]
+  if (lock_entry && lock_entry.commit) {
+    var extracted_commit_file = target_dir + '/.cell_commit'
+    if (fd.is_file(extracted_commit_file)) {
+      var extracted_commit = text(fd.slurp(extracted_commit_file)).trim()
+      if (extracted_commit == lock_entry.commit) {
+        // Already extracted at this commit, skip
+        return true
+      }
+    }
+  }
+
   var zip_blob = get_package_zip(pkg)
 
   if (!zip_blob)
@@ -962,6 +992,12 @@ Shop.extract = function(pkg) {
     return null
 
   // Extract zip for remote package
   install_zip(zip_blob, target_dir)
+
+  // Write a marker file recording the extracted commit
+  if (lock_entry && lock_entry.commit) {
+    fd.slurpwrite(target_dir + '/.cell_commit', stone(new blob(lock_entry.commit)))
+  }
+
   return true
 }
@@ -1015,14 +1051,14 @@ Shop.update = function(pkg) {
   log.console(`local commit: ${local_commit}`)
   log.console(`remote commit: ${remote_commit}`)
 
-  if (local_commit == remote_commit)
-    return null
-
   if (!remote_commit) {
     log.error("Could not resolve commit for " + pkg)
     return null
   }
 
+  if (local_commit == remote_commit)
+    return null
+
   var new_entry = {
     type: info,
     commit: remote_commit,
@@ -1038,28 +1074,35 @@
 
 function install_zip(zip_blob, target_dir) {
   var zip = miniz.read(zip_blob)
   if (!zip) throw new Error("Failed to read zip archive")
-
+
   if (fd.is_link(target_dir)) fd.unlink(target_dir)
   if (fd.is_dir(target_dir)) fd.rmdir(target_dir, 1)
-
+
   log.console("Extracting to " + target_dir)
   ensure_dir(target_dir)
-
+
   var count = zip.count()
-
+  var created_dirs = {}
+
   for (var i = 0; i < count; i++) {
     if (zip.is_directory(i)) continue
     var filename = zip.get_filename(i)
     var parts = filename.split('/')
     if (parts.length <= 1) continue
-
+
     parts.shift()
     var rel_path = parts.join('/')
    var full_path = target_dir + '/' + rel_path
     var dir_path = full_path.substring(0, full_path.lastIndexOf('/'))
-
-    ensure_dir(dir_path)
-    fd.slurpwrite(full_path, zip.slurp(filename))
+
+    // Create each directory only once; zips list many files per directory
+    if (!created_dirs[dir_path]) {
+      ensure_dir(dir_path)
+      created_dirs[dir_path] = true
+    }
+    var file_data = zip.slurp(filename)
+    stone(file_data)
+    fd.slurpwrite(full_path, file_data)
   }
 }
diff --git a/package.cm b/package.cm
index 77554bb7..16fa3fec 100644
--- a/package.cm
+++ b/package.cm
@@ -30,9 +30,13 @@ package.load_config = function(name) {
   var config_path = get_path(name) + '/cell.toml'
 
   if (!fd.is_file(config_path))
-    throw new Error(`${config_path} isn't a path`)
+    throw new Error(`${config_path} does not exist`)
 
-  return toml.decode(text(fd.slurp(config_path)))
+  var content = text(fd.slurp(config_path))
+  if (!content || content.trim().length == 0)
+    return {}
+
+  return toml.decode(content)
 }
 
 package.save_config = function(name, config)
diff --git a/source/quickjs.c b/source/quickjs.c
index 6d08f27b..65dacd75 100644
--- a/source/quickjs.c
+++ b/source/quickjs.c
@@ -39142,7 +39142,10 @@ JSValue js_new_blob_stoned_copy(JSContext *js, void *data, size_t bytes)
 void *js_get_blob_data(JSContext *js, size_t *size, JSValue v)
 {
   blob *b = js_get_blob(js, v);
   if (!b) {
+    *size = 0;
     JS_ThrowReferenceError(js, "get_blob_data: not called on a blob");
     return NULL;
   }
+  /* set the size after the NULL check, before the data checks below,
+     so callers see a valid size even for an empty blob */
+  *size = (b->length + 7) / 8;
@@ -39157,7 +39160,6 @@ void *js_get_blob_data(JSContext *js, size_t *size, JSValue v)
     return NULL;
   }
 
-  *size = (b->length + 7) / 8;
   return b->data;
 }
 
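A minimal sketch of how a caller consumes the status object Shop.fetch now returns.
This is illustrative only: `pkg` stands in for any package name, and `shop` and `log`
are the same modules fetch.ce already uses; fetch.ce above is the real consumer.

    // Hypothetical caller - assumes the { status, message? } contract from shop.cm
    var result = shop.fetch(pkg)
    if (result.status == 'error')
      log.error(result.message ? result.message : ("Failed to fetch " + pkg))
    else if (result.status == 'downloaded')
      log.console("Downloaded: " + pkg)
    // 'local' and 'cached' need no action: the symlink or cached zip is already in place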