check hashes on cached files

2025-12-03 13:53:47 -06:00
parent ddfa636ac0
commit 85e0e3dab1
2 changed files with 186 additions and 43 deletions


@@ -443,14 +443,18 @@ JSC_CCALL(fd_is_dir,
   return JS_NewBool(js, S_ISDIR(st.st_mode));
 )
-JSC_SCALL(fd_slurpwrite,
+JSC_CCALL(fd_slurpwrite,
   size_t len;
   const char *data = js_get_blob_data(js, &len, argv[1]);
   if (!data)
     return JS_ThrowTypeError(js, "blob expected");
+  const char *str = JS_ToCString(js, argv[0]);
+  if (!str) return JS_EXCEPTION;
   int fd = open(str, O_WRONLY | O_CREAT | O_TRUNC, 0644);
+  JS_FreeCString(js, str);
   if (fd < 0)
     return JS_ThrowInternalError(js, "open failed: %s", strerror(errno));
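A minimal usage sketch from the scripting side, assuming the binding surfaces as fd.slurpwrite(path, blob) exactly as the shop code in the second file calls it; use('fd') is an assumed loader name mirroring the use('http') imports below, and utf8.encode is taken from the save_config call site.

// Sketch: exercise the reworked fd_slurpwrite from script.
// A blob second argument writes the file; a plain string should
// hit the "blob expected" TypeError path shown above.
var fd = use('fd')     // assumed module name, mirroring use('http') etc.
var utf8 = use('utf8')
fd.slurpwrite('.cell/tmp.txt', utf8.encode('hello'))
try {
  fd.slurpwrite('.cell/tmp.txt', 'hello')
} catch (e) {
  log.error(e)         // TypeError: blob expected
}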


@@ -7,6 +7,8 @@ var http = use('http')
 var miniz = use('miniz')
 var time = use('time')
 var js = use('js')
+var crypto = use('crypto')
+var utf8 = use('utf8')
 var qop
 var core_qop
@@ -103,12 +105,6 @@ Shop.get_c_symbol = function get_c_symbol(name) {
   return os.load_internal(symname)
 }
-function slurpwrite(path, content) {
-  var f = fd.open(path)
-  fd.write(f, content)
-  fd.close(f)
-}
 function ensure_dir(path) {
   if (fd.stat(path).isDirectory) return true
@@ -149,7 +145,7 @@ Shop.load_config = function(module) {
 // Save cell.toml configuration
 Shop.save_config = function(config) {
-  slurpwrite(shop_path, toml.encode(config));
+  fd.slurpwrite(shop_path, utf8.encode(toml.encode(config)));
 }
 // Load lock.toml configuration
@@ -166,7 +162,7 @@ Shop.load_lock = function() {
 // Save lock.toml configuration
 Shop.save_lock = function(lock) {
-  slurpwrite('.cell/lock.toml', toml.encode(lock));
+  fd.slurpwrite('.cell/lock.toml', utf8.encode(toml.encode(lock)));
 }
 // Initialize .cell directory structure
@@ -183,8 +179,12 @@ Shop.init = function() {
     fd.mkdir('.cell/build')
   }
+  if (!fd.is_dir('.cell/cache')) {
+    fd.mkdir('.cell/cache')
+  }
   if (!fd.is_file('.cell/lock.toml')) {
-    slurpwrite('.cell/lock.toml', '# Lock file for module integrity\n');
+    fd.slurpwrite('.cell/lock.toml', '# Lock file for module integrity\n');
   }
   return true
@@ -402,7 +402,7 @@ Shop.install = function(alias) {
       ensure_dir(dir_path)
       var content = zip.slurp(filename)
-      slurpwrite(full_path, content)
+      fd.slurpwrite(full_path, content)
     }
   }
@@ -589,8 +589,61 @@ function install_package_deps(canonical_name, installed) {
   return installed
 }
+// Get cache path for a locator and commit
+function get_cache_path(locator, commit) {
+  var parsed = Shop.parse_locator(locator)
+  if (!parsed) return null
+  var slug = parsed.path.split('/').join('_')
+  return `.cell/cache/${slug}_${commit}.zip`
+}
+function rm_recursive(path) {
+  var st = fd.stat(path)
+  if (!st) return
+  if (!st.isDirectory) {
+    fd.unlink(path)
+    return
+  }
+  var list = fd.readdir(path)
+  if (list) {
+    for (var i = 0; i < list.length; i++) {
+      var item = list[i]
+      if (item == '.' || item == '..') continue
+      rm_recursive(path + "/" + item)
+    }
+  }
+  fd.rmdir(path)
+}
+function get_all_files(dir, prefix, results) {
+  prefix = prefix || ""
+  results = results || []
+  var list = fd.readdir(dir)
+  if (!list) return results
+  for (var i = 0; i < list.length; i++) {
+    var item = list[i]
+    if (item == '.' || item == '..') continue
+    var full_path = dir + "/" + item
+    var rel_path = prefix ? prefix + "/" + item : item
+    var st = fd.stat(full_path)
+    if (st.isDirectory) {
+      get_all_files(full_path, rel_path, results)
+    } else {
+      results.push(rel_path)
+    }
+  }
+  return results
+}
 // Install from a raw locator (not from config)
-function install_from_locator(locator, locked_hash) {
+function install_from_locator(locator, locked_hash, expected_zip_hash) {
   var parsed = Shop.parse_locator(locator)
   var target_dir = '.cell/modules/' + parsed.path
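To make the new helpers concrete, a short sketch under the assumption that Shop.parse_locator maps a locator like "github.com/user/repo" to that same string as parsed.path (the locator grammar itself is not shown in this diff); the hash plumbing is copied from install_from_locator below.

var commit = 'ddfa636ac0'                                  // illustrative hash
var cache = get_cache_path('github.com/user/repo', commit)
// cache == ".cell/cache/github.com_user_repo_ddfa636ac0.zip"
// The digest paired with that file is hex-encoded BLAKE2,
// computed the same way the cache check below does:
var digest = text(crypto.blake2(fd.slurp(cache)), 'h')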
@@ -611,7 +664,37 @@ function install_from_locator(locator, locked_hash) {
log.console("Using locked commit: " + commit_hash)
}
// 2. Download Zip
if (!commit_hash) {
log.error("Could not determine commit hash for " + locator)
return null
}
// 2. Check Cache / Download Zip
var cache_path = get_cache_path(locator, commit_hash)
var zip_blob = null
var zip_hash = null
var use_cache = false
if (fd.is_file(cache_path)) {
log.console("Found cached zip: " + cache_path)
try {
var cached = fd.slurp(cache_path)
var computed_hash = text(crypto.blake2(cached), 'h')
if (expected_zip_hash && computed_hash != expected_zip_hash) {
log.console("Cache hash mismatch. Expected: " + expected_zip_hash + ", Got: " + computed_hash)
log.console("Redownloading...")
} else {
zip_blob = cached
zip_hash = computed_hash
use_cache = true
}
} catch (e) {
log.error("Failed to read cache: " + e)
}
}
if (!use_cache) {
var download_url = Shop.get_download_url(locator, commit_hash)
if (!download_url) {
log.error("Could not determine download URL for " + locator)
@@ -619,44 +702,92 @@ function install_from_locator(locator, locked_hash) {
     }
     log.console("Downloading from " + download_url)
-    var zip_blob
     try {
       zip_blob = http.fetch(download_url)
+      zip_hash = text(crypto.blake2(zip_blob), 'h')
+      // Save to cache
+      ensure_dir(cache_path.substring(0, cache_path.lastIndexOf('/')))
+      fd.slurpwrite(cache_path, zip_blob)
+      log.console("Cached to " + cache_path)
     } catch (e) {
-      log.error("Download failed: " + e)
+      log.error(e)
       return null
     }
+  }
-  // 3. Unpack
-  log.console("Unpacking to " + target_dir)
-  ensure_dir(target_dir)
+  // 3. Verify and Unpack
   var zip = miniz.read(zip_blob)
-  if (!zip)
-    throw new Error("Failed to read zip archive")
+  if (!zip) throw new Error("Failed to read zip archive")
   var count = zip.count()
   log.console(`zip contains ${count} entries`)
+  var expected_files = {}
+  var needs_unpack = !use_cache
+  // Collect expected files from zip
   for (var i = 0; i < count; i++) {
     if (zip.is_dir(i)) continue
     var filename = zip.get_filename(i)
     log.console(filename)
     var parts = filename.split('/')
     if (parts.length > 1) {
       parts.shift()
       var rel_path = parts.join('/')
+      expected_files[rel_path] = i // Store index
     }
   }
+  // If using cache, verify existing installation
+  if (use_cache && fd.is_dir(target_dir)) {
+    // Check for missing files
+    for (var rel_path in expected_files) {
+      if (!fd.is_file(target_dir + '/' + rel_path)) {
+        log.console("Verification failed: Missing file " + rel_path)
+        needs_unpack = true
+        break
+      }
+    }
+    // Check for extra files
+    if (!needs_unpack) {
+      var existing_files = get_all_files(target_dir)
+      for (var i = 0; i < existing_files.length; i++) {
+        if (!expected_files[existing_files[i]]) {
+          log.console("Verification failed: Extra file " + existing_files[i])
+          needs_unpack = true
+          break
+        }
+      }
+    }
+  } else if (use_cache && !fd.is_dir(target_dir)) {
+    needs_unpack = true
+  }
+  if (needs_unpack) {
+    if (fd.is_dir(target_dir)) {
+      log.console("Clearing module directory for fresh install...")
+      rm_recursive(target_dir)
+    }
+    log.console("Unpacking to " + target_dir)
+    ensure_dir(target_dir)
+    for (var rel_path in expected_files) {
+      var i = expected_files[rel_path]
+      var filename = zip.get_filename(i)
       var full_path = target_dir + '/' + rel_path
       var dir_path = full_path.substring(0, full_path.lastIndexOf('/'))
       ensure_dir(dir_path)
       var content = zip.slurp(filename)
-      slurpwrite(full_path, content)
+      fd.slurpwrite(full_path, content)
     }
+  } else {
+    log.console("Verified existing installation.")
+  }
-  return { commit: commit_hash, locator: locator, path: parsed.path }
+  return { commit: commit_hash, locator: locator, path: parsed.path, zip_hash: zip_hash }
 }
 // High-level: Add a package, install it, and install all transitive dependencies
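For reference, the lock entry that Shop.get writes below would serialize through toml.encode into lock.toml as roughly the following; every value is illustrative, and "updated" is whatever time.number() returns.

[some_alias]
locator = "github.com/user/repo"
commit = "ddfa636ac0"
zip_hash = "ab12cd34ef56..."   # hex BLAKE2 of the cached zip
updated = 1764791627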
@@ -687,6 +818,7 @@ Shop.get = function(locator, alias) {
     lock[alias] = {
       locator: locator,
       commit: result.commit,
+      zip_hash: result.zip_hash,
       updated: time.number()
     }
     Shop.save_lock(lock)
@@ -735,6 +867,8 @@ Shop.update_all = function(alias) {
       var resp_text = text(resp)
       var remote_hash = Shop.extract_commit_hash(locator, resp_text)
       var local_hash = lock[dep_alias] ? lock[dep_alias].commit : null
+      var local_zip_hash = lock[dep_alias] ? lock[dep_alias].zip_hash : null
       if (!fd.is_dir(target_dir) || remote_hash != local_hash) {
         log.console(dep_alias + ": updating " + (local_hash ? local_hash.substring(0,8) : "(new)") + " -> " + remote_hash.substring(0,8))
@@ -743,11 +877,12 @@ Shop.update_all = function(alias) {
         fd.rmdir(target_dir)
         // Reinstall
-        var result = install_from_locator(locator, remote_hash)
+        var result = install_from_locator(locator, remote_hash, local_zip_hash)
         if (result) {
           lock[dep_alias] = {
             locator: locator,
             commit: result.commit,
+            zip_hash: result.zip_hash,
             updated: time.number()
           }
@@ -755,7 +890,9 @@ Shop.update_all = function(alias) {
           install_package_deps(parsed.path, {})
         }
       } else {
-        log.console(dep_alias + ": up to date")
+        // Even if up to date commit-wise, run install to verify/repair using cache
+        var result = install_from_locator(locator, local_hash, local_zip_hash)
+        log.console(dep_alias + ": verified")
       }
     } catch (e) {
       log.error("Failed to check " + dep_alias)
@@ -830,12 +967,14 @@ Shop.install_all = function() {
log.console("Installing " + alias + "...")
var locked_hash = lock[alias] ? lock[alias].commit : null
var result = install_from_locator(locator, locked_hash)
var zip_hash = lock[alias] ? lock[alias].zip_hash : null
var result = install_from_locator(locator, locked_hash, zip_hash)
if (result) {
installed[parsed.path] = true
lock[alias] = {
locator: locator,
commit: result.commit,
zip_hash: result.zip_hash,
updated: time.number()
}
}
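Taken together, the verify/repair path on a later run reduces to roughly the sketch below; install_from_locator is internal to this module, so this is a restatement of what Shop.install_all and Shop.update_all drive, not a public API, and the alias is hypothetical.

var lock = Shop.load_lock()
var entry = lock['some_alias']   // hypothetical alias
var result = install_from_locator(
  entry.locator,
  entry.commit,                  // pin the locked commit
  entry.zip_hash)                // verify the cached zip by BLAKE2 digest
// On a clean cache hit the installed tree is verified in place; on a
// hash, missing-file, or extra-file mismatch the module directory is
// cleared and re-unpacked, and result.zip_hash refreshes the lock.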