update shop

This commit is contained in:
2025-12-03 15:29:42 -06:00
parent 85e0e3dab1
commit 1769d7f456
16 changed files with 342 additions and 317 deletions

View File

@@ -8,7 +8,7 @@ libtype = get_option('default_library')
link = []
src = []
add_project_arguments('-Wno-gnu-label-as-value', language: ['c'])
add_project_arguments('-Wno-gnu-label-as-value', '-Wno-int-conversion', language: ['c'])
git_tag_cmd = run_command('git', 'describe', '--tags', '--abbrev=0', check: false)
cell_version = 'unknown'

View File

@@ -5,9 +5,6 @@
#include "monocypher.h"
// External declaration if not in cell.h
void *js_get_blob_data_bits(JSContext *js, size_t *bits, JSValue v);
/*
Crypto Module Documentation
@@ -58,69 +55,26 @@ void *js_get_blob_data_bits(JSContext *js, size_t *bits, JSValue v);
// Helper to get blob data and check exact bit length
static void *get_blob_check_bits(JSContext *js, JSValue val, size_t expected_bits, const char *name) {
size_t bits;
void *data = js_get_blob_data_bits(js, &bits, val);
if (!data) {
JS_ThrowTypeError(js, "%s: expected a blob", name);
return NULL;
void* result = js_get_blob_data_bits(js, &bits, val);
if (result == -1) {
return NULL; // Exception already thrown by js_get_blob_data_bits
}
if (bits != expected_bits) {
JS_ThrowTypeError(js, "%s: expected %zu bits, got %zu", name, expected_bits, bits);
return NULL;
}
return data;
return result;
}
// Helper to get any blob data (checking it is a stoned blob)
static void *get_blob_any(JSContext *js, JSValue val, size_t *out_bits, const char *name) {
void *data = js_get_blob_data_bits(js, out_bits, val);
if (!data) {
JS_ThrowTypeError(js, "%s: expected a blob", name);
void *result = js_get_blob_data_bits(js, out_bits, val);
if (result == -1)
return NULL;
}
return data;
return result;
}
// Generate an X25519 keypair and return it as a JS object
// { public: <32-byte stoned blob>, private: <32-byte stoned blob> }.
// Randomness is obtained by calling the JS-level os.random_blob(32).
JSValue js_crypto_keypair(JSContext *js, JSValue self, int argc, JSValue *argv) {
JSValue ret = JS_NewObject(js);
// Generate 32 random bytes using os.random_blob
JSValue global = JS_GetGlobalObject(js);
JSValue os = JS_GetPropertyStr(js, global, "os");
JSValue random_blob = JS_GetPropertyStr(js, os, "random_blob");
JSValue size_val = JS_NewInt32(js, 32); // 32 bytes
JSValue blob = JS_Call(js, random_blob, os, 1, &size_val);
JS_FreeValue(js, size_val);
JS_FreeValue(js, random_blob);
JS_FreeValue(js, os);
JS_FreeValue(js, global);
// NOTE(review): `ret` is not freed on this error path — leaks one object per failure; confirm and fix.
if (JS_IsException(blob)) return blob;
size_t bits;
uint8_t *data = js_get_blob_data_bits(js, &bits, blob);
if (!data || bits != 256) {
// NOTE(review): `ret` also leaks here (and a pending exception from
// js_get_blob_data_bits may be overwritten by the InternalError) — verify.
JS_FreeValue(js, blob);
return JS_ThrowInternalError(js, "failed to get 256 bits of random bytes");
}
uint8_t priv[32];
uint8_t pub[32];
memcpy(priv, data, 32);
JS_FreeValue(js, blob); // Done with random blob
// Clamp the private key: clear the low 3 bits, clear bit 255, set bit 254
// (standard X25519 scalar clamping).
priv[0] &= 248;
priv[31] &= 127;
priv[31] |= 64;
crypto_x25519_public_key(pub, priv);
// Copy both keys into immutable ("stoned") blobs owned by the result object.
JS_SetPropertyStr(js, ret, "public", js_new_blob_stoned_copy(js, pub, 32));
JS_SetPropertyStr(js, ret, "private", js_new_blob_stoned_copy(js, priv, 32));
return ret;
}
JSValue js_crypto_shared(JSContext *js, JSValue self, int argc, JSValue *argv)
{
@@ -147,7 +101,7 @@ JSValue js_crypto_blake2(JSContext *js, JSValue self, int argc, JSValue *argv)
size_t data_bits;
uint8_t *data = get_blob_any(js, argv[0], &data_bits, "crypto.blake2 data");
if (!data) return JS_EXCEPTION;
if (data == -1) return JS_EXCEPTION;
int32_t hash_len = 32;
if (argc > 1) {
@@ -276,7 +230,6 @@ JSValue js_crypto_unlock(JSContext *js, JSValue self, int argc, JSValue *argv) {
}
static const JSCFunctionListEntry js_crypto_funcs[] = {
JS_CFUNC_DEF("keypair", 0, js_crypto_keypair),
JS_CFUNC_DEF("shared", 2, js_crypto_shared),
JS_CFUNC_DEF("blake2", 1, js_crypto_blake2),
JS_CFUNC_DEF("sign", 2, js_crypto_sign),

View File

@@ -40,17 +40,7 @@ static int js2fd(JSContext *ctx, JSValueConst val)
// Helper function for writing
static ssize_t js_fd_write_helper(JSContext *js, int fd, JSValue val)
{
size_t len;
ssize_t wrote;
if (JS_IsString(val)) {
const char *data = JS_ToCStringLen(js, &len, val);
wrote = write(fd, data, len);
JS_FreeCString(js, data);
} else {
unsigned char *data = js_get_blob_data(js, &len, val);
wrote = write(fd, data, len);
}
return wrote;
}
@@ -86,9 +76,19 @@ JSC_CCALL(fd_write,
int fd = js2fd(js, argv[0]);
if (fd < 0) return JS_EXCEPTION;
ssize_t wrote = js_fd_write_helper(js, fd, argv[1]);
if (wrote < 0)
return JS_ThrowInternalError(js, "write failed: %s", strerror(errno));
size_t len;
ssize_t wrote;
if (JS_IsString(argv[1])) {
const char *data = JS_ToCStringLen(js, &len, argv[1]);
if (!data) return JS_EXCEPTION;
wrote = write(fd, data, len);
JS_FreeCString(js, data);
} else {
void *data = js_get_blob_data(js, &len, argv[1]);
if (data == -1)
return JS_EXCEPTION;
wrote = write(fd, data, len);
}
return JS_NewInt64(js, wrote);
)
@@ -447,8 +447,11 @@ JSC_CCALL(fd_slurpwrite,
size_t len;
const char *data = js_get_blob_data(js, &len, argv[1]);
if (!data)
return JS_ThrowTypeError(js, "blob expected");
if (data == (const char *)-1)
return JS_EXCEPTION;
if (len == 0)
return JS_ThrowTypeError(js, "No data provided to write");
const char *str = JS_ToCString(js, argv[0]);

View File

@@ -69,7 +69,8 @@ JSC_CCALL(js_compile_blob,
JSC_CCALL(js_compile_unblob,
size_t size;
void *data = js_get_blob_data(js, &size, argv[0]);
if (!data) return JS_ThrowReferenceError(js, "Must be a stoned blob.");
if (data == -1) return JS_EXCEPTION;
if (!data) return JS_ThrowReferenceError(js, "No data present in blob.");
return JS_ReadObject(js, data, size, JS_READ_OBJ_BYTECODE);
)

View File

@@ -33,8 +33,10 @@ JSC_CCALL(kim_encode,
JSC_CCALL(kim_decode,
size_t kim_len;
void *kim_data = js_get_blob_data(js, &kim_len, argv[0]);
if (!kim_data) return JS_ThrowTypeError(js, "Expected blob");
int result = js_get_blob_data(js, &kim_len, argv[0]);
if (result == -1) return JS_EXCEPTION;
if (result == 0) return JS_NewString(js, "");
void *kim_data = result;
// Allocate UTF-8 buffer (worst case: 4 bytes per kim byte)
size_t utf8_size = kim_len * 4;

View File

@@ -42,13 +42,20 @@ static JSValue js_miniz_read(JSContext *js, JSValue self, int argc, JSValue *arg
{
size_t len;
void *data = js_get_blob_data(js, &len, argv[0]);
if (!data)
return JS_ThrowReferenceError(js, "Could not create data.\n");
if (data == -1)
return JS_EXCEPTION;
mz_zip_archive *zip = calloc(sizeof(*zip),1);
mz_zip_archive *zip = calloc(sizeof(*zip), 1);
if (!zip)
return JS_ThrowOutOfMemory(js);
mz_bool success = mz_zip_reader_init_mem(zip, data, len, 0);
if (!success) {
int err = mz_zip_get_last_error(zip);
if (err)
return JS_ThrowInternalError(js, "miniz error: %s\n", mz_zip_get_error_string(err));
free(zip);
return JS_ThrowInternalError(js, "Failed to initialize zip reader: %s", mz_zip_get_error_string(err));
}
JSValue jszip = JS_NewObjectClass(js, js_reader_class_id);
JS_SetOpaque(jszip, zip);
@@ -102,9 +109,8 @@ static JSValue js_miniz_compress(JSContext *js, JSValue this_val,
in_ptr = cstring;
} else {
in_ptr = js_get_blob_data(js, &in_len, argv[0]);
if (!in_ptr)
return JS_ThrowTypeError(js,
"Argument must be a string or ArrayBuffer");
if (in_ptr == -1)
return JS_EXCEPTION;
}
/* ─── 2. Allocate an output buffer big enough ────────────── */
@@ -147,9 +153,8 @@ static JSValue js_miniz_decompress(JSContext *js,
/* grab compressed data */
size_t in_len;
void *in_ptr = js_get_blob_data(js, &in_len, argv[0]);
if (!in_ptr)
return JS_ThrowTypeError(js,
"decompress: first arg must be an ArrayBuffer");
if (in_ptr == -1)
return JS_EXCEPTION;
/* zlib header present → tell tinfl to parse it */
size_t out_len = 0;
@@ -191,9 +196,9 @@ JSValue js_writer_add_file(JSContext *js, JSValue self, int argc, JSValue *argv)
size_t dataLen;
void *data = js_get_blob_data(js, &dataLen, argv[1]);
if (!data) {
if (data == -1) {
JS_FreeCString(js, pathInZip);
return JS_ThrowTypeError(js, "Second argument must be an ArrayBuffer");
return JS_EXCEPTION;
}
int success = mz_zip_writer_add_mem(zip, pathInZip, data, dataLen, MZ_DEFAULT_COMPRESSION);
@@ -357,7 +362,6 @@ JSValue js_reader_count(JSContext *js, JSValue self, int argc, JSValue *argv)
mz_zip_archive *zip = js2reader(js, self);
if (!zip)
return JS_ThrowInternalError(js, "Invalid zip reader");
return JS_NewUint32(js, mz_zip_reader_get_num_files(zip));
}

View File

@@ -170,6 +170,10 @@ static void nota_encode_value(NotaEncodeContext *enc, JSValueConst val, JSValueC
if (js_is_blob(ctx, replaced)) {
size_t buf_len;
void *buf_data = js_get_blob_data(ctx, &buf_len, replaced);
if (buf_data == -1) {
JS_FreeValue(ctx, replaced);
return; // JS_EXCEPTION will be handled by caller
}
nota_write_blob(&enc->nb, (unsigned long long)buf_len * 8, (const char*)buf_data);
break;
}
@@ -327,6 +331,7 @@ static JSValue js_nota_decode(JSContext *js, JSValueConst self, int argc, JSValu
size_t len;
unsigned char *nota = js_get_blob_data(js, &len, argv[0]);
if (nota == -1) return JS_EXCEPTION;
if (!nota) return JS_NULL;
JSValue reviver = (argc > 1 && JS_IsFunction(js, argv[1])) ? argv[1] : JS_NULL;

View File

@@ -248,31 +248,6 @@ static JSValue js_os_version(JSContext *js, JSValue self, int argc, JSValue *arg
return ret;
}
// os.buffer2string(blob) -> string
// Copies the blob's bytes into a NUL-terminated buffer and builds a JS string
// from it. NOTE(review): JS_NewString assumes the bytes are valid UTF-8 and
// stops at the appended NUL, so embedded zero bytes truncate the result — confirm
// this is the intended contract for callers.
JSC_CCALL(os_buffer2string,
if (argc < 1) {
return JS_ThrowTypeError(js, "buffer2string expects an ArrayBuffer");
}
size_t len;
uint8_t *buf = js_get_blob_data(js, &len, argv[0]);
if (!buf) {
return JS_ThrowTypeError(js, "First argument must be an ArrayBuffer");
}
// Create a null-terminated string from the buffer
char *str = js_malloc(js, len + 1);
if (!str) {
return JS_ThrowInternalError(js, "Failed to allocate memory");
}
memcpy(str, buf, len);
str[len] = '\0';
JSValue result = JS_NewString(js, str);
js_free(js, str);
return result;
)
#define JSOBJ_ADD_FIELD(OBJ, STRUCT, FIELD, TYPE) \
JS_SetPropertyStr(js, OBJ, #FIELD, TYPE##2js(js,STRUCT.FIELD));\
@@ -514,7 +489,6 @@ static const JSCFunctionListEntry js_os_funcs[] = {
MIST_FUNC_DEF(os, now, 0),
MIST_FUNC_DEF(os, rusage, 0),
MIST_FUNC_DEF(os, mallinfo, 0),
MIST_FUNC_DEF(os, buffer2string, 1),
MIST_FUNC_DEF(os, system, 1),
MIST_FUNC_DEF(os, exit, 0),
MIST_FUNC_DEF(os, sleep, 1),

View File

@@ -106,8 +106,10 @@ static int js_qop_ensure_index(JSContext *js, qop_desc *qop) {
JSC_CCALL(qop_open,
size_t len;
void *data = js_get_blob_data(js, &len, argv[0]);
if (!data)
ret = JS_ThrowReferenceError(js, "Could not get blob data.\n");
if (data == -1)
ret = JS_EXCEPTION;
else if (!data)
ret = JS_ThrowReferenceError(js, "Empty blob");
else {
qop_desc *qop = js_malloc(js, sizeof(qop_desc));
if (!qop)
@@ -361,9 +363,13 @@ static JSValue js_writer_add_file(JSContext *js, JSValue self, int argc, JSValue
size_t data_len;
void *data = js_get_blob_data(js, &data_len, argv[1]);
if (data == (void*)-1) {
JS_FreeCString(js, path);
return JS_EXCEPTION;
}
if (!data) {
JS_FreeCString(js, path);
return JS_ThrowTypeError(js, "Second argument must be a blob");
return JS_ThrowTypeError(js, "No blob data present");
}
if (w->len >= w->capacity) {

View File

@@ -411,26 +411,30 @@ Shop.install = function(alias) {
return { commit: commit_hash, locator: locator }
}
// Verify dependencies
Shop.verify = function() {
var config = Shop.load_config()
if (!config || !config.dependencies) return true
function lock_locator(loc)
{
var lock = Shop.load_lock()
var all_ok = true
for (var alias in config.dependencies) {
var dir = Shop.get_module_dir(alias)
if (!dir) {
// Might be a replace that is invalid or something else
continue
}
Shop.check_cache = function(locator) {
var parsed = Shop.parse_locator(locator)
if (!parsed) return null
var cache_path = `.cell/cache/${parsed.path}.zip`
if (fd.is_file(cache_path)) {
log.console("Found cached zip: " + cache_path)
return true
}
if (!fd.stat(dir).isDirectory) {
log.error("Missing dependency: " + alias + " (expected at " + dir + ")")
all_ok = false
} else {
// Check if empty?
}
}
return all_ok
return false
}
// Verify dependencies
Shop.verify = function(locator) {
// each locator should be a package
}
var open_dls = {}
@@ -549,46 +553,6 @@ Shop.use = function(path, package_context) {
Shop.resolve_locator = resolve_locator
// Install a package and all its transitive dependencies
// This is the internal workhorse - installs from a specific package context
function install_package_deps(canonical_name, installed) {
installed = installed || {}
// Load the package's config to find its dependencies
var pkg_config = Shop.load_config(canonical_name)
if (!pkg_config || !pkg_config.dependencies) return installed
for (var alias in pkg_config.dependencies) {
var locator = pkg_config.dependencies[alias]
var parsed = Shop.parse_locator(locator)
var dep_canonical = parsed.path
// Skip if already installed in this run
if (installed[dep_canonical]) continue
// Check if already exists on disk
var target_dir = '.cell/modules/' + dep_canonical
if (fd.is_dir(target_dir)) {
log.console(" " + alias + " already installed")
installed[dep_canonical] = true
// Still recurse into its deps
install_package_deps(dep_canonical, installed)
continue
}
// Install this dependency
log.console(" Installing transitive dependency: " + alias + " (" + locator + ")")
var result = install_from_locator(locator)
if (result) {
installed[dep_canonical] = true
// Recurse into this package's dependencies
install_package_deps(dep_canonical, installed)
}
}
return installed
}
// Get cache path for a locator and commit
function get_cache_path(locator, commit) {
var parsed = Shop.parse_locator(locator)
@@ -642,6 +606,44 @@ function get_all_files(dir, prefix, results) {
return results
}
// Verify zip contents against target directory
// Compare every file inside `zip` against what is on disk under `target_dir`.
// Returns true only when the archive and the directory match exactly:
// each zip entry (minus its leading path component) exists on disk with the
// same size and blake2 hash, and the directory holds no files the zip lacks.
function verify_zip_contents(zip, target_dir) {
var seen = {}
var total = zip.count()
for (var idx = 0; idx < total; idx++) {
if (zip.is_directory(idx)) continue
var entry = zip.get_filename(idx)
var segments = entry.split('/')
if (segments.length <= 1) continue
// Drop the top-level folder the archive was packed with.
segments.shift()
var rel = segments.join('/')
seen[rel] = true
var on_disk = target_dir + '/' + rel
if (!fd.is_file(on_disk)) return false
var from_zip = zip.slurp(entry)
var from_disk = fd.slurp(on_disk)
// Cheap size comparison first, then full blake2 digest comparison (hex).
if (from_zip.length != from_disk.length) return false
if (text(crypto.blake2(from_zip), 'h') != text(crypto.blake2(from_disk), 'h')) return false
}
// Reject any file on disk that the archive does not account for.
var disk_files = get_all_files(target_dir)
for (var j = 0; j < disk_files.length; j++) {
if (!seen[disk_files[j]]) return false
}
return true
}
// Install from a raw locator (not from config)
function install_from_locator(locator, locked_hash, expected_zip_hash) {
var parsed = Shop.parse_locator(locator)
@@ -721,43 +723,13 @@ function install_from_locator(locator, locked_hash, expected_zip_hash) {
var zip = miniz.read(zip_blob)
if (!zip) throw new Error("Failed to read zip archive")
var count = zip.count()
var expected_files = {}
var needs_unpack = !use_cache
// Collect expected files from zip
for (var i = 0; i < count; i++) {
if (zip.is_dir(i)) continue
var filename = zip.get_filename(i)
var parts = filename.split('/')
if (parts.length > 1) {
parts.shift()
var rel_path = parts.join('/')
expected_files[rel_path] = i // Store index
}
}
// If using cache, verify existing installation
// If using cache, verify existing installation strictly
if (use_cache && fd.is_dir(target_dir)) {
// Check for missing files
for (var rel_path in expected_files) {
if (!fd.is_file(target_dir + '/' + rel_path)) {
log.console("Verification failed: Missing file " + rel_path)
if (!verify_zip_contents(zip, target_dir)) {
log.console("Verification failed for " + locator + ". Reinstalling...")
needs_unpack = true
break
}
}
// Check for extra files
if (!needs_unpack) {
var existing_files = get_all_files(target_dir)
for (var i = 0; i < existing_files.length; i++) {
if (!expected_files[existing_files[i]]) {
log.console("Verification failed: Extra file " + existing_files[i])
needs_unpack = true
break
}
}
}
} else if (use_cache && !fd.is_dir(target_dir)) {
needs_unpack = true
@@ -772,10 +744,15 @@ function install_from_locator(locator, locked_hash, expected_zip_hash) {
log.console("Unpacking to " + target_dir)
ensure_dir(target_dir)
for (var rel_path in expected_files) {
var i = expected_files[rel_path]
var count = zip.count()
for (var i = 0; i < count; i++) {
if (zip.is_directory(i)) continue
var filename = zip.get_filename(i)
var parts = filename.split('/')
if (parts.length > 1) {
parts.shift()
var rel_path = parts.join('/')
var full_path = target_dir + '/' + rel_path
var dir_path = full_path.substring(0, full_path.lastIndexOf('/'))
ensure_dir(dir_path)
@@ -783,6 +760,7 @@ function install_from_locator(locator, locked_hash, expected_zip_hash) {
var content = zip.slurp(filename)
fd.slurpwrite(full_path, content)
}
}
} else {
log.console("Verified existing installation.")
}
@@ -806,29 +784,54 @@ Shop.get = function(locator, alias) {
config.dependencies[alias] = locator
Shop.save_config(config)
// Install the package
var result = install_from_locator(locator)
if (!result) {
log.error("Failed to install " + alias)
return false
}
// Update lock file for root
// Install the package and dependencies
var queue = [locator]
var processed = {}
var lock = Shop.load_lock(null)
lock[alias] = {
locator: locator,
while (queue.length > 0) {
var current_locator = queue.shift()
if (processed[current_locator]) continue
processed[current_locator] = true
log.console("Installing " + current_locator + "...")
var lock_info = lock[current_locator] || lock[Shop.parse_locator(current_locator).name]
var locked_hash = lock_info ? lock_info.commit : null
var zip_hash = lock_info ? lock_info.zip_hash : null
var result = install_from_locator(current_locator, locked_hash, zip_hash)
if (result) {
lock[current_locator] = {
locator: current_locator,
commit: result.commit,
zip_hash: result.zip_hash,
updated: time.number()
}
// Read package config to find dependencies
var parsed = Shop.parse_locator(current_locator)
var pkg_config = Shop.load_config(parsed.path)
if (pkg_config && pkg_config.dependencies) {
for (var k in pkg_config.dependencies) {
var dep_locator = pkg_config.dependencies[k]
if (!processed[dep_locator]) {
queue.push(dep_locator)
}
}
}
} else {
if (current_locator == locator) {
log.error("Failed to install requested package " + alias)
return false
} else {
log.error("Failed to install dependency " + current_locator)
}
}
}
Shop.save_lock(lock)
log.console("Installed " + alias)
// Install transitive dependencies
log.console("Resolving transitive dependencies...")
install_package_deps(parsed.path, {})
log.console("Done.")
return true
}
@@ -843,60 +846,85 @@ Shop.update_all = function(alias) {
}
var lock = Shop.load_lock()
var to_update = alias ? [alias] : Object.keys(config.dependencies)
var queue = []
var processed = {}
for (var i = 0; i < to_update.length; i++) {
var dep_alias = to_update[i]
var locator = config.dependencies[dep_alias]
if (!locator) {
log.error("Dependency not found: " + dep_alias)
continue
// Initialize queue
if (alias) {
if (config.dependencies[alias]) {
queue.push(config.dependencies[alias])
} else {
log.error("Dependency not found: " + alias)
return
}
} else {
for (var k in config.dependencies) {
queue.push(config.dependencies[k])
}
}
while (queue.length > 0) {
var locator = queue.shift()
if (processed[locator]) continue
processed[locator] = true
// Find existing lock info
var lock_info = lock[locator]
var local_hash = lock_info ? lock_info.commit : null
var local_zip_hash = lock_info ? lock_info.zip_hash : null
var api_url = Shop.get_api_url(locator)
if (!api_url) {
log.console(dep_alias + ": cannot check for updates (no API URL)")
var remote_hash = null
// Check for updates if possible
if (api_url) {
try {
var resp = http.fetch(api_url)
remote_hash = Shop.extract_commit_hash(locator, text(resp))
} catch (e) {
log.console("Warning: Could not check for updates for " + locator)
}
}
var target_hash = remote_hash || local_hash
if (!target_hash) {
log.error("Could not resolve commit for " + locator)
continue
}
try {
var parsed = Shop.parse_locator(locator)
var target_dir = `.cell/modules/${parsed.path}`
var resp = http.fetch(api_url)
var resp_text = text(resp)
var remote_hash = Shop.extract_commit_hash(locator, resp_text)
var local_hash = lock[dep_alias] ? lock[dep_alias].commit : null
var local_zip_hash = lock[dep_alias] ? lock[dep_alias].zip_hash : null
var is_update = remote_hash && local_hash && (remote_hash != local_hash)
if (is_update) {
log.console("Updating " + locator + " " + local_hash.substring(0,8) + " -> " + remote_hash.substring(0,8))
} else {
log.console("Checking " + locator + "...")
}
if (!fd.is_dir(target_dir) || remote_hash != local_hash) {
log.console(dep_alias + ": updating " + (local_hash ? local_hash.substring(0,8) : "(new)") + " -> " + remote_hash.substring(0,8))
// Install/Verify
// If updating, we pass null as local_zip_hash to force fresh download/check
// If verifying, we pass local_zip_hash
var result = install_from_locator(locator, target_hash, is_update ? null : local_zip_hash)
// Remove old directory
if (fd.is_dir(target_dir))
fd.rmdir(target_dir)
// Reinstall
var result = install_from_locator(locator, remote_hash, local_zip_hash)
if (result) {
lock[dep_alias] = {
// Update lock
lock[locator] = {
locator: locator,
commit: result.commit,
zip_hash: result.zip_hash,
updated: time.number()
}
// Reinstall transitive deps
install_package_deps(parsed.path, {})
// Read package config to find dependencies
var parsed = Shop.parse_locator(locator)
var pkg_config = Shop.load_config(parsed.path)
if (pkg_config && pkg_config.dependencies) {
for (var k in pkg_config.dependencies) {
var dep_locator = pkg_config.dependencies[k]
if (!processed[dep_locator]) {
queue.push(dep_locator)
}
}
} else {
// Even if up to date commit-wise, run install to verify/repair using cache
var result = install_from_locator(locator, local_hash, local_zip_hash)
log.console(dep_alias + ": verified")
}
} catch (e) {
log.error("Failed to check " + dep_alias)
log.error(e)
}
}
@@ -923,7 +951,8 @@ Shop.remove = function(alias) {
// Remove from lock
var lock = Shop.load_lock()
delete lock[alias]
if (lock[locator]) delete lock[locator]
if (lock[alias]) delete lock[alias] // Cleanup old format
Shop.save_lock(lock)
// Remove directory
@@ -951,41 +980,46 @@ Shop.install_all = function() {
}
var lock = Shop.load_lock(null)
var installed = {}
var queue = []
var processed = {}
for (var alias in config.dependencies) {
var locator = config.dependencies[alias]
var parsed = Shop.parse_locator(locator)
var target_dir = '.cell/modules/' + parsed.path
// Check if already installed
if (fd.is_dir(target_dir)) {
log.console(alias + ": already installed")
installed[parsed.path] = true
continue
queue.push(config.dependencies[alias])
}
log.console("Installing " + alias + "...")
var locked_hash = lock[alias] ? lock[alias].commit : null
var zip_hash = lock[alias] ? lock[alias].zip_hash : null
while (queue.length > 0) {
var locator = queue.shift()
if (processed[locator]) continue
processed[locator] = true
log.console("Installing " + locator + "...")
var lock_info = lock[locator] || lock[Shop.parse_locator(locator).name] // Fallback to old format check
var locked_hash = lock_info ? lock_info.commit : null
var zip_hash = lock_info ? lock_info.zip_hash : null
var result = install_from_locator(locator, locked_hash, zip_hash)
if (result) {
installed[parsed.path] = true
lock[alias] = {
lock[locator] = {
locator: locator,
commit: result.commit,
zip_hash: result.zip_hash,
updated: time.number()
}
}
}
// Now install transitive dependencies for all root deps
log.console("Resolving transitive dependencies...")
for (var alias in config.dependencies) {
var locator = config.dependencies[alias]
// Read package config to find dependencies
var parsed = Shop.parse_locator(locator)
install_package_deps(parsed.path, installed)
var pkg_config = Shop.load_config(parsed.path)
if (pkg_config && pkg_config.dependencies) {
for (var k in pkg_config.dependencies) {
var dep_locator = pkg_config.dependencies[k]
if (!processed[dep_locator]) {
queue.push(dep_locator)
}
}
}
}
}
Shop.save_lock(lock)
@@ -993,6 +1027,7 @@ Shop.install_all = function() {
return true
}
// Compile a module
Shop.compile_module = function(alias) {
var module_dir = Shop.get_module_dir(alias)

View File

@@ -303,6 +303,12 @@ JSC_CCALL(socket_send,
JS_FreeCString(js, data);
} else {
unsigned char *data = js_get_blob_data(js, &len, argv[1]);
if (data == (unsigned char *)-1) {
return JS_EXCEPTION;
}
if (len == 0) {
return JS_ThrowReferenceError(js, "No data to send");
}
sent = send(sockfd, (const char *)data, len, flags);
}
@@ -385,6 +391,12 @@ JSC_CCALL(socket_sendto,
JS_FreeCString(js, data);
} else {
unsigned char *data = js_get_blob_data(js, &len, argv[1]);
if (data == (unsigned char *)-1) {
return JS_EXCEPTION;
}
if (len == 0) {
return JS_ThrowReferenceError(js, "No data to send");
}
sent = sendto(sockfd, (const char *)data, len, flags, to_addr, to_len);
}

View File

@@ -95,7 +95,8 @@ JSC_SCALL(utf8_encode,
JSC_CCALL(utf8_decode,
size_t len;
void *data = js_get_blob_data(js, &len, argv[0]);
if (!data) return JS_ThrowTypeError(js, "Expected blob");
if (data == (void*)-1) return JS_EXCEPTION;
if (!data || len == 0) return JS_ThrowTypeError(js, "No data present in blob");
// Create null-terminated string
char *str = malloc(len + 1);

View File

@@ -13,6 +13,7 @@ extern "C" {
JSValue js_blob_use(JSContext *js);
JSValue js_new_blob_stoned_copy(JSContext *js, void *data, size_t bytes);
void *js_get_blob_data(JSContext *js, size_t *size, JSValue v);
void *js_get_blob_data_bits(JSContext *js, size_t *bits, JSValue v);
int js_is_blob(JSContext *js, JSValue v);
double cell_random();

View File

@@ -43,6 +43,12 @@ JSC_CCALL(os_mailbox_push,
*/
size_t size;
void *data = js_get_blob_data(js, &size, argv[1]);
if (data == (void*)-1) {
return JS_EXCEPTION;
}
if (size == 0) {
return JS_ThrowInternalError(js, "No data present in blob");
}
// Create a new blob and copy the data
blob *msg_blob = blob_new(size * 8); // Convert bytes to bits

View File

@@ -531,8 +531,15 @@ JSValue js_new_blob_stoned_copy(JSContext *js, void *data, size_t bytes)
void *js_get_blob_data(JSContext *js, size_t *size, JSValue v)
{
blob *b = js2blob(js, v);
if (!b || !b->is_stone)
return NULL;
if (!b) {
JS_ThrowReferenceError(js, "get_blob_data: not called on a blob");
return -1;
}
if (!b->is_stone) {
JS_ThrowReferenceError(js, "attempted to read data from a non-stone blob");
return -1;
}
*size = (b->length + 7) / 8; // Return actual byte size based on bit length
return b->data;
@@ -541,8 +548,14 @@ void *js_get_blob_data(JSContext *js, size_t *size, JSValue v)
void *js_get_blob_data_bits(JSContext *js, size_t *bits, JSValue v)
{
blob *b = js2blob(js, v);
if (!b || !b->is_stone)
if (!b) {
JS_ThrowReferenceError(js, "get_blob_data_bits: not called on a blob");
return NULL;
}
if (!b->is_stone) {
JS_ThrowReferenceError(js, "attempted to read data from a non-stone blob");
return NULL;
}
*bits = b->length;
return b->data;

View File

@@ -155,7 +155,15 @@ static void wota_encode_value(WotaEncodeContext *enc, JSValueConst val, JSValueC
if (js_is_blob(ctx, replaced)) {
size_t buf_len;
void *buf_data = js_get_blob_data(ctx, &buf_len, replaced);
if (buf_data == (void *)-1) {
JS_FreeValue(ctx, replaced);
return; // JS_EXCEPTION will be handled by caller
}
if (buf_len == 0) {
wota_write_blob(&enc->wb, 0, "");
} else {
wota_write_blob(&enc->wb, (unsigned long long)buf_len * 8, (const char *)buf_data);
}
break;
}
if (JS_IsArray(ctx, replaced)) {
@@ -363,7 +371,8 @@ static JSValue js_wota_decode(JSContext *ctx, JSValueConst this_val, int argc, J
if (argc < 1) return JS_NULL;
size_t len;
uint8_t *buf = js_get_blob_data(ctx, &len, argv[0]);
if (!buf) return JS_NULL;
if (buf == (uint8_t *)-1) return JS_EXCEPTION;
if (!buf || len == 0) return JS_ThrowTypeError(ctx, "No blob data present");
JSValue reviver = (argc > 1 && JS_IsFunction(ctx, argv[1])) ? argv[1] : JS_NULL;
char *data_ptr = (char *)buf;
JSValue result = JS_NULL;