This commit is contained in:
2025-11-24 23:08:40 -06:00
parent b613c7b6fa
commit 8bc31e3ac6
25 changed files with 1358 additions and 879 deletions

View File

@@ -1,36 +1,19 @@
var cellfs = this
// CellFS: A filesystem implementation using miniz and raw OS filesystem
// Reimplements PhysFS functionality for archives and direct file access
// Supports mounting multiple sources (fs, zip) and named mounts (@name)
var fd = use('fd')
var miniz = use('miniz')
// Internal state
var mounts = [] // Array of {path, type, handle} - type: 'zip' or 'dir'
var write_dir = null
var path_cache = {} // Cache for resolve_path results
var mounts = [] // Array of {source, type, handle, name}
// Helper to normalize paths (but preserve leading slash for mount points)
function normalize_path(path, preserve_leading_slash) {
if (!path) return preserve_leading_slash ? "/" : ""
var had_leading_slash = path.startsWith('/')
// Helper to normalize paths
function normalize_path(path) {
if (!path) return ""
// Remove leading/trailing slashes and normalize
path = path.replace(/^\/+|\/+$/g, "")
// Restore leading slash if requested and it was there originally
if (preserve_leading_slash && had_leading_slash) {
path = "/" + path
}
return path
}
// Helper to check if path is absolute
function is_absolute(path) {
return path.startsWith("/")
}
// Helper to join paths
function join_paths(base, rel) {
base = base.replace(/\/+$/, "")
rel = rel.replace(/^\/+/, "")
return base + "/" + rel
return path.replace(/^\/+|\/+$/g, "")
}
// Helper to get directory from path
@@ -47,438 +30,242 @@ function basename(path) {
return path.substring(idx + 1)
}
// Find mount point for a given path
function find_mount(path) {
for (var i = mounts.length - 1; i >= 0; i--) {
var mount = mounts[i]
if (path.startsWith(mount.path)) {
return mount
// Helper to join paths
// Joins two path segments with exactly one "/" between them, trimming
// any trailing slashes from the base and leading slashes from the rel.
// An empty side yields the other side unchanged.
function join_paths(base, rel) {
  var left = base.replace(/\/+$/, "")
  var right = rel.replace(/^\/+/, "")
  if (!left) return right
  if (!right) return left
  return left + "/" + right
}
// Check if a file exists in a specific mount.
// Zip mounts: probe the entry via the archive handle's mod() — a throw
// means the entry is absent. fs mounts: stat source/path and require a
// regular file (directories and stat failures report false).
function mount_exists(mount, path) {
  if (mount.type == 'zip') {
    try {
      // mod() throws when the archive has no such entry; the value is unused
      mount.handle.mod(path)
      return true
    } catch (e) {
      return false
    }
  } else { // fs
    var full_path = join_paths(mount.source, path)
    try {
      var st = fd.stat(full_path)
      return st.isFile
    } catch (e) {
      return false
    }
  }
}
// Resolve a path to a specific mount and relative path.
// "@name/rel" targets the named mount directly (no existence check);
// any other path is searched across mounts in registration order for
// the first mount that actually contains it.
// Returns { mount, path } or throws for an unknown "@name".
// Returns null when no mount contains the file.
// NOTE(review): `must_exist` currently has no effect — both the
// must_exist branch and the fall-through return null. Confirm intent
// (the commented-out throw suggests it once changed behavior).
function resolve(path, must_exist) {
  path = normalize_path(path)
  // Check for named mount
  if (path.startsWith("@")) {
    var idx = path.indexOf("/")
    var mount_name = ""
    var rel_path = ""
    if (idx == -1) {
      // "@name" with no slash: whole-mount reference, empty relative path
      mount_name = path.substring(1)
      rel_path = ""
    } else {
      mount_name = path.substring(1, idx)
      rel_path = path.substring(idx + 1)
    }
    // Find named mount
    var mount = null
    for (var m of mounts) {
      if (m.name == mount_name) {
        mount = m
        break
      }
    }
    if (!mount) {
      throw new Error("Unknown mount point: @" + mount_name)
    }
    return { mount: mount, path: rel_path }
  }
  // Search path: first mount (in registration order) that has the file wins
  for (var mount of mounts) {
    if (mount_exists(mount, path)) {
      return { mount: mount, path: path }
    }
  }
  if (must_exist) {
    // throw new Error("File not found in any mount: " + path)
    return null
  }
  return null
}
// Resolve a virtual path to actual filesystem or archive
function resolve_path(vpath) {
var original_vpath = vpath
vpath = normalize_path(vpath)
// Check cache first
if (path_cache[vpath]) {
return path_cache[vpath]
}
if (!vpath) {
var result = {type: 'dir', path: '.', mount_path: ''}
path_cache[vpath] = result
return result
}
var mount = find_mount(vpath)
if (!mount) {
// No mount found, treat as direct filesystem access
var result = {type: 'dir', path: vpath, mount_path: ''}
path_cache[vpath] = result
return result
}
// Calculate relative path within mount
var rel_path = vpath.substring(mount.path.length)
rel_path = rel_path.replace(/^\/+/, "")
var result = {
type: mount.type,
path: rel_path,
mount_path: mount.path,
handle: mount.handle
}
path_cache[vpath] = result
return result
}
// Check if file exists
function exists(path) {
// Mount a source
function mount(source, name) {
// Check if source exists
var st = null
try {
stat(path)
return true
st = fd.stat(source)
} catch (e) {
return false
throw new Error("Mount source not found: " + source)
}
}
// Get file stats
function stat(path) {
var resolved = resolve_path(path)
if (resolved.type == 'zip') {
// For ZIP archives, get file info from miniz
var zip = resolved.handle
if (!zip) throw new Error("Invalid ZIP handle")
var file_path = resolved.path
if (!file_path) {
// Root directory stats
return {
filesize: 0,
modtime: 0,
createtime: 0,
accesstime: 0,
isDirectory: true
}
}
try {
var mod_time = zip.mod(file_path)
// For ZIP files, we don't have full stat info, just mod time
return {
filesize: 0, // Would need to extract to get size
modtime: mod_time * 1000, // Convert to milliseconds
createtime: mod_time * 1000,
accesstime: mod_time * 1000,
isDirectory: false
}
} catch (e) {
throw new Error("File not found in archive: " + file_path)
}
} else {
// Direct filesystem access using fd
var fd_mod = use('fd')
var full_path = resolved.path
try {
var fd_stat = fd_mod.fstat(fd_mod.open(full_path, 'r'))
return {
filesize: fd_stat.size,
modtime: fd_stat.mtime,
createtime: fd_stat.ctime,
accesstime: fd_stat.atime,
isDirectory: fd_stat.isDirectory
}
} catch (e) {
throw new Error("File not found: " + full_path)
}
var mount_info = {
source: source,
name: name || null,
type: 'fs',
handle: null
}
}
// Read entire file as bytes
function slurpbytes(path) {
var resolved = resolve_path(path)
if (resolved.type == 'zip') {
var zip = resolved.handle
if (!zip) throw new Error("Invalid ZIP handle")
try {
return zip.slurp(resolved.path)
} catch (e) {
throw new Error("Failed to read from archive: " + e.message)
}
} else {
// Direct filesystem access
var fd_mod = use('fd')
var fd = fd_mod.open(resolved.path, 'r')
try {
var fd_stat = fd_mod.fstat(fd)
var f = fd_mod.read(fd, fd_stat.size)
fd_mod.close(fd)
return f
} catch (e) {
throw new Error("Failed to read file: " + e.message)
}
}
}
// Read entire file as string
function slurp(path) {
var bytes = slurpbytes(path)
return bytes
// Convert bytes to string - assuming UTF-8
return String.fromCharCode.apply(null, new Uint8Array(bytes))
}
// Write data to file
function slurpwrite(data, path) {
var resolved = resolve_path(path)
if (resolved.type == 'zip') {
throw new Error("Cannot write to ZIP archives")
}
// Direct filesystem access
var fd_mod = use('fd')
var flags = resolved.path == path ? 'w' : 'w' // Overwrite
var fd = fd_mod.open(resolved.path, flags)
try {
if (typeof data == 'string') {
fd_mod.write(fd, data)
} else {
// Assume ArrayBuffer/Uint8Array
fd_mod.write(fd, data)
}
} finally {
fd_mod.close(fd)
}
}
// Mount an archive or directory
function mount(source, mount_point, prepend) {
prepend = prepend != null ? prepend : false
var miniz_mod = use('miniz')
// Try to load as ZIP first
try {
// For ZIP mounting, try to read the source file directly first
if (st.isDirectory) {
mount_info.type = 'fs'
} else if (st.isFile) {
// Assume zip
var zip_data = null
try {
var fd_mod = use('fd')
var fd = fd_mod.open(source, 'r')
// Get file size first
var fd_stat = fd_mod.fstat(fd)
// Read entire file
zip_data = fd_mod.read(fd, fd_stat.size)
fd_mod.close(fd)
} catch (e) {
// If direct read fails, try through resolve_path
zip_data = slurpbytes(source)
// Always read as bytes for zip
var f = fd.open(source, 'r')
var s = fd.fstat(f)
zip_data = fd.read(f, s.size)
fd.close(f)
var zip = miniz.read(zip_data)
if (!zip || typeof zip.count != 'function') {
throw new Error("Invalid ZIP file: " + source)
}
var zip = miniz_mod.read(zip_data)
// Debug: check if zip is valid
if (!zip || typeof zip.count != 'function') {
throw new Error("Invalid ZIP reader")
}
var mount_info = {
path: normalize_path(mount_point || "/", true),
type: 'zip',
handle: zip,
source: source
}
if (prepend) {
mounts.unshift(mount_info)
} else {
mounts.push(mount_info)
}
return
} catch (e) {
// Not a ZIP, treat as directory
log.console("ZIP mounting failed for " + source + ": " + e.message)
}
// Mount as directory
var mount_info = {
path: normalize_path(mount_point || "/", true),
type: 'dir',
handle: null,
source: source
}
if (prepend) {
mounts.unshift(mount_info)
mount_info.type = 'zip'
mount_info.handle = zip
} else {
mounts.push(mount_info)
throw new Error("Unsupported mount source type: " + source)
}
// Clear cache since mounts changed
path_cache = {}
mounts.push(mount_info)
log.console(`Mounted ${source} ${name ? 'as @' + name : ''}`)
}
// Unmount a path
function unmount(path) {
path = normalize_path(path)
// Unmount
function unmount(name_or_source) {
for (var i = 0; i < mounts.length; i++) {
if (mounts[i].path == path) {
if (mounts[i].name == name_or_source || mounts[i].source == name_or_source) {
mounts.splice(i, 1)
// Clear cache since mounts changed
path_cache = {}
return
}
}
throw new Error("Mount point not found: " + path)
throw new Error("Mount not found: " + name_or_source)
}
// Set write directory
// Records `path` as the module-level write location.
// NOTE(review): write_dir is never read in the visible code — confirm
// whether this setter is still needed.
function writepath(path) {
  write_dir = path
}
// Simple glob matching (basic implementation)
// `*` matches any run of characters; every other character is literal.
// The pattern must match the WHOLE string.
// Fixes over the previous version: regex metacharacters in the pattern
// (notably ".") are escaped so "*.txt" no longer matches "fooXtxt", and
// the regex is anchored so "foo*" no longer matches "barfoo".
function match(pattern, str) {
  if (pattern == str) return true
  if (pattern == "*") return true
  if (pattern.includes("*")) {
    // Escape everything regex-special except "*", then translate "*" -> ".*"
    var escaped = pattern.replace(/[.+?^${}()|[\]\\]/g, "\\$&")
    var regex = new RegExp("^" + escaped.replace(/\*/g, ".*") + "$")
    return regex.test(str)
  }
  return false
}
// Basic globfs implementation
// Recursively enumerates start_path ("" when omitted) and keeps every
// entry matching at least one of the given glob patterns.
// Enumeration failures are deliberately swallowed: the result is then
// whatever was collected so far (usually an empty array).
function globfs(patterns, start_path) {
  var matched = []
  try {
    for (var entry of enumerate(start_path || "", true)) {
      var hit = patterns.some(function(p) { return match(p, entry) })
      if (hit) matched.push(entry)
    }
  } catch (e) {
    // Deliberate best-effort: ignore enumeration errors
  }
  return matched
}
// Enumerate files in directory
function enumerate(path, recurse) {
recurse = recurse != undefined ? recurse : false
var resolved = resolve_path(path)
if (resolved.type == 'zip') {
var zip = resolved.handle
if (!zip) return []
var files = []
var prefix = resolved.path ? resolved.path + "/" : ""
for (var i = 0; i < zip.count(); i++) {
var filename = zip.get_filename(i)
if (!filename) continue
if (prefix && !filename.startsWith(prefix)) continue
var rel_name = filename.substring(prefix.length)
if (!rel_name) continue
// For non-recursive, don't include subdirectories
if (!recurse && rel_name.includes("/")) continue
files.push(join_paths(path, rel_name))
}
return files
// Read file
function slurp(path) {
var res = resolve(path, true)
if (!res) throw new Error("File not found: " + path)
if (res.mount.type == 'zip') {
return res.mount.handle.slurp(res.path)
} else {
// Direct filesystem enumeration - simplified for now
// In a full implementation, would need directory reading capabilities
return []
var full_path = join_paths(res.mount.source, res.path)
return fd.slurp(full_path)
}
}
// Check if path is directory
function is_directory(path) {
try {
var st = stat(path)
return st.isDirectory
} catch (e) {
return false
// Read file as bytes
// Resolves `path` across mounts and returns the file's raw contents.
// Zip mounts delegate to the archive handle; fs mounts read via fd.
// Throws when the path cannot be resolved in any mount.
function slurpbytes(path) {
  var res = resolve(path, true)
  if (!res) throw new Error("File not found: " + path)
  if (res.mount.type == 'zip') {
    return res.mount.handle.slurp(res.path)
  } else {
    var full_path = join_paths(res.mount.source, res.path)
    var f = fd.open(full_path, 'r')
    try {
      // fstat first so we know how many bytes to request
      var s = fd.fstat(f)
      return fd.read(f, s.size)
    } finally {
      // Fix: close even when fstat/read throws, so the descriptor can't leak
      fd.close(f)
    }
  }
}
// Get mount point for path
function mountpoint(path) {
var mount = find_mount(path)
return mount ? mount.path : null
// Write file
// Writes `data` to `path`, which must target a named mount
// ("@name/file") backed by the filesystem; zip mounts are read-only.
// Throws on non-"@" paths so writes are always explicit about their
// destination mount.
function slurpwrite(path, data) {
  if (!path.startsWith("@")) {
    throw new Error("slurpwrite requires a named mount (e.g. @name/file.txt)")
  }
  var res = resolve(path, false)
  // For named mounts, resolve returns the mount even if file doesn't exist
  if (res.mount.type == 'zip') {
    throw new Error("Cannot write to zip mount: @" + res.mount.name)
  }
  var full_path = join_paths(res.mount.source, res.path)
  var f = fd.open(full_path, 'w')
  try {
    fd.write(f, data)
  } finally {
    // Fix: guarantee the descriptor is released even if the write fails
    fd.close(f)
  }
}
// Check existence
// Returns true when `path` resolves to a file in some mount.
// "@name" paths probe the named mount directly via mount_exists.
// NOTE(review): for a "@name" path with an unknown mount name, resolve()
// throws here rather than this returning false — confirm that's intended.
function exists(path) {
  var res = resolve(path, false)
  if (path.startsWith("@")) {
    return mount_exists(res.mount, res.path)
  }
  return res != null
}
// Stat
// Returns { filesize, modtime, isDirectory } for a resolved path, or
// throws when the path is not found in any mount.
// Zip entries expose only a modification time, so filesize is reported
// as 0 and isDirectory as false — presumably real size would require
// extraction; TODO confirm against the miniz binding.
function stat(path) {
  var res = resolve(path, true)
  if (!res) throw new Error("File not found: " + path)
  if (res.mount.type == 'zip') {
    var mod = res.mount.handle.mod(res.path)
    return {
      filesize: 0,
      // *1000: converts what appears to be seconds to ms — confirm units
      modtime: mod * 1000,
      isDirectory: false
    }
  } else {
    var full_path = join_paths(res.mount.source, res.path)
    var s = fd.stat(full_path)
    return {
      filesize: s.size,
      modtime: s.mtime,
      isDirectory: s.isDirectory
    }
  }
}
// Get search paths
function searchpath() {
var paths = []
for (var mount of mounts) {
paths.push(mount.path)
paths.push(mount.source)
}
return paths
}
// File object for writing
function open(path) {
var resolved = resolve_path(path)
// Initialize
mount('.', 'cwd')
if (resolved.type == 'zip') {
throw new Error("Cannot open files for writing in ZIP archives")
}
var fd_mod = use('fd')
var fd = fd_mod.open(resolved.path, 'w')
return {
close: function() { fd_mod.close(fd) },
write: function(data) { fd_mod.write(fd, data) },
buffer: function(size) { /* Not implemented */ },
tell: function() { /* Not implemented */ return 0 },
eof: function() { /* Not implemented */ return false }
}
}
// Directory operations
function mkdir(path) {
var resolved = resolve_path(path)
if (resolved.type == 'zip') {
throw new Error("Cannot create directories in ZIP archives")
}
var fd_mod = use('fd')
fd_mod.mkdir(resolved.path)
}
function rm(path) {
var resolved = resolve_path(path)
if (resolved.type == 'zip') {
throw new Error("Cannot remove files from ZIP archives")
}
var fd_mod = use('fd')
fd_mod.rmdir(resolved.path) // or rm depending on type
}
// Base directory (simplified)
function basedir() {
return "."
}
// User directory (simplified)
function prefdir(org, app) {
return "./user_data"
}
// Get real directory (simplified)
function realdir(path) {
return dirname(path)
}
// Export functions
cellfs.exists = exists
cellfs.stat = stat
cellfs.slurpbytes = slurpbytes
cellfs.slurp = slurp
cellfs.slurpwrite = slurpwrite
// Exports
cellfs.mount = mount
cellfs.unmount = unmount
cellfs.writepath = writepath
cellfs.match = match
cellfs.globfs = globfs
cellfs.enumerate = enumerate
cellfs.is_directory = is_directory
cellfs.mountpoint = mountpoint
cellfs.slurp = slurp
cellfs.slurpbytes = slurpbytes
cellfs.slurpwrite = slurpwrite
cellfs.exists = exists
cellfs.stat = stat
cellfs.searchpath = searchpath
cellfs.open = open
cellfs.mkdir = mkdir
cellfs.rm = rm
cellfs.basedir = basedir
cellfs.prefdir = prefdir
cellfs.realdir = realdir
return cellfs

View File

@@ -73,11 +73,51 @@ var enet = hidden.enet
var nota = hidden.nota
var fd = use_embed('fd')
log.console(fd.getcwd())
log.console(cell.args.program)
var shop_path = '.cell'
var mod_path = '.cell/modules/'
if (!fd.stat('.cell').isDirectory) {
log.console("No cell directory found. Make one.\n");
os.exit(1);
}
// Return true when `path` names an existing regular file; any stat
// failure (missing path, permission error, ...) reports false.
function is_file(path) {
  var result = false
  try {
    result = fd.stat(path).isFile
  } catch {
    result = false
  }
  return result
}
// Write `blob` to `path`, truncating/creating the file ('w' mode).
// The descriptor is closed in a finally block so a failed write
// cannot leak the handle (the previous version leaked on throw).
function write_file(path, blob) {
  var fd_handle = fd.open(path, 'w')
  try {
    fd.write(fd_handle, blob)
  } finally {
    fd.close(fd_handle)
  }
}
// Recursively create `dir` and any missing ancestors (like `mkdir -p`).
// A directory whose stat succeeds is assumed to exist and left alone;
// '' and '.' terminate the recursion at the tree root.
function mkdir_p(dir) {
  if (dir == '' || dir == '.') return
  var present = true
  try { fd.stat(dir) } catch { present = false }
  if (present) return
  // Create the parent first, then this level
  mkdir_p(dir.substring(0, dir.lastIndexOf('/')))
  fd.mkdir(dir)
}
// Wota decode timing tracking
var wota_decode_times = []
var last_wota_flush = 0
// Strip hidden from cell so nothing else can access it
// But first, list files in the core QOP package
var qop = use_embed('qop')
var core_qop = qop.open(hidden.core_qop_blob)
var utf8 = use_embed('utf8')
delete cell.hidden
function disrupt(err)
@@ -91,9 +131,11 @@ function disrupt(err)
report_to_overling({type:'stop'})
}
for (var id of underlings) {
log.console(`calling on ${id} to disrupt too`)
$_.stop(create_actor({id}))
if (underlings) {
for (var id of underlings) {
log.console(`calling on ${id} to disrupt too`)
$_.stop(create_actor({id}))
}
}
if (err) {
@@ -110,33 +152,10 @@ actor_mod.on_exception(disrupt)
var js = use_embed('js')
var io = use_embed('io')
//log.console(json.encode(fd))
//log.console(fd.fstat)
//log.console(json.encode(fd.fstat('.cell')))
//log.console(fd.fstat('.cell').isDirectory)
if (!fd.stat('.cell').isDirectory) {
log.console("No cell directory found. Make one.\n");
os.exit(1);
}
function slurp(path) {
var st = fd.stat(path)
if (!st.isFile) return null
var fd_handle = fd.open(path, 'r')
var content_blob = fd.read(fd_handle, st.size)
fd.close(fd_handle)
return text(content_blob)
}
var module_alias = {}
var use_cache = {}
var BASEPATH = 'base' + MOD_EXT
var script = slurp(BASEPATH)
log.console(script)
var script = utf8.decode(core_qop.read(BASEPATH))
var fnname = "base"
script = `(function ${fnname}() { ${script}; })`
js.eval(BASEPATH, script)()
@@ -144,54 +163,252 @@ js.eval(BASEPATH, script)()
var inProgress = {}
var loadingStack = []
function resolve_alias(name) {
while(module_alias[name]) name = module_alias[name]
return name
// Track current package context for nested use() calls
var current_package = null
// Get package name from a resolved path
// A path of the form ".cell/modules/<pkg>/..." yields "<pkg>";
// anything else (including a bare ".cell/modules/<name>" with no
// trailing component) yields null.
function get_package_from_path(path) {
  if (!path) return null
  var prefix = '.cell/modules/'
  if (!path.startsWith(prefix)) return null
  var remainder = path.substring(prefix.length)
  var cut = remainder.indexOf('/')
  return cut > 0 ? remainder.substring(0, cut) : null
}
// Config is loaded later, but we need to access it in resolve_module_path
// This will be set after shop.load_config() is called
var config = null
// Resolve actor program path with package awareness
// Resolution order:
// 1. Current package (root project when pkg_context is null)
// 2. Declared dependencies (from cell.toml)
// 3. core_qop (standard library)
// Params: requested — actor name, optionally "alias/sub/path";
//         pkg_context — package the caller lives in, or null for root.
// Returns { path, package_name, isCore } or null when nothing matched.
// Relies on module-level `config` (may still be null before
// shop.load_config() runs — every access is guarded for that).
function resolve_actor_path(requested, pkg_context) {
  var dependencies = (config && config.dependencies) ? config.dependencies : {}
  // Step 1: current package
  if (pkg_context) {
    var pkg_actor_path = '.cell/modules/' + pkg_context + '/' + requested + ACTOR_EXT
    if (is_file(pkg_actor_path)) {
      return { path: pkg_actor_path, package_name: pkg_context, isCore: false }
    }
    // Check if package is locally replaced
    if (config && config.replace && config.replace[pkg_context]) {
      var replace_path = config.replace[pkg_context]
      var full_path = replace_path + '/' + requested + ACTOR_EXT
      if (is_file(full_path)) {
        return { path: full_path, package_name: pkg_context, isCore: false }
      }
    }
  } else {
    // Root project: look beside the project itself
    var project_actor_path = requested + ACTOR_EXT
    if (is_file(project_actor_path)) {
      return { path: project_actor_path, package_name: null, isCore: false }
    }
  }
  // Step 2: dependencies (explicit alias first) and replace directives
  if (requested.includes('/')) {
    // "alias/sub/path" form: first segment selects the package
    var actor_parts = requested.split('/')
    var actor_pkg_alias = actor_parts[0]
    var actor_sub_path = actor_parts.slice(1).join('/')
    // Check for replace directive first
    if (config && config.replace && config.replace[actor_pkg_alias]) {
      var replace_path = config.replace[actor_pkg_alias]
      var full_path = replace_path + '/' + (actor_sub_path || actor_pkg_alias) + ACTOR_EXT
      if (is_file(full_path)) {
        return { path: full_path, package_name: actor_pkg_alias, isCore: false }
      }
    } else if (dependencies[actor_pkg_alias]) {
      var dep_actor_path = '.cell/modules/' + actor_pkg_alias + '/' + actor_sub_path + ACTOR_EXT
      if (is_file(dep_actor_path)) {
        return { path: dep_actor_path, package_name: actor_pkg_alias, isCore: false }
      }
    }
  } else {
    // Check replace directives for simple actor names
    if (config && config.replace && config.replace[requested]) {
      var replace_path = config.replace[requested]
      var full_path = replace_path + '/' + requested + ACTOR_EXT
      if (is_file(full_path)) {
        return { path: full_path, package_name: requested, isCore: false }
      }
    }
    // Check dependencies for simple actor names
    // (first dependency that provides the name wins — iteration order of
    // the dependencies object)
    for (var actor_alias in dependencies) {
      var dep_actor_simple = '.cell/modules/' + actor_alias + '/' + requested + ACTOR_EXT
      if (is_file(dep_actor_simple)) {
        return { path: dep_actor_simple, package_name: actor_alias, isCore: false }
      }
    }
  }
  // Step 3: core — core_qop.read throws when the entry is absent
  try {
    core_qop.read(requested + ACTOR_EXT)
    return { path: requested + ACTOR_EXT, package_name: null, isCore: true }
  } catch (e) {
    // Not in core
  }
  return null
}
// Resolve module path with package awareness
// Resolution order:
// 1. Current package (root project when pkg_context is null)
// 2. Declared dependencies (from cell.toml)
// 3. core_qop (standard library)
// Mirrors resolve_actor_path but for MOD_EXT modules.
// Returns { path, package_name, isCore } or null when nothing matched.
function resolve_module_path(requested, pkg_context) {
  var dependencies = (config && config.dependencies) ? config.dependencies : {}
  // Step 1: current package
  if (pkg_context) {
    var pkg_module_path = '.cell/modules/' + pkg_context + '/' + requested + MOD_EXT
    if (is_file(pkg_module_path)) {
      return { path: pkg_module_path, package_name: pkg_context, isCore: false }
    }
    // Check if package is locally replaced
    if (config && config.replace && config.replace[pkg_context]) {
      var replace_path = config.replace[pkg_context]
      var full_path = replace_path + '/' + requested + MOD_EXT
      if (is_file(full_path)) {
        return { path: full_path, package_name: pkg_context, isCore: false }
      }
    }
  } else {
    // Root project: look beside the project itself
    var project_module_path = requested + MOD_EXT
    if (is_file(project_module_path)) {
      return { path: project_module_path, package_name: null, isCore: false }
    }
  }
  // Step 2: dependencies (explicit alias first) and replace directives
  if (requested.includes('/')) {
    // "alias/sub/path" form: first segment selects the package
    var module_parts = requested.split('/')
    var module_pkg_alias = module_parts[0]
    var module_sub = module_parts.slice(1).join('/')
    // Check for replace directive first
    if (config && config.replace && config.replace[module_pkg_alias]) {
      var replace_path = config.replace[module_pkg_alias]
      var full_path = replace_path + '/' + (module_sub || module_pkg_alias) + MOD_EXT
      if (is_file(full_path)) {
        return { path: full_path, package_name: module_pkg_alias, isCore: false }
      }
    } else if (dependencies[module_pkg_alias]) {
      var dep_module_path = '.cell/modules/' + module_pkg_alias + '/' + module_sub + MOD_EXT
      if (is_file(dep_module_path)) {
        return { path: dep_module_path, package_name: module_pkg_alias, isCore: false }
      }
    }
  } else {
    // Check replace directives for simple module names
    if (config && config.replace && config.replace[requested]) {
      var replace_path = config.replace[requested]
      var full_path = replace_path + '/' + requested + MOD_EXT
      if (is_file(full_path)) {
        return { path: full_path, package_name: requested, isCore: false }
      }
    }
    // Check dependencies for simple module names
    for (var module_alias in dependencies) {
      var dep_module_simple = '.cell/modules/' + module_alias + '/' + requested + MOD_EXT
      if (is_file(dep_module_simple)) {
        return { path: dep_module_simple, package_name: module_alias, isCore: false }
      }
    }
  }
  // Step 3: core — core_qop.read throws when the entry is absent
  try {
    core_qop.read(requested + MOD_EXT)
    return { path: requested + MOD_EXT, package_name: null, isCore: true }
  } catch (e) {
    // Not in core
  }
  return null
}
globalThis.use = function use(file, ...args) {
/* Package-aware module resolution:
1. If in a package context, check within that package first
2. Check local project files
3. Check declared dependencies (from cell.toml [dependencies])
4. Check core_qop (standard library)
There's also the possibility of native C code;
there may be, in a package, a .so/.dll/.dylib
that can be loaded. If that exists, as well as a .cm file, the
.so/.dll/.dylib is loaded and the .cm file is ran with the
loaded module as this.
for embedded modules, it's the same, but in the cell runtime, so no .so/.dll/.dylib
is loaded.
*/
var requested = file
var key = resolve_alias(file)
if (use_cache[key]) return use_cache[key]
// We'll check for circular dependencies after we determine the path
var path = null
// Check embedded modules first (these are always available)
var embed_mod = use_embed(requested)
// First check if we're loading from a script and look in its directory
if(loadingStack.length > 0) {
var cur = loadingStack[loadingStack.length-1]
if(cur.includes('/')) {
var dir = cur.substring(0,cur.lastIndexOf('/'))
var cand = dir + '/' + requested + MOD_EXT
if(io.exists(cand) && !io.is_directory(cand))
path = cand
}
}
// Resolve the module path with package awareness
var resolved = resolve_module_path(requested, current_package)
// If not found locally, check the normal path
if(!path) {
var cand = requested + MOD_EXT
if(io.exists(cand) && !io.is_directory(cand))
path = cand
}
// Generate cache key based on resolution
var cache_key = resolved
? (resolved.isCore ? 'core:' + resolved.path : resolved.path)
: requested
if (!path && !embed_mod)
throw new Error(`Module ${file} could not be found`)
// — if its purely embedded, well use the requested name as our key —
var canonical = embed_mod
? requested
: io.realdir(path) + '/' + path // or realpath(path)
if (use_cache[cache_key]) return use_cache[cache_key]
if (!resolved && !embed_mod)
throw new Error(`Module ${file} could not be found (package context: ${current_package || 'none'})`)
// If only embedded module exists, return it
if (!path && embed_mod) {
use_cache[file] = embed_mod
if (!resolved && embed_mod) {
use_cache[cache_key] = embed_mod
return embed_mod
}
var path = resolved.path
var isCore = resolved.isCore
var module_package = resolved.package_name
// If core module, load it
if (isCore) {
var ret = null
try {
var script = utf8.decode(core_qop.read(path))
var mod_script = `(function setup_${requested.replace(/[^a-zA-Z0-9_]/g, '_')}_module(arg, $_){${script};})`
var fn = js.compile(path, mod_script)
fn = js.eval_compile(fn)
var context = embed_mod ? embed_mod : {}
ret = fn.call(context, args, $_)
} catch (e) {
// Script component doesn't exist, fall back to embedded module
// log.console("use: core module " + path + " has no script component, using embedded module")
}
if (!ret && embed_mod) {
ret = embed_mod
} else if (!ret) {
throw new Error(`Use must be used with a module, but ${path} doesn't return a value`)
}
use_cache[cache_key] = ret
return ret
}
// Check for circular dependencies using the resolved path
if (path && loadingStack.includes(path)) {
let cycleIndex = loadingStack.indexOf(path)
@@ -204,53 +421,56 @@ globalThis.use = function use(file, ...args) {
)
}
log.console("use: loading file " + path + " (package: " + (module_package || 'local') + ")")
inProgress[path] = true
loadingStack.push(path)
// Save and set package context for nested use() calls
var prev_package = current_package
current_package = module_package
// Determine the compiled file path in .cell directory
var cleanPath = (io.realdir(path) + "/" + path).replace(/[:\\]/g, '/').replace(/\/+/g, '/')
var cleanPath = path.replace(/[:\\]/g, '/').replace(/\/+/g, '/')
var compiledPath = ".cell/build/" + cleanPath + '.o'
io.mkdir(compiledPath.dir())
mkdir_p(compiledPath.substring(0, compiledPath.lastIndexOf('/')))
// Check if compiled version exists and is newer than source
var useCompiled = false
if (io.exists(compiledPath)) {
var srcStat = io.stat(path)
var compiledStat = io.stat(compiledPath)
if (compiledStat.modtime >= srcStat.modtime) {
useCompiled = true
}
}
var srcStat = fd.stat(path)
var compiledStat = fd.stat(compiledPath)
// if (srcStat && compiledStat && compiledStat.mtime > srcStat.mtime) {
// useCompiled = true
// }
var fn
var mod_name = path.name()
var mod_name = path.substring(path.lastIndexOf('/') + 1, path.lastIndexOf('.'))
if (useCompiled) {
var compiledBlob = io.slurpbytes(compiledPath)
var compiledBlob = fd.slurp(compiledPath)
fn = js.compile_unblob(compiledBlob)
fn = js.eval_compile(fn)
} else {
// Compile from source
var script = io.slurp(path)
var script = utf8.decode(fd.slurp(path))
var mod_script = `(function setup_${mod_name}_module(arg, $_){${script};})`
fn = js.compile(path, mod_script)
// Save compiled version to .cell directory
var compiled = js.compile_blob(fn)
io.slurpwrite(compiledPath, compiled)
// var compiled = js.compile_blob(fn)
// write_file(compiledPath, compiled)
fn = js.eval_compile(fn)
}
// Create context - if embedded module exists, script extends it
var context = {}
if (embed_mod)
context.__proto__ = embed_mod
// Create context - if embedded module exists, use it as 'this'
var context = embed_mod ? embed_mod : {}
// Call the script - pass embedded module as 'this' if it exists
var ret = fn.call(context, args, $_)
// Restore previous package context
current_package = prev_package
// If script doesn't return anything, check if we have embedded module
if (!ret && embed_mod) {
ret = embed_mod
@@ -262,17 +482,19 @@ globalThis.use = function use(file, ...args) {
delete inProgress[path]
// Cache the result
use_cache[file] = ret
use_cache[cache_key] = ret
return ret
}
globalThis.json = use('json')
log.console(json.encode(cell))
var time = use('time')
var st_now = time.number()
var shop = use('shop')
var config = shop.load_config()
log.console(`use shop in ${time.number() - st_now} seconds`)
config = shop.load_config()
var default_config = {
ar_timer: 60,
actor_memory:0,
@@ -281,6 +503,7 @@ var default_config = {
main: false,
}
config ??= {}
config.system ??= {}
config.system.__proto__ = default_config
@@ -307,7 +530,6 @@ function load_actor_config(program) {
}
var blob = use('blob')
var blob_stone = blob.prototype.stone
var blob_stonep = blob.prototype.stonep;
delete blob.prototype.stone;
@@ -515,9 +737,23 @@ $_.receiver[cell.DOC] = "registers a function that will receive all messages..."
$_.start = function start(cb, program, ...args) {
if (!program) return
// Resolve the actor program path with package awareness
var resolved_program = resolve_actor_path(program, current_package)
if (!resolved_program) {
throw new Error(`Actor program ${program} could not be found (package context: ${current_package || 'none'})`)
}
var id = guid()
if (args.length == 1 && Array.isArray(args[0])) args = args[0]
var startup = { id, overling: $_, root, arg: args, program }
var startup = {
id,
overling: $_,
root,
arg: args,
program: resolved_program.path,
package_context: resolved_program.package_name // Pass package context to new actor
}
greeters[id] = cb
message_queue.push({ startup })
}
@@ -715,6 +951,18 @@ overling = cell.args.overling
root = cell.args.root
root ??= $_
// Set package context from parent actor (if spawned from a package)
if (cell.args.package_context) {
current_package = cell.args.package_context
log.console(`Actor initialized with package context: ${current_package}`)
} else {
// Infer package context from program path
current_package = get_package_from_path(cell.args.program)
if (current_package) {
log.console(`Actor inferred package context from path: ${current_package}`)
}
}
if (overling) {
$_.couple(overling) // auto couple to overling
@@ -816,37 +1064,30 @@ function enet_check()
// enet_check();
var init_end = time.number()
log.console(`initialization completed in ${init_end-st_now} seconds`)
var load_program_start = time.number()
// Finally, run the program
actor_mod.setname(cell.args.program)
var prog = null
var progPath = cell.args.program
var prog = cell.args.program
if (io.exists(progPath + ACTOR_EXT) && !io.is_directory(progPath + ACTOR_EXT)) {
prog = progPath + ACTOR_EXT
} else if (io.exists(progPath) && io.is_directory(progPath)) {
// First check for folder's name as a file
var folderName = progPath.split('/').pop()
var folderNamePath = progPath + '/' + folderName + ACTOR_EXT
if (io.exists(folderNamePath) && !io.is_directory(folderNamePath)) {
prog = folderNamePath
} else {
// Fall back to main.ce
var mainPath = progPath + '/main' + ACTOR_EXT
if (io.exists(mainPath) && !io.is_directory(mainPath)) {
prog = mainPath
}
}
// Resolve the main program path
var resolved_prog = resolve_actor_path(cell.args.program, current_package)
if (!resolved_prog) {
throw new Error(`Main program ${cell.args.program} could not be found`)
}
if (!prog)
throw new Error(cell.args.program + " not found.");
prog = resolved_prog.path
var progDir = io.realdir(prog) + "/" + prog.substring(0, prog.lastIndexOf('/'))
io.mount(progDir.replace(/\/+$/, ''))
var progContent = io.slurp(prog)
var progContent
if (resolved_prog.isCore) {
progContent = utf8.decode(core_qop.read(prog))
} else {
progContent = utf8.decode(fd.slurp(prog))
}
var prog_script = `(function ${cell.args.program.name()}_start($_, arg) { var args = arg; ${progContent} })`
@@ -854,7 +1095,9 @@ var prog_script = `(function ${cell.args.program.name()}_start($_, arg) { var ar
var startfn = js.eval(cell.args.program, prog_script);
log.console(`program compiled in ${time.number()-st_now} seconds`)
log.console(`program compiled in ${time.number()-load_program_start} seconds`)
var exec_start = time.number()
$_.clock(_ => {
var val = startfn($_, cell.args.arg);
@@ -863,6 +1106,8 @@ $_.clock(_ => {
throw new Error('Program must not return anything');
})
log.console(`program queued in ${time.number()-exec_start} seconds`)
log.console(`program executed in ${time.number()-st_now} seconds`)
})()

22
scripts/fetch.ce Normal file
View File

@@ -0,0 +1,22 @@
// cell fetch - Install dependencies
var shop = use('shop')
log.console("Fetching dependencies...")
if (!shop.init()) {
log.error("Failed to initialize .cell directory")
$_.stop()
return
}
var deps = shop.get_dependencies()
var count = 0
for (var alias in deps) {
if (shop.install(alias)) {
count++
}
}
log.console(`Fetched ${count} dependencies.`)
$_.stop()

View File

@@ -1,6 +1,6 @@
// cell get <locator> - Fetch a module and add it to dependencies
var io = use('io')
var fd = use('fd')
var shop = use('shop')
var miniz = use('miniz')
var http = use('http')
@@ -32,7 +32,7 @@ if (!parsed) {
}
// Initialize shop if needed
if (!io.exists('.cell/cell.toml')) {
if (!fd.stat('.cell/cell.toml').isFile) {
log.console("No cell.toml found. Initializing...")
shop.init()
}
@@ -65,8 +65,8 @@ shop.add_dependency(alias, locator)
// Create module directory
var module_dir = '.cell/modules/' + alias + '@' + parsed.version
if (!io.exists(module_dir)) {
io.mkdir(module_dir)
if (!fd.stat(module_dir).isDirectory) {
fd.mkdir(module_dir)
}
// TODO: Actually fetch the module from the repository

View File

@@ -1,14 +1,16 @@
// cell help - Display help information for cell commands
var io = use('io')
var fd = use('fd')
var utf8 = use('utf8')
var command = args.length > 0 ? args[0] : null
// Display specific command help
if (command) {
var man_file = 'scripts/man/' + command + '.man'
if (io.exists(man_file)) {
var content = io.slurp(man_file)
var stat = fd.stat(man_file);
if (stat && stat.isFile) {
var content = utf8.decode(fd.slurp(man_file))
log.console(content)
} else {
log.error("No help available for command: " + command)
@@ -20,8 +22,9 @@ if (command) {
// Display general help
var cell_man = 'scripts/man/cell.man'
if (io.exists(cell_man)) {
var content = io.slurp(cell_man)
var stat = fd.stat(cell_man);
if (stat && stat.isFile) {
var content = utf8.decode(fd.slurp(cell_man))
log.console(content)
} else {
// Fallback if man file doesn't exist

View File

@@ -1,12 +1,18 @@
var shop = use('shop')
var http = use('http')
var miniz = use('miniz')
var io = use('io')
var fd = use('fd')
var crypto = use('crypto')
var text = use('text')
var toml = use('toml')
var time = use('time')
function slurpwrite(path, data) {
var f = fd.open(path, "w")
f.write(data)
f.close()
}
var uses = {}
uses.download = function()
@@ -17,15 +23,15 @@ uses.download = function()
var lock_path = '.cell/lock.toml'
// Ensure directories exist
if (!io.exists(cache_dir))
io.mkdir(cache_dir)
if (!io.exists(modules_dir))
io.mkdir(modules_dir)
if (!fd.stat(cache_dir).isDirectory)
fd.mkdir(cache_dir)
if (!fd.stat(modules_dir).isDirectory)
fd.mkdir(modules_dir)
// Load or create lock file
var lock = {}
if (io.exists(lock_path)) {
var lock_content = io.slurp(lock_path)
if (fd.stat(lock_path).isFile) {
var lock_content = fd.slurp(lock_path)
lock = toml.decode(lock_content)
}
if (!lock.modules) lock.modules = {}
@@ -57,7 +63,7 @@ uses.download = function()
if (!lock.modules[mod] || !lock.modules[mod].hash) {
log.console(`${mod}: not in lock file, will download`)
need_download = true
} else if (!io.exists(cache_path)) {
} else if (!fd.stat(cache_path).isDirectory) {
log.console(`${mod}: cache missing, will download`)
need_download = true
} else if (remote_commit && (!lock.modules[mod].commit || lock.modules[mod].commit != remote_commit)) {
@@ -70,7 +76,7 @@ uses.download = function()
if (!need_download) {
// Verify cached file hash
log.console(`${mod}: verifying cached version`)
zip = io.slurpbytes(cache_path)
zip = fd.slurp(cache_path)
var hash = crypto.hash(zip)
var hash_b32 = text(hash, "t")
@@ -89,7 +95,7 @@ uses.download = function()
log.console(`downloading ${mod} at ${mods[mod]}`)
log.console(shop.get_download_url(mods[mod]))
zip = http.fetch(shop.get_download_url(mods[mod]))
io.slurpwrite(cache_path, zip)
slurpwrite(cache_path, zip)
log.console(`${mod}: downloaded ${zip.length} bytes`)
// Calculate and store hash
@@ -109,7 +115,7 @@ uses.download = function()
log.console(`${mod}: hash = ${hash_b32}`)
// Save updated lock file
io.slurpwrite(lock_path, toml.encode(lock))
slurpwrite(lock_path, toml.encode(lock))
}
// Extract the module
@@ -119,13 +125,13 @@ uses.download = function()
log.console(`extracting ${mod} (${count} files)...`)
// Remove existing module directory if it exists (for clean updates)
if (io.exists(module_path)) {
if (fd.stat(module_path).isDirectory) {
log.console(`${mod}: removing old version...`)
io.rmdir(module_path)
fd.rmdir(module_path)
}
// Create module directory
io.mkdir(module_path)
fd.mkdir(module_path)
// Extract each file
for (var i = 0; i < count; i++) {
@@ -151,14 +157,14 @@ uses.download = function()
var dir = module_path
for (var j = 0; j < parts.length - 1; j++) {
dir = dir + '/' + parts[j]
if (!io.exists(dir))
io.mkdir(dir)
if (!fd.stat(dir).isDirectory)
fd.mkdir(dir)
}
}
// Extract and write file
var data = reader.slurp(reader.get_filename(i))
io.slurpwrite(filepath, data)
slurpwrite(filepath, data)
}
log.console(`${mod}: extracted to ${module_path}`)
@@ -168,4 +174,4 @@ uses.download = function()
if (uses[arg[0]])
uses[arg[0]]()
else
console.log(`Command ${arg[0]} not understood.`)
log.console(`Command ${arg[0]} not understood.`)

43
scripts/remove.ce Normal file
View File

@@ -0,0 +1,43 @@
// cell remove <alias> - Remove a module from dependencies
var shop = use('shop')
var fd = use('fd')
if (args.length < 1) {
log.console("Usage: cell remove <alias>")
$_.stop()
return
}
var alias = args[0]
// Check if cell.toml exists
if (!fd.stat('.cell/cell.toml').isFile) {
log.error("No cell.toml found.")
$_.stop()
return
}
// Get module directory before removing dependency
var module_dir = shop.get_module_dir(alias)
// Remove from dependencies
if (shop.remove_dependency(alias)) {
log.console("Removed dependency: " + alias)
// Remove module directory
if (module_dir && fd.stat(module_dir).isDirectory) {
log.console("Removing module directory: " + module_dir)
try {
fd.rmdir(module_dir)
} catch (e) {
log.error("Failed to remove module directory: " + e)
}
} else {
log.console("Module directory not found or already removed.")
}
} else {
log.error("Dependency not found: " + alias)
}
$_.stop()

49
scripts/replace.ce Normal file
View File

@@ -0,0 +1,49 @@
// cell replace <alias> <path> - Add or update a replace directive for a dependency
var fd = use('fd')
var shop = use('shop')
if (args.length < 2) {
log.console("Usage: cell replace <alias> <path>")
log.console("Examples:")
log.console(" cell replace prosperon ../prosperon")
log.console(" cell replace extramath ../my-fork-of-extramath")
$_.stop()
return
}
var alias = args[0]
var path = args[1]
// Initialize shop if needed
if (!fd.stat('.cell/cell.toml').isFile) {
log.console("No cell.toml found. Initializing...")
shop.init()
}
// Load current config
var config = shop.load_config()
if (!config) {
log.error("Failed to load cell.toml")
$_.stop()
return
}
// Check if the alias exists in dependencies
if (!config.dependencies || !config.dependencies[alias]) {
log.console("Warning: '" + alias + "' is not in dependencies. Adding replace anyway.")
}
// Ensure replace section exists
if (!config.replace) {
config.replace = {}
}
// Add or update the replace directive
config.replace[alias] = path
shop.save_config(config)
log.console("Added replace directive: " + alias + " = " + path)
log.console("Run 'cell build' to apply changes")
$_.stop()

View File

@@ -1,47 +1,87 @@
// Module shop system for managing dependencies and mods
var io = use('io')
var toml = use('toml')
var json = use('json')
var fd = use('fd')
var utf8 = use('utf8')
var http = use('http')
var miniz = use('miniz')
var time = use('time')
var Shop = {}
var shop_path = '.cell/cell.toml'
var lock_path = '.cell/lock.toml'
function slurpwrite(path, content) {
var f = fd.open(path)
fd.write(f, content)
fd.close(f)
}
function ensure_dir(path) {
if (fd.stat(path).isDirectory) return true
var parts = path.split('/')
var current = ''
for (var i = 0; i < parts.length; i++) {
if (parts[i] == '') continue
current += parts[i] + '/'
if (!fd.stat(current).isDirectory) {
fd.mkdir(current)
}
}
return true
}
// Load cell.toml configuration
Shop.load_config = function() {
if (!io.exists(shop_path))
if (!fd.stat(shop_path).isFile)
return null
var content = io.slurp(shop_path)
var content = utf8.decode(fd.slurp(shop_path))
return toml.decode(content)
}
// Save cell.toml configuration
Shop.save_config = function(config) {
io.slurpwrite(shop_path, toml.encode(config))
slurpwrite(shop_path, toml.encode(config));
}
// Load lock.toml configuration
Shop.load_lock = function() {
if (!fd.stat(lock_path).isFile)
return {}
var content = utf8.decode(fd.slurp(lock_path))
return toml.decode(content) || {}
}
// Save lock.toml configuration
Shop.save_lock = function(lock) {
slurpwrite(lock_path, toml.encode(lock));
}
// Initialize .cell directory structure
Shop.init = function() {
if (!io.exists('.cell')) {
io.mkdir('.cell')
if (!fd.stat('.cell').isDirectory) {
fd.mkdir('.cell')
}
if (!io.exists('.cell/modules')) {
io.mkdir('.cell/modules')
if (!fd.stat('.cell/modules').isDirectory) {
fd.mkdir('.cell/modules')
}
if (!io.exists('.cell/build')) {
io.mkdir('.cell/build')
if (!fd.stat('.cell/build').isDirectory) {
fd.mkdir('.cell/build')
}
if (!io.exists('.cell/patches')) {
io.mkdir('.cell/patches')
if (!fd.stat('.cell/patches').isDirectory) {
fd.mkdir('.cell/patches')
}
if (!io.exists('.cell/lock.toml')) {
io.slurpwrite('.cell/lock.toml', '# Lock file for module integrity\n')
if (!fd.stat('.cell/lock.toml').isFile) {
slurpwrite('.cell/lock.toml', '# Lock file for module integrity\n');
}
return true
@@ -86,6 +126,18 @@ Shop.get_download_url = function(locator) {
if (hostAndPath.includes('gitlab.')) {
return 'https://' + hostAndPath + '/-/archive/' + parsed.version + '/' + parsed.name + '-' + parsed.version + '.zip'
}
} else {
// Implicit https
var hostAndPath = parsed.path
// Gitea pattern: gitea.pockle.world/user/repo@branch
if (hostAndPath.includes('gitea.')) {
return 'https://' + hostAndPath + '/archive/' + parsed.version + '.zip'
}
// GitHub pattern: github.com/user/repo@tag
if (hostAndPath.includes('github.com')) {
return 'https://' + hostAndPath + '/archive/refs/tags/' + parsed.version + '.zip'
}
}
// Fallback to original locator if no pattern matches
@@ -109,41 +161,57 @@ Shop.add_dependency = function(alias, locator) {
return true
}
// Remove a dependency
Shop.remove_dependency = function(alias) {
var config = Shop.load_config()
if (!config) {
log.error("No cell.toml found")
return false
}
if (!config.dependencies || !config.dependencies[alias]) {
return false
}
delete config.dependencies[alias]
Shop.save_config(config)
return true
}
// Get the API URL for checking remote git commits
Shop.get_api_url = function(locator) {
var parsed = Shop.parse_locator(locator)
if (!parsed) return null
// Handle different git hosting patterns
var hostAndPath = parsed.path
if (locator.startsWith('https://')) {
// Remove https:// prefix for parsing
var cleanLocator = locator.substring(8)
var hostAndPath = cleanLocator.split('@')[0]
var parts = hostAndPath.split('/')
// Gitea pattern: gitea.pockle.world/user/repo@branch
if (hostAndPath.includes('gitea.')) {
var host = parts[0]
var user = parts[1]
var repo = parts[2]
return 'https://' + host + '/api/v1/repos/' + user + '/' + repo + '/branches/' + parsed.version
}
// GitHub pattern: github.com/user/repo@tag or @branch
if (hostAndPath.includes('github.com')) {
var user = parts[1]
var repo = parts[2]
// Try branch first, then tag
return 'https://api.github.com/repos/' + user + '/' + repo + '/branches/' + parsed.version
}
// GitLab pattern: gitlab.com/user/repo@tag
if (hostAndPath.includes('gitlab.')) {
var user = parts[1]
var repo = parts[2]
var projectId = encodeURIComponent(user + '/' + repo)
return 'https://' + parts[0] + '/api/v4/projects/' + projectId + '/repository/branches/' + parsed.version
}
hostAndPath = locator.substring(8).split('@')[0]
}
var parts = hostAndPath.split('/')
// Gitea pattern: gitea.pockle.world/user/repo@branch
if (hostAndPath.includes('gitea.')) {
var host = parts[0]
var user = parts[1]
var repo = parts[2]
return 'https://' + host + '/api/v1/repos/' + user + '/' + repo + '/branches/' + parsed.version
}
// GitHub pattern: github.com/user/repo@tag or @branch
if (hostAndPath.includes('github.com')) {
var user = parts[1]
var repo = parts[2]
// Try branch first, then tag
return 'https://api.github.com/repos/' + user + '/' + repo + '/branches/' + parsed.version
}
// GitLab pattern: gitlab.com/user/repo@tag
if (hostAndPath.includes('gitlab.')) {
var user = parts[1]
var repo = parts[2]
var projectId = encodeURIComponent(user + '/' + repo)
return 'https://' + parts[0] + '/api/v4/projects/' + projectId + '/repository/branches/' + parsed.version
}
// Fallback - return null if no API pattern matches
@@ -184,15 +252,160 @@ Shop.get_module_dir = function(alias) {
return null
}
var version = config.dependencies[alias]
var module_name = alias + '@' + version.split('@')[1]
// Check if replaced
if (config.replace && config.replace[version]) {
return config.replace[version]
if (config.replace && config.replace[alias]) {
return config.replace[alias]
}
var locator = config.dependencies[alias]
var parsed = Shop.parse_locator(locator)
if (!parsed) return null
return '.cell/modules/' + parsed.path
}
// Install a dependency
Shop.install = function(alias) {
var config = Shop.load_config()
if (!config || !config.dependencies || !config.dependencies[alias]) {
log.error("Dependency not found in config: " + alias)
return false
}
var locator = config.dependencies[alias]
var parsed = Shop.parse_locator(locator)
var target_dir = '.cell/modules/' + parsed.path
log.console("Installing " + alias + " (" + locator + ")...")
// 1. Get Commit Hash
var api_url = Shop.get_api_url(locator)
var commit_hash = null
if (api_url) {
try {
log.console("Fetching info from " + api_url)
var resp = http.fetch(api_url)
var resp_text = utf8.decode(resp)
commit_hash = Shop.extract_commit_hash(locator, resp_text)
log.console("Resolved commit: " + commit_hash)
} catch (e) {
log.console("Warning: Failed to fetch API info: " + e)
}
}
// 2. Download Zip
var download_url = Shop.get_download_url(locator)
if (!download_url) {
log.error("Could not determine download URL for " + locator)
return false
}
return '.cell/modules/' + module_name
log.console("Downloading from " + download_url)
var zip_blob
try {
zip_blob = http.fetch(download_url)
} catch (e) {
log.error("Download failed: " + e)
return false
}
// 3. Unpack
log.console("Unpacking to " + target_dir)
ensure_dir(target_dir)
var zip = miniz.read(zip_blob)
if (!zip) {
log.error("Failed to read zip archive")
return false
}
var count = zip.count()
for (var i = 0; i < count; i++) {
if (zip.is_directory(i)) continue
var filename = zip.get_filename(i)
// Strip top-level directory
var parts = filename.split('/')
if (parts.length > 1) {
parts.shift() // Remove root folder
var rel_path = parts.join('/')
var full_path = target_dir + '/' + rel_path
var dir_path = full_path.substring(0, full_path.lastIndexOf('/'))
ensure_dir(dir_path)
var content = zip.slurp(filename)
slurpwrite(full_path, content)
}
}
// 4. Update Lock
if (commit_hash) {
var lock = Shop.load_lock()
lock[alias] = {
locator: locator,
commit: commit_hash,
updated: time.number()
}
Shop.save_lock(lock)
}
log.console("Installed " + alias)
return true
}
// Verify dependencies
Shop.verify = function() {
var config = Shop.load_config()
if (!config || !config.dependencies) return true
var all_ok = true
for (var alias in config.dependencies) {
var dir = Shop.get_module_dir(alias)
if (!dir) {
// Might be a replace that is invalid or something else
continue
}
if (!fd.stat(dir).isDirectory) {
log.error("Missing dependency: " + alias + " (expected at " + dir + ")")
all_ok = false
} else {
// Check if empty?
}
}
return all_ok
}
// Check for updates
Shop.update = function() {
var config = Shop.load_config()
if (!config || !config.dependencies) return
var lock = Shop.load_lock()
for (var alias in config.dependencies) {
var locator = config.dependencies[alias]
var api_url = Shop.get_api_url(locator)
if (api_url) {
try {
var resp = http.fetch(api_url)
var resp_text = utf8.decode(resp)
var remote_hash = Shop.extract_commit_hash(locator, resp_text)
var local_hash = lock[alias] ? lock[alias].commit : null
if (remote_hash && remote_hash != local_hash) {
log.console("Update available for " + alias + ": " + local_hash + " -> " + remote_hash)
Shop.install(alias)
} else {
log.console(alias + " is up to date.")
}
} catch (e) {
log.error("Failed to check update for " + alias + ": " + e)
}
}
}
}
// Compile a module
@@ -221,4 +434,102 @@ Shop.build = function() {
return true
}
// Get all declared dependencies as a map of alias -> locator
Shop.get_dependencies = function() {
var config = Shop.load_config()
if (!config || !config.dependencies) {
return {}
}
return config.dependencies
}
// Resolve a module path given a package context
// Returns { path, package_name } or null if not found
// Resolution order:
// 1. Local to the current package (if package_name is set)
// 2. Declared dependencies (by alias)
// 3. Core modules (handled by caller)
Shop.resolve_module = function(module_name, package_name, is_file_fn) {
var config = Shop.load_config()
var dependencies = (config && config.dependencies) || {}
// If we're in a package context, check the package first
if (package_name) {
var pkg_path = '.cell/modules/' + package_name + '/' + module_name + '.cm'
if (is_file_fn(pkg_path)) {
return { path: pkg_path, package_name: package_name }
}
}
// Check if module_name contains a slash (explicit package reference)
if (module_name.includes('/')) {
var parts = module_name.split('/')
var pkg_alias = parts[0]
var sub_module = parts.slice(1).join('/')
// Check if it's a declared dependency
if (dependencies[pkg_alias]) {
// Need to resolve alias to canonical path
var locator = dependencies[pkg_alias]
var parsed = Shop.parse_locator(locator)
var canonical_path = parsed.path
var dep_path = '.cell/modules/' + canonical_path + '/' + sub_module + '.cm'
if (is_file_fn(dep_path)) {
return { path: dep_path, package_name: pkg_alias }
}
}
// Check local path (relative to project root)
var local_path = module_name + '.cm'
if (is_file_fn(local_path)) {
return { path: local_path, package_name: null }
}
} else {
// Simple module name - check local first, then dependencies
var local_path = module_name + '.cm'
if (is_file_fn(local_path)) {
return { path: local_path, package_name: null }
}
// Check each declared dependency for this module
for (var alias in dependencies) {
var locator = dependencies[alias]
var parsed = Shop.parse_locator(locator)
var canonical_path = parsed.path
var dep_path = '.cell/modules/' + canonical_path + '/' + module_name + '.cm'
if (is_file_fn(dep_path)) {
return { path: dep_path, package_name: alias }
}
}
}
return null
}
// Get the package name from a file path
// e.g., '.cell/modules/extramath/spline.cm' -> 'extramath'
// e.g., 'myfile.cm' -> null
Shop.get_package_from_path = function(path) {
if (!path) return null
var modules_prefix = '.cell/modules/'
if (path.startsWith(modules_prefix)) {
var rest = path.substring(modules_prefix.length)
// This logic is tricky with nested paths like gitea.pockle.world/john/prosperon
// We probably need to reverse map from path to alias using config
var config = Shop.load_config()
if (config && config.dependencies) {
for (var alias in config.dependencies) {
var locator = config.dependencies[alias]
var parsed = Shop.parse_locator(locator)
if (rest.startsWith(parsed.path + '/')) {
return alias
}
}
}
}
return null
}
return Shop

View File

@@ -1,7 +1,5 @@
/* text.cm - text conversion and formatting utilities */
/* -------- helper functions ----------------------------------------- */
var blob = use('blob')
var utf8 = use('utf8')

View File

@@ -1,5 +1,3 @@
/* time.js exports: {record, number, text} */
var time = this;
/* -------- host helpers -------------------------------------------------- */
@@ -223,6 +221,4 @@ time[cell.DOC] = {
text : "time.text([val], [fmt], [zone], [dst]) → formatted string (token Z = DST)."
};
/* -------- public exports ------------------------------------------------ */
return { record, number, text };

View File

@@ -2,6 +2,7 @@
// Supports basic TOML features needed for the module system
function parse_toml(text) {
if (typeof text != 'string') return null
var lines = text.split('\n')
var result = {}
var current_section = result

View File

@@ -1,118 +1,6 @@
// cell update [alias] - Check for updates and optionally install them
var io = use('io')
// cell update - Check for updates
var shop = use('shop')
var http = use('http')
var toml = use('toml')
var json = use('json')
var os = use('os')
if (!io.exists('.cell/cell.toml')) {
log.error("No cell.toml found. Run 'cell init' first.")
$_.stop()
return
}
var config = shop.load_config()
if (!config || !config.dependencies) {
log.console("No dependencies to update")
$_.stop()
return
}
// Load lock file
var lock_path = '.cell/lock.toml'
var lock = {}
if (io.exists(lock_path)) {
var lock_content = io.slurp(lock_path)
lock = toml.decode(lock_content)
}
if (!lock.modules) lock.modules = {}
var updates_available = []
// Check specific dependency or all
var deps_to_check = {}
if (args.length > 0) {
var alias = args[0]
if (!config.dependencies[alias]) {
log.error("Dependency '" + alias + "' not found")
$_.stop()
return
}
deps_to_check[alias] = config.dependencies[alias]
} else {
deps_to_check = config.dependencies
}
// Check each dependency for updates
for (var alias in deps_to_check) {
var locator = deps_to_check[alias]
log.console("Checking " + alias + " (" + locator + ")...")
// Get API URL to check commits
var api_url = shop.get_api_url(locator)
if (!api_url) {
log.console(" Cannot check updates (no API support)")
continue
}
try {
log.console(api_url)
var api_response = http.fetch(api_url)
var remote_commit = shop.extract_commit_hash(locator, text(api_response))
if (!remote_commit) {
log.console(" Failed to get remote commit")
continue
}
var local_commit = lock.modules[alias] && lock.modules[alias].commit
if (!local_commit) {
log.console(" No local commit tracked")
updates_available.push({
alias: alias,
locator: locator,
local_commit: null,
remote_commit: remote_commit
})
} else if (local_commit != remote_commit) {
log.console(" Update available!")
log.console(" Local: " + local_commit.substring(0, 8))
log.console(" Remote: " + remote_commit.substring(0, 8))
updates_available.push({
alias: alias,
locator: locator,
local_commit: local_commit,
remote_commit: remote_commit
})
} else {
log.console(" Up to date (" + local_commit.substring(0, 8) + ")")
}
} catch (e) {
log.console(" Failed to check: " + e)
}
}
if (updates_available.length == 0) {
log.console("\nAll dependencies are up to date!")
$_.stop()
return
}
log.console("\n" + updates_available.length + " update(s) available:")
for (var i = 0; i < updates_available.length; i++) {
var update = updates_available[i]
log.console(" - " + update.alias)
}
// If specific dependency was requested, auto-install
if (args.length > 0 && updates_available.length > 0) {
log.console("\nDownloading update...")
os.system("cell mod download")
} else if (updates_available.length > 0) {
log.console("\nRun 'cell mod download' to install updates")
}
log.console("Checking for updates...")
shop.update()
$_.stop()

12
scripts/verify.ce Normal file
View File

@@ -0,0 +1,12 @@
// cell verify - Verify dependencies
var shop = use('shop')
log.console("Verifying dependencies...")
if (shop.verify()) {
log.console("All dependencies are present.")
} else {
log.error("Some dependencies are missing.")
}
$_.stop()

View File

@@ -61,6 +61,9 @@
#define QOP_IMPLEMENTATION
#include "qop.h"
#define JS_BLOB_IMPLEMENTATION
#include "qjs_blob.h"
#define likely(x) __builtin_expect(!!(x), 1)
#define unlikely(x) __builtin_expect(!!(x), 0)
@@ -79,7 +82,6 @@ static SDL_SpinLock main_queue_lock = 0;
static SDL_Mutex *actors_mutex = NULL;
static struct { char *key; cell_rt *value; } *actors = NULL;
static unsigned char *zip_buffer_global = NULL;
static char *prosperon = NULL;
static qop_desc qop_core;
static qop_file *qop_hashmap = NULL;
cell_rt *root_cell = NULL;
@@ -121,6 +123,10 @@ static void exit_handler(void)
free(qop_hashmap);
qop_hashmap = NULL;
}
if (qop_core.data) {
free(qop_core.data);
qop_core.data = NULL;
}
qop_close(&qop_core);
SDL_Quit();
@@ -355,12 +361,6 @@ static const JSMallocFunctions mimalloc_funcs = {
};
#endif
static void free_zip(void)
{
free(zip_buffer_global);
zip_buffer_global = NULL;
}
int get_executable_path(char *buffer, unsigned int buffer_size) {
#if defined(__linux__)
ssize_t len = readlink("/proc/self/exe", buffer, buffer_size - 1);
@@ -389,10 +389,38 @@ int prosperon_mount_core(void)
return 0;
}
// Open the QOP archive appended to this executable
int archive_size = qop_open(exe_path, &qop_core);
// Load the entire executable into memory
FILE *fh = fopen(exe_path, "rb");
if (!fh) {
printf("ERROR: Could not open executable\n");
return 0;
}
fseek(fh, 0, SEEK_END);
long file_size = ftell(fh);
fseek(fh, 0, SEEK_SET);
unsigned char *buf = malloc(file_size);
if (!buf) {
printf("ERROR: Could not allocate memory for executable\n");
fclose(fh);
return 0;
}
if (fread(buf, 1, file_size, fh) != (size_t)file_size) {
printf("ERROR: Could not read executable\n");
free(buf);
fclose(fh);
return 0;
}
fclose(fh);
// Open the QOP archive from the in-memory data
int archive_size = qop_open_data(buf, file_size, &qop_core);
if (archive_size == 0) {
printf("ERROR: Could not open QOP archive\n");
free(buf);
return 0;
}
@@ -401,6 +429,7 @@ int prosperon_mount_core(void)
if (!qop_hashmap) {
printf("ERROR: Could not allocate memory for QOP hashmap\n");
qop_close(&qop_core);
free(buf);
return 0;
}
@@ -410,6 +439,7 @@ int prosperon_mount_core(void)
free(qop_hashmap);
qop_hashmap = NULL;
qop_close(&qop_core);
free(buf);
return 0;
}
@@ -813,7 +843,18 @@ void script_startup(cell_rt *prt)
JS_SetContextOpaque(js, prt);
prt->context = js;
ffi_load(js);
// Add core QOP blob to hidden
JSValue globalThis = JS_GetGlobalObject(js);
JSValue prosp = JS_GetPropertyStr(js, globalThis, "prosperon");
JSValue hidden = JS_GetPropertyStr(js, prosp, "hidden");
size_t archive_size = qop_core.data_size - qop_core.files_offset;
JSValue blob = js_new_blob_stoned_copy(js, qop_core.data + qop_core.files_offset, archive_size);
JS_SetPropertyStr(js, hidden, "core_qop_blob", blob);
JS_FreeValue(js, hidden);
JS_FreeValue(js, prosp);
JS_FreeValue(js, globalThis);
// Find and load engine.cm from QOP archive
qop_file *engine_file = qop_find(&qop_core, ENGINE);
if (!engine_file) {
@@ -852,7 +893,7 @@ int uncaught_exception(JSContext *js, JSValue v)
SDL_UnlockMutex(rt->mutex);
return 1;
}
JSValue exp = JS_GetException(js);
JSValue ret = JS_Call(js, rt->on_exception, JS_NULL, 1, &exp);
JS_FreeValue(js,ret);
@@ -915,7 +956,6 @@ static void add_runners(int n)
static void loop()
{
int msgs = 0;
while (!SDL_GetAtomicInt(&shutting_down)) {
SDL_WaitSemaphore(main_sem);
SDL_LockSpinlock(&main_queue_lock);
@@ -925,9 +965,7 @@ static void loop()
arrdel(main_queue, 0);
}
SDL_UnlockSpinlock(&main_queue_lock);
msgs++;
actor_turn(actor);
continue;
}
}
@@ -953,7 +991,6 @@ int main(int argc, char **argv)
tracy_profiling_enabled = profile_enabled;
#endif
prosperon = argv[0];
PHYSFS_init(argv[0]);
/* Load QOP package attached to executable - this is now mandatory! */

View File

@@ -121,7 +121,14 @@ static JSValue js_blob_constructor(JSContext *ctx, JSValueConst new_target,
return JS_ThrowOutOfMemory(ctx);
}
return blob2js(ctx, bd);
JSValue ret = blob2js(ctx, bd);
// Ensure the returned object's prototype is set correctly for instanceof
JSValue ctor_proto = JS_GetPropertyStr(ctx, new_target, "prototype");
if (!JS_IsException(ctor_proto)) {
JS_SetPrototype(ctx, ret, ctor_proto);
}
JS_FreeValue(ctx, ctor_proto);
return ret;
}
// blob.write_bit(logical)
@@ -504,8 +511,10 @@ JSValue js_blob_use(JSContext *js) {
// Set the prototype on the constructor
JSValue proto = JS_GetClassProto(js, js_blob_id);
JS_SetConstructor(js, ctor, proto);
// Explicitly set the prototype property to ensure instanceof works
JS_SetPropertyStr(js, ctor, "__prototype__", JS_DupValue(js, proto));
JS_FreeValue(js, proto);
return ctor;
}

View File

@@ -12,6 +12,7 @@
#ifdef _WIN32
#include <io.h>
#include <direct.h>
#include <windows.h>
#define mkdir(path, mode) _mkdir(path)
#define rmdir _rmdir
#define getcwd _getcwd
@@ -22,6 +23,7 @@
#else
#include <unistd.h>
#include <dirent.h>
#include <sys/mman.h>
#endif
// Helper to convert JS value to file descriptor
@@ -115,6 +117,57 @@ JSC_CCALL(fd_read,
return ret;
)
JSC_SCALL(fd_slurp,
struct stat st;
if (stat(str, &st) != 0)
return JS_ThrowReferenceError(js, "stat failed: %s", strerror(errno));
if (!S_ISREG(st.st_mode))
return JS_ThrowTypeError(js, "path is not a regular file");
size_t size = st.st_size;
if (size == 0)
return js_new_blob_stoned_copy(js, NULL, 0);
#ifndef _WIN32
int fd = open(str, O_RDONLY);
if (fd < 0)
return JS_ThrowReferenceError(js, "open failed: %s", strerror(errno));
void *data = mmap(NULL, size, PROT_READ, MAP_PRIVATE, fd, 0);
if (data == MAP_FAILED) {
close(fd);
return JS_ThrowReferenceError(js, "mmap failed: %s", strerror(errno));
}
ret = js_new_blob_stoned_copy(js, data, size);
munmap(data, size);
close(fd);
#else
// Windows: use memory mapping for optimal performance
HANDLE hFile = CreateFileA(str, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
if (hFile == INVALID_HANDLE_VALUE)
return JS_ThrowReferenceError(js, "CreateFile failed: %lu", GetLastError());
HANDLE hMapping = CreateFileMappingA(hFile, NULL, PAGE_READONLY, 0, 0, NULL);
if (hMapping == NULL) {
CloseHandle(hFile);
return JS_ThrowReferenceError(js, "CreateFileMapping failed: %lu", GetLastError());
}
void *data = MapViewOfFile(hMapping, FILE_MAP_READ, 0, 0, 0);
if (data == NULL) {
CloseHandle(hMapping);
CloseHandle(hFile);
return JS_ThrowReferenceError(js, "MapViewOfFile failed: %lu", GetLastError());
}
ret = js_new_blob_stoned_copy(js, data, size);
UnmapViewOfFile(data);
CloseHandle(hMapping);
CloseHandle(hFile);
#endif
)
JSC_CCALL(fd_lseek,
int fd = js2fd(js, argv[0]);
if (fd < 0) return JS_EXCEPTION;
@@ -217,7 +270,7 @@ JSC_CCALL(fd_fstat,
JSC_SCALL(fd_stat,
struct stat st;
if (stat(str, &st) != 0)
ret = JS_ThrowReferenceError(js, "stat failed: %s", strerror(errno));
return JS_NewObject(js);
JSValue obj = JS_NewObject(js);
JS_SetPropertyStr(js, obj, "size", JS_NewInt64(js, st.st_size));
@@ -255,6 +308,7 @@ static const JSCFunctionListEntry js_fd_funcs[] = {
MIST_FUNC_DEF(fd, open, 2),
MIST_FUNC_DEF(fd, write, 2),
MIST_FUNC_DEF(fd, read, 2),
MIST_FUNC_DEF(fd, slurp, 1),
MIST_FUNC_DEF(fd, lseek, 3),
MIST_FUNC_DEF(fd, getcwd, 0),
MIST_FUNC_DEF(fd, rmdir, 1),

View File

@@ -38,18 +38,25 @@ static int js_qop_ensure_index(JSContext *js, qop_desc *qop) {
}
JSC_SCALL(qop_open,
qop_desc *qop = js_malloc(js, sizeof(qop_desc));
if (!qop)
ret = JS_ThrowOutOfMemory(js);
int size = qop_open(str, qop);
if (size == 0) {
js_free(js, qop);
ret = JS_ThrowReferenceError(js, "Failed to open QOP archive: %s", str);
} else {
JSValue obj = JS_NewObjectClass(js, js_qop_archive_class_id);
JS_SetOpaque(obj, qop);
ret = obj;
size_t len;
void *data = js_get_blob_data(js, &len, argv[0]);
if (!data)
ret = JS_ThrowReferenceError(js, "Could not get blob data.\n");
else {
qop_desc *qop = js_malloc(js, sizeof(qop_desc));
if (!qop)
ret = JS_ThrowOutOfMemory(js);
else {
int size = qop_open_data((const unsigned char *)data, len, qop);
if (size == 0) {
js_free(js, qop);
ret = JS_ThrowReferenceError(js, "Failed to open QOP archive from blob");
} else {
JSValue obj = JS_NewObjectClass(js, js_qop_archive_class_id);
JS_SetOpaque(obj, qop);
ret = obj;
}
}
}
)

View File

@@ -58,6 +58,7 @@ extern "C" {
#include <stdio.h>
#include <string.h>
#include <stddef.h>
#define QOP_FLAG_NONE 0
#define QOP_FLAG_COMPRESSED_ZSTD (1 << 0)
@@ -74,6 +75,9 @@ typedef struct {
typedef struct {
FILE *fh;
const unsigned char *data;
size_t data_size;
size_t data_pos;
qop_file *hashmap;
unsigned int files_offset;
unsigned int index_offset;
@@ -87,6 +91,11 @@ typedef struct {
// failure.
int qop_open(const char *path, qop_desc *qop);
// Open an archive from memory data. The supplied qop_desc will be filled with the
// information from the data header. Returns the size of the archive or 0 on
// failure.
int qop_open_data(const unsigned char *data, size_t data_size, qop_desc *qop);
// Read the index from an opened archive. The supplied buffer will be filled
// with the index data and must be at least qop->hashmap_size bytes long.
// No ownership is taken of the buffer; if you allocated it with malloc() you
@@ -175,6 +184,72 @@ static qop_uint64_t qop_read_64(FILE *fh) {
((qop_uint64_t)b[1] << 8) | ((qop_uint64_t)b[0]);
}
// Seek within an archive, dispatching to the backing FILE* for
// file-backed archives or to the in-memory cursor for memory-backed ones.
static void qop_seek(qop_desc *qop, long offset, int whence) {
	if (qop->fh) {
		fseek(qop->fh, offset, whence);
		return;
	}
	switch (whence) {
		case SEEK_SET: qop->data_pos = offset; break;
		case SEEK_END: qop->data_pos = qop->data_size + offset; break;
		case SEEK_CUR: qop->data_pos += offset; break;
	}
}
// Read a little-endian 16-bit value at the archive's current position.
// Memory-backed reads that would run past the end of the data return 0.
static unsigned short qop_read_16_desc(qop_desc *qop) {
	if (qop->fh)
		return qop_read_16(qop->fh);
	if (qop->data_pos + sizeof(unsigned short) > qop->data_size)
		return 0;
	const unsigned char *p = qop->data + qop->data_pos;
	qop->data_pos += sizeof(unsigned short);
	return (unsigned short)((p[1] << 8) | p[0]);
}
// Read a little-endian 32-bit value at the archive's current position.
// Memory-backed reads that would run past the end of the data return 0.
static unsigned int qop_read_32_desc(qop_desc *qop) {
	if (qop->fh)
		return qop_read_32(qop->fh);
	if (qop->data_pos + sizeof(unsigned int) > qop->data_size)
		return 0;
	const unsigned char *p = qop->data + qop->data_pos;
	qop->data_pos += sizeof(unsigned int);
	return ((unsigned int)p[3] << 24) | ((unsigned int)p[2] << 16) |
	       ((unsigned int)p[1] << 8) | (unsigned int)p[0];
}
// Read a little-endian 64-bit value at the archive's current position.
// Memory-backed reads that would run past the end of the data return 0.
static qop_uint64_t qop_read_64_desc(qop_desc *qop) {
	if (qop->fh)
		return qop_read_64(qop->fh);
	if (qop->data_pos + sizeof(qop_uint64_t) > qop->data_size)
		return 0;
	const unsigned char *p = qop->data + qop->data_pos;
	qop->data_pos += sizeof(qop_uint64_t);
	// Assemble bytes MSB-first so each shift-or step stays in 64 bits.
	qop_uint64_t v = 0;
	for (int i = 7; i >= 0; i--)
		v = (v << 8) | p[i];
	return v;
}
// fread() equivalent for both file-backed and memory-backed archives.
// Mirrors fread() semantics: returns the number of COMPLETE elements
// read, which may be fewer than nmemb near the end of the data.
//
// Fixes over the previous version:
//  - `size * nmemb` could overflow size_t, turning an oversized request
//    into a bogus small copy; we now clamp by whole elements instead.
//  - A request extending past the end returned 0 and copied nothing,
//    unlike fread(); we now perform the partial read fread() would.
static size_t qop_fread(qop_desc *qop, void *buf, size_t size, size_t nmemb) {
	if (qop->fh)
		return fread(buf, size, nmemb, qop->fh);
	if (size == 0 || nmemb == 0 || qop->data_pos >= qop->data_size)
		return 0;
	// Whole elements still available from the cursor to the end.
	size_t avail_elems = (qop->data_size - qop->data_pos) / size;
	size_t elems = nmemb < avail_elems ? nmemb : avail_elems;
	if (elems == 0)
		return 0;
	size_t total = elems * size; // cannot overflow: total <= remaining bytes
	memcpy(buf, qop->data + qop->data_pos, total);
	qop->data_pos += total;
	return elems;
}
int qop_open(const char *path, qop_desc *qop) {
FILE *fh = fopen(path, "rb");
if (!fh) {
@@ -189,6 +264,9 @@ int qop_open(const char *path, qop_desc *qop) {
}
qop->fh = fh;
qop->data = NULL;
qop->data_size = 0;
qop->data_pos = 0;
qop->hashmap = NULL;
unsigned int index_len = qop_read_32(fh);
unsigned int archive_size = qop_read_32(fh);
@@ -218,31 +296,72 @@ int qop_open(const char *path, qop_desc *qop) {
return size;
}
// Open a QOP archive that is already resident in memory. The supplied
// qop_desc is filled from the header found at the END of the data.
// Returns the archive size, or 0 on failure. No ownership is taken of
// `data`; it must remain valid for the lifetime of the descriptor.
//
// Fixes over the previous version:
//  - `index_len * QOP_INDEX_SIZE` could overflow unsigned int and slip
//    past the bounds check; the check now divides instead.
//  - `archive_size` was never validated, so `files_offset` could
//    underflow for a corrupt header.
//  - Sizes that would not survive the int return type are rejected.
int qop_open_data(const unsigned char *data, size_t data_size, qop_desc *qop) {
	if (!data || data_size <= QOP_HEADER_SIZE || data_size > 0x7fffffff)
		return 0;

	qop->fh = NULL;
	qop->data = data;
	qop->data_size = data_size;
	qop->data_pos = 0;
	qop->hashmap = NULL;

	// The fixed-size header lives at the very end of the archive.
	qop_seek(qop, (long)(data_size - QOP_HEADER_SIZE), SEEK_SET);
	unsigned int index_len = qop_read_32_desc(qop);
	unsigned int archive_size = qop_read_32_desc(qop);
	unsigned int magic = qop_read_32_desc(qop);

	// Check magic; make sure the index fits in the data without letting
	// index_len * QOP_INDEX_SIZE overflow (equivalent comparison by division).
	unsigned int payload = (unsigned int)(data_size - QOP_HEADER_SIZE);
	if (magic != QOP_MAGIC || index_len > payload / QOP_INDEX_SIZE)
		return 0;
	// The archive must fit in the supplied buffer or files_offset underflows.
	if (archive_size > data_size)
		return 0;

	// Find a good size for the hashmap: power of 2, at least 1.5x num entries
	unsigned int hashmap_len = 1;
	unsigned int min_hashmap_len = index_len * 1.5;
	while (hashmap_len < min_hashmap_len) {
		hashmap_len <<= 1;
	}

	qop->files_offset = data_size - archive_size;
	qop->index_len = index_len;
	qop->index_offset = data_size - qop->index_len * QOP_INDEX_SIZE - QOP_HEADER_SIZE;
	qop->hashmap_len = hashmap_len;
	qop->hashmap_size = qop->hashmap_len * sizeof(qop_file);
	return (int)data_size;
}
int qop_read_index(qop_desc *qop, void *buffer) {
qop->hashmap = buffer;
int mask = qop->hashmap_len - 1;
memset(qop->hashmap, 0, qop->hashmap_size);
fseek(qop->fh, qop->index_offset, SEEK_SET);
qop_seek(qop, qop->index_offset, SEEK_SET);
for (unsigned int i = 0; i < qop->index_len; i++) {
qop_uint64_t hash = qop_read_64(qop->fh);
qop_uint64_t hash = qop_read_64_desc(qop);
int idx = hash & mask;
while (qop->hashmap[idx].size > 0) {
idx = (idx + 1) & mask;
}
qop->hashmap[idx].hash = hash;
qop->hashmap[idx].offset = qop_read_32(qop->fh);
qop->hashmap[idx].size = qop_read_32(qop->fh);
qop->hashmap[idx].path_len = qop_read_16(qop->fh);
qop->hashmap[idx].flags = qop_read_16(qop->fh);
qop->hashmap[idx].offset = qop_read_32_desc(qop);
qop->hashmap[idx].size = qop_read_32_desc(qop);
qop->hashmap[idx].path_len = qop_read_16_desc(qop);
qop->hashmap[idx].flags = qop_read_16_desc(qop);
}
return qop->index_len;
}
void qop_close(qop_desc *qop) {
fclose(qop->fh);
if (qop->fh) {
fclose(qop->fh);
}
}
qop_file *qop_find(qop_desc *qop, const char *path) {
@@ -264,18 +383,18 @@ qop_file *qop_find(qop_desc *qop, const char *path) {
}
int qop_read_path(qop_desc *qop, qop_file *file, char *dest) {
fseek(qop->fh, qop->files_offset + file->offset, SEEK_SET);
return fread(dest, 1, file->path_len, qop->fh);
qop_seek(qop, qop->files_offset + file->offset, SEEK_SET);
return qop_fread(qop, dest, 1, file->path_len);
}
int qop_read(qop_desc *qop, qop_file *file, unsigned char *dest) {
fseek(qop->fh, qop->files_offset + file->offset + file->path_len, SEEK_SET);
return fread(dest, 1, file->size, qop->fh);
qop_seek(qop, qop->files_offset + file->offset + file->path_len, SEEK_SET);
return qop_fread(qop, dest, 1, file->size);
}
int qop_read_ex(qop_desc *qop, qop_file *file, unsigned char *dest, unsigned int start, unsigned int len) {
fseek(qop->fh, qop->files_offset + file->offset + file->path_len + start, SEEK_SET);
return fread(dest, 1, len, qop->fh);
qop_seek(qop, qop->files_offset + file->offset + file->path_len + start, SEEK_SET);
return qop_fread(qop, dest, 1, len);
}

View File

@@ -1,17 +0,0 @@
// Main entry point for jj_mod
// Loads the sibling utils module via a relative import, logs a version
// banner, and returns the module's public API object to use() callers.
// NOTE(review): `use` and `log` appear to be engine-provided globals,
// and the top-level `return` relies on the engine's module loader
// wrapping this file — confirm against the loader implementation.
var utils = use("./utils")
log.console("jj_mod loaded! Version 0.6.3")
// Module export object.
return {
  utils: utils,
  version: "0.6.3",
  // Build a named object with a random id drawn from utils.random_range.
  create_thing: function(name) {
    return {
      name: name,
      id: utils.random_range(1000, 9999)
    }
  }
}

View File

@@ -1,15 +0,0 @@
// Example module file for jj_mod
// Render a number as a string with exactly two digits after the
// decimal point (standard Number.prototype.toFixed rounding).
function format_number(n) {
  var fixed = n.toFixed(2)
  return fixed
}
// Uniformly random float in the half-open interval [min, max).
function random_range(min, max) {
  var span = max - min
  return min + Math.random() * span
}
// Public API of the utils module, handed back to use("./utils") callers.
// NOTE(review): the top-level `return` relies on the engine's module
// loader wrapping this file in a function — confirm against the loader.
return {
  format_number: format_number,
  random_range: random_range,
  PI: 3.14159
}

View File

@@ -1,21 +0,0 @@
module = "test-shop"
engine = "mist/prosperon@v0.9.3"
entrypoint = "main.js"
[dependencies]
jj_mod = "git.world/jj/mod@v0.6.3"
prosperon_extras = "git.world/mist/prosperon-extras@v1.0.0"
[aliases]
mod = "jj_mod"
extras = "prosperon_extras"
[replace]
# For local development
# "git.world/jj/mod@v0.6.3" = "../local-jj-mod"
[patches]
# jj_mod = "patches/jj_mod-fix.patch"
[mods]
enabled = []

View File

@@ -1,15 +0,0 @@
// Helper module for testing relative imports
// Emit a friendly greeting for `name` via the engine log.
function greet(name) {
  var message = "Hello, " + name + "!"
  log.console(message)
}
// Sum of two values. Follows JS `+` semantics: numeric addition for
// numbers, concatenation if either operand is a string.
function calculate(a, b) {
  var sum = a + b
  return sum
}
// Module export object handed back by use("./helper").
// NOTE(review): the top-level `return` relies on the engine's module
// loader wrapping this file in a function — confirm against the loader.
return {
  greet: greet,
  calculate: calculate,
  version: "1.0.0"
}

View File

@@ -1,41 +0,0 @@
// Example main.js that uses the module system
// Smoke-tests each import style the loader supports; each case logs a
// pass/fail line rather than aborting, so all cases always run.
// NOTE(review): `use`, `log`, and `$_` appear to be engine-provided
// globals — confirm against the engine runtime.
log.console("=== Module System Test ===")
// Test bare imports
try {
  var sprite = use("sprite")
  log.console("✓ Loaded sprite from bare import")
} catch (e) {
  log.console("✗ Failed to load sprite: " + e)
}
// Test relative imports
try {
  var helper = use("./helper")
  log.console("✓ Loaded helper from relative import")
  helper.greet("Module System")
} catch (e) {
  log.console("✗ Failed to load helper: " + e)
}
// Test scheme-qualified imports
try {
  var core_time = use("core://time")
  log.console("✓ Loaded time from core:// scheme")
} catch (e) {
  log.console("✗ Failed to load core://time: " + e)
}
// Test aliased module (if configured in shop.toml)
try {
  var mod = use("mod/utils")
  log.console("✓ Loaded mod/utils from aliased module")
} catch (e) {
  log.console("✗ Failed to load mod/utils: " + e)
}
log.console("")
log.console("Test complete!")
// Stop the engine loop once the tests have run.
$_.stop()

View File

@@ -18,16 +18,17 @@ var test_content = "Hello, World! This is a test file for performance comparison
log.console("Creating test file...")
io.writepath('.')
// Make cellfs mirror all of io's search paths
io.mount('.')
var io_paths = io.searchpath()
log.console(io_paths)
log.console(io_paths.length)
log.console(typeof io_paths)
for (var i = 0; i < io_paths.length; i++) {
var path = io_paths[i]
try {
// Ensure path starts with /
if (!path.startsWith('/')) {
path = '/' + path
}
cellfs.mount(path, path)
cellfs.mount(path)
} catch (e) {
// Some paths might not be mountable, skip them
}