add man files; add mod hash checking; add text decoding for blob

2025-06-02 11:10:18 -05:00
parent d9b316270d
commit f70f65d1c3
20 changed files with 1077 additions and 495 deletions

View File

@@ -1,3 +1,3 @@
[dependencies]
-extramath = "https://gitea.pockle.world/john/extramath@head"
+extramath = "https://gitea.pockle.world/john/extramath@master"

5
.cell/lock.toml Normal file
View File

@@ -0,0 +1,5 @@
[modules]
[modules.extramath]
hash = "4244JXYZT7IMYQFYXOSPRK7VFCH4FBYQCQ5FCKYXMGA4QMN6RMPA===="
url = "https://gitea.pockle.world/john/extramath@master"
downloaded = "Monday June 2 10:41:16.23 AM -5 2025 AD"
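This entry is produced and verified by scripts/mod.ce (added below): the hash field is the text() encoding of crypto.hash over the cached zip. A minimal re-check of this particular entry could look like the following sketch (paths follow the cache layout used by mod.ce):

    var io = use('io')
    var toml = use('toml')
    var crypto = use('crypto')
    var text = use('text')
    var expected = toml.decode(io.slurp('.cell/lock.toml')).modules.extramath.hash
    var actual = text(crypto.hash(io.slurpbytes('.cell/cache/extramath.zip')), "t")
    log.console(actual === expected ? "extramath: hash ok" : "extramath: hash mismatch")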

View File

@@ -276,6 +276,14 @@ function deepFreeze(object) {
globalThis.stone = deepFreeze
stone.p = function(object)
{
if (object instanceof blob) {
try {
object.read_logical(0)
return true
} catch(e) {
return false
}
}
return Object.isFrozen(object)
}
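A quick usage sketch of the new check (some_blob is a placeholder; stone, blob and the crypto module are the bindings shown elsewhere in this commit):

    var crypto = use('crypto')
    var digest = crypto.hash(some_blob)   // crypto.hash returns a stoned blob copy
    log.console(stone.p(digest))          // true: a readable blob counts as stoned
    log.console(stone.p(stone({a: 1})))   // true: deep-frozen object
    log.console(stone.p({a: 1}))          // false: plain mutable object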

View File

@@ -2,6 +2,8 @@
var io = use('io')
var shop = use('shop')
var miniz = use('miniz')
var http = use('http')
if (args.length < 1) {
log.console("Usage: cell get <locator> [alias]")

64
scripts/man/build.man Normal file
View File

@@ -0,0 +1,64 @@
CELL-BUILD(1) Cell Manual CELL-BUILD(1)
NAME
cell build - Compile all modules to bytecode
SYNOPSIS
cell build
DESCRIPTION
The build command compiles all JavaScript modules in the modules/
directory to bytecode format. Compiled modules are stored in
.cell/build/ with a .jso extension and load faster than source files.
Cell's module system automatically uses compiled versions when
available, falling back to source files if needed.
OPERATION
1. Scans .cell/modules/ for all dependencies
2. Compiles each .js file to bytecode using QuickJS
3. Saves compiled bytecode with .jso extension in .cell/build/
4. Preserves module structure and naming
5. Updates module resolution to prefer compiled versions
COMPILATION PROCESS
For each module:
- Source: .cell/modules/chess@v1.0.0/main.js
- Output: .cell/build/chess@v1.0.0/main.jso
The compilation uses QuickJS bytecode format which:
- Loads faster than parsing source
- Provides some obfuscation
- Maintains full compatibility
EXAMPLES
Build all modules:
cell build
Typical workflow:
cell get git.world/jj/chess@v1.0.0
cell build
# Your code now uses compiled chess module
NOTES
- Compilation is optional - source modules work without building
- Compiled modules are preferred over source when available
- The .jso extension indicates QuickJS bytecode
- Bytecode is platform-independent
- TODO: Actual compilation implementation pending
FILES
.cell/build/
Contains all compiled modules
.cell/modules/
Source modules read from here
PERFORMANCE
Compiled modules provide:
- Faster load times (no parsing needed)
- Reduced memory usage during loading
- Same runtime performance as source
SEE ALSO
cell(1), cell-get(1), cell-vendor(1)

124
scripts/man/cell.man Normal file
View File

@@ -0,0 +1,124 @@
CELL(1) Cell Manual CELL(1)
NAME
cell - The Cell module system for Prosperon game engine
SYNOPSIS
cell <command> [arguments]
DESCRIPTION
Cell is a module and dependency management system for Prosperon,
inspired by Go modules. It provides tools for managing dependencies,
building modules, and maintaining reproducible builds.
Cell uses a manifest file (cell.toml) to track dependencies and
project configuration. All Cell data is stored in the .cell/
directory within your project.
COMMANDS
init
Initialize a new Cell project with .cell/ directory structure
get <module> [alias]
Fetch a module and add it as a dependency
update <alias> [version]
Update a dependency to a new version
list
List installed modules and their status
vendor
Copy all dependencies into modules/ for hermetic builds
build
Compile all modules to bytecode in build/
patch <alias>
Create a patch file for local modifications to a dependency
help [command]
Display help information for Cell or a specific command
DIRECTORY STRUCTURE
.cell/
├── cell.toml Project manifest
├── lock.toml Dependency lock file with checksums
├── modules/ Vendored source modules
├── build/ Compiled bytecode modules
└── patches/ Local patches for dependencies
CONFIGURATION
The cell.toml file contains:
module = "my-game"
engine = "mist/prosperon@v0.9.3"
entrypoint = "main.js"
[dependencies]
alias = "git.world/user/module@version"
[aliases]
short = "alias"
[replace]
"git.world/user/module@v1.0.0" = "./local/path"
[patches]
module = "./patches/module.patch"
[mods]
enabled = ["mod1", "mod2"]
MODULE LOCATORS
Modules are identified by locators in the format:
host/owner/name@version
Examples:
- git.world/jj/mod@v0.6.3
- git.world/jj/mod@head
- git.world/jj/mod (defaults to @head)
IMPORT RESOLUTION
Cell supports multiple import styles:
1. Scheme-qualified: core://time, std://json
2. Relative paths: ./helper, ../utils
3. Bare imports: resolved via dependencies and aliases
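Example (illustrative module names, assuming use() accepts each form directly):
var time = use('core://time')
var helper = use('./helper')
var chess = use('chess')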
MODULE PRECEDENCE
Modules are mounted in the following order (highest to lowest):
1. Mods (user modifications)
2. Self (project root)
3. Aliases (dependencies)
4. Compiled modules
5. Source modules
6. Core modules
EXAMPLES
Initialize a new project:
cell init
Add a dependency:
cell get git.world/jj/chess@v1.0.0
cell get git.world/jj/chess # uses latest
Update a dependency:
cell update chess v1.1.0
List installed modules:
cell list
Vendor dependencies:
cell vendor
Build all modules:
cell build
SEE ALSO
Run 'cell help <command>' for detailed information on each command.
AUTHORS
Cell is part of the Prosperon game engine project.

70
scripts/man/get.man Normal file
View File

@@ -0,0 +1,70 @@
CELL-GET(1) Cell Manual CELL-GET(1)
NAME
cell get - Fetch a module and add it as a dependency
SYNOPSIS
cell get <locator> [alias]
DESCRIPTION
The get command fetches a module from a repository and adds it to
your project's dependencies in cell.toml. The module is downloaded
to .cell/modules/ and can be imported using the specified alias.
If no version is specified in the locator, the command defaults to
using the head/master branch of the repository.
ARGUMENTS
locator
The module identifier in the format:
host/owner/name[@version]
Examples:
- git.world/jj/chess@v1.0.0
- git.world/jj/chess@head
- git.world/jj/chess (defaults to @head)
alias
Optional custom name for importing the module.
If not specified, uses the module name from the locator.
OPERATION
1. Parses the module locator
2. Initializes .cell/ if it doesn't exist
3. Adds the dependency to cell.toml
4. Creates the module directory in .cell/modules/
5. Downloads the module files (TODO: implementation pending)
6. Updates lock.toml with checksums (TODO: implementation pending)
EXAMPLES
Add a specific version:
cell get git.world/jj/chess@v1.0.0
Add latest version:
cell get git.world/jj/chess
Add with custom alias:
cell get git.world/jj/chess-engine@v2.0.0 chess
After adding, import in your code:
var chess = use('chess')
NOTES
- If a dependency with the same alias already exists, use the
update command instead
- The actual fetching from remote repositories is not yet
implemented; currently creates the directory structure only
- Module directories are named as: alias@version
FILES
.cell/cell.toml
Updated with the new dependency
.cell/modules/
Module files are stored here
.cell/lock.toml
Will contain checksums once implemented
SEE ALSO
cell(1), cell-update(1), cell-vendor(1)

54
scripts/man/init.man Normal file
View File

@@ -0,0 +1,54 @@
CELL-INIT(1) Cell Manual CELL-INIT(1)
NAME
cell init - Initialize a new Cell project
SYNOPSIS
cell init
DESCRIPTION
The init command creates the .cell/ directory structure for a new
Cell project. This includes all necessary subdirectories and a
default cell.toml configuration file.
If a .cell/ directory already exists, init will create any missing
subdirectories but will not overwrite existing files.
DIRECTORY STRUCTURE
Creates the following structure:
.cell/
├── cell.toml Project manifest with default configuration
├── lock.toml Empty lock file for dependency checksums
├── modules/ Directory for vendored source modules
├── build/ Directory for compiled bytecode modules
└── patches/ Directory for local dependency patches
DEFAULT CONFIGURATION
The generated cell.toml contains:
module = "my-game"
engine = "mist/prosperon@v0.9.3"
entrypoint = "main.js"
dependencies = {}
aliases = {}
replace = {}
patches = {}
[mods]
enabled = []
EXAMPLES
Initialize a new Cell project:
cell init
This will create the .cell/ directory if it doesn't exist and
populate it with the default structure.
NOTES
- The init command is idempotent - running it multiple times is safe
- Existing files are never overwritten
- Edit .cell/cell.toml after initialization to configure your project
SEE ALSO
cell(1), cell-get(1)

77
scripts/man/list.man Normal file
View File

@@ -0,0 +1,77 @@
CELL-LIST(1) Cell Manual CELL-LIST(1)
NAME
cell list - List installed modules and their status
SYNOPSIS
cell list
DESCRIPTION
The list command displays all modules currently installed in your
project, showing their aliases, locators, and status. It provides
an overview of which modules are downloaded, vendored, compiled,
or patched.
The command reads the cell.toml file and checks the filesystem to
determine the current state of each module.
OUTPUT FORMAT
For each module, the command displays:
- The alias used for importing
- The full module locator with version
- Download status in .cell/modules/
- Vendoring status in modules/
- Compilation status in .cell/build/
- Applied patches
STATUS INDICATORS
✓ Downloaded Module exists in .cell/modules/
✗ Not downloaded Module needs to be fetched with 'cell get'
✓ Vendored Module copied to modules/ directory
✓ Compiled Module bytecode exists in .cell/build/
✓ Patch exists Patch file found for this module
EXAMPLES
List all modules:
cell list
Example output:
Installed modules:
chess -> git.world/jj/chess@v1.0.0
✓ Downloaded to .cell/modules/chess@v1.0.0
✓ Vendored to modules/chess@v1.0.0
✓ Compiled to .cell/build/chess@v1.0.0
engine -> git.world/jj/engine@v2.1.0
✓ Downloaded to .cell/modules/engine@v2.1.0
✗ Not vendored
✗ Not compiled
Patches:
chess -> ./patches/chess.patch
✓ Patch file exists
NOTES
- The command only reads configuration; it doesn't modify anything
- Invalid locators are marked but don't stop the listing
- An empty dependencies section shows "No modules installed"
FILES
.cell/cell.toml
Read to get the list of dependencies
.cell/modules/
Checked for downloaded modules
modules/
Checked for vendored modules
.cell/build/
Checked for compiled modules
.cell/patches/
Checked for patch files
SEE ALSO
cell(1), cell-get(1), cell-update(1), cell-vendor(1)

79
scripts/man/patch.man Normal file
View File

@@ -0,0 +1,79 @@
CELL-PATCH(1) Cell Manual CELL-PATCH(1)
NAME
cell patch - Create a patch file for local modifications
SYNOPSIS
cell patch <alias>
DESCRIPTION
The patch command creates a patch file capturing local modifications
made to a dependency. This allows you to maintain custom changes to
upstream modules that persist across updates.
Patches are stored in .cell/patches/ and automatically applied when
modules are fetched or updated.
ARGUMENTS
alias
The dependency alias to create a patch for
OPERATION
1. Compares the current module state with the original
2. Generates a unified diff of all changes
3. Saves the patch to .cell/patches/alias.patch
4. Updates cell.toml to reference the patch
PATCH APPLICATION
Patches are automatically applied:
- After 'cell get' fetches a module
- After 'cell update' downloads a new version
- During 'cell vendor' operations
Application order matches the order in cell.toml [patches] section.
EXAMPLES
Create a patch after modifying a dependency:
# Edit files in .cell/modules/chess@v1.0.0/
cell patch chess
This creates:
.cell/patches/chess.patch
And updates cell.toml:
[patches]
chess = "./patches/chess.patch"
PATCH FORMAT
Patches use unified diff format:
--- a/main.js
+++ b/main.js
@@ -10,3 +10,4 @@
function init() {
console.log("Starting chess engine");
+ console.log("With custom modifications");
}
NOTES
- Keep patches small and focused
- Document why each patch is needed
- Test patches with new versions during updates
- Patches may fail to apply if upstream changes conflict
- TODO: Actual implementation pending
FILES
.cell/patches/
Directory containing all patch files
.cell/cell.toml
Updated with patch references
WORKFLOW
1. cell get git.world/jj/chess@v1.0.0
2. Modify files in .cell/modules/chess@v1.0.0/
3. cell patch chess
4. Commit .cell/patches/chess.patch to version control
5. Future 'cell get' commands will apply the patch
SEE ALSO
cell(1), cell-get(1), cell-update(1)

54
scripts/man/update.man Normal file
View File

@@ -0,0 +1,54 @@
CELL-UPDATE(1) Cell Manual CELL-UPDATE(1)
NAME
cell update - Update a dependency to a new version
SYNOPSIS
cell update <alias> <version>
DESCRIPTION
The update command changes the version of an existing dependency
in your project. It updates the cell.toml file and prepares the
new version for download.
ARGUMENTS
alias
The dependency alias as defined in cell.toml
version
The new version to update to (e.g., v1.2.0, head)
OPERATION
1. Loads the current cell.toml configuration
2. Verifies the dependency exists
3. Updates the version in the dependencies section
4. Saves the updated configuration
5. Creates the new module directory
6. Downloads the new version (TODO: implementation pending)
EXAMPLES
Update to a specific version:
cell update chess v1.2.0
Update to latest:
cell update chess head
The dependency entry will be updated from:
chess = "git.world/jj/chess@v1.0.0"
To:
chess = "git.world/jj/chess@v1.2.0"
NOTES
- The old version remains in .cell/modules/ until manually removed
- Run 'cell build' after updating to recompile modules
- The update preserves the original module path, only changing version
FILES
.cell/cell.toml
Updated with the new version
.cell/modules/
New version directory created
SEE ALSO
cell(1), cell-get(1), cell-build(1)

61
scripts/man/vendor.man Normal file
View File

@@ -0,0 +1,61 @@
CELL-VENDOR(1) Cell Manual CELL-VENDOR(1)
NAME
cell vendor - Copy all dependencies locally for hermetic builds
SYNOPSIS
cell vendor
DESCRIPTION
The vendor command copies all dependencies from .cell/modules/ into
a top-level modules/ directory in your project. This creates a
hermetic build environment where all dependencies are committed
alongside your code.
Vendoring is useful for:
- Ensuring builds work without network access
- Committing exact dependency versions to version control
- Protecting against upstream repositories disappearing
- Creating fully reproducible builds
OPERATION
1. Reads all dependencies from cell.toml
2. Creates modules/ directory if it doesn't exist
3. Copies each dependency from .cell/modules/ to modules/
4. Preserves directory structure and all files
5. Optionally updates import paths (TODO: implementation pending)
DIRECTORY STRUCTURE
Before vendoring:
.cell/modules/
├── chess@v1.0.0/
└── engine@v2.1.0/
After vendoring:
modules/
├── chess@v1.0.0/
└── engine@v2.1.0/
EXAMPLES
Vendor all dependencies:
cell vendor
After vendoring, commit the modules directory:
git add modules/
git commit -m "Vendor dependencies"
NOTES
- Vendored modules take precedence over .cell/modules/
- The modules/ directory can be committed to version control
- Run vendor after adding or updating dependencies
- Removes the need for 'cell get' on fresh checkouts
FILES
modules/
Created and populated with all dependencies
.cell/cell.toml
Read to determine which modules to vendor
SEE ALSO
cell(1), cell-get(1), cell-build(1)

139
scripts/mod.ce Normal file
View File

@@ -0,0 +1,139 @@
var shop = use('shop')
var http = use('http')
var miniz = use('miniz')
var io = use('io')
var crypto = use('crypto')
var text = use('text')
var toml = use('toml')
var time = use('time')
var uses = {}
uses.download = function()
{
var mods = shop.load_config().dependencies
var cache_dir = '.cell/cache'
var modules_dir = '.cell/modules'
var lock_path = '.cell/lock.toml'
// Ensure directories exist
if (!io.exists(cache_dir))
io.mkdir(cache_dir)
if (!io.exists(modules_dir))
io.mkdir(modules_dir)
// Load or create lock file
var lock = {}
if (io.exists(lock_path)) {
var lock_content = io.slurp(lock_path)
lock = toml.decode(lock_content)
}
if (!lock.modules) lock.modules = {}
for (var mod in mods) {
var cache_path = cache_dir + '/' + mod + '.zip'
var module_path = modules_dir + '/' + mod
var zip
var need_download = false
// Check if module exists in lock file
if (!lock.modules[mod] || !lock.modules[mod].hash) {
log.console(`${mod}: not in lock file, will download`)
need_download = true
} else if (!io.exists(cache_path)) {
log.console(`${mod}: cache missing, will download`)
need_download = true
}
if (!need_download) {
// Verify cached file hash
log.console(`${mod}: verifying cached version`)
zip = io.slurpbytes(cache_path)
var hash = crypto.hash(zip)
var hash_b32 = text(hash, "t")
if (hash_b32 !== lock.modules[mod].hash) {
log.console(`${mod}: hash mismatch, will redownload`)
log.console(` expected: ${lock.modules[mod].hash}`)
log.console(` actual: ${hash_b32}`)
need_download = true
} else {
log.console(`${mod}: hash verified`)
}
}
if (need_download) {
// Download the module
log.console(`downloading ${mod} at ${mods[mod]}`)
log.console(shop.get_download_url(mods[mod]))
zip = http.fetch(shop.get_download_url(mods[mod]))
io.slurpwrite(cache_path, zip)
log.console(`${mod}: downloaded ${zip.length} bytes`)
// Calculate and store hash
var hash = crypto.hash(zip)
var hash_b32 = text(hash, "t")
lock.modules[mod] = {
hash: hash_b32,
url: mods[mod],
downloaded: time.text()
}
log.console(`${mod}: hash = ${hash_b32}`)
// Save updated lock file
io.slurpwrite(lock_path, toml.encode(lock))
}
// Extract the module
var reader = miniz.read(zip)
var count = reader.count()
log.console(`extracting ${mod} (${count} files)...`)
// Create module directory
if (!io.exists(module_path))
io.mkdir(module_path)
// Extract each file
for (var i = 0; i < count; i++) {
if (reader.is_directory(i))
continue
var filename = reader.get_filename(i)
// Strip the module name prefix if present
var prefix = mod + '/'
if (filename.indexOf(prefix) === 0)
filename = filename.substring(prefix.length)
// Skip if filename is empty after stripping
if (!filename)
continue
var filepath = module_path + '/' + filename
// Create subdirectories if needed
var parts = filename.split('/')
if (parts.length > 1) {
var dir = module_path
for (var j = 0; j < parts.length - 1; j++) {
dir = dir + '/' + parts[j]
if (!io.exists(dir))
io.mkdir(dir)
}
}
// Extract and write file
var data = reader.slurp(reader.get_filename(i))
io.slurpwrite(filepath, data)
}
log.console(`${mod}: extracted to ${module_path}`)
}
}
if (uses[arg[0]])
uses[arg[0]]()
else
log.console(`Command ${arg[0]} not understood.`)

View File

@@ -6,12 +6,12 @@ var json = use('json')
var Shop = {}
+var shop_path = '.cell/cell.toml'
// Load cell.toml configuration
Shop.load_config = function() {
-var shop_path = '.cell/cell.toml'
-if (!io.exists(shop_path)) {
+if (!io.exists(shop_path))
return null
-}
var content = io.slurp(shop_path)
return toml.decode(content)
@@ -19,62 +19,7 @@ Shop.load_config = function() {
// Save cell.toml configuration
Shop.save_config = function(config) {
-// Simple TOML writer for our needs
+io.slurpwrite(shop_path, toml.encode(config))
var lines = []
// Top-level strings
if (config.module) lines.push('module = "' + config.module + '"')
if (config.engine) lines.push('engine = "' + config.engine + '"')
if (config.entrypoint) lines.push('entrypoint = "' + config.entrypoint + '"')
// Dependencies section
if (config.dependencies && Object.keys(config.dependencies).length > 0) {
lines.push('')
lines.push('[dependencies]')
for (var key in config.dependencies) {
lines.push(key + ' = "' + config.dependencies[key] + '"')
}
}
// Aliases section
if (config.aliases && Object.keys(config.aliases).length > 0) {
lines.push('')
lines.push('[aliases]')
for (var key in config.aliases) {
lines.push(key + ' = "' + config.aliases[key] + '"')
}
}
// Replace section
if (config.replace && Object.keys(config.replace).length > 0) {
lines.push('')
lines.push('[replace]')
for (var key in config.replace) {
lines.push('"' + key + '" = "' + config.replace[key] + '"')
}
}
// Patches section
if (config.patches && Object.keys(config.patches).length > 0) {
lines.push('')
lines.push('[patches]')
for (var key in config.patches) {
lines.push(key + ' = "' + config.patches[key] + '"')
}
}
// Mods section
if (config.mods && config.mods.enabled && config.mods.enabled.length > 0) {
lines.push('')
lines.push('[mods]')
lines.push('enabled = [')
for (var i = 0; i < config.mods.enabled.length; i++) {
lines.push(' "' + config.mods.enabled[i] + '",')
}
lines.push(']')
}
io.slurpwrite('.cell/cell.toml', lines.join('\n'))
}
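Config persistence is now a plain TOML round-trip through the toml module; a usage sketch (the chess dependency is illustrative):

    var shop = use('shop')
    var config = shop.load_config()                       // toml.decode of .cell/cell.toml
    config.dependencies = config.dependencies || {}
    config.dependencies.chess = "git.world/jj/chess@v1.0.0"
    shop.save_config(config)                              // toml.encode written back to .cell/cell.toml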
// Initialize .cell directory structure
@@ -95,22 +40,6 @@ Shop.init = function() {
io.mkdir('.cell/patches')
}
if (!io.exists('.cell/cell.toml')) {
var default_config = {
module: "my-game",
engine: "mist/prosperon@v0.9.3",
entrypoint: "main.js",
dependencies: {},
aliases: {},
replace: {},
patches: {},
mods: {
enabled: []
}
}
Shop.save_config(default_config)
}
if (!io.exists('.cell/lock.toml')) {
io.slurpwrite('.cell/lock.toml', '# Lock file for module integrity\n')
}
@@ -118,94 +47,6 @@ Shop.init = function() {
return true
}
// Mount modules according to the specification
Shop.mount = function() {
var config = Shop.load_config()
if (!config) {
log.error("No cell.toml found")
return false
}
// 1. Mount mods first (highest priority, prepend=1)
if (config.mods && config.mods.enabled) {
for (var i = 0; i < config.mods.enabled.length; i++) {
var mod_path = config.mods.enabled[i]
if (io.exists(mod_path)) {
io.mount(mod_path, "/", true) // prepend=true
log.console("Mounted mod: " + mod_path)
}
}
}
// 2. Self is already mounted (project root)
// This happens in prosperon.c
// 3. Mount aliases (dependencies)
if (config.dependencies) {
for (var alias in config.dependencies) {
var version = config.dependencies[alias]
var parsed = Shop.parse_locator(version)
var module_name = alias
if (parsed && parsed.version) {
module_name = alias + '@' + parsed.version
}
// Check if replaced with local path
var mount_path = '.cell/modules/' + module_name
if (config.replace && config.replace[version]) {
mount_path = config.replace[version]
}
// Try compiled version first
var compiled_path = '.cell/build/' + module_name
if (io.exists(compiled_path)) {
io.mount(compiled_path, alias, false) // Mount at alias name
log.console("Mounted compiled: " + alias + " at /" + alias + " from " + compiled_path)
} else if (io.exists(mount_path)) {
io.mount(mount_path, alias, false) // Mount at alias name
log.console("Mounted source: " + alias + " at /" + alias + " from " + mount_path)
}
// Also handle short aliases
if (config.aliases) {
for (var short_alias in config.aliases) {
if (config.aliases[short_alias] === alias) {
if (io.exists(compiled_path)) {
io.mount(compiled_path, short_alias, false)
log.console("Mounted alias: " + short_alias + " -> " + alias)
} else if (io.exists(mount_path)) {
io.mount(mount_path, short_alias, false)
log.console("Mounted alias: " + short_alias + " -> " + alias)
}
}
}
}
}
}
// 4. Mount compiled modules directory
if (io.exists('.cell/build')) {
io.mount('.cell/build', "modules", false)
log.console("Mounted compiled modules at /modules")
}
// 5. Mount source modules directory
if (io.exists('.cell/modules')) {
io.mount('.cell/modules', "modules-src", false)
log.console("Mounted source modules at /modules-src")
}
// 6. Mount core if available
if (io.exists('.cell/modules/core')) {
io.mount('.cell/modules/core', "core", false)
log.console("Mounted core at /core")
}
// 6. Core is already mounted in prosperon.c
return true
}
// Parse module locator (e.g., "git.world/jj/mod@v0.6.3")
Shop.parse_locator = function(locator) {
var parts = locator.split('@')
@@ -220,6 +61,37 @@ Shop.parse_locator = function(locator) {
}
}
// Convert module locator to download URL
Shop.get_download_url = function(locator) {
var parsed = Shop.parse_locator(locator)
if (!parsed) return null
// Handle different git hosting patterns
if (locator.startsWith('https://')) {
// Remove https:// prefix for parsing
var cleanLocator = locator.substring(8)
var hostAndPath = cleanLocator.split('@')[0]
// Gitea pattern: gitea.pockle.world/user/repo@branch
if (hostAndPath.includes('gitea.')) {
return 'https://' + hostAndPath + '/archive/' + parsed.version + '.zip'
}
// GitHub pattern: github.com/user/repo@tag
if (hostAndPath.includes('github.com')) {
return 'https://' + hostAndPath + '/archive/refs/tags/' + parsed.version + '.zip'
}
// GitLab pattern: gitlab.com/user/repo@tag
if (hostAndPath.includes('gitlab.')) {
return 'https://' + hostAndPath + '/-/archive/' + parsed.version + '/' + parsed.name + '-' + parsed.version + '.zip'
}
}
// Fallback to original locator if no pattern matches
return locator
}
// Add a dependency
Shop.add_dependency = function(alias, locator) {
var config = Shop.load_config()
@@ -263,8 +135,6 @@ Shop.compile_module = function(alias) {
return false
}
// TODO: Implement actual compilation
// For now, just copy .js files to .cell/build with .o extension
log.console("Would compile module: " + alias + " from " + module_dir) log.console("Would compile module: " + alias + " from " + module_dir)
return true return true
} }
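For reference, how the new Shop.get_download_url maps locators to archive URLs (repositories are illustrative; this assumes parse_locator returns the part after '@' as the version):

    var shop = use('shop')
    shop.get_download_url("https://gitea.pockle.world/john/extramath@master")
    // -> "https://gitea.pockle.world/john/extramath/archive/master.zip"
    shop.get_download_url("https://github.com/user/repo@v1.0.0")
    // -> "https://github.com/user/repo/archive/refs/tags/v1.0.0.zip"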

View File

@@ -178,16 +178,93 @@ function text() {
}
}
-// Default: convert to lowercase hex
-var hex_digits = "0123456789abcdef";
-for (var i = 0; i < bit_length; i += 8) {
-var byte_val = 0;
-for (var j = 0; j < 8 && i + j < bit_length; j++) {
-var bit = arg.read_logical(i + j);
-if (bit) byte_val |= (1 << j);
-}
-result += hex_digits[(byte_val >> 4) & 0xF];
-result += hex_digits[byte_val & 0xF];
+// Default: interpret as UTF-8 text
+var byte_count = Math.floor(bit_length / 8);
+var bytes = [];
+// Read bytes from the blob
+for (var i = 0; i < byte_count; i++) {
+var byte_val = 0;
+for (var j = 0; j < 8; j++) {
+var bit_pos = i * 8 + j;
+var bit = arg.read_logical(bit_pos);
+if (bit) byte_val |= (1 << j);
+}
+bytes.push(byte_val);
+}
// Convert bytes to UTF-8 string
var result = "";
var i = 0;
while (i < bytes.length) {
var b1 = bytes[i];
var codepoint;
var nextI;
if (b1 < 0x80) {
// 1-byte ASCII
codepoint = b1;
nextI = i + 1;
} else if (b1 < 0xC0) {
// Invalid start byte, treat as replacement character
codepoint = 0xFFFD;
nextI = i + 1;
} else if (b1 < 0xE0) {
// 2-byte sequence
if (i + 1 < bytes.length && (bytes[i + 1] & 0xC0) === 0x80) {
codepoint = ((b1 & 0x1F) << 6) | (bytes[i + 1] & 0x3F);
nextI = i + 2;
} else {
codepoint = 0xFFFD;
nextI = i + 1;
}
} else if (b1 < 0xF0) {
// 3-byte sequence
if (i + 2 < bytes.length &&
(bytes[i + 1] & 0xC0) === 0x80 &&
(bytes[i + 2] & 0xC0) === 0x80) {
codepoint = ((b1 & 0x0F) << 12) |
((bytes[i + 1] & 0x3F) << 6) |
(bytes[i + 2] & 0x3F);
nextI = i + 3;
} else {
codepoint = 0xFFFD;
nextI = i + 1;
}
} else if (b1 < 0xF8) {
// 4-byte sequence
if (i + 3 < bytes.length &&
(bytes[i + 1] & 0xC0) === 0x80 &&
(bytes[i + 2] & 0xC0) === 0x80 &&
(bytes[i + 3] & 0xC0) === 0x80) {
codepoint = ((b1 & 0x07) << 18) |
((bytes[i + 1] & 0x3F) << 12) |
((bytes[i + 2] & 0x3F) << 6) |
(bytes[i + 3] & 0x3F);
nextI = i + 4;
} else {
codepoint = 0xFFFD;
nextI = i + 1;
}
} else {
// Invalid start byte
codepoint = 0xFFFD;
nextI = i + 1;
}
// Convert codepoint to string
if (codepoint <= 0xFFFF) {
result += String.fromCharCode(codepoint);
} else if (codepoint <= 0x10FFFF) {
// Convert to surrogate pair for JavaScript
codepoint -= 0x10000;
result += String.fromCharCode(0xD800 + (codepoint >> 10));
result += String.fromCharCode(0xDC00 + (codepoint & 0x3FF));
} else {
result += String.fromCharCode(0xFFFD); // Replacement character
}
i = nextI;
}
return result;
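With this change, text(blob) with no format flag decodes the blob's bytes as UTF-8 (invalid sequences become U+FFFD), which is what the reworked HTTP test below relies on. A usage sketch (URL taken from that test):

    var text = use('text')
    var http = use('http')
    var body = http.fetch("https://httpbin.org/stream/3")   // fetch returns a blob
    log.console(text(body))                                  // decoded as a UTF-8 string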

View File

@@ -4,6 +4,7 @@
#include <string.h> #include <string.h>
#include "monocypher.h" #include "monocypher.h"
#include "qjs_blob.h"
#include <stdint.h> #include <stdint.h>
#include <stddef.h> #include <stddef.h>
@@ -183,25 +184,60 @@ JSValue js_crypto_shared(JSContext *js, JSValue self, int argc, JSValue *argv)
JSValue js_crypto_random(JSContext *js, JSValue self, int argc, JSValue *argv)
{
// 1) Pull 64 bits of cryptographically secure randomness
uint64_t r;
if (randombytes(&r, sizeof(r)) != 0) {
// If something fails (extremely rare), throw an error
return JS_ThrowInternalError(js, "crypto.random: unable to get random bytes");
}
// 2) Convert r to a double in the range [0,1).
// We divide by (UINT64_MAX + 1.0) to ensure we never produce exactly 1.0.
double val = (double)r / ((double)UINT64_MAX + 1.0);
// 3) Return that as a JavaScript number
return JS_NewFloat64(js, val);
}
JSValue js_crypto_hash(JSContext *js, JSValue self, int argc, JSValue *argv)
{
if (argc < 1)
return JS_ThrowTypeError(js, "hash requires at least one argument");
// Get input data
size_t data_len;
void *data = js_get_blob_data(js, &data_len, argv[0]);
if (!data)
return JS_ThrowTypeError(js, "hash: first argument must be an ArrayBuffer");
// Get hash length (default 32)
int32_t hash_len = 32;
if (argc > 1) {
if (JS_ToInt32(js, &hash_len, argv[1]))
return JS_EXCEPTION;
if (hash_len < 1 || hash_len > 64)
return JS_ThrowRangeError(js, "hash length must be between 1 and 64");
}
// Allocate output buffer
uint8_t *hash = js_malloc(js, hash_len);
if (!hash)
return JS_EXCEPTION;
// Compute BLAKE2b hash
crypto_blake2b(hash, hash_len, data, data_len);
// Return as blob
JSValue result = js_new_blob_stoned_copy(js, hash, hash_len);
js_free(js, hash);
return result;
}
static const JSCFunctionListEntry js_crypto_funcs[] = {
JS_CFUNC_DEF("keypair", 0, js_crypto_keypair),
JS_CFUNC_DEF("shared", 1, js_crypto_shared),
JS_CFUNC_DEF("random", 0, js_crypto_random),
JS_CFUNC_DEF("hash", 2, js_crypto_hash),
};
JSValue js_crypto_use(JSContext *js) JSValue js_crypto_use(JSContext *js)
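From script code the new binding is reachable through the crypto module; a usage sketch mirroring how scripts/mod.ce uses it (zip_blob stands in for any blob):

    var crypto = use('crypto')
    var text = use('text')
    var digest = crypto.hash(zip_blob)        // 32-byte BLAKE2b digest by default
    var digest16 = crypto.hash(zip_blob, 16)  // optional output length, 1-64 bytes
    log.console(text(digest, "t"))            // the textual form stored in .cell/lock.toml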

View File

@@ -271,7 +271,6 @@ cleanup_tls:
return blob;
}
// Export the function as “fetch2” (for example)
static const JSCFunctionListEntry js_http_funcs[] = {
JS_CFUNC_DEF("fetch", 2, js_fetch_picoparser),
};

View File

@@ -42,17 +42,15 @@ static JSValue js_miniz_read(JSContext *js, JSValue self, int argc, JSValue *arg
{
size_t len;
void *data = js_get_blob_data(js, &len, argv[0]);
-if (!data) {
-printf("Could not create data.\n");
-return JS_UNDEFINED;
-}
+if (!data)
+return JS_ThrowReferenceError(js, "Could not create data.\n");
mz_zip_archive *zip = calloc(sizeof(*zip),1);
int success = mz_zip_reader_init_mem(zip, data, len, 0);
int err = mz_zip_get_last_error(zip);
-if (err) {
-printf("%s\n", mz_zip_get_error_string(err));
-return JS_UNDEFINED;
-}
+if (err)
+return JS_ThrowInternalError(js, "miniz error: %s\n", mz_zip_get_error_string(err));
JSValue jszip = JS_NewObjectClass(js, js_reader_class_id);
JS_SetOpaque(jszip, zip);
return jszip;
@@ -61,9 +59,25 @@ static JSValue js_miniz_read(JSContext *js, JSValue self, int argc, JSValue *arg
static JSValue js_miniz_write(JSContext *js, JSValue self, int argc, JSValue *argv)
{
const char *file = JS_ToCString(js, argv[0]);
-mz_zip_archive *zip = malloc(sizeof(*zip));
-mz_zip_writer_init_file(zip, file, 0);
-JS_FreeCString(js,file);
+if (!file)
+return JS_EXCEPTION;
mz_zip_archive *zip = calloc(sizeof(*zip), 1);
if (!zip) {
JS_FreeCString(js, file);
return JS_ThrowOutOfMemory(js);
}
mz_bool success = mz_zip_writer_init_file(zip, file, 0);
JS_FreeCString(js, file);
if (!success) {
int err = mz_zip_get_last_error(zip);
mz_zip_writer_end(zip);
free(zip);
return JS_ThrowInternalError(js, "Failed to initialize zip writer: %s", mz_zip_get_error_string(err));
}
JSValue jszip = JS_NewObjectClass(js, js_writer_class_id);
JS_SetOpaque(jszip, zip);
return jszip;
@@ -207,50 +221,162 @@ static const JSCFunctionListEntry js_writer_funcs[] = {
JSValue js_reader_mod(JSContext *js, JSValue self, int argc, JSValue *argv)
{
const char *file = JS_ToCString(js,argv[0]);
if (!file)
return JS_EXCEPTION;
mz_zip_archive *zip = js2reader(js, self);
if (!zip) {
JS_FreeCString(js, file);
return JS_ThrowInternalError(js, "Invalid zip reader");
}
mz_zip_archive_file_stat pstat;
mz_uint index = mz_zip_reader_locate_file(zip, file, NULL, 0);
-JS_FreeCString(js,file);
-if (index == -1) return JS_UNDEFINED;
-mz_zip_reader_file_stat(zip, index, &pstat);
+if (index == (mz_uint)-1) {
JS_FreeCString(js, file);
return JS_ThrowReferenceError(js, "File '%s' not found in archive", file);
}
JS_FreeCString(js, file);
if (!mz_zip_reader_file_stat(zip, index, &pstat)) {
int err = mz_zip_get_last_error(zip);
return JS_ThrowInternalError(js, "Failed to get file stats: %s", mz_zip_get_error_string(err));
}
return JS_NewFloat64(js, pstat.m_time);
}
JSValue js_reader_exists(JSContext *js, JSValue self, int argc, JSValue *argv)
{
const char *file = JS_ToCString(js,argv[0]);
if (!file)
return JS_EXCEPTION;
mz_zip_archive *zip = js2reader(js, self);
if (!zip) {
JS_FreeCString(js, file);
return JS_ThrowInternalError(js, "Invalid zip reader");
}
mz_uint index = mz_zip_reader_locate_file(zip, file, NULL, 0);
JS_FreeCString(js,file);
-if (index == -1) return JS_NewBool(js, 0);
+if (index == (mz_uint)-1) return JS_NewBool(js, 0);
return JS_NewBool(js, 1);
}
JSValue js_reader_slurp(JSContext *js, JSValue self, int argc, JSValue *argv)
{
const char *file = JS_ToCString(js,argv[0]);
if (!file)
return JS_EXCEPTION;
mz_zip_archive *zip = js2reader(js, self);
if (!zip) {
JS_FreeCString(js, file);
return JS_ThrowInternalError(js, "Invalid zip reader");
}
size_t len;
void *data = mz_zip_reader_extract_file_to_heap(zip, file, &len, 0);
-JS_FreeCString(js,file);
-if (!data)
-return JS_UNDEFINED;
-JSValue ret;
-if (JS_ToBool(js, argv[1]))
-ret = JS_NewStringLen(js, data, len);
-else
-ret = js_new_blob_stoned_copy(js, data, len);
+if (!data) {
+int err = mz_zip_get_last_error(zip);
+const char *filename = file;
+JS_FreeCString(js, file);
+return JS_ThrowInternalError(js, "Failed to extract file '%s': %s", filename, mz_zip_get_error_string(err));
+}
+JS_FreeCString(js, file);
+JSValue ret = js_new_blob_stoned_copy(js, data, len);
free(data);
return ret;
}
JSValue js_reader_list(JSContext *js, JSValue self, int argc, JSValue *argv)
{
mz_zip_archive *zip = js2reader(js, self);
if (!zip)
return JS_ThrowInternalError(js, "Invalid zip reader");
mz_uint num_files = mz_zip_reader_get_num_files(zip);
JSValue arr = JS_NewArray(js);
if (JS_IsException(arr))
return arr;
mz_uint arr_index = 0;
for (mz_uint i = 0; i < num_files; i++) {
mz_zip_archive_file_stat file_stat;
if (!mz_zip_reader_file_stat(zip, i, &file_stat))
continue;
JSValue filename = JS_NewString(js, file_stat.m_filename);
if (JS_IsException(filename)) {
JS_FreeValue(js, arr);
return filename;
}
JS_SetPropertyUint32(js, arr, arr_index++, filename);
}
return arr;
}
JSValue js_reader_is_directory(JSContext *js, JSValue self, int argc, JSValue *argv)
{
if (argc < 1)
return JS_ThrowTypeError(js, "is_directory requires a file index");
int32_t index;
if (JS_ToInt32(js, &index, argv[0]))
return JS_EXCEPTION;
mz_zip_archive *zip = js2reader(js, self);
if (!zip)
return JS_ThrowInternalError(js, "Invalid zip reader");
return JS_NewBool(js, mz_zip_reader_is_file_a_directory(zip, index));
}
JSValue js_reader_get_filename(JSContext *js, JSValue self, int argc, JSValue *argv)
{
if (argc < 1)
return JS_ThrowTypeError(js, "get_filename requires a file index");
int32_t index;
if (JS_ToInt32(js, &index, argv[0]))
return JS_EXCEPTION;
mz_zip_archive *zip = js2reader(js, self);
if (!zip)
return JS_ThrowInternalError(js, "Invalid zip reader");
mz_zip_archive_file_stat file_stat;
if (!mz_zip_reader_file_stat(zip, index, &file_stat))
return JS_ThrowInternalError(js, "Failed to get file stats");
return JS_NewString(js, file_stat.m_filename);
}
JSValue js_reader_count(JSContext *js, JSValue self, int argc, JSValue *argv)
{
mz_zip_archive *zip = js2reader(js, self);
if (!zip)
return JS_ThrowInternalError(js, "Invalid zip reader");
return JS_NewUint32(js, mz_zip_reader_get_num_files(zip));
}
static const JSCFunctionListEntry js_reader_funcs[] = {
JS_CFUNC_DEF("mod", 1, js_reader_mod),
JS_CFUNC_DEF("exists", 1, js_reader_exists),
-JS_CFUNC_DEF("slurp", 2, js_reader_slurp),
+JS_CFUNC_DEF("slurp", 1, js_reader_slurp),
JS_CFUNC_DEF("list", 0, js_reader_list),
JS_CFUNC_DEF("is_directory", 1, js_reader_is_directory),
JS_CFUNC_DEF("get_filename", 1, js_reader_get_filename),
JS_CFUNC_DEF("count", 0, js_reader_count),
};
JSValue js_miniz_use(JSContext *js) JSValue js_miniz_use(JSContext *js)
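From script code the expanded reader API matches how scripts/mod.ce walks an archive; a usage sketch (the zip path is illustrative):

    var miniz = use('miniz')
    var io = use('io')
    var reader = miniz.read(io.slurpbytes("example.zip"))
    log.console(reader.count() + " entries")
    for (var i = 0; i < reader.count(); i++) {
      if (reader.is_directory(i)) continue
      var name = reader.get_filename(i)
      var data = reader.slurp(name)           // slurp now always returns a blob
      log.console(name + ": " + data.length + " bytes")
    }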

View File

@@ -1,239 +1,25 @@
+var http = use('http')
+var text = use('text')
+// Test with a simpler endpoint first
+log.console("Testing httpbin.org chunked response...")
+try {
+var b = http.fetch("https://httpbin.org/stream/3")
+log.console(b.length)
+var text1 = text(b)
+log.console("httpbin response length:", text1.length)
+log.console("httpbin response:", text1)
+} catch (e) {
+log.console("httpbin error:", e)
+}
+log.console("\nTesting dictionary.ink...")
+try {
+var b2 = http.fetch("https://dictionary.ink/find?word=theological")
+log.console(b2.length)
+var text2 = text(b2)
+log.console("dictionary response length:", text2.length)
+log.console("dictionary first 500 chars:", text2.substring(0, 500))
+} catch (e) {
+log.console("dictionary error:", e)
+}
// http_test.js
var http = use('http');
var os = use('os');
var got = false
var count = 0
http.fetch("https://dictionary.ink/find?word=theological", {
on_data: e => {
log.console(e.length)
count++
},
callback: e => {
for (var i in e) log.console(i)
log.console(e.data)
got = true
}
})
log.console(`got hit ${count} times`)
os.exit()
// Deep comparison function (unchanged from previous version)
function deepCompare(expected, actual, path = '') {
if (expected === actual) return { passed: true, messages: [] };
if (typeof expected === 'string' && typeof actual === 'string') {
if (expected === actual) {
return { passed: true, messages: [] };
}
return {
passed: false,
messages: [`String mismatch at ${path}: expected "${expected}", got "${actual}"`]
};
}
if (typeof expected === 'object' && expected !== null &&
typeof actual === 'object' && actual !== null) {
const expKeys = Object.keys(expected).sort();
const actKeys = Object.keys(actual).sort();
if (JSON.stringify(expKeys) !== JSON.stringify(actKeys)) {
return {
passed: false,
messages: [`Object keys mismatch at ${path}: expected ${expKeys}, got ${actKeys}`]
};
}
let messages = [];
for (let key of expKeys) {
const result = deepCompare(expected[key], actual[key], `${path}.${key}`);
if (!result.passed) messages.push(...result.messages);
}
return { passed: messages.length === 0, messages };
}
return {
passed: false,
messages: [`Value mismatch at ${path}: expected ${JSON.stringify(expected)}, got ${JSON.stringify(actual)}`]
};
} }
// Test cases (slightly modified to include state tracking) log.console("\nTesting dictionary.ink...")
var testCases = [ try {
{ var b2 = http.fetch("https://dictionary.ink/find?word=theological")
name: "Basic GET request", log.console(b2.length)
url: "https://api.github.com", var text2 = text(b2)
expected: { contains: "GitHub" }, log.console("dictionary response length:", text2.length)
validate: function(result) { return result.toLowerCase().includes("github"); }, log.console("dictionary first 500 chars:", text2.substring(0, 500))
completed: false, } catch (e) {
result: null, log.console("dictionary error:", e)
error: null
},
{
name: "JSON response",
url: "https://api.github.com/users/octocat",
expected: { login: "octocat" },
validate: function(result) {
let parsed = JSON.parse(result);
return deepCompare({ login: "octocat" }, { login: parsed.login });
},
completed: false,
result: null,
error: null
},
{
name: "Follow redirect",
url: "http://github.com",
expected: { contains: "gihtub" },
validate: function(result) { return result.toLowerCase().includes("github"); },
completed: false,
result: null,
error: null
},
{
name: "Invalid URL",
url: "http://nonexistent.domain.xyz",
expectError: true,
validate: function(result) { return true; },
completed: false,
result: null,
error: null
},
{
name: "Malformed URL",
url: "not-a-url",
expectError: true,
validate: function(result) { return true; },
completed: false,
result: null,
error: null
},
{
name: "Large response",
url: "https://www.gutenberg.org/files/1342/1342-0.txt",
expected: { contains: "Pride and Prejudice" },
validate: function(result) { return result.includes("Pride and Prejudice"); },
completed: false,
result: null,
error: null
}
];
// Test execution state
var results = [];
var testCount = 0;
var activeRequests = 0;
var timeout = 5000; // 5 seconds timeout per test
// Start tests
function startTests() {
testCount = testCases.length;
activeRequests = testCount;
for (let i = 0; i < testCases.length; i++) {
let test = testCases[i];
let testName = `Test ${i + 1}: ${test.name}`;
http.fetch(test.url, function(result) {
test.completed = true;
activeRequests--;
if (result.error) {
test.error = result.error;
} else {
test.result = result.data;
}
});
}
// Start polling loop
pollTests();
}
// Poll and check test completion
function pollTests() {
let startTime = os.now();
while (true) {
http.poll();
let allCompleted = activeRequests === 0;
let timedOut = (os.now() - startTime) >= timeout;
if (allCompleted || timedOut) {
processResults();
break;
}
// Sleep a bit to avoid pegging the CPU (requires a C function or std.sleep)
// TODO: Implement!
}
}
// Process and report results
function processResults() {
for (let i = 0; i < testCases.length; i++) {
let test = testCases[i];
let testName = `Test ${i + 1}: ${test.name}`;
let passed = true;
let messages = [];
if (!test.completed) {
passed = false;
messages.push("Test timed out");
} else if (test.error) {
if (test.expectError) {
// Expected error occurred
} else {
passed = false;
messages.push(`Request failed: ${test.error}`);
}
} else if (test.expectError) {
passed = false;
messages.push("Expected request to fail but it succeeded");
} else {
const validation = test.validate(test.result);
if (typeof validation === 'boolean') {
if (!validation) {
passed = false;
messages.push(`Validation failed for ${test.url}`);
messages.push(`Expected to contain: ${JSON.stringify(test.expected)}`);
messages.push(`Got: ${test.result.substring(0, 100)}...`);
}
} else if (!validation.passed) {
passed = false;
messages.push(...validation.messages);
}
}
results.push({ testName, passed, messages });
if (!passed) {
log.console(`\nDetailed Failure Report for ${testName}:`);
log.console(`URL: ${test.url}`);
log.console(messages.join("\n"));
log.console("");
}
}
// Summary
log.console("\nTest Summary:");
results.forEach(result => {
log.console(`${result.testName} - ${result.passed ? "Passed" : "Failed"}`);
if (!result.passed) {
log.console(result.messages.join("\n"));
}
});
let passedCount = results.filter(r => r.passed).length;
log.console(`\nResult: ${passedCount}/${testCount} tests passed`);
if (passedCount < testCount) {
log.console("Overall: FAILED");
os.exit(1);
} else {
log.console("Overall: PASSED");
os.exit(0);
}
}
// Run the tests
startTests();

View File

@@ -1,49 +0,0 @@
var http = use('http')
var os = use('os')
var downloader
var download_complete = false
function checkin()
{
if (download_complete) return
send(downloader, {type:'status'}, e => {
log.console("Status:", json.encode(e))
// Check if download is complete or error
if (e.type === 'error' || (e.type === 'status_response' && e.status === 'idle')) {
// Stop checking if no download in progress
return
}
// Continue checking
$_.delay(checkin, 0.5)
})
}
$_.start(e => {
log.console(json.encode(e))
if (e.type === 'greet') {
downloader = e.actor
// Start download
send(downloader, {
type:'download',
url: 'https://dictionary.ink/find?word=palm'
}, e => {
log.console("Download response:", json.encode(e))
download_complete = true
if (e.type === 'complete') {
log.console("Download complete! Size:", e.size, "bytes")
} else if (e.type === 'error') {
log.console("Download failed:", e.error)
}
})
// Start status checking after a small delay
$_.delay(checkin, 0.01)
}
}, "examples/http_download_actor")