1 Commit

Author: John Alanbrook
SHA1: cecf841d82
Message: managed frames
Date: 2025-12-29 22:58:51 -06:00
93 changed files with 18959 additions and 10049 deletions

.cell/lock.toml (new file, 6 lines changed)

@@ -0,0 +1,6 @@
[modules]
[modules.extramath]
hash = "MCLZT3JABTAENS4WVXKGWJ7JPBLZER4YQ5VN2PE7ZD2Z4WYGTIMA===="
url = "https://gitea.pockle.world/john/extramath@master"
downloaded = "Monday June 2 12:07:20.42 PM -5 2025 AD"
commit = "84d81a19a8455bcf8dc494739e9e6d545df6ff2c"

.gitignore (vendored, 1 line changed)

@@ -1,6 +1,5 @@
.git/
.obj/
website/
bin/
build/
*.zip


@@ -1,25 +0,0 @@
# Code style
All code uses 2 spaces for indentation.
For cell script and its integration files, prefer objects over classes, keep the use of prototypes to a minimum, and make objects sendable between actors (.ce files).
## cell script format
Cell script files end in .ce or .cm. Cell script is similar to JavaScript but with some differences.
Variables are declared with 'var'. Var behaves like let.
Constants are declared with 'def'.
!= and == are strict, there is no !== or ===.
There is no undefined, only null.
There are no classes, only objects and prototypes.
Prefer backticks for string interpolation. Otherwise, converting non-strings with the text() function is required.
Everything should be lowercase.
There are no arraybuffers, only blobs, which work with bits. A blob must be stoned, as in stone(blob), before being read from.
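A minimal sketch of these conventions, using only constructs that appear elsewhere in this commit (the identifiers are illustrative, not from the repo):

```
var blob = use('blob')                 // as in the sieve benchmark
var count = 3                          // 'var' behaves like let
def greeting = "hello"                 // 'def' declares a constant
if (count != null)                     // == and != are strict; null is the only empty value
  log.console(`${greeting} x${count}`) // prefer backticks for interpolation
log.console("count is " + text(count)) // otherwise convert non-strings with text()
var bits = new blob(8)                 // blobs stand in for arraybuffers and work with bits
stone(bits)                            // a blob must be stoned before it is read from
log.console(bits.read_logical(0))
```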
## c format
For cell script integration files, everything that can be declared static should be. Most don't have headers at all. Files in a package are not shared between packages.
There is no undefined, so only JS_IsNull and JS_NULL should be used.
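For illustration, a hedged sketch of an integration-file function under these rules; it assumes the QuickJS-style JS_* API used elsewhere in this repo, and the file and function names are made up:

```c
#include "quickjs.h"

/* static: integration files rarely have headers and are not shared between packages */
static JSValue js_pixel_at(JSContext *ctx, JSValueConst this_val,
                           int argc, JSValueConst *argv)
{
  /* there is no undefined, so test with JS_IsNull and return JS_NULL */
  if (argc < 1 || JS_IsNull(argv[0]))
    return JS_NULL;
  return JS_NewInt32(ctx, 0);
}
```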
## how module loading is done in cell script
Within a package, a C file that uses the correct macros (CELL_USE_FUNCS etc.) is loaded as a module under its name; so png.c inside a package is loaded as <package>/png, giving you access to its functions.
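So a script in the same package could pull those functions in with use(); the package and function names below are hypothetical:

```
// png.c built with CELL_USE_FUNCS becomes the module <package>/png
var png = use('image/png')        // 'image' is a hypothetical package name
log.console(png.pixel_at != null) // its C functions are exposed as properties
```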

add.ce (93 lines changed)

@@ -1,103 +1,28 @@
// cell add <locator> [alias] - Add a dependency to the current package
//
// Usage:
// cell add <locator> Add a dependency using default alias
// cell add <locator> <alias> Add a dependency with custom alias
//
// This adds the dependency to cell.toml and installs it to the shop.
// cell add <locator> [alias] - Add and install a package with its dependencies
var shop = use('internal/shop')
var pkg = use('package')
var build = use('build')
var fd = use('fd')
var locator = null
var alias = null
array(args, function(arg) {
if (arg == '--help' || arg == '-h') {
if (args.length < 1) {
log.console("Usage: cell add <locator> [alias]")
log.console("")
log.console("Add a dependency to the current package.")
log.console("")
log.console("Examples:")
log.console(" cell add gitea.pockle.world/john/prosperon")
log.console(" cell add gitea.pockle.world/john/cell-image image")
log.console(" cell add ../local-package")
log.console(" cell add gitea.pockle.world/john/prosperon@main")
log.console(" cell add github.com/user/repo@v1.0.0 myalias")
$stop()
} else if (!starts_with(arg, '-')) {
if (!locator) {
locator = arg
} else if (!alias) {
alias = arg
return
}
}
})
if (!locator) {
log.console("Usage: cell add <locator> [alias]")
$stop()
}
var locator = args[0]
// Resolve relative paths to absolute paths
if (locator == '.' || starts_with(locator, './') || starts_with(locator, '../') || fd.is_dir(locator)) {
if (locator == '.' || locator.startsWith('./') || locator.startsWith('../') || fd.is_dir(locator)) {
var resolved = fd.realpath(locator)
if (resolved) {
locator = resolved
}
}
var alias = args.length > 1 ? args[1] : null
// Generate default alias from locator
if (!alias) {
// Use the last component of the locator as alias
var parts = array(locator, '/')
alias = parts[length(parts) - 1]
// Remove any version suffix
if (search(alias, '@') != null) {
alias = array(alias, '@')[0]
}
}
// Check we're in a package directory
var cwd = fd.realpath('.')
if (!fd.is_file(cwd + '/cell.toml')) {
log.error("Not in a package directory (no cell.toml found)")
$stop()
}
log.console("Adding " + locator + " as '" + alias + "'...")
// Add to local project's cell.toml
try {
pkg.add_dependency(null, locator, alias)
log.console(" Added to cell.toml")
} catch (e) {
log.error("Failed to update cell.toml: " + e)
$stop()
}
// Install to shop
try {
shop.get(locator)
shop.extract(locator)
// Build scripts
shop.build_package_scripts(locator)
// Build C code if any
try {
var target = build.detect_host_target()
build.build_dynamic(locator, target, 'release')
} catch (e) {
// Not all packages have C code
}
log.console(" Installed to shop")
} catch (e) {
log.error("Failed to install: " + e)
$stop()
}
log.console("Added " + alias + " (" + locator + ")")
shop.get(locator, alias)
$stop()

bench.ce (261 lines changed)

@@ -24,45 +24,63 @@ def MAX_BATCH_SIZE = 100000000 // 100M iterations max per batch
// Statistical functions
function median(arr) {
if (length(arr) == 0) return 0
var sorted = sort(arr)
var mid = floor(length(arr) / 2)
if (length(arr) % 2 == 0) {
if (arr.length == 0) return 0
var sorted = arr.slice().sort(function(a, b) { return a - b })
var mid = number.floor(arr.length / 2)
if (arr.length % 2 == 0) {
return (sorted[mid - 1] + sorted[mid]) / 2
}
return sorted[mid]
}
function mean(arr) {
if (length(arr) == 0) return 0
if (arr.length == 0) return 0
var sum = 0
arrfor(arr, function(val) {
sum += val
})
return sum / length(arr)
for (var i = 0; i < arr.length; i++) {
sum += arr[i]
}
return sum / arr.length
}
function stddev(arr, mean_val) {
if (length(arr) < 2) return 0
if (arr.length < 2) return 0
var sum_sq_diff = 0
arrfor(arr, function(val) {
var diff = val - mean_val
for (var i = 0; i < arr.length; i++) {
var diff = arr[i] - mean_val
sum_sq_diff += diff * diff
})
return math.sqrt(sum_sq_diff / (length(arr) - 1))
}
return math.sqrt(sum_sq_diff / (arr.length - 1))
}
function percentile(arr, p) {
if (length(arr) == 0) return 0
var sorted = sort(arr)
var idx = floor(arr) * p / 100
if (idx >= length(arr)) idx = length(arr) - 1
if (arr.length == 0) return 0
var sorted = arr.slice().sort(function(a, b) { return a - b })
var idx = number.floor(arr.length * p / 100)
if (idx >= arr.length) idx = arr.length - 1
return sorted[idx]
}
function min_val(arr) {
if (arr.length == 0) return 0
var m = arr[0]
for (var i = 1; i < arr.length; i++) {
if (arr[i] < m) m = arr[i]
}
return m
}
function max_val(arr) {
if (arr.length == 0) return 0
var m = arr[0]
for (var i = 1; i < arr.length; i++) {
if (arr[i] > m) m = arr[i]
}
return m
}
// Parse arguments similar to test.ce
function parse_args() {
if (length(args) == 0) {
if (args.length == 0) {
if (!testlib.is_valid_package('.')) {
log.console('No cell.toml found in current directory')
return false
@@ -81,7 +99,7 @@ function parse_args() {
}
if (args[0] == 'package') {
if (length(args) < 2) {
if (args.length < 2) {
log.console('Usage: cell bench package <name> [bench]')
log.console(' cell bench package all')
return false
@@ -97,7 +115,7 @@ function parse_args() {
var lock = shop.load_lock()
if (lock[name]) {
target_pkg = name
} else if (starts_with(name, '/') && testlib.is_valid_package(name)) {
} else if (name.startsWith('/') && testlib.is_valid_package(name)) {
target_pkg = name
} else {
if (testlib.is_valid_package('.')) {
@@ -114,7 +132,7 @@ function parse_args() {
}
}
if (length(args) >= 3) {
if (args.length >= 3) {
target_bench = args[2]
}
@@ -126,7 +144,7 @@ function parse_args() {
var bench_path = args[0]
// Normalize path - add benches/ prefix if not present
if (!starts_with(bench_path, 'benches/') && !starts_with(bench_path, '/')) {
if (!bench_path.startsWith('benches/') && !bench_path.startsWith('/')) {
if (!fd.is_file(bench_path + '.cm') && !fd.is_file(bench_path)) {
if (fd.is_file('benches/' + bench_path + '.cm') || fd.is_file('benches/' + bench_path)) {
bench_path = 'benches/' + bench_path
@@ -159,18 +177,19 @@ function collect_benches(package_name, specific_bench) {
var files = pkg.list_files(package_name)
var bench_files = []
arrfor(files, function(f) {
if (starts_with(f, "benches/") && ends_with(f, ".cm")) {
for (var i = 0; i < files.length; i++) {
var f = files[i]
if (f.startsWith("benches/") && f.endsWith(".cm")) {
if (specific_bench) {
var bench_name = text(f, 0, -3)
var bench_name = f.substring(0, f.length - 3)
var match_name = specific_bench
if (!starts_with(match_name, 'benches/')) match_name = 'benches/' + match_name
var match_base = ends_with(match_name, '.cm') ? text(match_name, 0, -3) : match_name
if (bench_name != match_base) return
if (!match_name.startsWith('benches/')) match_name = 'benches/' + match_name
var match_base = match_name.endsWith('.cm') ? match_name.substring(0, match_name.length - 3) : match_name
if (bench_name != match_base) continue
}
bench_files.push(f)
}
push(bench_files, f)
}
})
return bench_files
}
@@ -184,7 +203,7 @@ function calibrate_batch_size(bench_fn, is_batch) {
// Find a batch size that takes at least MIN_SAMPLE_NS
while (n < MAX_BATCH_SIZE) {
// Ensure n is a valid number before calling
if (!is_number(n) || n < 1) {
if (typeof n != 'number' || n < 1) {
n = 1
break
}
@@ -198,7 +217,7 @@ function calibrate_batch_size(bench_fn, is_batch) {
// Double the batch size
var new_n = n * 2
// Check if multiplication produced a valid number
if (!is_number(new_n) || new_n > MAX_BATCH_SIZE) {
if (typeof new_n != 'number' || new_n > MAX_BATCH_SIZE) {
n = MAX_BATCH_SIZE
break
}
@@ -206,12 +225,12 @@ function calibrate_batch_size(bench_fn, is_batch) {
}
// Adjust to target sample duration
if (dt > 0 && dt < TARGET_SAMPLE_NS && is_number(n) && is_number(dt)) {
if (dt > 0 && dt < TARGET_SAMPLE_NS && typeof n == 'number' && typeof dt == 'number') {
var calc = n * TARGET_SAMPLE_NS / dt
if (is_number(calc) && calc > 0) {
var target_n = floor(calc)
if (typeof calc == 'number' && calc > 0) {
var target_n = number.floor(calc)
// Check if floor returned a valid number
if (is_number(target_n) && target_n > 0) {
if (typeof target_n == 'number' && target_n > 0) {
if (target_n > MAX_BATCH_SIZE) target_n = MAX_BATCH_SIZE
if (target_n < MIN_BATCH_SIZE) target_n = MIN_BATCH_SIZE
n = target_n
@@ -220,7 +239,7 @@ function calibrate_batch_size(bench_fn, is_batch) {
}
// Safety check - ensure we always return a valid batch size
if (!is_number(n) || n < 1) {
if (typeof n != 'number' || n < 1) {
n = 1
}
@@ -235,7 +254,7 @@ function run_single_bench(bench_fn, bench_name) {
// 1. Object with { setup, run, teardown } - structured format
// 2. Function that accepts (n) - batch format
// 3. Function that accepts () - legacy format
var is_structured = is_object(bench_fn) && bench_fn.run
var is_structured = typeof bench_fn == 'object' && bench_fn.run
var is_batch = false
var batch_size = 1
var setup_fn = null
@@ -266,7 +285,7 @@ function run_single_bench(bench_fn, bench_name) {
batch_size = calibrate_batch_size(calibrate_fn, is_batch)
// Safety check for structured benchmarks
if (!is_number(batch_size) || batch_size < 1) {
if (typeof batch_size != 'number' || batch_size < 1) {
batch_size = 1
}
} else {
@@ -288,9 +307,8 @@ function run_single_bench(bench_fn, bench_name) {
// Warmup phase
for (var i = 0; i < WARMUP_BATCHES; i++) {
// Ensure batch_size is valid before warmup
if (!is_number(batch_size) || batch_size < 1) {
var type_str = is_null(batch_size) ? 'null' : is_number(batch_size) ? 'number' : is_text(batch_size) ? 'text' : is_object(batch_size) ? 'object' : is_array(batch_size) ? 'array' : is_function(batch_size) ? 'function' : is_logical(batch_size) ? 'logical' : 'unknown'
log.console(`WARNING: batch_size became ${type_str} = ${batch_size}, resetting to 1`)
if (typeof batch_size != 'number' || batch_size < 1) {
log.console(`WARNING: batch_size became ${typeof batch_size} = ${batch_size}, resetting to 1`)
batch_size = 1
}
@@ -314,7 +332,7 @@ function run_single_bench(bench_fn, bench_name) {
// Measurement phase - collect SAMPLES timing samples
for (var i = 0; i < SAMPLES; i++) {
// Double-check batch_size is valid (should never happen, but defensive)
if (!is_number(batch_size) || batch_size < 1) {
if (typeof batch_size != 'number' || batch_size < 1) {
batch_size = 1
}
@@ -330,7 +348,7 @@ function run_single_bench(bench_fn, bench_name) {
if (teardown_fn) teardown_fn(state)
var ns_per_op = is_batch ? duration / batch_size : duration
push(timings_per_op, ns_per_op)
timings_per_op.push(ns_per_op)
} else {
var start = os.now()
if (is_batch) {
@@ -341,15 +359,15 @@ function run_single_bench(bench_fn, bench_name) {
var duration = os.now() - start
var ns_per_op = is_batch ? duration / batch_size : duration
push(timings_per_op, ns_per_op)
timings_per_op.push(ns_per_op)
}
}
// Calculate statistics
var mean_ns = mean(timings_per_op)
var median_ns = median(timings_per_op)
var min_ns = reduce(timings_per_op, min)
var max_ns = reduce(timings_per_op, max)
var min_ns = min_val(timings_per_op)
var max_ns = max_val(timings_per_op)
var stddev_ns = stddev(timings_per_op, mean_ns)
var p95_ns = percentile(timings_per_op, 95)
var p99_ns = percentile(timings_per_op, 99)
@@ -357,20 +375,20 @@ function run_single_bench(bench_fn, bench_name) {
// Calculate ops/s from median
var ops_per_sec = 0
if (median_ns > 0) {
ops_per_sec = floor(1000000000 / median_ns)
ops_per_sec = number.floor(1000000000 / median_ns)
}
return {
name: bench_name,
batch_size: batch_size,
samples: SAMPLES,
mean_ns: round(mean_ns),
median_ns: round(median_ns),
min_ns: round(min_ns),
max_ns: round(max_ns),
stddev_ns: round(stddev_ns),
p95_ns: round(p95_ns),
p99_ns: round(p99_ns),
mean_ns: number.round(mean_ns),
median_ns: number.round(median_ns),
min_ns: number.round(min_ns),
max_ns: number.round(max_ns),
stddev_ns: number.round(stddev_ns),
p95_ns: number.round(p95_ns),
p99_ns: number.round(p99_ns),
ops_per_sec: ops_per_sec
}
}
@@ -378,17 +396,17 @@ function run_single_bench(bench_fn, bench_name) {
// Format nanoseconds for display
function format_ns(ns) {
if (ns < 1000) return `${ns}ns`
if (ns < 1000000) return `${round(ns / 1000 * 100) / 100}µs`
if (ns < 1000000000) return `${round(ns / 1000000 * 100) / 100}ms`
return `${round(ns / 1000000000 * 100) / 100}s`
if (ns < 1000000) return `${number.round(ns / 1000 * 100) / 100}µs`
if (ns < 1000000000) return `${number.round(ns / 1000000 * 100) / 100}ms`
return `${number.round(ns / 1000000000 * 100) / 100}s`
}
// Format ops/sec for display
function format_ops(ops) {
if (ops < 1000) return `${ops} ops/s`
if (ops < 1000000) return `${round(ops / 1000 * 100) / 100}K ops/s`
if (ops < 1000000000) return `${round(ops / 1000000 * 100) / 100}M ops/s`
return `${round(ops / 1000000000 * 100) / 100}G ops/s`
if (ops < 1000000) return `${number.round(ops / 1000 * 100) / 100}K ops/s`
if (ops < 1000000000) return `${number.round(ops / 1000000 * 100) / 100}M ops/s`
return `${number.round(ops / 1000000000 * 100) / 100}G ops/s`
}
// Run benchmarks for a package
@@ -401,13 +419,14 @@ function run_benchmarks(package_name, specific_bench) {
total: 0
}
if (length(bench_files) == 0) return pkg_result
if (bench_files.length == 0) return pkg_result
if (package_name) log.console(`Running benchmarks for ${package_name}`)
else log.console(`Running benchmarks for local package`)
arrfor(bench_files, function(f) {
var mod_path = text(f, 0, -3)
for (var i = 0; i < bench_files.length; i++) {
var f = bench_files[i]
var mod_path = f.substring(0, f.length - 3)
var file_result = {
name: f,
@@ -420,22 +439,24 @@ function run_benchmarks(package_name, specific_bench) {
bench_mod = shop.use(mod_path, use_pkg)
var benches = []
if (is_function(bench_mod)) {
push(benches, {name: 'main', fn: bench_mod})
} else if (is_object(bench_mod)) {
arrfor(array(bench_mod), function(k) {
if (is_function(bench_mod[k]))
push(benches, {name: k, fn: bench_mod[k]})
})
if (typeof bench_mod == 'function') {
benches.push({name: 'main', fn: bench_mod})
} else if (typeof bench_mod == 'object') {
for (var k in bench_mod) {
if (typeof bench_mod[k] == 'function') {
benches.push({name: k, fn: bench_mod[k]})
}
}
}
if (length(benches) > 0) {
if (benches.length > 0) {
log.console(` ${f}`)
arrfor(benches, function(b) {
for (var j = 0; j < benches.length; j++) {
var b = benches[j]
try {
var result = run_single_bench(b.fn, b.name)
result.package = pkg_result.package
push(file_result.benchmarks, result)
file_result.benchmarks.push(result)
pkg_result.total++
log.console(` ${result.name}`)
@@ -452,10 +473,10 @@ function run_benchmarks(package_name, specific_bench) {
name: b.name,
error: e.toString()
}
push(file_result.benchmarks, error_result)
file_result.benchmarks.push(error_result)
pkg_result.total++
}
})
}
}
} catch (e) {
log.console(` Error loading ${f}: ${e}`)
@@ -464,14 +485,14 @@ function run_benchmarks(package_name, specific_bench) {
name: "load_module",
error: `Error loading module: ${e}`
}
push(file_result.benchmarks, error_result)
file_result.benchmarks.push(error_result)
pkg_result.total++
}
if (length(file_result.benchmarks) > 0) {
push(pkg_result.files, file_result)
if (file_result.benchmarks.length > 0) {
pkg_result.files.push(file_result)
}
}
})
return pkg_result
}
@@ -481,29 +502,29 @@ var all_results = []
if (all_pkgs) {
if (testlib.is_valid_package('.')) {
push(all_results, run_benchmarks(null, null))
all_results.push(run_benchmarks(null, null))
}
var packages = shop.list_packages()
arrfor(packages, function(pkg) {
push(all_results, run_benchmarks(pkg, null))
})
for (var i = 0; i < packages.length; i++) {
all_results.push(run_benchmarks(packages[i], null))
}
} else {
push(all_results, run_benchmarks(target_pkg, target_bench))
all_results.push(run_benchmarks(target_pkg, target_bench))
}
// Calculate totals
var total_benches = 0
arrfor(all_results, function(result) {
total_benches += result.total
})
for (var i = 0; i < all_results.length; i++) {
total_benches += all_results[i].total
}
log.console(`----------------------------------------`)
log.console(`Benchmarks: ${total_benches} total`)
// Generate reports
function generate_reports() {
var timestamp = text(floor(time.number()))
var timestamp = text(number.floor(time.number()))
var report_dir = shop.get_reports_dir() + '/bench_' + timestamp
testlib.ensure_dir(report_dir)
@@ -513,28 +534,34 @@ Total benchmarks: ${total_benches}
=== SUMMARY ===
`
arrfor(all_results, function(pkg_res) {
if (pkg_res.total == 0) return
for (var i = 0; i < all_results.length; i++) {
var pkg_res = all_results[i]
if (pkg_res.total == 0) continue
txt_report += `Package: ${pkg_res.package}\n`
arrfor(pkg_res.files, function(f) {
for (var j = 0; j < pkg_res.files.length; j++) {
var f = pkg_res.files[j]
txt_report += ` ${f.name}\n`
arrfor(f.benchmarks, function(b) {
for (var k = 0; k < f.benchmarks.length; k++) {
var b = f.benchmarks[k]
if (b.error) {
txt_report += ` ERROR ${b.name}: ${b.error}\n`
} else {
txt_report += ` ${b.name}: ${format_ns(b.median_ns)}/op (${format_ops(b.ops_per_sec)})\n`
}
})
})
})
}
}
}
txt_report += `\n=== DETAILED RESULTS ===\n`
arrfor(all_results, function(pkg_res) {
if (pkg_res.total == 0) return
for (var i = 0; i < all_results.length; i++) {
var pkg_res = all_results[i]
if (pkg_res.total == 0) continue
arrfor(pkg_res.files, function(f) {
arrfor(f.benchmarks, function(b) {
if (b.error) return
for (var j = 0; j < pkg_res.files.length; j++) {
var f = pkg_res.files[j]
for (var k = 0; k < f.benchmarks.length; k++) {
var b = f.benchmarks[k]
if (b.error) continue
txt_report += `\n${pkg_res.package}::${b.name}\n`
txt_report += ` batch_size: ${b.batch_size} samples: ${b.samples}\n`
@@ -546,28 +573,30 @@ Total benchmarks: ${total_benches}
txt_report += ` p95: ${format_ns(b.p95_ns)}\n`
txt_report += ` p99: ${format_ns(b.p99_ns)}\n`
txt_report += ` ops/s: ${format_ops(b.ops_per_sec)}\n`
})
})
})
}
}
}
testlib.ensure_dir(report_dir)
fd.slurpwrite(`${report_dir}/bench.txt`, stone(blob(txt_report)))
fd.slurpwrite(`${report_dir}/bench.txt`, stone(new blob(txt_report)))
log.console(`Report written to ${report_dir}/bench.txt`)
// Generate JSON per package
arrfor(all_results, function(pkg_res) {
if (pkg_res.total == 0) return
for (var i = 0; i < all_results.length; i++) {
var pkg_res = all_results[i]
if (pkg_res.total == 0) continue
var pkg_benches = []
arrfor(pkg_res.files, function(f) {
arrfor(f.benchmarks, function(benchmark) {
push(pkg_benches, benchmark)
})
})
for (var j = 0; j < pkg_res.files.length; j++) {
var f = pkg_res.files[j]
for (var k = 0; k < f.benchmarks.length; k++) {
pkg_benches.push(f.benchmarks[k])
}
}
var json_path = `${report_dir}/${replace(pkg_res.package, /\//, '_')}.json`
fd.slurpwrite(json_path, stone(blob(json.encode(pkg_benches))))
})
var json_path = `${report_dir}/${pkg_res.package.replace(/\//g, '_')}.json`
fd.slurpwrite(json_path, stone(new blob(json.encode(pkg_benches))))
}
}
generate_reports()


@@ -20,14 +20,14 @@ function make_shapes(n) {
for (var i = 0; i < n; i++) {
var o = { a: i }
o[`p${i}`] = i
push(out, o)
out.push(o)
}
return out
}
function make_packed_array(n) {
var a = []
for (var i = 0; i < n; i++) push(a, i)
for (var i = 0; i < n; i++) a.push(i)
return a
}
@@ -203,8 +203,8 @@ return {
var x = 0
for (var j = 0; j < n; j++) {
var a = []
for (var i = 0; i < 256; i++) push(a, i)
x = (x + length(a)) | 0
for (var i = 0; i < 256; i++) a.push(i)
x = (x + a.length) | 0
}
return blackhole(sink, x)
},
@@ -216,7 +216,7 @@ return {
for (var j = 0; j < n; j++) {
var s = ""
for (var i = 0; i < 16; i++) s = s + "x"
x = (x + length(s)) | 0
x = (x + s.length) | 0
}
return blackhole(sink, x)
},


@@ -1,5 +1,5 @@
function mainThread() {
var maxDepth = max(6, Number(arg[0] || 16));
var maxDepth = number.max(6, Number(arg[0] || 16));
var stretchDepth = maxDepth + 1;
var check = itemCheck(bottomUpTree(stretchDepth));
@@ -7,7 +7,7 @@ function mainThread() {
var longLivedTree = bottomUpTree(maxDepth);
for (var depth = 4; depth <= maxDepth; depth += 2) {
for (let depth = 4; depth <= maxDepth; depth += 2) {
var iterations = 1 << maxDepth - depth + 4;
work(iterations, depth);
}
@@ -16,8 +16,8 @@ function mainThread() {
}
function work(iterations, depth) {
var check = 0;
for (var i = 0; i < iterations; i++)
let check = 0;
for (let i = 0; i < iterations; i++)
check += itemCheck(bottomUpTree(depth));
log.console(`${iterations}\t trees of depth ${depth}\t check: ${check}`);
}
@@ -34,8 +34,8 @@ function itemCheck(node) {
function bottomUpTree(depth) {
return depth > 0
? TreeNode(bottomUpTree(depth - 1), bottomUpTree(depth - 1))
: TreeNode(null, null);
? new TreeNode(bottomUpTree(depth - 1), bottomUpTree(depth - 1))
: new TreeNode(null, null);
}
mainThread()


@@ -2,8 +2,8 @@ var blob = use('blob')
var math = use('math/radians')
function eratosthenes (n) {
var sieve = blob(n, true)
var sqrtN = whole(math.sqrt(n));
var sieve = new blob(n, true)
var sqrtN = number.whole(math.sqrt(n));
for (i = 2; i <= sqrtN; i++)
if (sieve.read_logical(i))
@@ -17,7 +17,7 @@ var sieve = eratosthenes(10000000);
stone(sieve)
var c = 0
for (var i = 0; i < length(sieve); i++)
for (var i = 0; i < sieve.length; i++)
if (sieve.read_logical(i)) c++
log.console(c)


@@ -1,6 +1,6 @@
function fannkuch(n) {
var perm1 = [n]
for (var i = 0; i < n; i++) perm1[i] = i
for (let i = 0; i < n; i++) perm1[i] = i
var perm = [n]
var count = [n]
var f = 0, flips = 0, nperm = 0, checksum = 0
@@ -18,7 +18,7 @@ function fannkuch(n) {
while (k != 0) {
i = 0
while (2*i < k) {
var t = perm[i]; perm[i] = perm[k-i]; perm[k-i] = t
let t = perm[i]; perm[i] = perm[k-i]; perm[k-i] = t
i += 1
}
k = perm[0]
@@ -34,10 +34,10 @@ function fannkuch(n) {
log.console( checksum )
return flips
}
var p0 = perm1[0]
let p0 = perm1[0]
i = 0
while (i < r) {
var j = i + 1
let j = i + 1
perm1[i] = perm1[j]
i = j
}


@@ -7,9 +7,9 @@ function fib(n) {
var now = time.number()
var arr = [1,2,3,4,5]
arrfor(arr, function(i) {
for (var i in arr) {
log.console(fib(28))
})
}
log.console(`elapsed: ${time.number()-now}`)


@@ -109,12 +109,12 @@ function benchArrayOps() {
var pushTime = measureTime(function() {
var arr = [];
for (var i = 0; i < iterations.medium; i++) {
push(arr, i);
arr.push(i);
}
});
var arr = [];
for (var i = 0; i < 10000; i++) push(arr, i);
for (var i = 0; i < 10000; i++) arr.push(i);
var accessTime = measureTime(function() {
var sum = 0;
@@ -126,7 +126,7 @@ function benchArrayOps() {
var iterateTime = measureTime(function() {
var sum = 0;
for (var j = 0; j < 1000; j++) {
for (var i = 0; i < length(arr); i++) {
for (var i = 0; i < arr.length; i++) {
sum += arr[i];
}
}
@@ -151,12 +151,13 @@ function benchObjectCreation() {
});
function Point(x, y) {
return {x,y}
this.x = x;
this.y = y;
}
var defructorTime = measureTime(function() {
for (var i = 0; i < iterations.medium; i++) {
var p = Point(i, i * 2);
var p = new Point(i, i * 2);
}
});
@@ -198,19 +199,19 @@ function benchStringOps() {
var strings = [];
for (var i = 0; i < 1000; i++) {
push(strings, "string" + i);
strings.push("string" + i);
}
var joinTime = measureTime(function() {
for (var i = 0; i < iterations.complex; i++) {
var result = text(strings, ",");
var result = strings.join(",");
}
});
var splitTime = measureTime(function() {
var str = "a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p";
for (var i = 0; i < iterations.medium; i++) {
var parts = array(str, ",");
var parts = str.split(",");
}
});
@@ -238,7 +239,7 @@ function benchArithmetic() {
var result = 1.5;
for (var i = 0; i < iterations.simple; i++) {
result = math.sine(result) + math.cosine(i * 0.01);
result = math.sqrt(abs(result)) + 0.1;
result = math.sqrt(number.abs(result)) + 0.1;
}
});
@@ -269,13 +270,13 @@ function benchClosures() {
var closureCreateTime = measureTime(function() {
var funcs = [];
for (var i = 0; i < iterations.medium; i++) {
push(funcs, makeAdder(i));
funcs.push(makeAdder(i));
}
});
var adders = [];
for (var i = 0; i < 1000; i++) {
push(adders, makeAdder(i));
adders.push(makeAdder(i));
}
var closureCallTime = measureTime(function() {


@@ -8,15 +8,15 @@ var w = h
log.console(`P4\n${w} ${h}`);
for (var y = 0; y < h; ++y) {
for (let y = 0; y < h; ++y) {
// Create a blob for the row - we need w bits
var row = blob(w);
var row = new blob(w);
for (var x = 0; x < w; ++x) {
for (let x = 0; x < w; ++x) {
zr = zi = tr = ti = 0;
cr = 2 * x / w - 1.5;
ci = 2 * y / h - 1;
for (var i = 0; i < iter && (tr + ti <= limit * limit); ++i) {
for (let i = 0; i < iter && (tr + ti <= limit * limit); ++i) {
zi = 2 * zr * zi + ci;
zr = tr - ti + cr;
tr = zr * zr;


@@ -3,11 +3,17 @@ var SOLAR_MASS = 4 * pi * pi;
var DAYS_PER_YEAR = 365.24;
function Body(x, y, z, vx, vy, vz, mass) {
return {x, y, z, vx, vy, vz, mass};
this.x = x;
this.y = y;
this.z = z;
this.vx = vx;
this.vy = vy;
this.vz = vz;
this.mass = mass;
}
function Jupiter() {
return Body(
return new Body(
4.84143144246472090e+00,
-1.16032004402742839e+00,
-1.03622044471123109e-01,
@@ -19,7 +25,7 @@ function Jupiter() {
}
function Saturn() {
return Body(
return new Body(
8.34336671824457987e+00,
4.12479856412430479e+00,
-4.03523417114321381e-01,
@@ -31,7 +37,7 @@ function Saturn() {
}
function Uranus() {
return Body(
return new Body(
1.28943695621391310e+01,
-1.51111514016986312e+01,
-2.23307578892655734e-01,
@@ -43,7 +49,7 @@ function Uranus() {
}
function Neptune() {
return Body(
return new Body(
1.53796971148509165e+01,
-2.59193146099879641e+01,
1.79258772950371181e-01,
@@ -55,7 +61,7 @@ function Neptune() {
}
function Sun() {
return Body(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, SOLAR_MASS);
return new Body(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, SOLAR_MASS);
}
var bodies = Array(Sun(), Jupiter(), Saturn(), Uranus(), Neptune());
@@ -64,7 +70,7 @@ function offsetMomentum() {
var px = 0;
var py = 0;
var pz = 0;
var size = length(bodies);
var size = bodies.length;
for (var i = 0; i < size; i++) {
var body = bodies[i];
var mass = body.mass;
@@ -80,7 +86,7 @@ function offsetMomentum() {
}
function advance(dt) {
var size = length(bodies);
var size = bodies.length;
for (var i = 0; i < size; i++) {
var bodyi = bodies[i];
@@ -121,7 +127,7 @@ function advance(dt) {
function energy() {
var e = 0;
var size = length(bodies);
var size = bodies.length;
for (var i = 0; i < size; i++) {
var bodyi = bodies[i];


@@ -9,7 +9,7 @@ var newarr = []
var accstr = ""
for (var i = 0; i < 10000; i++) {
accstr += i;
newarrpush(i.toString())
newarr.push(i.toString())
}
// Arrays to store timing results
var jsonDecodeTimes = [];
@@ -19,35 +19,34 @@ var notaDecodeTimes = [];
var notaSizes = [];
// Run 100 tests
for (var i = 0; i < 100; i++) {
for (let i = 0; i < 100; i++) {
// JSON Decode test
var start = os.now();
let start = os.now();
var jll = json.decode(ll);
jsonDecodeTimespush((os.now() - start) * 1000);
jsonDecodeTimes.push((os.now() - start) * 1000);
// JSON Encode test
start = os.now();
var jsonStr = JSON.stringify(jll);
jsonEncodeTimespush((os.now() - start) * 1000);
let jsonStr = JSON.stringify(jll);
jsonEncodeTimes.push((os.now() - start) * 1000);
// NOTA Encode test
start = os.now();
var nll = nota.encode(jll);
notaEncodeTimespush((os.now() - start) * 1000);
notaEncodeTimes.push((os.now() - start) * 1000);
// NOTA Decode test
start = os.now();
var oll = nota.decode(nll);
notaDecodeTimespush((os.now() - start) * 1000);
notaDecodeTimes.push((os.now() - start) * 1000);
}
// Calculate statistics
function getStats(arr) {
return {
avg: reduce(arr, (a,b) => a+b, 0) / length(arr),
min: reduce(arr, min),
max: reduce(arr, max)
};
def avg = arr.reduce((a, b) => a + b) / arr.length;
def min = number.min(...arr);
def max = number.max(...arr);
return { avg, min, max };
}
// Pretty print results


@@ -1,13 +1,13 @@
def math = use('math/radians');
const math = require('math/radians');
function A(i,j) {
return 1/((i+j)*(i+j+1)/2+i+1);
}
function Au(u,v) {
for (var i=0; i<length(u); ++i) {
for (var i=0; i<u.length; ++i) {
var t = 0;
for (var j=0; j<length(u); ++j)
for (var j=0; j<u.length; ++j)
t += A(i,j) * u[j];
v[i] = t;
@@ -15,9 +15,9 @@ function Au(u,v) {
}
function Atu(u,v) {
for (var i=0; i<length(u); ++i) {
for (var i=0; i<u.length; ++i) {
var t = 0;
for (var j=0; j<length(u); ++j)
for (var j=0; j<u.length; ++j)
t += A(j,i) * u[j];
v[i] = t;


@@ -14,18 +14,18 @@
// Helper to run a function repeatedly and measure total time in seconds.
// Returns elapsed time in seconds.
function measureTime(fn, iterations) {
var t1 = os.now();
for (var i = 0; i < iterations; i++) {
let t1 = os.now();
for (let i = 0; i < iterations; i++) {
fn();
}
var t2 = os.now();
let t2 = os.now();
return t2 - t1;
}
// We'll define a function that does `encode -> decode` for a given value:
function roundTripWota(value) {
var encoded = wota.encode(value);
var decoded = wota.decode(encoded);
let encoded = wota.encode(value);
let decoded = wota.decode(encoded);
// Not doing a deep compare here, just measuring performance.
// (We trust the test suite to verify correctness.)
}
@@ -63,9 +63,15 @@ def benchmarks = [
{
name: "Large Array (1k numbers)",
// A thousand random numbers
data: [ array(1000, i => i *0.5) ],
data: [ Array.from({length:1000}, (_, i) => i * 0.5) ],
iterations: 1000
},
{
name: "Large Binary Blob (256KB)",
// A 256KB ArrayBuffer
data: [ new Uint8Array(256 * 1024).buffer ],
iterations: 200
}
];
// Print a header
@@ -73,23 +79,28 @@ log.console("Wota Encode/Decode Benchmark");
log.console("===================\n");
// We'll run each benchmark scenario in turn.
arrfor(benchmarks, function(bench) {
var totalIterations = bench.iterations * length(bench.data);
for (let bench of benchmarks) {
// We'll measure how long it takes to do 'iterations' *for each test value*
// in bench.data. The total loop count is `bench.iterations * bench.data.length`.
// Then we compute an overall encode+decode throughput (ops/s).
let totalIterations = bench.iterations * bench.data.length;
// We'll define a function that does a roundTrip for *each* data item in bench.data
// to measure in one loop iteration. Then we multiply by bench.iterations.
function runAllData() {
arrfor(bench.data, roundTripWota)
for (let val of bench.data) {
roundTripWota(val);
}
}
var elapsedSec = measureTime(runAllData, bench.iterations);
var opsPerSec = (totalIterations / elapsedSec).toFixed(1);
let elapsedSec = measureTime(runAllData, bench.iterations);
let opsPerSec = (totalIterations / elapsedSec).toFixed(1);
log.console(`${bench.name}:`);
log.console(` Iterations: ${bench.iterations} × ${length(bench.data)} data items = ${totalIterations}`);
log.console(` Iterations: ${bench.iterations} × ${bench.data.length} data items = ${totalIterations}`);
log.console(` Elapsed: ${elapsedSec.toFixed(3)} s`);
log.console(` Throughput: ${opsPerSec} encode+decode ops/sec\n`);
})
}
// All done
log.console("Benchmark completed.\n");


@@ -13,7 +13,7 @@
//
// Parse command line arguments
if (length(arg) != 2) {
if (arg.length != 2) {
log.console('Usage: cell benchmark_wota_nota_json.ce <LibraryName> <ScenarioName>');
$stop()
}
@@ -32,7 +32,7 @@ def libraries = [
decode: wota.decode,
// wota produces an ArrayBuffer. We'll count `buffer.byteLength` as size.
getSize(encoded) {
return length(encoded);
return encoded.length;
}
},
{
@@ -41,7 +41,7 @@ def libraries = [
decode: nota.decode,
// nota also produces an ArrayBuffer:
getSize(encoded) {
return length(encoded);
return encoded.length;
}
},
{
@@ -50,8 +50,9 @@ def libraries = [
decode: json.decode,
// json produces a JS string. We'll measure its UTF-16 code unit length
// as a rough "size". Alternatively, you could convert to UTF-8 for
// a more accurate byte size. Here we just use `string.length`.
getSize(encodedStr) {
return length(encodedStr);
return encodedStr.length;
}
}
];
@@ -97,7 +98,7 @@ def benchmarks = [
},
{
name: "large_array",
data: [ array(1000, i => i) ],
data: [ Array.from({length:1000}, (_, i) => i) ],
iterations: 1000
},
];
@@ -107,9 +108,9 @@ def benchmarks = [
////////////////////////////////////////////////////////////////////////////////
function measureTime(fn) {
var start = os.now();
let start = os.now();
fn();
var end = os.now();
let end = os.now();
return (end - start); // in seconds
}
@@ -127,19 +128,19 @@ function runBenchmarkForLibrary(lib, bench) {
// Pre-store the encoded results for all items so we can measure decode time
// in a separate pass. Also measure total size once.
var encodedList = [];
var totalSize = 0;
let encodedList = [];
let totalSize = 0;
// 1) Measure ENCODING
var encodeTime = measureTime(() => {
for (var i = 0; i < bench.iterations; i++) {
let encodeTime = measureTime(() => {
for (let i = 0; i < bench.iterations; i++) {
// For each data item, encode it
for (var j = 0; j < length(bench.data); j++) {
var e = lib.encode(bench.data[j]);
for (let j = 0; j < bench.data.length; j++) {
let e = lib.encode(bench.data[j]);
// store only in the very first iteration, so we can decode them later
// but do not store them every iteration or we blow up memory.
if (i == 0) {
push(encodedList, e);
encodedList.push(e);
totalSize += lib.getSize(e);
}
}
@@ -147,9 +148,13 @@ function runBenchmarkForLibrary(lib, bench) {
});
// 2) Measure DECODING
var decodeTime = measureTime(() => {
for (var i = 0; i < bench.iterations; i++) {
arrfor(encodedList, lib.decode)
let decodeTime = measureTime(() => {
for (let i = 0; i < bench.iterations; i++) {
// decode everything we stored during the first iteration
for (let e of encodedList) {
let decoded = lib.decode(e);
// not verifying correctness here, just measuring speed
}
}
});
@@ -161,18 +166,18 @@ function runBenchmarkForLibrary(lib, bench) {
////////////////////////////////////////////////////////////////////////////////
// Find the requested library and scenario
var lib = libraries[find(libraries, l => l.name == lib_name)];
var bench = benchmarks[find(benchmarks, b => b.name == scenario_name)];
var lib = libraries.find(l => l.name == lib_name);
var bench = benchmarks.find(b => b.name == scenario_name);
if (!lib) {
log.console('Unknown library:', lib_name);
log.console('Available libraries:', text(array(libraries, l => l.name), ', '));
log.console('Available libraries:', libraries.map(l => l.name).join(', '));
$stop()
}
if (!bench) {
log.console('Unknown scenario:', scenario_name);
log.console('Available scenarios:', text(array(benchmarks, b => b.name), ', '));
log.console('Available scenarios:', benchmarks.map(b => b.name).join(', '));
$stop()
}
@@ -180,7 +185,7 @@ if (!bench) {
var { encodeTime, decodeTime, totalSize } = runBenchmarkForLibrary(lib, bench);
// Output json for easy parsing by hyperfine or other tools
var totalOps = bench.iterations * length(bench.data);
var totalOps = bench.iterations * bench.data.length;
var result = {
lib: lib_name,
scenario: scenario_name,


@@ -1,11 +1,9 @@
// cell build [<locator>] - Build dynamic libraries locally for the current machine
// cell build [options] - Build dynamic libraries locally for the current machine
//
// Usage:
// cell build Build dynamic libraries for all packages in shop
// cell build . Build dynamic library for current directory package
// cell build <locator> Build dynamic library for specific package
// cell build Build dynamic libraries for all packages
// cell build -p <pkg> Build dynamic library for specific package
// cell build -t <target> Cross-compile dynamic libraries for target platform
// cell build -b <type> Build type: release (default), debug, or minsize
var build = use('build')
var shop = use('internal/shop')
@@ -14,28 +12,25 @@ var fd = use('fd')
var target = null
var target_package = null
var buildtype = 'release'
var force_rebuild = false
var dry_run = false
var buildtype = 'debug'
for (var i = 0; i < length(args); i++) {
for (var i = 0; i < args.length; i++) {
if (args[i] == '-t' || args[i] == '--target') {
if (i + 1 < length(args)) {
if (i + 1 < args.length) {
target = args[++i]
} else {
log.error('-t requires a target')
$stop()
}
} else if (args[i] == '-p' || args[i] == '--package') {
// Legacy support for -p flag
if (i + 1 < length(args)) {
if (i + 1 < args.length) {
target_package = args[++i]
} else {
log.error('-p requires a package name')
$stop()
}
} else if (args[i] == '-b' || args[i] == '--buildtype') {
if (i + 1 < length(args)) {
if (i + 1 < args.length) {
buildtype = args[++i]
if (buildtype != 'release' && buildtype != 'debug' && buildtype != 'minsize') {
log.error('Invalid buildtype: ' + buildtype + '. Must be release, debug, or minsize')
@@ -45,30 +40,13 @@ for (var i = 0; i < length(args); i++) {
log.error('-b requires a buildtype (release, debug, minsize)')
$stop()
}
} else if (args[i] == '--force') {
force_rebuild = true
} else if (args[i] == '--dry-run') {
dry_run = true
} else if (args[i] == '--list-targets') {
log.console('Available targets:')
var targets = build.list_targets()
for (var t = 0; t < length(targets); t++) {
for (var t = 0; t < targets.length; t++) {
log.console(' ' + targets[t])
}
$stop()
} else if (!starts_with(args[i], '-') && !target_package) {
// Positional argument - treat as package locator
target_package = args[i]
}
}
// Resolve local paths to absolute paths
if (target_package) {
if (target_package == '.' || starts_with(target_package, './') || starts_with(target_package, '../') || fd.is_dir(target_package)) {
var resolved = fd.realpath(target_package)
if (resolved) {
target_package = resolved
}
}
}
@@ -80,16 +58,16 @@ if (!target) {
if (target && !build.has_target(target)) {
log.error('Invalid target: ' + target)
log.console('Available targets: ' + text(build.list_targets(), ', '))
log.console('Available targets: ' + build.list_targets().join(', '))
$stop()
}
var packages = shop.list_packages()
log.console('Preparing packages...')
arrfor(packages, function(package) {
if (package == 'core') return
for (var package of packages) {
if (package == 'core') continue
shop.extract(package)
})
}
if (target_package) {
// Build single package
@@ -110,7 +88,7 @@ if (target_package) {
var success = 0
var failed = 0
for (var i = 0; i < length(results); i++) {
for (var i = 0; i < results.length; i++) {
if (results[i].library) {
success++
} else if (results[i].error) {

build.cm (269 lines changed)

@@ -28,15 +28,15 @@ function get_local_dir() {
// Replace sigils in a string
// Currently supports: $LOCAL -> .cell/local full path
function replace_sigils(str) {
return replace(str, '$LOCAL', get_local_dir())
return str.replaceAll('$LOCAL', get_local_dir())
}
// Replace sigils in an array of flags
function replace_sigils_array(flags) {
var result = []
arrfor(flags, function(flag) {
push(result, replace_sigils(flag))
})
for (var i = 0; i < flags.length; i++) {
result.push(replace_sigils(flags[i]))
}
return result
}
@@ -73,7 +73,7 @@ Build.detect_host_target = function() {
// ============================================================================
function content_hash(str) {
var bb = stone(blob(str))
var bb = stone(new blob(str))
return text(crypto.blake2(bb, 32), 'h')
}
@@ -83,12 +83,14 @@ function get_build_dir() {
function ensure_dir(path) {
if (fd.stat(path).isDirectory) return
var parts = array(path, '/')
var current = starts_with(path, '/') ? '/' : ''
for (var i = 0; i < length(parts); i++) {
var parts = path.split('/')
var current = path.startsWith('/') ? '/' : ''
for (var i = 0; i < parts.length; i++) {
if (parts[i] == '') continue
current += parts[i] + '/'
if (!fd.stat(current).isDirectory) fd.mkdir(current)
if (!fd.stat(current).isDirectory) {
fd.mkdir(current)
}
}
}
@@ -105,7 +107,7 @@ Build.compile_file = function(pkg, file, target, buildtype = 'release') {
var src_path = pkg_dir + '/' + file
if (!fd.is_file(src_path)) {
throw Error('Source file not found: ' + src_path)
throw new Error('Source file not found: ' + src_path)
}
// Get flags (with sigil replacement)
@@ -121,32 +123,33 @@ Build.compile_file = function(pkg, file, target, buildtype = 'release') {
// Add buildtype-specific flags
if (buildtype == 'release') {
cmd_parts = array(cmd_parts, ['-O3', '-DNDEBUG'])
cmd_parts.push('-O3', '-DNDEBUG')
} else if (buildtype == 'debug') {
cmd_parts = array(cmd_parts, ['-O2', '-g'])
cmd_parts.push('-O2', '-g')
} else if (buildtype == 'minsize') {
cmd_parts = array(cmd_parts, ['-Os', '-DNDEBUG'])
cmd_parts.push('-Os', '-DNDEBUG')
}
push(cmd_parts, '-DCELL_USE_NAME=' + sym_name)
push(cmd_parts, '-I"' + pkg_dir + '"')
cmd_parts.push('-DCELL_USE_NAME=' + sym_name)
cmd_parts.push('-I"' + pkg_dir + '"')
// Add package CFLAGS (resolve relative -I paths)
arrfor(cflags, function(flag) {
if (starts_with(flag, '-I') && !starts_with(flag, '-I/')) {
flag = '-I"' + pkg_dir + '/' + text(flag, 2) + '"'
for (var i = 0; i < cflags.length; i++) {
var flag = cflags[i]
if (flag.startsWith('-I') && !flag.startsWith('-I/')) {
flag = '-I"' + pkg_dir + '/' + flag.substring(2) + '"'
}
cmd_parts.push(flag)
}
push(cmd_parts, flag)
})
// Add target CFLAGS
arrfor(target_cflags, function(flag) {
push(cmd_parts, flag)
})
for (var i = 0; i < target_cflags.length; i++) {
cmd_parts.push(target_cflags[i])
}
push(cmd_parts, '"' + src_path + '"')
cmd_parts.push('"' + src_path + '"')
var cmd_str = text(cmd_parts, ' ')
var cmd_str = cmd_parts.join(' ')
// Content hash: command + file content
var file_content = fd.slurp(src_path)
@@ -167,7 +170,7 @@ Build.compile_file = function(pkg, file, target, buildtype = 'release') {
log.console('Compiling ' + file)
var ret = os.system(full_cmd)
if (ret != 0) {
throw Error('Compilation failed: ' + file)
throw new Error('Compilation failed: ' + file)
}
return obj_path
@@ -179,10 +182,10 @@ Build.build_package = function(pkg, target = Build.detect_host_target(), exclude
var c_files = pkg_tools.get_c_files(pkg, target, exclude_main)
var objects = []
arrfor(c_files, function(file) {
var obj = Build.compile_file(pkg, file, target, buildtype)
push(objects, obj)
})
for (var i = 0; i < c_files.length; i++) {
var obj = Build.compile_file(pkg, c_files[i], target, buildtype)
objects.push(obj)
}
return objects
}
@@ -190,50 +193,23 @@ Build.build_package = function(pkg, target = Build.detect_host_target(), exclude
// ============================================================================
// Dynamic library building
// ============================================================================
// Compute link key from all inputs that affect the dylib output
function compute_link_key(objects, ldflags, target_ldflags, target, cc) {
// Sort objects for deterministic hash
var sorted_objects = sort(objects)
// Build a string representing all link inputs
var parts = []
push(parts, 'target:' + target)
push(parts, 'cc:' + cc)
arrfor(sorted_objects, function(obj) {
// Object paths are content-addressed, so the path itself is the hash
push(parts, 'obj:' + obj)
})
arrfor(ldflags, function(flag) {
push(parts, 'ldflag:' + flag)
})
arrfor(target_ldflags, function(flag) {
push(parts, 'target_ldflag:' + flag)
})
return content_hash(text(parts, '\n'))
}
// Build a dynamic library for a package
// Output goes to .cell/lib/<package_name>.<ext>
// Dynamic libraries do NOT link against core; undefined symbols are resolved at dlopen time
// Uses content-addressed store + symlink for caching
Build.build_dynamic = function(pkg, target = Build.detect_host_target(), buildtype = 'release') {
var objects = Build.build_package(pkg, target, true, buildtype) // exclude main.c
if (length(objects) == 0) {
if (objects.length == 0) {
log.console('No C files in ' + pkg)
return null
}
var lib_dir = shop.get_lib_dir()
var store_dir = lib_dir + '/store'
ensure_dir(lib_dir)
ensure_dir(store_dir)
var lib_name = shop.lib_name_for_package(pkg)
var dylib_ext = toolchains[target].system == 'windows' ? '.dll' : (toolchains[target].system == 'darwin' ? '.dylib' : '.so')
var stable_path = lib_dir + '/' + lib_name + dylib_ext
var lib_path = lib_dir + '/' + lib_name + dylib_ext
// Get link flags (with sigil replacement)
var ldflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'LDFLAGS', target))
@@ -243,91 +219,64 @@ Build.build_dynamic = function(pkg, target = Build.detect_host_target(), buildty
var local_dir = get_local_dir()
var tc = toolchains[target]
// Resolve relative -L paths in ldflags for hash computation
var resolved_ldflags = []
arrfor(ldflags, function(flag) {
if (starts_with(flag, '-L') && !starts_with(flag, '-L/')) {
flag = '-L"' + pkg_dir + '/' + text(flag, 2) + '"'
}
push(resolved_ldflags, flag)
})
// Compute link key
var link_key = compute_link_key(objects, resolved_ldflags, target_ldflags, target, cc)
var store_path = store_dir + '/' + lib_name + '-' + link_key + dylib_ext
// Check if already linked in store
if (fd.is_file(store_path)) {
// Ensure symlink points to the store file
if (fd.is_link(stable_path)) {
var current_target = fd.readlink(stable_path)
if (current_target == store_path) {
// Already up to date
return stable_path
}
fd.unlink(stable_path)
} else if (fd.is_file(stable_path)) {
fd.unlink(stable_path)
}
fd.symlink(store_path, stable_path)
return stable_path
}
// Build link command
var cmd_parts = [cc, '-shared', '-fPIC']
// Platform-specific flags for undefined symbols (resolved at dlopen) and size optimization
if (tc.system == 'darwin') {
cmd_parts = array(cmd_parts, [
'-undefined', 'dynamic_lookup',
'-Wl,-dead_strip',
'-Wl,-install_name,' + stable_path,
'-Wl,-rpath,@loader_path/../local',
'-Wl,-rpath,' + local_dir
])
// Allow undefined symbols - they will be resolved when dlopen'd into the main executable
cmd_parts.push('-undefined', 'dynamic_lookup')
// Dead-strip unused code
cmd_parts.push('-Wl,-dead_strip')
// rpath for .cell/local libraries
cmd_parts.push('-Wl,-rpath,@loader_path/../local')
cmd_parts.push('-Wl,-rpath,' + local_dir)
} else if (tc.system == 'linux') {
cmd_parts = array(cmd_parts, [
'-Wl,--allow-shlib-undefined',
'-Wl,--gc-sections',
'-Wl,-rpath,$ORIGIN/../local',
'-Wl,-rpath,' + local_dir
])
// Allow undefined symbols at link time
cmd_parts.push('-Wl,--allow-shlib-undefined')
// Garbage collect unused sections
cmd_parts.push('-Wl,--gc-sections')
// rpath for .cell/local libraries
cmd_parts.push('-Wl,-rpath,$ORIGIN/../local')
cmd_parts.push('-Wl,-rpath,' + local_dir)
} else if (tc.system == 'windows') {
// Windows DLLs: use --allow-shlib-undefined for mingw
push(cmd_parts, '-Wl,--allow-shlib-undefined')
cmd_parts.push('-Wl,--allow-shlib-undefined')
}
// Add .cell/local to library search path
push(cmd_parts, '-L"' + local_dir + '"')
cmd_parts.push('-L"' + local_dir + '"')
arrfor(objects, function(obj) {
push(cmd_parts, '"' + obj + '"')
})
for (var i = 0; i < objects.length; i++) {
cmd_parts.push('"' + objects[i] + '"')
}
// Do NOT link against core library - symbols resolved at dlopen time
cmd_parts = array(cmd_parts, resolved_ldflags)
cmd_parts = array(cmd_parts, target_ldflags)
push(cmd_parts, '-o')
push(cmd_parts, '"' + store_path + '"')
// Add LDFLAGS (resolve relative -L paths)
for (var i = 0; i < ldflags.length; i++) {
var flag = ldflags[i]
if (flag.startsWith('-L') && !flag.startsWith('-L/')) {
flag = '-L"' + pkg_dir + '/' + flag.substring(2) + '"'
}
cmd_parts.push(flag)
}
var cmd_str = text(cmd_parts, ' ')
for (var i = 0; i < target_ldflags.length; i++) {
cmd_parts.push(target_ldflags[i])
}
log.console('Linking ' + lib_name + dylib_ext)
cmd_parts.push('-o', '"' + lib_path + '"')
var cmd_str = cmd_parts.join(' ')
log.console('Linking ' + lib_path)
var ret = os.system(cmd_str)
if (ret != 0) {
throw Error('Linking failed: ' + pkg)
throw new Error('Linking failed: ' + pkg)
}
// Update symlink to point to the new store file
if (fd.is_link(stable_path)) {
fd.unlink(stable_path)
} else if (fd.is_file(stable_path)) {
fd.unlink(stable_path)
}
fd.symlink(store_path, stable_path)
return stable_path
return lib_path
}
// ============================================================================
@@ -343,36 +292,38 @@ Build.build_static = function(packages, target = Build.detect_host_target(), out
var seen_flags = {}
// Compile all packages
arrfor(packages, function(pkg) {
for (var i = 0; i < packages.length; i++) {
var pkg = packages[i]
var is_core = (pkg == 'core')
// For core, include main.c; for others, exclude it
var objects = Build.build_package(pkg, target, !is_core, buildtype)
arrfor(objects, function(obj) {
push(all_objects, obj)
})
for (var j = 0; j < objects.length; j++) {
all_objects.push(objects[j])
}
// Collect LDFLAGS (with sigil replacement)
var ldflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'LDFLAGS', target))
var pkg_dir = shop.get_package_dir(pkg)
// Deduplicate based on the entire LDFLAGS string for this package
var ldflags_key = pkg + ':' + text(ldflags, ' ')
var ldflags_key = pkg + ':' + ldflags.join(' ')
if (!seen_flags[ldflags_key]) {
seen_flags[ldflags_key] = true
arrfor(ldflags, function(flag) {
for (var j = 0; j < ldflags.length; j++) {
var flag = ldflags[j]
// Resolve relative -L paths
if (starts_with(flag, '-L') && !starts_with(flag, '-L/')) {
flag = '-L"' + pkg_dir + '/' + text(flag, 2) + '"'
if (flag.startsWith('-L') && !flag.startsWith('-L/')) {
flag = '-L"' + pkg_dir + '/' + flag.substring(2) + '"'
}
all_ldflags.push(flag)
}
}
push(all_ldflags, flag)
})
}
})
if (length(all_objects) == 0) {
throw Error('No object files to link')
if (all_objects.length == 0) {
throw new Error('No object files to link')
}
// Link
@@ -380,32 +331,32 @@ Build.build_static = function(packages, target = Build.detect_host_target(), out
var target_ldflags = toolchains[target].c_link_args || []
var exe_ext = toolchains[target].system == 'windows' ? '.exe' : ''
if (!ends_with(output, exe_ext) && exe_ext) {
if (!output.endsWith(exe_ext) && exe_ext) {
output = output + exe_ext
}
var cmd_parts = [cc]
arrfor(all_objects, function(obj) {
push(cmd_parts, '"' + obj + '"')
})
for (var i = 0; i < all_objects.length; i++) {
cmd_parts.push('"' + all_objects[i] + '"')
}
arrfor(all_ldflags, function(flag) {
push(cmd_parts, flag)
})
for (var i = 0; i < all_ldflags.length; i++) {
cmd_parts.push(all_ldflags[i])
}
arrfor(target_ldflags, function(flag) {
push(cmd_parts, flag)
})
for (var i = 0; i < target_ldflags.length; i++) {
cmd_parts.push(target_ldflags[i])
}
push(cmd_parts, '-o', '"' + output + '"')
cmd_parts.push('-o', '"' + output + '"')
var cmd_str = text(cmd_parts, ' ')
var cmd_str = cmd_parts.join(' ')
log.console('Linking ' + output)
var ret = os.system(cmd_str)
if (ret != 0) {
throw Error('Linking failed with command: ' + cmd_str)
throw new Error('Linking failed with command: ' + cmd_str)
}
log.console('Built ' + output)
@@ -424,30 +375,30 @@ Build.build_all_dynamic = function(target, buildtype = 'release') {
var results = []
// Build core first
if (find(packages, 'core') != null) {
if (packages.indexOf('core') >= 0) {
try {
var lib = Build.build_dynamic('core', target, buildtype)
push(results, { package: 'core', library: lib })
results.push({ package: 'core', library: lib })
} catch (e) {
log.error('Failed to build core: ' + text(e))
push(results, { package: 'core', error: e })
results.push({ package: 'core', error: e })
}
}
// Build other packages
arrfor(packages, function(pkg) {
if (pkg == 'core') return
for (var i = 0; i < packages.length; i++) {
var pkg = packages[i]
if (pkg == 'core') continue
try {
var lib = Build.build_dynamic(pkg, target, buildtype)
push(results, { package: pkg, library: lib })
results.push({ package: pkg, library: lib })
} catch (e) {
log.error('Failed to build ' + pkg + ': ')
log.console(e.message)
log.console(e.stack)
push(results, { package: pkg, error: e })
log.error(e)
results.push({ package: pkg, error: e })
}
}
})
return results
}

cellfs.cm (181 lines changed)

@@ -17,7 +17,30 @@ var writepath = "."
function normalize_path(path) {
if (!path) return ""
// Remove leading/trailing slashes and normalize
return replace(path, /^\/+|\/+$/, "")
return path.replace(/^\/+|\/+$/g, "")
}
// Helper to get directory from path
function dirname(path) {
var idx = path.lastIndexOf("/")
if (idx == -1) return ""
return path.substring(0, idx)
}
// Helper to get basename from path
function basename(path) {
var idx = path.lastIndexOf("/")
if (idx == -1) return path
return path.substring(idx + 1)
}
// Helper to join paths
function join_paths(base, rel) {
base = base.replace(/\/+$/, "")
rel = rel.replace(/^\/+/, "")
if (!base) return rel
if (!rel) return base
return base + "/" + rel
}
// Check if a file exists in a specific mount
@@ -36,7 +59,7 @@ function mount_exists(mount, path) {
return false
}
} else { // fs
var full_path = fd.join_paths(mount.source, path)
var full_path = join_paths(mount.source, path)
try {
var st = fd.stat(full_path)
return st.isFile || st.isDirectory
@@ -63,7 +86,7 @@ function is_directory(path) {
return false;
}
} else { // fs
var full_path = fd.join_paths(mount.source, path)
var full_path = join_paths(mount.source, path)
try {
var st = fd.stat(full_path)
return st.isDirectory
@@ -79,50 +102,44 @@ function resolve(path, must_exist) {
path = normalize_path(path)
// Check for named mount
if (starts_with(path, "@")) {
var idx = search(path, "/")
if (path.startsWith("@")) {
var idx = path.indexOf("/")
var mount_name = ""
var rel_path = ""
if (idx == null) {
mount_name = text(path, 1)
if (idx == -1) {
mount_name = path.substring(1)
rel_path = ""
} else {
mount_name = text(path, 1, idx)
rel_path = text(path, idx + 1)
mount_name = path.substring(1, idx)
rel_path = path.substring(idx + 1)
}
// Find named mount
var mount = null
arrfor(mounts, function(m) {
for (var m of mounts) {
if (m.name == mount_name) {
mount = m
return true
break
}
}
}, false, true)
if (!mount) {
throw Error("Unknown mount point: @" + mount_name)
throw new Error("Unknown mount point: @" + mount_name)
}
return { mount: mount, path: rel_path }
}
// Search path
var found_mount = null
arrfor(mounts, function(mount) {
for (var mount of mounts) {
if (mount_exists(mount, path)) {
found_mount = { mount: mount, path: path }
return true
return { mount: mount, path: path }
}
}, false, true)
if (found_mount) {
return found_mount
}
if (must_exist) {
throw Error("File not found in any mount: " + path)
throw new Error("File not found in any mount: " + path)
}
}
@@ -157,8 +174,8 @@ function mount(source, name) {
mount_info.zip_blob = blob // keep blob alive
} else {
var zip = miniz.read(blob)
if (!is_object(zip) || !is_function(zip.count)) {
throw Error("Invalid archive file (not zip or qop): " + source)
if (!zip || typeof zip.count != 'function') {
throw new Error("Invalid archive file (not zip or qop): " + source)
}
mount_info.type = 'zip'
@@ -166,32 +183,36 @@ function mount(source, name) {
mount_info.zip_blob = blob // keep blob alive
}
} else {
throw Error("Unsupported mount source type: " + source)
throw new Error("Unsupported mount source type: " + source)
}
push(mounts, mount_info)
mounts.push(mount_info)
}
// Unmount
function unmount(name_or_source) {
mounts = filter(mounts, function(mount) {
return mount.name != name_or_source && mount.source != name_or_source
})
for (var i = 0; i < mounts.length; i++) {
if (mounts[i].name == name_or_source || mounts[i].source == name_or_source) {
mounts.splice(i, 1)
return
}
}
throw new Error("Mount not found: " + name_or_source)
}
// Read file
function slurp(path) {
var res = resolve(path, true)
if (!res) throw Error("File not found: " + path)
if (!res) throw new Error("File not found: " + path)
if (res.mount.type == 'zip') {
return res.mount.handle.slurp(res.path)
} else if (res.mount.type == 'qop') {
var data = res.mount.handle.read(res.path)
if (!data) throw Error("File not found in qop: " + path)
if (!data) throw new Error("File not found in qop: " + path)
return data
} else {
var full_path = fd.join_paths(res.mount.source, res.path)
var full_path = join_paths(res.mount.source, res.path)
return fd.slurp(full_path)
}
}
@@ -208,7 +229,7 @@ function slurpwrite(path, data) {
// Check existence
function exists(path) {
var res = resolve(path, false)
if (starts_with(path, "@")) {
if (path.startsWith("@")) {
return mount_exists(res.mount, res.path)
}
return res != null
@@ -217,7 +238,7 @@ function exists(path) {
// Stat
function stat(path) {
var res = resolve(path, true)
if (!res) throw Error("File not found: " + path)
if (!res) throw new Error("File not found: " + path)
if (res.mount.type == 'zip') {
var mod = res.mount.handle.mod(res.path)
@@ -228,14 +249,14 @@ function stat(path) {
}
} else if (res.mount.type == 'qop') {
var s = res.mount.handle.stat(res.path)
if (!s) throw Error("File not found in qop: " + path)
if (!s) throw new Error("File not found in qop: " + path)
return {
filesize: s.size,
modtime: s.modtime,
isDirectory: s.isDirectory
}
} else {
var full_path = fd.join_paths(res.mount.source, res.path)
var full_path = join_paths(res.mount.source, res.path)
var s = fd.stat(full_path)
return {
filesize: s.size,
@@ -247,7 +268,7 @@ function stat(path) {
// Get search paths
function searchpath() {
return array(mounts)
return mounts.slice()
}
// Mount a package using the shop system
@@ -261,7 +282,7 @@ function mount_package(name) {
var dir = shop.get_package_dir(name)
if (!dir) {
throw Error("Package not found: " + name)
throw new Error("Package not found: " + name)
}
mount(dir, name)
@@ -275,16 +296,16 @@ function match(str, pattern) {
function rm(path) {
var res = resolve(path, true)
if (res.mount.type != 'fs') throw Error("Cannot delete from non-fs mount")
if (res.mount.type != 'fs') throw new Error("Cannot delete from non-fs mount")
var full_path = fd.join_paths(res.mount.source, res.path)
var full_path = join_paths(res.mount.source, res.path)
var st = fd.stat(full_path)
if (st.isDirectory) fd.rmdir(full_path)
else fd.unlink(full_path)
}
function mkdir(path) {
var full = fd.join_paths(writepath, path)
var full = join_paths(writepath, path)
fd.mkdir(full)
}
@@ -303,7 +324,7 @@ function prefdir(org, app) {
function realdir(path) {
var res = resolve(path, false)
if (!res) return null
return fd.join_paths(res.mount.source, res.path)
return join_paths(res.mount.source, res.path)
}
function enumerate(path, recurse) {
@@ -316,21 +337,21 @@ function enumerate(path, recurse) {
var list = fd.readdir(curr_full)
if (!list) return
arrfor(list, function(item) {
for (var item of list) {
var item_rel = rel_prefix ? rel_prefix + "/" + item : item
push(results, item_rel)
results.push(item_rel)
if (recurse) {
var st = fd.stat(fd.join_paths(curr_full, item))
var st = fd.stat(join_paths(curr_full, item))
if (st.isDirectory) {
visit(fd.join_paths(curr_full, item), item_rel)
visit(join_paths(curr_full, item), item_rel)
}
}
}
})
}
if (res.mount.type == 'fs') {
var full = fd.join_paths(res.mount.source, res.path)
var full = join_paths(res.mount.source, res.path)
var st = fd.stat(full)
if (st && st.isDirectory) {
visit(full, "")
@@ -338,29 +359,29 @@ function enumerate(path, recurse) {
} else if (res.mount.type == 'qop') {
var all = res.mount.handle.list()
var prefix = res.path ? res.path + "/" : ""
var prefix_len = length(prefix)
var prefix_len = prefix.length
// Use a set to avoid duplicates if we are simulating directories
var seen = {}
arrfor(all, function(p) {
if (starts_with(p, prefix)) {
var rel = text(p, prefix_len)
if (length(rel) == 0) return
for (var p of all) {
if (p.startsWith(prefix)) {
var rel = p.substring(prefix_len)
if (rel.length == 0) continue
if (!recurse) {
var slash = search(rel, '/')
if (slash != null) {
rel = text(rel, 0, slash)
var slash = rel.indexOf('/')
if (slash != -1) {
rel = rel.substring(0, slash)
}
}
if (!seen[rel]) {
seen[rel] = true
push(results, rel)
results.push(rel)
}
}
}
})
}
return results
@@ -372,25 +393,17 @@ function globfs(globs, dir) {
var results = []
function check_neg(path) {
var result = false
arrfor(globs, function(g) {
if (starts_with(g, "!") && wildstar.match(text(g, 1), path, wildstar.WM_WILDSTAR)) {
result = true
return true
for (var g of globs) {
if (g.startsWith("!") && wildstar.match(g.substring(1), path, wildstar.WM_WILDSTAR)) return true;
}
}, false, true)
return result
return false;
}
function check_pos(path) {
var result = false
arrfor(globs, function(g) {
if (!starts_with(g, "!") && wildstar.match(g, path, wildstar.WM_WILDSTAR)) {
result = true
return true
for (var g of globs) {
if (!g.startsWith("!") && wildstar.match(g, path, wildstar.WM_WILDSTAR)) return true;
}
}, false, true)
return result
return false;
}
function visit(curr_full, rel_prefix) {
@@ -399,10 +412,10 @@ function globfs(globs, dir) {
var list = fd.readdir(curr_full)
if (!list) return
arrfor(list, function(item) {
for (var item of list) {
var item_rel = rel_prefix ? rel_prefix + "/" + item : item
var child_full = fd.join_paths(curr_full, item)
var child_full = join_paths(curr_full, item)
var st = fd.stat(child_full)
if (st.isDirectory) {
@@ -411,14 +424,14 @@ function globfs(globs, dir) {
}
} else {
if (!check_neg(item_rel) && check_pos(item_rel)) {
push(results, item_rel)
results.push(item_rel)
}
}
}
})
}
if (res.mount.type == 'fs') {
var full = fd.join_paths(res.mount.source, res.path)
var full = join_paths(res.mount.source, res.path)
var st = fd.stat(full)
if (st && st.isDirectory) {
visit(full, "")
@@ -426,18 +439,18 @@ function globfs(globs, dir) {
} else if (res.mount.type == 'qop') {
var all = res.mount.handle.list()
var prefix = res.path ? res.path + "/" : ""
var prefix_len = length(prefix)
var prefix_len = prefix.length
arrfor(all, function(p) {
if (starts_with(p, prefix)) {
var rel = text(p, prefix_len)
if (length(rel) == 0) return
for (var p of all) {
if (p.startsWith(prefix)) {
var rel = p.substring(prefix_len)
if (rel.length == 0) continue
if (!check_neg(rel) && check_pos(rel)) {
push(results, rel)
results.push(rel)
}
}
}
})
}
return results

216
clean.ce
View File

@@ -1,218 +1,26 @@
// cell clean [<scope>] - Remove cached material to force refetch/rebuild
//
// Usage:
// cell clean Clean build outputs for current directory package
// cell clean . Clean build outputs for current directory package
// cell clean <locator> Clean build outputs for specific package
// cell clean shop Clean entire shop
// cell clean world Clean all world packages
//
// Options:
// --build Remove build outputs only (default)
// --fetch Remove fetched sources only
// --all Remove both build outputs and fetched sources
// --deep Apply to full dependency closure
// --dry-run Show what would be deleted
// cell clean - Remove build artifacts from global shop
var shop = use('internal/shop')
var pkg = use('package')
var fd = use('fd')
var shop = use('internal/shop')
var scope = null
var clean_build = false
var clean_fetch = false
var deep = false
var dry_run = false
var build_dir = shop.get_shop_path() + '/build'
for (var i = 0; i < length(args); i++) {
if (args[i] == '--build') {
clean_build = true
} else if (args[i] == '--fetch') {
clean_fetch = true
} else if (args[i] == '--all') {
clean_build = true
clean_fetch = true
} else if (args[i] == '--deep') {
deep = true
} else if (args[i] == '--dry-run') {
dry_run = true
} else if (args[i] == '--help' || args[i] == '-h') {
log.console("Usage: cell clean [<scope>] [options]")
log.console("")
log.console("Remove cached material to force refetch/rebuild.")
log.console("")
log.console("Scopes:")
log.console(" <locator> Clean specific package")
log.console(" shop Clean entire shop")
log.console(" world Clean all world packages")
log.console("")
log.console("Options:")
log.console(" --build Remove build outputs only (default)")
log.console(" --fetch Remove fetched sources only")
log.console(" --all Remove both build outputs and fetched sources")
log.console(" --deep Apply to full dependency closure")
log.console(" --dry-run Show what would be deleted")
if (!fd.is_dir(build_dir)) {
log.console("No build directory found at " + build_dir)
$stop()
} else if (!starts_with(args[i], '-')) {
scope = args[i]
}
return
}
// Default to --build if nothing specified
if (!clean_build && !clean_fetch) {
clean_build = true
}
log.console("Cleaning build artifacts...")
// Default scope to current directory
if (!scope) {
scope = '.'
}
// Resolve local paths for single package scope
var is_shop_scope = (scope == 'shop')
var is_world_scope = (scope == 'world')
if (!is_shop_scope && !is_world_scope) {
if (scope == '.' || starts_with(scope, './') || starts_with(scope, '../') || fd.is_dir(scope)) {
var resolved = fd.realpath(scope)
if (resolved) {
scope = resolved
}
}
}
var files_to_delete = []
var dirs_to_delete = []
// Gather packages to clean
var packages_to_clean = []
if (is_shop_scope) {
packages_to_clean = shop.list_packages()
} else if (is_world_scope) {
// For now, world is the same as shop
packages_to_clean = shop.list_packages()
} else {
// Single package
push(packages_to_clean, scope)
if (deep) {
// Remove the build directory
try {
var deps = pkg.gather_dependencies(scope)
arrfor(deps, function(dep) {
push(packages_to_clean, dep)
})
fd.rm(build_dir)
log.console("Build directory removed: " + build_dir)
} catch (e) {
// Skip if can't read dependencies
}
}
log.error(e)
}
// Gather files to clean
var lib_dir = shop.get_lib_dir()
var build_dir = shop.get_build_dir()
var packages_dir = replace(shop.get_package_dir(''), /\/$/, '') // Get base packages dir
if (clean_build) {
if (is_shop_scope) {
// Clean entire build and lib directories
if (fd.is_dir(build_dir)) {
push(dirs_to_delete, build_dir)
}
if (fd.is_dir(lib_dir)) {
push(dirs_to_delete, lib_dir)
}
} else {
// Clean specific package libraries
arrfor(packages_to_clean, function(p) {
if (p == 'core') return
var lib_name = shop.lib_name_for_package(p)
var dylib_ext = '.dylib'
var lib_path = lib_dir + '/' + lib_name + dylib_ext
if (fd.is_file(lib_path)) {
push(files_to_delete, lib_path)
}
// Also check for .so and .dll
var so_path = lib_dir + '/' + lib_name + '.so'
var dll_path = lib_dir + '/' + lib_name + '.dll'
if (fd.is_file(so_path)) {
push(files_to_delete, so_path)
}
if (fd.is_file(dll_path)) {
push(files_to_delete, dll_path)
}
})
}
}
if (clean_fetch) {
if (is_shop_scope) {
// Clean entire packages directory (dangerous!)
if (fd.is_dir(packages_dir)) {
push(dirs_to_delete, packages_dir)
}
} else {
// Clean specific package directories
arrfor(packages_to_clean, function(p) {
if (p == 'core') return
var pkg_dir = shop.get_package_dir(p)
if (fd.is_dir(pkg_dir) || fd.is_link(pkg_dir)) {
push(dirs_to_delete, pkg_dir)
}
})
}
}
// Execute or report
if (dry_run) {
log.console("Would delete:")
if (length(files_to_delete) == 0 && length(dirs_to_delete) == 0) {
log.console(" (nothing to clean)")
} else {
arrfor(files_to_delete, function(f) {
log.console(" [file] " + f)
})
arrfor(dirs_to_delete, function(d) {
log.console(" [dir] " + d)
})
}
} else {
var deleted_count = 0
arrfor(files_to_delete, function(f) {
try {
fd.unlink(f)
log.console("Deleted: " + f)
deleted_count++
} catch (e) {
log.error("Failed to delete " + f + ": " + e)
}
})
arrfor(dirs_to_delete, function(d) {
try {
if (fd.is_link(d)) {
fd.unlink(d)
} else {
fd.rmdir(d, 1) // recursive
}
log.console("Deleted: " + d)
deleted_count++
} catch (e) {
log.error("Failed to delete " + d + ": " + e)
}
})
if (deleted_count == 0) {
log.console("Nothing to clean.")
} else {
log.console("")
log.console("Clean complete: " + text(deleted_count) + " item(s) deleted.")
}
}
log.console("Clean complete!")
$stop()

View File

@@ -7,7 +7,7 @@ var fd = use('fd')
var http = use('http')
var miniz = use('miniz')
if (length(args) < 2) {
if (args.length < 2) {
log.console("Usage: cell clone <origin> <path>")
log.console("Clones a cell package to a local path and links it.")
$stop()
@@ -18,7 +18,7 @@ var origin = args[0]
var target_path = args[1]
// Resolve target path to absolute
if (target_path == '.' || starts_with(target_path, './') || starts_with(target_path, '../')) {
if (target_path == '.' || target_path.startsWith('./') || target_path.startsWith('../')) {
var resolved = fd.realpath(target_path)
if (resolved) {
target_path = resolved
@@ -27,12 +27,12 @@ if (target_path == '.' || starts_with(target_path, './') || starts_with(target_p
var cwd = fd.realpath('.')
if (target_path == '.') {
target_path = cwd
} else if (starts_with(target_path, './')) {
target_path = cwd + text(target_path, 1)
} else if (starts_with(target_path, '../')) {
} else if (target_path.startsWith('./')) {
target_path = cwd + target_path.substring(1)
} else if (target_path.startsWith('../')) {
// Go up one directory from cwd
var parent = fd.dirname(cwd)
target_path = parent + text(target_path, 2)
var parent = cwd.substring(0, cwd.lastIndexOf('/'))
target_path = parent + target_path.substring(2)
}
}
}
@@ -92,13 +92,14 @@ try {
for (var i = 0; i < count; i++) {
if (zip.is_directory(i)) continue
var filename = zip.get_filename(i)
var first_slash = search(filename, '/')
if (first_slash == null) continue
if (first_slash + 1 >= length(filename)) continue
var parts = filename.split('/')
if (parts.length <= 1) continue
var rel_path = text(filename, first_slash + 1)
// Skip the first directory (repo-commit prefix)
parts.shift()
var rel_path = parts.join('/')
var full_path = target_path + '/' + rel_path
var dir_path = fd.dirname(full_path)
var dir_path = full_path.substring(0, full_path.lastIndexOf('/'))
// Ensure directory exists
if (!fd.is_dir(dir_path)) {

View File

@@ -31,30 +31,30 @@ function print_help() {
// Parse a dot-notation key into path segments
function parse_key(key) {
return array(key, '.')
return key.split('.')
}
// Get a value from nested object using path
function get_nested(obj, path) {
var current = obj
arrfor(path, function(segment) {
if (is_null(current) || !is_object(current)) return null
for (var segment of path) {
if (!current || typeof current != 'object') return null
current = current[segment]
})
}
return current
}
// Set a value in nested object using path
function set_nested(obj, path, value) {
var current = obj
for (var i = 0; i < length(path) - 1; i++) {
for (var i = 0; i < path.length - 1; i++) {
var segment = path[i]
if (is_null(current[segment]) || !is_object(current[segment])) {
if (!current[segment] || typeof current[segment] != 'object') {
current[segment] = {}
}
current = current[segment]
}
current[path[length(path) - 1]] = value
current[path[path.length - 1]] = value
}
// Parse value string into appropriate type
@@ -64,7 +64,7 @@ function parse_value(str) {
if (str == 'false') return false
// Number (including underscores)
var num_str = replace(str, /_/g, '')
var num_str = str.replace(/_/g, '')
if (/^-?\d+$/.test(num_str)) return parseInt(num_str)
if (/^-?\d*\.\d+$/.test(num_str)) return parseFloat(num_str)
@@ -74,29 +74,29 @@ function parse_value(str) {
// Format value for display
function format_value(val) {
if (is_text(val)) return '"' + val + '"'
if (is_number(val) && val >= 1000) {
if (typeof val == 'string') return '"' + val + '"'
if (typeof val == 'number' && val >= 1000) {
// Add underscores to large numbers
return replace(val.toString(), /\B(?=(\d{3})+(?!\d))/g, '_')
return val.toString().replace(/\B(?=(\d{3})+(?!\d))/g, '_')
}
return text(val)
}
// Print configuration tree recursively
function print_config(obj, prefix = '') {
arrfor(array(obj), function(key) {
for (var key in obj) {
var val = obj[key]
var full_key = prefix ? prefix + '.' + key : key
if (is_object(val))
if (isa(val, object))
print_config(val, full_key)
else
log.console(full_key + ' = ' + format_value(val))
})
}
}
// Main command handling
if (length(args) == 0) {
if (args.length == 0) {
print_help()
$stop()
return
@@ -110,9 +110,6 @@ if (!config) {
}
var command = args[0]
var key
var path
var value
switch (command) {
case 'help':
@@ -128,14 +125,14 @@ switch (command) {
break
case 'get':
if (length(args) < 2) {
if (args.length < 2) {
log.error("Usage: cell config get <key>")
$stop()
return
}
key = args[1]
path = parse_key(key)
value = get_nested(config, path)
var key = args[1]
var path = parse_key(key)
var value = get_nested(config, path)
if (value == null) {
log.error("Key not found: " + key)
@@ -148,7 +145,7 @@ switch (command) {
break
case 'set':
if (length(args) < 3) {
if (args.length < 3) {
log.error("Usage: cell config set <key> <value>")
$stop()
return
@@ -164,8 +161,8 @@ switch (command) {
'ar_timer', 'actor_memory', 'net_service',
'reply_timeout', 'actor_max', 'stack_max'
]
if (find(valid_system_keys, path[1]) == null) {
log.error("Invalid system key. Valid keys: " + text(valid_system_keys, ', '))
if (!valid_system_keys.includes(path[1])) {
log.error("Invalid system key. Valid keys: " + valid_system_keys.join(', '))
$stop()
return
}
@@ -178,7 +175,7 @@ switch (command) {
case 'actor':
// Handle actor-specific configuration
if (length(args) < 3) {
if (args.length < 3) {
log.error("Usage: cell config actor <name> <command> [options]")
$stop()
return
@@ -193,7 +190,7 @@ switch (command) {
switch (actor_cmd) {
case 'list':
if (length(array(config.actors[actor_name])) == 0) {
if (array(config.actors[actor_name]).length == 0) {
log.console("No configuration for actor: " + actor_name)
} else {
log.console("# Configuration for actor: " + actor_name)
@@ -203,14 +200,14 @@ switch (command) {
break
case 'get':
if (length(args) < 4) {
if (args.length < 4) {
log.error("Usage: cell config actor <name> get <key>")
$stop()
return
}
key = args[3]
path = parse_key(key)
value = get_nested(config.actors[actor_name], path)
var key = args[3]
var path = parse_key(key)
var value = get_nested(config.actors[actor_name], path)
if (value == null) {
log.error("Key not found for actor " + actor_name + ": " + key)
@@ -220,15 +217,15 @@ switch (command) {
break
case 'set':
if (length(args) < 5) {
if (args.length < 5) {
log.error("Usage: cell config actor <name> set <key> <value>")
$stop()
return
}
key = args[3]
var key = args[3]
var value_str = args[4]
path = parse_key(key)
value = parse_value(value_str)
var path = parse_key(key)
var value = parse_value(value_str)
set_nested(config.actors[actor_name], path, value)
pkg.save_config(config)

View File

@@ -231,7 +231,7 @@ JSValue js_crypto_unlock(JSContext *js, JSValue self, int argc, JSValue *argv) {
static const JSCFunctionListEntry js_crypto_funcs[] = {
JS_CFUNC_DEF("shared", 2, js_crypto_shared),
JS_CFUNC_DEF("blake2", 2, js_crypto_blake2),
JS_CFUNC_DEF("blake2", 1, js_crypto_blake2),
JS_CFUNC_DEF("sign", 2, js_crypto_sign),
JS_CFUNC_DEF("verify", 3, js_crypto_verify),
JS_CFUNC_DEF("lock", 3, js_crypto_lock),

View File

@@ -230,10 +230,10 @@ var json = use('json')
Check type or prototype chain.
```javascript
is_number(42) // true
is_text("hi") // true
is_array([1,2]) // true
is_object({}) // true
isa(42, number) // true
isa("hi", text) // true
isa([1,2], array) // true
isa({}, object) // true
isa(child, parent) // true if parent is in prototype chain
```
@@ -270,7 +270,7 @@ Cell supports regex patterns in string functions, but not standalone regex objec
```javascript
text.search("hello world", /world/)
replace("hello", /l/g, "L")
text.replace("hello", /l/g, "L")
```
## Error Handling

View File

@@ -35,7 +35,7 @@ cell hello
## Standard Library
- [text](library/text.md) — string manipulation
- [number](library/number.md) — numeric operations (functions are global: `floor()`, `max()`, etc.)
- [number](library/number.md) — numeric operations
- [array](library/array.md) — array utilities
- [object](library/object.md) — object utilities
- [blob](library/blob.md) — binary data

View File

@@ -46,98 +46,98 @@ number("0xff", "j") // 255
## Methods
### abs(n)
### number.abs(n)
Absolute value.
```javascript
abs(-5) // 5
abs(5) // 5
number.abs(-5) // 5
number.abs(5) // 5
```
### sign(n)
### number.sign(n)
Returns -1, 0, or 1.
```javascript
sign(-5) // -1
sign(0) // 0
sign(5) // 1
number.sign(-5) // -1
number.sign(0) // 0
number.sign(5) // 1
```
### floor(n, place)
### number.floor(n, place)
Round down.
```javascript
floor(4.9) // 4
floor(4.567, 2) // 4.56
number.floor(4.9) // 4
number.floor(4.567, 2) // 4.56
```
### ceiling(n, place)
### number.ceiling(n, place)
Round up.
```javascript
ceiling(4.1) // 5
ceiling(4.123, 2) // 4.13
number.ceiling(4.1) // 5
number.ceiling(4.123, 2) // 4.13
```
### round(n, place)
### number.round(n, place)
Round to nearest.
```javascript
round(4.5) // 5
round(4.567, 2) // 4.57
number.round(4.5) // 5
number.round(4.567, 2) // 4.57
```
### trunc(n, place)
### number.trunc(n, place)
Truncate toward zero.
```javascript
trunc(4.9) // 4
trunc(-4.9) // -4
number.trunc(4.9) // 4
number.trunc(-4.9) // -4
```
### whole(n)
### number.whole(n)
Get the integer part.
```javascript
whole(4.9) // 4
whole(-4.9) // -4
number.whole(4.9) // 4
number.whole(-4.9) // -4
```
### fraction(n)
### number.fraction(n)
Get the fractional part.
```javascript
fraction(4.75) // 0.75
number.fraction(4.75) // 0.75
```
### min(...values)
### number.min(...values)
Return the smallest value.
```javascript
min(3, 1, 4, 1, 5) // 1
number.min(3, 1, 4, 1, 5) // 1
```
### max(...values)
### number.max(...values)
Return the largest value.
```javascript
max(3, 1, 4, 1, 5) // 5
number.max(3, 1, 4, 1, 5) // 5
```
### remainder(dividend, divisor)
### number.remainder(dividend, divisor)
Compute remainder.
```javascript
remainder(17, 5) // 2
number.remainder(17, 5) // 2
```

View File

@@ -70,18 +70,18 @@ text.search("hello world", "xyz") // null
text.search("hello hello", "hello", 1) // 6
```
### text.replace(text, target, replacement, cap)
### text.replace(text, target, replacement, limit)
Replace occurrences of `target` with `replacement`. If `cap` is not specified, replaces all occurrences.
Replace occurrences of `target` with `replacement`.
```javascript
text.replace("hello", "l", "L") // "heLLo" (replaces all)
text.replace("hello", "l", "L", 1) // "heLlo" (replaces first only)
text.replace("hello", "l", "L") // "heLLo"
text.replace("hello", "l", "L", 1) // "heLlo"
// With function
text.replace("hello", "l", function(match, pos) {
return pos == 2 ? "L" : match
}) // "heLLo" (replaces all by default)
}) // "heLlo"
```
### text.format(text, collection, transformer)

View File

@@ -0,0 +1,231 @@
# Managed Stack Frames Implementation Plan
This document outlines the requirements and invariants for implementing fully managed stack frames in QuickJS, eliminating recursion through the C stack for JS->JS calls.
## Overview
The goal is to maintain interpreter state entirely on managed stacks (value stack + frame stack) rather than relying on C stack frames. This enables:
- **Call IC fast path**: Direct dispatch to C functions without js_call_c_function overhead
- **Proper stack traces**: Error().stack works correctly even through optimized paths
- **Tail call optimization**: Possible without C stack growth
- **Debugging/profiling**: Full interpreter state always inspectable
## Current State
- Property IC: Implemented with per-function polymorphic IC (up to 4 shapes per site)
- Call IC: Infrastructure exists but disabled (`CALL_IC_ENABLED 0`) because it bypasses stack frame setup required for Error().stack
## Golden Invariant
**At any time, the entire live interpreter state must be reconstructible from:**
```
(ctx->value_stack, value_top) + (ctx->frame_stack, frame_top)
```
No critical state may live only in C locals.
## Implementation Requirements
### 1. Offset Semantics (use `size_t` / `uint32_t`)
Replace pointer-based addressing with offset-based addressing:
```c
typedef struct JSStackFrame {
uint32_t sp_offset; // Offset into ctx->value_stack
uint32_t var_offset; // Start of local variables
uint32_t arg_offset; // Start of arguments
// ... continuation info below
} JSStackFrame;
```
**Rationale**: Offsets remain valid when the stack buffer is reallocated; raw pointers into the old buffer become dangling.
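A minimal sketch of the growth path, assuming a growable `ctx->value_stack` with `value_top`/`value_size` bookkeeping (`value_size` and the `value_stack_grow` helper are illustrative, not existing quickjs.c code):

```c
/* Illustrative only: after js_realloc the buffer may move, so any JSValue*
 * captured before the call is dangling, while a uint32_t offset into
 * ctx->value_stack remains valid. */
static int value_stack_grow(JSContext *ctx, uint32_t needed)
{
    if (ctx->value_top + needed <= ctx->value_size)
        return 0;
    uint32_t new_size = ctx->value_size * 2 + needed;
    JSValue *buf = js_realloc(ctx, ctx->value_stack,
                              new_size * sizeof(JSValue));
    if (!buf)
        return -1;              /* allocation failure */
    ctx->value_stack = buf;     /* base pointer may have changed */
    ctx->value_size = new_size;
    return 0;
}

/* Slots are therefore addressed only through offsets. */
#define VALUE_AT(ctx, off) ((ctx)->value_stack[(off)])
```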
### 2. Consistent `sp_offset` Semantics
Define clearly and consistently:
- `sp_offset` = current stack pointer offset from `ctx->value_stack`
- On function entry: `sp_offset` points to first free slot after arguments
- On function exit: `sp_offset` restored to caller's expected position (see the sketches below)
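A hedged sketch of the entry side, assuming the `JSStackFrame` fields above plus a `ctx->frame_stack`/`frame_top` pair, and taking `b->var_count` to be the callee's local count:

```c
/* Illustrative helper: arguments are already on the value stack starting at
 * arg_offset; locals follow them, and sp_offset begins at the first free slot. */
static JSStackFrame *push_frame(JSContext *ctx, JSFunctionBytecode *b,
                                uint32_t arg_offset, int argc)
{
    JSStackFrame *sf = &ctx->frame_stack[ctx->frame_top++];
    sf->b = b;
    sf->arg_offset = arg_offset;
    sf->var_offset = arg_offset + (uint32_t)argc;
    sf->sp_offset  = sf->var_offset + b->var_count;
    ctx->value_top = sf->sp_offset;   /* reserve the local slots */
    return sf;
}
```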
### 3. Continuation Info (Caller State Restoration)
Each frame must store enough to restore caller state on return:
```c
typedef struct JSStackFrame {
// ... other fields
// Continuation info
const uint8_t *caller_pc; // Return address in caller's bytecode
uint32_t caller_sp_offset; // Caller's stack pointer
JSFunctionBytecode *caller_b; // Caller's bytecode (for IC cache)
// Current function info
JSFunctionBytecode *b; // Current function's bytecode
JSValue *var_buf; // Can be offset-based
JSValue *arg_buf; // Can be offset-based
JSValue this_val;
} JSStackFrame;
```
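The matching exit side is sketched below; it restores the caller purely from the recorded continuation fields, so no C-stack state is needed (helper name and exact signature are illustrative):

```c
/* Illustrative return path: resume the caller from continuation info and
 * leave the call result on top of its stack. */
static void pop_frame(JSContext *ctx, const uint8_t **ppc,
                      JSFunctionBytecode **pb, JSValue ret_val)
{
    JSStackFrame *sf = &ctx->frame_stack[--ctx->frame_top];
    *ppc = sf->caller_pc;                     /* resume after the call opcode */
    *pb  = sf->caller_b;                      /* caller's bytecode / IC cache */
    ctx->value_top = sf->caller_sp_offset;    /* drop callee args and locals  */
    ctx->value_stack[ctx->value_top++] = ret_val;
}
```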
### 4. Exception Handler Stack Depth Restoration
Exception handlers must record the `sp_offset` at handler entry so `throw` can restore the correct stack depth:
```c
typedef struct JSExceptionHandler {
uint32_t sp_offset; // Stack depth to restore on throw
const uint8_t *catch_pc; // Where to jump on exception
// ...
} JSExceptionHandler;
```
On `throw`:
1. Unwind frame stack to find appropriate handler
2. Restore `sp_offset` to handler's recorded value
3. Push exception value
4. Jump to `catch_pc` (a sketch of this unwind loop follows)
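A hedged sketch of that unwind, assuming handlers are kept per frame in `sf->handlers`/`sf->handler_top` (that storage is an assumption; only `sp_offset` and `catch_pc` come from the struct above):

```c
/* Illustrative only: walk frames from the top until one has a live handler.
 * A real implementation would also free values above the restored depth. */
static int throw_unwind(JSContext *ctx, JSValue exc, const uint8_t **ppc)
{
    while (ctx->frame_top > 0) {
        JSStackFrame *sf = &ctx->frame_stack[ctx->frame_top - 1];
        if (sf->handler_top > 0) {
            JSExceptionHandler *h = &sf->handlers[--sf->handler_top];
            ctx->value_top = h->sp_offset;            /* restore stack depth */
            ctx->value_stack[ctx->value_top++] = exc; /* push exception value */
            *ppc = h->catch_pc;                       /* jump to the handler  */
            return 0;
        }
        ctx->frame_top--;   /* no handler here: keep unwinding */
    }
    return -1;              /* uncaught: propagate out of the interpreter */
}
```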
### 5. Aliased `argv` Handling
When `arguments` object exists, `argv` may be aliased. The frame must track this:
```c
typedef struct JSStackFrame {
// ...
uint16_t flags;
#define JS_FRAME_ALIASED_ARGV (1 << 0)
#define JS_FRAME_STRICT (1 << 1)
// ...
JSObject *arguments_obj; // Non-NULL if arguments object created
} JSStackFrame;
```
When `JS_FRAME_ALIASED_ARGV` is set, writes to `arguments[i]` must update the corresponding local variable.
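A sketch of what that forwarding could look like; the hook name, the `arg_count` field, and the non-aliased fallback are assumptions, while the value-handling calls are public QuickJS API:

```c
/* Illustrative write path for arguments[i] when the frame's argv is aliased. */
static int arguments_set_element(JSContext *ctx, JSStackFrame *sf,
                                 uint32_t idx, JSValue val)
{
    if ((sf->flags & JS_FRAME_ALIASED_ARGV) && idx < sf->arg_count) {
        JSValue *slot = &ctx->value_stack[sf->arg_offset + idx];
        JS_FreeValue(ctx, *slot);          /* release the old argument value */
        *slot = JS_DupValue(ctx, val);     /* the local variable sees the write */
        return 0;
    }
    /* Not aliased: store on the arguments object itself. */
    return JS_SetPropertyUint32(ctx,
                                JS_MKPTR(JS_TAG_OBJECT, sf->arguments_obj),
                                idx, JS_DupValue(ctx, val));
}
```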
### 6. Stack Trace Accuracy (`sf->cur_pc`)
**Critical**: `sf->cur_pc` must be updated before any operation that could:
- Throw an exception
- Call into another function
- Trigger GC
Currently the interpreter does:
```c
sf->cur_pc = pc; // Before potentially-throwing ops
```
With managed frames, ensure this is consistently done or use a different mechanism (e.g., store pc in frame on every call).
### 7. GC Integration
The GC must be able to mark all live values on the managed stacks:
```c
void js_gc_mark_value_stack(JSRuntime *rt) {
for (JSContext *ctx = rt->context_list; ctx; ctx = ctx->link) {
JSValue *p = ctx->value_stack;
JSValue *end = ctx->value_stack + ctx->value_top;
while (p < end) {
JS_MarkValue(rt, *p);
p++;
}
}
}
void js_gc_mark_frame_stack(JSRuntime *rt) {
for (JSContext *ctx = rt->context_list; ctx; ctx = ctx->link) {
JSStackFrame *sf = ctx->frame_stack;
JSStackFrame *end = ctx->frame_stack + ctx->frame_top;
while (sf < end) {
JS_MarkValue(rt, sf->this_val);
// Mark any other JSValue fields in frame
sf++;
}
}
}
```
### 8. Main Interpreter Loop Changes
Transform from recursive to iterative:
```c
// Current (recursive):
JSValue JS_CallInternal(...) {
// ...
CASE(OP_call):
// Recursive call to JS_CallInternal
ret = JS_CallInternal(ctx, func, ...);
// ...
}
// Target (iterative):
JSValue JS_CallInternal(...) {
// ...
CASE(OP_call):
// Push new frame, update pc to callee entry
push_frame(ctx, ...);
pc = new_func->byte_code_buf;
BREAK; // Continue in same loop iteration
CASE(OP_return):
// Pop frame, restore caller state
ret_val = sp[-1];
pop_frame(ctx, &pc, &sp, &b);
sp[0] = ret_val;
BREAK; // Continue executing caller
// ...
}
```
## Call IC Integration (After Managed Frames)
Once managed frames are complete, Call IC becomes safe:
```c
CASE(OP_call_method):
// ... resolve method ...
if (JS_VALUE_GET_TAG(method) == JS_TAG_OBJECT) {
JSObject *p = JS_VALUE_GET_OBJ(method);
// Check Call IC
CallICEntry *entry = call_ic_lookup(cache, pc_offset, p->shape);
if (entry && entry->cfunc) {
// Direct C call - safe because frame is on managed stack
push_minimal_frame(ctx, pc, sp_offset);
ret = entry->cfunc(ctx, this_val, argc, argv);
pop_minimal_frame(ctx);
// Handle return...
}
}
// Slow path: full call
```
## Testing Strategy
1. **Stack trace tests**: Verify Error().stack works through all call patterns (a host-side sketch follows this list)
2. **Exception tests**: Verify throw/catch restores correct stack depth
3. **GC stress tests**: Verify all values are properly marked during GC
4. **Benchmark**: Compare performance before/after
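As a concrete starting point for (1), a minimal host-side check using only the public `quickjs.h` API (the script contents and assertions are illustrative):

```c
#include "quickjs.h"
#include <assert.h>
#include <string.h>

int main(void)
{
    JSRuntime *rt = JS_NewRuntime();
    JSContext *ctx = JS_NewContext(rt);
    const char *src =
        "function inner() { throw new Error('boom'); }\n"
        "function outer() { inner(); }\n"
        "outer();\n";
    JSValue r = JS_Eval(ctx, src, strlen(src), "trace.js", JS_EVAL_TYPE_GLOBAL);
    assert(JS_IsException(r));                       /* the throw escaped */
    JSValue exc = JS_GetException(ctx);
    JSValue stack = JS_GetPropertyStr(ctx, exc, "stack");
    const char *s = JS_ToCString(ctx, stack);
    assert(s && strstr(s, "inner") && strstr(s, "outer")); /* both frames visible */
    JS_FreeCString(ctx, s);
    JS_FreeValue(ctx, stack);
    JS_FreeValue(ctx, exc);
    JS_FreeValue(ctx, r);
    JS_FreeContext(ctx);
    JS_FreeRuntime(rt);
    return 0;
}
```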
## Migration Steps
1. [ ] Add offset fields to JSStackFrame alongside existing pointers
2. [ ] Create push_frame/pop_frame helper functions
3. [ ] Convert OP_call to use push_frame instead of recursion (JS->JS calls)
4. [ ] Convert OP_return to use pop_frame
5. [ ] Update exception handling to use offset-based stack restoration
6. [ ] Update GC to walk managed stacks
7. [ ] Remove/deprecate recursive JS_CallInternal calls for JS functions
8. [ ] Enable Call IC for C functions
9. [ ] Benchmark and optimize
## References
- Current IC implementation: `source/quickjs.c` lines 12567-12722 (ICCache, prop_ic_*)
- Current stack frame: `source/quickjs.c` JSStackFrame definition
- OP_call_method: `source/quickjs.c` lines 13654-13718

View File

@@ -0,0 +1,234 @@
// HTTP Download Actor
// Handles download requests and progress queries
var http = use('http');
var os = use('os');
// Actor state
var state = {
downloading: false,
current_url: null,
total_bytes: 0,
downloaded_bytes: 0,
start_time: 0,
error: null,
connection: null,
download_msg: null,
chunks: []
};
// Helper to calculate progress percentage
function get_progress() {
if (state.total_bytes == 0) {
return 0;
}
return number.round((state.downloaded_bytes / state.total_bytes) * 100);
}
// Helper to format status response
function get_status() {
if (!state.downloading) {
return {
status: 'idle',
error: state.error
};
}
var elapsed = os.now() - state.start_time;
var bytes_per_sec = elapsed > 0 ? state.downloaded_bytes / elapsed : 0;
return {
status: 'downloading',
url: state.current_url,
progress: get_progress(),
downloaded_bytes: state.downloaded_bytes,
total_bytes: state.total_bytes,
elapsed_seconds: elapsed,
bytes_per_second: number.round(bytes_per_sec)
};
}
// Main message receiver
$receiver(function(msg) {
switch (msg.type) {
case 'download':
if (state.downloading) {
send(msg, {
type: 'error',
error: 'Already downloading',
current_url: state.current_url
});
return;
}
if (!msg.url) {
send(msg, {
type: 'error',
error: 'No URL provided'
});
return;
}
// Start download
state.downloading = true;
state.current_url = msg.url;
state.total_bytes = 0;
state.downloaded_bytes = 0;
state.start_time = os.now();
state.error = null;
state.download_msg = msg;
state.chunks = [];
try {
// Start the connection
state.connection = http.fetch_start(msg.url, msg.options || {});
if (!state.connection) {
throw new Error('Failed to start download');
}
// Schedule the first chunk read
$delay(read_next_chunk, 0);
} catch (e) {
state.error = e.toString();
state.downloading = false;
send(msg, {
type: 'error',
error: state.error,
url: msg.url
});
}
break;
case 'status':
log.console(`got status request. current is ${get_status()}`)
send(msg, {
type: 'status_response',
...get_status()
});
break;
case 'cancel':
if (state.downloading) {
// Cancel the download
if (state.connection) {
http.fetch_close(state.connection);
state.connection = null;
}
var cancelled_url = state.current_url;
state.downloading = false;
state.current_url = null;
state.download_msg = null;
state.chunks = [];
send(msg, {
type: 'cancelled',
message: 'Download cancelled',
url: cancelled_url
});
} else {
send(msg, {
type: 'error',
error: 'No download in progress'
});
}
break;
default:
send(msg, {
type: 'error',
error: 'Unknown message type: ' + msg.type
});
}
});
// Non-blocking chunk reader
function read_next_chunk() {
if (!state.downloading || !state.connection) {
return;
}
try {
var chunk = http.fetch_read_chunk(state.connection);
if (chunk == null) {
// Download complete
finish_download();
return;
}
// Store chunk
state.chunks.push(chunk);
// Update progress
var info = http.fetch_info(state.connection);
state.downloaded_bytes = info.bytes_read;
if (info.headers_complete && info.content_length > 0) {
state.total_bytes = info.content_length;
}
// Schedule next chunk read
$delay(read_next_chunk, 0);
} catch (e) {
// Error during download
state.error = e.toString();
if (state.connection) {
http.fetch_close(state.connection);
}
if (state.download_msg) {
send(state.download_msg, {
type: 'error',
error: state.error,
url: state.current_url
});
}
// Reset state
state.downloading = false;
state.connection = null;
state.download_msg = null;
state.chunks = [];
}
}
// Complete the download and send result
function finish_download() {
if (state.connection) {
http.fetch_close(state.connection);
}
// Combine all chunks into single ArrayBuffer
var total_size = 0;
for (var i = 0; i < state.chunks.length; i++) {
total_size += state.chunks[i].byteLength;
}
var result = new ArrayBuffer(total_size);
var view = new Uint8Array(result);
var offset = 0;
for (var i = 0; i < state.chunks.length; i++) {
var chunk_view = new Uint8Array(state.chunks[i]);
view.set(chunk_view, offset);
offset += state.chunks[i].byteLength;
}
// Send complete message
if (state.download_msg) {
send(state.download_msg, {
type: 'complete',
url: state.current_url,
data: result,
size: result.byteLength,
duration: os.now() - state.start_time
});
}
// Reset state
state.downloading = false;
state.connection = null;
state.current_url = null;
state.download_msg = null;
state.chunks = [];
}

View File

@@ -10,7 +10,7 @@ var match_id = 0;
$portal(e => {
log.console("NAT server: received connection request");
if (!is_actor(e.actor))
if (!isa(e.actor, actor))
send(e, {reason: "Must provide the actor you want to connect."});
if (waiting_client) {

13
fd.c
View File

@@ -502,9 +502,10 @@ JSC_SCALL(fd_readdir,
ret = JS_ThrowInternalError(js, "FindFirstFile failed for %s", path);
} else {
ret = JS_NewArray(js);
int i = 0;
do {
if (strcmp(ffd.cFileName, ".") == 0 || strcmp(ffd.cFileName, "..") == 0) continue;
JS_ArrayPush(js, ret,JS_NewString(js, ffd.cFileName));
JS_SetPropertyUint32(js, ret, i++, JS_NewString(js, ffd.cFileName));
} while (FindNextFile(hFind, &ffd) != 0);
FindClose(hFind);
}
@@ -514,9 +515,10 @@ JSC_SCALL(fd_readdir,
d = opendir(str);
if (d) {
ret = JS_NewArray(js);
int i = 0;
while ((dir = readdir(d)) != NULL) {
if (strcmp(dir->d_name, ".") == 0 || strcmp(dir->d_name, "..") == 0) continue;
JS_ArrayPush(js, ret, JS_NewString(js, dir->d_name));
JS_SetPropertyUint32(js, ret, i++, JS_NewString(js, dir->d_name));
}
closedir(d);
} else {
@@ -557,23 +559,20 @@ JSC_CCALL(fd_slurpwrite,
size_t len;
const char *data = js_get_blob_data(js, &len, argv[1]);
if (!data && len > 0)
if (data == (const char *)-1)
return JS_EXCEPTION;
const char *str = JS_ToCString(js, argv[0]);
if (!str) return JS_EXCEPTION;
int fd = open(str, O_WRONLY | O_CREAT | O_TRUNC, 0644);
if (fd < 0) {
JS_FreeCString(js, str);
if (fd < 0)
return JS_ThrowInternalError(js, "open failed for %s: %s", str, strerror(errno));
}
ssize_t written = write(fd, data, len);
close(fd);
JS_FreeCString(js, str);
if (written != (ssize_t)len)
return JS_ThrowInternalError(js, "write failed for %s: %s", str, strerror(errno));

58
fd.cm
View File

@@ -1,67 +1,31 @@
var fd = this
var wildstar = use('wildstar')
function last_pos(str, sep) {
var last = null
replace(str, sep, function(m, pos) {
last = pos
return m
})
return last
}
// Helper to join paths
function join_paths(base, rel) {
base = replace(base, /\/+$/, "")
rel = replace(rel, /^\/+/, "")
base = base.replace(/\/+$/, "")
rel = rel.replace(/^\/+/, "")
if (!base) return rel
if (!rel) return base
return base + "/" + rel
}
fd.join_paths = join_paths
fd.basename = function basename(path) {
var last = last_pos(path, '/')
if (last == null) return path
return text(path, last+1)
}
fd.dirname = function dirname(path) {
var last = last_pos(path, '/')
if (last == null) return ""
return text(path,0,last)
}
fd.stem = function stem(path) {
var last = last_pos(path, '.')
if (last == null) return path
return text(path,0,last)
}
fd.globfs = function(globs, dir) {
if (dir == null) dir = "."
var results = []
function check_neg(path) {
var found = false;
arrfor(globs, function(g) {
if (starts_with(g, "!") && wildstar.match(text(g, 1), path, wildstar.WM_WILDSTAR)) {
found = true;
return true;
for (var g of globs) {
if (g.startsWith("!") && wildstar.match(g.substring(1), path, wildstar.WM_WILDSTAR)) return true;
}
}, null, true);
return found;
return false;
}
function check_pos(path) {
var found = false;
arrfor(globs, function(g) {
if (!starts_with(g, "!") && wildstar.match(g, path, wildstar.WM_WILDSTAR)) {
found = true;
return true;
for (var g of globs) {
if (!g.startsWith("!") && wildstar.match(g, path, wildstar.WM_WILDSTAR)) return true;
}
}, null, true);
return found;
return false;
}
function visit(curr_full, rel_prefix) {
@@ -70,7 +34,7 @@ fd.globfs = function(globs, dir) {
var list = fd.readdir(curr_full)
if (!list) return
arrfor(list, function(item) {
for (var item of list) {
var item_rel = rel_prefix ? rel_prefix + "/" + item : item
var child_full = join_paths(curr_full, item)
@@ -82,10 +46,10 @@ fd.globfs = function(globs, dir) {
}
} else {
if (!check_neg(item_rel) && check_pos(item_rel)) {
push(results, item_rel)
results.push(item_rel)
}
}
}
});
}
var st = fd.stat(dir)

View File

@@ -13,7 +13,7 @@ var shop = use('internal/shop')
// Parse arguments
var target_pkg = null
for (var i = 0; i < length(args); i++) {
for (var i = 0; i < args.length; i++) {
if (args[i] == '--help' || args[i] == '-h') {
log.console("Usage: cell fetch [package]")
log.console("Fetch package zips from remote sources.")
@@ -24,7 +24,7 @@ for (var i = 0; i < length(args); i++) {
log.console("This command ensures that the zip files on disk match what's in")
log.console("the lock file. For local packages, this is a no-op.")
$stop()
} else if (!starts_with(args[i], '-')) {
} else if (!args[i].startsWith('-')) {
target_pkg = args[i]
}
}
@@ -35,55 +35,52 @@ var packages_to_fetch = []
if (target_pkg) {
// Fetch specific package
if (find(all_packages, target_pkg) == null) {
if (!all_packages.includes(target_pkg)) {
log.error("Package not found: " + target_pkg)
$stop()
}
push(packages_to_fetch, target_pkg)
packages_to_fetch.push(target_pkg)
} else {
// Fetch all packages
packages_to_fetch = all_packages
}
var remote_count = 0
arrfor(packages_to_fetch, function(pkg) {
var entry = lock[pkg]
if (pkg != 'core' && (!entry || entry.type != 'local'))
remote_count++
}, null, null)
log.console("Fetching " + text(packages_to_fetch.length) + " package(s)...")
if (remote_count > 0)
log.console(`Fetching ${text(remote_count)} remote package(s)...`)
var downloaded_count = 0
var cached_count = 0
var success_count = 0
var skip_count = 0
var fail_count = 0
arrfor(packages_to_fetch, function(pkg) {
for (var pkg of packages_to_fetch) {
var entry = lock[pkg]
// Skip local packages
if (entry && entry.type == 'local') {
skip_count++
continue
}
// Skip core (handled separately)
if (pkg == 'core') return
if (pkg == 'core') {
skip_count++
continue
}
var result = shop.fetch(pkg)
if (result.status == 'local') {
// Local packages are just symlinks, nothing to fetch
return
} else if (result.status == 'cached') {
cached_count++
} else if (result.status == 'downloaded') {
log.console(" Downloaded: " + pkg)
downloaded_count++
} else if (result.status == 'error') {
log.error(" Failed: " + pkg + (result.message ? " - " + result.message : ""))
if (result) {
if (result.zip_blob) {
log.console("Fetched: " + pkg)
success_count++
} else {
skip_count++
}
} else {
log.error("Failed to fetch: " + pkg)
fail_count++
}
}, null, null)
}
log.console("")
var parts = []
if (downloaded_count > 0) push(parts, `${text(downloaded_count)} downloaded`)
if (cached_count > 0) push(parts, `${text(cached_count)} cached`)
if (fail_count > 0) push(parts, `${text(fail_count)} failed`)
if (length(parts) == 0) push(parts, "nothing to fetch")
log.console("Fetch complete: " + text(parts, ", "))
log.console("Fetch complete: " + text(success_count) + " fetched, " + text(skip_count) + " skipped, " + text(fail_count) + " failed")
$stop()

236
graph.ce
View File

@@ -1,236 +0,0 @@
// cell graph [<locator>] - Emit dependency graph
//
// Usage:
// cell graph Graph current directory package
// cell graph . Graph current directory package
// cell graph <locator> Graph specific package
// cell graph --world Graph all packages in shop (world set)
//
// Options:
// --format <fmt> Output format: tree (default), dot, json
// --resolved Show resolved view with links applied (default)
// --locked Show lock view without links
// --world Graph all packages in shop
var shop = use('internal/shop')
var pkg = use('package')
var link = use('link')
var fd = use('fd')
var json = use('json')
var target_locator = null
var format = 'tree'
var show_locked = false
var show_world = false
for (var i = 0; i < length(args); i++) {
if (args[i] == '--format' || args[i] == '-f') {
if (i + 1 < length(args)) {
format = args[++i]
if (format != 'tree' && format != 'dot' && format != 'json') {
log.error('Invalid format: ' + format + '. Must be tree, dot, or json')
$stop()
}
} else {
log.error('--format requires a format type')
$stop()
}
} else if (args[i] == '--resolved') {
show_locked = false
} else if (args[i] == '--locked') {
show_locked = true
} else if (args[i] == '--world') {
show_world = true
} else if (args[i] == '--help' || args[i] == '-h') {
log.console("Usage: cell graph [<locator>] [options]")
log.console("")
log.console("Emit the dependency graph.")
log.console("")
log.console("Options:")
log.console(" --format <fmt> Output format: tree (default), dot, json")
log.console(" --resolved Show resolved view with links applied (default)")
log.console(" --locked Show lock view without links")
log.console(" --world Graph all packages in shop")
$stop()
} else if (!starts_with(args[i], '-')) {
target_locator = args[i]
}
}
var links = show_locked ? {} : link.load()
// Get effective locator (after links)
function get_effective(locator) {
return links[locator] || locator
}
// Build graph data structure
var nodes = {}
var edges = []
function add_node(locator) {
if (nodes[locator]) return
var lock = shop.load_lock()
var lock_entry = lock[locator]
var link_target = links[locator]
var info = shop.resolve_package_info(locator)
nodes[locator] = {
id: locator,
effective: get_effective(locator),
linked: link_target != null,
local: info == 'local',
commit: lock_entry && lock_entry.commit ? text(lock_entry.commit, 0, 8) : null
}
}
function gather_graph(locator, visited) {
if (visited[locator]) return
visited[locator] = true
add_node(locator)
try {
var deps = pkg.dependencies(locator)
if (deps) {
arrfor(array(deps), function(alias) {
var dep_locator = deps[alias]
add_node(dep_locator)
push(edges, { from: locator, to: dep_locator, alias: alias })
gather_graph(dep_locator, visited)
})
}
} catch (e) {
// Package might not have dependencies
}
}
// Gather graph from roots
var roots = []
if (show_world) {
// Use all packages in shop as roots
var packages = shop.list_packages()
arrfor(packages, function(p) {
if (p != 'core') {
push(roots, p)
}
})
} else {
// Default to current directory
if (!target_locator) {
target_locator = '.'
}
// Resolve local paths
if (target_locator == '.' || starts_with(target_locator, './') || starts_with(target_locator, '../') || fd.is_dir(target_locator)) {
var resolved = fd.realpath(target_locator)
if (resolved) {
target_locator = resolved
}
}
push(roots, target_locator)
}
arrfor(roots, function(root) {
gather_graph(root, {})
})
// Output based on format
if (format == 'tree') {
function print_tree(locator, prefix, is_last, visited) {
if (visited[locator]) {
log.console(prefix + (is_last ? "\\-- " : "|-- ") + locator + " (circular)")
return
}
visited[locator] = true
var node = nodes[locator]
var suffix = ""
if (node.linked) suffix += " -> " + node.effective
if (node.commit) suffix += " @" + node.commit
if (node.local) suffix += " (local)"
log.console(prefix + (is_last ? "\\-- " : "|-- ") + locator + suffix)
// Get children
var children = []
arrfor(edges, function(e) {
if (e.from == locator) {
push(children, e)
}
})
for (var i = 0; i < length(children); i++) {
var child_prefix = prefix + (is_last ? " " : "| ")
print_tree(children[i].to, child_prefix, i == length(children) - 1, visited)
}
}
for (var i = 0; i < length(roots); i++) {
log.console(roots[i])
var children = []
arrfor(edges, function(e) {
if (e.from == roots[i]) {
push(children, e)
}
})
for (var j = 0; j < length(children); j++) {
print_tree(children[j].to, "", j == length(children) - 1, {})
}
if (i < length(roots) - 1) log.console("")
}
} else if (format == 'dot') {
log.console("digraph dependencies {")
log.console(" rankdir=TB;")
log.console(" node [shape=box];")
log.console("")
// Node definitions
arrfor(array(nodes), function(id) {
var node = nodes[id]
var label = id
if (node.commit) label += "\\n@" + node.commit
var attrs = 'label="' + label + '"'
if (node.linked) attrs += ', style=dashed'
if (node.local) attrs += ', color=blue'
// Safe node ID for dot
var safe_id = replace(id, /[^a-zA-Z0-9]/g, '_')
log.console(' ' + safe_id + ' [' + attrs + '];')
})
log.console("")
// Edges
arrfor(edges, function(e) {
var from_id = replace(e.from, /[^a-zA-Z0-9]/g, '_')
var to_id = replace(e.to, /[^a-zA-Z0-9]/g, '_')
var label = e.alias != e.to ? 'label="' + e.alias + '"' : ''
log.console(' ' + from_id + ' -> ' + to_id + (label ? ' [' + label + ']' : '') + ';')
})
log.console("}")
} else if (format == 'json') {
var output = {
nodes: [],
edges: []
}
arrfor(array(nodes), function(id) {
push(output.nodes, nodes[id])
})
output.edges = edges
log.console(json.encode(output))
}
$stop()

44
help.ce
View File

@@ -2,7 +2,7 @@
var fd = use('fd')
var command = length(args) > 0 ? args[0] : null
var command = args.length > 0 ? args[0] : null
// Display specific command help
if (command) {
@@ -27,41 +27,21 @@ if (stat && stat.isFile) {
log.console(content)
} else {
// Fallback if man file doesn't exist
log.console("cell - The Cell package manager")
log.console("cell - The Cell module system for Prosperon")
log.console("")
log.console("Usage: cell <command> [arguments]")
log.console("")
log.console("Package Management:")
log.console(" install <locator> Install a package and its dependencies")
log.console(" update [locator] Update packages from remote sources")
log.console(" remove <locator> Remove a package from the shop")
log.console(" add <locator> Add a dependency to current package")
log.console("Commands:")
log.console(" init Initialize a new Cell project")
log.console(" get Fetch and add a module dependency")
log.console(" update Update a dependency to a new version")
log.console(" vendor Copy all dependencies locally")
log.console(" build Compile all modules to bytecode")
log.console(" patch Create a patch for a module")
log.console(" config Manage system and actor configurations")
log.console(" help Show this help message")
log.console("")
log.console("Building:")
log.console(" build [locator] Build dynamic libraries for packages")
log.console(" clean [scope] Remove build artifacts")
log.console("")
log.console("Linking (Local Development):")
log.console(" link <origin> <target> Link a package to a local path")
log.console(" unlink <origin> Remove a package link")
log.console(" clone <origin> <path> Clone and link a package locally")
log.console("")
log.console("Information:")
log.console(" list [scope] List packages and dependencies")
log.console(" ls [locator] List modules and actors in a package")
log.console(" why <locator> Show reverse dependencies")
log.console(" search <query> Search for packages, modules, or actors")
log.console("")
log.console("Diagnostics:")
log.console(" resolve [locator] Print fully resolved dependency closure")
log.console(" graph [locator] Emit dependency graph (tree, dot, json)")
log.console(" verify [scope] Verify integrity and consistency")
log.console("")
log.console("Other:")
log.console(" help [command] Show help for a command")
log.console(" version Show cell version")
log.console("")
log.console("Run 'cell <command> --help' for more information on a command.")
log.console("Run 'cell help <command>' for more information on a command.")
}
$stop()

View File

@@ -1,185 +1,62 @@
// cell install <locator> - Install a package to the shop
//
// Usage:
// cell install <locator> Install a package and its dependencies
// cell install . Install current directory package
//
// Options:
// --target <triple> Build for target platform
// --refresh Refresh floating refs before locking
// --dry-run Show what would be installed
// Does not modify the current project's cell.toml
var shop = use('internal/shop')
var build = use('build')
var pkg = use('package')
var fd = use('fd')
if (length(args) < 1) {
log.console("Usage: cell install <locator> [options]")
log.console("")
log.console("Options:")
log.console(" --target <triple> Build for target platform")
log.console(" --refresh Refresh floating refs before locking")
log.console(" --dry-run Show what would be installed")
$stop()
}
var locator = null
var target_triple = null
var refresh = false
var dry_run = false
for (var i = 0; i < length(args); i++) {
if (args[i] == '--target' || args[i] == '-t') {
if (i + 1 < length(args)) {
target_triple = args[++i]
} else {
log.error('--target requires a triple')
$stop()
}
} else if (args[i] == '--refresh') {
refresh = true
} else if (args[i] == '--dry-run') {
dry_run = true
} else if (args[i] == '--help' || args[i] == '-h') {
log.console("Usage: cell install <locator> [options]")
log.console("")
log.console("Install a package and its dependencies to the shop.")
log.console("")
log.console("Options:")
log.console(" --target <triple> Build for target platform")
log.console(" --refresh Refresh floating refs before locking")
log.console(" --dry-run Show what would be installed")
$stop()
} else if (!starts_with(args[i], '-')) {
locator = args[i]
}
}
if (!locator) {
if (args.length < 1) {
log.console("Usage: cell install <locator>")
$stop()
return
}
var locator = args[0]
// Resolve relative paths to absolute paths
// Local paths like '.' or '../foo' need to be converted to absolute paths
if (locator == '.' || starts_with(locator, './') || starts_with(locator, '../') || fd.is_dir(locator)) {
if (locator == '.' || locator.startsWith('./') || locator.startsWith('../') || fd.is_dir(locator)) {
var resolved = fd.realpath(locator)
if (resolved) {
locator = resolved
}
}
// Default target
if (!target_triple) {
target_triple = build.detect_host_target()
}
log.console("Installing " + locator + "...")
// Gather all packages that will be installed
var packages_to_install = []
var skipped_packages = []
var visited = {}
var pkg = use('package')
function gather_packages(pkg_locator) {
// Recursive install function that handles dependencies
function install_package(pkg_locator, visited) {
if (visited[pkg_locator]) return
visited[pkg_locator] = true
// Check if this is a local path that doesn't exist
if (starts_with(pkg_locator, '/') && !fd.is_dir(pkg_locator)) {
push(skipped_packages, pkg_locator)
log.console(" Skipping missing local package: " + pkg_locator)
return
}
// First, add to lock.toml
shop.update(pkg_locator)
push(packages_to_install, pkg_locator)
// Extract/symlink the package so we can read its cell.toml
shop.extract(pkg_locator)
// Try to read dependencies
// Now get direct dependencies and install them first
try {
// For packages not yet extracted, we need to update and extract first to read deps
var lock = shop.load_lock()
if (!lock[pkg_locator]) {
if (!dry_run) {
var update_result = shop.update(pkg_locator)
if (update_result) {
shop.extract(pkg_locator)
} else {
// Update failed - package might not be fetchable
log.console("Warning: Could not fetch " + pkg_locator)
return
}
}
} else {
// Package is in lock, ensure it's extracted
if (!dry_run) {
shop.extract(pkg_locator)
}
}
var deps = pkg.dependencies(pkg_locator)
if (deps) {
arrfor(array(deps), function(alias) {
for (var alias in deps) {
var dep_locator = deps[alias]
gather_packages(dep_locator)
})
log.console("Installing dependency " + dep_locator)
install_package(dep_locator, visited)
}
}
} catch (e) {
// Package might not have dependencies or cell.toml issue
if (!dry_run) {
log.console(`Warning: Could not read dependencies for ${pkg_locator}: ${e.message}`)
}
}
log.console("Warning: Could not read dependencies for " + pkg_locator + ": " + e.message)
}
// Gather all packages
gather_packages(locator)
if (dry_run) {
log.console("Would install:")
arrfor(packages_to_install, function(p) {
var lock = shop.load_lock()
var exists = lock[p] != null
log.console(" " + p + (exists ? " (already installed)" : ""))
})
if (length(skipped_packages) > 0) {
log.console("")
log.console("Would skip (missing local paths):")
arrfor(skipped_packages, function(p) {
log.console(" " + p)
})
}
$stop()
// Build the package after all dependencies are installed
build.build_package(pkg_locator)
}
// Install each package
function install_package(pkg_locator) {
// Update lock entry
shop.update(pkg_locator)
// Extract/symlink the package
shop.extract(pkg_locator)
// Build scripts
shop.build_package_scripts(pkg_locator)
// Build C code
try {
build.build_dynamic(pkg_locator, target_triple, 'release')
} catch (e) {
// Not all packages have C code
}
}
arrfor(packages_to_install, function(p) {
log.console(" Installing " + p + "...")
install_package(p)
})
var summary = "Installed " + text(length(packages_to_install)) + " package(s)."
if (length(skipped_packages) > 0) {
summary += " Skipped " + text(length(skipped_packages)) + " missing local path(s)."
}
log.console(summary)
install_package(locator, {})
log.console("Installed " + locator)
$stop()

View File

@@ -25,7 +25,7 @@ var ACTOR_EXT = '.ce'
var load_internal = os.load_internal
function use_embed(name) {
return load_internal("js_" + name + "_use")
return load_internal(`js_${name}_use`)
}
globalThis.logical = function(val1)
@@ -37,36 +37,20 @@ globalThis.logical = function(val1)
return null;
}
globalThis.some = function(arr, pred) {
return find(arr, pred) != null
}
globalThis.every = function(arr, pred) {
return find(arr, x => not(pred(x))) == null
}
globalThis.starts_with = function(str, prefix) {
return search(str, prefix) == 0
}
globalThis.ends_with = function(str, suffix) {
return search(str, suffix, -length(suffix)) != null
}
var js = use_embed('js')
var fd = use_embed('fd')
// Get the shop path from HOME environment
var home = os.getenv('HOME') || os.getenv('USERPROFILE')
if (!home) {
throw Error('Could not determine home directory')
throw new Error('Could not determine home directory')
}
var shop_path = home + '/.cell'
var packages_path = shop_path + '/packages'
var core_path = packages_path + '/core'
if (!fd.is_dir(core_path)) {
throw Error('Cell shop not found at ' + shop_path + '. Run "cell install" to set up.')
throw new Error('Cell shop not found at ' + shop_path + '. Run "cell install" to set up.')
}
var use_cache = {}
@@ -78,7 +62,7 @@ function use_core(path) {
if (use_cache[cache_key])
return use_cache[cache_key];
var sym = use_embed(replace(path, '/', '_'))
var sym = use_embed(path.replace('/','_'))
// Core scripts are in packages/core/
var file_path = core_path + '/' + path + MOD_EXT
@@ -88,7 +72,7 @@ function use_core(path) {
var script = text(script_blob)
var mod = `(function setup_module(use){${script}})`
var fn = js.eval('core:' + path, mod)
var result = call(fn,sym, [use_core])
var result = fn.call(sym, use_core);
use_cache[cache_key] = result;
return result;
}
@@ -99,6 +83,12 @@ function use_core(path) {
var blob = use_core('blob')
// Capture Object and Array methods before they're deleted
Object.prototype.toString = function()
{
return json.encode(this)
}
globalThis.actor = function()
{
@@ -108,24 +98,66 @@ var actor_mod = use_core('actor')
var wota = use_core('wota')
var nota = use_core('nota')
globalThis.is_actor = function(value) {
return is_object(value) && value[ACTORDATA]
globalThis.isa = function(value, master) {
if (master == null) return false
// isa(value, function) - check if function.prototype is in chain
if (typeof master == 'function') {
// Special type checks
if (master == stone) return is_stone(value)
if (master == number) return is_number(value)
if (master == text) return is_text(value)
if (master == logical) return is_logical(value)
if (master == array) return is_array(value)
if (master == object) return is_object(value)
if (master == fn) return is_function(value)
if (master == actor) return is_object(value) && value[ACTORDATA]
// Check prototype chain
if (master.prototype) {
var proto = _getPrototypeOf(value)
while (proto != null) {
if (proto == master.prototype) return true
proto = _getPrototypeOf(proto)
}
}
return false
}
// isa(object, master_object) - check prototype chain
if (typeof master == 'object') {
var proto = _getPrototypeOf(value)
while (proto != null) {
if (proto == master) return true
proto = _getPrototypeOf(proto)
}
return false
}
return false
}
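// --- hedged usage sketch, not part of the commit ---
// Shows how the isa() helper above is expected to behave; the sample object is
// made up, and os.print is used because the log helpers are defined further down.
var isa_sample = { name: "demo", count: 3 }
os.print(`isa object: ${isa(isa_sample, object)}\n`)        // true - plain object
os.print(`isa number: ${isa(isa_sample.count, number)}\n`)  // true
os.print(`isa actor: ${isa(isa_sample, actor)}\n`)          // false - no ACTORDATA attached
os.print(`isa array: ${isa(isa_sample.name, array)}\n`)     // false - text, not an array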
var ENETSERVICE = 0.1
var REPLYTIMEOUT = 60 // seconds before replies are ignored
var nullguard = false
function caller_data(depth = 0)
{
var file = "nofile"
var line = 0
var caller = array(Error().stack, "\n")[1+depth]
var caller = new Error().stack.split("\n")[1+depth]
if (!nullguard && is_null(caller)) {
os.print(`caller_data now getting null`)
os.print("\n")
nullguard = true
}
if (caller) {
var md = extract(caller, /\((.*)\:/)
var md = caller.match(/\((.*)\:/)
var m = md ? md[1] : "SCRIPT"
if (m) file = m
md = extract(caller, /\:(\d*)\)/)
md = caller.match(/\:(\d*)\)/)
m = md ? md[1] : 0
if (m) line = m
}
@@ -134,62 +166,52 @@ function caller_data(depth = 0)
}
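// --- hedged sketch, not part of the commit ---
// Illustrates what caller_data() extracts from one stack entry; the sample line
// assumes the "(file:line)" format that the two regexes above expect.
var sample_stack_line = "    at setup_module (core:engine:42)"
var sample_file = sample_stack_line.match(/\((.*)\:/)
var sample_line_no = sample_stack_line.match(/\:(\d*)\)/)
os.print(`file: ${sample_file ? sample_file[1] : "SCRIPT"}, line: ${sample_line_no ? sample_line_no[1] : 0}\n`)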
function console_rec(line, file, msg) {
return `[${text(_cell.id, 0, 5)}] [${file}:${line}]: ${msg}\n`
return `[${_cell.id.slice(0,5)}] [${file}:${line}]: ${msg}\n`
// time: [${time.text("mb d yyyy h:nn:ss")}]
}
globalThis.log = function(name, args) {
globalThis.log = {}
log.console = function(msg)
{
var caller = caller_data(1)
var msg = args[0]
switch(name) {
case 'console':
os.print(console_rec(caller.line, caller.file, msg))
break
case 'error':
msg = msg ?? Error()
if (is_proto(msg, Error))
msg = msg.name + ": " + msg.message + "\n" + msg.stack
os.print(console_rec(caller.line, caller.file, msg))
break
case 'system':
msg = "[SYSTEM] " + msg
os.print(console_rec(caller.line, caller.file, msg))
break
default:
log.console(`unknown log type: ${name}`)
break
}
log.error = function(msg = new Error())
{
var caller = caller_data(1)
if (msg instanceof Error)
msg = msg.name + ": " + msg.message + "\n" + msg.stack
os.print(console_rec(caller.line,caller.file,msg))
}
log.system = function(msg) {
msg = "[SYSTEM] " + msg
log.console(msg)
}
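// --- hedged usage sketch, not part of the commit ---
log.console("plain message")             // -> [actor id] [file:line]: plain message
log.system("portal starting")            // same record, prefixed with [SYSTEM]
log.error(new Error("something broke"))  // prints name, message and stack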
function disrupt(err)
{
if (is_function(err.toString)) {
os.print(err.toString())
os.print("\n")
os.print(err.stack)
}
if (overling) {
if (err) {
// with an err, this is a forceful disrupt
var reason = (is_proto(err, Error)) ? err.stack : err
var reason = (err instanceof Error) ? err.stack : err
report_to_overling({type:'disrupt', reason})
} else
report_to_overling({type:'stop'})
}
if (underlings) {
var unders = array(underlings)
arrfor(unders, function(id, index) {
for (var id of underlings) {
log.console(`calling on ${id} to disrupt too`)
$_.stop(create_actor({id}))
})
}
}
if (err) {
if (err.message)
log.console(err.message)
log.console(err);
if (err.stack)
log.console(err.stack)
}
@@ -197,8 +219,6 @@ function disrupt(err)
actor_mod.disrupt()
}
actor_mod.on_exception(disrupt)
_cell.args = _cell.hidden.init
@@ -232,9 +252,9 @@ globalThis.sequence = pronto.sequence
$_.time_limit = function(requestor, seconds)
{
if (!pronto.is_requestor(requestor))
throw Error('time_limit: first argument must be a requestor');
if (!is_number(seconds) || seconds <= 0)
throw Error('time_limit: seconds must be a positive number');
throw new Error('time_limit: first argument must be a requestor');
if (!isa(seconds, number) || seconds <= 0)
throw new Error('time_limit: seconds must be a positive number');
return function time_limit_requestor(callback, value) {
pronto.check_callback(callback, 'time_limit')
@@ -330,12 +350,12 @@ REPLYTIMEOUT = config.reply_timeout
function guid(bits = 256)
{
var guid = blob(bits, os.random)
var guid = new blob(bits, os.random)
stone(guid)
return text(guid,'h')
}
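// --- hedged usage sketch, not part of the commit ---
// guid() fills a blob with random bits, stones it, and renders it as hex;
// assuming one hex digit per 4 bits, the default 256 bits come out as 64 characters.
var sample_id = guid()
log.console(`sample id ${sample_id} (${sample_id.length} chars)`)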
var HEADER = {}
var HEADER = key()
// takes a function input value that will eventually be called with the current time in number form.
$_.clock = function(fn) {
@@ -345,7 +365,7 @@ $_.clock = function(fn) {
})
}
var underlings = {} // this is more like "all actors that are notified when we die"
var underlings = new Set() // this is more like "all actors that are notified when we die"
var overling = null
var root = null
@@ -398,8 +418,8 @@ var portal_fn = null
// opens a portal on the given port; fn is invoked for each incoming contact message.
$_.portal = function(fn, port) {
if (portal) throw Error(`Already started a portal listening on ${portal.port}`)
if (!port) throw Error("Requires a valid port.")
if (portal) throw new Error(`Already started a portal listening on ${portal.port}`)
if (!port) throw new Error("Requires a valid port.")
log.system(`starting a portal on port ${port}`)
portal = enet.create_host({address: "any", port})
portal_fn = fn
@@ -412,16 +432,14 @@ function handle_host(e) {
peers[`${e.peer.address}:${e.peer.port}`] = e.peer
var queue = peer_queue.get(e.peer)
if (queue) {
arrfor(queue, (msg, index) => e.peer.send(nota.encode(msg)))
for (var msg of queue) e.peer.send(nota.encode(msg))
log.system(`sent ${msg} out of queue`)
peer_queue.delete(e.peer)
}
break
case "disconnect":
peer_queue.delete(e.peer)
arrfor(array(peers), function(id, index) {
if (peers[id] == e.peer) delete peers[id]
})
for (var id in peers) if (peers[id] == e.peer) delete peers[id]
log.system('portal got disconnect from ' + e.peer.address + ":" + e.peer.port)
break
case "receive":
@@ -431,15 +449,16 @@ function handle_host(e) {
data.replycc[ACTORDATA].port = e.peer.port
}
function populate_actor_addresses(obj) {
if (!is_object(obj)) return
if (!isa(obj, object)) return
if (obj[ACTORDATA] && !obj[ACTORDATA].address) {
obj[ACTORDATA].address = e.peer.address
obj[ACTORDATA].port = e.peer.port
}
arrfor(array(obj), function(key, index) {
if (key in obj)
for (var key in obj) {
if (object.has(obj, key)) {
populate_actor_addresses(obj[key])
})
}
}
}
if (data.data) populate_actor_addresses(data.data)
turn(data)
@@ -457,18 +476,20 @@ $_.receiver = function receiver(fn) {
receive_fn = fn
}
$_.start = function start(cb, program) {
$_.start = function start(cb, program, ...args) {
if (!program) return
var id = guid()
if (args.length == 1 && Array.isArray(args[0])) args = args[0]
var startup = {
id,
overling: $_.self,
root,
arg: args,
program,
}
greeters[id] = cb
push(message_queue, { startup })
message_queue.push({ startup })
}
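// --- hedged usage sketch, not part of the commit ---
// Starts an underling running a hypothetical 'worker' program; the greeter
// callback logs whatever the new actor reports back, and the trailing arguments
// become the new actor's args.
$_.start(function(report) {
  log.console(`worker reported: ${report.type}`)
}, 'worker', 'first-arg', 'second-arg')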
// stops an underling or self.
@@ -477,10 +498,10 @@ $_.stop = function stop(actor) {
need_stop = true
return
}
if (!is_actor(actor))
throw Error('Can only call stop on an actor.')
if (is_null(underlings[actor[ACTORDATA].id]))
throw Error('Can only call stop on an underling or self.')
if (!isa(actor, actor))
throw new Error('Can only call stop on an actor.')
if (!underlings.has(actor[ACTORDATA].id))
throw new Error('Can only call stop on an underling or self.')
sys_msg(actor, {kind:"stop"})
}
@@ -492,6 +513,11 @@ $_.unneeded = function unneeded(fn, seconds) {
// schedules the invocation of a function after a specified amount of time.
$_.delay = function delay(fn, seconds = 0) {
if (seconds <= 0) {
$_.clock(fn)
return
}
function delay_turn() {
fn()
send_messages()
@@ -503,16 +529,16 @@ $_.delay = function delay(fn, seconds = 0) {
var enet = use_core('enet')
// causes this actor to stop when another actor stops.
var couplings = {}
var couplings = new Set()
$_.couple = function couple(actor) {
if (actor == $_.self) return // can't couple to self
couplings[actor[ACTORDATA].id] = true
couplings.add(actor[ACTORDATA].id)
sys_msg(actor, {kind:'couple', from: $_.self})
log.system(`coupled to ${actor}`)
}
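// --- hedged usage sketch, not part of the commit ---
// An actor that should not outlive the actor that started it couples to its
// overling (assuming $_.overling is populated for this actor).
if ($_.overling) $_.couple($_.overling)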
function actor_prep(actor, send) {
push(message_queue, {actor,send});
message_queue.push({actor,send});
}
// Send a message immediately without queuing
@@ -528,9 +554,9 @@ function actor_send(actor, message) {
if (actor[HEADER] && !actor[HEADER].replycc) // attempting to respond to a message but sender is not expecting; silently drop
return
if (!is_actor(actor) && !is_actor(actor.replycc)) throw Error(`Must send to an actor object. Attempted send to ${actor}`)
if (!isa(actor, actor) && !isa(actor.replycc, actor)) throw new Error(`Must send to an actor object. Attempted send to ${actor}`)
if (!is_object(message)) throw Error('Must send an object record.')
if (typeof message != 'object') throw new Error('Must send an object record.')
// message to self
if (actor[ACTORDATA].id == _cell.id) {
@@ -541,7 +567,7 @@ function actor_send(actor, message) {
// message to actor in same flock
if (actor[ACTORDATA].id && actor_mod.mailbox_exist(actor[ACTORDATA].id)) {
var wota_blob = wota.encode(message)
// log.console(`sending wota blob of ${length(wota_blob)/8} bytes`)
// log.console(`sending wota blob of ${wota_blob.length/8} bytes`)
actor_mod.mailbox_push(actor[ACTORDATA].id, wota_blob)
return
}
@@ -579,36 +605,36 @@ var need_stop = false
// if we've been flagged to stop, bail out before doing anything
if (need_stop) {
disrupt()
message_queue = []
message_queue.length = 0
return
}
arrfor(message_queue, function(msg, index) {
for (var msg of message_queue) {
if (msg.startup) {
// now is the time to actually spin up the actor
actor_mod.createactor(msg.startup)
} else {
actor_send(msg.actor, msg.send)
}
})
}
message_queue = []
message_queue.length = 0
}
var replies = {}
globalThis.send = function send(actor, message, reply) {
if (!is_object(actor))
throw Error(`Must send to an actor object. Provided: ${actor}`);
if (typeof actor != 'object')
throw new Error(`Must send to an actor object. Provided: ${actor}`);
if (!is_object(message))
throw Error('Message must be an object')
if (typeof message != 'object')
throw new Error('Message must be an object')
var send = {type:"user", data: message}
if (actor[HEADER] && actor[HEADER].replycc) {
var header = actor[HEADER]
if (!header.replycc || !is_actor(header.replycc))
throw Error(`Supplied actor had a return, but it's not a valid actor! ${actor[HEADER]}`)
if (!header.replycc || !isa(header.replycc, actor))
throw new Error(`Supplied actor had a return, but it's not a valid actor! ${actor[HEADER]}`)
actor = header.replycc
send.return = header.reply
@@ -696,7 +722,7 @@ function handle_actor_disconnect(id) {
delete greeters[id]
}
log.system(`actor ${id} disconnected`)
if (!is_null(couplings[id])) disrupt("coupled actor died") // couplings now disrupts instead of stop
if (couplings.has(id)) disrupt("coupled actor died") // couplings now disrupts instead of stop
}
function handle_sysym(msg)
@@ -711,7 +737,7 @@ function handle_sysym(msg)
var greeter = greeters[from[ACTORDATA].id]
if (greeter) greeter(msg.message)
if (msg.message.type == 'disrupt')
delete underlings[from[ACTORDATA].id]
underlings.delete(from[ACTORDATA].id)
break
case 'contact':
if (portal_fn) {
@@ -719,11 +745,11 @@ function handle_sysym(msg)
letter2[HEADER] = msg
delete msg.data
portal_fn(letter2)
} else throw Error('Got a contact message, but no portal is established.')
} else throw new Error('Got a contact message, but no portal is established.')
break
case 'couple': // from must be notified when we die
from = msg.from
underlings[from[ACTORDATA].id] = true
underlings.add(from[ACTORDATA].id)
log.system(`actor ${from} is coupled to me`)
break
}
@@ -784,34 +810,46 @@ if (!locator) {
}
if (!locator)
throw Error(`Main program ${_cell.args.program} could not be found`)
throw new Error(`Main program ${_cell.args.program} could not be found`)
stone(globalThis)
var rads = use_core("math/radians")
log.console(rads)
log.console("now, should be nofile:0")
$_.clock(_ => {
log.console("in clock")
// Get capabilities for the main program
var file_info = shop.file_info ? shop.file_info(locator.path) : null
var inject = shop.script_inject_for ? shop.script_inject_for(file_info) : []
// Build env object for injection
var env = {}
for (var i = 0; i < length(inject); i++) {
log.console("injection")
// Build values array for injection
var vals = []
log.console(`number to inject is ${inject.length}`)
log.console('debug note: with the log.console calls inside the loop (using backticks) it runs, but the injectables error out - substring in particular does not seem to work; without them it fails entirely')
for (var i = 0; i < inject.length; i++) {
var key = inject[i]
if (key && key[0] == '$') key = text(key, 1)
if (key == 'fd') env[key] = fd
else env[key] = $_[key]
log.console(`injecting ${i}, which is ${key}`) // debug: injection works when this log line is present and fails when it is removed
if (key && key[0] == '$') key = key.substring(1)
if (key == 'fd') vals.push(fd)
else vals.push($_[key])
log.console(`resolved key: ${key}`)
}
// Create use function bound to the program's package
var pkg = file_info ? file_info.package : null
var use_fn = function(path) { return shop.use(path, pkg) }
// Call with signature: setup_module(args, use, env)
// The script wrapper binds $delay, $start, etc. from env
var val = call(locator.symbol, null, [_cell.args.arg, use_fn, env])
// Call with signature: setup_module(args, use, ...capabilities)
// The script wrapper builds $_ from the injected capabilities for backward compatibility
var val = locator.symbol.call(null, _cell.args.arg, use_fn, ...vals)
if (val)
throw Error('Program must not return anything');
throw new Error('Program must not return anything');
})
})()

63
internal/json.c Normal file
View File

@@ -0,0 +1,63 @@
#include "cell.h"
static JSValue js_json_encode(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv) {
if (argc < 1) return JS_ThrowTypeError(ctx, "json.encode requires at least 1 argument");
JSValue global = JS_GetGlobalObject(ctx);
JSValue json = JS_GetPropertyStr(ctx, global, "JSON");
JSValue stringify = JS_GetPropertyStr(ctx, json, "stringify");
JSValue args[3];
args[0] = argv[0]; // value
args[1] = (argc > 1) ? argv[1] : JS_NULL; // replacer
args[2] = (argc > 2) ? argv[2] : JS_NewInt32(ctx, 1); // space, default 1
JSValue result = JS_Call(ctx, stringify, json, 3, args);
JS_FreeValue(ctx, stringify);
JS_FreeValue(ctx, json);
JS_FreeValue(ctx, global);
if (argc <= 2) JS_FreeValue(ctx, args[2]);
return result;
}
static JSValue js_json_decode(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv) {
if (argc < 1) return JS_ThrowTypeError(ctx, "json.decode requires at least 1 argument");
if (!JS_IsString(argv[0])) {
JSValue err = JS_NewError(ctx);
JS_DefinePropertyValueStr(ctx, err, "message",
JS_NewString(ctx, "couldn't parse text: not a string"),
JS_PROP_WRITABLE | JS_PROP_CONFIGURABLE);
return JS_Throw(ctx, err);
}
JSValue global = JS_GetGlobalObject(ctx);
JSValue json = JS_GetPropertyStr(ctx, global, "JSON");
JSValue parse = JS_GetPropertyStr(ctx, json, "parse");
JSValue args[2];
args[0] = argv[0]; // text
args[1] = (argc > 1) ? argv[1] : JS_NULL; // reviver
JSValue result = JS_Call(ctx, parse, json, argc > 1 ? 2 : 1, args);
JS_FreeValue(ctx, parse);
JS_FreeValue(ctx, json);
JS_FreeValue(ctx, global);
return result;
}
static const JSCFunctionListEntry js_json_funcs[] = {
JS_CFUNC_DEF("encode", 1, js_json_encode),
JS_CFUNC_DEF("decode", 1, js_json_decode),
};
JSValue js_json_use(JSContext *js) {
JSValue export = JS_NewObject(js);
JS_SetPropertyFunctionList(js, export, js_json_funcs, sizeof(js_json_funcs)/sizeof(JSCFunctionListEntry));
return export;
}
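// --- hedged usage sketch, not part of the commit (cell script side) ---
// Assuming this file is reachable as use('json'), encode/decode wrap
// JSON.stringify / JSON.parse with a default indent of 1, e.g.:
//   var json = use('json')
//   var encoded = json.encode({ name: "demo", count: 2 })
//   var back = json.decode(encoded)
//   log.console(`round trip count: ${back.count}`)   // -> 2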

View File

@@ -106,7 +106,7 @@ char *js_do_nota_decode(JSContext *js, JSValue *tmp, char *nota, JSValue holder,
nota = js_do_nota_decode(js, &inner, nota, holder, JS_NULL, reviver);
JSValue obj = JS_NewObject(js);
cell_rt *crt = JS_GetContextOpaque(js);
// JS_SetProperty(js, obj, crt->actor_sym, inner);
JS_SetProperty(js, obj, crt->actor_sym, inner);
*tmp = obj;
} else {
switch(b) {
@@ -198,8 +198,7 @@ static void nota_encode_value(NotaEncodeContext *enc, JSValueConst val, JSValueC
}
cell_rt *crt = JS_GetContextOpaque(ctx);
// JSValue adata = JS_GetProperty(ctx, replaced, crt->actor_sym);
JSValue adata = JS_NULL;
JSValue adata = JS_GetProperty(ctx, replaced, crt->actor_sym);
if (!JS_IsNull(adata)) {
nota_write_sym(&enc->nb, NOTA_PRIVATE);
nota_encode_value(enc, adata, replaced, JS_NULL);
@@ -214,7 +213,7 @@ static void nota_encode_value(NotaEncodeContext *enc, JSValueConst val, JSValueC
nota_stack_push(enc, replaced);
JSValue to_json = JS_GetPropertyStr(ctx, replaced, "toJSON");
if (JS_IsFunction(to_json)) {
if (JS_IsFunction(ctx, to_json)) {
JSValue result = JS_Call(ctx, to_json, replaced, 0, NULL);
JS_FreeValue(ctx, to_json);
if (!JS_IsException(result)) {
@@ -239,14 +238,14 @@ static void nota_encode_value(NotaEncodeContext *enc, JSValueConst val, JSValueC
uint32_t non_function_count = 0;
for (uint32_t i = 0; i < plen; i++) {
JSValue prop_val = JS_GetProperty(ctx, replaced, ptab[i].atom);
if (!JS_IsFunction(prop_val)) non_function_count++;
if (!JS_IsFunction(ctx, prop_val)) non_function_count++;
JS_FreeValue(ctx, prop_val);
}
nota_write_record(&enc->nb, non_function_count);
for (uint32_t i = 0; i < plen; i++) {
JSValue prop_val = JS_GetProperty(ctx, replaced, ptab[i].atom);
if (!JS_IsFunction(prop_val)) {
if (!JS_IsFunction(ctx, prop_val)) {
const char *prop_name = JS_AtomToCString(ctx, ptab[i].atom);
JSValue prop_key = JS_AtomToValue(ctx, ptab[i].atom);
nota_write_text(&enc->nb, prop_name);
@@ -338,7 +337,7 @@ static JSValue js_nota_encode(JSContext *ctx, JSValueConst this_val, int argc, J
enc->ctx = ctx;
enc->visitedStack = JS_NewArray(ctx);
enc->cycle = 0;
enc->replacer = (argc > 1 && JS_IsFunction(argv[1])) ? argv[1] : JS_NULL;
enc->replacer = (argc > 1 && JS_IsFunction(ctx, argv[1])) ? argv[1] : JS_NULL;
nota_buffer_init(&enc->nb, 128);
nota_encode_value(enc, argv[0], JS_NULL, JS_NewString(ctx, ""));
@@ -366,7 +365,7 @@ static JSValue js_nota_decode(JSContext *js, JSValueConst self, int argc, JSValu
if (nota == -1) return JS_EXCEPTION;
if (!nota) return JS_NULL;
JSValue reviver = (argc > 1 && JS_IsFunction(argv[1])) ? argv[1] : JS_NULL;
JSValue reviver = (argc > 1 && JS_IsFunction(js, argv[1])) ? argv[1] : JS_NULL;
JSValue ret;
JSValue holder = JS_NewObject(js);
js_do_nota_decode(js, &ret, (char*)nota, holder, JS_NewString(js, ""), reviver);

View File

@@ -1,5 +1,4 @@
var toml = use('toml')
var json = use('json')
var fd = use('fd')
var http = use('http')
@@ -8,7 +7,6 @@ var time = use('time')
var js = use('js')
var crypto = use('crypto')
var blob = use('blob')
var pkg_tools = use('package')
var os = use('os')
var link = use('link')
@@ -30,9 +28,9 @@ function put_into_cache(content, obj)
function ensure_dir(path) {
if (fd.stat(path).isDirectory) return
var parts = array(path, '/')
var current = starts_with(path, '/') ? '/' : ''
for (var i = 0; i < length(parts); i++) {
var parts = path.split('/')
var current = path.startsWith('/') ? '/' : ''
for (var i = 0; i < parts.length; i++) {
if (parts[i] == '') continue
current += parts[i] + '/'
if (!fd.stat(current).isDirectory) {
@@ -93,8 +91,8 @@ Shop.get_reports_dir = function() {
}
function get_import_package(name) {
var parts = array(name, '/')
if (length(parts) > 1)
var parts = name.split('/')
if (parts.length > 1)
return parts[0]
return null
@@ -102,24 +100,24 @@ function get_import_package(name) {
function is_internal_path(path)
{
return path && starts_with(path, 'internal/')
return path && path.startsWith('internal/')
}
function split_explicit_package_import(path)
{
if (!path) return null
var parts = array(path, '/')
var parts = path.split('/')
if (length(parts) < 2) return null
if (parts.length < 2) return null
var looks_explicit = starts_with(path, '/') || (parts[0] && search(parts[0], '.') != null)
var looks_explicit = path.startsWith('/') || (parts[0] && parts[0].includes('.'))
if (!looks_explicit) return null
// Find the longest prefix that is an installed package
for (var i = length(parts) - 1; i >= 1; i--) {
var pkg_candidate = text(array(parts, 0, i), '/')
var mod_path = text(array(parts, i), '/')
if (!mod_path || length(mod_path) == 0) continue
for (var i = parts.length - 1; i >= 1; i--) {
var pkg_candidate = parts.slice(0, i).join('/')
var mod_path = parts.slice(i).join('/')
if (!mod_path || mod_path.length == 0) continue
var candidate_dir = get_packages_dir() + '/' + safe_package_path(pkg_candidate)
if (fd.is_file(candidate_dir + '/cell.toml'))
@@ -143,7 +141,7 @@ function package_in_shop(package) {
function abs_path_to_package(package_dir)
{
if (!fd.is_file(package_dir + '/cell.toml'))
throw Error('Not a valid package directory (no cell.toml): ' + package_dir)
throw new Error('Not a valid package directory (no cell.toml): ' + package_dir)
var packages_prefix = get_packages_dir() + '/'
var core_dir = packages_prefix + core_package
@@ -160,15 +158,8 @@ function abs_path_to_package(package_dir)
}
}
if (starts_with(package_dir, packages_prefix))
return text(package_dir, length(packages_prefix))
// Check if this local path is the target of a link
// If so, return the canonical package name (link origin) instead
var link_origin = link.get_origin(package_dir)
if (link_origin) {
return link_origin
}
if (package_dir.startsWith(packages_prefix))
return package_dir.substring(packages_prefix.length)
// in this case, the dir is the package
if (package_in_shop(package_dir))
@@ -197,9 +188,9 @@ Shop.file_info = function(file) {
name: null
}
if (ends_with(file, MOD_EXT))
if (file.endsWith(MOD_EXT))
info.is_module = true
else if (ends_with(file, ACTOR_EXT))
else if (file.endsWith(ACTOR_EXT))
info.is_actor = true
// Find package directory and determine package name
@@ -208,11 +199,11 @@ Shop.file_info = function(file) {
info.package = abs_path_to_package(pkg_dir)
if (info.is_actor)
info.name = text(file, length(pkg_dir) + 1, length(file) - length(ACTOR_EXT))
info.name = file.substring(pkg_dir.length + 1, file.length - ACTOR_EXT.length)
else if (info.is_module)
info.name = text(file, length(pkg_dir) + 1, length(file) - length(MOD_EXT))
info.name = file.substring(pkg_dir.length + 1, file.length - MOD_EXT.length)
else
info.name = text(file, length(pkg_dir) + 1)
info.name = file.substring(pkg_dir.length + 1)
}
return info
@@ -220,9 +211,9 @@ Shop.file_info = function(file) {
function get_import_name(path)
{
var parts = array(path, '/')
if (length(parts) < 2) return null
return text(array(parts, 1), '/')
var parts = path.split('/')
if (parts.length < 2) return null
return parts.slice(1).join('/')
}
// Given a path like 'prosperon/sprite' and a package context,
@@ -248,14 +239,14 @@ function safe_package_path(pkg)
{
// For absolute paths, replace / with _ to create a valid directory name
// Also replace @ with _
if (pkg && starts_with(pkg, '/'))
return replace(replace(pkg, '/', '_'), '@', '_')
return replace(pkg, '@', '_')
if (pkg && pkg.startsWith('/'))
return pkg.replaceAll('/', '_').replaceAll('@', '_')
return pkg.replaceAll('@', '_')
}
function package_cache_path(pkg)
{
return global_shop_path + '/cache/' + replace(replace(pkg, '/', '_'), '@', '_')
return global_shop_path + '/cache/' + pkg.replaceAll('/', '_').replaceAll('@', '_')
}
function get_shared_lib_path()
@@ -275,7 +266,7 @@ Shop.load_lock = function() {
return {}
var content = text(fd.slurp(path))
if (!length(content)) return {}
if (!content.length) return {}
_lock = toml.decode(content)
@@ -285,26 +276,26 @@ Shop.load_lock = function() {
// Save lock.toml configuration (to global shop)
Shop.save_lock = function(lock) {
var path = global_shop_path + '/lock.toml'
fd.slurpwrite(path, stone(blob(toml.encode(lock))));
fd.slurpwrite(path, stone(new blob(toml.encode(lock))));
}
// Get information about how to resolve a package
// Local packages always start with /
Shop.resolve_package_info = function(pkg) {
if (starts_with(pkg, '/')) return 'local'
if (search(pkg, 'gitea') != null) return 'gitea'
if (pkg.startsWith('/')) return 'local'
if (pkg.includes('gitea')) return 'gitea'
return null
}
// Verify if a package name is valid and return status
Shop.verify_package_name = function(pkg) {
if (!pkg) throw Error("Empty package name")
if (pkg == 'local') throw Error("local is not a valid package name")
if (pkg == 'core') throw Error("core is not a valid package name")
if (!pkg) throw new Error("Empty package name")
if (pkg == 'local') throw new Error("local is not a valid package name")
if (pkg == 'core') throw new Error("core is not a valid package name")
if (search(pkg, '://') != null)
throw Error(`Invalid package name: ${pkg}; did you mean ${array(pkg, '://')[1]}?`)
if (pkg.includes('://'))
throw new Error(`Invalid package name: ${pkg}; did you mean ${pkg.split('://')[1]}?`)
}
// Convert module package to download URL
@@ -312,7 +303,7 @@ Shop.get_download_url = function(pkg, commit_hash) {
var info = Shop.resolve_package_info(pkg)
if (info == 'gitea') {
var parts = array(pkg, '/')
var parts = pkg.split('/')
var host = parts[0]
var user = parts[1]
var repo = parts[2]
@@ -328,7 +319,7 @@ Shop.get_api_url = function(pkg) {
var info = Shop.resolve_package_info(pkg)
if (info == 'gitea') {
var parts = array(pkg, '/')
var parts = pkg.split('/')
var host = parts[0]
var user = parts[1]
var repo = parts[2]
@@ -347,7 +338,7 @@ Shop.extract_commit_hash = function(pkg, response) {
var data = json.decode(response)
if (info == 'gitea') {
if (is_array(data))
if (isa(data, array))
data = data[0]
return data.commit && data.commit.id
}
@@ -362,6 +353,11 @@ var open_dls = {}
// These map to $_ properties in engine.cm
var SHOP_DEFAULT_INJECT = ['$self', '$overling', '$clock', '$delay', '$start', '$receiver', '$contact', '$portal', '$time_limit', '$couple', '$stop', '$unneeded', '$connection', '$fd']
function strip_dollar(name) {
if (name && name[0] == '$') return name.substring(1)
return name
}
// Decide what a given module is allowed to see.
// This is the capability gate - tweak as needed.
Shop.script_inject_for = function(file_info) {
@@ -378,25 +374,19 @@ Shop.get_script_capabilities = function(path) {
return Shop.script_inject_for(file_info)
}
function inject_env(inject) {
var env = {}
for (var i = 0; i < length(inject); i++) {
var inj = inject[i]
var key = trim(inj, '$')
if (key == 'fd') env[key] = fd
else env[key] = my$_[key]
}
return env
function inject_params(inject) {
if (!inject || !inject.length) return ''
return ', ' + inject.join(', ')
}
function inject_bindings_code(inject) {
var lines = []
for (var i = 0; i < length(inject); i++) {
var inj = inject[i]
var key = trim(inj, '$')
push(lines, `var $${key} = env["${key}"];`)
function inject_values(inject) {
var vals = []
for (var i = 0; i < inject.length; i++) {
var key = strip_dollar(inject[i])
if (key == 'fd') vals.push(fd)
else vals.push(my$_[key])
}
return text(lines, '\n')
return vals
}
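// --- hedged sketch, not part of the commit ---
// How an inject list becomes extra wrapper parameters plus the positional values
// the loader passes ('$clock' and '$fd' are just sample entries), e.g.:
//   inject_params(['$clock', '$fd'])         -> ', $clock, $fd'
//   inject_values(['$clock', '$fd']).length  -> 2 (the clock capability and fd)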
// Build the use function for a specific package context
@@ -407,21 +397,16 @@ function make_use_fn_code(pkg_arg) {
// for script forms, path is the canonical path of the module
var script_form = function(path, script, pkg, inject) {
var pkg_arg = pkg ? `'${pkg}'` : 'null'
var binds = inject_bindings_code(inject)
var params = inject_params(inject)
var fn = `(function setup_module(args, use, env){
def arg = args;
def PACKAGE = ${pkg_arg};
${binds}
${script}
})`
var fn = `(function setup_module(args, use${params}){ def arg = args; def PACKAGE = ${pkg_arg}; ${script}})`
return fn
}
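// --- hedged sketch, not part of the commit ---
// Shows the wrapper script_form() builds; 'demo/mod' and 'demo/pkg' are made-up
// names. With inject = ['$fd'] the wrapper gains a $fd parameter.
log.console(script_form('demo/mod', 'log.console(PACKAGE)', 'demo/pkg', ['$fd']))
// -> (function setup_module(args, use, $fd){ def arg = args; def PACKAGE = 'demo/pkg'; log.console(PACKAGE)})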
// Resolve module function, hashing it in the process
// path is the exact path to the script file
function resolve_mod_fn(path, pkg) {
if (!fd.is_file(path)) throw Error(`path ${path} is not a file`)
if (!fd.is_file(path)) throw new Error(`path ${path} is not a file`)
var file_info = Shop.file_info(path)
var file_pkg = file_info.package
@@ -429,7 +414,7 @@ function resolve_mod_fn(path, pkg) {
var content = text(fd.slurp(path))
var script = script_form(path, content, file_pkg, inject);
var obj = pull_from_cache(stone(blob(script)))
var obj = pull_from_cache(stone(new blob(script)))
if (obj) {
var fn = js.compile_unblob(obj)
return js.eval_compile(fn)
@@ -441,7 +426,7 @@ function resolve_mod_fn(path, pkg) {
var fn = js.compile(compile_name, script)
put_into_cache(stone(blob(script)), js.compile_blob(fn))
put_into_cache(stone(new blob(script)), js.compile_blob(fn))
return js.eval_compile(fn)
}
@@ -478,7 +463,7 @@ function resolve_locator(path, ctx)
// If ctx is an absolute path (starts with /), use it directly
// Otherwise, look it up in the packages directory
var ctx_dir
if (starts_with(ctx, '/')) {
if (ctx.startsWith('/')) {
ctx_dir = ctx
} else {
ctx_dir = get_packages_dir() + '/' + safe_package_path(ctx)
@@ -525,17 +510,25 @@ function resolve_locator(path, ctx)
// Generate symbol name for a C module file
// Uses the same format as Shop.c_symbol_for_file
// Symbol names are based on canonical package names, not link targets
// Resolves linked packages to their actual target first
function make_c_symbol(pkg, file) {
var pkg_safe = replace(replace(replace(pkg, '/', '_'), '.', '_'), '-', '_')
var file_safe = replace(replace(replace(file, '/', '_'), '.', '_'), '-', '_')
// Check if this package is linked - if so, use the link target for symbol name
var link_target = link.get_target(pkg)
var resolved_pkg = link_target ? link_target : pkg
var pkg_safe = resolved_pkg.replace(/\//g, '_').replace(/\./g, '_').replace(/-/g, '_')
var file_safe = file.replace(/\//g, '_').replace(/\./g, '_').replace(/-/g, '_')
return 'js_' + pkg_safe + '_' + file_safe + '_use'
}
// Get the library path for a package in .cell/lib
// Library names are based on canonical package names, not link targets
// Resolves linked packages to their actual target first
function get_lib_path(pkg) {
var lib_name = replace(replace(replace(pkg, '/', '_'), '.', '_'), '-', '_')
// Check if this package is linked - if so, use the link target
var link_target = link.get_target(pkg)
var resolved_pkg = link_target ? link_target : pkg
var lib_name = resolved_pkg.replace(/\//g, '_').replace(/\./g, '_').replace(/-/g, '_')
return global_shop_path + '/lib/' + lib_name + dylib_ext
}
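// --- hedged sketch, not part of the commit ---
// A remote package maps to a single shared library under the shop, e.g. for a
// made-up locator:
//   get_lib_path('gitea.example.com/jane/imgtools')
//   -> <shop>/lib/gitea_example_com_jane_imgtools<dylib_ext>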
@@ -549,7 +542,7 @@ Shop.open_package_dylib = function(pkg) {
var resolved_pkg = link_target ? link_target : pkg
var pkg_dir;
if (starts_with(resolved_pkg, '/')) {
if (resolved_pkg.startsWith('/')) {
pkg_dir = resolved_pkg
} else {
pkg_dir = get_packages_dir() + '/' + safe_package_path(resolved_pkg)
@@ -561,29 +554,20 @@ Shop.open_package_dylib = function(pkg) {
var content = text(fd.slurp(toml_path))
var cfg = toml.decode(content)
if (cfg.dependencies) {
arrfor(array(cfg.dependencies), function(alias, i) {
for (var alias in cfg.dependencies) {
var dep_pkg = cfg.dependencies[alias]
try {
Shop.open_package_dylib(dep_pkg)
} catch (dep_e) {
// Dependency dylib load failed, continue with others
}
})
}
} catch (e) {
// Error reading toml, continue
// Ignore errors reading cell.toml
}
}
var dl_path = get_lib_path(pkg)
if (fd.is_file(dl_path)) {
if (!open_dls[dl_path]) {
try {
open_dls[dl_path] = os.dylib_open(dl_path)
} catch (e) {
dylib_visited[pkg] = false
throw e
}
}
}
}
@@ -592,7 +576,8 @@ Shop.open_package_dylib = function(pkg) {
// 1. If package_context is null, only check core internal symbols
// 2. Otherwise: own package (internal then dylib) -> other packages (internal then dylib) -> core (internal only)
// Core is never loaded as a dynamic library via dlopen
function resolve_c_symbol(path, package_context) {
function resolve_c_symbol(path, package_context)
{
var explicit = split_explicit_package_import(path)
if (explicit) {
if (is_internal_path(explicit.path) && package_context && explicit.package != package_context)
@@ -623,7 +608,7 @@ function resolve_c_symbol(path, package_context) {
// If no package context, only check core internal symbols
if (!package_context || package_context == 'core') {
path = replace(path, '/', '_')
path = path.replace('/', '_')
var core_sym = `js_${path}_use`
if (os.internal_exists(core_sym)) {
return {
@@ -692,7 +677,7 @@ function resolve_c_symbol(path, package_context) {
}
// 3. Check core internal symbols (core is never a dynamic library)
var core_sym = `js_${replace(path, '/', '_')}_use`
var core_sym = `js_${path}_use`
if (os.internal_exists(core_sym)) {
return {
symbol: function() { return os.load_internal(core_sym) },
@@ -715,7 +700,7 @@ function resolve_module_info(path, package_context) {
var c_resolve = resolve_c_symbol(path, package_context) || {scope:999}
var mod_resolve = resolve_locator(path + '.cm', package_context) || {scope:999}
var min_scope = min(c_resolve.scope, mod_resolve.scope)
var min_scope = number.min(c_resolve.scope, mod_resolve.scope)
if (min_scope == 999)
return null
@@ -782,14 +767,6 @@ function make_use_fn(pkg) {
}
}
// Call a C module loader and execute the entrypoint
function call_c_module(c_resolve) {
var mod = c_resolve.symbol()
// if (is_function(mod))
// return mod()
return mod
}
function execute_module(info)
{
var c_resolve = info.c_resolve
@@ -800,31 +777,26 @@ function execute_module(info)
if (mod_resolve.scope < 900) {
var context = null
if (c_resolve.scope < 900) {
context = call_c_module(c_resolve)
context = c_resolve.symbol(null, $_)
}
// Get file info to determine inject list
var file_info = Shop.file_info(mod_resolve.path)
var inject = Shop.script_inject_for(file_info)
var env = inject_env(inject)
var vals = inject_values(inject)
var pkg = file_info.package
var use_fn = make_use_fn(pkg)
// Call with signature: setup_module(args, use, env)
// Call with signature: setup_module(args, use, ...capabilities)
// args is null for module loading
used = call(mod_resolve.symbol, context, [null, use_fn, env])
used = mod_resolve.symbol.call(context, null, use_fn, ...vals)
} else if (c_resolve.scope < 900) {
// C only
used = call_c_module(c_resolve)
used = c_resolve.symbol(null, my$_)
} else {
throw Error(`Module ${info.path} could not be found`)
}
// if (is_function(used))
// throw Error('C module loader returned a function; did you forget to call it?')
if (!used)
throw Error(`Module ${info} returned null`)
throw new Error(`Module ${info.path} could not be found`)
}
if (!used)
throw new Error(`Module ${info} returned null`)
// stone(used)
return used
@@ -834,7 +806,7 @@ function get_module(path, package_context) {
var info = resolve_module_info(path, package_context)
if (!info)
throw Error(`Module ${path} could not be found in ${package_context}`)
throw new Error(`Module ${path} could not be found in ${package_context}`)
return execute_module(info)
}
@@ -842,7 +814,7 @@ function get_module(path, package_context) {
Shop.use = function use(path, package_context) {
var info = resolve_module_info(path, package_context)
if (!info)
throw Error(`Module ${path} could not be found in ${package_context}`)
throw new Error(`Module ${path} could not be found in ${package_context}`)
if (use_cache[info.cache_key])
return use_cache[info.cache_key]
@@ -854,7 +826,7 @@ Shop.resolve_locator = resolve_locator
// Get cache path for a package and commit
function get_cache_path(pkg, commit) {
return global_shop_path + '/cache/' + replace(replace(pkg, '@','_'), '/','_') + '_' + commit + '.zip'
return global_shop_path + '/cache/' + pkg.replaceAll('@','_').replaceAll('/','_') + '_' + commit + '.zip'
}
function get_package_abs_dir(package)
@@ -890,12 +862,15 @@ function download_zip(pkg, commit_hash) {
return null
}
log.console("Downloading from " + download_url)
try {
var zip_blob = http.fetch(download_url)
log.console(`putting to ${cache_path}`)
fd.slurpwrite(cache_path, zip_blob)
log.console("Cached to " + cache_path)
return zip_blob
} catch (e) {
log.error("Download failed for " + pkg + ": " + e)
log.error(e)
return null
}
}
@@ -910,22 +885,19 @@ function get_cached_zip(pkg, commit_hash) {
}
// Fetch: Ensure the zip on disk matches what's in the lock file
// For local packages, this is a no-op
// For local packages, this is a no-op (nothing to fetch)
// For remote packages, downloads the zip if not present or hash mismatch
// Returns: { status: 'local'|'cached'|'downloaded'|'error', message: string }
// Returns true on success
Shop.fetch = function(pkg) {
var lock = Shop.load_lock()
var lock_entry = lock[pkg]
var info = Shop.resolve_package_info(pkg)
if (info == 'local') {
return { status: 'local' }
}
if (info == 'local') return null
// No lock entry - can't fetch without knowing what commit
if (!lock_entry || !lock_entry.commit) {
return { status: 'error', message: "No lock entry for " + pkg + " - run update first" }
}
if (!lock_entry || !lock_entry.commit)
throw new Error("No lock entry for " + pkg + " - run update first")
var commit = lock_entry.commit
var expected_hash = lock_entry.zip_hash
@@ -934,34 +906,18 @@ Shop.fetch = function(pkg) {
var zip_blob = get_cached_zip(pkg, commit)
if (zip_blob) {
// If we have a hash on record, verify it
if (expected_hash) {
// Verify hash matches
var actual_hash = text(crypto.blake2(zip_blob), 'h')
if (actual_hash == expected_hash) {
return { status: 'cached' }
}
if (actual_hash == expected_hash)
return true
log.console("Zip hash mismatch for " + pkg + ", re-fetching...")
} else {
// No hash stored yet - compute and store it
var actual_hash = text(crypto.blake2(zip_blob), 'h')
lock_entry.zip_hash = actual_hash
Shop.save_lock(lock)
return { status: 'cached' }
}
}
// Download the zip
var new_zip = download_zip(pkg, commit)
if (!new_zip) {
return { status: 'error', message: "Failed to download " + pkg }
}
download_zip(pkg, commit)
// Store the hash
var new_hash = text(crypto.blake2(new_zip), 'h')
lock_entry.zip_hash = new_hash
Shop.save_lock(lock)
return { status: 'downloaded' }
return true
}
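// --- hedged usage sketch, not part of the commit ---
// Typical flow for a remote package already recorded in lock.toml (the locator
// below is made up):
//   Shop.update('gitea.example.com/jane/imgtools')   // record the remote commit in the lock
//   Shop.fetch('gitea.example.com/jane/imgtools')    // ensure the cached zip matches that commit
//   Shop.extract('gitea.example.com/jane/imgtools')  // unpack it into the shop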
// Extract: Extract a package to its target directory
@@ -992,33 +948,13 @@ Shop.extract = function(pkg) {
return true
}
// Check if already extracted at correct commit
var lock = Shop.load_lock()
var lock_entry = lock[pkg]
if (lock_entry && lock_entry.commit) {
var extracted_commit_file = target_dir + '/.cell_commit'
if (fd.is_file(extracted_commit_file)) {
var extracted_commit = trim(text(fd.slurp(extracted_commit_file)))
if (extracted_commit == lock_entry.commit) {
// Already extracted at this commit, skip
return true
}
}
}
var zip_blob = get_package_zip(pkg)
if (!zip_blob)
throw Error("No zip blob available for " + pkg)
throw new Error("No zip blob available for " + pkg)
// Extract zip for remote package
install_zip(zip_blob, target_dir)
// Write marker file with the extracted commit
if (lock_entry && lock_entry.commit) {
fd.slurpwrite(target_dir + '/.cell_commit', stone(blob(lock_entry.commit)))
}
return true
}
@@ -1050,21 +986,9 @@ Shop.update = function(pkg) {
log.console(`checking ${pkg}`)
if (info == 'local') {
// Check if local path exists
if (!fd.is_dir(pkg)) {
log.console(` Local path does not exist: ${pkg}`)
return null
}
// Local packages always get a lock entry
var new_entry = {
type: 'local',
if (info == 'local') return {
updated: time.number()
}
lock[pkg] = new_entry
Shop.save_lock(lock)
return new_entry
}
var local_commit = lock_entry ? lock_entry.commit : null
var remote_commit = fetch_remote_hash(pkg)
@@ -1072,14 +996,14 @@ Shop.update = function(pkg) {
log.console(`local commit: ${local_commit}`)
log.console(`remote commit: ${remote_commit}`)
if (local_commit == remote_commit)
return null
if (!remote_commit) {
log.error("Could not resolve commit for " + pkg)
return null
}
if (local_commit == remote_commit)
return null
var new_entry = {
type: info,
commit: remote_commit,
@@ -1094,7 +1018,7 @@ Shop.update = function(pkg) {
function install_zip(zip_blob, target_dir) {
var zip = miniz.read(zip_blob)
if (!zip) throw Error("Failed to read zip archive")
if (!zip) throw new Error("Failed to read zip archive")
if (fd.is_link(target_dir)) fd.unlink(target_dir)
if (fd.is_dir(target_dir)) fd.rmdir(target_dir, 1)
@@ -1103,27 +1027,20 @@ function install_zip(zip_blob, target_dir) {
ensure_dir(target_dir)
var count = zip.count()
var created_dirs = {}
for (var i = 0; i < count; i++) {
if (zip.is_directory(i)) continue
var filename = zip.get_filename(i)
var slash_pos = search(filename, '/')
if (slash_pos == null) continue
if (slash_pos + 1 >= length(filename)) continue
var rel_path = text(filename, slash_pos + 1)
var parts = filename.split('/')
if (parts.length <= 1) continue
parts.shift()
var rel_path = parts.join('/')
var full_path = target_dir + '/' + rel_path
var dir_path = fd.dirname(full_path)
var dir_path = full_path.substring(0, full_path.lastIndexOf('/'))
if (!created_dirs[dir_path]) {
ensure_dir(dir_path)
created_dirs[dir_path] = true
}
var file_data = zip.slurp(filename)
stone(file_data)
fd.slurpwrite(full_path, file_data)
fd.slurpwrite(full_path, zip.slurp(filename))
}
}
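// --- hedged sketch, not part of the commit ---
// Zip entries carry the repo's top-level folder; install_zip drops it before
// writing, so a made-up entry like this lands under target_dir directly.
var sample_entry_parts = 'imgtools-main/src/png.c'.split('/')
sample_entry_parts.shift()
log.console(`would write to <target_dir>/${sample_entry_parts.join('/')}`)  // -> src/png.c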
@@ -1146,14 +1063,14 @@ Shop.get = function(pkg) {
if (!lock[pkg]) {
var info = Shop.resolve_package_info(pkg)
if (!info) {
throw Error("Invalid package: " + pkg)
throw new Error("Invalid package: " + pkg)
}
var commit = null
if (info != 'local') {
commit = fetch_remote_hash(pkg)
if (!commit) {
throw Error("Could not resolve commit for " + pkg)
throw new Error("Could not resolve commit for " + pkg)
}
}
@@ -1195,14 +1112,12 @@ Shop.module_reload = function(path, package) {
var old = use_cache[cache_key]
var newmod = get_module(path, package)
arrfor(array(newmod), function(i, idx) {
for (var i in newmod)
old[i] = newmod[i]
})
arrfor(array(old), function(i, idx) {
for (var i in old)
if (!(i in newmod))
old[i] = null
})
}
function get_package_scripts(package)
@@ -1210,10 +1125,10 @@ function get_package_scripts(package)
var files = pkg_tools.list_files(package)
var scripts = []
for (var i = 0; i < length(files); i++) {
for (var i = 0; i < files.length; i++) {
var file = files[i]
if (ends_with(file, '.cm') || ends_with(file, '.ce')) {
push(scripts, file)
if (file.endsWith('.cm') || file.endsWith('.ce')) {
scripts.push(file)
}
}
@@ -1226,9 +1141,8 @@ Shop.build_package_scripts = function(package)
var scripts = get_package_scripts(package)
var pkg_dir = get_package_abs_dir(package)
arrfor(scripts, function(script, i) {
for (var script of scripts)
resolve_mod_fn(pkg_dir + '/' + script, package)
})
}
Shop.list_packages = function()
@@ -1260,22 +1174,22 @@ Shop.get_package_dir = function(pkg) {
// e.g., c_symbol_for_file('gitea.pockle.world/john/prosperon', 'sprite.c')
// -> 'js_gitea_pockle_world_john_prosperon_sprite_use'
Shop.c_symbol_for_file = function(pkg, file) {
var pkg_safe = replace(replace(replace(pkg, '/', '_'), '.', '_'), '-', '_')
var file_safe = replace(replace(fd.stem(file), '/', '_'), '.', '_')
var pkg_safe = pkg.replace(/\//g, '_').replace(/\./g, '_').replace(/-/g, '_')
var file_safe = file.substring(0, file.lastIndexOf('.')).replace(/\//g, '_').replace(/\./g, '_').replace(/-/g, '_')
return 'js_' + pkg_safe + '_' + file_safe + '_use'
}
// Generate C symbol prefix for a package
// e.g., c_symbol_prefix('gitea.pockle.world/john/prosperon') -> 'js_gitea_pockle_world_john_prosperon_'
Shop.c_symbol_prefix = function(pkg) {
var pkg_safe = replace(replace(replace(pkg, '/', '_'), '.', '_'), '-', '_')
var pkg_safe = pkg.replace(/\//g, '_').replace(/\./g, '_').replace(/-/g, '_')
return 'js_' + pkg_safe + '_'
}
// Get the library name for a package (without extension)
// e.g., 'gitea.pockle.world/john/prosperon' -> 'gitea_pockle_world_john_prosperon'
Shop.lib_name_for_package = function(pkg) {
return replace(replace(replace(pkg, '/', '_'), '.', '_'), '-', '_')
return pkg.replace(/\//g, '_').replace(/\./g, '_').replace(/-/g, '_')
}
// Returns { ok: bool, results: [{pkg, ok, error}] }
@@ -1284,12 +1198,12 @@ Shop.audit_packages = function() {
var bad = []
arrfor(packages, function(package, i) {
if (package == 'core') return
if (fd.is_dir(package)) return
if (fetch_remote_hash(package)) return
push(bad, package)
})
for (var package of packages) {
if (package == 'core') continue
if (fd.is_dir(package)) continue
if (fetch_remote_hash(package)) continue
bad.push(package)
}
return bad
}
@@ -1301,16 +1215,16 @@ Shop.parse_package = function(locator) {
// Strip version suffix if present
var clean = locator
if (search(locator, '@') != null) {
clean = array(locator, '@')[0]
if (locator.includes('@')) {
clean = locator.split('@')[0]
}
var info = Shop.resolve_package_info(clean)
if (!info) return null
// Extract package name (last component of path)
var parts = array(clean, '/')
var name = parts[length(parts) - 1]
var parts = clean.split('/')
var name = parts[parts.length - 1]
return {
path: clean,

View File

@@ -24,7 +24,7 @@ function get_pkg_dir(package_name) {
if (!package_name) {
return fd.realpath('.')
}
if (starts_with(package_name, '/')) {
if (package_name.startsWith('/')) {
return package_name
}
var shop = use('internal/shop')
@@ -35,9 +35,9 @@ function get_pkg_dir(package_name) {
function ensure_dir(path) {
if (fd.is_dir(path)) return true
var parts = array(path, '/')
var current = starts_with(path, '/') ? '/' : ''
for (var i = 0; i < length(parts); i++) {
var parts = path.split('/')
var current = path.startsWith('/') ? '/' : ''
for (var i = 0; i < parts.length; i++) {
if (parts[i] == '') continue
current += parts[i] + '/'
if (!fd.is_dir(current)) {

27
link.ce
View File

@@ -17,7 +17,7 @@ var shop = use('internal/shop')
var fd = use('fd')
var toml = use('toml')
if (length(args) < 1) {
if (args.length < 1) {
log.console("Usage: link <command> [args] or link [package] <target>")
log.console("Commands:")
log.console(" list List all active links")
@@ -35,25 +35,25 @@ var cmd = args[0]
if (cmd == 'list') {
var links = link.load()
var count = 0
arrfor(array(links), function(k) {
for (var k in links) {
log.console(k + " -> " + links[k])
count++
})
}
if (count == 0) log.console("No links.")
} else if (cmd == 'sync') {
log.console("Syncing links...")
var result = link.sync_all(shop)
log.console("Synced " + result.synced + " link(s)")
if (length(result.errors) > 0) {
if (result.errors.length > 0) {
log.console("Errors:")
for (var i = 0; i < length(result.errors); i++) {
for (var i = 0; i < result.errors.length; i++) {
log.console(" " + result.errors[i])
}
}
} else if (cmd == 'delete' || cmd == 'rm') {
if (length(args) < 2) {
if (args.length < 2) {
log.console("Usage: link delete <package>")
$stop()
return
@@ -92,7 +92,7 @@ if (cmd == 'list') {
}
var arg1 = args[start_idx]
var arg2 = (length(args) > start_idx + 1) ? args[start_idx + 1] : null
var arg2 = (args.length > start_idx + 1) ? args[start_idx + 1] : null
if (!arg1) {
log.console("Error: target or package required")
@@ -108,13 +108,13 @@ if (cmd == 'list') {
// Resolve target if it's a local path
if (target == '.' || fd.is_dir(target)) {
target = fd.realpath(target)
} else if (starts_with(target, './') || starts_with(target, '../')) {
} else if (target.startsWith('./') || target.startsWith('../')) {
// Relative path that doesn't exist yet - try to resolve anyway
var cwd = fd.realpath('.')
if (starts_with(target, './')) {
target = cwd + text(target, 1)
if (target.startsWith('./')) {
target = cwd + target.substring(1)
} else {
// For ../ paths, var fd.realpath handle it if possible
// For ../ paths, let fd.realpath handle it if possible
target = fd.realpath(target) || target
}
}
@@ -127,7 +127,7 @@ if (cmd == 'list') {
// Resolve path
if (target == '.' || fd.is_dir(target)) {
target = fd.realpath(target)
} else if (starts_with(target, './') || starts_with(target, '../')) {
} else if (target.startsWith('./') || target.startsWith('../')) {
target = fd.realpath(target) || target
}
@@ -158,7 +158,7 @@ if (cmd == 'list') {
}
// Validate: if target is a local path, it must have cell.toml
if (starts_with(target, '/')) {
if (target.startsWith('/')) {
if (!fd.is_file(target + '/cell.toml')) {
log.console("Error: " + target + " is not a valid package (no cell.toml)")
$stop()
@@ -171,7 +171,6 @@ if (cmd == 'list') {
link.add(pkg_name, target, shop)
} catch (e) {
log.console("Error: " + e.message)
log.error(e)
$stop()
return
}

114
link.cm
View File

@@ -21,9 +21,9 @@ function get_packages_dir() {
// return the safe path for the package
function safe_package_path(pkg) {
// For absolute paths, replace / with _ to create a valid directory name
if (pkg && starts_with(pkg, '/'))
return replace(replace(pkg, '/', '_'), '@', '_')
return replace(pkg, '@', '_')
if (pkg && pkg.startsWith('/'))
return pkg.replaceAll('/', '_').replaceAll('@', '_')
return pkg.replaceAll('@', '_')
}
function get_package_abs_dir(package) {
@@ -32,9 +32,9 @@ function get_package_abs_dir(package) {
function ensure_dir(path) {
if (fd.stat(path).isDirectory) return
var parts = array(path, '/')
var current = starts_with(path, '/') ? '/' : ''
for (var i = 0; i < length(parts); i++) {
var parts = path.split('/')
var current = path.startsWith('/') ? '/' : ''
for (var i = 0; i < parts.length; i++) {
if (parts[i] == '') continue
current += parts[i] + '/'
if (!fd.stat(current).isDirectory) {
@@ -47,7 +47,7 @@ function ensure_dir(path) {
// If target is a local path (starts with /), return it directly
// If target is a package name, return the package directory
function resolve_link_target(target) {
if (starts_with(target, '/')) {
if (target.startsWith('/')) {
return target
}
// Target is another package - resolve to its directory
@@ -81,23 +81,21 @@ Link.save = function(links) {
link_cache = links
var cfg = { links: links }
var path = get_links_path()
var b = blob(toml.encode(cfg))
stone(b)
fd.slurpwrite(path, b)
fd.slurpwrite(path, new blob(toml.encode(cfg)))
}
Link.add = function(canonical, target, shop) {
// Validate canonical package exists in shop
var lock = shop.load_lock()
if (!lock[canonical]) {
throw Error('Package ' + canonical + ' is not installed. Install it first with: cell get ' + canonical)
throw new Error('Package ' + canonical + ' is not installed. Install it first with: cell get ' + canonical)
}
// Validate target is a valid package
if (starts_with(target, '/')) {
if (target.startsWith('/')) {
// Local path - must have cell.toml
if (!fd.is_file(target + '/cell.toml')) {
throw Error('Target ' + target + ' is not a valid package (no cell.toml)')
throw new Error('Target ' + target + ' is not a valid package (no cell.toml)')
}
} else {
// Remote package target - ensure it's installed
@@ -111,37 +109,6 @@ Link.add = function(canonical, target, shop) {
// Create the symlink immediately
Link.sync_one(canonical, target, shop)
// Install dependencies of the linked package
// Read the target's cell.toml to find its dependencies
var target_path = starts_with(target, '/') ? target : get_package_abs_dir(target)
var toml_path = target_path + '/cell.toml'
if (fd.is_file(toml_path)) {
try {
var content = text(fd.slurp(toml_path))
var cfg = toml.decode(content)
if (cfg.dependencies) {
arrfor(array(cfg.dependencies), function(alias) {
var dep_locator = cfg.dependencies[alias]
// Skip local dependencies that don't exist
if (starts_with(dep_locator, '/') && !fd.is_dir(dep_locator)) {
log.console(" Skipping missing local dependency: " + dep_locator)
return
}
// Install the dependency if not already in shop
try {
shop.get(dep_locator)
shop.extract(dep_locator)
} catch (e) {
log.console(` Warning: Could not install dependency ${dep_locator}: ${e.message}`)
log.error(e)
}
})
}
} catch (e) {
log.console(` Warning: Could not read dependencies from ${toml_path}`)
}
}
log.console("Linked " + canonical + " -> " + target)
return true
}
@@ -166,12 +133,12 @@ Link.remove = function(canonical) {
Link.clear = function() {
// Remove all symlinks first
var links = Link.load()
arrfor(array(links), function(canonical) {
for (var canonical in links) {
var target_dir = get_package_abs_dir(canonical)
if (fd.is_link(target_dir)) {
fd.unlink(target_dir)
}
})
}
Link.save({})
log.console("Cleared all links")
@@ -184,7 +151,7 @@ Link.sync_one = function(canonical, target, shop) {
var link_target = resolve_link_target(target)
// Ensure parent directories exist
var parent = fd.dirname(target_dir)
var parent = target_dir.substring(0, target_dir.lastIndexOf('/'))
ensure_dir(parent)
// Check current state
@@ -210,58 +177,32 @@ Link.sync_one = function(canonical, target, shop) {
return true
}
// Sync all links - ensure all symlinks are in place and dependencies are installed
// Sync all links - ensure all symlinks are in place
Link.sync_all = function(shop) {
var links = Link.load()
var count = 0
var errors = []
arrfor(array(links), function(canonical) {
for (var canonical in links) {
var target = links[canonical]
try {
// Validate target exists
var link_target = resolve_link_target(target)
if (!fd.is_dir(link_target)) {
push(errors, canonical + ': target ' + link_target + ' does not exist')
return
errors.push(canonical + ': target ' + link_target + ' does not exist')
continue
}
if (!fd.is_file(link_target + '/cell.toml')) {
push(errors, canonical + ': target ' + link_target + ' is not a valid package')
return
errors.push(canonical + ': target ' + link_target + ' is not a valid package')
continue
}
Link.sync_one(canonical, target, shop)
// Install dependencies of the linked package
var toml_path = link_target + '/cell.toml'
try {
var content = text(fd.slurp(toml_path))
var cfg = toml.decode(content)
if (cfg.dependencies) {
arrfor(array(cfg.dependencies), function(alias) {
var dep_locator = cfg.dependencies[alias]
// Skip local dependencies that don't exist
if (starts_with(dep_locator, '/') && !fd.is_dir(dep_locator)) {
return
}
// Install the dependency if not already in shop
try {
shop.get(dep_locator)
shop.extract(dep_locator)
} catch (e) {
// Silently continue - dependency may already be installed
}
})
}
} catch (e) {
// Could not read dependencies - continue anyway
}
count++
} catch (e) {
push(errors, canonical + ': ' + e.message)
errors.push(canonical + ': ' + e.message)
}
}
})
return { synced: count, errors: errors }
}
@@ -278,15 +219,4 @@ Link.get_target = function(canonical) {
return links[canonical] || null
}
// Get the canonical package name that links to this target (reverse lookup)
// Returns null if no package links to this target
Link.get_origin = function(target) {
var links = Link.load()
var found = null
arrfor(array(links), function(origin) {
if (links[origin] == target) found = origin
})
return found
}
return Link

202
list.ce
View File

@@ -1,168 +1,84 @@
// cell list [<scope>] - List packages and dependencies
//
// Usage:
// cell list List dependencies of current package
// cell list shop List all packages in shop with status
// cell list <locator> List dependency tree for a package
// list installed packages
// cell list -> list packages installed in this package
// cell list all -> list all packages, including transitive dependencies pulled in by installed packages
// cell list package <name> -> list the dependencies of package <name>
var shop = use('internal/shop')
var pkg = use('package')
var link = use('link')
var fd = use('fd')
var mode = 'local'
var target_pkg = null
if (args && length(args) > 0) {
if (args[0] == 'shop') {
if (args && args.length > 0) {
if (args[0] == 'all') {
mode = 'all'
} else if (args[0] == 'shop') {
mode = 'shop'
} else if (args[0] == '--help' || args[0] == '-h') {
log.console("Usage: cell list [<scope>]")
log.console("")
log.console("List packages and dependencies.")
log.console("")
log.console("Scopes:")
log.console(" (none) List dependencies of current package")
log.console(" shop List all packages in shop with status")
log.console(" <locator> List dependency tree for a package")
} else if (args[0] == 'package') {
if (args.length < 2) {
log.console("Usage: cell list package <name>")
$stop()
} else {
return
}
mode = 'package'
target_pkg = args[0]
// Resolve local paths
if (target_pkg == '.' || starts_with(target_pkg, './') || starts_with(target_pkg, '../') || fd.is_dir(target_pkg)) {
var resolved = fd.realpath(target_pkg)
if (resolved) {
target_pkg = resolved
}
}
}
}
var links = link.load()
var lock = shop.load_lock()
function print_deps(ctx, indent) {
indent = indent || ""
var deps
try {
deps = pkg.dependencies(ctx)
} catch (e) {
log.console(indent + " (could not read dependencies)")
target_pkg = args[1]
} else {
log.console("Usage:")
log.console(" cell list : list local packages")
log.console(" cell list all : list all recursive packages")
log.console(" cell list package <name>: list dependencies of <name>")
log.console(" cell list shop : list all packages in shop")
$stop()
return
}
if (!deps) {
log.console(indent + " (none)")
return
}
var aliases = array(deps)
aliases = sort(aliases)
if (length(aliases) == 0) {
log.console(indent + " (none)")
return
}
for (var i = 0; i < length(aliases); i++) {
var alias = aliases[i]
var locator = deps[alias]
var link_target = links[locator]
var lock_entry = lock[locator]
var line = indent + " " + alias
if (alias != locator) {
line += " -> " + locator
}
// Add status indicators
var status = []
if (link_target) {
push(status, "linked -> " + link_target)
}
if (lock_entry && lock_entry.commit) {
push(status, "@" + text(lock_entry.commit, 0, 8))
}
if (lock_entry && lock_entry.type == 'local') {
push(status, "local")
}
if (!lock_entry) {
push(status, "not installed")
}
if (length(status) > 0) {
line += " [" + text(status, ", ") + "]"
}
log.console(line)
}
}
if (mode == 'local') {
log.console("Dependencies:")
log.console("Installed Packages (Local):")
print_deps(null)
} else if (mode == 'package') {
log.console("Dependencies for " + target_pkg + ":")
print_deps(target_pkg)
// Resolve alias to canonical package path
var canon = shop.get_canonical_package(target_pkg, null)
if (!canon) {
log.console("Package '" + target_pkg + "' not found in local dependencies.")
} else {
log.console("Dependencies for " + target_pkg + " (" + canon + "):")
print_deps(canon)
}
} else if (mode == 'all') {
log.console("All Packages:")
var all = shop.list_packages(null)
// list_packages returns an array of package strings (locators)
// Sort them for stable, readable output
all.sort()
for (var i = 0; i < all.length; i++) {
log.console(" " + all[i])
}
if (all.length == 0) log.console(" (none)")
} else if (mode == 'shop') {
log.console("Shop packages:")
log.console("")
log.console("Shop Packages:")
var all = shop.list_packages()
var packages = shop.list_packages()
if (length(packages) == 0) {
if (all.length == 0)
log.console(" (none)")
else
all.forEach(package => log.console(" " + package))
}
function print_deps(ctx) {
var deps = pkg.dependencies(ctx)
var aliases = []
for (var k in deps) aliases.push(k)
aliases.sort()
if (aliases.length == 0) {
log.console(" (none)")
} else {
packages = sort(packages)
// Group by type
var local_pkgs = []
var linked_pkgs = []
var remote_pkgs = []
arrfor(packages, function(p) {
if (p == 'core') return
var lock_entry = lock[p]
var link_target = links[p]
if (link_target) {
push(linked_pkgs, p)
} else if (lock_entry && lock_entry.type == 'local') {
push(local_pkgs, p)
} else {
push(remote_pkgs, p)
for (var i = 0; i < aliases.length; i++) {
var alias = aliases[i]
var locator = deps[alias]
log.console(" " + alias + " -> " + locator)
}
})
if (length(linked_pkgs) > 0) {
log.console("Linked packages:")
arrfor(linked_pkgs, function(p) {
var target = links[p]
log.console(" " + p + " -> " + target)
})
log.console("")
}
if (length(local_pkgs) > 0) {
log.console("Local packages:")
arrfor(local_pkgs, function(p) {
log.console(" " + p)
})
log.console("")
}
if (length(remote_pkgs) > 0) {
log.console("Remote packages:")
arrfor(remote_pkgs, function(p) {
var lock_entry = lock[p]
var commit = lock_entry && lock_entry.commit ? " @" + text(lock_entry.commit, 0, 8) : ""
log.console(" " + p + commit)
})
log.console("")
}
log.console("Total: " + text(length(packages)) + " package(s)")
}
}

12
ls.ce
View File

@@ -11,22 +11,22 @@ var modules = package.list_modules(pkg)
var programs = package.list_programs(pkg)
log.console("Modules in " + pkg + ":")
modules = sort(modules)
if (length(modules) == 0) {
modules.sort()
if (modules.length == 0) {
log.console(" (none)")
} else {
for (var i = 0; i < length(modules); i++) {
for (var i = 0; i < modules.length; i++) {
log.console(" " + modules[i])
}
}
log.console("")
log.console("Programs in " + pkg + ":")
programs = sort(programs)
if (length(programs) == 0) {
programs.sort()
if (programs.length == 0) {
log.console(" (none)")
} else {
for (var i = 0; i < length(programs); i++) {
for (var i = 0; i < programs.length; i++) {
log.console(" " + programs[i])
}
}

View File

@@ -145,7 +145,7 @@ static JSValue js_enet_host_service(JSContext *ctx, JSValueConst this_val, int a
ENetHost *host = JS_GetOpaque(this_val, enet_host_id);
if (!host) return JS_EXCEPTION;
if (argc < 1 || !JS_IsFunction(argv[0])) return JS_ThrowTypeError(ctx, "Expected a callback function as first argument");
if (argc < 1 || !JS_IsFunction(ctx, argv[0])) return JS_ThrowTypeError(ctx, "Expected a callback function as first argument");
JSValue callback = JS_DupValue(ctx, argv[0]);
double secs;
@@ -437,8 +437,8 @@ static const JSCFunctionListEntry js_enet_host_funcs[] = {
JS_CFUNC_DEF("connect", 2, js_enet_host_connect),
JS_CFUNC_DEF("flush", 0, js_enet_host_flush),
JS_CFUNC_DEF("broadcast", 1, js_enet_host_broadcast),
// JS_CGETSET_DEF("port", js_enet_host_get_port, NULL),
// JS_CGETSET_DEF("address", js_enet_host_get_address, NULL),
JS_CGETSET_DEF("port", js_enet_host_get_port, NULL),
JS_CGETSET_DEF("address", js_enet_host_get_address, NULL),
};
static JSValue js_enet_peer_get_rtt(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
@@ -552,20 +552,20 @@ static const JSCFunctionListEntry js_enet_peer_funcs[] = {
JS_CFUNC_DEF("ping", 0, js_enet_peer_ping),
JS_CFUNC_DEF("throttle_configure", 3, js_enet_peer_throttle_configure),
JS_CFUNC_DEF("timeout", 3, js_enet_peer_timeout),
// JS_CGETSET_DEF("rtt", js_enet_peer_get_rtt, NULL),
// JS_CGETSET_DEF("incoming_bandwidth", js_enet_peer_get_incoming_bandwidth, NULL),
// JS_CGETSET_DEF("outgoing_bandwidth", js_enet_peer_get_outgoing_bandwidth, NULL),
// JS_CGETSET_DEF("last_send_time", js_enet_peer_get_last_send_time, NULL),
// JS_CGETSET_DEF("last_receive_time", js_enet_peer_get_last_receive_time, NULL),
// JS_CGETSET_DEF("mtu", js_enet_peer_get_mtu, NULL),
// JS_CGETSET_DEF("outgoing_data_total", js_enet_peer_get_outgoing_data_total, NULL),
// JS_CGETSET_DEF("incoming_data_total", js_enet_peer_get_incoming_data_total, NULL),
// JS_CGETSET_DEF("rtt_variance", js_enet_peer_get_rtt_variance, NULL),
// JS_CGETSET_DEF("packet_loss", js_enet_peer_get_packet_loss, NULL),
// JS_CGETSET_DEF("state", js_enet_peer_get_state, NULL),
// JS_CGETSET_DEF("reliable_data_in_transit", js_enet_peer_get_reliable_data_in_transit, NULL),
// JS_CGETSET_DEF("port", js_enet_peer_get_port, NULL),
// JS_CGETSET_DEF("address", js_enet_peer_get_address, NULL),
JS_CGETSET_DEF("rtt", js_enet_peer_get_rtt, NULL),
JS_CGETSET_DEF("incoming_bandwidth", js_enet_peer_get_incoming_bandwidth, NULL),
JS_CGETSET_DEF("outgoing_bandwidth", js_enet_peer_get_outgoing_bandwidth, NULL),
JS_CGETSET_DEF("last_send_time", js_enet_peer_get_last_send_time, NULL),
JS_CGETSET_DEF("last_receive_time", js_enet_peer_get_last_receive_time, NULL),
JS_CGETSET_DEF("mtu", js_enet_peer_get_mtu, NULL),
JS_CGETSET_DEF("outgoing_data_total", js_enet_peer_get_outgoing_data_total, NULL),
JS_CGETSET_DEF("incoming_data_total", js_enet_peer_get_incoming_data_total, NULL),
JS_CGETSET_DEF("rtt_variance", js_enet_peer_get_rtt_variance, NULL),
JS_CGETSET_DEF("packet_loss", js_enet_peer_get_packet_loss, NULL),
JS_CGETSET_DEF("state", js_enet_peer_get_state, NULL),
JS_CGETSET_DEF("reliable_data_in_transit", js_enet_peer_get_reliable_data_in_transit, NULL),
JS_CGETSET_DEF("port", js_enet_peer_get_port, NULL),
JS_CGETSET_DEF("address", js_enet_peer_get_address, NULL),
};
JSValue js_enet_use(JSContext *ctx)

34
pack.ce
View File

@@ -14,7 +14,7 @@ var output_name = 'app'
var target_package = null
var buildtype = 'debug'
if (length(args) < 1) {
if (args.length < 1) {
log.error('Usage: cell pack <package> [options]')
log.error('')
log.error('Options:')
@@ -22,30 +22,30 @@ if (length(args) < 1) {
log.error(' -t, --target <target> Cross-compile for target platform')
log.error(' -b, --buildtype <type> Build type: release, debug, minsize (default: release)')
log.error('')
log.error('Available targets: ' + text(build.list_targets(), ', '))
log.error('Available targets: ' + build.list_targets().join(', '))
$stop()
return
}
target_package = args[0]
for (var i = 1; i < length(args); i++) {
for (var i = 1; i < args.length; i++) {
if (args[i] == '-t' || args[i] == '--target') {
if (i + 1 < length(args)) {
if (i + 1 < args.length) {
target = args[++i]
} else {
log.error('-t requires a target')
$stop()
}
} else if (args[i] == '-o' || args[i] == '--output') {
if (i + 1 < length(args)) {
if (i + 1 < args.length) {
output_name = args[++i]
} else {
log.error('-o requires an output name')
$stop()
}
} else if (args[i] == '-b' || args[i] == '--buildtype') {
if (i + 1 < length(args)) {
if (i + 1 < args.length) {
buildtype = args[++i]
if (buildtype != 'release' && buildtype != 'debug' && buildtype != 'minsize') {
log.error('Invalid buildtype: ' + buildtype + '. Must be release, debug, or minsize')
@@ -63,7 +63,7 @@ for (var i = 1; i < length(args); i++) {
log.console(' -t, --target <target> Cross-compile for target platform')
log.console(' -b, --buildtype <type> Build type: release, debug, minsize (default: release)')
log.console('')
log.console('Available targets: ' + text(build.list_targets(), ', '))
log.console('Available targets: ' + build.list_targets().join(', '))
$stop()
} else {
log.error('Unknown option: ' + args[i])
@@ -79,7 +79,7 @@ if (!target) {
if (target && !build.has_target(target)) {
log.error('Invalid target: ' + target)
log.console('Available targets: ' + text(build.list_targets(), ', '))
log.console('Available targets: ' + build.list_targets().join(', '))
$stop()
}
@@ -87,29 +87,29 @@ if (target && !build.has_target(target)) {
var packages = ['core']
var deps = pkg_tools.gather_dependencies(target_package)
for (var i = 0; i < length(deps); i++) {
push(packages, deps[i])
for (var i = 0; i < deps.length; i++) {
packages.push(deps[i])
}
push(packages, target_package)
packages.push(target_package)
// Remove duplicates
var unique_packages = []
var seen = {}
for (var i = 0; i < length(packages); i++) {
for (var i = 0; i < packages.length; i++) {
if (!seen[packages[i]]) {
seen[packages[i]] = true
push(unique_packages, packages[i])
unique_packages.push(packages[i])
}
}
packages = unique_packages
log.console('Preparing packages...')
arrfor(packages, function(package) {
if (package == 'core') return
for (var package of packages) {
if (package == 'core') continue
shop.extract(package)
})
}
log.console('Building static binary from ' + text(length(packages)) + ' packages: ' + text(packages, ', '))
log.console('Building static binary from ' + text(packages.length) + ' packages: ' + packages.join(', '))
try {
var result = build.build_static(packages, target, output_name, buildtype)

View File

@@ -1,21 +1,17 @@
var package = {}
var fd = use('fd')
var toml = use('toml')
var json = use('json')
var os = use('os')
var link = use('link')
// Cache for loaded configs to avoid toml re-parsing corruption
var config_cache = {}
// Convert package name to a safe directory name
// For absolute paths (local packages), replace / with _
// For remote packages, keep slashes as they use nested directories
function safe_package_path(pkg) {
if (!pkg) return pkg
if (starts_with(pkg, '/'))
return replace(replace(pkg, '/', '_'), '@', '_')
return replace(pkg, '@', '_')
if (pkg.startsWith('/'))
return pkg.replaceAll('/', '_').replaceAll('@', '_')
return pkg.replaceAll('@', '_')
}
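A minimal sketch of the mapping this file-local helper applies, using hypothetical package names; since safe_package_path is not exported, the snippet mirrors its rule instead of calling it:

// illustrative only: mirrors safe_package_path's mapping rule
function demo_safe_path(p) {
  if (p.startsWith('/')) return p.replaceAll('/', '_').replaceAll('@', '_')
  return p.replaceAll('@', '_')
}
log.console(demo_safe_path('/home/me/mypkg'))              // _home_me_mypkg
log.console(demo_safe_path('gitea.example/john/pkg@main')) // gitea.example/john/pkg_main
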
function get_path(name)
@@ -24,50 +20,19 @@ function get_path(name)
if (!name)
return fd.realpath('.')
// If name is already an absolute path, use it directly
if (starts_with(name, '/'))
if (name.startsWith('/'))
return name
// Check if this package is linked - if so, use the link target directly
// This avoids symlink-related issues with file reading
var link_target = link.get_target(name)
if (link_target) {
// If link target is a local path, use it directly
if (starts_with(link_target, '/'))
return link_target
// Otherwise it's another package name, resolve that
return os.global_shop_path + '/packages/' + replace(replace(link_target, '/', '_'), '@', '_')
}
// Remote packages use nested directories, so don't transform slashes
return os.global_shop_path + '/packages/' + replace(name, '@', '_')
return os.global_shop_path + '/packages/' + name.replaceAll('@', '_')
}
package.load_config = function(name)
{
var config_path = get_path(name) + '/cell.toml'
if (!fd.is_file(config_path))
throw new Error(`${config_path} isn't a path`)
// Return cached config if available
if (config_cache[config_path])
return config_cache[config_path]
if (!fd.is_file(config_path)) {
throw Error(`${config_path} does not exist`)
}
var content = text(fd.slurp(config_path))
if (!content || length(trim(content)) == 0)
return {}
var result = toml.decode(content)
if (!result) {
return {}
}
// Deep copy to avoid toml module's shared state bug and cache it
result = json.decode(json.encode(result))
config_cache[config_path] = result
return result
return toml.decode(text(fd.slurp(config_path)))
}
package.save_config = function(name, config)
@@ -86,11 +51,11 @@ package.find_alias = function(name, locator)
var config = package.load_config(name)
if (!config.dependencies) return null
var found = null
arrfor(array(config.dependencies), function(alias) {
if (config.dependencies[alias] == locator) found = alias
})
return found
for (var alias in config.dependencies)
if (config.dependencies[alias] == locator)
return alias
return null
}
package.alias_to_package = function(name, alias)
@@ -130,15 +95,19 @@ package.find_package_dir = function(file)
var absolute = fd.realpath(file)
var dir = absolute
if (fd.is_file(dir))
dir = fd.dirname(dir)
if (fd.is_file(dir)) {
var last_slash = dir.lastIndexOf('/')
if (last_slash > 0) dir = dir.substring(0, last_slash)
}
while (dir && length(dir) > 0) {
while (dir && dir.length > 0) {
var toml_path = dir + '/cell.toml'
if (fd.is_file(toml_path)) {
return dir
}
dir = fd.dirname(dir)
var last_slash = dir.lastIndexOf('/')
if (last_slash <= 0) break
dir = dir.substring(0, last_slash)
}
return null
@@ -151,26 +120,19 @@ package.find_package_dir = function(file)
// Returns null if no alias is found for the given path
package.split_alias = function(name, path)
{
if (!path || length(path) == 0) {
if (!path || path.length == 0) {
return null
}
var parts = array(path, '/')
var parts = path.split('/')
var first_part = parts[0]
try {
var config = package.load_config(name)
if (!config) return null
var deps = config.dependencies
if (deps && deps[first_part]) {
var dep_locator = deps[first_part]
var remaining_path = text(array(parts, 1), '/')
if (config.dependencies && config.dependencies[first_part]) {
var dep_locator = config.dependencies[first_part]
var remaining_path = parts.slice(1).join('/')
return { package: dep_locator, path: remaining_path }
}
} catch (e) {
// Config doesn't exist or couldn't be loaded
}
return null
}
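A hedged usage sketch of split_alias, assuming the current package's cell.toml declares a hypothetical dependency alias image:

var pkg_mod = use('package')
// with image = "gitea.example/john/cell-image" under [dependencies]:
var hit = pkg_mod.split_alias(null, 'image/png')
// hit -> { package: 'gitea.example/john/cell-image', path: 'png' }
var miss = pkg_mod.split_alias(null, 'nosuch/png')
// miss -> null
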
@@ -187,13 +149,13 @@ package.gather_dependencies = function(name)
var deps = package.dependencies(pkg_name)
if (!deps) return
arrfor(array(deps), function(alias) {
for (var alias in deps) {
var locator = deps[alias]
if (!all_deps[locator]) {
all_deps[locator] = true
gather_recursive(locator)
}
})
}
}
gather_recursive(name)
@@ -209,10 +171,10 @@ package.list_files = function(pkg) {
var list = fd.readdir(current_dir)
if (!list) return
for (var i = 0; i < length(list); i++) {
for (var i = 0; i < list.length; i++) {
var item = list[i]
if (item == '.' || item == '..') continue
if (starts_with(item, '.')) continue
if (item.startsWith('.')) continue
// Skip build directories in root
@@ -223,7 +185,7 @@ package.list_files = function(pkg) {
if (st.isDirectory) {
walk(full_path, rel_path)
} else {
push(files, rel_path)
files.push(rel_path)
}
}
}
@@ -237,9 +199,9 @@ package.list_files = function(pkg) {
package.list_modules = function(name) {
var files = package.list_files(name)
var modules = []
for (var i = 0; i < length(files); i++) {
if (ends_with(files[i], '.cm')) {
push(modules, text(files[i], 0, -3))
for (var i = 0; i < files.length; i++) {
if (files[i].endsWith('.cm')) {
modules.push(files[i].substring(0, files[i].length - 3))
}
}
return modules
@@ -248,9 +210,9 @@ package.list_modules = function(name) {
package.list_programs = function(name) {
var files = package.list_files(name)
var programs = []
for (var i = 0; i < length(files); i++) {
if (ends_with(files[i], '.ce')) {
push(programs, text(files[i], 0, -3))
for (var i = 0; i < files.length; i++) {
if (files[i].endsWith('.ce')) {
programs.push(files[i].substring(0, files[i].length - 3))
}
}
return programs
@@ -267,13 +229,13 @@ package.get_flags = function(name, flag_type, target) {
// Base flags
if (config.compilation && config.compilation[flag_type]) {
var base = config.compilation[flag_type]
flags = array(flags, filter(array(base, /\s+/), function(f) { return length(f) > 0 }))
flags = flags.concat(base.split(/\s+/).filter(function(f) { return f.length > 0 }))
}
// Target-specific flags
if (target && config.compilation && config.compilation[target] && config.compilation[target][flag_type]) {
var target_flags = config.compilation[target][flag_type]
flags = array(flags, filter(array(target_flags, /\s+/), function(f) { return length(f) > 0 }))
flags = flags.concat(target_flags.split(/\s+/).filter(function(f) { return f.length > 0 }))
}
return flags
@@ -290,27 +252,31 @@ package.get_c_files = function(name, target, exclude_main) {
// Group files by their base name (without target suffix)
var groups = {} // base_key -> { generic: file, variants: { target: file } }
for (var i = 0; i < length(files); i++) {
for (var i = 0; i < files.length; i++) {
var file = files[i]
if (!ends_with(file, '.c') && !ends_with(file, '.cpp')) continue
if (!file.endsWith('.c') && !file.endsWith('.cpp')) continue
var ext = ends_with(file, '.cpp') ? '.cpp' : '.c'
var base = text(file, 0, -length(ext))
var name_part = fd.basename(base)
var dir_part = fd.dirname(base)
var dir = (dir_part && dir_part != '.') ? dir_part + '/' : ''
var ext = file.endsWith('.cpp') ? '.cpp' : '.c'
var base = file.substring(0, file.length - ext.length)
var dir = ''
var name_part = base
var slash = base.lastIndexOf('/')
if (slash >= 0) {
dir = base.substring(0, slash + 1)
name_part = base.substring(slash + 1)
}
// Check for target suffix
var is_variant = false
var variant_target = null
var generic_name = name_part
for (var t = 0; t < length(known_targets); t++) {
for (var t = 0; t < known_targets.length; t++) {
var suffix = '_' + known_targets[t]
if (ends_with(name_part, suffix)) {
if (name_part.endsWith(suffix)) {
is_variant = true
variant_target = known_targets[t]
generic_name = text(name_part, 0, -length(suffix))
generic_name = name_part.substring(0, name_part.length - suffix.length)
break
}
}
@@ -329,7 +295,7 @@ package.get_c_files = function(name, target, exclude_main) {
// Select appropriate file from each group
var result = []
arrfor(array(groups), function(key) {
for (var key in groups) {
var group = groups[key]
var selected = null
@@ -343,12 +309,14 @@ package.get_c_files = function(name, target, exclude_main) {
if (selected) {
// Skip main.c if requested
if (exclude_main) {
var basename = fd.basename(selected)
if (basename == 'main.c' || starts_with(basename, 'main_')) return
var basename = selected
var s = selected.lastIndexOf('/')
if (s >= 0) basename = selected.substring(s + 1)
if (basename == 'main.c' || basename.startsWith('main_')) continue
}
result.push(selected)
}
push(result, selected)
}
})
return result
}
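A brief sketch of the target-variant selection above, with a hypothetical package and file names:

var pkg_mod = use('package')
// suppose 'mypkg' ships src/draw.c and src/draw_linux.c
var cfiles = pkg_mod.get_c_files('mypkg', 'linux', true)
// cfiles includes src/draw_linux.c: the _linux variant shadows the generic src/draw.c;
// a non-matching target falls back to src/draw.c, and main.c / main_* files are skipped here
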

224
parseq.cm Normal file
View File

@@ -0,0 +1,224 @@
// parseq.js (Misty edition)
// Douglas Crockford → adapted for Misty by ChatGPT, 20250529
// Better living thru eventuality!
/*
 The original parseq.js relied on the browser's setTimeout and ran in
 milliseconds. In Misty we may be given an optional @.delay capability
 (arguments[0]) and time limits are expressed in **seconds**. This rewrite
 removes the setTimeout dependency, uses the @.delay capability when it is
 present, and provides the factories described in the Misty specification:
    fallback, par_all, par_any, race, sequence
 Each factory returns a **requestor** function as described by the spec.
*/
def delay = arg[0] // may be null
// ———————————————————————————————————————— helpers
function make_reason (factory, excuse, evidence) {
def reason = new Error(`parseq.${factory}${excuse ? ': ' + excuse : ''}`)
reason.evidence = evidence
return reason
}
function is_requestor (fn) {
return typeof fn == 'function' && (fn.length == 1 || fn.length == 2)
}
function check_requestors (list, factory) {
if (!isa(list, array) || list.some(r => !is_requestor(r)))
throw make_reason(factory, 'Bad requestor list.', list)
}
function check_callback (cb, factory) {
if (typeof cb != 'function' || cb.length != 2)
throw make_reason(factory, 'Not a callback.', cb)
}
function schedule (fn, seconds) {
if (seconds == null || seconds <= 0) return fn()
if (typeof delay == 'function') return delay(fn, seconds)
throw make_reason('schedule', '@.delay capability required for timeouts.')
}
// ———————————————————————————————————————— core runner
function run (factory, requestors, initial, action, time_limit, throttle = 0) {
let cancel_list = new Array(requestors.length)
let next = 0
let timer_cancel
function cancel (reason = make_reason(factory, 'Cancel.')) {
if (timer_cancel) timer_cancel(), timer_cancel = null
if (!cancel_list) return
cancel_list.forEach(c => { try { if (typeof c == 'function') c(reason) } catch (_) {} })
cancel_list = null
}
function start_requestor (value) {
if (!cancel_list || next >= requestors.length) return
let idx = next++
def req = requestors[idx]
try {
cancel_list[idx] = req(function req_cb (val, reason) {
if (!cancel_list || idx == null) return
cancel_list[idx] = null
action(val, reason, idx)
idx = null
if (factory == 'sequence') start_requestor(val)
else if (throttle) start_requestor(initial)
}, value)
} catch (ex) {
action(null, ex, idx)
idx = null
if (factory == 'sequence') start_requestor(value)
else if (throttle) start_requestor(initial)
}
}
if (time_limit != null) {
if (typeof time_limit != 'number' || time_limit < 0)
throw make_reason(factory, 'Bad time limit.', time_limit)
if (time_limit > 0) timer_cancel = schedule(() => cancel(make_reason(factory, 'Timeout.', time_limit)), time_limit)
}
def concurrent = throttle ? number.min(throttle, requestors.length) : requestors.length
for (let i = 0; i < concurrent; i++) start_requestor(initial)
return cancel
}
// ———————————————————————————————————————— factories
function _normalize (collection, factory) {
if (isa(collection, array)) return { names: null, list: collection }
if (collection && typeof collection == 'object') {
def names = array(collection)
def list = names.map(k => collection[k]).filter(is_requestor)
return { names, list }
}
throw make_reason(factory, 'Expected array or record.', collection)
}
function _denormalize (names, list) {
if (!names) return list
def obj = meme(null)
names.forEach((k, i) => { obj[k] = list[i] })
return obj
}
function par_all (collection, time_limit, throttle) {
def factory = 'par_all'
def { names, list } = _normalize(collection, factory)
if (list.length == 0) return (cb, v) => cb(names ? {} : [])
check_requestors(list, factory)
return function par_all_req (cb, initial) {
check_callback(cb, factory)
let pending = list.length
def results = new Array(list.length)
def cancel = run(factory, list, initial, (val, reason, idx) => {
if (val == null) {
cancel(reason)
return cb(null, reason)
}
results[idx] = val
if (--pending == 0) cb(_denormalize(names, results))
}, time_limit, throttle)
return cancel
}
}
function par_any (collection, time_limit, throttle) {
def factory = 'par_any'
def { names, list } = _normalize(collection, factory)
if (list.length == 0) return (cb, v) => cb(names ? {} : [])
check_requestors(list, factory)
return function par_any_req (cb, initial) {
check_callback(cb, factory)
let pending = list.length
def successes = new Array(list.length)
def cancel = run(factory, list, initial, (val, reason, idx) => {
pending--
if (val != null) successes[idx] = val
if (successes.some(v => v != null)) {
if (!pending) cancel(make_reason(factory, 'Finished.'))
return cb(_denormalize(names, successes.filter(v => v != null)))
}
if (!pending) cb(null, make_reason(factory, 'No successes.'))
}, time_limit, throttle)
return cancel
}
}
function race (list, time_limit, throttle) {
def factory = throttle == 1 ? 'fallback' : 'race'
if (!isa(list, array) || list.length == 0)
throw make_reason(factory, 'No requestors.')
check_requestors(list, factory)
return function race_req (cb, initial) {
check_callback(cb, factory)
let done = false
def cancel = run(factory, list, initial, (val, reason, idx) => {
if (done) return
if (val != null) {
done = true
cancel(make_reason(factory, 'Loser.', idx))
cb(val)
} else if (--list.length == 0) {
done = true
cancel(reason)
cb(null, reason)
}
}, time_limit, throttle)
return cancel
}
}
function fallback (list, time_limit) {
return race(list, time_limit, 1)
}
function sequence (list, time_limit) {
def factory = 'sequence'
if (!isa(list, array)) throw make_reason(factory, 'Not an array.', list)
check_requestors(list, factory)
if (list.length == 0) return (cb, v) => cb(v)
return function sequence_req (cb, initial) {
check_callback(cb, factory)
let idx = 0
function next (value) {
if (idx >= list.length) return cb(value)
try {
list[idx++](function seq_cb (val, reason) {
if (val == null) return cb(null, reason)
next(val)
}, value)
} catch (ex) {
cb(null, ex)
}
}
next(initial)
}
}
return {
fallback,
par_all,
par_any,
race,
sequence
}
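A minimal usage sketch of the sequence factory, assuming the module is importable as use('parseq') from a dependent file:

var parseq = use('parseq')
function double(cb, v) { cb(v * 2) }
function add_one(cb, v) { cb(v + 1) }
var run_seq = parseq.sequence([double, add_one])
run_seq(function(value, reason) {
  if (value == null) log.error(`sequence failed: ${reason}`)
  else log.console(`result: ${value}`) // 5 when started with 2
}, 2)
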

View File

@@ -102,7 +102,7 @@ static void scores_cb(PDScoresList *scores, const char *errorMessage) {
JSC_SCALL(scoreboards_addScore,
if (!pd_scoreboards) return JS_ThrowInternalError(js, "scoreboards not initialized");
uint32_t value = (uint32_t)js2number(js, argv[1]);
if (argc > 2 && JS_IsFunction(argv[2])) {
if (argc > 2 && JS_IsFunction(js, argv[2])) {
g_scoreboard_js = js;
JS_FreeValue(js, g_add_score_callback);
g_add_score_callback = JS_DupValue(js, argv[2]);
@@ -112,7 +112,7 @@ JSC_SCALL(scoreboards_addScore,
JSC_SCALL(scoreboards_getPersonalBest,
if (!pd_scoreboards) return JS_ThrowInternalError(js, "scoreboards not initialized");
if (argc > 1 && JS_IsFunction(argv[1])) {
if (argc > 1 && JS_IsFunction(js, argv[1])) {
g_scoreboard_js = js;
JS_FreeValue(js, g_personal_best_callback);
g_personal_best_callback = JS_DupValue(js, argv[1]);
@@ -129,7 +129,7 @@ JSC_CCALL(scoreboards_freeScore,
JSC_CCALL(scoreboards_getScoreboards,
if (!pd_scoreboards) return JS_ThrowInternalError(js, "scoreboards not initialized");
if (argc > 0 && JS_IsFunction(argv[0])) {
if (argc > 0 && JS_IsFunction(js, argv[0])) {
g_scoreboard_js = js;
JS_FreeValue(js, g_boards_list_callback);
g_boards_list_callback = JS_DupValue(js, argv[0]);
@@ -145,7 +145,7 @@ JSC_CCALL(scoreboards_freeBoardsList,
JSC_SCALL(scoreboards_getScores,
if (!pd_scoreboards) return JS_ThrowInternalError(js, "scoreboards not initialized");
if (argc > 1 && JS_IsFunction(argv[1])) {
if (argc > 1 && JS_IsFunction(js, argv[1])) {
g_scoreboard_js = js;
JS_FreeValue(js, g_scores_callback);
g_scores_callback = JS_DupValue(js, argv[1]);

View File

@@ -4,22 +4,22 @@
// Time is in seconds.
function make_reason(factory, excuse, evidence) {
def reason = Error(`pronto.${factory}${excuse ? ': ' + excuse : ''}`)
def reason = new Error(`pronto.${factory}${excuse ? ': ' + excuse : ''}`)
reason.evidence = evidence
return reason
}
function is_requestor(fn) {
return is_function(fn) && (length(fn) == 1 || length(fn) == 2)
return typeof fn == 'function' && (fn.length == 1 || fn.length == 2)
}
function check_requestors(list, factory) {
if (!is_array(list) || some(list, r => !is_requestor(r)))
if (!isa(list, array) || list.some(r => !is_requestor(r)))
throw make_reason(factory, 'Bad requestor array.', list)
}
function check_callback(cb, factory) {
if (!is_function(cb) || length(cb) != 2)
if (typeof cb != 'function' || cb.length != 2)
throw make_reason(factory, 'Not a callback.', cb)
}
@@ -27,7 +27,7 @@ function check_callback(cb, factory) {
// Tries each requestor in order until one succeeds.
function fallback(requestor_array) {
def factory = 'fallback'
if (!is_array(requestor_array) || length(requestor_array) == 0)
if (!isa(requestor_array, array) || requestor_array.length == 0)
throw make_reason(factory, 'Empty requestor array.')
check_requestors(requestor_array, factory)
@@ -47,7 +47,7 @@ function fallback(requestor_array) {
function try_next() {
if (cancelled) return
if (index >= length(requestor_array)) {
if (index >= requestor_array.length) {
callback(null, make_reason(factory, 'All requestors failed.'))
return
}
@@ -79,25 +79,25 @@ function fallback(requestor_array) {
// Runs requestors in parallel, collecting all results.
function parallel(requestor_array, throttle, need) {
def factory = 'parallel'
if (!is_array(requestor_array))
if (!isa(requestor_array, array))
throw make_reason(factory, 'Not an array.', requestor_array)
check_requestors(requestor_array, factory)
def length = length(requestor_array)
def length = requestor_array.length
if (length == 0)
return function(callback, value) { callback([]) }
if (need == null) need = length
if (!is_number(need) || need < 0 || need > length)
if (typeof need != 'number' || need < 0 || need > length)
throw make_reason(factory, 'Bad need.', need)
if (throttle != null && (!is_number(throttle) || throttle < 1))
if (throttle != null && (typeof throttle != 'number' || throttle < 1))
throw make_reason(factory, 'Bad throttle.', throttle)
return function parallel_requestor(callback, value) {
check_callback(callback, factory)
def results = array(length)
def cancel_list = array(length)
def results = new Array(length)
def cancel_list = new Array(length)
var next_index = 0
var successes = 0
var failures = 0
@@ -106,8 +106,8 @@ function parallel(requestor_array, throttle, need) {
function cancel(reason) {
if (finished) return
finished = true
arrfor(cancel_list, c => {
try { if (is_function(c)) c(reason) } catch (_) {}
cancel_list.forEach(c => {
try { if (typeof c == 'function') c(reason) } catch (_) {}
})
}
@@ -153,8 +153,7 @@ function parallel(requestor_array, throttle, need) {
}
}
def concurrent = throttle ? min(throttle, length) : length
def concurrent = throttle ? number.min(throttle, length) : length
for (var i = 0; i < concurrent; i++) start_one()
return cancel
@@ -165,22 +164,22 @@ function parallel(requestor_array, throttle, need) {
// Runs requestors in parallel, returns first success(es).
function race(requestor_array, throttle, need) {
def factory = 'race'
if (!is_array(requestor_array) || length(requestor_array) == 0)
if (!isa(requestor_array, array) || requestor_array.length == 0)
throw make_reason(factory, 'Empty requestor array.')
check_requestors(requestor_array, factory)
def length = length(requestor_array)
def length = requestor_array.length
if (need == null) need = 1
if (!is_number(need) || need < 1 || need > length)
if (typeof need != 'number' || need < 1 || need > length)
throw make_reason(factory, 'Bad need.', need)
if (throttle != null && (!is_number(throttle) || throttle < 1))
if (throttle != null && (typeof throttle != 'number' || throttle < 1))
throw make_reason(factory, 'Bad throttle.', throttle)
return function race_requestor(callback, value) {
check_callback(callback, factory)
def results = array(length)
def cancel_list = array(length)
def results = new Array(length)
def cancel_list = new Array(length)
var next_index = 0
var successes = 0
var failures = 0
@@ -189,8 +188,8 @@ function race(requestor_array, throttle, need) {
function cancel(reason) {
if (finished) return
finished = true
arrfor(cancel_list, c => {
try { if (is_function(c)) c(reason) } catch (_) {}
cancel_list.forEach(c => {
try { if (typeof c == 'function') c(reason) } catch (_) {}
})
}
@@ -239,7 +238,7 @@ function race(requestor_array, throttle, need) {
}
}
def concurrent = throttle ? min(throttle, length) : length
def concurrent = throttle ? number.min(throttle, length) : length
for (var i = 0; i < concurrent; i++) start_one()
return cancel
@@ -250,11 +249,11 @@ function race(requestor_array, throttle, need) {
// Runs requestors one at a time, passing result to next.
function sequence(requestor_array) {
def factory = 'sequence'
if (!is_array(requestor_array))
if (!isa(requestor_array, array))
throw make_reason(factory, 'Not an array.', requestor_array)
check_requestors(requestor_array, factory)
if (length(requestor_array) == 0)
if (requestor_array.length == 0)
return function(callback, value) { callback(value) }
return function sequence_requestor(callback, value) {
@@ -273,7 +272,7 @@ function sequence(requestor_array) {
function run_next(val) {
if (cancelled) return
if (index >= length(requestor_array)) {
if (index >= requestor_array.length) {
callback(val)
return
}
@@ -305,7 +304,7 @@ function sequence(requestor_array) {
// Converts a unary function into a requestor.
function requestorize(unary) {
def factory = 'requestorize'
if (!is_function(unary))
if (typeof unary != 'function')
throw make_reason(factory, 'Not a function.', unary)
return function requestorized(callback, value) {
@@ -319,12 +318,45 @@ function requestorize(unary) {
}
}
// objectify(factory_fn)
// Converts a factory that takes arrays to one that takes objects.
function objectify(factory_fn) {
def factory = 'objectify'
if (typeof factory_fn != 'function')
throw make_reason(factory, 'Not a factory.', factory_fn)
return function objectified_factory(object_of_requestors, ...rest) {
if (!isa(object_of_requestors, object))
throw make_reason(factory, 'Expected an object.', object_of_requestors)
def keys = array(object_of_requestors)
def requestor_array = keys.map(k => object_of_requestors[k])
def inner_requestor = factory_fn(requestor_array, ...rest)
return function(callback, value) {
return inner_requestor(function(results, reason) {
if (results == null) {
callback(null, reason)
} else if (isa(results, array)) {
def obj = {}
keys.forEach((k, i) => { obj[k] = results[i] })
callback(obj, reason)
} else {
callback(results, reason)
}
}, value)
}
}
}
return {
fallback,
parallel,
race,
sequence,
requestorize,
objectify,
is_requestor,
check_callback
}
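A hedged sketch of the new objectify helper, assuming this module is exposed as use('pronto'):

var pronto = use('pronto')
function get_a(cb, v) { cb('a') }
function get_b(cb, v) { cb('b') }
var both = pronto.objectify(pronto.parallel)({ first: get_a, second: get_b })
both(function(results, reason) {
  if (results == null) log.error(`objectified parallel failed: ${reason}`)
  else log.console(`first=${results.first} second=${results.second}`)
}, null)
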

8
qop.c
View File

@@ -450,10 +450,10 @@ static const JSCFunctionListEntry js_qop_writer_funcs[] = {
static const JSCFunctionListEntry js_qop_funcs[] = {
MIST_FUNC_DEF(qop, open, 1),
MIST_FUNC_DEF(qop, write, 1),
JS_PROP_INT32_DEF("FLAG_NONE", QOP_FLAG_NONE, 0),
JS_PROP_INT32_DEF("FLAG_COMPRESSED_ZSTD", QOP_FLAG_COMPRESSED_ZSTD, 0),
JS_PROP_INT32_DEF("FLAG_COMPRESSED_DEFLATE", QOP_FLAG_COMPRESSED_DEFLATE, 0),
JS_PROP_INT32_DEF("FLAG_ENCRYPTED", QOP_FLAG_ENCRYPTED, 0),
JS_PROP_INT32_DEF("FLAG_NONE", QOP_FLAG_NONE, JS_PROP_ENUMERABLE),
JS_PROP_INT32_DEF("FLAG_COMPRESSED_ZSTD", QOP_FLAG_COMPRESSED_ZSTD, JS_PROP_ENUMERABLE),
JS_PROP_INT32_DEF("FLAG_COMPRESSED_DEFLATE", QOP_FLAG_COMPRESSED_DEFLATE, JS_PROP_ENUMERABLE),
JS_PROP_INT32_DEF("FLAG_ENCRYPTED", QOP_FLAG_ENCRYPTED, JS_PROP_ENUMERABLE),
};
JSValue js_qop_use(JSContext *js) {

View File

@@ -3,10 +3,10 @@ var qop = use('qop')
function print_usage() {
log.console("Usage: qopconv [OPTION...] FILE...")
log.console(" -u <archive> .. unpack archive")
log.console(" -l <archive> .. list contents of archive")
log.console(" -u <archive> ... unpack archive")
log.console(" -l <archive> ... list contents of archive")
log.console(" -d <dir> ....... change read dir when creating archives")
log.console(" <sources...> <archive> .. create archive from sources")
log.console(" <sources...> <archive> ... create archive from sources")
}
function list(archive_path) {
@@ -24,12 +24,12 @@ function list(archive_path) {
}
var files = archive.list()
arrfor(files, function(f) {
for (var f of files) {
var s = archive.stat(f)
// Format: index hash size path
// We don't have index/hash easily available in JS binding yet, just size/path
log.console(`${f} (${s.size} bytes)`)
})
}
archive.close()
}
@@ -48,26 +48,26 @@ function unpack(archive_path) {
}
var files = archive.list()
arrfor(files, function(f) {
for (var f of files) {
var data = archive.read(f)
if (data) {
// Ensure directory exists
var dir = fd.dirname(f)
var dir = f.substring(0, f.lastIndexOf('/'))
if (dir) {
// recursive mkdir
var parts = array(dir, '/')
var parts = dir.split('/')
var curr = "."
arrfor(parts, function(p) {
for (var p of parts) {
curr += "/" + p
try { fd.mkdir(curr) } catch(e) {}
})
}
}
var fh = fd.open(f, "w")
fd.write(fh, data)
fd.close(fh)
log.console("Extracted " + f)
}
})
}
archive.close()
}
@@ -89,11 +89,11 @@ function pack(sources, archive_path, read_dir) {
if (st.isDirectory) {
var list = fd.readdir(full_path)
arrfor(list, function(item) {
if (item == "." || item == "..") return
for (var item of list) {
if (item == "." || item == "..") continue
var sub = path == "." ? item : path + "/" + item
add_recursive(sub)
})
}
} else {
var data = fd.slurp(full_path)
if (data) {
@@ -103,22 +103,22 @@ function pack(sources, archive_path, read_dir) {
}
}
arrfor(sources, function(s) {
for (var s of sources) {
add_recursive(s)
})
}
writer.finalize()
log.console("Created " + archive_path)
}
if (!is_array(arg) || length(arg) < 1) {
if (typeof arg == 'undefined' || arg.length < 1) {
print_usage()
} else {
if (arg[0] == "-l") {
if (length(arg) < 2) print_usage()
if (arg.length < 2) print_usage()
else list(arg[1])
} else if (arg[0] == "-u") {
if (length(arg) < 2) print_usage()
if (arg.length < 2) print_usage()
else unpack(arg[1])
} else {
var sources = []
@@ -130,12 +130,12 @@ if (!is_array(arg) || length(arg) < 1) {
i = 2
}
for (; i < length(arg) - 1; i++) {
push(sources, arg[i])
for (; i < arg.length - 1; i++) {
sources.push(arg[i])
}
archive = arg[length(arg) - 1]
archive = arg[arg.length - 1]
if (length(sources) == 0) {
if (sources.length == 0) {
print_usage()
} else {
pack(sources, archive, read_dir)

View File

@@ -14,7 +14,7 @@ rnd.random_fit = function()
rnd.random_whole = function(num)
{
return floor(rnd.random() * num)
return number.floor(rnd.random() * num)
}
rnd.random_range = function(min,max)

View File

@@ -1,105 +1,24 @@
// cell remove <locator> - Remove a package from the shop
//
// Usage:
// cell remove <locator> Remove a package from the shop
// cell remove . Remove current directory package from shop
//
// Options:
// --prune Also remove packages no longer needed by any root
// --dry-run Show what would be removed
// cell remove <alias|path> - Remove a package from dependencies or shop
var shop = use('internal/shop')
var pkg = use('package')
var link = use('link')
var fd = use('fd')
var target_pkg = null
var prune = false
var dry_run = false
for (var i = 0; i < length(args); i++) {
if (args[i] == '--prune') {
prune = true
} else if (args[i] == '--dry-run') {
dry_run = true
} else if (args[i] == '--help' || args[i] == '-h') {
log.console("Usage: cell remove <locator> [options]")
log.console("")
log.console("Remove a package from the shop.")
log.console("")
log.console("Options:")
log.console(" --prune Also remove packages no longer needed by any root")
log.console(" --dry-run Show what would be removed")
if (args.length < 1) {
log.console("Usage: cell remove <alias|path>")
$stop()
} else if (!starts_with(args[i], '-')) {
target_pkg = args[i]
}
return
}
if (!target_pkg) {
log.console("Usage: cell remove <locator> [options]")
$stop()
}
var pkg = args[0]
// Resolve relative paths to absolute paths
if (target_pkg == '.' || starts_with(target_pkg, './') || starts_with(target_pkg, '../') || fd.is_dir(target_pkg)) {
var resolved = fd.realpath(target_pkg)
if (pkg == '.' || pkg.startsWith('./') || pkg.startsWith('../') || fd.is_dir(pkg)) {
var resolved = fd.realpath(pkg)
if (resolved) {
target_pkg = resolved
pkg = resolved
}
}
var packages_to_remove = [target_pkg]
if (prune) {
// Find packages no longer needed
// Get all dependencies of remaining packages
var lock = shop.load_lock()
var all_packages = shop.list_packages()
// Build set of all needed packages (excluding target)
var needed = {}
arrfor(all_packages, function(p) {
if (p == target_pkg || p == 'core') return
// Mark this package and its deps as needed
needed[p] = true
try {
var deps = pkg.gather_dependencies(p)
arrfor(deps, function(dep) {
needed[dep] = true
})
} catch (e) {
// Skip if can't read deps
}
})
// Find packages that are NOT needed
arrfor(all_packages, function(p) {
if (p == 'core') return
if (!needed[p] && find(packages_to_remove, p) == null) {
push(packages_to_remove, p)
}
})
}
if (dry_run) {
log.console("Would remove:")
arrfor(packages_to_remove, function(p) {
log.console(" " + p)
})
} else {
arrfor(packages_to_remove, function(p) {
// Remove any link for this package
if (link.is_linked(p)) {
link.remove(p)
}
// Remove from shop
shop.remove(p)
})
log.console("Removed " + text(length(packages_to_remove)) + " package(s).")
}
shop.remove(pkg)
$stop()

View File

@@ -1,196 +0,0 @@
// cell resolve [<locator>] - Print fully resolved dependency closure
//
// Usage:
// cell resolve Resolve current directory package
// cell resolve . Resolve current directory package
// cell resolve <locator> Resolve specific package
//
// Options:
// --target <triple> Annotate builds for target platform
// --locked Show lock state without applying links
// --refresh Refresh floating refs before printing
var shop = use('internal/shop')
var pkg = use('package')
var link = use('link')
var build = use('build')
var fd = use('fd')
var target_locator = null
var target_triple = null
var show_locked = false
var refresh_first = false
for (var i = 0; i < length(args); i++) {
if (args[i] == '--target' || args[i] == '-t') {
if (i + 1 < length(args)) {
target_triple = args[++i]
} else {
log.error('--target requires a triple')
$stop()
}
} else if (args[i] == '--locked') {
show_locked = true
} else if (args[i] == '--refresh') {
refresh_first = true
} else if (args[i] == '--help' || args[i] == '-h') {
log.console("Usage: cell resolve [<locator>] [options]")
log.console("")
log.console("Print the fully resolved dependency closure.")
log.console("")
log.console("Options:")
log.console(" --target <triple> Annotate builds for target platform")
log.console(" --locked Show lock state without applying links")
log.console(" --refresh Refresh floating refs before printing")
$stop()
} else if (!starts_with(args[i], '-')) {
target_locator = args[i]
}
}
// Default to current directory
if (!target_locator) {
target_locator = '.'
}
// Resolve local paths
if (target_locator == '.' || starts_with(target_locator, './') || starts_with(target_locator, '../') || fd.is_dir(target_locator)) {
var resolved = fd.realpath(target_locator)
if (resolved) {
target_locator = resolved
}
}
// Check if it's a valid package
if (!fd.is_file(target_locator + '/cell.toml')) {
// Try to find it in the shop
var pkg_dir = shop.get_package_dir(target_locator)
if (!fd.is_file(pkg_dir + '/cell.toml')) {
log.error("Not a valid package: " + target_locator)
$stop()
}
}
// Detect target if not specified
if (!target_triple) {
target_triple = build.detect_host_target()
}
var lock = shop.load_lock()
var links = link.load()
// Gather all dependencies recursively
var all_deps = {}
var visited = {}
function gather_deps(locator, depth) {
if (visited[locator]) return
visited[locator] = true
all_deps[locator] = { depth: depth }
try {
var deps = pkg.dependencies(locator)
if (deps) {
arrfor(array(deps), function(alias) {
var dep_locator = deps[alias]
gather_deps(dep_locator, depth + 1)
})
}
} catch (e) {
// Package might not have dependencies
}
}
gather_deps(target_locator, 0)
// Print header
log.console("Resolved dependency closure for: " + target_locator)
log.console("Target: " + target_triple)
log.console("")
// Sort by depth then alphabetically
var sorted = array(array(all_deps), function(locator) { return { locator: locator, depth: all_deps[locator].depth } })
sorted = sort(sorted, "locator")
sorted = sort(sorted, "depth")
for (var i = 0; i < length(sorted); i++) {
var locator = sorted[i].locator
var depth = sorted[i].depth
var indent = ""
for (var j = 0; j < depth; j++) indent += " "
// Get info about this package
var info = shop.resolve_package_info(locator)
var lock_entry = lock[locator]
var link_target = show_locked ? null : links[locator]
var effective_locator = link_target || locator
// Check status
var is_linked = link_target != null
var is_in_lock = lock_entry != null
var is_local = info == 'local'
// Check if fetched (package directory exists)
var pkg_dir = shop.get_package_dir(locator)
var is_fetched = fd.is_dir(pkg_dir) || fd.is_link(pkg_dir)
// Check if built (library exists)
var lib_dir = shop.get_lib_dir()
var lib_name = shop.lib_name_for_package(locator)
var dylib_ext = '.dylib' // TODO: detect from target
var lib_path = lib_dir + '/' + lib_name + dylib_ext
var is_built = fd.is_file(lib_path)
// Format output
var status_parts = []
if (is_linked) push(status_parts, "linked")
if (is_local) push(status_parts, "local")
if (!is_in_lock) push(status_parts, "not in lock")
if (!is_fetched) push(status_parts, "not fetched")
if (is_built) push(status_parts, "built")
var commit_str = ""
if (lock_entry && lock_entry.commit) {
commit_str = " @" + text(lock_entry.commit, 0, 8)
} else if (lock_entry && lock_entry.type == 'local') {
commit_str = " (local)"
}
var line = indent + locator + commit_str
if (is_linked && !show_locked) {
line += " -> " + link_target
}
if (length(status_parts) > 0) {
line += " [" + text(status_parts, ", ") + "]"
}
log.console(line)
// Show compilation inputs if requested (verbose)
if (depth == 0) {
try {
var cflags = pkg.get_flags(locator, 'CFLAGS', target_triple)
var ldflags = pkg.get_flags(locator, 'LDFLAGS', target_triple)
if (length(cflags) > 0 || length(ldflags) > 0) {
log.console(indent + " Compilation inputs:")
if (length(cflags) > 0) {
log.console(indent + " CFLAGS: " + text(cflags, ' '))
}
if (length(ldflags) > 0) {
log.console(indent + " LDFLAGS: " + text(ldflags, ' '))
}
}
} catch (e) {
// Skip if can't read config
}
}
}
log.console("")
log.console("Total: " + text(length(sorted)) + " package(s)")
$stop()

View File

@@ -4,14 +4,14 @@
var shop = use('internal/shop')
var pkg = use('package')
if (length(args) < 1) {
if (args.length < 1) {
log.console("Usage: cell search <query>")
log.console("Searches for packages, actors, or modules matching the query.")
$stop()
return
}
var query = args[0]
var query = args[0].toLowerCase()
var found_packages = []
var found_modules = []
var found_actors = []
@@ -19,34 +19,34 @@ var found_actors = []
// Search through all installed packages
var packages = shop.list_packages()
arrfor(packages, function(package_name) {
for (var package_name of packages) {
// Check if package name matches
if (search(package_name, query) != null) {
push(found_packages, package_name)
if (package_name.toLowerCase().includes(query)) {
found_packages.push(package_name)
}
// Search modules and actors within the package
try {
var modules = pkg.list_modules(package_name)
arrfor(modules, function(mod) {
if (search(mod, query) != null) {
push(found_modules, package_name + ':' + mod)
for (var mod of modules) {
if (mod.toLowerCase().includes(query)) {
found_modules.push(package_name + ':' + mod)
}
}
})
var actors = pkg.list_programs(package_name)
arrfor(actors, function(actor) {
if (search(actor, query) != null) {
push(found_actors, package_name + ':' + actor)
for (var actor of actors) {
if (actor.toLowerCase().includes(query)) {
found_actors.push(package_name + ':' + actor)
}
}
})
} catch (e) {
// Skip packages that can't be read
}
})
}
// Print results
var total = length(found_packages) + length(found_modules) + length(found_actors)
var total = found_packages.length + found_modules.length + found_actors.length
if (total == 0) {
log.console("No results found for '" + query + "'")
@@ -54,27 +54,27 @@ if (total == 0) {
log.console("Found " + text(total) + " result(s) for '" + query + "':")
log.console("")
if (length(found_packages) > 0) {
if (found_packages.length > 0) {
log.console("Packages:")
arrfor(found_packages, function(p) {
for (var p of found_packages) {
log.console(" " + p)
})
}
log.console("")
}
if (length(found_modules) > 0) {
if (found_modules.length > 0) {
log.console("Modules:")
arrfor(found_modules, function(m) {
for (var m of found_modules) {
log.console(" " + m)
})
}
log.console("")
}
if (length(found_actors) > 0) {
if (found_actors.length > 0) {
log.console("Actors:")
arrfor(found_actors, function(a) {
for (var a of found_actors) {
log.console(" " + a)
})
}
}
}

View File

@@ -128,6 +128,7 @@ void script_startup(cell_rt *prt)
JS_AddIntrinsicEval(js);
JS_AddIntrinsicRegExp(js);
JS_AddIntrinsicJSON(js);
JS_AddIntrinsicMapSet(js);
JS_SetContextOpaque(js, prt);
prt->context = js;
@@ -145,8 +146,12 @@ void script_startup(cell_rt *prt)
JS_SetPropertyStr(js, cell, "hidden", hidden_fn);
JS_SetPropertyStr(js, hidden_fn, "os", js_os_use(js));
crt->actor_sym = JS_NewObject(js);
JS_SetPropertyStr(js, hidden_fn, "actorsym", JS_DupValue(js,crt->actor_sym));
const char actorsym_script[] = "Symbol('actordata');";
JSValue actorsym = JS_Eval(js, actorsym_script, sizeof(actorsym_script)-1, "internal", 0);
JS_SetPropertyStr(js, hidden_fn, "actorsym", actorsym);
crt->actor_sym = JS_ValueToAtom(js, actorsym);
if (crt->init_wota) {
JS_SetPropertyStr(js, hidden_fn, "init", wota2value(js, crt->init_wota));
@@ -241,8 +246,10 @@ int cell_init(int argc, char **argv)
int JS_ArrayLength(JSContext *js, JSValue a)
{
int64_t len;
JS_GetLength(js, a, &len);
JSValue length = JS_GetPropertyStr(js, a, "length");
int len;
JS_ToInt32(js,&len,length);
JS_FreeValue(js,length);
return len;
}
@@ -276,6 +283,7 @@ void cell_trace_sethook(cell_hook)
int uncaught_exception(JSContext *js, JSValue v)
{
cell_rt *rt = JS_GetContextOpaque(js);
if (!JS_HasException(js)) {
JS_FreeValue(js,v);
return 1;
@@ -283,21 +291,26 @@ int uncaught_exception(JSContext *js, JSValue v)
JSValue exp = JS_GetException(js);
JSValue message = JS_GetPropertyStr(js, exp, "message");
const char *msg_str = JS_ToCString(js, message);
if (msg_str) {
printf("Exception: %s\n", msg_str);
JS_FreeCString(js, msg_str);
if (JS_IsNull(rt->on_exception)) {
const char *str = JS_ToCString(js, exp);
if (str) {
printf("Uncaught exception: %s\n", str);
JS_FreeCString(js, str);
}
JS_FreeValue(js, message);
JSValue stack = JS_GetPropertyStr(js, exp, "stack");
if (!JS_IsNull(stack)) {
const char *stack_str = JS_ToCString(js, stack);
if (stack_str) {
printf("Stack:\n%s\n", stack_str);
printf("Stack trace:\n%s\n", stack_str);
JS_FreeCString(js, stack_str);
}
}
JS_FreeValue(js, stack);
} else {
JSValue ret = JS_Call(js, rt->on_exception, JS_NULL, 1, &exp);
JS_FreeValue(js, ret);
}
JS_FreeValue(js, exp);
JS_FreeValue(js, v);

View File

@@ -36,7 +36,7 @@ void cell_trace_sethook(cell_hook);
// Macros to help with creating scripts
#define MIST_CFUNC_DEF(name, length, func1, props) { name, props, JS_DEF_CFUNC, 0, .u = { .func = { length, JS_CFUNC_generic, { .generic = func1 } } } }
#define MIST_FUNC_DEF(TYPE, FN, LEN) MIST_CFUNC_DEF(#FN, LEN, js_##TYPE##_##FN, 0)
#define MIST_FUNC_DEF(TYPE, FN, LEN) MIST_CFUNC_DEF(#FN, LEN, js_##TYPE##_##FN, JS_PROP_C_W_E)
#define PROTO_FUNC_DEF(TYPE, FN, LEN) MIST_CFUNC_DEF(#FN, LEN, js_##TYPE##_##FN, 0)
#define JS_SETSIG JSContext *js, JSValue self, JSValue val
@@ -68,6 +68,41 @@ void cell_trace_sethook(cell_hook);
JS_FreeCString(js,str); \
) \
#define MIST_CGETSET_BASE(name, fgetter, fsetter, props) { name, props, JS_DEF_CGETSET, 0, .u = { .getset = { .get = { .getter = fgetter }, .set = { .setter = fsetter } } } }
#define MIST_CGETSET_DEF(name, fgetter, fsetter) MIST_CGETSET_BASE(name, fgetter, fsetter, JS_PROP_CONFIGURABLE | JS_PROP_ENUMERABLE)
#define MIST_CGETET_HID(name, fgetter, fsetter) MIST_CGETSET_BASE(name, fgetter, fsetter, JS_PROP_CONFIGURABLE)
#define MIST_GET(name, fgetter) { #fgetter , JS_PROP_CONFIGURABLE | JS_PROP_ENUMERABLE, JS_DEF_CGETSET, 0, .u = { .getset = { .get = { .getter = js_##name##_get_##fgetter } } } }
#define CGETSET_ADD_NAME(ID, ENTRY, NAME) MIST_CGETSET_DEF(#NAME, js_##ID##_get_##ENTRY, js_##ID##_set_##ENTRY)
#define CGETSET_ADD(ID, ENTRY) MIST_CGETSET_DEF(#ENTRY, js_##ID##_get_##ENTRY, js_##ID##_set_##ENTRY)
#define CGETSET_ADD_HID(ID, ENTRY) MIST_CGETSET_BASE(#ENTRY, js_##ID##_get_##ENTRY, js_##ID##_set_##ENTRY, JS_PROP_CONFIGURABLE)
#define GETSETPAIR(ID, ENTRY, TYPE, FN) \
JSValue js_##ID##_set_##ENTRY (JS_SETSIG) { \
js2##ID (js, self)->ENTRY = js2##TYPE (js,val); \
{FN;} \
return JS_NULL; \
} \
\
JSValue js_##ID##_get_##ENTRY (JSContext *js, JSValue self) { \
return TYPE##2js(js,js2##ID (js, self)->ENTRY); \
} \
#define JSC_GETSET(ID, ENTRY, TYPE) GETSETPAIR( ID , ENTRY , TYPE , ; )
#define JSC_GETSET_APPLY(ID, ENTRY, TYPE) GETSETPAIR(ID, ENTRY, TYPE, ID##_apply(js2##ID (js, self));)
#define JSC_GETSET_CALLBACK(ID, ENTRY) \
JSValue js_##ID##_set_##ENTRY (JS_SETSIG) { \
JSValue fn = js2##ID (js, self)->ENTRY; \
if (!JS_IsNull(fn)) JS_FreeValue(js, fn); \
js2##ID (js, self)->ENTRY = JS_DupValue(js, val); \
return JS_NULL; \
}\
JSValue js_##ID##_get_##ENTRY (JSContext *js, JSValue self) { return JS_DupValue(js, js2##ID (js, self)->ENTRY); } \
#define JSC_GET(ID, ENTRY, TYPE) \
JSValue js_##ID##_get_##ENTRY (JSContext *js, JSValue self) { \
return TYPE##2js(js,js2##ID (js, self)->ENTRY); } \
#define QJSCLASS(TYPE, ...)\
JSClassID js_##TYPE##_id;\
static void js_##TYPE##_finalizer(JSRuntime *rt, JSValue val){\
@@ -144,12 +179,15 @@ JS_SetClassProto(js, js_##TYPE##_id, TYPE##_proto); \
#define QJSCLASSPREP_FUNCS_CTOR(TYPE, CTOR_ARGC) \
({ \
QJSCLASSPREP_FUNCS(TYPE); \
JSValue TYPE##_ctor = JS_NewCFunction2(js, js_##TYPE##_constructor, #TYPE, CTOR_ARGC, JS_CFUNC_generic, 0); \
JSValue TYPE##_ctor = JS_NewCFunction2(js, js_##TYPE##_constructor, #TYPE, CTOR_ARGC, JS_CFUNC_constructor, 0); \
JS_SetConstructor(js, TYPE##_ctor, TYPE##_proto); \
TYPE##_ctor; \
})
#define countof(x) (sizeof(x)/sizeof((x)[0]))
// Common macros for property access
#define JS_GETPROP(JS, TARGET, VALUE, PROP, TYPE) {\
JSValue __##PROP##__v = JS_GetPropertyStr(JS,VALUE,#PROP); \

View File

@@ -53,7 +53,7 @@ typedef struct cell_rt {
int main_thread_only;
int affinity;
JSValue actor_sym;
JSAtom actor_sym;
const char *name; // human friendly name
cell_hook trace_hook;
@@ -63,7 +63,7 @@ cell_rt *create_actor(void *wota);
const char *register_actor(const char *id, cell_rt *actor, int mainthread, double ar);
void actor_disrupt(cell_rt *actor);
JSValue actor_sym(cell_rt *actor);
JSAtom actor_sym(cell_rt *actor);
const char *send_message(const char *id, void *msg);
const char *register_actor(const char *id, cell_rt *actor, int mainthread, double ar);

View File

@@ -111,7 +111,7 @@ JSC_CCALL(actor_on_exception,
)
JSC_CCALL(actor_clock,
if (!JS_IsFunction(argv[0]))
if (!JS_IsFunction(js, argv[0]))
return JS_ThrowReferenceError(js, "Argument must be a function.");
cell_rt *actor = JS_GetContextOpaque(js);
@@ -119,7 +119,7 @@ JSC_CCALL(actor_clock,
)
JSC_CCALL(actor_delay,
if (!JS_IsFunction(argv[0]))
if (!JS_IsFunction(js, argv[0]))
return JS_ThrowReferenceError(js, "Argument must be a function.");
cell_rt *actor = JS_GetContextOpaque(js);

606
source/qjs_blob.c Normal file
View File

@@ -0,0 +1,606 @@
#define BLOB_IMPLEMENTATION
#include "blob.h"
#include "cell.h"
// Provide countof if the shared macros header has not already defined it
#ifndef countof
#define countof(x) (sizeof(x)/sizeof((x)[0]))
#endif
// Free function for blob
void blob_free(JSRuntime *rt, blob *b)
{
blob_destroy(b);
}
// Use QJSCLASS macro to generate class boilerplate
QJSCLASS(blob,)
// Constructor function for blob
static JSValue js_blob_constructor(JSContext *ctx, JSValueConst new_target,
int argc, JSValueConst *argv) {
blob *bd = NULL;
// new Blob()
if (argc == 0) {
// empty antestone blob (not yet stoned)
bd = blob_new(0);
}
// new Blob(capacity)
else if (argc == 1 && JS_IsNumber(argv[0])) {
int64_t capacity_bits;
if (JS_ToInt64(ctx, &capacity_bits, argv[0]) < 0) {
return JS_EXCEPTION;
}
if (capacity_bits < 0) capacity_bits = 0;
bd = blob_new((size_t)capacity_bits);
}
// new Blob(length, logical/random)
else if (argc == 2 && JS_IsNumber(argv[0])) {
int64_t length_bits;
if (JS_ToInt64(ctx, &length_bits, argv[0]) < 0) {
return JS_EXCEPTION;
}
if (length_bits < 0) length_bits = 0;
if (JS_IsBool(argv[1])) {
// Fill with all 0s or all 1s
int is_one = JS_ToBool(ctx, argv[1]);
bd = blob_new_with_fill((size_t)length_bits, is_one);
} else if (JS_IsFunction(ctx, argv[1])) {
/* Random function provided: call it and take up to 52 bits at a time */
size_t bytes = (length_bits + 7) / 8;
bd = blob_new((size_t)length_bits);
if (bd) {
bd->length = length_bits;
/* Ensure the backing storage starts out zeroed */
memset(bd->data, 0, bytes);
size_t bits_written = 0;
while (bits_written < length_bits) {
JSValue randval = JS_Call(ctx, argv[1], JS_NULL, 0, NULL);
if (JS_IsException(randval)) {
blob_destroy(bd);
return JS_EXCEPTION;
}
int64_t fitval;
JS_ToInt64(ctx, &fitval, randval);
JS_FreeValue(ctx, randval);
/* Extract up to 52 bits from the random value */
size_t bits_to_use = length_bits - bits_written;
if (bits_to_use > 52) bits_to_use = 52;
/* Write bits from the random value */
for (size_t j = 0; j < bits_to_use; j++) {
size_t bit_pos = bits_written + j;
size_t byte_idx = bit_pos / 8;
size_t bit_idx = bit_pos % 8;
if (fitval & (1LL << j))
bd->data[byte_idx] |= (uint8_t)(1 << bit_idx);
else
bd->data[byte_idx] &= (uint8_t)~(1 << bit_idx);
}
bits_written += bits_to_use;
}
}
} else {
return JS_ThrowTypeError(ctx, "Second argument must be boolean or random function");
}
}
// new Blob(blob, from, to)
else if (argc >= 1 && JS_IsObject(argv[0])) {
// we try copying from another blob if it's of the same class
blob *src = js2blob(ctx, argv[0]);
if (!src) {
return JS_ThrowTypeError(ctx, "Blob constructor: argument 1 not a blob");
}
int64_t from = 0, to = (int64_t)src->length;
if (argc >= 2 && JS_IsNumber(argv[1])) {
JS_ToInt64(ctx, &from, argv[1]);
if (from < 0) from = 0;
}
if (argc >= 3 && JS_IsNumber(argv[2])) {
JS_ToInt64(ctx, &to, argv[2]);
if (to < from) to = from;
if (to > (int64_t)src->length) to = (int64_t)src->length;
}
bd = blob_new_from_blob(src, (size_t)from, (size_t)to);
}
// else fail
else {
return JS_ThrowTypeError(ctx, "Blob constructor: invalid arguments");
}
if (!bd) {
return JS_ThrowOutOfMemory(ctx);
}
return blob2js(ctx, bd);
}
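From cell script the constructor forms map roughly as below; the binding name blob and the availability of new are assumptions about how the class is registered, not shown in this hunk:

// hypothetical script-side view of the constructor forms handled above
var empty = new blob()           // empty blob, not yet stoned
var sized = new blob(64)         // capacity of 64 bits
var ones  = new blob(16, true)   // 16 bits filled with 1s
var copy  = new blob(ones, 0, 8) // copy of the first 8 bits of ones
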
// blob.write_bit(logical)
static JSValue js_blob_write_bit(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
if (argc < 1) {
return JS_ThrowTypeError(ctx, "write_bit(logical) requires 1 argument");
}
blob *bd = js2blob(ctx, this_val);
if (!bd) {
return JS_ThrowTypeError(ctx, "write_bit: not called on a blob");
}
// Handle numeric 0/1 or boolean
int bit_val;
if (JS_IsNumber(argv[0])) {
int32_t num;
JS_ToInt32(ctx, &num, argv[0]);
if (num != 0 && num != 1) {
return JS_ThrowTypeError(ctx, "write_bit: value must be true, false, 0, or 1");
}
bit_val = num;
} else {
bit_val = JS_ToBool(ctx, argv[0]);
}
if (blob_write_bit(bd, bit_val) < 0) {
return JS_ThrowTypeError(ctx, "write_bit: cannot write (maybe stone or OOM)");
}
return JS_NULL;
}
// blob.write_blob(second_blob)
static JSValue js_blob_write_blob(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
if (argc < 1) {
return JS_ThrowTypeError(ctx, "write_blob(second_blob) requires 1 argument");
}
blob *bd = js2blob(ctx, this_val);
if (!bd) {
return JS_ThrowTypeError(ctx, "write_blob: not called on a blob");
}
blob *second = js2blob(ctx, argv[0]);
if (!second) {
return JS_ThrowTypeError(ctx, "write_blob: argument must be a blob");
}
if (blob_write_blob(bd, second) < 0) {
return JS_ThrowTypeError(ctx, "write_blob: cannot write to stone blob or OOM");
}
return JS_NULL;
}
// blob.write_number(number) - write a number in DEC64 encoding
static JSValue js_blob_write_number(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
if (argc < 1) {
return JS_ThrowTypeError(ctx, "write_number(number) requires 1 argument");
}
blob *bd = js2blob(ctx, this_val);
if (!bd)
return JS_ThrowTypeError(ctx, "write_number: not called on a blob");
// Get the number as a double and convert to DEC64
double d;
if (JS_ToFloat64(ctx, &d, argv[0]) < 0)
return JS_EXCEPTION;
if (blob_write_dec64(bd, d) < 0)
return JS_ThrowTypeError(ctx, "write_number: cannot write to stone blob or OOM");
return JS_NULL;
}
// blob.w16(value) - write a 16-bit value (short)
static JSValue js_blob_w16(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
if (argc < 1)
return JS_ThrowTypeError(ctx, "w16(value) requires 1 argument");
blob *bd = js2blob(ctx, this_val);
if (!bd)
return JS_ThrowTypeError(ctx, "w16: not called on a blob");
int32_t value;
if (JS_ToInt32(ctx, &value, argv[0]) < 0) return JS_EXCEPTION;
int16_t short_val = (int16_t)value;
if (blob_write_bytes(bd, &short_val, sizeof(int16_t)) < 0)
return JS_ThrowTypeError(ctx, "w16: cannot write to stone blob or OOM");
return JS_NULL;
}
// blob.wf(value) - write a float (32-bit)
static JSValue js_blob_wf(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
if (argc < 1)
return JS_ThrowTypeError(ctx, "wf(value) requires 1 argument");
blob *bd = js2blob(ctx, this_val);
if (!bd)
return JS_ThrowTypeError(ctx, "wf: not called on a blob");
double d;
if (JS_ToFloat64(ctx, &d, argv[0]) < 0) return JS_EXCEPTION;
float f = (float)d;
if (blob_write_bytes(bd, &f, sizeof(float)) < 0)
return JS_ThrowTypeError(ctx, "wf: cannot write to stone blob or OOM");
return JS_NULL;
}
// blob.write_fit(value, len)
static JSValue js_blob_write_fit(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
if (argc < 2)
return JS_ThrowTypeError(ctx, "write_fit(value, len) requires 2 arguments");
blob *bd = js2blob(ctx, this_val);
if (!bd)
return JS_ThrowTypeError(ctx, "write_fit: not called on a blob");
int64_t value;
int32_t len;
if (JS_ToInt64(ctx, &value, argv[0]) < 0) return JS_EXCEPTION;
if (JS_ToInt32(ctx, &len, argv[1]) < 0) return JS_EXCEPTION;
if (blob_write_fit(bd, value, len) < 0) {
return JS_ThrowTypeError(ctx, "write_fit: value doesn't fit in specified bits, stone blob, or OOM");
}
return JS_NULL;
}
// blob.write_text(text)
static JSValue js_blob_write_text(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
if (argc < 1)
return JS_ThrowTypeError(ctx, "write_text(text) requires 1 argument");
blob *bd = js2blob(ctx, this_val);
if (!bd)
return JS_ThrowTypeError(ctx, "write_text: not called on a blob");
// Convert the argument to a C string before encoding it into the blob
const char *str = JS_ToCString(ctx, argv[0]);
if (!str)
return JS_EXCEPTION;
if (blob_write_text(bd, str) < 0) {
JS_FreeCString(ctx, str);
return JS_ThrowTypeError(ctx, "write_text: cannot write to stone blob or OOM");
}
JS_FreeCString(ctx, str);
return JS_NULL;
}
// blob.write_pad(block_size)
static JSValue js_blob_write_pad(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
if (argc < 1)
return JS_ThrowTypeError(ctx, "write_pad(block_size) requires 1 argument");
blob *bd = js2blob(ctx, this_val);
if (!bd)
return JS_ThrowTypeError(ctx, "write_pad: not called on a blob");
int32_t block_size;
if (JS_ToInt32(ctx, &block_size, argv[0]) < 0) return JS_EXCEPTION;
if (blob_write_pad(bd, block_size) < 0)
return JS_ThrowTypeError(ctx, "write_pad: cannot write (stone blob, OOM, or invalid block size)");
return JS_NULL;
}
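// The write methods above mutate a growing blob and fail once it has been
// made stone; the read methods below require a stone blob, so callers are
// expected to finish writing, call stone(), and only then read.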
// blob.read_logical(from)
static JSValue js_blob_read_logical(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
if (argc < 1) {
return JS_ThrowTypeError(ctx, "read_logical(from) requires 1 argument");
}
blob *bd = js2blob(ctx, this_val);
if (!bd) {
return JS_ThrowTypeError(ctx, "read_logical: not called on a blob");
}
int64_t pos;
if (JS_ToInt64(ctx, &pos, argv[0]) < 0) {
return JS_EXCEPTION;
}
if (pos < 0) {
return JS_ThrowRangeError(ctx, "read_logical: position must be non-negative");
}
int bit_val;
if (blob_read_bit(bd, (size_t)pos, &bit_val) < 0) {
return JS_ThrowTypeError(ctx, "read_logical: out of range or blob is not stone");
}
return JS_NewBool(ctx, bit_val);
}
// blob.read_blob(from, to)
static JSValue js_blob_read_blob(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
blob *bd = js2blob(ctx, this_val);
if (!bd) {
return JS_ThrowTypeError(ctx, "read_blob: not called on a blob");
}
if (!bd->is_stone) {
return JS_ThrowTypeError(ctx, "read_blob: blob must be stone");
}
int64_t from = 0;
int64_t to = bd->length;
if (argc >= 1) {
if (JS_ToInt64(ctx, &from, argv[0]) < 0) return JS_EXCEPTION;
if (from < 0) from = 0;
}
if (argc >= 2) {
if (JS_ToInt64(ctx, &to, argv[1]) < 0) return JS_EXCEPTION;
if (to > (int64_t)bd->length) to = bd->length;
}
blob *new_bd = blob_read_blob(bd, from, to);
if (!new_bd) {
return JS_ThrowOutOfMemory(ctx);
}
return blob2js(ctx, new_bd);
}
// blob.read_number(from) - read a DEC64-encoded number
static JSValue js_blob_read_number(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
if (argc < 1) {
return JS_ThrowTypeError(ctx, "read_number(from) requires 1 argument");
}
blob *bd = js2blob(ctx, this_val);
if (!bd) {
return JS_ThrowTypeError(ctx, "read_number: not called on a blob");
}
if (!bd->is_stone) {
return JS_ThrowTypeError(ctx, "read_number: blob must be stone");
}
int64_t from;
if (JS_ToInt64(ctx, &from, argv[0]) < 0) return JS_EXCEPTION;
if (from < 0) return JS_ThrowRangeError(ctx, "read_number: position must be non-negative");
double d;
if (blob_read_dec64(bd, from, &d) < 0) {
return JS_ThrowRangeError(ctx, "read_number: out of range");
}
return JS_NewFloat64(ctx, d);
}
// blob.read_fit(from, len)
static JSValue js_blob_read_fit(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
if (argc < 2) {
return JS_ThrowTypeError(ctx, "read_fit(from, len) requires 2 arguments");
}
blob *bd = js2blob(ctx, this_val);
if (!bd) {
return JS_ThrowTypeError(ctx, "read_fit: not called on a blob");
}
if (!bd->is_stone) {
return JS_ThrowTypeError(ctx, "read_fit: blob must be stone");
}
int64_t from;
int32_t len;
if (JS_ToInt64(ctx, &from, argv[0]) < 0) return JS_EXCEPTION;
if (JS_ToInt32(ctx, &len, argv[1]) < 0) return JS_EXCEPTION;
if (from < 0) {
return JS_ThrowRangeError(ctx, "read_fit: position must be non-negative");
}
int64_t value;
if (blob_read_fit(bd, from, len, &value) < 0) {
return JS_ThrowRangeError(ctx, "read_fit: out of range or invalid length");
}
return JS_NewInt64(ctx, value);
}
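// Minimal write_fit/read_fit round trip at the blob.h level, mirroring the
// bindings above. A sketch only: it assumes the blob.h signatures implied by
// this file (blob_new, blob_write_fit, blob_make_stone, blob_read_fit,
// blob_destroy) and is not part of the module.
static int blob_fit_roundtrip_example(void)
{
  blob *b = blob_new(0);
  if (!b) return -1;
  if (blob_write_fit(b, 42, 16) < 0) { blob_destroy(b); return -1; } // 42 in 16 bits
  blob_make_stone(b); // reads require a stone blob
  int64_t out = 0;
  if (blob_read_fit(b, 0, 16, &out) < 0) { blob_destroy(b); return -1; }
  blob_destroy(b);
  return out == 42 ? 0 : -1;
}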
// blob.read_text(from)
static JSValue js_blob_read_text(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
if (argc < 1) {
return JS_ThrowTypeError(ctx, "read_text(from) requires 1 argument");
}
blob *bd = js2blob(ctx, this_val);
if (!bd) {
return JS_ThrowTypeError(ctx, "read_text: not called on a blob");
}
if (!bd->is_stone) {
return JS_ThrowTypeError(ctx, "read_text: blob must be stone");
}
int64_t from;
if (JS_ToInt64(ctx, &from, argv[0]) < 0) return JS_EXCEPTION;
char *text;
size_t bits_read;
if (blob_read_text(bd, from, &text, &bits_read) < 0) {
return JS_ThrowRangeError(ctx, "read_text: out of range or invalid encoding");
}
JSValue result = JS_NewString(ctx, text);
free(text);
return result;
}
// blob.pad?(from, block_size)
static JSValue js_blob_pad_q(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
if (argc < 2) {
return JS_ThrowTypeError(ctx, "pad?(from, block_size) requires 2 arguments");
}
blob *bd = js2blob(ctx, this_val);
if (!bd) {
return JS_ThrowTypeError(ctx, "pad?: not called on a blob");
}
if (!bd->is_stone) {
return JS_ThrowTypeError(ctx, "pad?: blob must be stone");
}
int64_t from;
int32_t block_size;
if (JS_ToInt64(ctx, &from, argv[0]) < 0) return JS_EXCEPTION;
if (JS_ToInt32(ctx, &block_size, argv[1]) < 0) return JS_EXCEPTION;
return JS_NewBool(ctx, blob_pad_check(bd, from, block_size));
}
// blob.stone()
static JSValue js_blob_stone(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
blob *bd = js2blob(ctx, this_val);
if (!bd) {
return JS_ThrowTypeError(ctx, "stone: not called on a blob");
}
if (!bd->is_stone) {
blob_make_stone(bd);
}
return JS_NULL;
}
static JSValue js_blob_stonep(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv) {
blob *bd = js2blob(ctx, this_val);
if (!bd) {
return JS_ThrowTypeError(ctx, "stone: not called on a blob");
}
return JS_NewBool(ctx, bd->is_stone);
}
// blob.length getter
// Return number of bits in the blob
static JSValue js_blob_get_length(JSContext *ctx, JSValueConst this_val, int magic) {
blob *bd = js2blob(ctx, this_val);
if (!bd) {
return JS_ThrowTypeError(ctx, "length: not called on a blob");
}
return JS_NewInt64(ctx, bd->length);
}
// -----------------------------------------------------------------------------
// Exports list
// -----------------------------------------------------------------------------
static const JSCFunctionListEntry js_blob_funcs[] = {
// Write methods
JS_CFUNC_DEF("write_bit", 1, js_blob_write_bit),
JS_CFUNC_DEF("write_blob", 1, js_blob_write_blob),
JS_CFUNC_DEF("write_number", 1, js_blob_write_number),
JS_CFUNC_DEF("write_fit", 2, js_blob_write_fit),
JS_CFUNC_DEF("write_text", 1, js_blob_write_text),
JS_CFUNC_DEF("write_pad", 1, js_blob_write_pad),
JS_CFUNC_DEF("wf", 1, js_blob_wf),
JS_CFUNC_DEF("w16", 1, js_blob_w16),
// Read methods
JS_CFUNC_DEF("read_logical", 1, js_blob_read_logical),
JS_CFUNC_DEF("read_blob", 2, js_blob_read_blob),
JS_CFUNC_DEF("read_number", 1, js_blob_read_number),
JS_CFUNC_DEF("read_fit", 2, js_blob_read_fit),
JS_CFUNC_DEF("read_text", 1, js_blob_read_text),
JS_CFUNC_DEF("pad?", 2, js_blob_pad_q),
// Other methods
JS_CFUNC_DEF("stone", 0, js_blob_stone),
JS_CFUNC_DEF("stonep", 0, js_blob_stonep),
// Length property getter
JS_CGETSET_DEF("length", js_blob_get_length, NULL),
};
JSValue js_blob_use(JSContext *js) {
return QJSCLASSPREP_FUNCS_CTOR(blob, 3);
}
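// js_blob_use is presumably what the package's module loader calls for
// use('blob'); the test file further down exercises the result as a Blob
// constructor (new Blob(100), write_* calls, stone(), then read_* calls).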
JSValue js_new_blob_stoned_copy(JSContext *js, void *data, size_t bytes)
{
blob *b = blob_new(bytes*8);
if (!b) return JS_ThrowOutOfMemory(js);
memcpy(b->data, data, bytes);
b->length = bytes * 8; // Set the actual length in bits
blob_make_stone(b);
return blob2js(js, b);
}
void *js_get_blob_data(JSContext *js, size_t *size, JSValue v)
{
blob *b = js2blob(js, v);
if (!b) {
JS_ThrowReferenceError(js, "get_blob_data: not called on a blob");
return (void *)-1;
}
if (!b->is_stone) {
JS_ThrowReferenceError(js, "attempted to read data from a non-stone blob");
return (void *)-1;
}
if (b->length % 8 != 0) {
JS_ThrowReferenceError(js, "attempted to read data from a non-byte aligned blob [length is %zu]", (size_t)b->length);
return (void *)-1;
}
*size = (b->length + 7) / 8; // Return actual byte size based on bit length
return b->data;
}
void *js_get_blob_data_bits(JSContext *js, size_t *bits, JSValue v)
{
blob *b = js2blob(js, v);
if (!b) {
JS_ThrowReferenceError(js, "get_blob_data_bits: not called on a blob");
return (void *)-1;
}
if (!b->is_stone) {
JS_ThrowReferenceError(js, "attempted to read data from a non-stone blob");
return (void *)-1;
}
if (!b->data) {
JS_ThrowReferenceError(js, "attempted to read data from an empty blob");
return (void *)-1;
}
if (b->length % 8 != 0) {
JS_ThrowReferenceError(js, "attempted to read data from a non-byte aligned blob");
return (void *)-1;
}
if (b->length == 0) {
JS_ThrowReferenceError(js, "attempted to read data from an empty blob");
return (void *)-1;
}
*bits = b->length;
return b->data;
}
int js_is_blob(JSContext *js, JSValue v)
{
blob *b = js2blob(js,v);
if (b) return 1;
return 0;
}
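// Sketch of how a sibling binding is expected to consume a stone blob through
// js_get_blob_data, using the same (void *)-1 error sentinel the wota decoder
// checks. The function name and shape here are illustrative only.
static JSValue example_consume_blob(JSContext *ctx, JSValueConst this_val,
                                    int argc, JSValueConst *argv)
{
  if (argc < 1) return JS_ThrowTypeError(ctx, "expected a blob argument");
  size_t len = 0;
  uint8_t *buf = js_get_blob_data(ctx, &len, argv[0]);
  if (buf == (uint8_t *)-1) return JS_EXCEPTION; // exception already thrown
  if (!buf || len == 0) return JS_ThrowTypeError(ctx, "No blob data present");
  // ... parse len bytes starting at buf ...
  return JS_NULL;
}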

View File

@@ -88,7 +88,7 @@ static void encode_object_properties(WotaEncodeContext *enc, JSValueConst val, J
for (uint32_t i = 0; i < plen; i++) {
JSValue prop_val = JS_GetProperty(ctx, val, ptab[i].atom);
if (!JS_IsFunction(prop_val)) {
if (!JS_IsFunction(ctx, prop_val)) {
atoms[non_function_count] = ptab[i].atom;
props[non_function_count++] = prop_val;
} else
@@ -128,7 +128,8 @@ static void wota_encode_value(WotaEncodeContext *enc, JSValueConst val, JSValueC
wota_write_int_word(&enc->wb, d);
break;
}
case JS_TAG_FLOAT64: {
case JS_TAG_FLOAT64:
case JS_TAG_BIG_INT: {
double d;
if (JS_ToFloat64(ctx, &d, replaced) < 0) {
wota_write_sym(&enc->wb, WOTA_NULL);
@@ -185,8 +186,7 @@ static void wota_encode_value(WotaEncodeContext *enc, JSValueConst val, JSValueC
break;
}
cell_rt *crt = JS_GetContextOpaque(ctx);
// JSValue adata = JS_GetProperty(ctx, replaced, crt->actor_sym);
JSValue adata = JS_NULL;
JSValue adata = JS_GetProperty(ctx, replaced, crt->actor_sym);
if (!JS_IsNull(adata)) {
wota_write_sym(&enc->wb, WOTA_PRIVATE);
wota_encode_value(enc, adata, replaced, JS_ATOM_NULL);
@@ -200,7 +200,7 @@ static void wota_encode_value(WotaEncodeContext *enc, JSValueConst val, JSValueC
}
wota_stack_push(enc, replaced);
JSValue to_json = JS_GetPropertyStr(ctx, replaced, "toJSON");
if (JS_IsFunction(to_json)) {
if (JS_IsFunction(ctx, to_json)) {
JSValue result = JS_Call(ctx, to_json, replaced, 0, NULL);
JS_FreeValue(ctx, to_json);
if (!JS_IsException(result)) {
@@ -248,7 +248,7 @@ static char *decode_wota_value(JSContext *ctx, char *data_ptr, JSValue *out_val,
data_ptr = decode_wota_value(ctx, data_ptr, &inner, holder, JS_ATOM_NULL, reviver);
JSValue obj = JS_NewObject(ctx);
cell_rt *crt = JS_GetContextOpaque(ctx);
// JS_SetProperty(ctx, obj, crt->actor_sym, inner);
JS_SetProperty(ctx, obj, crt->actor_sym, inner);
*out_val = obj;
} else if (scode == WOTA_NULL) *out_val = JS_NULL;
else if (scode == WOTA_FALSE) *out_val = JS_NewBool(ctx, 0);
@@ -274,7 +274,8 @@ static char *decode_wota_value(JSContext *ctx, char *data_ptr, JSValue *out_val,
case WOTA_ARR: {
long long c;
data_ptr = wota_read_array(&c, data_ptr);
JSValue arr = JS_NewArrayLen(ctx, c);
JSValue arr = JS_NewArray(ctx);
JS_SetLength(ctx, arr, c);
for (long long i = 0; i < c; i++) {
JSValue elem_val = JS_NULL;
JSAtom idx_atom = JS_NewAtomUInt32(ctx, (uint32_t)i);
@@ -359,7 +360,7 @@ static JSValue js_wota_encode(JSContext *ctx, JSValueConst this_val, int argc, J
{
if (argc < 1) return JS_ThrowTypeError(ctx, "wota.encode requires at least 1 argument");
size_t total_bytes;
void *wota = value2wota(ctx, argv[0], JS_IsFunction(argv[1]) ? argv[1] : JS_NULL, &total_bytes);
void *wota = value2wota(ctx, argv[0], JS_IsFunction(ctx,argv[1]) ? argv[1] : JS_NULL, &total_bytes);
JSValue ret = js_new_blob_stoned_copy(ctx, wota, total_bytes);
free(wota);
return ret;
@@ -372,7 +373,7 @@ static JSValue js_wota_decode(JSContext *ctx, JSValueConst this_val, int argc, J
uint8_t *buf = js_get_blob_data(ctx, &len, argv[0]);
if (buf == (uint8_t *)-1) return JS_EXCEPTION;
if (!buf || len == 0) return JS_ThrowTypeError(ctx, "No blob data present");
JSValue reviver = (argc > 1 && JS_IsFunction(argv[1])) ? argv[1] : JS_NULL;
JSValue reviver = (argc > 1 && JS_IsFunction(ctx, argv[1])) ? argv[1] : JS_NULL;
char *data_ptr = (char *)buf;
JSValue result = JS_NULL;
JSValue holder = JS_NewObject(ctx);

View File

@@ -38,8 +38,10 @@ DEF(def, "def")
DEF(this, "this")
DEF(delete, "delete")
DEF(void, "void")
DEF(typeof, "typeof")
DEF(new, "new")
DEF(in, "in")
DEF(instanceof, "instanceof")
DEF(do, "do")
DEF(while, "while")
DEF(for, "for")

View File

@@ -75,6 +75,7 @@ DEF( push_false, 1, 0, 1, none)
DEF( push_true, 1, 0, 1, none)
DEF( object, 1, 0, 1, none)
DEF( special_object, 2, 0, 1, u8) /* only used at the start of a function */
DEF( rest, 3, 0, 1, u16) /* only used at the start of a function */
DEF( drop, 1, 1, 0, none) /* a -> */
DEF( nip, 1, 2, 1, none) /* a b -> b */
@@ -96,16 +97,19 @@ DEF( rot3r, 1, 3, 3, none) /* a b x -> x a b */
DEF( rot4l, 1, 4, 4, none) /* x a b c -> a b c x */
DEF( rot5l, 1, 5, 5, none) /* x a b c d -> a b c d x */
DEF(call_constructor, 3, 2, 1, npop) /* func new.target args -> ret. arguments are not counted in n_pop */
DEF( call, 3, 1, 1, npop) /* arguments are not counted in n_pop */
DEF( tail_call, 3, 1, 0, npop) /* arguments are not counted in n_pop */
DEF( call_method, 3, 2, 1, npop) /* arguments are not counted in n_pop */
DEF(tail_call_method, 3, 2, 0, npop) /* arguments are not counted in n_pop */
DEF( array_from, 3, 0, 1, npop) /* arguments are not counted in n_pop */
DEF( apply, 3, 3, 1, u16)
DEF( return, 1, 1, 0, none)
DEF( return_undef, 1, 0, 0, none)
DEF( throw, 1, 1, 0, none)
DEF( throw_error, 6, 0, 0, atom_u8)
DEF( eval, 5, 1, 1, npop_u16) /* func args... -> ret_val */
DEF( apply_eval, 3, 2, 1, u16) /* func array -> ret_eval */
DEF( regexp, 1, 2, 1, none) /* create a RegExp object from the pattern and a
bytecode string */
@@ -132,7 +136,9 @@ DEF( put_array_el, 1, 3, 0, none)
DEF( define_field, 5, 2, 1, atom)
DEF( set_name, 5, 1, 1, atom)
DEF(set_name_computed, 1, 2, 2, none)
DEF( set_proto, 1, 2, 1, none)
DEF(define_array_el, 1, 3, 2, none)
DEF( append, 1, 3, 2, none) /* append enumerated object, update length */
DEF(copy_data_properties, 2, 3, 3, u8)
DEF( define_method, 6, 2, 1, atom_u8)
DEF(define_method_computed, 2, 3, 1, u8) /* must come after define_method */
@@ -174,6 +180,16 @@ DEF( make_arg_ref, 7, 0, 2, atom_u16)
DEF(make_var_ref_ref, 7, 0, 2, atom_u16)
DEF( make_var_ref, 5, 0, 2, atom)
DEF( for_in_start, 1, 1, 1, none)
DEF( for_of_start, 1, 1, 3, none)
DEF( for_in_next, 1, 1, 3, none)
DEF( for_of_next, 2, 3, 5, u8)
DEF(iterator_check_object, 1, 1, 1, none)
DEF(iterator_get_value_done, 1, 2, 3, none) /* catch_offset obj -> catch_offset value done */
DEF( iterator_close, 1, 3, 0, none)
DEF( iterator_next, 1, 4, 4, none)
DEF( iterator_call, 2, 4, 5, u8)
/* arithmetic/logic operations */
DEF( neg, 1, 1, 1, none)
DEF( plus, 1, 1, 1, none)
@@ -186,18 +202,15 @@ DEF( inc_loc, 2, 0, 0, loc8)
DEF( add_loc, 2, 1, 0, loc8)
DEF( not, 1, 1, 1, none)
DEF( lnot, 1, 1, 1, none)
DEF( typeof, 1, 1, 1, none)
DEF( delete, 1, 2, 1, none)
DEF( delete_var, 5, 0, 1, atom)
DEF( mul, 1, 2, 1, none)
DEF( mul_float, 1, 2, 1, none)
DEF( div, 1, 2, 1, none)
DEF( div_float, 1, 2, 1, none)
DEF( mod, 1, 2, 1, none)
DEF( add, 1, 2, 1, none)
DEF( add_float, 1, 2, 1, none)
DEF( sub, 1, 2, 1, none)
DEF( sub_float, 1, 2, 1, none)
DEF( pow, 1, 2, 1, none)
DEF( shl, 1, 2, 1, none)
DEF( sar, 1, 2, 1, none)
@@ -206,15 +219,15 @@ DEF( lt, 1, 2, 1, none)
DEF( lte, 1, 2, 1, none)
DEF( gt, 1, 2, 1, none)
DEF( gte, 1, 2, 1, none)
DEF( instanceof, 1, 2, 1, none)
DEF( in, 1, 2, 1, none)
DEF( eq, 1, 2, 1, none)
DEF( neq, 1, 2, 1, none)
DEF( strict_eq, 1, 2, 1, none)
DEF( strict_neq, 1, 2, 1, none)
DEF( and, 1, 2, 1, none)
DEF( xor, 1, 2, 1, none)
DEF( or, 1, 2, 1, none)
/* template literal concatenation - pops N parts, pushes concatenated string */
DEF(template_concat, 3, 0, 1, npop_u16)
/* must be the last non short and non temporary opcode */
DEF( nop, 1, 0, 0, none)
@@ -298,6 +311,8 @@ DEF( set_var_ref1, 1, 1, 1, none_var_ref)
DEF( set_var_ref2, 1, 1, 1, none_var_ref)
DEF( set_var_ref3, 1, 1, 1, none_var_ref)
DEF( get_length, 1, 1, 1, none)
DEF( if_false8, 2, 1, 0, label8)
DEF( if_true8, 2, 1, 0, label8) /* must come after if_false8 */
DEF( goto8, 2, 0, 0, label8) /* must come after if_true8 */
@@ -309,6 +324,7 @@ DEF( call2, 1, 1, 1, npopx)
DEF( call3, 1, 1, 1, npopx)
DEF( is_null, 1, 1, 1, none)
DEF( typeof_is_function, 1, 1, 1, none)
#endif
#undef DEF

File diff suppressed because it is too large Load Diff

View File

@@ -74,12 +74,11 @@ typedef uint32_t JSAtom;
enum {
/* all tags with a reference count are negative */
JS_TAG_FIRST = -10, /* first negative tag */
JS_TAG_SYMBOL = -9,
JS_TAG_STRING = -8,
JS_TAG_STRING_ROPE = -7,
JS_TAG_ARRAY = -6, /* intrinsic array type */
JS_TAG_FUNCTION = -5, /* intrinsic function type */
JS_TAG_FIRST = -9, /* first negative tag */
JS_TAG_BIG_INT = -9,
JS_TAG_SYMBOL = -8,
JS_TAG_STRING = -7,
JS_TAG_STRING_ROPE = -6,
JS_TAG_MODULE = -3, /* used internally */
JS_TAG_FUNCTION_BYTECODE = -2, /* used internally */
JS_TAG_OBJECT = -1,
@@ -87,6 +86,7 @@ enum {
JS_TAG_INT = 0,
JS_TAG_BOOL = 1,
JS_TAG_NULL = 2,
// TAG_UNDEFINED
JS_TAG_UNINITIALIZED = 4,
JS_TAG_CATCH_OFFSET = 5,
JS_TAG_EXCEPTION = 6,
@@ -272,12 +272,36 @@ static inline JS_BOOL JS_VALUE_IS_NAN(JSValue v)
#define JS_EXCEPTION JS_MKVAL(JS_TAG_EXCEPTION, 0)
#define JS_UNINITIALIZED JS_MKVAL(JS_TAG_UNINITIALIZED, 0)
/* flags for object properties - simplified model:
- No per-property writable/configurable (use stone() for immutability)
- Text keys are enumerable, object keys are not */
#define JS_PROP_TMASK (3 << 4) /* mask for NORMAL, VARREF */
/* flags for object properties */
#define JS_PROP_CONFIGURABLE (1 << 0)
#define JS_PROP_WRITABLE (1 << 1)
#define JS_PROP_ENUMERABLE (1 << 2)
#define JS_PROP_C_W_E (JS_PROP_CONFIGURABLE | JS_PROP_WRITABLE | JS_PROP_ENUMERABLE)
#define JS_PROP_LENGTH (1 << 3) /* used internally in Arrays */
#define JS_PROP_TMASK (3 << 4) /* mask for NORMAL, GETSET, VARREF, AUTOINIT */
#define JS_PROP_NORMAL (0 << 4)
#define JS_PROP_VARREF (2 << 4) /* used internally for closures */
#define JS_PROP_GETSET (1 << 4)
#define JS_PROP_VARREF (2 << 4) /* used internally */
#define JS_PROP_AUTOINIT (3 << 4) /* used internally */
/* flags for JS_DefineProperty */
#define JS_PROP_HAS_SHIFT 8
#define JS_PROP_HAS_CONFIGURABLE (1 << 8)
#define JS_PROP_HAS_WRITABLE (1 << 9)
#define JS_PROP_HAS_ENUMERABLE (1 << 10)
#define JS_PROP_HAS_GET (1 << 11)
#define JS_PROP_HAS_SET (1 << 12)
#define JS_PROP_HAS_VALUE (1 << 13)
/* throw an exception if false would be returned
(JS_DefineProperty/JS_SetProperty) */
#define JS_PROP_THROW (1 << 14)
/* throw an exception if false would be returned in strict mode
(JS_SetProperty) */
#define JS_PROP_THROW_STRICT (1 << 15)
#define JS_PROP_NO_ADD (1 << 16) /* internal use */
#define JS_PROP_NO_EXOTIC (1 << 17) /* internal use */
#ifndef JS_DEFAULT_STACK_SIZE
#define JS_DEFAULT_STACK_SIZE (1024 * 1024)
@@ -316,9 +340,6 @@ typedef struct JSMallocFunctions {
typedef struct JSGCObjectHeader JSGCObjectHeader;
JSValue JS_Stone(JSContext *ctx, JSValueConst this_val);
int JS_IsStone(JSContext *ctx, JSValueConst val);
JSRuntime *JS_NewRuntime(void);
/* info lifetime must exceed that of rt */
void JS_SetRuntimeInfo(JSRuntime *rt, const char *info);
@@ -353,9 +374,12 @@ JSValue JS_GetClassProto(JSContext *ctx, JSClassID class_id);
JSContext *JS_NewContextRaw(JSRuntime *rt);
void JS_AddIntrinsicBaseObjects(JSContext *ctx);
void JS_AddIntrinsicEval(JSContext *ctx);
void JS_AddIntrinsicStringNormalize(JSContext *ctx);
void JS_AddIntrinsicRegExpCompiler(JSContext *ctx);
void JS_AddIntrinsicRegExp(JSContext *ctx);
void JS_AddIntrinsicJSON(JSContext *ctx);
void JS_AddIntrinsicProxy(JSContext *ctx);
void JS_AddIntrinsicMapSet(JSContext *ctx);
JSValue js_string_codePointRange(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv);
@@ -419,9 +443,33 @@ typedef struct JSPropertyEnum {
JSAtom atom;
} JSPropertyEnum;
typedef struct JSPropertyDescriptor {
int flags;
JSValue value;
JSValue getter;
JSValue setter;
} JSPropertyDescriptor;
typedef struct JSClassExoticMethods {
/* Return -1 if exception (can only happen in case of Proxy object),
FALSE if the property does not exist, TRUE if it exists. If 1 is
returned, the property descriptor 'desc' is filled if != NULL. */
int (*get_own_property)(JSContext *ctx, JSPropertyDescriptor *desc,
JSValueConst obj, JSAtom prop);
/* return < 0 if exception, or TRUE/FALSE */
int (*delete_property)(JSContext *ctx, JSValueConst obj, JSAtom prop);
/* return < 0 if exception or TRUE/FALSE */
int (*define_own_property)(JSContext *ctx, JSValueConst this_obj,
JSAtom prop, JSValueConst val,
JSValueConst getter, JSValueConst setter,
int flags);
} JSClassExoticMethods;
typedef void JSClassFinalizer(JSRuntime *rt, JSValue val);
typedef void JSClassGCMark(JSRuntime *rt, JSValueConst val,
JS_MarkFunc *mark_func);
#define JS_CALL_FLAG_CONSTRUCTOR (1 << 0)
typedef JSValue JSClassCall(JSContext *ctx, JSValueConst func_obj,
JSValueConst this_val, int argc, JSValueConst *argv,
int flags);
@@ -430,8 +478,15 @@ typedef struct JSClassDef {
const char *class_name;
JSClassFinalizer *finalizer;
JSClassGCMark *gc_mark;
/* if call != NULL, the object is a function */
/* if call != NULL, the object is a function. If (flags &
JS_CALL_FLAG_CONSTRUCTOR) != 0, the function is called as a
constructor. In this case, 'this_val' is new.target. A
constructor call only happens if the object constructor bit is
set (see JS_SetConstructorBit()). */
JSClassCall *call;
/* XXX: suppress this indirection ? It is here only to save memory
because only a few classes need these methods */
JSClassExoticMethods *exotic;
} JSClassDef;
#define JS_INVALID_CLASS_ID 0
@@ -533,31 +588,15 @@ static inline JS_BOOL JS_IsString(JSValueConst v)
JS_VALUE_GET_TAG(v) == JS_TAG_STRING_ROPE;
}
static inline JS_BOOL JS_IsText(JSValueConst v) { return JS_IsString(v); }
static inline JS_BOOL JS_IsSymbol(JSValueConst v)
{
return JS_VALUE_GET_TAG(v) == JS_TAG_SYMBOL;
}
static inline JS_BOOL JS_IsFunction(JSValueConst v)
{
return JS_VALUE_GET_TAG(v) == JS_TAG_FUNCTION;
}
static inline JS_BOOL JS_IsInteger(JSValueConst v)
{
return JS_VALUE_GET_TAG(v) == JS_TAG_INT;
}
static inline JS_BOOL JS_IsObject(JSValueConst v)
{
return JS_VALUE_GET_TAG(v) == JS_TAG_OBJECT;
}
int JS_IsArray(JSContext *ctx, JSValueConst val);
// Fundamental
int JS_GetLength(JSContext *ctx, JSValueConst obj, int64_t *pres);
JSValue JS_Throw(JSContext *ctx, JSValue obj);
void JS_SetUncatchableException(JSContext *ctx, JS_BOOL flag);
@@ -651,10 +690,14 @@ JSValue JS_NewObjectClass(JSContext *ctx, int class_id);
JSValue JS_NewObjectProto(JSContext *ctx, JSValueConst proto);
JSValue JS_NewObject(JSContext *ctx);
JS_BOOL JS_IsFunction(JSContext* ctx, JSValueConst val);
JS_BOOL JS_IsConstructor(JSContext* ctx, JSValueConst val);
JS_BOOL JS_SetConstructorBit(JSContext *ctx, JSValueConst func_obj, JS_BOOL val);
JSValue JS_NewArray(JSContext *ctx);
JSValue JS_NewArrayLen(JSContext *ctx, uint32_t len);
int JS_ArrayPush(JSContext *ctx, JSValueConst obj, JSValueConst val);
JSValue JS_ArrayPop(JSContext *ctx, JSValueConst obj);
int JS_IsArray(JSContext *ctx, JSValueConst val);
int JS_GetLength(JSContext *ctx, JSValueConst obj, int64_t *pres);
int JS_SetLength(JSContext *ctx, JSValueConst obj, int64_t len);
JSValue JS_GetPropertyInternal(JSContext *ctx, JSValueConst obj,
JSAtom prop, JSValueConst receiver,
@@ -664,40 +707,56 @@ static js_force_inline JSValue JS_GetProperty(JSContext *ctx, JSValueConst this_
{
return JS_GetPropertyInternal(ctx, this_obj, prop, this_obj, 0);
}
JSValue JS_GetPropertyStr(JSContext *ctx, JSValueConst this_obj,
const char *prop);
JSValue JS_GetPropertyUint32(JSContext *ctx, JSValueConst this_obj,
uint32_t idx);
// For records
JSValue JS_GetPropertyStr(JSContext *ctx, JSValueConst this_obj, const char *prop);
int JS_SetPropertyStr(JSContext *ctx, JSValueConst this_obj, const char *prop, JSValue val);
int JS_SetProperty(JSContext *ctx, JSValueConst this_obj, JSAtom prop, JSValue val);
JSValue JS_GetPropertyKey(JSContext *ctx, JSValueConst this_obj, JSValueConst key);
int JS_SetPropertyKey(JSContext *ctx, JSValueConst this_obj, JSValueConst key, JSValue val);
// Must be an array
JSValue JS_GetPropertyNumber(JSContext *ctx, JSValueConst this_obj, int idx);
JSValue JS_GetPropertyUint32(JSContext *ctx, JSValueConst this_obj, uint32_t idx);
int JS_SetPropertyUint32(JSContext *ctx, JSValueConst this_obj, uint32_t idx, JSValue val);
int JS_SetPropertyInt64(JSContext *ctx, JSValueConst this_obj, int64_t idx, JSValue val);
int JS_SetPropertyInternal(JSContext *ctx, JSValueConst obj,
JSAtom prop, JSValue val, JSValueConst this_obj,
int flags);
static inline int JS_SetProperty(JSContext *ctx, JSValueConst this_obj,
JSAtom prop, JSValue val)
{
return JS_SetPropertyInternal(ctx, this_obj, prop, val, this_obj, JS_PROP_THROW);
}
int JS_SetPropertyUint32(JSContext *ctx, JSValueConst this_obj,
uint32_t idx, JSValue val);
int JS_SetPropertyInt64(JSContext *ctx, JSValueConst this_obj,
int64_t idx, JSValue val);
int JS_SetPropertyStr(JSContext *ctx, JSValueConst this_obj,
const char *prop, JSValue val);
int JS_HasProperty(JSContext *ctx, JSValueConst this_obj, JSAtom prop);
int JS_DeleteProperty(JSContext *ctx, JSValueConst obj, JSAtom prop);
int JS_IsExtensible(JSContext *ctx, JSValueConst obj);
int JS_PreventExtensions(JSContext *ctx, JSValueConst obj);
int JS_DeleteProperty(JSContext *ctx, JSValueConst obj, JSAtom prop, int flags);
int JS_SetPrototype(JSContext *ctx, JSValueConst obj, JSValueConst proto_val);
JSValue JS_GetPrototype(JSContext *ctx, JSValueConst val);
/* Get Own Property Names flags */
#define JS_GPN_STRING_MASK (1 << 0)
#define JS_GPN_SYMBOL_MASK (1 << 1)
#define JS_GPN_PRIVATE_MASK (1 << 2)
/* only include the enumerable properties */
#define JS_GPN_ENUM_ONLY (1 << 4)
/* set the JSPropertyEnum.is_enumerable field */
#define JS_GPN_SET_ENUM (1 << 5)
int JS_GetOwnPropertyNames(JSContext *ctx, JSPropertyEnum **ptab,
uint32_t *plen, JSValueConst obj, int flags);
void JS_FreePropertyEnum(JSContext *ctx, JSPropertyEnum *tab,
uint32_t len);
int JS_GetOwnProperty(JSContext *ctx, JSPropertyDescriptor *desc,
JSValueConst obj, JSAtom prop);
JSValue JS_Call(JSContext *ctx, JSValueConst func_obj, JSValueConst this_obj,
int argc, JSValueConst *argv);
JSValue JS_Invoke(JSContext *ctx, JSValueConst this_val, JSAtom atom,
int argc, JSValueConst *argv);
JSValue JS_CallConstructor(JSContext *ctx, JSValueConst func_obj,
int argc, JSValueConst *argv);
JSValue JS_CallConstructor2(JSContext *ctx, JSValueConst func_obj,
JSValueConst new_target,
int argc, JSValueConst *argv);
/* 'input' must be zero terminated i.e. input[input_len] = '\0'. */
JSValue JS_Eval(JSContext *ctx, const char *input, size_t input_len,
const char *filename, int eval_flags);
@@ -706,6 +765,19 @@ JSValue JS_EvalThis(JSContext *ctx, JSValueConst this_obj,
const char *input, size_t input_len,
const char *filename, int eval_flags);
JSValue JS_GetGlobalObject(JSContext *ctx);
int JS_IsInstanceOf(JSContext *ctx, JSValueConst val, JSValueConst obj);
int JS_DefineProperty(JSContext *ctx, JSValueConst this_obj,
JSAtom prop, JSValueConst val,
JSValueConst getter, JSValueConst setter, int flags);
int JS_DefinePropertyValue(JSContext *ctx, JSValueConst this_obj,
JSAtom prop, JSValue val, int flags);
int JS_DefinePropertyValueUint32(JSContext *ctx, JSValueConst this_obj,
uint32_t idx, JSValue val, int flags);
int JS_DefinePropertyValueStr(JSContext *ctx, JSValueConst this_obj,
const char *prop, JSValue val, int flags);
int JS_DefinePropertyGetSet(JSContext *ctx, JSValueConst this_obj,
JSAtom prop, JSValue getter, JSValue setter,
int flags);
void JS_SetOpaque(JSValue obj, void *opaque);
void *JS_GetOpaque(JSValueConst obj, JSClassID class_id);
void *JS_GetOpaque2(JSContext *ctx, JSValueConst obj, JSClassID class_id);
@@ -752,37 +824,36 @@ JSValue JS_ReadObject(JSContext *ctx, const uint8_t *buf, size_t buf_len,
JSValue JS_EvalFunction(JSContext *ctx, JSValue fun_obj);
/* C function definition */
typedef enum JSCFunctionEnum {
typedef enum JSCFunctionEnum { /* XXX: should rename for namespace isolation */
JS_CFUNC_generic,
JS_CFUNC_generic_magic,
JS_CFUNC_constructor,
JS_CFUNC_constructor_magic,
JS_CFUNC_constructor_or_func,
JS_CFUNC_constructor_or_func_magic,
JS_CFUNC_f_f,
JS_CFUNC_f_f_f,
/* Fixed-arity fast paths - no argc/argv overhead */
JS_CFUNC_0, /* JSValue f(ctx, this_val) */
JS_CFUNC_1, /* JSValue f(ctx, this_val, arg0) */
JS_CFUNC_2, /* JSValue f(ctx, this_val, arg0, arg1) */
JS_CFUNC_3, /* JSValue f(ctx, this_val, arg0, arg1, arg2) */
JS_CFUNC_4
JS_CFUNC_getter,
JS_CFUNC_setter,
JS_CFUNC_getter_magic,
JS_CFUNC_setter_magic,
JS_CFUNC_iterator_next,
} JSCFunctionEnum;
/* Fixed-arity C function types for fast paths */
typedef JSValue JSCFunction0(JSContext *ctx, JSValueConst this_val);
typedef JSValue JSCFunction1(JSContext *ctx, JSValueConst this_val, JSValueConst arg0);
typedef JSValue JSCFunction2(JSContext *ctx, JSValueConst this_val, JSValueConst arg0, JSValueConst arg1);
typedef JSValue JSCFunction3(JSContext *ctx, JSValueConst this_val, JSValueConst arg0, JSValueConst arg1, JSValueConst arg2);
typedef JSValue JSCFunction4(JSContext *ctx, JSValueConst this_val, JSValueConst arg0, JSValueConst arg1, JSValueConst arg2, JSValueConst arg3);
typedef union JSCFunctionType {
JSCFunction *generic;
JSValue (*generic_magic)(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv, int magic);
JSCFunction *constructor;
JSValue (*constructor_magic)(JSContext *ctx, JSValueConst new_target, int argc, JSValueConst *argv, int magic);
JSCFunction *constructor_or_func;
double (*f_f)(double);
double (*f_f_f)(double, double);
/* Fixed-arity fast paths */
JSCFunction0 *f0;
JSCFunction1 *f1;
JSCFunction2 *f2;
JSCFunction3 *f3;
JSCFunction4 *f4;
JSValue (*getter)(JSContext *ctx, JSValueConst this_val);
JSValue (*setter)(JSContext *ctx, JSValueConst this_val, JSValueConst val);
JSValue (*getter_magic)(JSContext *ctx, JSValueConst this_val, int magic);
JSValue (*setter_magic)(JSContext *ctx, JSValueConst this_val, JSValueConst val, int magic);
JSValue (*iterator_next)(JSContext *ctx, JSValueConst this_val,
int argc, JSValueConst *argv, int *pdone, int magic);
} JSCFunctionType;
JSValue JS_NewCFunction2(JSContext *ctx, JSCFunction *func,
@@ -804,32 +875,8 @@ static inline JSValue JS_NewCFunctionMagic(JSContext *ctx, JSCFunctionMagic *fun
{
return JS_NewCFunction2(ctx, (JSCFunction *)func, name, length, cproto, magic);
}
/* Fixed-arity fast path constructors */
static inline JSValue JS_NewCFuncFixed0(JSContext *ctx, JSCFunction0 *func, const char *name)
{
return JS_NewCFunction2(ctx, (JSCFunction *)func, name, 0, JS_CFUNC_0, 0);
}
static inline JSValue JS_NewCFuncFixed1(JSContext *ctx, JSCFunction1 *func, const char *name)
{
return JS_NewCFunction2(ctx, (JSCFunction *)func, name, 1, JS_CFUNC_1, 0);
}
static inline JSValue JS_NewCFuncFixed2(JSContext *ctx, JSCFunction2 *func, const char *name)
{
return JS_NewCFunction2(ctx, (JSCFunction *)func, name, 2, JS_CFUNC_2, 0);
}
static inline JSValue JS_NewCFuncFixed3(JSContext *ctx, JSCFunction3 *func, const char *name)
{
return JS_NewCFunction2(ctx, (JSCFunction *)func, name, 3, JS_CFUNC_3, 0);
}
static inline JSValue JS_NewCFuncFixed4(JSContext *ctx, JSCFunction4 *func, const char *name)
{
return JS_NewCFunction2(ctx, (JSCFunction *)func, name, 4, JS_CFUNC_4, 0);
}
void JS_SetConstructor(JSContext *ctx, JSValueConst func_obj,
JSValueConst proto);
/* C property definition */
@@ -844,6 +891,10 @@ typedef struct JSCFunctionListEntry {
uint8_t cproto; /* XXX: should move outside union */
JSCFunctionType cfunc;
} func;
struct {
JSCFunctionType get;
JSCFunctionType set;
} getset;
struct {
const char *name;
int base;
@@ -860,6 +911,8 @@ typedef struct JSCFunctionListEntry {
} JSCFunctionListEntry;
#define JS_DEF_CFUNC 0
#define JS_DEF_CGETSET 1
#define JS_DEF_CGETSET_MAGIC 2
#define JS_DEF_PROP_STRING 3
#define JS_DEF_PROP_INT32 4
#define JS_DEF_PROP_INT64 5
@@ -869,15 +922,12 @@ typedef struct JSCFunctionListEntry {
#define JS_DEF_ALIAS 9
/* Note: c++ does not like nested designators */
#define JS_CFUNC_DEF(name, length, func1) { name, 0, JS_DEF_CFUNC, 0, .u = { .func = { length, JS_CFUNC_generic, { .generic = func1 } } } }
#define JS_CFUNC_MAGIC_DEF(name, length, func1, magic) { name, 0, JS_DEF_CFUNC, magic, .u = { .func = { length, JS_CFUNC_generic_magic, { .generic_magic = func1 } } } }
#define JS_CFUNC_SPECIAL_DEF(name, length, cproto, func1) { name, 0, JS_DEF_CFUNC, 0, .u = { .func = { length, JS_CFUNC_ ## cproto, { .cproto = func1 } } } }
/* Fixed-arity fast path macros */
#define JS_CFUNC0_DEF(name, func1) { name, 0, JS_DEF_CFUNC, 0, .u = { .func = { 0, JS_CFUNC_0, { .f0 = func1 } } } }
#define JS_CFUNC1_DEF(name, func1) { name, 0, JS_DEF_CFUNC, 0, .u = { .func = { 1, JS_CFUNC_1, { .f1 = func1 } } } }
#define JS_CFUNC2_DEF(name, func1) { name, 0, JS_DEF_CFUNC, 0, .u = { .func = { 2, JS_CFUNC_2, { .f2 = func1 } } } }
#define JS_CFUNC3_DEF(name, func1) { name, 0, JS_DEF_CFUNC, 0, .u = { .func = { 3, JS_CFUNC_3, { .f3 = func1 } } } }
#define JS_ITERATOR_NEXT_DEF(name, length, func1, magic) { name, 0, JS_DEF_CFUNC, magic, .u = { .func = { length, JS_CFUNC_iterator_next, { .iterator_next = func1 } } } }
#define JS_CFUNC_DEF(name, length, func1) { name, JS_PROP_WRITABLE | JS_PROP_CONFIGURABLE, JS_DEF_CFUNC, 0, .u = { .func = { length, JS_CFUNC_generic, { .generic = func1 } } } }
#define JS_CFUNC_MAGIC_DEF(name, length, func1, magic) { name, JS_PROP_WRITABLE | JS_PROP_CONFIGURABLE, JS_DEF_CFUNC, magic, .u = { .func = { length, JS_CFUNC_generic_magic, { .generic_magic = func1 } } } }
#define JS_CFUNC_SPECIAL_DEF(name, length, cproto, func1) { name, JS_PROP_WRITABLE | JS_PROP_CONFIGURABLE, JS_DEF_CFUNC, 0, .u = { .func = { length, JS_CFUNC_ ## cproto, { .cproto = func1 } } } }
#define JS_ITERATOR_NEXT_DEF(name, length, func1, magic) { name, JS_PROP_WRITABLE | JS_PROP_CONFIGURABLE, JS_DEF_CFUNC, magic, .u = { .func = { length, JS_CFUNC_iterator_next, { .iterator_next = func1 } } } }
#define JS_CGETSET_DEF(name, fgetter, fsetter) { name, JS_PROP_CONFIGURABLE, JS_DEF_CGETSET, 0, .u = { .getset = { .get = { .getter = fgetter }, .set = { .setter = fsetter } } } }
#define JS_CGETSET_MAGIC_DEF(name, fgetter, fsetter, magic) { name, JS_PROP_CONFIGURABLE, JS_DEF_CGETSET_MAGIC, magic, .u = { .getset = { .get = { .getter_magic = fgetter }, .set = { .setter_magic = fsetter } } } }
#define JS_PROP_STRING_DEF(name, cstr, prop_flags) { name, prop_flags, JS_DEF_PROP_STRING, 0, .u = { .str = cstr } }
#define JS_PROP_INT32_DEF(name, val, prop_flags) { name, prop_flags, JS_DEF_PROP_INT32, 0, .u = { .i32 = val } }
#define JS_PROP_INT64_DEF(name, val, prop_flags) { name, prop_flags, JS_DEF_PROP_INT64, 0, .u = { .i64 = val } }
@@ -912,19 +962,21 @@ void JS_PrintValue(JSContext *ctx, JSPrintValueWrite *write_func, void *write_op
JSValueConst val, const JSPrintValueOptions *options);
typedef struct js_debug {
char name[64];
char filename[96];
int unique;
int line;
const char *name; // nameof function
const char *what;
const char *source; // source code of function
size_t srclen;
const char *filename; // name of file function is in
int nparams;
int vararg;
int line; // line the function is on
int param_n;
int closure_n;
int vararg;
const char *what;
const uint8_t *source;
int srclen;
uint32_t unique; // a unique identifier for this function
} js_debug;
void js_debug_info(JSContext *js, JSValue fn, js_debug *dbg);
void free_js_debug_info(JSContext *js, js_debug *dbg);
typedef void (*js_hook)(JSContext*, int type, js_debug *dbg, void *user);
#define JS_HOOK_CALL 1
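/* A hedged sketch of a js_hook consumer built only from the declarations
   above; the API for installing a hook is not shown in this diff, so
   registration is omitted and <stdio.h> is assumed for the print. */
static void example_call_hook(JSContext *ctx, int type, js_debug *dbg, void *user)
{
  (void)ctx; (void)user;
  if (type != JS_HOOK_CALL || !dbg) return;
  printf("call %s (%s:%d)\n",
         dbg->name ? dbg->name : "<anon>",
         dbg->filename ? dbg->filename : "<unknown>",
         dbg->line);
}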

View File

@@ -270,7 +270,7 @@ void actor_free(cell_rt *actor)
JS_FreeValue(js, actor->message_handle);
JS_FreeValue(js, actor->on_exception);
JS_FreeValue(js, actor->unneeded);
JS_FreeValue(js, actor->actor_sym);
JS_FreeAtom(js, actor->actor_sym);
for (int i = 0; i < hmlen(actor->timers); i++) {
JS_FreeValue(js, actor->timers[i].value);
@@ -436,7 +436,7 @@ void actor_unneeded(cell_rt *actor, JSValue fn, double seconds)
if (actor->disrupt) return;
JS_FreeValue(actor->context, actor->unneeded);
if (!JS_IsFunction(fn)) {
if (!JS_IsFunction(actor->context, fn)) {
actor->unneeded = JS_NULL;
goto END;
}
@@ -497,7 +497,7 @@ cell_rt *create_actor(void *wota)
actor->message_handle = JS_NULL;
actor->unneeded = JS_NULL;
actor->on_exception = JS_NULL;
actor->actor_sym = JS_NULL;
actor->actor_sym = JS_ATOM_NULL;
arrsetcap(actor->letters, 5);

229
test.ce
View File

@@ -4,16 +4,14 @@ var fd = use('fd')
var time = use('time')
var json = use('json')
var blob = use('blob')
var dbg = use('js')
// run gc with dbg.gc()
log.console("here")
if (!args) args = []
var target_pkg = null // null = current package
var target_test = null // null = all tests, otherwise specific test file
var all_pkgs = false
var gc_after_each_test = false
// Actor test support
def ACTOR_TEST_TIMEOUT = 30000 // 30 seconds timeout for actor tests
@@ -47,17 +45,7 @@ function get_current_package_name() {
// cell test package all - run all tests from all packages
function parse_args() {
var cleaned_args = []
for (var i = 0; i < length(args); i++) {
if (args[i] == '-g') {
gc_after_each_test = true
} else {
push(cleaned_args, args[i])
}
}
args = cleaned_args
if (length(args) == 0) {
if (args.length == 0) {
// cell test - run all tests for current package
if (!is_valid_package('.')) {
log.console('No cell.toml found in current directory')
@@ -78,7 +66,7 @@ function parse_args() {
}
if (args[0] == 'package') {
if (length(args) < 2) {
if (args.length < 2) {
log.console('Usage: cell test package <name> [test]')
log.console(' cell test package all')
return false
@@ -98,7 +86,7 @@ function parse_args() {
var lock = shop.load_lock()
if (lock[name]) {
target_pkg = name
} else if (starts_with(name, '/') && is_valid_package(name)) {
} else if (name.startsWith('/') && is_valid_package(name)) {
target_pkg = name
} else {
// Try to resolve as dependency alias from current package
@@ -116,7 +104,7 @@ function parse_args() {
}
}
if (length(args) >= 3) {
if (args.length >= 3) {
// cell test package <name> <test>
target_test = args[2]
}
@@ -129,7 +117,7 @@ function parse_args() {
var test_path = args[0]
// Normalize path - add tests/ prefix if not present and doesn't start with /
if (!starts_with(test_path, 'tests/') && !starts_with(test_path, '/')) {
if (!test_path.startsWith('tests/') && !test_path.startsWith('/')) {
// Check if file exists as-is first
if (!fd.is_file(test_path + '.cm') && !fd.is_file(test_path)) {
// Try with tests/ prefix
@@ -158,14 +146,15 @@ if (!parse_args()) {
function ensure_dir(path) {
if (fd.is_dir(path)) return true
var parts = array(path, '/')
var current = starts_with(path, '/') ? '/' : ''
for (var i = 0; i < length(parts); i++) {
var parts = path.split('/')
var current = path.startsWith('/') ? '/' : ''
for (var i = 0; i < parts.length; i++) {
if (parts[i] == '') continue
current += parts[i] + '/'
if (!fd.is_dir(current))
if (!fd.is_dir(current)) {
fd.mkdir(current)
}
}
return true
}
@@ -174,7 +163,7 @@ function get_pkg_dir(package_name) {
if (!package_name) {
return fd.realpath('.')
}
if (starts_with(package_name, '/')) {
if (package_name.startsWith('/')) {
return package_name
}
return shop.get_package_dir(package_name)
@@ -189,23 +178,23 @@ function collect_actor_tests(package_name, specific_test) {
var files = pkg.list_files(package_name)
var actor_tests = []
for (var i = 0; i < length(files); i++) {
for (var i = 0; i < files.length; i++) {
var f = files[i]
// Check if file is in tests/ folder and is a .ce actor
if (starts_with(f, "tests/") && ends_with(f, ".ce")) {
if (f.startsWith("tests/") && f.endsWith(".ce")) {
// If specific test requested, filter
if (specific_test) {
var test_name = text(f, 0, -3) // remove .ce
var test_name = f.substring(0, f.length - 3) // remove .ce
var match_name = specific_test
if (!starts_with(match_name, 'tests/')) match_name = 'tests/' + match_name
if (!ends_with(match_name, '.ce')) match_name = match_name
if (!match_name.startsWith('tests/')) match_name = 'tests/' + match_name
if (!match_name.endsWith('.ce')) match_name = match_name
// Match without extension
var test_base = test_name
var match_base = ends_with(match_name, '.ce') ? text(match_name, 0, -3) : match_name
var match_base = match_name.endsWith('.ce') ? match_name.substring(0, match_name.length - 3) : match_name
if (test_base != match_base) continue
}
push(actor_tests,{
actor_tests.push({
package: package_name || "local",
file: f,
path: prefix + '/' + f
@@ -217,7 +206,7 @@ function collect_actor_tests(package_name, specific_test) {
// Spawn an actor test and track it
function spawn_actor_test(test_info) {
var test_name = text(test_info.file, 6, -3) // remove "tests/" and ".ce"
var test_name = test_info.file.substring(6, test_info.file.length - 3) // remove "tests/" and ".ce"
log.console(` [ACTOR] ${test_info.file}`)
var entry = {
@@ -231,14 +220,14 @@ function spawn_actor_test(test_info) {
try {
// Spawn the actor test - it should send back results
var actor_path = text(test_info.path, 0, -3) // remove .ce
var actor_path = test_info.path.substring(0, test_info.path.length - 3) // remove .ce
entry.actor = $start(actor_path)
push(pending_actor_tests, entry)
pending_actor_tests.push(entry)
} catch (e) {
entry.status = "failed"
entry.error = { message: `Failed to spawn actor: ${e}` }
entry.duration_ns = 0
push(actor_test_results, entry)
actor_test_results.push(entry)
log.console(` FAIL ${test_name}: `)
log.error(e)
}
@@ -260,31 +249,31 @@ function run_tests(package_name, specific_test) {
var files = pkg.list_files(package_name)
var test_files = []
for (var i = 0; i < length(files); i++) {
for (var i = 0; i < files.length; i++) {
var f = files[i]
// Check if file is in tests/ folder and is a .cm module (not .ce - those are actor tests)
if (starts_with(f, "tests/") && ends_with(f, ".cm")) {
if (f.startsWith("tests/") && f.endsWith(".cm")) {
// If specific test requested, filter
if (specific_test) {
var test_name = text(f, 0, -3) // remove .cm
var test_name = f.substring(0, f.length - 3) // remove .cm
var match_name = specific_test
if (!starts_with(match_name, 'tests/')) match_name = 'tests/' + match_name
if (!match_name.startsWith('tests/')) match_name = 'tests/' + match_name
// Match without extension
var match_base = ends_with(match_name, '.cm') ? text(match_name, 0, -3) : match_name
var match_base = match_name.endsWith('.cm') ? match_name.substring(0, match_name.length - 3) : match_name
if (test_name != match_base) continue
}
push(test_files, f)
test_files.push(f)
}
}
if (length(test_files) > 0) {
if (test_files.length > 0) {
if (package_name) log.console(`Running tests for ${package_name}`)
else log.console(`Running tests for local package`)
}
for (var i = 0; i < length(test_files); i++) {
for (var i = 0; i < test_files.length; i++) {
var f = test_files[i]
var mod_path = text(f, 0, -3) // remove .cm
var mod_path = f.substring(0, f.length - 3) // remove .cm
var file_result = {
name: f,
@@ -300,19 +289,19 @@ function run_tests(package_name, specific_test) {
test_mod = shop.use(mod_path, use_pkg)
var tests = []
if (is_function(test_mod)) {
push(tests, {name: 'main', fn: test_mod})
} else if (is_object(test_mod)) {
arrfor(array(test_mod), function(k) {
if (is_function(test_mod[k])) {
push(tests, {name: k, fn: test_mod[k]})
if (typeof test_mod == 'function') {
tests.push({name: 'main', fn: test_mod})
} else if (typeof test_mod == 'object') {
for (var k in test_mod) {
if (typeof test_mod[k] == 'function') {
tests.push({name: k, fn: test_mod[k]})
}
}
})
}
if (length(tests) > 0) {
if (tests.length > 0) {
log.console(` ${f}`)
for (var j = 0; j < length(tests); j++) {
for (var j = 0; j < tests.length; j++) {
var t = tests[j]
var test_entry = {
package: pkg_result.package,
@@ -325,9 +314,9 @@ function run_tests(package_name, specific_test) {
try {
var ret = t.fn()
if (is_text(ret)) {
throw Error(ret)
} else if (ret && (is_text(ret.message) || is_proto(ret, Error))) {
if (typeof ret == 'string') {
throw new Error(ret)
} else if (ret && (typeof ret.message == 'string' || ret instanceof Error)) {
throw ret
}
@@ -338,31 +327,28 @@ function run_tests(package_name, specific_test) {
} catch (e) {
test_entry.status = "failed"
test_entry.error = {
message: e,
message: e.toString(),
stack: e.stack || ""
}
if (e.name) test_entry.error.name = e.name
if (is_object(e) && e.message) {
if (typeof e == 'object' && e.message) {
test_entry.error.message = e.message
}
log.console(` FAIL ${t.name} ${test_entry.error.message}`)
if (test_entry.error.stack) {
log.console(` ${text(array(test_entry.error.stack, '\n'), '\n ')}`)
log.console(` ${test_entry.error.stack.split('\n').join('\n ')}`)
}
pkg_result.failed++
file_result.failed++
}
var end_time = time.number()
test_entry.duration_ns = round((end_time - start_time) * 1000000000)
test_entry.duration_ns = number.round((end_time - start_time) * 1000000000)
push(file_result.tests, test_entry)
file_result.tests.push(test_entry)
pkg_result.total++
if (gc_after_each_test) {
dbg.gc()
}
}
}
@@ -375,15 +361,12 @@ function run_tests(package_name, specific_test) {
duration_ns: 0,
error: { message: `Error loading module: ${e}` }
}
push(file_result.tests, test_entry)
file_result.tests.push(test_entry)
pkg_result.failed++
file_result.failed++
pkg_result.total++
if (gc_after_each_test) {
dbg.gc()
}
}
push(pkg_result.files, file_result)
pkg_result.files.push(file_result)
}
return pkg_result
}
@@ -394,25 +377,25 @@ var all_actor_tests = []
if (all_pkgs) {
// Run local first if we're in a valid package
if (is_valid_package('.')) {
push(all_results, run_tests(null, null))
all_actor_tests = array(all_actor_tests, collect_actor_tests(null, null))
all_results.push(run_tests(null, null))
all_actor_tests = all_actor_tests.concat(collect_actor_tests(null, null))
}
// Then all packages in lock
var packages = shop.list_packages()
for (var i = 0; i < length(packages); i++) {
push(all_results, run_tests(packages[i], null))
all_actor_tests = array(all_actor_tests, collect_actor_tests(packages[i], null))
for (var i = 0; i < packages.length; i++) {
all_results.push(run_tests(packages[i], null))
all_actor_tests = all_actor_tests.concat(collect_actor_tests(packages[i], null))
}
} else {
push(all_results, run_tests(target_pkg, target_test))
all_actor_tests = array(all_actor_tests, collect_actor_tests(target_pkg, target_test))
all_results.push(run_tests(target_pkg, target_test))
all_actor_tests = all_actor_tests.concat(collect_actor_tests(target_pkg, target_test))
}
// Spawn actor tests if any
if (length(all_actor_tests) > 0) {
log.console(`Running ${length(all_actor_tests)} actor test(s)...`)
for (var i = 0; i < length(all_actor_tests); i++) {
if (all_actor_tests.length > 0) {
log.console(`Running ${all_actor_tests.length} actor test(s)...`)
for (var i = 0; i < all_actor_tests.length; i++) {
spawn_actor_test(all_actor_tests[i])
}
}
@@ -421,7 +404,7 @@ if (length(all_actor_tests) > 0) {
function handle_actor_message(msg) {
var sender = msg.$sender
var found_idx = -1
for (var i = 0; i < length(pending_actor_tests); i++) {
for (var i = 0; i < pending_actor_tests.length; i++) {
if (pending_actor_tests[i].actor == sender) {
found_idx = i
break
@@ -431,26 +414,26 @@ function handle_actor_message(msg) {
if (found_idx == -1) return
var base_entry = pending_actor_tests[found_idx]
pending_actor_tests = array(array(pending_actor_tests, 0, found_idx), array(pending_actor_tests, found_idx + 1))
pending_actor_tests.splice(found_idx, 1)
var end_time = time.number()
var duration_ns = round((end_time - base_entry.start_time) * 1000000000)
var duration_ns = number.round((end_time - base_entry.start_time) * 1000000000)
var results = []
if (is_array(msg)) {
if (isa(msg, array)) {
results = msg
} else if (msg && is_array(msg.results)) {
} else if (msg && isa(msg.results, array)) {
results = msg.results
} else {
results = [msg]
}
for (var i = 0; i < length(results); i++) {
for (var i = 0; i < results.length; i++) {
var res = results[i] || {}
var entry = {
package: base_entry.package,
file: base_entry.file,
test: res.test || base_entry.test + (length(results) > 1 ? `#${i+1}` : ""),
test: res.test || base_entry.test + (results.length > 1 ? `#${i+1}` : ""),
status: "failed",
duration_ns: duration_ns
}
@@ -467,11 +450,7 @@ function handle_actor_message(msg) {
log.console(` FAIL ${entry.test}: ${entry.error.message}`)
}
push(actor_test_results, entry)
}
if (gc_after_each_test) {
dbg.gc()
actor_test_results.push(entry)
}
check_completion()
@@ -482,27 +461,27 @@ function check_timeouts() {
var now = time.number()
var timed_out = []
for (var i = length(pending_actor_tests) - 1; i >= 0; i--) {
for (var i = pending_actor_tests.length - 1; i >= 0; i--) {
var entry = pending_actor_tests[i]
var elapsed_ms = (now - entry.start_time) * 1000
if (elapsed_ms > ACTOR_TEST_TIMEOUT) {
push(timed_out, i)
timed_out.push(i)
}
}
for (var i = 0; i < length(timed_out); i++) {
for (var i = 0; i < timed_out.length; i++) {
var idx = timed_out[i]
var entry = pending_actor_tests[idx]
pending_actor_tests = array(array(pending_actor_tests, 0, idx), array(pending_actor_tests, idx + 1))
pending_actor_tests.splice(idx, 1)
entry.status = "failed"
entry.error = { message: "Test timed out" }
entry.duration_ns = ACTOR_TEST_TIMEOUT * 1000000
push(actor_test_results, entry)
actor_test_results.push(entry)
log.console(` TIMEOUT ${entry.test}`)
}
if (length(pending_actor_tests) > 0) {
if (pending_actor_tests.length > 0) {
$delay(check_timeouts, 1000)
}
check_completion()
@@ -512,7 +491,7 @@ function check_timeouts() {
var finalized = false
function check_completion() {
if (finalized) return
if (length(pending_actor_tests) > 0) return
if (pending_actor_tests.length > 0) return
finalized = true
finalize_results()
@@ -520,10 +499,10 @@ function check_completion() {
function finalize_results() {
// Add actor test results to all_results
for (var i = 0; i < length(actor_test_results); i++) {
for (var i = 0; i < actor_test_results.length; i++) {
var r = actor_test_results[i]
var pkg_result = null
for (var j = 0; j < length(all_results); j++) {
for (var j = 0; j < all_results.length; j++) {
if (all_results[j].package == r.package) {
pkg_result = all_results[j]
break
@@ -531,11 +510,11 @@ function finalize_results() {
}
if (!pkg_result) {
pkg_result = { package: r.package, files: [], total: 0, passed: 0, failed: 0 }
push(all_results, pkg_result)
all_results.push(pkg_result)
}
var file_result = null
for (var j = 0; j < length(pkg_result.files); j++) {
for (var j = 0; j < pkg_result.files.length; j++) {
if (pkg_result.files[j].name == r.file) {
file_result = pkg_result.files[j]
break
@@ -543,10 +522,10 @@ function finalize_results() {
}
if (!file_result) {
file_result = { name: r.file, tests: [], passed: 0, failed: 0 }
push(pkg_result.files, file_result)
pkg_result.files.push(file_result)
}
push(file_result.tests, r)
file_result.tests.push(r)
pkg_result.total++
if (r.status == "passed") {
pkg_result.passed++
@@ -559,7 +538,7 @@ function finalize_results() {
// Calculate totals
var totals = { total: 0, passed: 0, failed: 0 }
for (var i = 0; i < length(all_results); i++) {
for (var i = 0; i < all_results.length; i++) {
totals.total += all_results[i].total
totals.passed += all_results[i].passed
totals.failed += all_results[i].failed
@@ -574,9 +553,9 @@ function finalize_results() {
// If no actor tests, finalize immediately
var totals
if (length(all_actor_tests) == 0) {
if (all_actor_tests.length == 0) {
totals = { total: 0, passed: 0, failed: 0 }
for (var i = 0; i < length(all_results); i++) {
for (var i = 0; i < all_results.length; i++) {
totals.total += all_results[i].total
totals.passed += all_results[i].passed
totals.failed += all_results[i].failed
@@ -590,7 +569,7 @@ if (length(all_actor_tests) == 0) {
// Generate Reports function
function generate_reports(totals) {
var timestamp = text(floor(time.number()))
var timestamp = text(number.floor(time.number()))
var report_dir = shop.get_reports_dir() + '/test_' + timestamp
ensure_dir(report_dir)
@@ -600,24 +579,24 @@ Total: ${totals.total}, Passed: ${totals.passed}, Failed: ${totals.failed}
=== SUMMARY ===
`
for (var i = 0; i < length(all_results); i++) {
for (var i = 0; i < all_results.length; i++) {
var pkg_res = all_results[i]
if (pkg_res.total == 0) continue
txt_report += `Package: ${pkg_res.package}\n`
for (var j = 0; j < length(pkg_res.files); j++) {
for (var j = 0; j < pkg_res.files.length; j++) {
var f = pkg_res.files[j]
var status = f.failed == 0 ? "PASS" : "FAIL"
txt_report += ` [${status}] ${f.name} (${f.passed}/${length(f.tests)})\n`
txt_report += ` [${status}] ${f.name} (${f.passed}/${f.tests.length})\n`
}
}
txt_report += `\n=== FAILURES ===\n`
var has_failures = false
for (var i = 0; i < length(all_results); i++) {
for (var i = 0; i < all_results.length; i++) {
var pkg_res = all_results[i]
for (var j = 0; j < length(pkg_res.files); j++) {
for (var j = 0; j < pkg_res.files.length; j++) {
var f = pkg_res.files[j]
for (var k = 0; k < length(f.tests); k++) {
for (var k = 0; k < f.tests.length; k++) {
var t = f.tests[k]
if (t.status == "failed") {
has_failures = true
@@ -625,7 +604,7 @@ Total: ${totals.total}, Passed: ${totals.passed}, Failed: ${totals.failed}
if (t.error) {
txt_report += ` Message: ${t.error.message}\n`
if (t.error.stack) {
txt_report += ` Stack:\n${text(array(array(t.error.stack, '\n'), l => ` ${l}`), '\n')}\n`
txt_report += ` Stack:\n${t.error.stack.split('\n').map(function(l){return ` ${l}`}).join('\n')}\n`
}
}
txt_report += `\n`
@@ -636,13 +615,13 @@ Total: ${totals.total}, Passed: ${totals.passed}, Failed: ${totals.failed}
if (!has_failures) txt_report += `None\n`
txt_report += `\n=== DETAILED RESULTS ===\n`
for (var i = 0; i < length(all_results); i++) {
for (var i = 0; i < all_results.length; i++) {
var pkg_res = all_results[i]
if (pkg_res.total == 0) continue
for (var j = 0; j < length(pkg_res.files); j++) {
for (var j = 0; j < pkg_res.files.length; j++) {
var f = pkg_res.files[j]
for (var k = 0; k < length(f.tests); k++) {
for (var k = 0; k < f.tests.length; k++) {
var t = f.tests[k]
var dur = `${t.duration_ns || 0}ns`
var status = t.status == "passed" ? "PASS" : "FAIL"
@@ -651,29 +630,29 @@ Total: ${totals.total}, Passed: ${totals.passed}, Failed: ${totals.failed}
}
}
ensure_dir(report_dir)
fd.slurpwrite(`${report_dir}/test.txt`, stone(blob(txt_report)))
fd.slurpwrite(`${report_dir}/test.txt`, stone(new blob(txt_report)))
log.console(`Report written to ${report_dir}/test.txt`)
// Generate JSON per package
for (var i = 0; i < length(all_results); i++) {
for (var i = 0; i < all_results.length; i++) {
var pkg_res = all_results[i]
if (pkg_res.total == 0) continue
var pkg_tests = []
for (var j = 0; j < length(pkg_res.files); j++) {
for (var j = 0; j < pkg_res.files.length; j++) {
var f = pkg_res.files[j]
for (var k = 0; k < length(f.tests); k++) {
push(pkg_tests, f.tests[k])
for (var k = 0; k < f.tests.length; k++) {
pkg_tests.push(f.tests[k])
}
}
var json_path = `${report_dir}/${replace(pkg_res.package, /\//, '_')}.json`
fd.slurpwrite(json_path, stone(blob(json.encode(pkg_tests))))
var json_path = `${report_dir}/${pkg_res.package.replace(/\//g, '_')}.json`
fd.slurpwrite(json_path, stone(new blob(json.encode(pkg_tests))))
}
}
// If no actor tests, generate reports and stop immediately
if (length(all_actor_tests) == 0) {
if (all_actor_tests.length == 0) {
generate_reports(totals)
$stop()
} else {


@@ -4,34 +4,34 @@ var os = use('os');
function assert(condition, message) {
if (!condition) {
throw Error(message || "Assertion failed");
throw new Error(message || "Assertion failed");
}
}
function assertEqual(actual, expected, message) {
if (actual != expected) {
throw Error(message || "Expected " + expected + ", got " + actual);
throw new Error(message || "Expected " + expected + ", got " + actual);
}
}
return {
test_create_empty_blob: function() {
var b = Blob();
assertEqual(length(b), 0, "Empty blob should have length 0");
var b = new Blob();
assertEqual(b.length, 0, "Empty blob should have length 0");
},
test_create_blob_with_capacity: function() {
var b = Blob(100);
assertEqual(length(b), 0, "New blob with capacity should still have length 0");
var b = new Blob(100);
assertEqual(b.length, 0, "New blob with capacity should still have length 0");
},
test_write_and_read_single_bit: function() {
var b = Blob();
var b = new Blob();
b.write_bit(true);
b.write_bit(false);
b.write_bit(1);
b.write_bit(0);
assertEqual(length(b), 4, "Should have 4 bits after writing");
assertEqual(b.length, 4, "Should have 4 bits after writing");
stone(b);
assertEqual(b.read_logical(0), true, "First bit should be true");
@@ -41,7 +41,7 @@ return {
},
test_out_of_range_read_throws_error: function() {
var b = Blob();
var b = new Blob();
b.write_bit(true);
stone(b);
@@ -63,7 +63,7 @@ return {
},
test_write_and_read_numbers: function() {
var b = Blob();
var b = new Blob();
b.write_number(3.14159);
b.write_number(-42);
b.write_number(0);
@@ -77,7 +77,7 @@ return {
},
test_write_and_read_text: function() {
var b = Blob();
var b = new Blob();
b.write_text("Hello");
b.write_text("World");
b.write_text("🎉");
@@ -87,15 +87,15 @@ return {
},
test_write_and_read_blobs: function() {
var b1 = Blob();
var b1 = new Blob();
b1.write_bit(true);
b1.write_bit(false);
b1.write_bit(true);
var b2 = Blob(10);
var b2 = new Blob(10);
b2.write_blob(b1);
b2.write_bit(false);
assertEqual(length(b2), 4, "Combined blob should have 4 bits");
assertEqual(b2.length, 4, "Combined blob should have 4 bits");
stone(b2);
assertEqual(b2.read_logical(0), true);
@@ -105,37 +105,37 @@ return {
},
test_blob_copy_constructor: function() {
var b1 = Blob();
var b1 = new Blob();
b1.write_bit(true);
b1.write_bit(false);
b1.write_bit(true);
b1.write_bit(true);
stone(b1);
var b2 = Blob(b1);
var b2 = new Blob(b1);
stone(b2);
assertEqual(length(b2), 4, "Copied blob should have same length");
assertEqual(b2.length, 4, "Copied blob should have same length");
assertEqual(b2.read_logical(0), true);
assertEqual(b2.read_logical(3), true);
},
test_blob_partial_copy_constructor: function() {
var b1 = Blob();
var b1 = new Blob();
for (var i = 0; i < 10; i++) {
b1.write_bit(i % 2 == 0);
}
stone(b1);
var b2 = Blob(b1, 2, 7);
var b2 = new Blob(b1, 2, 7);
stone(b2);
assertEqual(length(b2), 5, "Partial copy should have 5 bits");
assertEqual(b2.length, 5, "Partial copy should have 5 bits");
assertEqual(b2.read_logical(0), true);
assertEqual(b2.read_logical(2), true);
},
test_create_blob_with_fill: function() {
var b1 = Blob(8, true);
var b2 = Blob(8, false);
var b1 = new Blob(8, true);
var b2 = new Blob(8, false);
stone(b1);
stone(b2);
@@ -150,7 +150,7 @@ return {
var sequence = [true, false, true, true, false];
var index = 0;
var b = Blob(5, function() {
var b = new Blob(5, function() {
return sequence[index++] ? 1 : 0;
});
@@ -161,13 +161,13 @@ return {
},
test_write_pad_and_check_padding: function() {
var b = Blob();
var b = new Blob();
b.write_bit(true);
b.write_bit(false);
b.write_bit(true);
b.write_pad(8);
assertEqual(length(b), 8, "Should be padded to 8 bits");
assertEqual(b.length, 8, "Should be padded to 8 bits");
stone(b);
assert(b['pad?'](3, 8), "Should detect valid padding at position 3");
@@ -175,7 +175,7 @@ return {
},
test_read_blob_from_stone_blob: function() {
var b1 = Blob();
var b1 = new Blob();
for (var i = 0; i < 16; i++) {
b1.write_bit(i % 3 == 0);
}
@@ -183,14 +183,14 @@ return {
var b2 = b1.read_blob(4, 12);
stone(b2);
assertEqual(length(b2), 8, "Read blob should have 8 bits");
assertEqual(b2.length, 8, "Read blob should have 8 bits");
assertEqual(b2.read_logical(2), true);
assertEqual(b2.read_logical(5), true);
},
test_stone_blob_is_immutable: function() {
var b = Blob();
var b = new Blob();
b.write_bit(true);
stone(b);
@@ -204,7 +204,7 @@ return {
},
test_multiple_stone_calls_are_safe: function() {
var b = Blob();
var b = new Blob();
b.write_bit(true);
assert(!stone.p(b), "Blob should not be a stone before stone() call");
stone(b);
@@ -218,7 +218,7 @@ return {
test_invalid_constructor_arguments: function() {
var threw = false;
try {
var b = Blob("invalid");
var b = new Blob("invalid");
} catch (e) {
threw = true;
}
@@ -226,7 +226,7 @@ return {
},
test_write_bit_validation: function() {
var b = Blob();
var b = new Blob();
b.write_bit(0);
b.write_bit(1);
@@ -240,7 +240,7 @@ return {
},
test_complex_data_round_trip: function() {
var b = Blob();
var b = new Blob();
b.write_text("Test");
b.write_number(123.456);
@@ -248,31 +248,31 @@ return {
b.write_bit(false);
b.write_number(-999.999);
var originalLength = length(b);
var originalLength = b.length;
stone(b);
var b2 = Blob(b);
var b2 = new Blob(b);
stone(b2);
assertEqual(length(b2), originalLength, "Copy should have same length");
assertEqual(b2.length, originalLength, "Copy should have same length");
assertEqual(b2.read_text(0), "Test", "First text should match");
},
test_zero_capacity_blob: function() {
var b = Blob(0);
assertEqual(length(b), 0, "Zero capacity blob should have length 0");
var b = new Blob(0);
assertEqual(b.length, 0, "Zero capacity blob should have length 0");
b.write_bit(true);
assertEqual(length(b), 1, "Should expand when writing");
assertEqual(b.length, 1, "Should expand when writing");
},
test_large_blob_handling: function() {
var b = Blob();
var b = new Blob();
var testSize = 1000;
for (var i = 0; i < testSize; i++) {
b.write_bit(i % 7 == 0);
}
assertEqual(length(b), testSize, "Should have " + testSize + " bits");
assertEqual(b.length, testSize, "Should have " + testSize + " bits");
stone(b);
assertEqual(b.read_logical(0), true, "Bit 0 should be true");
@@ -282,7 +282,7 @@ return {
},
test_non_stone_blob_read_methods_should_throw: function() {
var b = Blob();
var b = new Blob();
b.write_bit(true);
b.write_number(42);
b.write_text("test");
@@ -329,14 +329,14 @@ return {
},
test_empty_text_write_and_read: function() {
var b = Blob();
var b = new Blob();
b.write_text("");
stone(b);
assertEqual(b.read_text(0), "", "Empty string should round-trip");
},
test_invalid_read_positions: function() {
var b = Blob();
var b = new Blob();
b.write_number(42);
stone(b);


@@ -12,7 +12,7 @@ return {
fd.write(f, bigdata)
fd.close(f)
var data = blob()
var data = new blob()
var st = time.number()
var f2 = fd.open(tmp, 'r')
var chunksize = 1024 // reduced for test
@@ -20,7 +20,11 @@ return {
while(true) {
var chunk = fd.read(f2, chunksize);
data.write_blob(chunk);
if (length(chunk) < chunksize * 8) break;
// fd.read takes a byte count, but it returns a blob, and blob.length is measured in bits,
// so a complete chunk is chunksize * 8 bits and a shorter chunk marks the final read
// (see the sketch after this hunk).
if (chunk.length < chunksize * 8) break;
}
fd.close(f2)
log.console(`read took ${time.number()-st}`)
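A minimal sketch of the byte/bit bookkeeping described in the comment above, assuming fd.read takes a byte count while blob.length reports bits:

var chunksize = 1024                  // bytes requested from fd.read
var full_chunk_bits = chunksize * 8   // 8192 bits in a complete chunk
// a chunk with chunk.length < full_chunk_bits is the final, short read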


@@ -15,7 +15,7 @@ return {
}
$receiver(tree => {
var child_reqs = array(tree.children, child => cb => {
var child_reqs = tree.children.map(child => cb => {
$start(e => send(e.actor, child, cb), "tests/comments")
})
@@ -30,10 +30,7 @@ return {
send(tree, reason)
}
var obj = object(result.comment)
obj.children = result.children
obj.time = time.text()
send(tree, obj)
send(tree, { ...result.comment, children: result.children, time: time.text() })
})
})
}


@@ -9,7 +9,7 @@ function load_comment_from_api_requestor(id) {
}
$receiver(tree => {
var child_reqs = array(tree.children, child => cb => {
var child_reqs = tree.children.map(child => cb => {
$start(e => send(e.actor, child, cb), "tests/comments") // note: this recursively starts "tests/comments", matching the original
// if the recursion is intentional, it should probably target "tests/comments_actor" instead
})
@@ -25,9 +25,6 @@ $receiver(tree => {
send(tree, reason)
}
var obj = object(result.comment)
obj.children = result.children
obj.time = time.text()
send(tree, obj)
send(tree, { ...result.comment, children: result.children, time: time.text() })
})
})


@@ -4,14 +4,14 @@ var time = use('time')
return {
test_guid: function() {
var st = time.number()
var guid = blob(256, $random_fit)
var guid = new blob(256, $random_fit)
stone(guid)
var btime = time.number()-st
st = time.number()
guid = text(guid,'h')
st = time.number()-st
log.console(`took ${btime*1000000} us to make blob; took ${st*1000000} us to make it text`)
log.console(lower(guid))
log.console(length(guid))
log.console(guid.toLowerCase())
log.console(guid.length)
}
}
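As a rough check on the output above: 256 bits render as 64 hex characters (4 bits per digit), so, assuming the 'h' format of text() emits one digit per 4 bits, the test could assert:

if (guid.length != 64) throw `expected 64 hex chars for a 256-bit blob, got ${guid.length}`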


@@ -1,6 +1,6 @@
return {
test_hang: function() {
log.console(`Going to start hanging .. (disabled)`)
log.console(`Going to start hanging ... (disabled)`)
// while(1) {
// // hang!


@@ -3,5 +3,5 @@ var http = use('http')
return function() {
var url = "http://example.com"
var b2 = http.fetch(url)
if (length(b2) == 0) throw "Empty response"
if (b2.length == 0) throw "Empty response"
}


@@ -54,9 +54,9 @@ return {
var reader = miniz.read(zip_blob)
var listed = reader.list()
if (length(listed) != reader.count())
if (listed.length != reader.count())
throw "list/count mismatch"
if (length(listed) != 2)
if (listed.length != 2)
throw "unexpected entry count"
} finally {
try { fd.unlink(ZIP_PATH) } catch(e) {}


@@ -7,8 +7,8 @@ var EPSILON = 1e-12
function stone_if_needed(b) { if (!stone.p(b)) stone(b) }
function bytes_to_blob(bytes) {
var b = blob()
for (var i = 0; i < length(bytes); i++) {
var b = new blob()
for (var i = 0; i < bytes.length; i++) {
var byte = bytes[i]
for (var bit = 7; bit >= 0; bit--) b.write_bit((byte >> bit) & 1)
}
@@ -20,11 +20,11 @@ function deepCompare(expected, actual, path) {
path = path || ''
if (expected == actual) return { passed: true, messages: [] };
if (is_number(expected) && is_number(actual)) {
if (typeof expected == 'number' && typeof actual == 'number') {
if (isNaN(expected) && isNaN(actual))
return { passed: true, messages: [] };
var diff = abs(expected - actual);
var diff = number.abs(expected - actual);
if (diff <= EPSILON)
return { passed: true, messages: [] };
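As a concrete instance of this tolerance: 0.1 + 0.2 differs from 0.3 by roughly 5.6e-17, far below EPSILON (1e-12), so values that differ only by floating-point noise compare equal while real mismatches still fail:

// number.abs((0.1 + 0.2) - 0.3)  is about 5.6e-17, <= 1e-12, so it passes
// number.abs(1 - 1.001)          is 0.001, > 1e-12, so it is reported as a mismatch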
@@ -37,47 +37,49 @@ function deepCompare(expected, actual, path) {
};
}
if (is_blob(expected) && is_blob(actual)) {
if ((expected instanceof blob) && (actual instanceof blob)) {
stone_if_needed(expected); stone_if_needed(actual)
if (length(expected) != length(actual))
return { passed: false, messages: [`blob length mismatch at ${path}: ${length(expected)} vs ${length(actual)}`] }
for (var i = 0; i < length(expected); i++) {
if (expected.length != actual.length)
return { passed: false, messages: [`blob length mismatch at ${path}: ${expected.length} vs ${actual.length}`] }
for (var i = 0; i < expected.length; i++) {
if (expected.read_logical(i) != actual.read_logical(i))
return { passed: false, messages: [`blob bit mismatch at ${path}[${i}]`] }
}
return { passed: true, messages: [] }
}
if (is_array(expected) && is_array(actual)) {
if (length(expected) != length(actual))
if (isa(expected, array) && isa(actual, array)) {
if (expected.length != actual.length)
return {
passed: false,
messages: [`Array length mismatch at ${path}: expected ${length(expected)}, got ${length(actual)}`]
messages: [`Array length mismatch at ${path}: expected ${expected.length}, got ${actual.length}`]
};
var messages = [];
arrfor(expected, function(val, i) {
var result = deepCompare(val, actual[i], `${path}[${i}]`);
if (!result.passed)
messages = array(messages, result.messages)
})
return { passed: length(messages) == 0, messages: messages };
let messages = [];
for (let i = 0; i < expected.length; i++) {
var result = deepCompare(expected[i], actual[i], `${path}[${i}]`);
if (!result.passed) {
for(var m of result.messages) messages.push(m);
}
}
return { passed: messages.length == 0, messages: messages };
}
if (is_object(expected) && is_object(actual)) {
var expKeys = sort(array(expected))
var actKeys = sort(array(actual))
if (isa(expected, object) && isa(actual, object)) {
var expKeys = array(expected).sort();
var actKeys = array(actual).sort();
if (JSON.stringify(expKeys) != JSON.stringify(actKeys))
return {
passed: false,
messages: [`Object keys mismatch at ${path}: expected ${expKeys}, got ${actKeys}`]
};
var messages = [];
arrfor(expKeys, function(key) {
let messages = [];
for (let key of expKeys) {
var result = deepCompare(expected[key], actual[key], `${path}.${key}`);
if (!result.passed)
messages = array(messages, result.messages)
})
return { passed: length(messages) == 0, messages: messages };
if (!result.passed) {
for(var m of result.messages) messages.push(m);
}
}
return { passed: messages.length == 0, messages: messages };
}
return {
@@ -89,7 +91,7 @@ function deepCompare(expected, actual, path) {
function makeTest(test) {
return function() {
var encoded = test.replacer ? nota.encode(test.input, test.replacer) : nota.encode(test.input);
if (!is_blob(encoded)){
if (!(encoded instanceof blob)){
throw "encode() should return blob";
}
@@ -102,14 +104,14 @@ function makeTest(test) {
var compareResult = deepCompare(expected, decoded);
if (!compareResult.passed) {
throw text(compareResult.messages, '; ');
throw compareResult.messages.join('; ');
}
};
}
var testarr = []
for (var i = 0; i < 500; i++) {
push(testarr, 1)
testarr.push(1)
}
var testCases = [
@@ -153,18 +155,18 @@ var testCases = [
str: "test",
obj: { x: true }
}] },
{ name: 'empty_buffer', input: blob() },
{ name: 'empty_buffer', input: new blob() },
{ name: 'nested_empty_array', input: [[]] },
{ name: 'empty_key_value', input: { "": "" } },
{ name: 'small_float', input: 1e-10 },
{ name: 'replacer_multiply', input: { a: 1, b: 2 },
replacer: (key, value) => is_number(value) ? value * 2 : value,
replacer: (key, value) => typeof value == 'number' ? value * 2 : value,
expected: { a: 2, b: 4 } },
{ name: 'replacer_string_append', input: { x: "test", y: 5 },
replacer: (key, value) => key == 'x' ? value + "!" : value,
expected: { x: "test!", y: 5 } },
{ name: 'reviver_multiply', input: { a: 1, b: 2 },
reviver: (key, value) => is_number(value) ? value * 3 : value,
reviver: (key, value) => typeof value == 'number' ? value * 3 : value,
expected: { a: 3, b: 6 } },
{ name: 'reviver_increment', input: { x: "test", y: 10 },
reviver: (key, value) => key == 'y' ? value + 1 : value,
@@ -172,7 +174,7 @@ var testCases = [
];
var tests = {};
for (var i = 0; i < length(testCases); i++) {
for (var i = 0; i < testCases.length; i++) {
var t = testCases[i];
tests[t.name] = makeTest(t);
}

100
tests/num.cm Normal file

@@ -0,0 +1,100 @@
var num = use('num');
return {
test_num_basic: function() {
// Test matrix creation and operations
var A = new num.Matrix([
[1, 2, 3],
[4, 5, 6],
[7, 8, 10]
]);
log.console("Matrix A:");
log.console(A.toArray());
// Test matrix inversion
var A_inv = A.inverse();
log.console("\nMatrix A inverse:");
log.console(A_inv.toArray());
// Verify A * A_inv = I (approximately)
var I = A.multiply(A_inv);
log.console("\nA * A_inv (should be identity):");
log.console(I.toArray());
// Test array creation
var v = new num.Array([1, 2, 3]);
log.console("\nVector v:");
log.console(v.toArray());
// Test matrix-vector multiplication
var result = A.multiply(v);
log.console("\nA * v:");
log.console(result.toArray());
// Test dot product
var u = new num.Array([4, 5, 6]);
var dot_product = v.dot(u);
log.console("\nv · u =", dot_product);
// Test norm
var v_norm = v.norm();
log.console("||v|| =", v_norm);
// Test matrix-matrix multiplication
var B = new num.Matrix([
[1, 0, 0],
[0, 2, 0],
[0, 0, 3]
]);
var C = A.multiply(B);
log.console("\nA * B:");
log.console(C.toArray());
},
test_num_property: function() {
// Create an array
var arr = new num.Array([10, 20, 30, 40, 50]);
if (arr[0] != 10) throw "arr[0] mismatch"
if (arr[1] != 20) throw "arr[1] mismatch"
if (arr[4] != 50) throw "arr[4] mismatch"
arr[0] = 15
if (arr[0] != 15) throw "arr[0] set failed"
// arr[10] is out of range; reading it should yield null (cell script has no undefined), check behavior
// log.console("arr[10] =", arr[10]);
if (arr.length != 5) throw "arr.length mismatch"
if (!(arr instanceof num.Array)) throw "instanceof check failed"
},
test_num_setter: function() {
// Create an array
var arr = new num.Array([1, 2, 3, 4, 5]);
// Test setting values
arr[0] = 100;
arr[1] = 200;
arr[2] = 300.5;
if (arr[0] != 100) throw "Setter failed index 0"
if (arr[1] != 200) throw "Setter failed index 1"
if (arr[2] != 300.5) throw "Setter failed index 2"
// Test setting with different types
arr[3] = "123.7"; // Should convert string to number
arr[4] = true; // Should convert boolean to number
// If the implementation converts, arr[3] should read back as 123.7 and arr[4] as 1;
// left unasserted here because the conversion behavior is implementation-defined:
// log.console("arr[3] =", arr[3]);
// log.console("arr[4] =", arr[4]);
// Test bounds checking - this should fail silently or throw depending on impl
arr[10] = 999;
}
}
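A few hand-checked values this test could assert instead of only logging (standard row-major matrix semantics assumed): v · u = 1*4 + 2*5 + 3*6 = 32, ||v|| = sqrt(14) ≈ 3.7417, and A * B scales the columns of A by 1, 2, 3, giving [[1,4,9],[4,10,18],[7,16,30]], while A * A_inv should match the identity to within floating-point error. For example:

if (v.dot(u) != 32) throw "dot product should be 32"
if (number.abs(v.norm() - 3.7416573867739413) > 1e-9) throw "norm should be sqrt(14)"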

File diff suppressed because it is too large


@@ -236,12 +236,12 @@ return {
test_tiny_number: function() {
var tiny = 0.0000001
var result = text(tiny, "n")
if (search(result, 'e', 0) == null) throw "Tiny number format failed"
if (result.indexOf('e') == -1) throw "Tiny number format failed"
},
test_huge_number: function() {
var huge = 1e22
var result = text(huge, "n")
if (search(result, 'e', 0) == null) throw "Huge number format failed"
if (result.indexOf('e') == -1) throw "Huge number format failed"
}
}


@@ -2,15 +2,15 @@ var toml = use('toml')
function deep_equal(a, b) {
if (a == b) return true
if (is_null(a) || is_null(b)) return false
if ((is_number(a) && !is_number(b)) || (is_text(a) && !is_text(b)) || (is_object(a) && !is_object(b)) || (is_array(a) && !is_array(b)) || (is_blob(a) && !is_blob(b)) || (is_function(a) && !is_function(b)) || (is_logical(a) && !is_logical(b))) return false
if (a == null || b == null) return false
if (typeof a != typeof b) return false
if (is_object(a)) {
if (typeof a == 'object') {
var keys_a = array(a)
var keys_b = array(b)
if (length(keys_a) != length(keys_b)) return false
if (keys_a.length != keys_b.length) return false
for (var i = 0; i < length(keys_a); i++) {
for (var i = 0; i < keys_a.length; i++) {
if (!deep_equal(a[keys_a[i]], b[keys_a[i]])) return false
}
return true


@@ -1,7 +1,7 @@
var cmds = {
stop: $stop,
disrupt: _ => {
$delay(_ => { throw Error() }, 0.5)
$delay(_ => { throw new Error() }, 0.5)
}
}


@@ -12,53 +12,57 @@ function deep_compare(expected, actual, path) {
path = path || ''
if (expected == actual) return { passed: true, messages: [] }
if (is_number(expected) && is_number(actual)) {
if (typeof expected == 'number' && typeof actual == 'number') {
if (isNaN(expected) && isNaN(actual)) return { passed: true, messages: [] }
var diff = abs(expected - actual)
var diff = number.abs(expected - actual)
if (diff <= EPSILON) return { passed: true, messages: [] }
return { passed: false, messages: [`Value mismatch at ${path}: ${expected} vs ${actual} (diff ${diff})`] }
}
if (is_blob(expected) && is_blob(actual)) {
if ((expected instanceof blob) && (actual instanceof blob)) {
stone_if_needed(expected); stone_if_needed(actual)
if (length(expected) != length(actual))
return { passed: false, messages: [`blob length mismatch at ${path}: ${length(expected)} vs ${length(actual)}`] }
arrfor(array(expected), function(i) {
if (expected.length != actual.length)
return { passed: false, messages: [`blob length mismatch at ${path}: ${expected.length} vs ${actual.length}`] }
for (var i = 0; i < expected.length; i++) {
if (expected.read_logical(i) != actual.read_logical(i))
return { passed: false, messages: [`blob bit mismatch at ${path}[${i}]`] }
})
}
return { passed: true, messages: [] }
}
if (is_array(expected) && is_array(actual)) {
if (length(expected) != length(actual))
return { passed: false, messages: [`Array length mismatch at ${path}: ${length(expected)} vs ${length(actual)}`] }
if (isa(expected, array) && isa(actual, array)) {
if (expected.length != actual.length)
return { passed: false, messages: [`Array length mismatch at ${path}: ${expected.length} vs ${actual.length}`] }
var msgs = []
arrfor(array(expected), function(i) {
for (var i = 0; i < expected.length; i++) {
var res = deep_compare(expected[i], actual[i], `${path}[${i}]`)
if (!res.passed) array(msgs, res.messages)
})
return { passed: length(msgs) == 0, messages: msgs }
if (!res.passed) {
for(var m of res.messages) msgs.push(m)
}
}
return { passed: msgs.length == 0, messages: msgs }
}
if (is_object(expected) && is_object(actual)) {
var expKeys = sort(array(expected))
var actKeys = sort(array(actual))
if (isa(expected, object) && isa(actual, object)) {
var expKeys = array(expected).sort()
var actKeys = array(actual).sort()
if (JSON.stringify(expKeys) != JSON.stringify(actKeys))
return { passed: false, messages: [`Object keys mismatch at ${path}: ${expKeys} vs ${actKeys}`] }
var msgs = []
arrfor(expKeys, function(k) {
for (var k of expKeys) {
var res = deep_compare(expected[k], actual[k], `${path}.${k}`)
if (!res.passed) array(msgs, res.messages)
})
return { passed: length(msgs) == 0, messages: msgs }
if (!res.passed) {
for(var m of res.messages) msgs.push(m)
}
}
return { passed: msgs.length == 0, messages: msgs }
}
return { passed: false, messages: [`Value mismatch at ${path}: ${JSON.stringify(expected)} vs ${JSON.stringify(actual)}`] }
}
var testarr = []
for (var i = 0; i < 500; i++) { push(testarr, 1) }
for (var i = 0; i < 500; i++) { testarr.push(1) }
var testCases = [
{ name: 'zero', input: 0 },
@@ -97,7 +101,7 @@ var testCases = [
{ name: 'nested_object', input: { num: 42, arr: [1, -1, 2.5], str: 'test', obj: { x: true } } },
{ name: 'empty_blob', input: blob() },
{ name: 'empty_blob', input: new blob() },
{ name: 'nested_array', input: [[]] },
{ name: 'empty_key_val', input: { '': '' } },
{ name: 'small_float', input: 1e-10 }
@@ -106,17 +110,17 @@ var testCases = [
function make_test(t) {
return function() {
var enc = wota.encode(t.input)
if (!is_blob(enc)) throw 'encode() should return a blob'
if (!(enc instanceof blob)) throw 'encode() should return a blob'
var dec = wota.decode(enc)
var cmp = deep_compare(t.input, dec)
if (!cmp.passed) throw text(cmp.messages, '; ')
if (!cmp.passed) throw cmp.messages.join('; ')
}
}
var tests = {}
for (var i = 0; i < length(testCases); i++) {
for (var i = 0; i < testCases.length; i++) {
var t = testCases[i]
var name = t.name || ('case_' + i)
tests[name] = make_test(t)

75
time.cm

@@ -47,9 +47,9 @@ time.isleap = function(y) { return time.yearsize(y) == 366; };
/* timecode utility */
time.timecode = function(t, fps = 24)
{
var s = whole(t);
var s = number.whole(t);
var frac = t - s;
return `${s}:${whole(frac * fps)}`;
return `${s}:${number.whole(frac * fps)}`;
};
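A worked example of the conversion above: for t = 2.5 seconds at the default 24 fps, the whole-second part is 2 and the 0.5 s remainder becomes 12 frames.

time.timecode(2.5)       // "2:12"
time.timecode(10.25, 24) // "10:6"  (0.25 s * 24 fps)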
/* per-month day counts (non-leap) */
@@ -60,9 +60,9 @@ function time_record(num = now(),
zone = computer_zone(),
dst = computer_dst())
{
if (is_object(num)) return num;
if (typeof num == "object") return num;
var monthdays = array(time.monthdays);
var monthdays = time.monthdays.slice();
var rec = {
second : 0, minute : 0, hour : 0,
yday : 0, year : 0,
@@ -77,13 +77,13 @@ function time_record(num = now(),
/* split into day + seconds-of-day */
var hms = num % time.day;
var day = floor(num / time.day);
var day = number.floor(num / time.day);
if (hms < 0) { hms += time.day; day--; }
rec.second = hms % time.minute;
var tmp = floor(hms / time.minute);
var tmp = number.floor(hms / time.minute);
rec.minute = tmp % time.minute;
rec.hour = floor(tmp / time.minute);
rec.hour = number.floor(tmp / time.minute);
rec.weekday = (day + 4_503_599_627_370_496 + 2) % 7; /* 2 → 1970-01-01 was Thursday */
/* year & day-of-year */
@@ -111,8 +111,9 @@ function time_record(num = now(),
function time_number(rec = now())
{
if (is_number(rec)) return rec;
if (typeof rec == "number") return rec;
log.console(typeof rec)
log.console(rec)
log.console(rec.minute)
@@ -156,47 +157,47 @@ function time_text(num = now(),
zone = computer_zone(),
dst = computer_dst())
{
var rec = is_number(num) ? time_record(num, zone, dst) : num;
var rec = (typeof num == "number") ? time_record(num, zone, dst) : num;
zone = rec.zone;
dst = rec.dst;
/* am/pm */
if (search(fmt, "a") != null) {
if (rec.hour >= 13) { rec.hour -= 12; fmt = replace(fmt, "a", "PM"); }
else if (rec.hour == 12) { fmt = replace(fmt, "a", "PM"); }
else if (rec.hour == 0) { rec.hour = 12; fmt = replace(fmt, "a", "AM"); }
else fmt = replace(fmt, "a", "AM");
if (fmt.includes("a")) {
if (rec.hour >= 13) { rec.hour -= 12; fmt = fmt.replaceAll("a", "PM"); }
else if (rec.hour == 12) { fmt = fmt.replaceAll("a", "PM"); }
else if (rec.hour == 0) { rec.hour = 12; fmt = fmt.replaceAll("a", "AM"); }
else fmt = fmt.replaceAll("a", "AM");
}
/* BCE/CE */
var year = rec.year > 0 ? rec.year : rec.year - 1;
if (search(fmt, "c") != null) {
if (year < 0) { year = abs(year); fmt = replace(fmt, "c", "BC"); }
else fmt = replace(fmt, "c", "AD");
if (fmt.includes("c")) {
if (year < 0) { year = number.abs(year); fmt = fmt.replaceAll("c", "BC"); }
else fmt = fmt.replaceAll("c", "AD");
}
/* substitutions */
var full_offset = zone + (dst ? 1 : 0);
fmt = replace(fmt, "yyyy", text(year, "i4"))
fmt = replace(fmt, "y", year);
fmt = replace(fmt, "eee", rec.yday + 1);
fmt = replace(fmt, "dd", text(rec.day, "i2"))
fmt = replace(fmt, "d", rec.day);
fmt = replace(fmt, "hh", text(rec.hour, "i2"));
fmt = replace(fmt, "h", rec.hour);
fmt = replace(fmt, "nn", text(rec.minute, "i2"));
fmt = replace(fmt, "n", rec.minute);
fmt = replace(fmt, "ss", text(rec.second, "i2"));
fmt = replace(fmt, "s", rec.second);
fmt = replace(fmt, "x", dst ? "DST" : ""); /* new */
fmt = replace(fmt, "z", (full_offset >= 0 ? "+" : "") + text(full_offset));
fmt = replace(fmt, /mm[^bB]/g, rec.month + 1);
fmt = replace(fmt, /m[^bB]/g, rec.month + 1);
fmt = replace(fmt, /v[^bB]/g, rec.weekday);
fmt = replace(fmt, "mb", text(time.monthstr[rec.month], 0, 3));
fmt = replace(fmt, "mB", time.monthstr[rec.month]);
fmt = replace(fmt, "vB", time.weekdays[rec.weekday]);
fmt = replace(fmt, "vb", text(time.weekdays[rec.weekday], 0, 3));
fmt = fmt.replaceAll("yyyy", text(year, "i4"))
fmt = fmt.replaceAll("y", year);
fmt = fmt.replaceAll("eee", rec.yday + 1);
fmt = fmt.replaceAll("dd", text(rec.day, "i2"))
fmt = fmt.replaceAll("d", rec.day);
fmt = fmt.replaceAll("hh", text(rec.hour, "i2"));
fmt = fmt.replaceAll("h", rec.hour);
fmt = fmt.replaceAll("nn", text(rec.minute, "i2"));
fmt = fmt.replaceAll("n", rec.minute);
fmt = fmt.replaceAll("ss", text(rec.second, "i2"));
fmt = fmt.replaceAll("s", rec.second);
fmt = fmt.replaceAll("x", dst ? "DST" : ""); /* new */
fmt = fmt.replaceAll("z", (full_offset >= 0 ? "+" : "") + text(full_offset));
fmt = fmt.replaceAll(/mm[^bB]/g, rec.month + 1);
fmt = fmt.replaceAll(/m[^bB]/g, rec.month + 1);
fmt = fmt.replaceAll(/v[^bB]/g, rec.weekday);
fmt = fmt.replaceAll("mb", time.monthstr[rec.month].slice(0, 3));
fmt = fmt.replaceAll("mB", time.monthstr[rec.month]);
fmt = fmt.replaceAll("vB", time.weekdays[rec.weekday]);
fmt = fmt.replaceAll("vb", time.weekdays[rec.weekday].slice(0, 3));
return fmt;
}
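To make the substitution order above concrete (multi-character codes are replaced before their single-letter forms, and the mm/m patterns deliberately skip a following b or B so the month-name codes survive): with num = 0, zone = 0, dst = false and a made-up format of "yyyy/eee hh:nn:ss", the record is year 1970, day-of-year 0 and midnight, so, assuming text(n, 'i2') zero-pads to two digits, the substitutions work out to "1970/1 00:00:00".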

212
toml.cm

@@ -1,88 +1,58 @@
// Simple TOML parser for cell modules
// Supports basic TOML features needed for the module system
function toml_unescape(s) {
if (!is_text(s)) return null
// Order matters:
// "\\\"" (backslash + quote) should become "\"", not just '"'
// So: unescape \" first, then unescape \\.
s = replace(s, '\\"', '"')
s = replace(s, '\\\\', '\\')
return s
}
function toml_escape(s) {
if (!is_text(s)) return null
// Order matters:
// escape backslashes first, otherwise escaping quotes introduces new backslashes that would get double-escaped.
s = replace(s, '\\', '\\\\')
s = replace(s, '"', '\\"')
return s
}
function parse_toml(toml_text) {
if (!is_text(toml_text)) return null
// Prefer Misty split if present; fall back to JS split.
var lines = array(toml_text, '\n')
if (lines == null) lines = array(toml_text, '\n')
function parse_toml(text) {
if (typeof text != 'string') return null
var lines = text.split('\n')
var result = {}
var current_section = result
var current_section_name = ''
for (var i = 0; i < length(lines); i++) {
var line = trim(lines[i])
if (line == null) line = lines[i]
for (var i = 0; i < lines.length; i++) {
var line = lines[i].trim()
// Skip empty lines and comments
if (!line || starts_with(line, '#')) continue
if (!line || line.startsWith('#')) continue
// Section header
if (starts_with(line, '[') && ends_with(line, ']')) {
var inner = text(line, 1, -1)
var section_path = parse_key_path(inner)
if (section_path == null) return null
if (line.startsWith('[') && line.endsWith(']')) {
var section_path = parse_key_path(line.slice(1, -1))
current_section = result
current_section_name = text(section_path, '.')
// Reconstruct name for debugging/legacy (not strictly needed for object construction)
current_section_name = section_path.join('.')
for (var j = 0; j < length(section_path); j++) {
for (var j = 0; j < section_path.length; j++) {
var key = section_path[j]
// Only treat null as "missing"; do not clobber false/0/""
if (current_section[key] == null) {
if (!current_section[key]) {
current_section[key] = {}
} else if (!is_object(current_section[key])) {
// Scalar/table collision like: a = 1 then [a.b]
return null
}
current_section = current_section[key]
}
continue
}
// Key-value pair
var eq_index = search(line, '=')
if (eq_index != null && eq_index > 0) {
var key_part = trim(text(line, 0, eq_index))
var value = trim(text(line, eq_index + 1))
if (key_part == null) key_part = trim(text(line, 0, eq_index))
if (value == null) value = trim(text(line, eq_index + 1))
var eq_index = line.indexOf('=')
if (eq_index > 0) {
var key_part = line.substring(0, eq_index).trim()
var value = line.substring(eq_index + 1).trim()
// Keys may be bare or quoted; parse_key handles both forms
var key = parse_key(key_part)
if (key == null) return null
if (starts_with(value, '"') && ends_with(value, '"')) {
var unquoted = text(value, 1, -1)
current_section[key] = toml_unescape(unquoted)
if (current_section[key] == null) return null
} else if (starts_with(value, '[') && ends_with(value, ']')) {
// Parse value
if (value.startsWith('"') && value.endsWith('"')) {
// String - unescape quotes
current_section[key] = value.slice(1, -1).replace(/\\"/g, '"')
} else if (value.startsWith('[') && value.endsWith(']')) {
// Array
current_section[key] = parse_array(value)
if (current_section[key] == null) return null
} else if (value == 'true' || value == 'false') {
// Boolean
current_section[key] = value == 'true'
} else if (is_number(value)) {
} else if (isa(value, number)) {
// Number
current_section[key] = Number(value)
} else {
// Unquoted string
@@ -95,151 +65,149 @@ function parse_toml(toml_text) {
}
function parse_key(str) {
if (!is_text(str)) return null
if (starts_with(str, '"') && ends_with(str, '"')) {
var inner = text(str, 1, -1)
return toml_unescape(inner)
if (str.startsWith('"') && str.endsWith('"')) {
return str.slice(1, -1).replace(/\\"/g, '"')
}
return str
}
// Split a key path by dots, respecting quotes
function parse_key_path(str) {
if (!is_text(str)) return null
var parts = []
var current = ''
var in_quote = false
for (var i = 0; i < length(str); i++) {
for (var i = 0; i < str.length; i++) {
var c = str[i]
if (c == '"' && (i==0 || str[i-1] != '\\')) {
in_quote = !in_quote
// rudimentary: quote pairing is not strictly validated against the TOML grammar
} else if (c == '.' && !in_quote) {
var piece = trim(current)
if (piece == null) piece = trim(current)
push(parts, parse_key(piece))
parts.push(parse_key(current.trim()))
current = ''
continue
}
current += c
}
var tail = trim(current)
if (tail == null) tail = trim(current)
if (length(tail) > 0) push(parts, parse_key(tail))
if (current.trim().length > 0)
parts.push(parse_key(current.trim()))
return parts
}
function parse_array(str) {
if (!is_text(str)) return null
// Remove brackets and trim
str = text(str, 1, -1)
str = trim(str)
// Remove brackets
str = str.slice(1, -1).trim()
if (!str) return []
var items = []
var current = ''
var in_quotes = false
for (var i = 0; i < length(str); i++) {
var ch = str[i]
for (var i = 0; i < str.length; i++) {
var char = str[i]
if (ch == '"' && (i == 0 || str[i - 1] != '\\')) {
if (char == '"' && (i == 0 || str[i-1] != '\\')) {
in_quotes = !in_quotes
current += ch
} else if (ch == ',' && !in_quotes) {
var piece = trim(current)
if (piece == null) piece = trim(current)
push(items, parse_value(piece))
current += char
} else if (char == ',' && !in_quotes) {
items.push(parse_value(current.trim()))
current = ''
} else {
current += ch
current += char
}
}
var last = trim(current)
if (last == null) last = trim(current)
if (last) push(items, parse_value(last))
if (current.trim()) {
items.push(parse_value(current.trim()))
}
return items
}
function parse_value(str) {
if (!is_text(str)) return null
if (starts_with(str, '"') && ends_with(str, '"')) {
return toml_unescape(text(str, 1, -1))
}
if (str == 'true' || str == 'false') return str == 'true'
// Use your existing numeric test; TOML numeric formats are richer, but this keeps your "module TOML" scope.
if (!isNaN(Number(str))) return Number(str)
if (str.startsWith('"') && str.endsWith('"')) {
return str.slice(1, -1).replace(/\\"/g, '"')
} else if (str == 'true' || str == 'false') {
return str == 'true'
} else if (!isNaN(Number(str))) {
return Number(str)
} else {
return str
}
}
function encode_toml(obj) {
var result = []
function encode_value(value) {
if (is_text(value)) return '"' + toml_escape(value) + '"'
if (is_logical(value)) return value ? 'true' : 'false'
if (is_number(value)) return text(value)
if (is_array(value)) {
if (typeof value == 'string') {
return '"' + value.replace(/"/g, '\\"') + '"'
} else if (typeof value == 'boolean') {
return value ? 'true' : 'false'
} else if (typeof value == 'number') {
return text(value)
} else if (isa(value, array)) {
var items = []
for (var i = 0; i < length(value); i++) push(items, encode_value(value[i]))
return '[' + text(items, ', ') + ']'
for (var i = 0; i < value.length; i++) {
items.push(encode_value(value[i]))
}
return '[' + items.join(', ') + ']'
}
return text(value)
}
function quote_key(k) {
if (search(k, '.') != null || search(k, '"') != null || search(k, ' ') != null) {
return '"' + toml_escape(k) + '"'
if (k.includes('.') || k.includes('"') || k.includes(' ')) {
return '"' + k.replace(/"/g, '\\"') + '"'
}
return k
}
// First pass: encode top-level simple values
var keys = array(obj)
for (var i = 0; i < length(keys); i++) {
for (var i = 0; i < keys.length; i++) {
var key = keys[i]
var value = obj[key]
if (!is_object(value)) push(result, quote_key(key) + ' = ' + encode_value(value))
if (!isa(value, object)) {
result.push(quote_key(key) + ' = ' + encode_value(value))
}
}
// Second pass: encode nested objects
function encode_section(o, path) {
var keys = array(o)
for (var i = 0; i < length(keys); i++) {
var key = keys[i]
var value = o[key]
function encode_section(obj, path) {
var keys = array(obj)
if (is_object(value)) {
for (var i = 0; i < keys.length; i++) {
var key = keys[i]
var value = obj[key]
if (isa(value, object)) {
// Nested object - create section
// We MUST quote the key segment if it has dots, otherwise it becomes a nested table path
var quoted = quote_key(key)
var section_path = path ? path + '.' + quoted : quoted
push(result, '[' + section_path + ']')
// Direct properties
result.push('[' + section_path + ']')
// First encode direct properties of this section
var section_keys = array(value)
for (var j = 0; j < length(section_keys); j++) {
for (var j = 0; j < section_keys.length; j++) {
var sk = section_keys[j]
var sv = value[sk]
if (!is_object(sv)) push(result, quote_key(sk) + ' = ' + encode_value(sv))
if (!isa(sv, object)) {
result.push(quote_key(sk) + ' = ' + encode_value(sv))
}
}
// Nested sections
// Then encode nested sections
encode_section(value, section_path)
}
}
}
encode_section(obj, '')
return text(result, '\n')
return result.join('\n')
}
return {


@@ -4,7 +4,7 @@
var link = use('link')
var shop = use('internal/shop')
if (length(args) < 1) {
if (args.length < 1) {
log.console("Usage: cell unlink <origin>")
log.console("Removes a link and restores the original package.")
$stop()

124
update.ce

@@ -1,51 +1,34 @@
// cell update [<locator>] - Update packages from remote sources
// cell update - Update packages from remote sources
//
// This command checks for updates to all packages and downloads new versions.
// For local packages, ensures the symlink is correct.
// For remote packages, checks the remote for new commits.
//
// Usage:
// cell update Update all packages in shop
// cell update . Update current directory package
// cell update <locator> Update a specific package
//
// Options:
// --build Run build after updating
// --target <triple> Target platform for build (requires --build)
// --follow-links Update link targets instead of origins
// cell update - Update all packages
// cell update <package> - Update a specific package
var shop = use('internal/shop')
var build = use('build')
var fd = use('fd')
var target_pkg = null
var run_build = false
var target_triple = null
var follow_links = false
// Parse arguments
for (var i = 0; i < length(args); i++) {
for (var i = 0; i < args.length; i++) {
if (args[i] == '--help' || args[i] == '-h') {
log.console("Usage: cell update [<locator>] [options]")
log.console("")
log.console("Usage: cell update [package]")
log.console("Update packages from remote sources.")
log.console("")
log.console("Options:")
log.console(" --build Run build after updating")
log.console(" --target <triple> Target platform for build (requires --build)")
log.console(" --follow-links Update link targets instead of origins")
log.console("Arguments:")
log.console(" package Optional package name to update. If omitted, updates all.")
log.console("")
log.console("This command checks for updates to all packages and downloads")
log.console("new versions. For local packages, ensures the symlink is correct.")
$stop()
} else if (args[i] == '--build') {
run_build = true
} else if (args[i] == '--target' || args[i] == '-t') {
if (i + 1 < length(args)) {
target_triple = args[++i]
} else {
log.error('--target requires a triple')
$stop()
}
} else if (args[i] == '--follow-links') {
follow_links = true
} else if (!starts_with(args[i], '-')) {
} else if (!args[i].startsWith('-')) {
target_pkg = args[i]
// Resolve relative paths to absolute paths
if (target_pkg == '.' || starts_with(target_pkg, './') || starts_with(target_pkg, '../') || fd.is_dir(target_pkg)) {
if (target_pkg == '.' || target_pkg.startsWith('./') || target_pkg.startsWith('../') || fd.is_dir(target_pkg)) {
var resolved = fd.realpath(target_pkg)
if (resolved) {
target_pkg = resolved
@@ -54,93 +37,56 @@ for (var i = 0; i < length(args); i++) {
}
}
// Default target if building
if (run_build && !target_triple) {
target_triple = build.detect_host_target()
}
var link = use('link')
function update_and_fetch(pkg)
{
var lock = shop.load_lock()
var old_entry = lock[pkg]
var old_commit = old_entry ? old_entry.commit : null
// Handle follow-links option
var effective_pkg = pkg
if (follow_links) {
var link_target = link.get_target(pkg)
if (link_target) {
effective_pkg = link_target
log.console(" Following link: " + pkg + " -> " + effective_pkg)
}
}
var new_entry = shop.update(effective_pkg)
var new_entry = shop.update(pkg)
if (new_entry) {
if (new_entry.commit) {
var old_str = old_commit ? text(old_commit, 0, 8) : "(new)"
log.console(" " + effective_pkg + " " + old_str + " -> " + text(new_entry.commit, 0, 8))
shop.fetch(effective_pkg)
var old_str = old_commit ? old_commit.substring(0, 8) : "(new)"
log.console(" " + pkg + " " + old_str + " -> " + new_entry.commit.substring(0, 8))
shop.fetch(pkg)
} else {
// Local package - just ensure symlink is correct
log.console(" " + effective_pkg + " (local)")
log.console(" " + pkg + " (local)")
}
shop.extract(effective_pkg)
shop.build_package_scripts(effective_pkg)
return effective_pkg
shop.extract(pkg)
shop.build_package_scripts(pkg)
return true
}
return null
return false
}
var updated_packages = []
if (target_pkg) {
var updated = update_and_fetch(target_pkg)
if (updated) {
push(updated_packages, updated)
if (update_and_fetch(target_pkg))
log.console("Updated " + target_pkg + ".")
} else {
else
log.console(target_pkg + " is up to date.")
}
} else {
var packages = shop.list_packages()
var pkg_count = length(packages)
var pkg_count = packages.length
log.console("Checking for updates (" + text(pkg_count) + " package" + (pkg_count == 1 ? "" : "s") + ")...")
for (var i = 0; i < length(packages); i++) {
var updated_count = 0
for (var i = 0; i < packages.length; i++) {
var pkg = packages[i]
if (pkg == 'core') continue
var updated = update_and_fetch(pkg)
if (updated) {
push(updated_packages, updated)
if (update_and_fetch(pkg)) {
updated_count++
}
}
if (length(updated_packages) > 0) {
log.console("Updated " + text(length(updated_packages)) + " package" + (length(updated_packages) == 1 ? "" : "s") + ".")
if (updated_count > 0) {
log.console("Updated " + text(updated_count) + " package" + (updated_count == 1 ? "" : "s") + ".")
} else {
log.console("All packages are up to date.")
}
}
// Run build if requested
if (run_build && length(updated_packages) > 0) {
log.console("")
log.console("Building updated packages...")
arrfor(updated_packages, function(pkg) {
try {
var lib = build.build_dynamic(pkg, target_triple, 'release')
if (lib)
log.console(" Built: " + lib)
} catch (e) {
log.error(" Failed to build " + pkg + ": " + e)
}
})
}
$stop()


@@ -1,10 +1,10 @@
var shop = use('internal/shop')
var fd = use('fd')
var cmd = length(args) > 0 ? args[0] : null
var cmd = args.length > 0 ? args[0] : null
if (cmd == 'link') {
if (length(args) < 2) {
if (args.length < 2) {
log.console("Usage: cell upgrade link <core_dir>")
return
}

257
verify.ce

@@ -1,257 +0,0 @@
// cell verify [<scope>] - Verify integrity and consistency
//
// Usage:
// cell verify Verify current directory package
// cell verify . Verify current directory package
// cell verify <locator> Verify specific package
// cell verify shop Verify entire shop
// cell verify world Verify all world roots
//
// Options:
// --deep Traverse full dependency closure
// --target <triple> Verify builds for specific target
var shop = use('internal/shop')
var pkg = use('package')
var link = use('link')
var build = use('build')
var fd = use('fd')
var scope = null
var deep = false
var target_triple = null
for (var i = 0; i < length(args); i++) {
if (args[i] == '--deep') {
deep = true
} else if (args[i] == '--target' || args[i] == '-t') {
if (i + 1 < length(args)) {
target_triple = args[++i]
} else {
log.error('--target requires a triple')
$stop()
}
} else if (args[i] == '--help' || args[i] == '-h') {
log.console("Usage: cell verify [<scope>] [options]")
log.console("")
log.console("Verify integrity and consistency.")
log.console("")
log.console("Scopes:")
log.console(" <locator> Verify specific package")
log.console(" shop Verify entire shop")
log.console(" world Verify all world roots")
log.console("")
log.console("Options:")
log.console(" --deep Traverse full dependency closure")
log.console(" --target <triple> Verify builds for specific target")
$stop()
} else if (!starts_with(args[i], '-')) {
scope = args[i]
}
}
// Default to current directory
if (!scope) {
scope = '.'
}
// Detect target if not specified
if (!target_triple) {
target_triple = build.detect_host_target()
}
var errors = []
var warnings = []
var checked = 0
function add_error(msg) {
push(errors, msg)
}
function add_warning(msg) {
push(warnings, msg)
}
// Verify a single package
function verify_package(locator) {
checked++
var lock = shop.load_lock()
var lock_entry = lock[locator]
var links = link.load()
var link_target = links[locator]
// Check lock entry exists
if (!lock_entry) {
add_error(locator + ": not in lock")
}
// Check package directory exists
var pkg_dir = shop.get_package_dir(locator)
var dir_exists = fd.is_dir(pkg_dir) || fd.is_link(pkg_dir)
if (!dir_exists) {
add_error(locator + ": package directory missing at " + pkg_dir)
return
}
// Check cell.toml exists
if (!fd.is_file(pkg_dir + '/cell.toml')) {
add_error(locator + ": missing cell.toml")
return
}
// For linked packages, verify link target
if (link_target) {
if (starts_with(link_target, '/')) {
// Local path target
if (!fd.is_dir(link_target)) {
add_error(locator + ": link target does not exist: " + link_target)
} else if (!fd.is_file(link_target + '/cell.toml')) {
add_error(locator + ": link target is not a valid package: " + link_target)
}
} else {
// Package target
var target_dir = shop.get_package_dir(link_target)
if (!fd.is_dir(target_dir) && !fd.is_link(target_dir)) {
add_error(locator + ": link target package not found: " + link_target)
}
}
// Check symlink is correct
if (fd.is_link(pkg_dir)) {
var current_target = fd.readlink(pkg_dir)
var expected_target = starts_with(link_target, '/') ? link_target : shop.get_package_dir(link_target)
if (current_target != expected_target) {
add_warning(locator + ": symlink target mismatch (expected " + expected_target + ", got " + current_target + ")")
}
} else {
add_warning(locator + ": linked but directory is not a symlink")
}
}
// Check build output exists
var lib_dir = shop.get_lib_dir()
var lib_name = shop.lib_name_for_package(locator)
var dylib_ext = '.dylib' // TODO: detect from target
var lib_path = lib_dir + '/' + lib_name + dylib_ext
// Only check for builds if package has C files
try {
var c_files = pkg.get_c_files(locator, target_triple, true)
if (c_files && length(c_files) > 0) {
if (!fd.is_file(lib_path)) {
add_warning(locator + ": library not built at " + lib_path)
}
}
} catch (e) {
// Skip build check if can't determine C files
}
}
// Check for link cycles
function check_link_cycles() {
var links = link.load()
function follow_chain(origin, visited) {
if (visited[origin]) {
return origin // cycle detected
}
visited[origin] = true
var target = links[origin]
if (target && links[target]) {
return follow_chain(target, visited)
}
return null
}
arrfor(links, function(origin) {
var cycle_start = follow_chain(origin, {})
if (cycle_start) {
add_error("Link cycle detected starting from: " + origin)
}
})
}
// Check for dangling links
function check_dangling_links() {
var links = link.load()
arrfor(array(links), function(origin) {
var target = links[origin]
if (starts_with(target, '/')) {
if (!fd.is_dir(target)) {
add_error("Dangling link: " + origin + " -> " + target + " (target does not exist)")
}
}
})
}
// Gather packages to verify
var packages_to_verify = []
if (scope == 'shop') {
packages_to_verify = shop.list_packages()
} else if (scope == 'world') {
// For now, world is the same as shop
// In future, this could be a separate concept
packages_to_verify = shop.list_packages()
} else {
// Single package
var locator = scope
// Resolve local paths
if (locator == '.' || starts_with(locator, './') || starts_with(locator, '../') || fd.is_dir(locator)) {
var resolved = fd.realpath(locator)
if (resolved) {
locator = resolved
}
}
if (deep) {
// Gather all dependencies
var all_deps = pkg.gather_dependencies(locator)
push(packages_to_verify, locator)
arrfor(all_deps, function(dep) {
push(packages_to_verify, dep)
})
} else {
push(packages_to_verify, locator)
}
}
log.console("Verifying " + text(length(packages_to_verify)) + " package(s)...")
log.console("")
// Run verification
check_link_cycles()
check_dangling_links()
arrfor(packages_to_verify, function(p) {
if (p == 'core') return
verify_package(p)
})
// Print results
if (length(warnings) > 0) {
log.console("Warnings:")
arrfor(warnings, function(w) {
log.console(" " + w)
})
log.console("")
}
if (length(errors) > 0) {
log.console("Errors:")
arrfor(errors, function(e) {
log.console(" " + e)
})
log.console("")
log.console("Verification FAILED: " + text(length(errors)) + " error(s), " + text(length(warnings)) + " warning(s)")
// Note: would use process.exit(1) if available
} else {
log.console("Verification PASSED: " + text(checked) + " package(s) checked, " + text(length(warnings)) + " warning(s)")
}
$stop()

16
why.ce

@@ -1,7 +1,7 @@
var shop = use('internal/shop')
var pkg = use('package')
if (!args || length(args) < 1) {
if (!args || args.length < 1) {
log.console("Usage: cell why <package>")
$stop()
return
@@ -11,7 +11,7 @@ var target = args[0]
log.console("Searching for '" + target + "'...")
var target_clean = target
if (starts_with(target_clean, '/')) target_clean = text(target_clean, 1)
if (target_clean.startsWith('/')) target_clean = target_clean.substring(1)
var found = false
@@ -23,9 +23,11 @@ function search(current_pkg, stack) {
var deps = pkg.dependencies(current_pkg)
// Sort for consistent output
var aliases = sort(array(deps))
var aliases = []
for (var k in deps) aliases.push(k)
aliases.sort()
for (var i = 0; i < length(aliases); i++) {
for (var i = 0; i < aliases.length; i++) {
var alias = aliases[i]
var locator = deps[alias]
var parsed = shop.parse_package(locator)
@@ -34,7 +36,7 @@ function search(current_pkg, stack) {
var canon = parsed.path
var locator_clean = locator
if (search(locator, '@') != null) locator_clean = array(locator, '@')[0]
if (locator.includes('@')) locator_clean = locator.split('@')[0]
// Check if match
// 1. Alias matches
@@ -60,7 +62,7 @@ function search(current_pkg, stack) {
// Recurse if not seen in current stack (cycle detection)
var cycle = false
for (var j = 0; j < length(stack); j++) {
for (var j = 0; j < stack.length; j++) {
if (stack[j].pkg == canon) {
cycle = true
break
@@ -78,7 +80,7 @@ function print_stack(stack) {
var output = "project"
log.console(output)
for (var i = 0; i < length(stack); i++) {
for (var i = 0; i < stack.length; i++) {
var node = stack[i]
var indent = ""
for (var j = 0; j <= i; j++) indent += " "


@@ -18,14 +18,14 @@ JSC_CCALL(wildstar_match,
static const JSCFunctionListEntry js_wildstar_funcs[] = {
MIST_FUNC_DEF(wildstar, match, 3),
JS_PROP_INT32_DEF("WM_MATCH", WM_MATCH, 0),
JS_PROP_INT32_DEF("WM_NOMATCH", WM_NOMATCH, 0),
JS_PROP_INT32_DEF("WM_NOESCAPE", WM_NOESCAPE, 0),
JS_PROP_INT32_DEF("WM_PATHNAME", WM_PATHNAME, 0),
JS_PROP_INT32_DEF("WM_PERIOD", WM_PERIOD, 0),
JS_PROP_INT32_DEF("WM_LEADING_DIR", WM_LEADING_DIR, 0),
JS_PROP_INT32_DEF("WM_CASEFOLD", WM_CASEFOLD, 0),
JS_PROP_INT32_DEF("WM_WILDSTAR", WM_WILDSTAR, 0),
JS_PROP_INT32_DEF("WM_MATCH", WM_MATCH, JS_PROP_CONFIGURABLE),
JS_PROP_INT32_DEF("WM_NOMATCH", WM_NOMATCH, JS_PROP_CONFIGURABLE),
JS_PROP_INT32_DEF("WM_NOESCAPE", WM_NOESCAPE, JS_PROP_CONFIGURABLE),
JS_PROP_INT32_DEF("WM_PATHNAME", WM_PATHNAME, JS_PROP_CONFIGURABLE),
JS_PROP_INT32_DEF("WM_PERIOD", WM_PERIOD, JS_PROP_CONFIGURABLE),
JS_PROP_INT32_DEF("WM_LEADING_DIR", WM_LEADING_DIR, JS_PROP_CONFIGURABLE),
JS_PROP_INT32_DEF("WM_CASEFOLD", WM_CASEFOLD, JS_PROP_CONFIGURABLE),
JS_PROP_INT32_DEF("WM_WILDSTAR", WM_WILDSTAR, JS_PROP_CONFIGURABLE),
};
JSValue js_wildstar_use(JSContext *js) {