Compare commits
96 Commits
mqbe
...
mcode_stre
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3795533554 | ||
|
|
0acaabd5fa | ||
|
|
f7e2ff13b5 | ||
|
|
36fd0a35f9 | ||
|
|
77c02bf9bf | ||
|
|
f251691146 | ||
|
|
e9ea6ec299 | ||
|
|
bf5fdbc688 | ||
|
|
b960d03eeb | ||
|
|
b4d42fb83d | ||
|
|
0a680a0cd3 | ||
|
|
9f0fd84f4f | ||
|
|
cb9d6e0c0e | ||
|
|
4f18a0b524 | ||
|
|
f296a0c10d | ||
|
|
1df6553577 | ||
|
|
30a9cfee79 | ||
|
|
6fff96d9d9 | ||
|
|
4a50d0587d | ||
|
|
e346348eb5 | ||
|
|
ff560973f3 | ||
|
|
de4b3079d4 | ||
|
|
29227e655b | ||
|
|
588e88373e | ||
|
|
9aca365771 | ||
|
|
c56d4d5c3c | ||
|
|
c1e101b24f | ||
|
|
9f0dfbc6a2 | ||
|
|
5c9403a43b | ||
|
|
89e34ba71d | ||
|
|
73bfa8d7b1 | ||
|
|
4aedb8b0c5 | ||
|
|
ec072f3b63 | ||
|
|
65755d9c0c | ||
|
|
19524b3a53 | ||
|
|
f901332c5b | ||
|
|
add136c140 | ||
|
|
c1a99dfd4c | ||
|
|
7b46c6e947 | ||
|
|
1efb0b1bc9 | ||
|
|
0ba2783b48 | ||
|
|
6de542f0d0 | ||
|
|
6ba4727119 | ||
|
|
900db912a5 | ||
|
|
b771b2b5d8 | ||
|
|
68fb440502 | ||
|
|
e7a2f16004 | ||
|
|
3a8a17ab60 | ||
|
|
8a84be65e1 | ||
|
|
c1910ee1db | ||
|
|
7036cdf2d1 | ||
|
|
fbeec17ce5 | ||
|
|
2c55ae8cb2 | ||
|
|
259bc139fc | ||
|
|
a252412eca | ||
|
|
b327e16463 | ||
|
|
da6f096a56 | ||
|
|
1320ef9f47 | ||
|
|
ed4a5474d5 | ||
|
|
f52dd80d52 | ||
|
|
504e268b9d | ||
|
|
0d47002167 | ||
|
|
b65db63447 | ||
|
|
c1ccff5437 | ||
|
|
2f681fa366 | ||
|
|
682b1cf9cf | ||
|
|
ddf3fc1c77 | ||
|
|
f1a5072ff2 | ||
|
|
f44fb502be | ||
|
|
d75ce916d7 | ||
|
|
fe5dc6ecc9 | ||
|
|
54673e4a04 | ||
|
|
0d8b5cfb04 | ||
|
|
3d71f4a363 | ||
|
|
4deb0e2577 | ||
|
|
67b96e1627 | ||
|
|
4e5f1d8faa | ||
|
|
bd577712d9 | ||
|
|
6df3b741cf | ||
|
|
178837b88d | ||
|
|
120ce9d30c | ||
|
|
58f185b379 | ||
|
|
f7b5252044 | ||
|
|
ded5f7d74b | ||
|
|
fe6033d6cb | ||
|
|
60e61eef76 | ||
|
|
ad863fb89b | ||
|
|
96f8157039 | ||
|
|
c4ff0bc109 | ||
|
|
877250b1d8 | ||
|
|
747227de40 | ||
|
|
3f7e34cd7a | ||
|
|
cef5c50169 | ||
|
|
0428424ec7 | ||
|
|
78e64c5067 | ||
|
|
ff11c49c39 |
1
.gitattributes
vendored
Normal file
1
.gitattributes
vendored
Normal file
@@ -0,0 +1 @@
|
||||
*.mach binary merge=ours
|
||||
13
CLAUDE.md
13
CLAUDE.md
@@ -113,6 +113,19 @@ var v = a[] // pop: v is 3, a is [1, 2]
|
||||
- `packages/` — core packages
|
||||
- `Makefile` — build system (`make` to rebuild, `make bootstrap` for first build)
|
||||
|
||||
## Testing
|
||||
|
||||
After any C runtime changes, run all three test suites before considering the work done:
|
||||
|
||||
```
|
||||
make # rebuild
|
||||
./cell --dev vm_suite # VM-level tests (641 tests)
|
||||
./cell --dev test suite # language-level tests (493 tests)
|
||||
./cell --dev fuzz # fuzzer (100 iterations)
|
||||
```
|
||||
|
||||
All three must pass with 0 failures.
|
||||
|
||||
## Documentation
|
||||
|
||||
The `docs/` folder is the single source of truth. The website at `website/` mounts it via Hugo. Key files:
|
||||
|
||||
24
Makefile
24
Makefile
@@ -5,10 +5,16 @@
|
||||
# or manually build with meson once.
|
||||
#
|
||||
# The cell shop is at ~/.cell and core scripts are installed to ~/.cell/core
|
||||
#
|
||||
# See BUILDING.md for details on the bootstrap process and .mach files.
|
||||
|
||||
CELL_SHOP = $(HOME)/.cell
|
||||
CELL_CORE_PACKAGE = $(CELL_SHOP)/packages/core
|
||||
|
||||
# .cm sources that compile to .mach bytecode
|
||||
MACH_SOURCES = tokenize.cm parse.cm fold.cm mcode.cm \
|
||||
internal/bootstrap.cm internal/engine.cm
|
||||
|
||||
maker: install
|
||||
|
||||
makecell:
|
||||
@@ -16,7 +22,7 @@ makecell:
|
||||
cp cell /opt/homebrew/bin/
|
||||
|
||||
# Install core: symlink this directory to ~/.cell/core
|
||||
install: bootstrap $(CELL_SHOP)
|
||||
install: bootstrap .mach.stamp $(CELL_SHOP)
|
||||
@echo "Linking cell core to $(CELL_CORE_PACKAGE)"
|
||||
rm -rf $(CELL_CORE_PACKAGE)
|
||||
ln -s $(PWD) $(CELL_CORE_PACKAGE)
|
||||
@@ -40,6 +46,16 @@ libcell_runtime.dylib: $(CELL_SHOP)/build/dynamic
|
||||
cell_main: source/main.c libcell_runtime.dylib
|
||||
cc -o cell_main source/main.c -L. -lcell_runtime -Wl,-rpath,@loader_path -Wl,-rpath,/opt/homebrew/lib
|
||||
|
||||
# Regenerate .mach bytecode when any .cm source changes
|
||||
.mach.stamp: $(MACH_SOURCES)
|
||||
./cell --dev regen
|
||||
@touch $@
|
||||
|
||||
# Force-regenerate all .mach bytecode files
|
||||
regen:
|
||||
./cell --core . regen
|
||||
@touch .mach.stamp
|
||||
|
||||
# Create the cell shop directories
|
||||
$(CELL_SHOP):
|
||||
mkdir -p $(CELL_SHOP)
|
||||
@@ -57,7 +73,7 @@ static:
|
||||
|
||||
# Bootstrap: build cell from scratch using meson (only needed once)
|
||||
# Also installs core scripts to ~/.cell/core
|
||||
bootstrap:
|
||||
bootstrap:
|
||||
meson setup build_bootstrap -Dbuildtype=debugoptimized
|
||||
meson compile -C build_bootstrap
|
||||
cp build_bootstrap/cell .
|
||||
@@ -68,7 +84,7 @@ bootstrap:
|
||||
# Clean build artifacts
|
||||
clean:
|
||||
rm -rf $(CELL_SHOP)/build build_bootstrap
|
||||
rm -f cell cell_main libcell_runtime.dylib
|
||||
rm -f cell cell_main libcell_runtime.dylib .mach.stamp
|
||||
|
||||
# Ensure dynamic build directory exists
|
||||
$(CELL_SHOP)/build/dynamic: $(CELL_SHOP)
|
||||
@@ -79,4 +95,4 @@ meson:
|
||||
meson setup build_dbg -Dbuildtype=debugoptimized
|
||||
meson install -C build_dbg
|
||||
|
||||
.PHONY: cell static bootstrap clean meson install
|
||||
.PHONY: cell static bootstrap clean meson install regen
|
||||
|
||||
@@ -319,7 +319,7 @@ JSValue js_reader_list(JSContext *js, JSValue self, int argc, JSValue *argv)
|
||||
JS_FreeValue(js, arr);
|
||||
return filename;
|
||||
}
|
||||
JS_SetPropertyUint32(js, arr, arr_index++, filename);
|
||||
JS_SetPropertyNumber(js, arr, arr_index++, filename);
|
||||
}
|
||||
|
||||
return arr;
|
||||
|
||||
167
bench.ce
167
bench.ce
@@ -8,7 +8,7 @@ var os = use('os')
|
||||
var testlib = use('internal/testlib')
|
||||
var math = use('math/radians')
|
||||
|
||||
if (!args) args = []
|
||||
var _args = args == null ? [] : args
|
||||
|
||||
var target_pkg = null // null = current package
|
||||
var target_bench = null // null = all benchmarks, otherwise specific bench file
|
||||
@@ -55,14 +55,19 @@ function stddev(arr, mean_val) {
|
||||
function percentile(arr, p) {
|
||||
if (length(arr) == 0) return 0
|
||||
var sorted = sort(arr)
|
||||
var idx = floor(arr) * p / 100
|
||||
var idx = floor(length(arr) * p / 100)
|
||||
if (idx >= length(arr)) idx = length(arr) - 1
|
||||
return sorted[idx]
|
||||
}
|
||||
|
||||
// Parse arguments similar to test.ce
|
||||
function parse_args() {
|
||||
if (length(args) == 0) {
|
||||
var name = null
|
||||
var lock = null
|
||||
var resolved = null
|
||||
var bench_path = null
|
||||
|
||||
if (length(_args) == 0) {
|
||||
if (!testlib.is_valid_package('.')) {
|
||||
log.console('No cell.toml found in current directory')
|
||||
return false
|
||||
@@ -71,7 +76,7 @@ function parse_args() {
|
||||
return true
|
||||
}
|
||||
|
||||
if (args[0] == 'all') {
|
||||
if (_args[0] == 'all') {
|
||||
if (!testlib.is_valid_package('.')) {
|
||||
log.console('No cell.toml found in current directory')
|
||||
return false
|
||||
@@ -80,28 +85,28 @@ function parse_args() {
|
||||
return true
|
||||
}
|
||||
|
||||
if (args[0] == 'package') {
|
||||
if (length(args) < 2) {
|
||||
if (_args[0] == 'package') {
|
||||
if (length(_args) < 2) {
|
||||
log.console('Usage: cell bench package <name> [bench]')
|
||||
log.console(' cell bench package all')
|
||||
return false
|
||||
}
|
||||
|
||||
if (args[1] == 'all') {
|
||||
if (_args[1] == 'all') {
|
||||
all_pkgs = true
|
||||
log.console('Benchmarking all packages...')
|
||||
return true
|
||||
}
|
||||
|
||||
var name = args[1]
|
||||
var lock = shop.load_lock()
|
||||
name = _args[1]
|
||||
lock = shop.load_lock()
|
||||
if (lock[name]) {
|
||||
target_pkg = name
|
||||
} else if (starts_with(name, '/') && testlib.is_valid_package(name)) {
|
||||
target_pkg = name
|
||||
} else {
|
||||
if (testlib.is_valid_package('.')) {
|
||||
var resolved = pkg.alias_to_package(null, name)
|
||||
resolved = pkg.alias_to_package(null, name)
|
||||
if (resolved) {
|
||||
target_pkg = resolved
|
||||
} else {
|
||||
@@ -114,8 +119,8 @@ function parse_args() {
|
||||
}
|
||||
}
|
||||
|
||||
if (length(args) >= 3) {
|
||||
target_bench = args[2]
|
||||
if (length(_args) >= 3) {
|
||||
target_bench = _args[2]
|
||||
}
|
||||
|
||||
log.console(`Benchmarking package: ${target_pkg}`)
|
||||
@@ -123,7 +128,7 @@ function parse_args() {
|
||||
}
|
||||
|
||||
// cell bench benches/suite or cell bench <path>
|
||||
var bench_path = args[0]
|
||||
bench_path = _args[0]
|
||||
|
||||
// Normalize path - add benches/ prefix if not present
|
||||
if (!starts_with(bench_path, 'benches/') && !starts_with(bench_path, '/')) {
|
||||
@@ -160,12 +165,15 @@ function collect_benches(package_name, specific_bench) {
|
||||
var files = pkg.list_files(package_name)
|
||||
var bench_files = []
|
||||
arrfor(files, function(f) {
|
||||
var bench_name = null
|
||||
var match_name = null
|
||||
var match_base = null
|
||||
if (starts_with(f, "benches/") && ends_with(f, ".cm")) {
|
||||
if (specific_bench) {
|
||||
var bench_name = text(f, 0, -3)
|
||||
var match_name = specific_bench
|
||||
bench_name = text(f, 0, -3)
|
||||
match_name = specific_bench
|
||||
if (!starts_with(match_name, 'benches/')) match_name = 'benches/' + match_name
|
||||
var match_base = ends_with(match_name, '.cm') ? text(match_name, 0, -3) : match_name
|
||||
match_base = ends_with(match_name, '.cm') ? text(match_name, 0, -3) : match_name
|
||||
if (bench_name != match_base) return
|
||||
}
|
||||
push(bench_files, f)
|
||||
@@ -180,24 +188,25 @@ function calibrate_batch_size(bench_fn, is_batch) {
|
||||
|
||||
var n = MIN_BATCH_SIZE
|
||||
var dt = 0
|
||||
var start = 0
|
||||
var new_n = 0
|
||||
var calc = 0
|
||||
var target_n = 0
|
||||
|
||||
// Find a batch size that takes at least MIN_SAMPLE_NS
|
||||
while (n < MAX_BATCH_SIZE) {
|
||||
// Ensure n is a valid number before calling
|
||||
if (!is_number(n) || n < 1) {
|
||||
n = 1
|
||||
break
|
||||
}
|
||||
|
||||
var start = os.now()
|
||||
start = os.now()
|
||||
bench_fn(n)
|
||||
dt = os.now() - start
|
||||
|
||||
if (dt >= MIN_SAMPLE_NS) break
|
||||
|
||||
// Double the batch size
|
||||
var new_n = n * 2
|
||||
// Check if multiplication produced a valid number
|
||||
new_n = n * 2
|
||||
if (!is_number(new_n) || new_n > MAX_BATCH_SIZE) {
|
||||
n = MAX_BATCH_SIZE
|
||||
break
|
||||
@@ -207,10 +216,9 @@ function calibrate_batch_size(bench_fn, is_batch) {
|
||||
|
||||
// Adjust to target sample duration
|
||||
if (dt > 0 && dt < TARGET_SAMPLE_NS && is_number(n) && is_number(dt)) {
|
||||
var calc = n * TARGET_SAMPLE_NS / dt
|
||||
calc = n * TARGET_SAMPLE_NS / dt
|
||||
if (is_number(calc) && calc > 0) {
|
||||
var target_n = floor(calc)
|
||||
// Check if floor returned a valid number
|
||||
target_n = floor(calc)
|
||||
if (is_number(target_n) && target_n > 0) {
|
||||
if (target_n > MAX_BATCH_SIZE) target_n = MAX_BATCH_SIZE
|
||||
if (target_n < MIN_BATCH_SIZE) target_n = MIN_BATCH_SIZE
|
||||
@@ -219,7 +227,6 @@ function calibrate_batch_size(bench_fn, is_batch) {
|
||||
}
|
||||
}
|
||||
|
||||
// Safety check - ensure we always return a valid batch size
|
||||
if (!is_number(n) || n < 1) {
|
||||
n = 1
|
||||
}
|
||||
@@ -230,72 +237,70 @@ function calibrate_batch_size(bench_fn, is_batch) {
|
||||
// Run a single benchmark function
|
||||
function run_single_bench(bench_fn, bench_name) {
|
||||
var timings_per_op = []
|
||||
|
||||
// Detect benchmark format:
|
||||
// 1. Object with { setup, run, teardown } - structured format
|
||||
// 2. Function that accepts (n) - batch format
|
||||
// 3. Function that accepts () - legacy format
|
||||
var is_structured = is_object(bench_fn) && bench_fn.run
|
||||
var is_batch = false
|
||||
var batch_size = 1
|
||||
var setup_fn = null
|
||||
var run_fn = null
|
||||
var teardown_fn = null
|
||||
var calibrate_fn = null
|
||||
var _detect = null
|
||||
var i = 0
|
||||
var state = null
|
||||
var start = 0
|
||||
var duration = 0
|
||||
var ns_per_op = 0
|
||||
|
||||
if (is_structured) {
|
||||
setup_fn = bench_fn.setup || function() { return null }
|
||||
run_fn = bench_fn.run
|
||||
teardown_fn = bench_fn.teardown || function(state) {}
|
||||
teardown_fn = bench_fn.teardown || function(s) {}
|
||||
|
||||
// Check if run function accepts batch size
|
||||
try {
|
||||
_detect = function() {
|
||||
var test_state = setup_fn()
|
||||
run_fn(1, test_state)
|
||||
is_batch = true
|
||||
if (teardown_fn) teardown_fn(test_state)
|
||||
} catch (e) {
|
||||
} disruption {
|
||||
is_batch = false
|
||||
}
|
||||
_detect()
|
||||
|
||||
// Create wrapper for calibration
|
||||
var calibrate_fn = function(n) {
|
||||
var state = setup_fn()
|
||||
run_fn(n, state)
|
||||
if (teardown_fn) teardown_fn(state)
|
||||
calibrate_fn = function(n) {
|
||||
var s = setup_fn()
|
||||
run_fn(n, s)
|
||||
if (teardown_fn) teardown_fn(s)
|
||||
}
|
||||
batch_size = calibrate_batch_size(calibrate_fn, is_batch)
|
||||
|
||||
// Safety check for structured benchmarks
|
||||
if (!is_number(batch_size) || batch_size < 1) {
|
||||
batch_size = 1
|
||||
}
|
||||
} else {
|
||||
// Simple function format
|
||||
try {
|
||||
_detect = function() {
|
||||
bench_fn(1)
|
||||
is_batch = true
|
||||
} catch (e) {
|
||||
} disruption {
|
||||
is_batch = false
|
||||
}
|
||||
_detect()
|
||||
batch_size = calibrate_batch_size(bench_fn, is_batch)
|
||||
}
|
||||
|
||||
// Safety check - ensure batch_size is valid
|
||||
if (!batch_size || batch_size < 1) {
|
||||
batch_size = 1
|
||||
}
|
||||
|
||||
// Warmup phase
|
||||
for (var i = 0; i < WARMUP_BATCHES; i++) {
|
||||
// Ensure batch_size is valid before warmup
|
||||
for (i = 0; i < WARMUP_BATCHES; i++) {
|
||||
if (!is_number(batch_size) || batch_size < 1) {
|
||||
var type_str = is_null(batch_size) ? 'null' : is_number(batch_size) ? 'number' : is_text(batch_size) ? 'text' : is_object(batch_size) ? 'object' : is_array(batch_size) ? 'array' : is_function(batch_size) ? 'function' : is_logical(batch_size) ? 'logical' : 'unknown'
|
||||
log.console(`WARNING: batch_size became ${type_str} = ${batch_size}, resetting to 1`)
|
||||
batch_size = 1
|
||||
}
|
||||
|
||||
if (is_structured) {
|
||||
var state = setup_fn()
|
||||
state = setup_fn()
|
||||
if (is_batch) {
|
||||
run_fn(batch_size, state)
|
||||
} else {
|
||||
@@ -312,35 +317,34 @@ function run_single_bench(bench_fn, bench_name) {
|
||||
}
|
||||
|
||||
// Measurement phase - collect SAMPLES timing samples
|
||||
for (var i = 0; i < SAMPLES; i++) {
|
||||
// Double-check batch_size is valid (should never happen, but defensive)
|
||||
for (i = 0; i < SAMPLES; i++) {
|
||||
if (!is_number(batch_size) || batch_size < 1) {
|
||||
batch_size = 1
|
||||
}
|
||||
|
||||
if (is_structured) {
|
||||
var state = setup_fn()
|
||||
var start = os.now()
|
||||
state = setup_fn()
|
||||
start = os.now()
|
||||
if (is_batch) {
|
||||
run_fn(batch_size, state)
|
||||
} else {
|
||||
run_fn(state)
|
||||
}
|
||||
var duration = os.now() - start
|
||||
duration = os.now() - start
|
||||
if (teardown_fn) teardown_fn(state)
|
||||
|
||||
var ns_per_op = is_batch ? duration / batch_size : duration
|
||||
ns_per_op = is_batch ? duration / batch_size : duration
|
||||
push(timings_per_op, ns_per_op)
|
||||
} else {
|
||||
var start = os.now()
|
||||
start = os.now()
|
||||
if (is_batch) {
|
||||
bench_fn(batch_size)
|
||||
} else {
|
||||
bench_fn()
|
||||
}
|
||||
var duration = os.now() - start
|
||||
duration = os.now() - start
|
||||
|
||||
var ns_per_op = is_batch ? duration / batch_size : duration
|
||||
ns_per_op = is_batch ? duration / batch_size : duration
|
||||
push(timings_per_op, ns_per_op)
|
||||
}
|
||||
}
|
||||
@@ -354,7 +358,6 @@ function run_single_bench(bench_fn, bench_name) {
|
||||
var p95_ns = percentile(timings_per_op, 95)
|
||||
var p99_ns = percentile(timings_per_op, 99)
|
||||
|
||||
// Calculate ops/s from median
|
||||
var ops_per_sec = 0
|
||||
if (median_ns > 0) {
|
||||
ops_per_sec = floor(1000000000 / median_ns)
|
||||
@@ -408,18 +411,21 @@ function run_benchmarks(package_name, specific_bench) {
|
||||
|
||||
arrfor(bench_files, function(f) {
|
||||
var mod_path = text(f, 0, -3)
|
||||
var load_error = false
|
||||
var bench_mod = null
|
||||
var use_pkg = null
|
||||
var benches = []
|
||||
var error_result = null
|
||||
|
||||
var file_result = {
|
||||
name: f,
|
||||
benchmarks: []
|
||||
}
|
||||
|
||||
try {
|
||||
var bench_mod
|
||||
var use_pkg = package_name ? package_name : fd.realpath('.')
|
||||
var _load_file = function() {
|
||||
use_pkg = package_name ? package_name : fd.realpath('.')
|
||||
bench_mod = shop.use(mod_path, use_pkg)
|
||||
|
||||
var benches = []
|
||||
if (is_function(bench_mod)) {
|
||||
push(benches, {name: 'main', fn: bench_mod})
|
||||
} else if (is_object(bench_mod)) {
|
||||
@@ -432,8 +438,11 @@ function run_benchmarks(package_name, specific_bench) {
|
||||
if (length(benches) > 0) {
|
||||
log.console(` ${f}`)
|
||||
arrfor(benches, function(b) {
|
||||
try {
|
||||
var result = run_single_bench(b.fn, b.name)
|
||||
var bench_error = false
|
||||
var result = null
|
||||
|
||||
var _run_bench = function() {
|
||||
result = run_single_bench(b.fn, b.name)
|
||||
result.package = pkg_result.package
|
||||
push(file_result.benchmarks, result)
|
||||
pkg_result.total++
|
||||
@@ -444,25 +453,32 @@ function run_benchmarks(package_name, specific_bench) {
|
||||
if (result.batch_size > 1) {
|
||||
log.console(` batch: ${result.batch_size} samples: ${result.samples}`)
|
||||
}
|
||||
} catch (e) {
|
||||
log.console(` ERROR ${b.name}: ${e}`)
|
||||
log.error(e)
|
||||
var error_result = {
|
||||
} disruption {
|
||||
bench_error = true
|
||||
}
|
||||
_run_bench()
|
||||
if (bench_error) {
|
||||
log.console(` ERROR ${b.name}`)
|
||||
error_result = {
|
||||
package: pkg_result.package,
|
||||
name: b.name,
|
||||
error: e.toString()
|
||||
error: "benchmark disrupted"
|
||||
}
|
||||
push(file_result.benchmarks, error_result)
|
||||
pkg_result.total++
|
||||
}
|
||||
})
|
||||
}
|
||||
} catch (e) {
|
||||
log.console(` Error loading ${f}: ${e}`)
|
||||
var error_result = {
|
||||
} disruption {
|
||||
load_error = true
|
||||
}
|
||||
_load_file()
|
||||
if (load_error) {
|
||||
log.console(` Error loading ${f}`)
|
||||
error_result = {
|
||||
package: pkg_result.package,
|
||||
name: "load_module",
|
||||
error: `Error loading module: ${e}`
|
||||
error: "error loading module"
|
||||
}
|
||||
push(file_result.benchmarks, error_result)
|
||||
pkg_result.total++
|
||||
@@ -478,15 +494,16 @@ function run_benchmarks(package_name, specific_bench) {
|
||||
|
||||
// Run all benchmarks
|
||||
var all_results = []
|
||||
var packages = null
|
||||
|
||||
if (all_pkgs) {
|
||||
if (testlib.is_valid_package('.')) {
|
||||
push(all_results, run_benchmarks(null, null))
|
||||
}
|
||||
|
||||
var packages = shop.list_packages()
|
||||
arrfor(packages, function(pkg) {
|
||||
push(all_results, run_benchmarks(pkg, null))
|
||||
packages = shop.list_packages()
|
||||
arrfor(packages, function(p) {
|
||||
push(all_results, run_benchmarks(p, null))
|
||||
})
|
||||
} else {
|
||||
push(all_results, run_benchmarks(target_pkg, target_bench))
|
||||
|
||||
232
benches/actor_patterns.cm
Normal file
232
benches/actor_patterns.cm
Normal file
@@ -0,0 +1,232 @@
|
||||
// actor_patterns.cm — Actor concurrency benchmarks
|
||||
// Message passing, fan-out/fan-in, mailbox throughput.
|
||||
// These use structured benchmarks with setup/run/teardown.
|
||||
|
||||
// Note: actor benchmarks are measured differently from pure compute.
|
||||
// Each iteration sends messages and waits for results, so they're
|
||||
// inherently slower but test real concurrency costs.
|
||||
|
||||
// Simple ping-pong: two actors sending messages back and forth
|
||||
// Since we can't create real actors from a module, we simulate
|
||||
// the message-passing patterns with function call overhead that
|
||||
// mirrors what the actor dispatch does.
|
||||
|
||||
// Simulate message dispatch overhead
|
||||
function make_mailbox() {
|
||||
return {
|
||||
queue: [],
|
||||
delivered: 0
|
||||
}
|
||||
}
|
||||
|
||||
function send(mailbox, msg) {
|
||||
push(mailbox.queue, msg)
|
||||
return null
|
||||
}
|
||||
|
||||
function receive(mailbox) {
|
||||
if (length(mailbox.queue) == 0) return null
|
||||
mailbox.delivered++
|
||||
return pop(mailbox.queue)
|
||||
}
|
||||
|
||||
function drain(mailbox) {
|
||||
var count = 0
|
||||
while (length(mailbox.queue) > 0) {
|
||||
pop(mailbox.queue)
|
||||
count++
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
// Ping-pong: simulate two actors exchanging messages
|
||||
function ping_pong(rounds) {
|
||||
var box_a = make_mailbox()
|
||||
var box_b = make_mailbox()
|
||||
var i = 0
|
||||
var msg = null
|
||||
|
||||
send(box_a, {type: "ping", val: 0})
|
||||
|
||||
for (i = 0; i < rounds; i++) {
|
||||
// A receives and sends to B
|
||||
msg = receive(box_a)
|
||||
if (msg) {
|
||||
send(box_b, {type: "pong", val: msg.val + 1})
|
||||
}
|
||||
// B receives and sends to A
|
||||
msg = receive(box_b)
|
||||
if (msg) {
|
||||
send(box_a, {type: "ping", val: msg.val + 1})
|
||||
}
|
||||
}
|
||||
|
||||
return box_a.delivered + box_b.delivered
|
||||
}
|
||||
|
||||
// Fan-out: one sender, N receivers
|
||||
function fan_out(n_receivers, messages_per) {
|
||||
var receivers = []
|
||||
var i = 0
|
||||
var j = 0
|
||||
for (i = 0; i < n_receivers; i++) {
|
||||
push(receivers, make_mailbox())
|
||||
}
|
||||
|
||||
// Send messages to all receivers
|
||||
for (j = 0; j < messages_per; j++) {
|
||||
for (i = 0; i < n_receivers; i++) {
|
||||
send(receivers[i], {seq: j, data: j * 17})
|
||||
}
|
||||
}
|
||||
|
||||
// All receivers drain
|
||||
var total = 0
|
||||
for (i = 0; i < n_receivers; i++) {
|
||||
total += drain(receivers[i])
|
||||
}
|
||||
|
||||
return total
|
||||
}
|
||||
|
||||
// Fan-in: N senders, one receiver
|
||||
function fan_in(n_senders, messages_per) {
|
||||
var inbox = make_mailbox()
|
||||
var i = 0
|
||||
var j = 0
|
||||
|
||||
// Each sender sends messages
|
||||
for (i = 0; i < n_senders; i++) {
|
||||
for (j = 0; j < messages_per; j++) {
|
||||
send(inbox, {sender: i, seq: j, data: i * 100 + j})
|
||||
}
|
||||
}
|
||||
|
||||
// Receiver processes all
|
||||
var total = 0
|
||||
var msg = null
|
||||
msg = receive(inbox)
|
||||
while (msg) {
|
||||
total += msg.data
|
||||
msg = receive(inbox)
|
||||
}
|
||||
|
||||
return total
|
||||
}
|
||||
|
||||
// Pipeline: chain of processors
|
||||
function pipeline(stages, items) {
|
||||
var boxes = []
|
||||
var i = 0
|
||||
var j = 0
|
||||
var msg = null
|
||||
|
||||
for (i = 0; i <= stages; i++) {
|
||||
push(boxes, make_mailbox())
|
||||
}
|
||||
|
||||
// Feed input
|
||||
for (i = 0; i < items; i++) {
|
||||
send(boxes[0], {val: i})
|
||||
}
|
||||
|
||||
// Process each stage
|
||||
for (j = 0; j < stages; j++) {
|
||||
msg = receive(boxes[j])
|
||||
while (msg) {
|
||||
send(boxes[j + 1], {val: msg.val * 2 + 1})
|
||||
msg = receive(boxes[j])
|
||||
}
|
||||
}
|
||||
|
||||
// Drain output
|
||||
var total = 0
|
||||
msg = receive(boxes[stages])
|
||||
while (msg) {
|
||||
total += msg.val
|
||||
msg = receive(boxes[stages])
|
||||
}
|
||||
|
||||
return total
|
||||
}
|
||||
|
||||
// Request-response pattern (simulate RPC)
|
||||
function request_response(n_requests) {
|
||||
var client_box = make_mailbox()
|
||||
var server_box = make_mailbox()
|
||||
var i = 0
|
||||
var req = null
|
||||
var resp = null
|
||||
var total = 0
|
||||
|
||||
for (i = 0; i < n_requests; i++) {
|
||||
// Client sends request
|
||||
send(server_box, {id: i, payload: i * 3, reply_to: client_box})
|
||||
|
||||
// Server processes
|
||||
req = receive(server_box)
|
||||
if (req) {
|
||||
send(req.reply_to, {id: req.id, result: req.payload * 2 + 1})
|
||||
}
|
||||
|
||||
// Client receives response
|
||||
resp = receive(client_box)
|
||||
if (resp) {
|
||||
total += resp.result
|
||||
}
|
||||
}
|
||||
|
||||
return total
|
||||
}
|
||||
|
||||
return {
|
||||
// Ping-pong: 10K rounds
|
||||
ping_pong_10k: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x += ping_pong(10000)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
// Fan-out: 100 receivers, 100 messages each
|
||||
fan_out_100x100: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x += fan_out(100, 100)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
// Fan-in: 100 senders, 100 messages each
|
||||
fan_in_100x100: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x += fan_in(100, 100)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
// Pipeline: 10 stages, 1000 items
|
||||
pipeline_10x1k: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x += pipeline(10, 1000)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
// Request-response: 5K requests
|
||||
rpc_5k: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x += request_response(5000)
|
||||
}
|
||||
return x
|
||||
}
|
||||
}
|
||||
141
benches/cli_tool.cm
Normal file
141
benches/cli_tool.cm
Normal file
@@ -0,0 +1,141 @@
|
||||
// cli_tool.cm — CLI tool simulation (macro benchmark)
|
||||
// Parse args + process data + transform + format output.
|
||||
// Simulates a realistic small utility program.
|
||||
|
||||
var json = use('json')
|
||||
|
||||
// Generate fake records
|
||||
function generate_records(n) {
|
||||
var records = []
|
||||
var x = 42
|
||||
var i = 0
|
||||
var status_vals = ["active", "inactive", "pending", "archived"]
|
||||
var dept_vals = ["eng", "sales", "ops", "hr", "marketing"]
|
||||
for (i = 0; i < n; i++) {
|
||||
x = ((x * 1103515245 + 12345) & 0x7FFFFFFF) | 0
|
||||
push(records, {
|
||||
id: i + 1,
|
||||
name: `user_${i}`,
|
||||
score: (x % 1000) / 10,
|
||||
status: status_vals[i % 4],
|
||||
department: dept_vals[i % 5]
|
||||
})
|
||||
}
|
||||
return records
|
||||
}
|
||||
|
||||
// Filter records by field value
|
||||
function filter_records(records, field, value) {
|
||||
var result = []
|
||||
var i = 0
|
||||
for (i = 0; i < length(records); i++) {
|
||||
if (records[i][field] == value) {
|
||||
push(result, records[i])
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// Group by a field
|
||||
function group_by(records, field) {
|
||||
var groups = {}
|
||||
var i = 0
|
||||
var key = null
|
||||
for (i = 0; i < length(records); i++) {
|
||||
key = records[i][field]
|
||||
if (!key) key = "unknown"
|
||||
if (!groups[key]) groups[key] = []
|
||||
push(groups[key], records[i])
|
||||
}
|
||||
return groups
|
||||
}
|
||||
|
||||
// Aggregate: compute stats per group
|
||||
function aggregate(groups) {
|
||||
var keys = array(groups)
|
||||
var result = []
|
||||
var i = 0
|
||||
var j = 0
|
||||
var grp = null
|
||||
var total = 0
|
||||
var mn = 0
|
||||
var mx = 0
|
||||
for (i = 0; i < length(keys); i++) {
|
||||
grp = groups[keys[i]]
|
||||
total = 0
|
||||
mn = 999999
|
||||
mx = 0
|
||||
for (j = 0; j < length(grp); j++) {
|
||||
total += grp[j].score
|
||||
if (grp[j].score < mn) mn = grp[j].score
|
||||
if (grp[j].score > mx) mx = grp[j].score
|
||||
}
|
||||
push(result, {
|
||||
group: keys[i],
|
||||
count: length(grp),
|
||||
average: total / length(grp),
|
||||
low: mn,
|
||||
high: mx
|
||||
})
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// Full pipeline: load → filter → sort → group → aggregate → encode
|
||||
function run_pipeline(n_records) {
|
||||
// Generate data
|
||||
var records = generate_records(n_records)
|
||||
|
||||
// Filter to active records
|
||||
var filtered = filter_records(records, "status", "active")
|
||||
|
||||
// Sort by score
|
||||
filtered = sort(filtered, "score")
|
||||
|
||||
// Limit to first 50
|
||||
if (length(filtered) > 50) {
|
||||
filtered = array(filtered, 0, 50)
|
||||
}
|
||||
|
||||
// Group and aggregate
|
||||
var groups = group_by(filtered, "department")
|
||||
var stats = aggregate(groups)
|
||||
stats = sort(stats, "average")
|
||||
|
||||
// Encode as JSON
|
||||
var output = json.encode(stats)
|
||||
|
||||
return length(output)
|
||||
}
|
||||
|
||||
return {
|
||||
// Small dataset (100 records)
|
||||
cli_pipeline_100: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x += run_pipeline(100)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
// Medium dataset (1000 records)
|
||||
cli_pipeline_1k: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x += run_pipeline(1000)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
// Large dataset (10K records)
|
||||
cli_pipeline_10k: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x += run_pipeline(10000)
|
||||
}
|
||||
return x
|
||||
}
|
||||
}
|
||||
162
benches/deltablue.cm
Normal file
162
benches/deltablue.cm
Normal file
@@ -0,0 +1,162 @@
|
||||
// deltablue.cm — Constraint solver kernel (DeltaBlue-inspired)
|
||||
// Dynamic dispatch, pointer chasing, object-heavy workload.
|
||||
|
||||
def REQUIRED = 0
|
||||
def STRONG = 1
|
||||
def NORMAL = 2
|
||||
def WEAK = 3
|
||||
def WEAKEST = 4
|
||||
|
||||
function make_variable(name, value) {
|
||||
return {
|
||||
name: name,
|
||||
value: value,
|
||||
constraints: [],
|
||||
determined_by: null,
|
||||
stay: true,
|
||||
mark: 0
|
||||
}
|
||||
}
|
||||
|
||||
function make_constraint(strength, variables, satisfy_fn) {
|
||||
return {
|
||||
strength: strength,
|
||||
variables: variables,
|
||||
satisfy: satisfy_fn,
|
||||
output: null
|
||||
}
|
||||
}
|
||||
|
||||
// Constraint propagation: simple forward solver
|
||||
function propagate(vars, constraints) {
|
||||
var changed = true
|
||||
var passes = 0
|
||||
var max_passes = length(constraints) * 3
|
||||
var i = 0
|
||||
var c = null
|
||||
var old_val = 0
|
||||
|
||||
while (changed && passes < max_passes) {
|
||||
changed = false
|
||||
passes++
|
||||
for (i = 0; i < length(constraints); i++) {
|
||||
c = constraints[i]
|
||||
old_val = c.output ? c.output.value : null
|
||||
c.satisfy(c)
|
||||
if (c.output && c.output.value != old_val) {
|
||||
changed = true
|
||||
}
|
||||
}
|
||||
}
|
||||
return passes
|
||||
}
|
||||
|
||||
// Build a chain of equality constraints: v[i] = v[i-1] + 1
|
||||
function build_chain(n) {
|
||||
var vars = []
|
||||
var constraints = []
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
push(vars, make_variable(`v${i}`, 0))
|
||||
}
|
||||
|
||||
// Set first variable
|
||||
vars[0].value = 1
|
||||
|
||||
var c = null
|
||||
for (i = 1; i < n; i++) {
|
||||
c = make_constraint(NORMAL, [vars[i - 1], vars[i]], function(self) {
|
||||
self.variables[1].value = self.variables[0].value + 1
|
||||
self.output = self.variables[1]
|
||||
})
|
||||
push(constraints, c)
|
||||
push(vars[i].constraints, c)
|
||||
}
|
||||
|
||||
return {vars: vars, constraints: constraints}
|
||||
}
|
||||
|
||||
// Build a projection: pairs of variables with scaling constraints
|
||||
function build_projection(n) {
|
||||
var src = []
|
||||
var dst = []
|
||||
var constraints = []
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
push(src, make_variable(`src${i}`, i * 10))
|
||||
push(dst, make_variable(`dst${i}`, 0))
|
||||
}
|
||||
|
||||
var scale_c = null
|
||||
for (i = 0; i < n; i++) {
|
||||
scale_c = make_constraint(STRONG, [src[i], dst[i]], function(self) {
|
||||
self.variables[1].value = self.variables[0].value * 2 + 1
|
||||
self.output = self.variables[1]
|
||||
})
|
||||
push(constraints, scale_c)
|
||||
push(dst[i].constraints, scale_c)
|
||||
}
|
||||
|
||||
return {src: src, dst: dst, constraints: constraints}
|
||||
}
|
||||
|
||||
// Edit constraint: repeatedly overwrite the first variable and re-propagate.
// Returns the total number of propagation passes across all edits.
function run_edits(system, edits) {
    var step = 0
    var pass_total = 0
    for (step = 0; step < edits; step++) {
        system.vars[0].value = step
        pass_total += propagate(system.vars, system.constraints)
    }
    return pass_total
}
|
||||
|
||||
return {
|
||||
// Chain of 100 variables, propagate
|
||||
chain_100: function(n) {
|
||||
var i = 0
|
||||
var chain = null
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
chain = build_chain(100)
|
||||
x += propagate(chain.vars, chain.constraints)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
// Chain of 500 variables, propagate
|
||||
chain_500: function(n) {
|
||||
var i = 0
|
||||
var chain = null
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
chain = build_chain(500)
|
||||
x += propagate(chain.vars, chain.constraints)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
// Projection of 100 pairs
|
||||
projection_100: function(n) {
|
||||
var i = 0
|
||||
var proj = null
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
proj = build_projection(100)
|
||||
x += propagate(proj.src, proj.constraints)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
// Edit and re-propagate (incremental update)
|
||||
chain_edit_100: function(n) {
|
||||
var chain = build_chain(100)
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
chain.vars[0].value = i
|
||||
x += propagate(chain.vars, chain.constraints)
|
||||
}
|
||||
return x
|
||||
}
|
||||
}
|
||||
126
benches/fibonacci.cm
Normal file
126
benches/fibonacci.cm
Normal file
@@ -0,0 +1,126 @@
|
||||
// fibonacci.cm — Fibonacci variants kernel
|
||||
// Tests recursion overhead, memoization patterns, iteration vs recursion.
|
||||
|
||||
// Naive recursive Fibonacci (exponential time) — measures raw call overhead.
function fib_naive(n) {
    return n <= 1 ? n : fib_naive(n - 1) + fib_naive(n - 2)
}
|
||||
|
||||
// Iterative Fibonacci (linear time); fib_iter(0) == 0, fib_iter(1) == 1.
function fib_iter(n) {
    var lo = 0
    var hi = 1
    var sum = 0
    var step = 0
    while (step < n) {
        sum = lo + hi
        lo = hi
        hi = sum
        step++
    }
    return lo
}
|
||||
|
||||
// Memoized recursive Fibonacci (tests object property lookup + recursion).
// Returns a closure over a private cache keyed by text(n).
function make_memo_fib() {
    var cache = {}
    var fib = function(n) {
        var key = text(n)
        var hit = cache[key]
        // fib(0) is 0, which is falsy: a plain `if (cache[key])` treated that
        // cached result as a miss and recomputed it on every call. Check for
        // presence explicitly instead. (Assumes a missing key reads as null,
        // matching the dialect's null-initialized variables — TODO confirm.)
        if (hit != null) return hit
        var result = null
        if (n <= 1) {
            result = n
        } else {
            result = fib(n - 1) + fib(n - 2)
        }
        cache[key] = result
        return result
    }
    return fib
}
|
||||
|
||||
// Continuation-passing-style Fibonacci — stresses closure creation.
// `cont` receives the final result; every partial sum flows through
// freshly-allocated continuations.
function fib_cps(n, cont) {
    if (n <= 1) return cont(n)
    // Compute fib(n-1), then fib(n-2), then combine via the continuation
    return fib_cps(n - 1, function(left) {
        return fib_cps(n - 2, function(right) {
            return cont(left + right)
        })
    })
}
|
||||
|
||||
// Matrix-exponentiation-style accumulator loop. NOTE: the odd-step update is
// deliberately wrong (see inline comment in the original) — this kernel exists
// to stress numeric ops, not to produce Fibonacci numbers. Do not "fix" it.
function fib_matrix(n) {
    var a = 1
    var b = 0
    var c = 0
    var d = 1
    var t1 = 0
    var t2 = 0
    var k = n
    while (k > 0) {
        if (k % 2 == 1) {
            t1 = a * d + b * c // wrong but stresses numeric ops
            t2 = b * d + a * c
            a = t1
            b = t2
        }
        // Square the (c, d) pair for the next bit of k
        t1 = c * c + d * d
        t2 = d * (2 * c + d)
        c = t1
        d = t2
        k = floor(k / 2)
    }
    return b
}
|
||||
|
||||
return {
|
||||
fib_naive_25: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) x += fib_naive(25)
|
||||
return x
|
||||
},
|
||||
|
||||
fib_naive_30: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) x += fib_naive(30)
|
||||
return x
|
||||
},
|
||||
|
||||
fib_iter_80: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) x += fib_iter(80)
|
||||
return x
|
||||
},
|
||||
|
||||
fib_memo_100: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
var fib = null
|
||||
for (i = 0; i < n; i++) {
|
||||
fib = make_memo_fib()
|
||||
x += fib(100)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
fib_cps_20: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
var identity = function(v) { return v }
|
||||
for (i = 0; i < n; i++) {
|
||||
x += fib_cps(20, identity)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
fib_matrix_80: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) x += fib_matrix(80)
|
||||
return x
|
||||
}
|
||||
}
|
||||
159
benches/hash_workload.cm
Normal file
159
benches/hash_workload.cm
Normal file
@@ -0,0 +1,159 @@
|
||||
// hash_workload.cm — Hash-heavy / word-count / map-reduce kernel
|
||||
// Stresses record (object) creation, property access, and string handling.
|
||||
|
||||
// Produce `count` words by cycling through a fixed lexicon — simulates
// text-processing input without any I/O.
function make_words(count) {
    var lexicon = [
        "the", "quick", "brown", "fox", "jumps", "over", "lazy", "dog",
        "and", "cat", "sat", "on", "mat", "with", "hat", "bat",
        "alpha", "beta", "gamma", "delta", "epsilon", "zeta", "eta", "theta",
        "hello", "world", "foo", "bar", "baz", "qux", "quux", "corge"
    ]
    var out = []
    var idx = 0
    for (idx = 0; idx < count; idx++) {
        push(out, lexicon[idx % length(lexicon)])
    }
    return out
}
|
||||
|
||||
// Word frequency count: returns a record mapping word -> occurrence count.
function word_count(words) {
    var freq = {}
    var idx = 0
    var word = null
    for (idx = 0; idx < length(words); idx++) {
        word = words[idx]
        // Counts start at 1, so a truthy check doubles as a presence check here
        freq[word] = freq[word] ? freq[word] + 1 : 1
    }
    return freq
}
|
||||
|
||||
// Find the top-N words by frequency. Builds {word, count} pairs, sorts them
// by "count" (ascending, per the original's tail-taking), and returns the
// last N pairs — i.e. the highest counts, lowest first.
function top_n(freq, n) {
    var names = array(freq)
    var pairs = []
    var idx = 0
    for (idx = 0; idx < length(names); idx++) {
        push(pairs, {word: names[idx], count: freq[names[idx]]})
    }
    var ranked = sort(pairs, "count")
    var first = length(ranked) - n
    if (first < 0) first = 0
    var winners = []
    for (idx = first; idx < length(ranked); idx++) {
        push(winners, ranked[idx])
    }
    return winners
}
|
||||
|
||||
// Histogram: bucket words by their length. Keys are the textual length
// ("3", "4", ...); values are arrays of the matching words in input order.
function group_by_length(words) {
    var groups = {}
    var idx = 0
    var word = null
    var bucket = null
    for (idx = 0; idx < length(words); idx++) {
        word = words[idx]
        bucket = text(length(word))
        if (!groups[bucket]) groups[bucket] = []
        push(groups[bucket], word)
    }
    return groups
}
|
||||
|
||||
// Hash-table stress kernel: insert / lookup / delete against a record with
// 512 distinct keys. Returns found - deleted + collisions so the work cannot
// be optimized away. Note the truthy checks: an entry whose stored value is 0
// reads as absent, which is part of the measured behavior.
function hash_table_ops(n) {
    var table = {}
    var idx = 0
    var key = null
    var collisions = 0
    var found = 0
    var deleted = 0

    // Insert phase: later writes overwrite earlier ones for the same key
    for (idx = 0; idx < n; idx++) {
        key = `key_${idx % 512}`
        if (table[key]) collisions++
        table[key] = idx
    }

    // Lookup phase
    for (idx = 0; idx < n; idx++) {
        key = `key_${idx % 512}`
        if (table[key]) found++
    }

    // Delete phase: every third index; only the first hit per key deletes
    for (idx = 0; idx < n; idx += 3) {
        key = `key_${idx % 512}`
        if (table[key]) {
            delete table[key]
            deleted++
        }
    }

    return found - deleted + collisions
}
|
||||
|
||||
// Shared word lists, built once at module load so the benchmarks below
// measure counting, not input generation.
var words_1k = make_words(1000)
var words_10k = make_words(10000)
|
||||
|
||||
return {
|
||||
// Word count on 1K words
|
||||
wordcount_1k: function(n) {
|
||||
var i = 0
|
||||
var freq = null
|
||||
for (i = 0; i < n; i++) {
|
||||
freq = word_count(words_1k)
|
||||
}
|
||||
return freq
|
||||
},
|
||||
|
||||
// Word count on 10K words
|
||||
wordcount_10k: function(n) {
|
||||
var i = 0
|
||||
var freq = null
|
||||
for (i = 0; i < n; i++) {
|
||||
freq = word_count(words_10k)
|
||||
}
|
||||
return freq
|
||||
},
|
||||
|
||||
// Word count + top-10 extraction
|
||||
wordcount_top10: function(n) {
|
||||
var i = 0
|
||||
var freq = null
|
||||
var top = null
|
||||
for (i = 0; i < n; i++) {
|
||||
freq = word_count(words_10k)
|
||||
top = top_n(freq, 10)
|
||||
}
|
||||
return top
|
||||
},
|
||||
|
||||
// Group words by length
|
||||
group_by_len: function(n) {
|
||||
var i = 0
|
||||
var groups = null
|
||||
for (i = 0; i < n; i++) {
|
||||
groups = group_by_length(words_10k)
|
||||
}
|
||||
return groups
|
||||
},
|
||||
|
||||
// Hash table insert/lookup/delete
|
||||
hash_table: function(n) {
|
||||
var i = 0
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x += hash_table_ops(2048)
|
||||
}
|
||||
return x
|
||||
}
|
||||
}
|
||||
167
benches/json_walk.cm
Normal file
167
benches/json_walk.cm
Normal file
@@ -0,0 +1,167 @@
|
||||
// json_walk.cm — JSON parse + walk + serialize kernel
|
||||
// Stresses strings, records, arrays, and recursive traversal.
|
||||
|
||||
var json = use('json')
|
||||
|
||||
// Build a nested record tree. At depth 0 the node is a flat record of
// `breadth` numeric fields; otherwise it holds `breadth` child subtrees plus
// two scalar fields (value, name).
function make_nested_object(depth, breadth) {
    var node = {}
    var idx = 0
    var key = null

    if (depth <= 0) {
        // Leaf: key_0..key_{breadth-1} -> idx * 3.14
        for (idx = 0; idx < breadth; idx++) {
            key = `key_${idx}`
            node[key] = idx * 3.14
        }
        return node
    }

    // Interior node: recurse one level down per child
    for (idx = 0; idx < breadth; idx++) {
        key = `node_${idx}`
        node[key] = make_nested_object(depth - 1, breadth)
    }
    node.value = depth
    node.name = `level_${depth}`
    return node
}
|
||||
|
||||
// Build an array of `size` record fixtures with mixed field types
// (number, text, boolean, float, nested array).
function make_array_data(size) {
    var records = []
    var idx = 0
    for (idx = 0; idx < size; idx++) {
        push(records, {
            id: idx,
            name: `item_${idx}`,
            active: idx % 2 == 0,
            score: idx * 1.5,
            tags: [`tag_${idx % 5}`, `tag_${(idx + 1) % 5}`]
        })
    }
    return records
}
|
||||
|
||||
// Recursively count container nodes (objects and arrays) in a tree.
// The node itself counts as 1; scalar leaves are not counted separately.
function walk_count(obj) {
    var total = 1
    var names = null
    var idx = 0
    var child = null
    if (is_object(obj)) {
        names = array(obj)
        for (idx = 0; idx < length(names); idx++) {
            child = obj[names[idx]]
            if (is_object(child) || is_array(child)) {
                total += walk_count(child)
            }
        }
    } else if (is_array(obj)) {
        for (idx = 0; idx < length(obj); idx++) {
            child = obj[idx]
            if (is_object(child) || is_array(child)) {
                total += walk_count(child)
            }
        }
    }
    return total
}
|
||||
|
||||
// Recursively sum every numeric value found anywhere in a tree of
// objects and arrays. Non-numeric, non-container leaves are ignored.
function walk_sum(obj) {
    var total = 0
    var names = null
    var idx = 0
    var child = null
    if (is_object(obj)) {
        names = array(obj)
        for (idx = 0; idx < length(names); idx++) {
            child = obj[names[idx]]
            if (is_number(child)) {
                total += child
            } else if (is_object(child) || is_array(child)) {
                total += walk_sum(child)
            }
        }
    } else if (is_array(obj)) {
        for (idx = 0; idx < length(obj); idx++) {
            child = obj[idx]
            if (is_number(child)) {
                total += child
            } else if (is_object(child) || is_array(child)) {
                total += walk_sum(child)
            }
        }
    }
    return total
}
|
||||
|
||||
// Pre-build test data once at module load: every benchmark below decodes or
// encodes the same fixtures, so data generation itself is never timed.
var nested_obj = make_nested_object(3, 4)
var nested_json = json.encode(nested_obj)
var array_data = make_array_data(200)
var array_json = json.encode(array_data)
|
||||
|
||||
return {
|
||||
// Parse nested JSON
|
||||
json_parse_nested: function(n) {
|
||||
var i = 0
|
||||
var obj = null
|
||||
for (i = 0; i < n; i++) {
|
||||
obj = json.decode(nested_json)
|
||||
}
|
||||
return obj
|
||||
},
|
||||
|
||||
// Parse array-of-records JSON
|
||||
json_parse_array: function(n) {
|
||||
var i = 0
|
||||
var arr = null
|
||||
for (i = 0; i < n; i++) {
|
||||
arr = json.decode(array_json)
|
||||
}
|
||||
return arr
|
||||
},
|
||||
|
||||
// Encode nested object to JSON
|
||||
json_encode_nested: function(n) {
|
||||
var i = 0
|
||||
var s = null
|
||||
for (i = 0; i < n; i++) {
|
||||
s = json.encode(nested_obj)
|
||||
}
|
||||
return s
|
||||
},
|
||||
|
||||
// Encode array to JSON
|
||||
json_encode_array: function(n) {
|
||||
var i = 0
|
||||
var s = null
|
||||
for (i = 0; i < n; i++) {
|
||||
s = json.encode(array_data)
|
||||
}
|
||||
return s
|
||||
},
|
||||
|
||||
// Parse + walk + count
|
||||
json_roundtrip_walk: function(n) {
|
||||
var i = 0
|
||||
var obj = null
|
||||
var count = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
obj = json.decode(nested_json)
|
||||
count += walk_count(obj)
|
||||
}
|
||||
return count
|
||||
},
|
||||
|
||||
// Parse + sum all numbers + re-encode
|
||||
json_roundtrip_full: function(n) {
|
||||
var i = 0
|
||||
var obj = null
|
||||
var sum = 0
|
||||
var out = null
|
||||
for (i = 0; i < n; i++) {
|
||||
obj = json.decode(array_json)
|
||||
sum += walk_sum(obj)
|
||||
out = json.encode(obj)
|
||||
}
|
||||
return sum
|
||||
}
|
||||
}
|
||||
@@ -1,24 +1,24 @@
|
||||
// micro_ops.bench.ce (or .cm depending on your convention)
|
||||
// micro_ops.cm — microbenchmarks for core operations
|
||||
|
||||
// Note: We use a function-local sink in each benchmark to avoid cross-contamination
|
||||
function blackhole(sink, x) {
|
||||
// Prevent dead-code elimination
|
||||
return (sink + (x | 0)) | 0
|
||||
}
|
||||
|
||||
function make_obj_xy(x, y) {
|
||||
return { x, y }
|
||||
return {x: x, y: y}
|
||||
}
|
||||
|
||||
function make_obj_yx(x, y) {
|
||||
// Different insertion order to force a different shape in many engines
|
||||
return { y, x }
|
||||
// Different insertion order to force a different shape
|
||||
return {y: y, x: x}
|
||||
}
|
||||
|
||||
function make_shapes(n) {
|
||||
var out = []
|
||||
for (var i = 0; i < n; i++) {
|
||||
var o = { a: i }
|
||||
var i = 0
|
||||
var o = null
|
||||
for (i = 0; i < n; i++) {
|
||||
o = {a: i}
|
||||
o[`p${i}`] = i
|
||||
push(out, o)
|
||||
}
|
||||
@@ -27,13 +27,15 @@ function make_shapes(n) {
|
||||
|
||||
function make_packed_array(n) {
|
||||
var a = []
|
||||
for (var i = 0; i < n; i++) push(a, i)
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) push(a, i)
|
||||
return a
|
||||
}
|
||||
|
||||
function make_holey_array(n) {
|
||||
var a = []
|
||||
for (var i = 0; i < n; i += 2) a[i] = i
|
||||
var i = 0
|
||||
for (i = 0; i < n; i += 2) a[i] = i
|
||||
return a
|
||||
}
|
||||
|
||||
@@ -41,7 +43,8 @@ return {
|
||||
// 0) Baseline loop cost
|
||||
loop_empty: function(n) {
|
||||
var sink = 0
|
||||
for (var i = 0; i < n; i++) {}
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {}
|
||||
return blackhole(sink, n)
|
||||
},
|
||||
|
||||
@@ -49,35 +52,40 @@ return {
|
||||
i32_add: function(n) {
|
||||
var sink = 0
|
||||
var x = 1
|
||||
for (var i = 0; i < n; i++) x = (x + 3) | 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) x = (x + 3) | 0
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
f64_add: function(n) {
|
||||
var sink = 0
|
||||
var x = 1.0
|
||||
for (var i = 0; i < n; i++) x = x + 3.14159
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) x = x + 3.14159
|
||||
return blackhole(sink, x | 0)
|
||||
},
|
||||
|
||||
mixed_add: function(n) {
|
||||
var sink = 0
|
||||
var x = 1
|
||||
for (var i = 0; i < n; i++) x = x + 0.25
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) x = x + 0.25
|
||||
return blackhole(sink, x | 0)
|
||||
},
|
||||
|
||||
bit_ops: function(n) {
|
||||
var sink = 0
|
||||
var x = 0x12345678
|
||||
for (var i = 0; i < n; i++) x = ((x << 5) ^ (x >>> 3)) | 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) x = ((x << 5) ^ (x >>> 3)) | 0
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
overflow_path: function(n) {
|
||||
var sink = 0
|
||||
var x = 0x70000000
|
||||
for (var i = 0; i < n; i++) x = (x + 0x10000000) | 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) x = (x + 0x10000000) | 0
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
@@ -85,7 +93,8 @@ return {
|
||||
branch_predictable: function(n) {
|
||||
var sink = 0
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
if ((i & 7) != 0) x++
|
||||
else x += 2
|
||||
}
|
||||
@@ -95,7 +104,8 @@ return {
|
||||
branch_alternating: function(n) {
|
||||
var sink = 0
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
if ((i & 1) == 0) x++
|
||||
else x += 2
|
||||
}
|
||||
@@ -105,29 +115,47 @@ return {
|
||||
// 3) Calls
|
||||
call_direct: function(n) {
|
||||
var sink = 0
|
||||
function f(a) { return (a + 1) | 0 }
|
||||
var f = function(a) { return (a + 1) | 0 }
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) x = f(x)
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) x = f(x)
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
call_indirect: function(n) {
|
||||
var sink = 0
|
||||
function f(a) { return (a + 1) | 0 }
|
||||
var f = function(a) { return (a + 1) | 0 }
|
||||
var g = f
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) x = g(x)
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) x = g(x)
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
call_closure: function(n) {
|
||||
var sink = 0
|
||||
function make_adder(k) {
|
||||
var make_adder = function(k) {
|
||||
return function(a) { return (a + k) | 0 }
|
||||
}
|
||||
var add3 = make_adder(3)
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) x = add3(x)
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) x = add3(x)
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
call_multi_arity: function(n) {
|
||||
var sink = 0
|
||||
var f0 = function() { return 1 }
|
||||
var f1 = function(a) { return a + 1 }
|
||||
var f2 = function(a, b) { return a + b }
|
||||
var f3 = function(a, b, c) { return a + b + c }
|
||||
var f4 = function(a, b, c, d) { return a + b + c + d }
|
||||
var x = 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x = (x + f0() + f1(i) + f2(i, 1) + f3(i, 1, 2) + f4(i, 1, 2, 3)) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
@@ -136,7 +164,8 @@ return {
|
||||
var sink = 0
|
||||
var o = make_obj_xy(1, 2)
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) x = (x + o.x) | 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) x = (x + o.x) | 0
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
@@ -145,20 +174,38 @@ return {
|
||||
var a = make_obj_xy(1, 2)
|
||||
var b = make_obj_yx(1, 2)
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
var o = (i & 1) == 0 ? a : b
|
||||
var i = 0
|
||||
var o = null
|
||||
for (i = 0; i < n; i++) {
|
||||
o = (i & 1) == 0 ? a : b
|
||||
x = (x + o.x) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
prop_read_poly_4: function(n) {
|
||||
var sink = 0
|
||||
var shapes = [
|
||||
{x: 1, y: 2},
|
||||
{y: 2, x: 1},
|
||||
{x: 1, z: 3, y: 2},
|
||||
{w: 0, x: 1, y: 2}
|
||||
]
|
||||
var x = 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x = (x + shapes[i & 3].x) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
prop_read_mega: function(n) {
|
||||
var sink = 0
|
||||
var objs = make_shapes(32)
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
var o = objs[i & 31]
|
||||
x = (x + o.a) | 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x = (x + objs[i & 31].a) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
@@ -166,7 +213,8 @@ return {
|
||||
prop_write_mono: function(n) {
|
||||
var sink = 0
|
||||
var o = make_obj_xy(1, 2)
|
||||
for (var i = 0; i < n; i++) o.x = (o.x + 1) | 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) o.x = (o.x + 1) | 0
|
||||
return blackhole(sink, o.x)
|
||||
},
|
||||
|
||||
@@ -175,14 +223,16 @@ return {
|
||||
var sink = 0
|
||||
var a = make_packed_array(1024)
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) x = (x + a[i & 1023]) | 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) x = (x + a[i & 1023]) | 0
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
array_write_packed: function(n) {
|
||||
var sink = 0
|
||||
var a = make_packed_array(1024)
|
||||
for (var i = 0; i < n; i++) a[i & 1023] = i
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) a[i & 1023] = i
|
||||
return blackhole(sink, a[17] | 0)
|
||||
},
|
||||
|
||||
@@ -190,9 +240,10 @@ return {
|
||||
var sink = 0
|
||||
var a = make_holey_array(2048)
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
var v = a[(i & 2047)]
|
||||
// If "missing" is a special value in your language, this stresses that path too
|
||||
var i = 0
|
||||
var v = null
|
||||
for (i = 0; i < n; i++) {
|
||||
v = a[(i & 2047)]
|
||||
if (v) x = (x + v) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
@@ -201,21 +252,97 @@ return {
|
||||
array_push_steady: function(n) {
|
||||
var sink = 0
|
||||
var x = 0
|
||||
for (var j = 0; j < n; j++) {
|
||||
var a = []
|
||||
for (var i = 0; i < 256; i++) push(a, i)
|
||||
var j = 0
|
||||
var i = 0
|
||||
var a = null
|
||||
for (j = 0; j < n; j++) {
|
||||
a = []
|
||||
for (i = 0; i < 256; i++) push(a, i)
|
||||
x = (x + length(a)) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
array_push_pop: function(n) {
|
||||
var sink = 0
|
||||
var a = []
|
||||
var x = 0
|
||||
var i = 0
|
||||
var v = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
push(a, i)
|
||||
if (length(a) > 64) {
|
||||
v = pop(a)
|
||||
x = (x + v) | 0
|
||||
}
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
array_indexed_sum: function(n) {
|
||||
var sink = 0
|
||||
var a = make_packed_array(1024)
|
||||
var x = 0
|
||||
var j = 0
|
||||
var i = 0
|
||||
for (j = 0; j < n; j++) {
|
||||
x = 0
|
||||
for (i = 0; i < 1024; i++) {
|
||||
x = (x + a[i]) | 0
|
||||
}
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
// 6) Strings
|
||||
string_concat_small: function(n) {
|
||||
var sink = 0
|
||||
var x = 0
|
||||
for (var j = 0; j < n; j++) {
|
||||
var s = ""
|
||||
for (var i = 0; i < 16; i++) s = s + "x"
|
||||
var j = 0
|
||||
var i = 0
|
||||
var s = null
|
||||
for (j = 0; j < n; j++) {
|
||||
s = ""
|
||||
for (i = 0; i < 16; i++) s = s + "x"
|
||||
x = (x + length(s)) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
string_concat_medium: function(n) {
|
||||
var sink = 0
|
||||
var x = 0
|
||||
var j = 0
|
||||
var i = 0
|
||||
var s = null
|
||||
for (j = 0; j < n; j++) {
|
||||
s = ""
|
||||
for (i = 0; i < 100; i++) s = s + "abcdefghij"
|
||||
x = (x + length(s)) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
string_interpolation: function(n) {
|
||||
var sink = 0
|
||||
var x = 0
|
||||
var i = 0
|
||||
var s = null
|
||||
for (i = 0; i < n; i++) {
|
||||
s = `item_${i}_value_${i * 2}`
|
||||
x = (x + length(s)) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
string_slice: function(n) {
|
||||
var sink = 0
|
||||
var base = "the quick brown fox jumps over the lazy dog"
|
||||
var x = 0
|
||||
var i = 0
|
||||
var s = null
|
||||
for (i = 0; i < n; i++) {
|
||||
s = text(base, i % 10, i % 10 + 10)
|
||||
x = (x + length(s)) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
@@ -225,8 +352,10 @@ return {
|
||||
alloc_tiny_objects: function(n) {
|
||||
var sink = 0
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
var o = { a: i, b: i + 1, c: i + 2 }
|
||||
var i = 0
|
||||
var o = null
|
||||
for (i = 0; i < n; i++) {
|
||||
o = {a: i, b: i + 1, c: i + 2}
|
||||
x = (x + o.b) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
@@ -235,9 +364,12 @@ return {
|
||||
alloc_linked_list: function(n) {
|
||||
var sink = 0
|
||||
var head = null
|
||||
for (var i = 0; i < n; i++) head = { v: i, next: head }
|
||||
var i = 0
|
||||
var x = 0
|
||||
var p = head
|
||||
var p = null
|
||||
for (i = 0; i < n; i++) head = {v: i, next: head}
|
||||
x = 0
|
||||
p = head
|
||||
while (p) {
|
||||
x = (x + p.v) | 0
|
||||
p = p.next
|
||||
@@ -245,18 +377,118 @@ return {
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
// 8) meme-specific (adapt these to your exact semantics)
|
||||
meme_clone_read: function(n) {
|
||||
// If meme(obj) clones like Object.create / prototypal clone, this hits it hard.
|
||||
// Replace with your exact meme call form.
|
||||
alloc_arrays: function(n) {
|
||||
var sink = 0
|
||||
var base = { x: 1, y: 2 }
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
var o = meme(base)
|
||||
var i = 0
|
||||
var a = null
|
||||
for (i = 0; i < n; i++) {
|
||||
a = [i, i + 1, i + 2, i + 3]
|
||||
x = (x + a[2]) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
alloc_short_lived: function(n) {
|
||||
var sink = 0
|
||||
var x = 0
|
||||
var i = 0
|
||||
var o = null
|
||||
// Allocate objects that immediately become garbage
|
||||
for (i = 0; i < n; i++) {
|
||||
o = {val: i, data: {inner: i + 1}}
|
||||
x = (x + o.data.inner) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
alloc_long_lived_pressure: function(n) {
|
||||
var sink = 0
|
||||
var store = []
|
||||
var x = 0
|
||||
var i = 0
|
||||
var o = null
|
||||
// Keep first 1024 objects alive, churn the rest
|
||||
for (i = 0; i < n; i++) {
|
||||
o = {val: i, data: i * 2}
|
||||
if (i < 1024) {
|
||||
push(store, o)
|
||||
}
|
||||
x = (x + o.data) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
// 8) Meme (prototype clone)
|
||||
meme_clone_read: function(n) {
|
||||
var sink = 0
|
||||
var base = {x: 1, y: 2}
|
||||
var x = 0
|
||||
var i = 0
|
||||
var o = null
|
||||
for (i = 0; i < n; i++) {
|
||||
o = meme(base)
|
||||
x = (x + o.x) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
// 9) Guard / type check paths
|
||||
guard_hot_number: function(n) {
|
||||
// Monomorphic number path — guards should hoist
|
||||
var sink = 0
|
||||
var x = 1
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) x = x + 1
|
||||
return blackhole(sink, x | 0)
|
||||
},
|
||||
|
||||
guard_mixed_types: function(n) {
|
||||
// Alternating number/text — guards must stay
|
||||
var sink = 0
|
||||
var vals = [1, "a", 2, "b", 3, "c", 4, "d"]
|
||||
var x = 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
if (is_number(vals[i & 7])) x = (x + vals[i & 7]) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
// 10) Reduce / higher-order
|
||||
reduce_sum: function(n) {
|
||||
var sink = 0
|
||||
var a = make_packed_array(256)
|
||||
var x = 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x = (x + reduce(a, function(acc, v) { return acc + v }, 0)) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
filter_evens: function(n) {
|
||||
var sink = 0
|
||||
var a = make_packed_array(256)
|
||||
var x = 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x = (x + length(filter(a, function(v) { return v % 2 == 0 }))) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
arrfor_sum: function(n) {
|
||||
var sink = 0
|
||||
var a = make_packed_array(256)
|
||||
var x = 0
|
||||
var i = 0
|
||||
var sum = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
sum = 0
|
||||
arrfor(a, function(v) { sum += v })
|
||||
x = (x + sum) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
}
|
||||
}
|
||||
|
||||
249
benches/module_load.cm
Normal file
249
benches/module_load.cm
Normal file
@@ -0,0 +1,249 @@
|
||||
// module_load.cm — Module loading simulation (macro benchmark)
|
||||
// Simulates parsing many small modules, linking, and running.
|
||||
// Tests the "build scenario" pattern.
|
||||
|
||||
var json = use('json')
|
||||
|
||||
// Simulate a small module: parse token stream + build AST + evaluate
|
||||
// Split source text into a flat token list. Whitespace separates tokens;
// the listed punctuation characters are emitted as single-character tokens;
// everything else accumulates into the current word.
function tokenize(src) {
    var toks = []
    var cs = array(src)
    var word = ""
    var idx = 0
    var c = null
    var is_punct = null

    is_punct = function(ch) {
        return ch == "(" || ch == ")" || ch == "+" || ch == "-"
            || ch == "*" || ch == "=" || ch == ";" || ch == ","
    }

    for (idx = 0; idx < length(cs); idx++) {
        c = cs[idx]
        if (c == " " || c == "\n" || c == "\t") {
            // Whitespace: flush any pending word
            if (length(word) > 0) {
                push(toks, word)
                word = ""
            }
        } else if (is_punct(c)) {
            // Punctuation: flush the pending word, then emit the symbol itself
            if (length(word) > 0) {
                push(toks, word)
                word = ""
            }
            push(toks, c)
        } else {
            word = word + c
        }
    }
    // Flush a trailing word with no terminator
    if (length(word) > 0) push(toks, word)
    return toks
}
|
||||
|
||||
// Build a flat AST from a token list. Recognizes four node shapes:
// decl ("var"/"def" name = value), return, func (name only; params skipped),
// and expr (any other token).
function parse_tokens(tokens) {
    var ast = []
    var idx = 0
    var tok = null
    var node = null
    for (idx = 0; idx < length(tokens); idx++) {
        tok = tokens[idx]
        if (tok == "var" || tok == "def") {
            node = {type: "decl", kind: tok, name: null, value: null}
            idx++
            if (idx < length(tokens)) node.name = tokens[idx]
            idx++ // lands on "="
            idx++ // lands on the initializer
            if (idx < length(tokens)) node.value = tokens[idx]
            push(ast, node)
        } else if (tok == "return") {
            node = {type: "return", value: null}
            idx++
            if (idx < length(tokens)) node.value = tokens[idx]
            push(ast, node)
        } else if (tok == "function") {
            node = {type: "func", name: null, body: []}
            idx++
            if (idx < length(tokens)) node.name = tokens[idx]
            // Consume tokens up to the closing paren of the parameter list
            while (idx < length(tokens) && tokens[idx] != ")") idx++
            push(ast, node)
        } else {
            push(ast, {type: "expr", value: tok})
        }
    }
    return ast
}
|
||||
|
||||
// Evaluate a parsed module against a symbol table: decls and funcs bind
// names in `env`; the last "return" node's (resolved) value is the result.
function evaluate(ast, env) {
    var result = null
    var idx = 0
    var node = null
    for (idx = 0; idx < length(ast); idx++) {
        node = ast[idx]
        if (node.type == "decl") {
            env[node.name] = node.value
        } else if (node.type == "return") {
            result = node.value
            // Resolve one level through the environment when bound
            if (env[result]) result = env[result]
        } else if (node.type == "func") {
            env[node.name] = node
        }
    }
    return result
}
|
||||
|
||||
// Generate the source text of a fake module: an id, `dep_count` use() lines,
// two derived values, a compute function, and a trailing return.
function generate_module(id, dep_count) {
    var idx = 0
    var src = `var _id = ${text(id)};\n`
    for (idx = 0; idx < dep_count; idx++) {
        src = src + `var dep${text(idx)} = use(mod_${text(idx)});\n`
    }
    src = src + `var x = ${text(id * 17)};\n`
    src = src + `var y = ${text(id * 31)};\n`
    src = src + "function compute(a, b) { return a + b; }\n"
    src = src + "var result = compute(x, y);\n"
    src = src + "return result;\n"
    return src
}
|
||||
|
||||
// Simulate loading N modules with dependency chains: generate all sources,
// then for each module tokenize -> parse -> evaluate, wiring already-loaded
// results into the environment. Returns summary stats for the build.
function simulate_build(n_modules, deps_per_module) {
    var sources = []
    var loaded = {}
    var idx = 0
    var dep = 0
    var src = null
    var toks = null
    var ast = null
    var env = null
    var result = null
    var total_tokens = 0
    var total_nodes = 0

    // Generate every module's source up front
    for (idx = 0; idx < n_modules; idx++) {
        src = generate_module(idx, deps_per_module)
        push(sources, src)
    }

    // "Load" each module in order
    for (idx = 0; idx < n_modules; idx++) {
        toks = tokenize(sources[idx])
        total_tokens += length(toks)

        ast = parse_tokens(toks)
        total_nodes += length(ast)

        env = {}
        // Resolve dependencies: only modules with a lower index exist yet
        for (dep = 0; dep < deps_per_module; dep++) {
            if (dep < idx) {
                env["dep" + text(dep)] = loaded["mod_" + text(dep)]
            }
        }

        result = evaluate(ast, env)
        loaded["mod_" + text(idx)] = result
    }

    return {
        modules: n_modules,
        total_tokens: total_tokens,
        total_nodes: total_nodes,
        last_result: result
    }
}
|
||||
|
||||
// Dependency graph analysis via Kahn's algorithm. Module i depends on
// modules j < i (up to deps_per_module of them); returns the module names
// in a valid topological order.
function topo_sort(n_modules, deps_per_module) {
    var edges = {}
    var in_degree = {}
    var idx = 0
    var dep_idx = 0
    var name = null
    var dep_name = null

    // One node per module, no edges yet
    for (idx = 0; idx < n_modules; idx++) {
        name = "mod_" + text(idx)
        edges[name] = []
        in_degree[name] = 0
    }

    // Add edge dep -> module for every dependency
    for (idx = 0; idx < n_modules; idx++) {
        name = "mod_" + text(idx)
        for (dep_idx = 0; dep_idx < deps_per_module; dep_idx++) {
            if (dep_idx < idx) {
                dep_name = "mod_" + text(dep_idx)
                push(edges[dep_name], name)
                in_degree[name] = in_degree[name] + 1
            }
        }
    }

    // Kahn's algorithm: seed the queue with zero-in-degree nodes
    var queue = []
    var node_names = array(in_degree)
    for (idx = 0; idx < length(node_names); idx++) {
        if (in_degree[node_names[idx]] == 0) push(queue, node_names[idx])
    }

    // Drain the queue with a moving head index (no shift/dequeue needed)
    var order = []
    var current = null
    var targets = null
    var head = 0
    while (head < length(queue)) {
        current = queue[head]
        head++
        push(order, current)
        targets = edges[current]
        if (targets) {
            for (idx = 0; idx < length(targets); idx++) {
                in_degree[targets[idx]] = in_degree[targets[idx]] - 1
                if (in_degree[targets[idx]] == 0) push(queue, targets[idx])
            }
        }
    }

    return order
}
|
||||
|
||||
return {
|
||||
// Small build: 50 modules, 3 deps each
|
||||
build_50: function(n) {
|
||||
var i = 0
|
||||
var result = null
|
||||
for (i = 0; i < n; i++) {
|
||||
result = simulate_build(50, 3)
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
// Medium build: 200 modules, 5 deps each
|
||||
build_200: function(n) {
|
||||
var i = 0
|
||||
var result = null
|
||||
for (i = 0; i < n; i++) {
|
||||
result = simulate_build(200, 5)
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
// Large build: 500 modules, 5 deps each
|
||||
build_500: function(n) {
|
||||
var i = 0
|
||||
var result = null
|
||||
for (i = 0; i < n; i++) {
|
||||
result = simulate_build(500, 5)
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
// Topo sort of 500 module dependency graph
|
||||
topo_sort_500: function(n) {
|
||||
var i = 0
|
||||
var order = null
|
||||
for (i = 0; i < n; i++) {
|
||||
order = topo_sort(500, 5)
|
||||
}
|
||||
return order
|
||||
}
|
||||
}
|
||||
160
benches/nbody.cm
Normal file
160
benches/nbody.cm
Normal file
@@ -0,0 +1,160 @@
|
||||
// nbody.cm — N-body gravitational simulation kernel
|
||||
// Pure numeric + allocation workload. Classic VM benchmark.
|
||||
|
||||
var math = use('math/radians')
|
||||
|
||||
def PI = 3.141592653589793
|
||||
def SOLAR_MASS = 4 * PI * PI
|
||||
def DAYS_PER_YEAR = 365.24
|
||||
|
||||
function make_system() {
|
||||
// Sun + 4 Jovian planets
|
||||
var sun = {x: 0, y: 0, z: 0, vx: 0, vy: 0, vz: 0, mass: SOLAR_MASS}
|
||||
|
||||
var jupiter = {
|
||||
x: 4.84143144246472090,
|
||||
y: -1.16032004402742839,
|
||||
z: -0.103622044471123109,
|
||||
vx: 0.00166007664274403694 * DAYS_PER_YEAR,
|
||||
vy: 0.00769901118419740425 * DAYS_PER_YEAR,
|
||||
vz: -0.0000690460016972063023 * DAYS_PER_YEAR,
|
||||
mass: 0.000954791938424326609 * SOLAR_MASS
|
||||
}
|
||||
|
||||
var saturn = {
|
||||
x: 8.34336671824457987,
|
||||
y: 4.12479856412430479,
|
||||
z: -0.403523417114321381,
|
||||
vx: -0.00276742510726862411 * DAYS_PER_YEAR,
|
||||
vy: 0.00499852801234917238 * DAYS_PER_YEAR,
|
||||
vz: 0.0000230417297573763929 * DAYS_PER_YEAR,
|
||||
mass: 0.000285885980666130812 * SOLAR_MASS
|
||||
}
|
||||
|
||||
var uranus = {
|
||||
x: 12.8943695621391310,
|
||||
y: -15.1111514016986312,
|
||||
z: -0.223307578892655734,
|
||||
vx: 0.00296460137564761618 * DAYS_PER_YEAR,
|
||||
vy: 0.00237847173959480950 * DAYS_PER_YEAR,
|
||||
vz: -0.0000296589568540237556 * DAYS_PER_YEAR,
|
||||
mass: 0.0000436624404335156298 * SOLAR_MASS
|
||||
}
|
||||
|
||||
var neptune = {
|
||||
x: 15.3796971148509165,
|
||||
y: -25.9193146099879641,
|
||||
z: 0.179258772950371181,
|
||||
vx: 0.00268067772490389322 * DAYS_PER_YEAR,
|
||||
vy: 0.00162824170038242295 * DAYS_PER_YEAR,
|
||||
vz: -0.0000951592254519715870 * DAYS_PER_YEAR,
|
||||
mass: 0.0000515138902046611451 * SOLAR_MASS
|
||||
}
|
||||
|
||||
var bodies = [sun, jupiter, saturn, uranus, neptune]
|
||||
|
||||
// Offset momentum
|
||||
var px = 0
|
||||
var py = 0
|
||||
var pz = 0
|
||||
var i = 0
|
||||
for (i = 0; i < length(bodies); i++) {
|
||||
px += bodies[i].vx * bodies[i].mass
|
||||
py += bodies[i].vy * bodies[i].mass
|
||||
pz += bodies[i].vz * bodies[i].mass
|
||||
}
|
||||
sun.vx = -px / SOLAR_MASS
|
||||
sun.vy = -py / SOLAR_MASS
|
||||
sun.vz = -pz / SOLAR_MASS
|
||||
|
||||
return bodies
|
||||
}
|
||||
|
||||
function advance(bodies, dt) {
|
||||
var n = length(bodies)
|
||||
var i = 0
|
||||
var j = 0
|
||||
var bi = null
|
||||
var bj = null
|
||||
var dx = 0
|
||||
var dy = 0
|
||||
var dz = 0
|
||||
var dist_sq = 0
|
||||
var dist = 0
|
||||
var mag = 0
|
||||
|
||||
for (i = 0; i < n; i++) {
|
||||
bi = bodies[i]
|
||||
for (j = i + 1; j < n; j++) {
|
||||
bj = bodies[j]
|
||||
dx = bi.x - bj.x
|
||||
dy = bi.y - bj.y
|
||||
dz = bi.z - bj.z
|
||||
dist_sq = dx * dx + dy * dy + dz * dz
|
||||
dist = math.sqrt(dist_sq)
|
||||
mag = dt / (dist_sq * dist)
|
||||
|
||||
bi.vx -= dx * bj.mass * mag
|
||||
bi.vy -= dy * bj.mass * mag
|
||||
bi.vz -= dz * bj.mass * mag
|
||||
bj.vx += dx * bi.mass * mag
|
||||
bj.vy += dy * bi.mass * mag
|
||||
bj.vz += dz * bi.mass * mag
|
||||
}
|
||||
}
|
||||
|
||||
for (i = 0; i < n; i++) {
|
||||
bi = bodies[i]
|
||||
bi.x += dt * bi.vx
|
||||
bi.y += dt * bi.vy
|
||||
bi.z += dt * bi.vz
|
||||
}
|
||||
}
|
||||
|
||||
function energy(bodies) {
|
||||
var e = 0
|
||||
var n = length(bodies)
|
||||
var i = 0
|
||||
var j = 0
|
||||
var bi = null
|
||||
var bj = null
|
||||
var dx = 0
|
||||
var dy = 0
|
||||
var dz = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
bi = bodies[i]
|
||||
e += 0.5 * bi.mass * (bi.vx * bi.vx + bi.vy * bi.vy + bi.vz * bi.vz)
|
||||
for (j = i + 1; j < n; j++) {
|
||||
bj = bodies[j]
|
||||
dx = bi.x - bj.x
|
||||
dy = bi.y - bj.y
|
||||
dz = bi.z - bj.z
|
||||
e -= (bi.mass * bj.mass) / math.sqrt(dx * dx + dy * dy + dz * dz)
|
||||
}
|
||||
}
|
||||
return e
|
||||
}
|
||||
|
||||
return {
|
||||
nbody_1k: function(n) {
|
||||
var i = 0
|
||||
var j = 0
|
||||
var bodies = null
|
||||
for (i = 0; i < n; i++) {
|
||||
bodies = make_system()
|
||||
for (j = 0; j < 1000; j++) advance(bodies, 0.01)
|
||||
energy(bodies)
|
||||
}
|
||||
},
|
||||
|
||||
nbody_10k: function(n) {
|
||||
var i = 0
|
||||
var j = 0
|
||||
var bodies = null
|
||||
for (i = 0; i < n; i++) {
|
||||
bodies = make_system()
|
||||
for (j = 0; j < 10000; j++) advance(bodies, 0.01)
|
||||
energy(bodies)
|
||||
}
|
||||
}
|
||||
}
|
||||
154
benches/ray_tracer.cm
Normal file
154
benches/ray_tracer.cm
Normal file
@@ -0,0 +1,154 @@
|
||||
// ray_tracer.cm — Simple ray tracer kernel
|
||||
// Control flow + numeric + allocation. Classic VM benchmark.
|
||||
|
||||
var math = use('math/radians')
|
||||
|
||||
function vec(x, y, z) {
|
||||
return {x: x, y: y, z: z}
|
||||
}
|
||||
|
||||
function vadd(a, b) {
|
||||
return {x: a.x + b.x, y: a.y + b.y, z: a.z + b.z}
|
||||
}
|
||||
|
||||
function vsub(a, b) {
|
||||
return {x: a.x - b.x, y: a.y - b.y, z: a.z - b.z}
|
||||
}
|
||||
|
||||
function vmul(v, s) {
|
||||
return {x: v.x * s, y: v.y * s, z: v.z * s}
|
||||
}
|
||||
|
||||
function vdot(a, b) {
|
||||
return a.x * b.x + a.y * b.y + a.z * b.z
|
||||
}
|
||||
|
||||
function vnorm(v) {
|
||||
var len = math.sqrt(vdot(v, v))
|
||||
if (len == 0) return vec(0, 0, 0)
|
||||
return vmul(v, 1 / len)
|
||||
}
|
||||
|
||||
function make_sphere(center, radius, color) {
|
||||
return {
|
||||
center: center,
|
||||
radius: radius,
|
||||
color: color
|
||||
}
|
||||
}
|
||||
|
||||
function intersect_sphere(origin, dir, sphere) {
|
||||
var oc = vsub(origin, sphere.center)
|
||||
var b = vdot(oc, dir)
|
||||
var c = vdot(oc, oc) - sphere.radius * sphere.radius
|
||||
var disc = b * b - c
|
||||
if (disc < 0) return -1
|
||||
var sq = math.sqrt(disc)
|
||||
var t1 = -b - sq
|
||||
var t2 = -b + sq
|
||||
if (t1 > 0.001) return t1
|
||||
if (t2 > 0.001) return t2
|
||||
return -1
|
||||
}
|
||||
|
||||
function make_scene() {
|
||||
var spheres = [
|
||||
make_sphere(vec(0, -1, 5), 1, vec(1, 0, 0)),
|
||||
make_sphere(vec(2, 0, 6), 1, vec(0, 1, 0)),
|
||||
make_sphere(vec(-2, 0, 4), 1, vec(0, 0, 1)),
|
||||
make_sphere(vec(0, 1, 4.5), 0.5, vec(1, 1, 0)),
|
||||
make_sphere(vec(1, -0.5, 3), 0.3, vec(1, 0, 1)),
|
||||
make_sphere(vec(0, -101, 5), 100, vec(0.5, 0.5, 0.5))
|
||||
]
|
||||
var light = vnorm(vec(1, 1, -1))
|
||||
return {spheres: spheres, light: light}
|
||||
}
|
||||
|
||||
function trace(origin, dir, scene) {
|
||||
var closest_t = 999999
|
||||
var closest_sphere = null
|
||||
var i = 0
|
||||
var t = 0
|
||||
for (i = 0; i < length(scene.spheres); i++) {
|
||||
t = intersect_sphere(origin, dir, scene.spheres[i])
|
||||
if (t > 0 && t < closest_t) {
|
||||
closest_t = t
|
||||
closest_sphere = scene.spheres[i]
|
||||
}
|
||||
}
|
||||
|
||||
if (!closest_sphere) return vec(0.2, 0.3, 0.5) // sky color
|
||||
|
||||
var hit = vadd(origin, vmul(dir, closest_t))
|
||||
var normal = vnorm(vsub(hit, closest_sphere.center))
|
||||
var diffuse = vdot(normal, scene.light)
|
||||
if (diffuse < 0) diffuse = 0
|
||||
|
||||
// Shadow check
|
||||
var shadow_origin = vadd(hit, vmul(normal, 0.001))
|
||||
var in_shadow = false
|
||||
for (i = 0; i < length(scene.spheres); i++) {
|
||||
if (scene.spheres[i] != closest_sphere) {
|
||||
t = intersect_sphere(shadow_origin, scene.light, scene.spheres[i])
|
||||
if (t > 0) {
|
||||
in_shadow = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var ambient = 0.15
|
||||
var intensity = in_shadow ? ambient : ambient + diffuse * 0.85
|
||||
return vmul(closest_sphere.color, intensity)
|
||||
}
|
||||
|
||||
function render(width, height, scene) {
|
||||
var aspect = width / height
|
||||
var fov = 1.0
|
||||
var total_r = 0
|
||||
var total_g = 0
|
||||
var total_b = 0
|
||||
var y = 0
|
||||
var x = 0
|
||||
var u = 0
|
||||
var v = 0
|
||||
var dir = null
|
||||
var color = null
|
||||
var origin = vec(0, 0, 0)
|
||||
|
||||
for (y = 0; y < height; y++) {
|
||||
for (x = 0; x < width; x++) {
|
||||
u = (2 * (x + 0.5) / width - 1) * aspect * fov
|
||||
v = (1 - 2 * (y + 0.5) / height) * fov
|
||||
dir = vnorm(vec(u, v, 1))
|
||||
color = trace(origin, dir, scene)
|
||||
total_r += color.x
|
||||
total_g += color.y
|
||||
total_b += color.z
|
||||
}
|
||||
}
|
||||
|
||||
return {r: total_r, g: total_g, b: total_b}
|
||||
}
|
||||
|
||||
var scene = make_scene()
|
||||
|
||||
return {
|
||||
raytrace_32x32: function(n) {
|
||||
var i = 0
|
||||
var result = null
|
||||
for (i = 0; i < n; i++) {
|
||||
result = render(32, 32, scene)
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
raytrace_64x64: function(n) {
|
||||
var i = 0
|
||||
var result = null
|
||||
for (i = 0; i < n; i++) {
|
||||
result = render(64, 64, scene)
|
||||
}
|
||||
return result
|
||||
}
|
||||
}
|
||||
251
benches/richards.cm
Normal file
251
benches/richards.cm
Normal file
@@ -0,0 +1,251 @@
|
||||
// richards.cm — Richards benchmark (scheduler simulation)
|
||||
// Object-ish workload: dynamic dispatch, state machines, queuing.
|
||||
|
||||
def IDLE = 0
|
||||
def WORKER = 1
|
||||
def HANDLER_A = 2
|
||||
def HANDLER_B = 3
|
||||
def DEVICE_A = 4
|
||||
def DEVICE_B = 5
|
||||
def NUM_TASKS = 6
|
||||
|
||||
def TASK_RUNNING = 0
|
||||
def TASK_WAITING = 1
|
||||
def TASK_HELD = 2
|
||||
def TASK_SUSPENDED = 3
|
||||
|
||||
function make_packet(link, id, kind) {
|
||||
return {link: link, id: id, kind: kind, datum: 0, data: array(4, 0)}
|
||||
}
|
||||
|
||||
function scheduler() {
|
||||
var tasks = array(NUM_TASKS, null)
|
||||
var current = null
|
||||
var queue_count = 0
|
||||
var hold_count = 0
|
||||
var v1 = 0
|
||||
var v2 = 0
|
||||
var w_id = HANDLER_A
|
||||
var w_datum = 0
|
||||
var h_a_queue = null
|
||||
var h_a_count = 0
|
||||
var h_b_queue = null
|
||||
var h_b_count = 0
|
||||
var dev_a_pkt = null
|
||||
var dev_b_pkt = null
|
||||
|
||||
var find_next = function() {
|
||||
var best = null
|
||||
var i = 0
|
||||
for (i = 0; i < NUM_TASKS; i++) {
|
||||
if (tasks[i] && tasks[i].state == TASK_RUNNING) {
|
||||
if (!best || tasks[i].priority > best.priority) {
|
||||
best = tasks[i]
|
||||
}
|
||||
}
|
||||
}
|
||||
return best
|
||||
}
|
||||
|
||||
var hold_self = function() {
|
||||
hold_count++
|
||||
if (current) current.state = TASK_HELD
|
||||
return find_next()
|
||||
}
|
||||
|
||||
var release = function(id) {
|
||||
var t = tasks[id]
|
||||
if (!t) return find_next()
|
||||
if (t.state == TASK_HELD) t.state = TASK_RUNNING
|
||||
if (t.priority > (current ? current.priority : -1)) return t
|
||||
return current
|
||||
}
|
||||
|
||||
var queue_packet = function(pkt) {
|
||||
var t = tasks[pkt.id]
|
||||
var p = null
|
||||
if (!t) return find_next()
|
||||
queue_count++
|
||||
pkt.link = null
|
||||
pkt.id = current ? current.id : 0
|
||||
if (!t.queue) {
|
||||
t.queue = pkt
|
||||
t.state = TASK_RUNNING
|
||||
if (t.priority > (current ? current.priority : -1)) return t
|
||||
} else {
|
||||
p = t.queue
|
||||
while (p.link) p = p.link
|
||||
p.link = pkt
|
||||
}
|
||||
return current
|
||||
}
|
||||
|
||||
// Idle task
|
||||
tasks[IDLE] = {id: IDLE, priority: 0, queue: null, state: TASK_RUNNING,
|
||||
hold_count: 0, queue_count: 0,
|
||||
fn: function(pkt) {
|
||||
v1--
|
||||
if (v1 == 0) return hold_self()
|
||||
if ((v2 & 1) == 0) {
|
||||
v2 = v2 >> 1
|
||||
return release(DEVICE_A)
|
||||
}
|
||||
v2 = (v2 >> 1) ^ 0xD008
|
||||
return release(DEVICE_B)
|
||||
}
|
||||
}
|
||||
|
||||
// Worker task
|
||||
tasks[WORKER] = {id: WORKER, priority: 1000, queue: null, state: TASK_SUSPENDED,
|
||||
hold_count: 0, queue_count: 0,
|
||||
fn: function(pkt) {
|
||||
var i = 0
|
||||
if (!pkt) return hold_self()
|
||||
w_id = (w_id == HANDLER_A) ? HANDLER_B : HANDLER_A
|
||||
pkt.id = w_id
|
||||
pkt.datum = 0
|
||||
for (i = 0; i < 4; i++) {
|
||||
w_datum++
|
||||
if (w_datum > 26) w_datum = 1
|
||||
pkt.data[i] = 65 + w_datum
|
||||
}
|
||||
return queue_packet(pkt)
|
||||
}
|
||||
}
|
||||
|
||||
// Handler A
|
||||
tasks[HANDLER_A] = {id: HANDLER_A, priority: 2000, queue: null, state: TASK_SUSPENDED,
|
||||
hold_count: 0, queue_count: 0,
|
||||
fn: function(pkt) {
|
||||
var p = null
|
||||
if (pkt) { h_a_queue = pkt; h_a_count++ }
|
||||
if (h_a_queue) {
|
||||
p = h_a_queue
|
||||
h_a_queue = p.link
|
||||
if (h_a_count < 3) return queue_packet(p)
|
||||
return release(DEVICE_A)
|
||||
}
|
||||
return hold_self()
|
||||
}
|
||||
}
|
||||
|
||||
// Handler B
|
||||
tasks[HANDLER_B] = {id: HANDLER_B, priority: 3000, queue: null, state: TASK_SUSPENDED,
|
||||
hold_count: 0, queue_count: 0,
|
||||
fn: function(pkt) {
|
||||
var p = null
|
||||
if (pkt) { h_b_queue = pkt; h_b_count++ }
|
||||
if (h_b_queue) {
|
||||
p = h_b_queue
|
||||
h_b_queue = p.link
|
||||
if (h_b_count < 3) return queue_packet(p)
|
||||
return release(DEVICE_B)
|
||||
}
|
||||
return hold_self()
|
||||
}
|
||||
}
|
||||
|
||||
// Device A
|
||||
tasks[DEVICE_A] = {id: DEVICE_A, priority: 4000, queue: null, state: TASK_SUSPENDED,
|
||||
hold_count: 0, queue_count: 0,
|
||||
fn: function(pkt) {
|
||||
var p = null
|
||||
if (pkt) { dev_a_pkt = pkt; return hold_self() }
|
||||
if (dev_a_pkt) {
|
||||
p = dev_a_pkt
|
||||
dev_a_pkt = null
|
||||
return queue_packet(p)
|
||||
}
|
||||
return hold_self()
|
||||
}
|
||||
}
|
||||
|
||||
// Device B
|
||||
tasks[DEVICE_B] = {id: DEVICE_B, priority: 5000, queue: null, state: TASK_SUSPENDED,
|
||||
hold_count: 0, queue_count: 0,
|
||||
fn: function(pkt) {
|
||||
var p = null
|
||||
if (pkt) { dev_b_pkt = pkt; return hold_self() }
|
||||
if (dev_b_pkt) {
|
||||
p = dev_b_pkt
|
||||
dev_b_pkt = null
|
||||
return queue_packet(p)
|
||||
}
|
||||
return hold_self()
|
||||
}
|
||||
}
|
||||
|
||||
var run = function(iterations) {
|
||||
var i = 0
|
||||
var pkt1 = null
|
||||
var pkt2 = null
|
||||
var steps = 0
|
||||
var pkt = null
|
||||
var next = null
|
||||
|
||||
v1 = iterations
|
||||
v2 = 0xBEEF
|
||||
queue_count = 0
|
||||
hold_count = 0
|
||||
w_id = HANDLER_A
|
||||
w_datum = 0
|
||||
h_a_queue = null
|
||||
h_a_count = 0
|
||||
h_b_queue = null
|
||||
h_b_count = 0
|
||||
dev_a_pkt = null
|
||||
dev_b_pkt = null
|
||||
|
||||
for (i = 0; i < NUM_TASKS; i++) {
|
||||
if (tasks[i]) {
|
||||
tasks[i].state = (i == IDLE) ? TASK_RUNNING : TASK_SUSPENDED
|
||||
tasks[i].queue = null
|
||||
}
|
||||
}
|
||||
|
||||
pkt1 = make_packet(null, WORKER, 1)
|
||||
pkt2 = make_packet(pkt1, WORKER, 1)
|
||||
tasks[WORKER].queue = pkt2
|
||||
tasks[WORKER].state = TASK_RUNNING
|
||||
|
||||
current = find_next()
|
||||
while (current && steps < iterations * 10) {
|
||||
pkt = current.queue
|
||||
if (pkt) {
|
||||
current.queue = pkt.link
|
||||
current.queue_count++
|
||||
}
|
||||
next = current.fn(pkt)
|
||||
if (next) current = next
|
||||
else current = find_next()
|
||||
steps++
|
||||
}
|
||||
return {queue_count: queue_count, hold_count: hold_count, steps: steps}
|
||||
}
|
||||
|
||||
return {run: run}
|
||||
}
|
||||
|
||||
return {
|
||||
richards_100: function(n) {
|
||||
var i = 0
|
||||
var s = null
|
||||
var result = null
|
||||
for (i = 0; i < n; i++) {
|
||||
s = scheduler()
|
||||
result = s.run(100)
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
richards_1k: function(n) {
|
||||
var i = 0
|
||||
var s = null
|
||||
var result = null
|
||||
for (i = 0; i < n; i++) {
|
||||
s = scheduler()
|
||||
result = s.run(1000)
|
||||
}
|
||||
return result
|
||||
}
|
||||
}
|
||||
180
benches/sorting.cm
Normal file
180
benches/sorting.cm
Normal file
@@ -0,0 +1,180 @@
|
||||
// sorting.cm — Sorting and searching kernel
|
||||
// Array manipulation, comparison-heavy, allocation patterns.
|
||||
|
||||
function make_random_array(n, seed) {
|
||||
var a = []
|
||||
var x = seed
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x = ((x * 1103515245 + 12345) & 0x7FFFFFFF) | 0
|
||||
push(a, x % 10000)
|
||||
}
|
||||
return a
|
||||
}
|
||||
|
||||
function make_descending(n) {
|
||||
var a = []
|
||||
var i = 0
|
||||
for (i = n - 1; i >= 0; i--) push(a, i)
|
||||
return a
|
||||
}
|
||||
|
||||
// Manual quicksort (tests recursion + array mutation)
|
||||
function qsort(arr, lo, hi) {
|
||||
var i = lo
|
||||
var j = hi
|
||||
var pivot = arr[floor((lo + hi) / 2)]
|
||||
var tmp = 0
|
||||
if (lo >= hi) return null
|
||||
while (i <= j) {
|
||||
while (arr[i] < pivot) i++
|
||||
while (arr[j] > pivot) j--
|
||||
if (i <= j) {
|
||||
tmp = arr[i]
|
||||
arr[i] = arr[j]
|
||||
arr[j] = tmp
|
||||
i++
|
||||
j--
|
||||
}
|
||||
}
|
||||
if (lo < j) qsort(arr, lo, j)
|
||||
if (i < hi) qsort(arr, i, hi)
|
||||
return null
|
||||
}
|
||||
|
||||
// Merge sort (tests allocation + array creation)
|
||||
function msort(arr) {
|
||||
var n = length(arr)
|
||||
if (n <= 1) return arr
|
||||
var mid = floor(n / 2)
|
||||
var left = msort(array(arr, 0, mid))
|
||||
var right = msort(array(arr, mid, n))
|
||||
return merge(left, right)
|
||||
}
|
||||
|
||||
function merge(a, b) {
|
||||
var result = []
|
||||
var i = 0
|
||||
var j = 0
|
||||
while (i < length(a) && j < length(b)) {
|
||||
if (a[i] <= b[j]) {
|
||||
push(result, a[i])
|
||||
i++
|
||||
} else {
|
||||
push(result, b[j])
|
||||
j++
|
||||
}
|
||||
}
|
||||
while (i < length(a)) {
|
||||
push(result, a[i])
|
||||
i++
|
||||
}
|
||||
while (j < length(b)) {
|
||||
push(result, b[j])
|
||||
j++
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// Binary search
|
||||
function bsearch(arr, target) {
|
||||
var lo = 0
|
||||
var hi = length(arr) - 1
|
||||
var mid = 0
|
||||
while (lo <= hi) {
|
||||
mid = floor((lo + hi) / 2)
|
||||
if (arr[mid] == target) return mid
|
||||
if (arr[mid] < target) lo = mid + 1
|
||||
else hi = mid - 1
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
// Sort objects by field
|
||||
function sort_records(n) {
|
||||
var records = []
|
||||
var x = 42
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x = ((x * 1103515245 + 12345) & 0x7FFFFFFF) | 0
|
||||
push(records, {id: i, score: x % 10000, name: `item_${i}`})
|
||||
}
|
||||
return sort(records, "score")
|
||||
}
|
||||
|
||||
return {
|
||||
// Quicksort 1K random integers
|
||||
qsort_1k: function(n) {
|
||||
var i = 0
|
||||
var a = null
|
||||
for (i = 0; i < n; i++) {
|
||||
a = make_random_array(1000, i)
|
||||
qsort(a, 0, length(a) - 1)
|
||||
}
|
||||
return a
|
||||
},
|
||||
|
||||
// Quicksort 10K random integers
|
||||
qsort_10k: function(n) {
|
||||
var i = 0
|
||||
var a = null
|
||||
for (i = 0; i < n; i++) {
|
||||
a = make_random_array(10000, i)
|
||||
qsort(a, 0, length(a) - 1)
|
||||
}
|
||||
return a
|
||||
},
|
||||
|
||||
// Merge sort 1K (allocation heavy)
|
||||
msort_1k: function(n) {
|
||||
var i = 0
|
||||
var result = null
|
||||
for (i = 0; i < n; i++) {
|
||||
result = msort(make_random_array(1000, i))
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
// Built-in sort 1K
|
||||
builtin_sort_1k: function(n) {
|
||||
var i = 0
|
||||
var result = null
|
||||
for (i = 0; i < n; i++) {
|
||||
result = sort(make_random_array(1000, i))
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
// Sort worst case (descending → ascending)
|
||||
sort_worst_case: function(n) {
|
||||
var i = 0
|
||||
var a = null
|
||||
for (i = 0; i < n; i++) {
|
||||
a = make_descending(1000)
|
||||
qsort(a, 0, length(a) - 1)
|
||||
}
|
||||
return a
|
||||
},
|
||||
|
||||
// Binary search in sorted array
|
||||
bsearch_1k: function(n) {
|
||||
var sorted = make_random_array(1000, 42)
|
||||
sorted = sort(sorted)
|
||||
var found = 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
if (bsearch(sorted, sorted[i % 1000]) >= 0) found++
|
||||
}
|
||||
return found
|
||||
},
|
||||
|
||||
// Sort records by field
|
||||
sort_records_500: function(n) {
|
||||
var i = 0
|
||||
var result = null
|
||||
for (i = 0; i < n; i++) {
|
||||
result = sort_records(500)
|
||||
}
|
||||
return result
|
||||
}
|
||||
}
|
||||
82
benches/spectral_norm.cm
Normal file
82
benches/spectral_norm.cm
Normal file
@@ -0,0 +1,82 @@
|
||||
// spectral_norm.cm — Spectral norm kernel
|
||||
// Pure numeric, dense array access, mathematical computation.
|
||||
|
||||
var math = use('math/radians')
|
||||
|
||||
function eval_a(i, j) {
|
||||
return 1.0 / ((i + j) * (i + j + 1) / 2 + i + 1)
|
||||
}
|
||||
|
||||
function eval_a_times_u(n, u, au) {
|
||||
var i = 0
|
||||
var j = 0
|
||||
var sum = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
sum = 0
|
||||
for (j = 0; j < n; j++) {
|
||||
sum += eval_a(i, j) * u[j]
|
||||
}
|
||||
au[i] = sum
|
||||
}
|
||||
}
|
||||
|
||||
function eval_at_times_u(n, u, atu) {
|
||||
var i = 0
|
||||
var j = 0
|
||||
var sum = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
sum = 0
|
||||
for (j = 0; j < n; j++) {
|
||||
sum += eval_a(j, i) * u[j]
|
||||
}
|
||||
atu[i] = sum
|
||||
}
|
||||
}
|
||||
|
||||
function eval_ata_times_u(n, u, atau) {
|
||||
var v = array(n, 0)
|
||||
eval_a_times_u(n, u, v)
|
||||
eval_at_times_u(n, v, atau)
|
||||
}
|
||||
|
||||
function spectral_norm(n) {
|
||||
var u = array(n, 1)
|
||||
var v = array(n, 0)
|
||||
var i = 0
|
||||
var vbv = 0
|
||||
var vv = 0
|
||||
|
||||
for (i = 0; i < 10; i++) {
|
||||
eval_ata_times_u(n, u, v)
|
||||
eval_ata_times_u(n, v, u)
|
||||
}
|
||||
|
||||
vbv = 0
|
||||
vv = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
vbv += u[i] * v[i]
|
||||
vv += v[i] * v[i]
|
||||
}
|
||||
|
||||
return math.sqrt(vbv / vv)
|
||||
}
|
||||
|
||||
return {
|
||||
spectral_100: function(n) {
|
||||
var i = 0
|
||||
var result = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
result = spectral_norm(100)
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
spectral_200: function(n) {
|
||||
var i = 0
|
||||
var result = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
result = spectral_norm(200)
|
||||
}
|
||||
return result
|
||||
}
|
||||
}
|
||||
188
benches/string_processing.cm
Normal file
188
benches/string_processing.cm
Normal file
@@ -0,0 +1,188 @@
|
||||
// string_processing.cm — String-heavy kernel
|
||||
// Concat, split, search, replace, interning path stress.
|
||||
|
||||
function make_lorem(paragraphs) {
|
||||
var base = "Lorem ipsum dolor sit amet consectetur adipiscing elit sed do eiusmod tempor incididunt ut labore et dolore magna aliqua Ut enim ad minim veniam quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat"
|
||||
var result = ""
|
||||
var i = 0
|
||||
for (i = 0; i < paragraphs; i++) {
|
||||
if (i > 0) result = result + " "
|
||||
result = result + base
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// Build a lookup table from text
|
||||
function build_index(txt) {
|
||||
var words = array(txt, " ")
|
||||
var index = {}
|
||||
var i = 0
|
||||
var w = null
|
||||
for (i = 0; i < length(words); i++) {
|
||||
w = words[i]
|
||||
if (!index[w]) {
|
||||
index[w] = []
|
||||
}
|
||||
push(index[w], i)
|
||||
}
|
||||
return index
|
||||
}
|
||||
|
||||
// Levenshtein-like distance (simplified)
|
||||
function edit_distance(a, b) {
|
||||
var la = length(a)
|
||||
var lb = length(b)
|
||||
if (la == 0) return lb
|
||||
if (lb == 0) return la
|
||||
|
||||
// Use flat array for 2 rows of DP matrix
|
||||
var prev = array(lb + 1, 0)
|
||||
var curr = array(lb + 1, 0)
|
||||
var i = 0
|
||||
var j = 0
|
||||
var cost = 0
|
||||
var del = 0
|
||||
var ins = 0
|
||||
var sub = 0
|
||||
var tmp = null
|
||||
var ca = array(a)
|
||||
var cb = array(b)
|
||||
|
||||
for (j = 0; j <= lb; j++) prev[j] = j
|
||||
for (i = 1; i <= la; i++) {
|
||||
curr[0] = i
|
||||
for (j = 1; j <= lb; j++) {
|
||||
cost = ca[i - 1] == cb[j - 1] ? 0 : 1
|
||||
del = prev[j] + 1
|
||||
ins = curr[j - 1] + 1
|
||||
sub = prev[j - 1] + cost
|
||||
curr[j] = del
|
||||
if (ins < curr[j]) curr[j] = ins
|
||||
if (sub < curr[j]) curr[j] = sub
|
||||
}
|
||||
tmp = prev
|
||||
prev = curr
|
||||
curr = tmp
|
||||
}
|
||||
return prev[lb]
|
||||
}
|
||||
|
||||
var lorem_5 = make_lorem(5)
|
||||
var lorem_20 = make_lorem(20)
|
||||
|
||||
return {
|
||||
// Split text into words and count
|
||||
string_split_count: function(n) {
|
||||
var i = 0
|
||||
var words = null
|
||||
var count = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
words = array(lorem_5, " ")
|
||||
count += length(words)
|
||||
}
|
||||
return count
|
||||
},
|
||||
|
||||
// Build word index (split + hash + array ops)
|
||||
string_index_build: function(n) {
|
||||
var i = 0
|
||||
var idx = null
|
||||
for (i = 0; i < n; i++) {
|
||||
idx = build_index(lorem_5)
|
||||
}
|
||||
return idx
|
||||
},
|
||||
|
||||
// Search for substrings
|
||||
string_search: function(n) {
|
||||
var targets = ["dolor", "minim", "quis", "magna", "ipsum"]
|
||||
var i = 0
|
||||
var j = 0
|
||||
var count = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
for (j = 0; j < length(targets); j++) {
|
||||
if (search(lorem_20, targets[j])) count++
|
||||
}
|
||||
}
|
||||
return count
|
||||
},
|
||||
|
||||
// Replace operations
|
||||
string_replace: function(n) {
|
||||
var i = 0
|
||||
var result = null
|
||||
for (i = 0; i < n; i++) {
|
||||
result = replace(lorem_5, "dolor", "DOLOR")
|
||||
result = replace(result, "ipsum", "IPSUM")
|
||||
result = replace(result, "amet", "AMET")
|
||||
}
|
||||
return result
|
||||
},
|
||||
|
||||
// String concatenation builder
|
||||
string_builder: function(n) {
|
||||
var i = 0
|
||||
var j = 0
|
||||
var s = null
|
||||
var total = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
s = ""
|
||||
for (j = 0; j < 50; j++) {
|
||||
s = s + "key=" + text(j) + "&value=" + text(j * 17) + "&"
|
||||
}
|
||||
total += length(s)
|
||||
}
|
||||
return total
|
||||
},
|
||||
|
||||
// Edit distance (DP + array + string ops)
|
||||
edit_distance: function(n) {
|
||||
var words = ["kitten", "sitting", "saturday", "sunday", "intention", "execution"]
|
||||
var i = 0
|
||||
var j = 0
|
||||
var total = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
for (j = 0; j < length(words) - 1; j++) {
|
||||
total += edit_distance(words[j], words[j + 1])
|
||||
}
|
||||
}
|
||||
return total
|
||||
},
|
||||
|
||||
// Upper/lower/trim chain
|
||||
string_transforms: function(n) {
|
||||
var src = " Hello World "
|
||||
var i = 0
|
||||
var x = 0
|
||||
var result = null
|
||||
for (i = 0; i < n; i++) {
|
||||
result = trim(src)
|
||||
result = upper(result)
|
||||
result = lower(result)
|
||||
x += length(result)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
// Starts_with / ends_with (interning path)
|
||||
string_prefix_suffix: function(n) {
|
||||
var strs = [
|
||||
"application/json",
|
||||
"text/html",
|
||||
"image/png",
|
||||
"application/xml",
|
||||
"text/plain"
|
||||
]
|
||||
var i = 0
|
||||
var j = 0
|
||||
var count = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
for (j = 0; j < length(strs); j++) {
|
||||
if (starts_with(strs[j], "application/")) count++
|
||||
if (ends_with(strs[j], "/json")) count++
|
||||
if (starts_with(strs[j], "text/")) count++
|
||||
}
|
||||
}
|
||||
return count
|
||||
}
|
||||
}
|
||||
137
benches/tree_ops.cm
Normal file
137
benches/tree_ops.cm
Normal file
@@ -0,0 +1,137 @@
|
||||
// tree_ops.cm — Tree data structure operations kernel
|
||||
// Pointer chasing, recursion, allocation patterns.
|
||||
|
||||
// Binary tree: create, walk, transform, check
|
||||
function make_tree(depth) {
|
||||
if (depth <= 0) return {val: 1, left: null, right: null}
|
||||
return {
|
||||
val: depth,
|
||||
left: make_tree(depth - 1),
|
||||
right: make_tree(depth - 1)
|
||||
}
|
||||
}
|
||||
|
||||
function tree_check(node) {
|
||||
if (!node) return 0
|
||||
if (!node.left) return node.val
|
||||
return node.val + tree_check(node.left) - tree_check(node.right)
|
||||
}
|
||||
|
||||
function tree_sum(node) {
|
||||
if (!node) return 0
|
||||
return node.val + tree_sum(node.left) + tree_sum(node.right)
|
||||
}
|
||||
|
||||
function tree_depth(node) {
|
||||
if (!node) return 0
|
||||
var l = tree_depth(node.left)
|
||||
var r = tree_depth(node.right)
|
||||
return 1 + (l > r ? l : r)
|
||||
}
|
||||
|
||||
function tree_count(node) {
|
||||
if (!node) return 0
|
||||
return 1 + tree_count(node.left) + tree_count(node.right)
|
||||
}
|
||||
|
||||
// Transform tree: map values
|
||||
function tree_map(node, fn) {
|
||||
if (!node) return null
|
||||
return {
|
||||
val: fn(node.val),
|
||||
left: tree_map(node.left, fn),
|
||||
right: tree_map(node.right, fn)
|
||||
}
|
||||
}
|
||||
|
||||
// Flatten tree to array (in-order)
|
||||
function tree_flatten(node, result) {
|
||||
if (!node) return null
|
||||
tree_flatten(node.left, result)
|
||||
push(result, node.val)
|
||||
tree_flatten(node.right, result)
|
||||
return null
|
||||
}
|
||||
|
||||
// Build sorted tree from array (balanced)
|
||||
function build_balanced(arr, lo, hi) {
|
||||
if (lo > hi) return null
|
||||
var mid = floor((lo + hi) / 2)
|
||||
return {
|
||||
val: arr[mid],
|
||||
left: build_balanced(arr, lo, mid - 1),
|
||||
right: build_balanced(arr, mid + 1, hi)
|
||||
}
|
||||
}
|
||||
|
||||
// Find a value in BST
|
||||
function bst_find(node, val) {
|
||||
if (!node) return false
|
||||
if (val == node.val) return true
|
||||
if (val < node.val) return bst_find(node.left, val)
|
||||
return bst_find(node.right, val)
|
||||
}
|
||||
|
||||
return {
|
||||
// Binary tree create + check (allocation heavy)
|
||||
tree_create_check: function(n) {
|
||||
var i = 0
|
||||
var t = null
|
||||
var x = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
t = make_tree(10)
|
||||
x += tree_check(t)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
// Deep tree traversals
|
||||
tree_traversal: function(n) {
|
||||
var t = make_tree(12)
|
||||
var x = 0
|
||||
var i = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
x += tree_sum(t) + tree_depth(t) + tree_count(t)
|
||||
}
|
||||
return x
|
||||
},
|
||||
|
||||
// Tree map (create new tree from old)
|
||||
tree_transform: function(n) {
|
||||
var t = make_tree(10)
|
||||
var i = 0
|
||||
var mapped = null
|
||||
for (i = 0; i < n; i++) {
|
||||
mapped = tree_map(t, function(v) { return v * 2 + 1 })
|
||||
}
|
||||
return mapped
|
||||
},
|
||||
|
||||
// Flatten + rebuild (array <-> tree conversion)
|
||||
tree_flatten_rebuild: function(n) {
|
||||
var t = make_tree(10)
|
||||
var i = 0
|
||||
var flat = null
|
||||
var rebuilt = null
|
||||
for (i = 0; i < n; i++) {
|
||||
flat = []
|
||||
tree_flatten(t, flat)
|
||||
rebuilt = build_balanced(flat, 0, length(flat) - 1)
|
||||
}
|
||||
return rebuilt
|
||||
},
|
||||
|
||||
// BST search (pointer chasing)
|
||||
bst_search: function(n) {
|
||||
// Build a balanced BST of 1024 elements
|
||||
var data = []
|
||||
var i = 0
|
||||
for (i = 0; i < 1024; i++) push(data, i)
|
||||
var bst = build_balanced(data, 0, 1023)
|
||||
var found = 0
|
||||
for (i = 0; i < n; i++) {
|
||||
if (bst_find(bst, i % 1024)) found++
|
||||
}
|
||||
return found
|
||||
}
|
||||
}
|
||||
@@ -12,7 +12,8 @@ var files = [
|
||||
{src: "parse.cm", name: "parse", out: "parse.mach"},
|
||||
{src: "fold.cm", name: "fold", out: "fold.mach"},
|
||||
{src: "mcode.cm", name: "mcode", out: "mcode.mach"},
|
||||
{src: "internal/bootstrap.cm", name: "bootstrap", out: "internal/bootstrap.mach"}
|
||||
{src: "internal/bootstrap.cm", name: "bootstrap", out: "internal/bootstrap.mach"},
|
||||
{src: "internal/engine.cm", name: "engine", out: "internal/engine.mach"}
|
||||
]
|
||||
|
||||
var i = 0
|
||||
|
||||
102
compile.ce
Normal file
102
compile.ce
Normal file
@@ -0,0 +1,102 @@
|
||||
// compile.ce — compile a .cm module to native .dylib via QBE
//
// Usage:
//   cell --core . compile.ce <file.cm>
//
// Produces <file>.dylib in the current directory.
//
// Pipeline: emit QBE IL → awk post-process → append entry wrapper →
// qbe → cc assemble → link against cached runtime stubs.
// NOTE(review): every shell command below interpolates paths unquoted —
// breaks on paths containing spaces; presumably acceptable for dev
// tooling, but confirm.

var fd = use('fd')
var os = use('os')

// Require exactly the source-file argument; bail with usage otherwise.
if (length(args) < 1) {
  print('usage: cell --core . compile.ce <file.cm>')
  return
}

// Strip a trailing ".cm" so outputs are named after the module base.
var file = args[0]
var base = file
if (ends_with(base, '.cm')) {
  base = text(base, 0, length(base) - 3)
}

// Sanitize the base into a C-identifier-safe token for the symbol name
// and the /tmp scratch-file prefix.
var safe = replace(replace(base, '/', '_'), '-', '_')
var symbol = 'js_' + safe + '_use'
var tmp = '/tmp/qbe_' + safe
var ssa_path = tmp + '.ssa'
var s_path = tmp + '.s'
var o_path = tmp + '.o'
var rt_o_path = '/tmp/qbe_rt.o'
var dylib_path = base + '.dylib'
var cwd = fd.getcwd()
var rc = 0

// Step 1: emit QBE IL
print('emit qbe...')
rc = os.system('cd ' + cwd + ' && ./cell --core . --emit-qbe ' + file + ' > ' + ssa_path)
if (rc != 0) {
  print('failed to emit qbe il')
  return
}

// Step 2: post-process — insert dead labels after ret/jmp, append wrapper
// Use awk via shell to avoid blob/slurpwrite issues with long strings.
// The awk script gives every instruction that directly follows a
// ret/jmp its own @_dead_N label, which QBE requires for unreachable
// code; label lines (@), closing braces, and blank lines reset the flag.
print('post-process...')
var awk_cmd = `awk '
need_label && /^[[:space:]]*[^@}]/ && NF > 0 {
print "@_dead_" dead_id; dead_id++; need_label=0
}
/^@/ || /^}/ || NF==0 { need_label=0 }
/^[[:space:]]*ret / || /^[[:space:]]*jmp / { need_label=1; print; next }
{ print }
' ` + ssa_path + ` > ` + tmp + `_fixed.ssa`
rc = os.system(awk_cmd)
if (rc != 0) {
  print('post-process failed')
  return
}

// Append wrapper function — called as symbol(ctx) by os.dylib_symbol.
// Delegates to cell_rt_module_entry which heap-allocates a frame
// (so closures survive) and calls cell_main.
var wrapper_cmd = `printf '\nexport function l $` + symbol + `(l %%ctx) {\n@entry\n %%result =l call $cell_rt_module_entry(l %%ctx)\n ret %%result\n}\n' >> ` + tmp + `_fixed.ssa`
rc = os.system(wrapper_cmd)
if (rc != 0) {
  print('wrapper append failed')
  return
}

// Step 3: compile QBE IL to assembly
// NOTE(review): hard-codes qbe at ~/.local/bin — confirm this matches
// the documented install location.
print('qbe compile...')
rc = os.system('~/.local/bin/qbe -o ' + s_path + ' ' + tmp + '_fixed.ssa')
if (rc != 0) {
  print('qbe compilation failed')
  return
}

// Step 4: assemble
print('assemble...')
rc = os.system('cc -c ' + s_path + ' -o ' + o_path)
if (rc != 0) {
  print('assembly failed')
  return
}

// Step 5: compile runtime stubs (cached — skip if already built)
// NOTE(review): the cache at /tmp/qbe_rt.o is never invalidated when
// qbe_rt.c changes — stale stubs persist until /tmp is cleared; confirm.
if (!fd.is_file(rt_o_path)) {
  print('compile runtime stubs...')
  rc = os.system('cc -c ' + cwd + '/qbe_rt.c -o ' + rt_o_path + ' -fPIC')
  if (rc != 0) {
    print('runtime stubs compilation failed')
    return
  }
}

// Step 6: link dylib
// -undefined dynamic_lookup defers resolution of runtime symbols to
// load time, when the host `cell` binary provides them.
print('link...')
rc = os.system('cc -shared -fPIC -undefined dynamic_lookup ' + o_path + ' ' + rt_o_path + ' -o ' + cwd + '/' + dylib_path)
if (rc != 0) {
  print('linking failed')
  return
}

print('built: ' + dylib_path)
|
||||
@@ -1,27 +1,15 @@
|
||||
#include "cell.h"
|
||||
|
||||
// Return the current stack depth.
|
||||
JSC_CCALL(debug_stack_depth, return number2js(js,js_debugger_stack_depth(js)))
|
||||
// TODO: Reimplement stack depth for register VM
|
||||
JSC_CCALL(debug_stack_depth, return number2js(js, 0))
|
||||
|
||||
// Return a backtrace of the current call stack.
|
||||
JSC_CCALL(debug_build_backtrace, return js_debugger_build_backtrace(js,NULL))
|
||||
|
||||
// Return the closure variables for a given function.
|
||||
JSC_CCALL(debug_closure_vars, return js_debugger_closure_variables(js,argv[0]))
|
||||
|
||||
JSC_CCALL(debug_set_closure_var,
|
||||
js_debugger_set_closure_variable(js,argv[0],argv[1],argv[2]);
|
||||
return JS_NULL;
|
||||
)
|
||||
|
||||
// Return the local variables for a specific stack frame.
|
||||
JSC_CCALL(debug_local_vars, return js_debugger_local_variables(js, js2number(js,argv[0])))
|
||||
|
||||
// Return metadata about a given function.
|
||||
JSC_CCALL(debug_fn_info, return js_debugger_fn_info(js, argv[0]))
|
||||
|
||||
// Return an array of functions in the current backtrace.
|
||||
JSC_CCALL(debug_backtrace_fns, return js_debugger_backtrace_fns(js,NULL))
|
||||
// TODO: Reimplement debug introspection for register VM
|
||||
JSC_CCALL(debug_build_backtrace, return JS_NewArray(js))
|
||||
JSC_CCALL(debug_closure_vars, return JS_NewObject(js))
|
||||
JSC_CCALL(debug_set_closure_var, return JS_NULL;)
|
||||
JSC_CCALL(debug_local_vars, return JS_NewObject(js))
|
||||
JSC_CCALL(debug_fn_info, return JS_NewObject(js))
|
||||
JSC_CCALL(debug_backtrace_fns, return JS_NewArray(js))
|
||||
|
||||
static const JSCFunctionListEntry js_debug_funcs[] = {
|
||||
MIST_FUNC_DEF(debug, stack_depth, 0),
|
||||
@@ -37,4 +25,4 @@ JSValue js_debug_use(JSContext *js) {
|
||||
JSValue mod = JS_NewObject(js);
|
||||
JS_SetPropertyFunctionList(js,mod,js_debug_funcs,countof(js_debug_funcs));
|
||||
return mod;
|
||||
}
|
||||
}
|
||||
|
||||
42
debug/js.c
42
debug/js.c
@@ -1,48 +1,16 @@
|
||||
#include "cell.h"
|
||||
|
||||
JSC_CCALL(os_mem_limit, JS_SetMemoryLimit(JS_GetRuntime(js), js2number(js,argv[0])))
|
||||
JSC_CCALL(os_max_stacksize, JS_SetMaxStackSize(JS_GetRuntime(js), js2number(js,argv[0])))
|
||||
JSC_CCALL(os_max_stacksize, JS_SetMaxStackSize(js, js2number(js,argv[0])))
|
||||
|
||||
// Compute the approximate size of a single JS value in memory.
|
||||
// TODO: Reimplement memory usage reporting for new allocator
|
||||
JSC_CCALL(os_calc_mem,
|
||||
JSMemoryUsage mu;
|
||||
JS_ComputeMemoryUsage(JS_GetRuntime(js),&mu);
|
||||
ret = JS_NewObject(js);
|
||||
JS_SetPropertyStr(js,ret,"malloc_size",number2js(js,mu.malloc_size));
|
||||
JS_SetPropertyStr(js,ret,"malloc_limit",number2js(js,mu.malloc_limit));
|
||||
JS_SetPropertyStr(js,ret,"memory_used_size",number2js(js,mu.memory_used_size));
|
||||
JS_SetPropertyStr(js,ret,"malloc_count",number2js(js,mu.malloc_count));
|
||||
JS_SetPropertyStr(js,ret,"memory_used_count",number2js(js,mu.memory_used_count));
|
||||
JS_SetPropertyStr(js,ret,"str_count",number2js(js,mu.str_count));
|
||||
JS_SetPropertyStr(js,ret,"str_size",number2js(js,mu.str_size));
|
||||
JS_SetPropertyStr(js,ret,"obj_count",number2js(js,mu.obj_count));
|
||||
JS_SetPropertyStr(js,ret,"obj_size",number2js(js,mu.obj_size));
|
||||
JS_SetPropertyStr(js,ret,"prop_count",number2js(js,mu.prop_count));
|
||||
JS_SetPropertyStr(js,ret,"prop_size",number2js(js,mu.prop_size));
|
||||
JS_SetPropertyStr(js,ret,"shape_count",number2js(js,mu.shape_count));
|
||||
JS_SetPropertyStr(js,ret,"shape_size",number2js(js,mu.shape_size));
|
||||
JS_SetPropertyStr(js,ret,"js_func_count",number2js(js,mu.js_func_count));
|
||||
JS_SetPropertyStr(js,ret,"js_func_size",number2js(js,mu.js_func_size));
|
||||
JS_SetPropertyStr(js,ret,"js_func_code_size",number2js(js,mu.js_func_code_size));
|
||||
JS_SetPropertyStr(js,ret,"js_func_pc2line_count",number2js(js,mu.js_func_pc2line_count));
|
||||
JS_SetPropertyStr(js,ret,"js_func_pc2line_size",number2js(js,mu.js_func_pc2line_size));
|
||||
JS_SetPropertyStr(js,ret,"c_func_count",number2js(js,mu.c_func_count));
|
||||
JS_SetPropertyStr(js,ret,"array_count",number2js(js,mu.array_count));
|
||||
JS_SetPropertyStr(js,ret,"fast_array_count",number2js(js,mu.fast_array_count));
|
||||
JS_SetPropertyStr(js,ret,"fast_array_elements",number2js(js,mu.fast_array_elements));
|
||||
JS_SetPropertyStr(js,ret,"binary_object_count",number2js(js,mu.binary_object_count));
|
||||
JS_SetPropertyStr(js,ret,"binary_object_size",number2js(js,mu.binary_object_size));
|
||||
)
|
||||
|
||||
// Disassemble a function object into a string.
|
||||
JSC_CCALL(js_disassemble,
|
||||
return js_debugger_fn_bytecode(js, argv[0]);
|
||||
)
|
||||
|
||||
// Return metadata about a given function.
|
||||
JSC_CCALL(js_fn_info,
|
||||
return js_debugger_fn_info(js, argv[0]);
|
||||
)
|
||||
// TODO: Reimplement for register VM
|
||||
JSC_CCALL(js_disassemble, return JS_NewArray(js);)
|
||||
JSC_CCALL(js_fn_info, return JS_NewObject(js);)
|
||||
|
||||
static const JSCFunctionListEntry js_js_funcs[] = {
|
||||
MIST_FUNC_DEF(os, calc_mem, 0),
|
||||
|
||||
264
diff.ce
Normal file
264
diff.ce
Normal file
@@ -0,0 +1,264 @@
|
||||
// diff.ce — differential testing: run tests optimized vs unoptimized, compare results
|
||||
//
|
||||
// Usage:
|
||||
// cell diff - diff all test files in current package
|
||||
// cell diff suite - diff a specific test file (tests/suite.cm)
|
||||
// cell diff tests/foo - diff a specific test file by path
|
||||
var shop = use('internal/shop')
|
||||
var pkg = use('package')
|
||||
var fd = use('fd')
|
||||
var time = use('time')
|
||||
|
||||
// Arguments may be absent when invoked without parameters; normalize to [].
var _args = args == null ? [] : args

// Compiler entry points exposed by the host runtime.
var analyze = use('os').analyze
var run_ast_fn = use('os').run_ast_fn
var run_ast_noopt_fn = use('os').run_ast_noopt_fn

// The unoptimized runner only exists in newer bootstraps; fail fast
// with a clear message instead of erroring mid-run.
if (!run_ast_noopt_fn) {
  log.console("error: run_ast_noopt_fn not available (rebuild bootstrap)")
  $stop()
  return
}

// Parse arguments: diff [test_path]
var target_test = null
if (length(_args) > 0) {
  target_test = _args[0]
}
|
||||
|
||||
// A directory is a cell package iff it contains a cell.toml manifest.
// A null dir defaults to the current directory.
function is_valid_package(dir) {
  var root = dir == null ? '.' : dir
  return fd.is_file(root + '/cell.toml')
}
|
||||
|
||||
// Refuse to run outside a package root so relative test paths
// (tests/*.cm) resolve predictably.
if (!is_valid_package('.')) {
  log.console('No cell.toml found in current directory')
  $stop()
  return
}
|
||||
|
||||
// Collect test files
|
||||
// Collect the package's tests/*.cm files. When specific_test is given
// it may be written bare ("suite"), path-style ("tests/suite"), or with
// the extension ("tests/suite.cm"); only the matching file is returned.
function collect_tests(specific_test) {
  var all = pkg.list_files(null)
  var matched = []
  var idx = 0
  var candidate = null
  var candidate_base = null
  var want = null
  var want_base = null
  while (idx < length(all)) {
    candidate = all[idx]
    idx = idx + 1
    if (!starts_with(candidate, "tests/")) continue
    if (!ends_with(candidate, ".cm")) continue
    if (specific_test) {
      // Normalize the requested name to "tests/<name>" without the
      // extension, then compare against this file's base path.
      candidate_base = text(candidate, 0, -3)
      want = specific_test
      if (!starts_with(want, 'tests/')) want = 'tests/' + want
      want_base = ends_with(want, '.cm') ? text(want, 0, -3) : want
      if (candidate_base != want_base) continue
    }
    push(matched, candidate)
  }
  return matched
}
|
||||
|
||||
// Deep comparison of two values
|
||||
// Structural equality: primitives compare with ==, arrays compare
// element-wise, records compare key-wise. Record keys are taken from
// `a` (after requiring equal key counts), matching the original's
// asymmetric key walk.
function values_equal(a, b) {
  var idx = 0
  var keys_a = null
  var keys_b = null
  if (a == b) return true
  if (is_null(a) && is_null(b)) return true
  if (is_null(a) || is_null(b)) return false
  if (is_array(a) && is_array(b)) {
    if (length(a) != length(b)) return false
    for (idx = 0; idx < length(a); idx = idx + 1) {
      if (!values_equal(a[idx], b[idx])) return false
    }
    return true
  }
  if (is_object(a) && is_object(b)) {
    keys_a = array(a)
    keys_b = array(b)
    if (length(keys_a) != length(keys_b)) return false
    for (idx = 0; idx < length(keys_a); idx = idx + 1) {
      if (!values_equal(a[keys_a[idx]], b[keys_a[idx]])) return false
    }
    return true
  }
  return false
}
|
||||
|
||||
// One-line human-readable summary of a value for mismatch messages.
// Type checks run in the original's order, so e.g. a function is
// reported as "<function>" before the record branch can see it.
function describe(val) {
  var label = "<unknown>"
  if (is_null(val)) {
    label = "null"
  } else if (is_text(val)) {
    label = `"${val}"`
  } else if (is_number(val)) {
    label = text(val)
  } else if (is_logical(val)) {
    label = text(val)
  } else if (is_function(val)) {
    label = "<function>"
  } else if (is_array(val)) {
    label = `[array length=${text(length(val))}]`
  } else if (is_object(val)) {
    label = `{record keys=${text(length(array(val)))}}`
  }
  return label
}
|
||||
|
||||
// Run a single test file through both paths
|
||||
// Run one test file through both the optimized and unoptimized
// execution paths and compare every observable outcome.
//
// file_path: package-relative path like "tests/suite.cm".
// Returns {file, tests, passed, failed, errors} where each entry of
// `tests` is {name, status} and `errors` holds human-readable
// mismatch descriptions.
function diff_test_file(file_path) {
  var mod_path = text(file_path, 0, -3)
  var src_path = fd.realpath('.') + '/' + file_path
  var src = null
  var ast = null
  var mod_opt = null
  var mod_noopt = null
  var results = {file: file_path, tests: [], passed: 0, failed: 0, errors: []}
  var use_pkg = fd.realpath('.')
  var opt_error = null
  var noopt_error = null
  var keys = null
  var i = 0
  var k = null
  var opt_result = null
  var noopt_result = null
  var opt_err = null
  var noopt_err = null
  var _run_one_opt = null
  var _run_one_noopt = null

  // Build env for module loading — each call yields a fresh env whose
  // `use` is bound to this package, so module resolution is identical
  // on both paths.
  var make_env = function() {
    return {
      use: function(path) {
        return shop.use(path, use_pkg)
      }
    }
  }

  // Read and parse once; the same AST is fed to both runners so any
  // divergence is attributable to the optimizer, not the parser.
  // The `disruption` clause records a parse failure in results.errors.
  var _read = function() {
    src = text(fd.slurp(src_path))
    ast = analyze(src, src_path)
  } disruption {
    push(results.errors, `failed to parse ${file_path}`)
    return results
  }
  _read()
  if (length(results.errors) > 0) return results

  // Run optimized — a disruption during load is remembered, not fatal.
  var _run_opt = function() {
    mod_opt = run_ast_fn(mod_path, ast, make_env())
  } disruption {
    opt_error = "disrupted"
  }
  _run_opt()

  // Run unoptimized
  var _run_noopt = function() {
    mod_noopt = run_ast_noopt_fn(mod_path, ast, make_env())
  } disruption {
    noopt_error = "disrupted"
  }
  _run_noopt()

  // Compare module-level behavior: loading must either succeed on
  // both paths or disrupt on both.
  if (opt_error != noopt_error) {
    push(results.errors, `module load mismatch: opt=${opt_error != null ? opt_error : "ok"} noopt=${noopt_error != null ? noopt_error : "ok"}`)
    results.failed = results.failed + 1
    return results
  }
  if (opt_error != null) {
    // Both disrupted during load — that's consistent
    results.passed = results.passed + 1
    push(results.tests, {name: "<module>", status: "passed"})
    return results
  }

  // If module returns a record of functions, test each one.
  // NOTE(review): keys come from the optimized module only — a key
  // present solely in the unoptimized result would go unnoticed;
  // confirm this asymmetry is intentional.
  if (is_object(mod_opt) && is_object(mod_noopt)) {
    keys = array(mod_opt)
    while (i < length(keys)) {
      k = keys[i]
      if (is_function(mod_opt[k]) && is_function(mod_noopt[k])) {
        opt_result = null
        noopt_result = null
        opt_err = null
        noopt_err = null

        // Call the optimized version; remember whether it disrupted.
        _run_one_opt = function() {
          opt_result = mod_opt[k]()
        } disruption {
          opt_err = "disrupted"
        }
        _run_one_opt()

        // Call the unoptimized version the same way.
        _run_one_noopt = function() {
          noopt_result = mod_noopt[k]()
        } disruption {
          noopt_err = "disrupted"
        }
        _run_one_noopt()

        // A test passes only when both paths agree: same disruption
        // status AND structurally equal results.
        if (opt_err != noopt_err) {
          push(results.tests, {name: k, status: "failed"})
          push(results.errors, `${k}: disruption mismatch opt=${opt_err != null ? opt_err : "ok"} noopt=${noopt_err != null ? noopt_err : "ok"}`)
          results.failed = results.failed + 1
        } else if (!values_equal(opt_result, noopt_result)) {
          push(results.tests, {name: k, status: "failed"})
          push(results.errors, `${k}: result mismatch opt=${describe(opt_result)} noopt=${describe(noopt_result)}`)
          results.failed = results.failed + 1
        } else {
          push(results.tests, {name: k, status: "passed"})
          results.passed = results.passed + 1
        }
      }
      i = i + 1
    }
  } else {
    // Compare direct return values when the module is not a record
    // of test functions.
    if (!values_equal(mod_opt, mod_noopt)) {
      push(results.tests, {name: "<return>", status: "failed"})
      push(results.errors, `return value mismatch: opt=${describe(mod_opt)} noopt=${describe(mod_noopt)}`)
      results.failed = results.failed + 1
    } else {
      push(results.tests, {name: "<return>", status: "passed"})
      results.passed = results.passed + 1
    }
  }

  return results
}
|
||||
|
||||
// Main — diff every collected test file, print per-file results and
// each mismatch, then a summary line, and stop the runtime.
var test_files = collect_tests(target_test)
log.console(`Differential testing: ${text(length(test_files))} file(s)`)

var total_passed = 0
var total_failed = 0
var i = 0
var result = null
var j = 0

while (i < length(test_files)) {
  result = diff_test_file(test_files[i])
  log.console(` ${result.file}: ${text(result.passed)} passed, ${text(result.failed)} failed`)
  // Echo every recorded mismatch for this file.
  j = 0
  while (j < length(result.errors)) {
    log.console(` MISMATCH: ${result.errors[j]}`)
    j = j + 1
  }
  total_passed = total_passed + result.passed
  total_failed = total_failed + result.failed
  i = i + 1
}

log.console(`----------------------------------------`)
log.console(`Diff: ${text(total_passed)} passed, ${text(total_failed)} failed, ${text(total_passed + total_failed)} total`)

if (total_failed > 0) {
  log.console(`DIFFERENTIAL FAILURES DETECTED`)
}

$stop()
|
||||
@@ -56,6 +56,7 @@ Modules loaded with `use()`:
|
||||
## Tools
|
||||
|
||||
- [**Command Line**](/docs/cli/) — the `pit` tool
|
||||
- [**Testing**](/docs/testing/) — writing and running tests
|
||||
- [**Writing C Modules**](/docs/c-modules/) — native extensions
|
||||
|
||||
## Architecture
|
||||
|
||||
@@ -78,12 +78,13 @@ pit build
|
||||
|
||||
### pit test
|
||||
|
||||
Run tests.
|
||||
Run tests. See [Testing](/docs/testing/) for the full guide.
|
||||
|
||||
```bash
|
||||
pit test # run tests in current package
|
||||
pit test all # run all tests
|
||||
pit test <package> # run tests in specific package
|
||||
pit test suite --verify --diff # with IR verification and differential testing
|
||||
```
|
||||
|
||||
### pit link
|
||||
|
||||
264
docs/compiler-tools.md
Normal file
264
docs/compiler-tools.md
Normal file
@@ -0,0 +1,264 @@
|
||||
---
|
||||
title: "Compiler Inspection Tools"
|
||||
description: "Tools for inspecting and debugging the compiler pipeline"
|
||||
weight: 50
|
||||
type: "docs"
|
||||
---
|
||||
|
||||
pit includes a set of tools for inspecting the compiler pipeline at every stage. These are useful for debugging, testing optimizations, and understanding what the compiler does with your code.
|
||||
|
||||
## Pipeline Overview
|
||||
|
||||
The compiler runs in stages:
|
||||
|
||||
```
|
||||
source → tokenize → parse → fold → mcode → streamline → output
|
||||
```
|
||||
|
||||
Each stage has a corresponding dump tool that lets you see its output.
|
||||
|
||||
| Stage | Tool | What it shows |
|
||||
|-------------|-------------------|----------------------------------------|
|
||||
| fold | `dump_ast.cm` | Folded AST as JSON |
|
||||
| mcode | `dump_mcode.cm` | Raw mcode IR before optimization |
|
||||
| streamline | `dump_stream.cm` | Before/after instruction counts + IR |
|
||||
| streamline | `dump_types.cm` | Optimized IR with type annotations |
|
||||
| streamline | `streamline.ce` | Full optimized IR as JSON |
|
||||
| all | `ir_report.ce` | Structured optimizer flight recorder |
|
||||
|
||||
All tools take a source file as input and run the pipeline up to the relevant stage.
|
||||
|
||||
## Quick Start
|
||||
|
||||
```bash
|
||||
# see raw mcode IR
|
||||
./cell --core . dump_mcode.cm myfile.ce
|
||||
|
||||
# see what the optimizer changed
|
||||
./cell --core . dump_stream.cm myfile.ce
|
||||
|
||||
# full optimizer report with events
|
||||
./cell --core . ir_report.ce --full myfile.ce
|
||||
```
|
||||
|
||||
## dump_ast.cm
|
||||
|
||||
Prints the folded AST as JSON. This is the output of the parser and constant folder, before mcode generation.
|
||||
|
||||
```bash
|
||||
./cell --core . dump_ast.cm <file.ce|file.cm>
|
||||
```
|
||||
|
||||
## dump_mcode.cm
|
||||
|
||||
Prints the raw mcode IR before any optimization. Shows the instruction array as formatted text with opcode, operands, and program counter.
|
||||
|
||||
```bash
|
||||
./cell --core . dump_mcode.cm <file.ce|file.cm>
|
||||
```
|
||||
|
||||
## dump_stream.cm
|
||||
|
||||
Shows a before/after comparison of the optimizer. For each function, prints:
|
||||
- Instruction count before and after
|
||||
- Number of eliminated instructions
|
||||
- The streamlined IR (nops hidden by default)
|
||||
|
||||
```bash
|
||||
./cell --core . dump_stream.cm <file.ce|file.cm>
|
||||
```
|
||||
|
||||
## dump_types.cm
|
||||
|
||||
Shows the optimized IR with type annotations. Each instruction is followed by the known types of its slot operands, inferred by walking the instruction stream.
|
||||
|
||||
```bash
|
||||
./cell --core . dump_types.cm <file.ce|file.cm>
|
||||
```
|
||||
|
||||
## streamline.ce
|
||||
|
||||
Runs the full pipeline (tokenize, parse, fold, mcode, streamline) and outputs the optimized IR as JSON. Useful for piping to `jq` or saving for comparison.
|
||||
|
||||
```bash
|
||||
./cell --core . streamline.ce <file.ce|file.cm>
|
||||
```
|
||||
|
||||
## ir_report.ce
|
||||
|
||||
The optimizer flight recorder. Runs the full pipeline with structured logging and outputs machine-readable, diff-friendly JSON. This is the most detailed tool for understanding what the optimizer did and why.
|
||||
|
||||
```bash
|
||||
./cell --core . ir_report.ce [options] <file.ce|file.cm>
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
| Flag | Description |
|
||||
|------|-------------|
|
||||
| `--summary` | Per-pass JSON summaries with instruction counts and timing (default) |
|
||||
| `--events` | Include rewrite events showing each optimization applied |
|
||||
| `--types` | Include type delta records showing inferred slot types |
|
||||
| `--ir-before=PASS` | Print canonical IR before a specific pass |
|
||||
| `--ir-after=PASS` | Print canonical IR after a specific pass |
|
||||
| `--ir-all` | Print canonical IR before and after all passes |
|
||||
| `--full` | Everything: summary + events + types + ir-all |
|
||||
|
||||
With no flags, `--summary` is the default.
|
||||
|
||||
### Output Format
|
||||
|
||||
Output is line-delimited JSON. Each line is a self-contained JSON object with a `type` field:
|
||||
|
||||
**`type: "pass"`** — Per-pass summary with categorized instruction counts before and after:
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "pass",
|
||||
"pass": "eliminate_type_checks",
|
||||
"fn": "fib",
|
||||
"ms": 0.12,
|
||||
"changed": true,
|
||||
"before": {"instr": 77, "nop": 0, "guard": 16, "branch": 28, ...},
|
||||
"after": {"instr": 77, "nop": 1, "guard": 15, "branch": 28, ...},
|
||||
"changes": {"guards_removed": 1, "nops_added": 1}
|
||||
}
|
||||
```
|
||||
|
||||
**`type: "event"`** — Individual rewrite event with before/after instructions and reasoning:
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "event",
|
||||
"pass": "eliminate_type_checks",
|
||||
"rule": "incompatible_type_forces_jump",
|
||||
"at": 3,
|
||||
"before": [["is_int", 5, 2, 4, 9], ["jump_false", 5, "rel_ni_2", 4, 9]],
|
||||
"after": ["_nop_tc_1", ["jump", "rel_ni_2", 4, 9]],
|
||||
"why": {"slot": 2, "known_type": "float", "checked_type": "int"}
|
||||
}
|
||||
```
|
||||
|
||||
**`type: "types"`** — Inferred type information for a function:
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "types",
|
||||
"fn": "fib",
|
||||
"param_types": {},
|
||||
"slot_types": {"25": "null"}
|
||||
}
|
||||
```
|
||||
|
||||
**`type: "ir"`** — Canonical IR text for a function at a specific point:
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "ir",
|
||||
"when": "before",
|
||||
"pass": "all",
|
||||
"fn": "fib",
|
||||
"text": "fn fib (args=1, slots=26)\n @0 access s2, 2\n ..."
|
||||
}
|
||||
```
|
||||
|
||||
### Rewrite Rules
|
||||
|
||||
Each pass records events with named rules:
|
||||
|
||||
**eliminate_type_checks:**
|
||||
- `known_type_eliminates_guard` — type already known, guard removed
|
||||
- `incompatible_type_forces_jump` — type conflicts, conditional jump becomes unconditional
|
||||
- `num_subsumes_int_float` — num check satisfied by int or float
|
||||
- `dynamic_to_field` — load_dynamic/store_dynamic narrowed to field access
|
||||
- `dynamic_to_index` — load_dynamic/store_dynamic narrowed to index access
|
||||
|
||||
**simplify_algebra:**
|
||||
- `add_zero`, `sub_zero`, `mul_one`, `div_one` — identity operations become moves
|
||||
- `mul_zero` — multiplication by zero becomes constant
|
||||
- `self_eq`, `self_ne` — same-slot comparisons become constants
|
||||
|
||||
**simplify_booleans:**
|
||||
- `not_jump_false_fusion` — not + jump_false fused into jump_true
|
||||
- `not_jump_true_fusion` — not + jump_true fused into jump_false
|
||||
- `double_not` — not + not collapsed to move
|
||||
|
||||
**eliminate_moves:**
|
||||
- `self_move` — move to same slot becomes nop
|
||||
|
||||
**eliminate_dead_jumps:**
|
||||
- `jump_to_next` — jump to immediately following label becomes nop
|
||||
|
||||
### Canonical IR Format
|
||||
|
||||
The `--ir-all`, `--ir-before`, and `--ir-after` flags produce a deterministic text representation of the IR:
|
||||
|
||||
```
|
||||
fn fib (args=1, slots=26)
|
||||
@0 access s2, 2
|
||||
@1 is_int s4, s1 ; [guard]
|
||||
@2 jump_false s4, "rel_ni_2" ; [branch]
|
||||
@3 --- nop (tc) ---
|
||||
@4 jump "rel_ni_2" ; [branch]
|
||||
@5 lt_int s3, s1, s2
|
||||
@6 jump "rel_done_4" ; [branch]
|
||||
rel_ni_2:
|
||||
@8 is_num s4, s1 ; [guard]
|
||||
```
|
||||
|
||||
Properties:
|
||||
- `@N` is the raw array index, stable across passes (passes replace, never insert or delete)
|
||||
- `sN` prefix distinguishes slot operands from literal values
|
||||
- String operands are quoted
|
||||
- Labels appear as indented headers with a colon
|
||||
- Category tags in brackets: `[guard]`, `[branch]`, `[load]`, `[store]`, `[call]`, `[arith]`, `[move]`, `[const]`
|
||||
- Nops shown as `--- nop (reason) ---` with reason codes: `tc` (type check), `bl` (boolean), `mv` (move), `dj` (dead jump), `ur` (unreachable)
|
||||
|
||||
### Examples
|
||||
|
||||
```bash
|
||||
# what passes changed something?
|
||||
./cell --core . ir_report.ce --summary myfile.ce | jq 'select(.changed)'
|
||||
|
||||
# list all rewrite rules that fired
|
||||
./cell --core . ir_report.ce --events myfile.ce | jq 'select(.type == "event") | .rule'
|
||||
|
||||
# diff IR before and after optimization
|
||||
./cell --core . ir_report.ce --ir-all myfile.ce | jq -r 'select(.type == "ir") | .text'
|
||||
|
||||
# full report for analysis
|
||||
./cell --core . ir_report.ce --full myfile.ce > report.json
|
||||
```
|
||||
|
||||
## ir_stats.cm
|
||||
|
||||
A utility module used by `ir_report.ce` and available for custom tooling. Not a standalone tool.
|
||||
|
||||
```javascript
|
||||
var ir_stats = use("ir_stats")
|
||||
|
||||
ir_stats.detailed_stats(func) // categorized instruction counts
|
||||
ir_stats.ir_fingerprint(func) // djb2 hash of instruction array
|
||||
ir_stats.canonical_ir(func, name, opts) // deterministic text representation
|
||||
ir_stats.type_snapshot(slot_types) // frozen copy of type map
|
||||
ir_stats.type_delta(before_types, after_types) // compute type changes
|
||||
ir_stats.category_tag(op) // classify an opcode
|
||||
```
|
||||
|
||||
### Instruction Categories
|
||||
|
||||
`detailed_stats` classifies each instruction into one of these categories:
|
||||
|
||||
| Category | Opcodes |
|
||||
|----------|---------|
|
||||
| load | `load_field`, `load_index`, `load_dynamic`, `get`, `access` (non-constant) |
|
||||
| store | `store_field`, `store_index`, `store_dynamic`, `set_var`, `put`, `push` |
|
||||
| branch | `jump`, `jump_true`, `jump_false`, `jump_not_null` |
|
||||
| call | `invoke`, `goinvoke` |
|
||||
| guard | `is_int`, `is_text`, `is_num`, `is_bool`, `is_null`, `is_array`, `is_func`, `is_record`, `is_stone` |
|
||||
| arith | `add_int`, `sub_int`, ..., `add_float`, ..., `concat`, `neg_int`, `neg_float`, bitwise ops |
|
||||
| move | `move` |
|
||||
| const | `int`, `true`, `false`, `null`, `access` (with constant value) |
|
||||
| label | string entries that are not nops |
|
||||
| nop | strings starting with `_nop_` |
|
||||
| other | everything else (`frame`, `setarg`, `array`, `record`, `function`, `return`, etc.) |
|
||||
@@ -11,7 +11,7 @@ type: "docs"
|
||||
|
||||
### Variables and Constants
|
||||
|
||||
Variables are declared with `var`, constants with `def`. All declarations must be initialized and must appear at the function body level — not inside `if`, `while`, `for`, or bare `{}` blocks.
|
||||
Variables are declared with `var`, constants with `def`. All declarations must be initialized and must appear at the function body level — not inside `if`, `while`, `for`, or `do` blocks.
|
||||
|
||||
```javascript
|
||||
var x = 10
|
||||
|
||||
3
docs/spec/.pages
Normal file
3
docs/spec/.pages
Normal file
@@ -0,0 +1,3 @@
|
||||
nav:
|
||||
- pipeline.md
|
||||
- mcode.md
|
||||
296
docs/spec/c-runtime.md
Normal file
296
docs/spec/c-runtime.md
Normal file
@@ -0,0 +1,296 @@
|
||||
---
|
||||
title: "C Runtime for Native Code"
|
||||
description: "Minimum C runtime surface for QBE-generated native code"
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
QBE-generated native code calls into a C runtime for anything that touches the heap, dispatches dynamically, or requires GC awareness. The design principle: **native code handles control flow and integer math directly; everything else is a runtime call.**
|
||||
|
||||
This document defines the runtime boundary — what must be in C, what QBE handles inline, and how to organize the C code to serve both the mcode interpreter and native code cleanly.
|
||||
|
||||
## The Boundary
|
||||
|
||||
### What native code does inline (no C calls)
|
||||
|
||||
These operations compile to straight QBE instructions with no runtime involvement:
|
||||
|
||||
- **Integer arithmetic**: `add`, `sub`, `mul` on NaN-boxed ints (shift right 1, operate, shift left 1)
|
||||
- **Integer comparisons**: extract int with shift, compare, produce tagged bool
|
||||
- **Control flow**: jumps, branches, labels, function entry/exit
|
||||
- **Slot access**: load/store to frame slots via `%fp` + offset
|
||||
- **NaN-box tagging**: integer tagging (`n << 1`), bool constants (`0x03`/`0x23`), null (`0x07`)
|
||||
- **Type tests**: `JS_IsInt` (LSB check), `JS_IsNumber`, `JS_IsText`, `JS_IsNull` — these are bit tests on the value, no heap access needed
|
||||
|
||||
### What requires a C call
|
||||
|
||||
Anything that:
|
||||
1. **Allocates** (arrays, records, strings, frames, function objects)
|
||||
2. **Touches the heap** (property get/set, array indexing, closure access)
|
||||
3. **Dispatches on type at runtime** (dynamic load/store, polymorphic arithmetic)
|
||||
4. **Calls user functions** (frame setup, argument passing, invocation)
|
||||
5. **Does string operations** (concatenation, comparison, conversion)
|
||||
|
||||
## Runtime Functions
|
||||
|
||||
### Tier 1: Essential (must exist for any program to run)
|
||||
|
||||
These are called by virtually every QBE program.
|
||||
|
||||
#### Intrinsic Lookup
|
||||
|
||||
```c
|
||||
// Look up a built-in function by name. Called once per intrinsic per callsite.
|
||||
JSValue cell_rt_get_intrinsic(JSContext *ctx, const char *name);
|
||||
```
|
||||
|
||||
Maps name → C function pointer wrapped in JSValue. This is the primary entry point for all built-in functions (`print`, `text`, `length`, `is_array`, etc). The native code never calls intrinsics directly — it always goes through `get_intrinsic` → `frame` → `invoke`.
|
||||
|
||||
#### Function Calls
|
||||
|
||||
```c
|
||||
// Allocate a call frame with space for nr_args arguments.
|
||||
JSValue cell_rt_frame(JSContext *ctx, JSValue fn, int nr_args);
|
||||
|
||||
// Set argument idx in the frame.
|
||||
void cell_rt_setarg(JSValue frame, int idx, JSValue val);
|
||||
|
||||
// Execute the function. Returns the result.
|
||||
JSValue cell_rt_invoke(JSContext *ctx, JSValue frame);
|
||||
```
|
||||
|
||||
This is the universal calling convention. Every function call — user functions, intrinsics, methods — goes through frame/setarg/invoke. The frame allocates a `JSFrameRegister` on the GC heap, setarg fills slots, invoke dispatches.
|
||||
|
||||
**Tail call variants:**
|
||||
|
||||
```c
|
||||
JSValue cell_rt_goframe(JSContext *ctx, JSValue fn, int nr_args);
|
||||
void cell_rt_goinvoke(JSContext *ctx, JSValue frame);
|
||||
```
|
||||
|
||||
Same as frame/invoke but reuse the caller's stack position.
|
||||
|
||||
### Tier 2: Property Access (needed by any program using records or arrays)
|
||||
|
||||
```c
|
||||
// Record field by constant name.
|
||||
JSValue cell_rt_load_field(JSContext *ctx, JSValue obj, const char *name);
|
||||
void cell_rt_store_field(JSContext *ctx, JSValue obj, JSValue val, const char *name);
|
||||
|
||||
// Array element by integer index.
|
||||
JSValue cell_rt_load_index(JSContext *ctx, JSValue obj, JSValue idx);
|
||||
void cell_rt_store_index(JSContext *ctx, JSValue obj, JSValue idx, JSValue val);
|
||||
|
||||
// Dynamic — type of key unknown at compile time.
|
||||
JSValue cell_rt_load_dynamic(JSContext *ctx, JSValue obj, JSValue key);
|
||||
void cell_rt_store_dynamic(JSContext *ctx, JSValue obj, JSValue key, JSValue val);
|
||||
```
|
||||
|
||||
The typed variants (`load_field`/`load_index`) skip the key-type dispatch that `load_dynamic` must perform. When parse and fold provide type information, the QBE emitter selects the typed variant, and the streamline optimizer can narrow dynamic → typed.
|
||||
|
||||
**Implementation**: These are thin wrappers around existing `JS_GetPropertyStr`/`JS_GetPropertyNumber`/`JS_GetProperty` and their `Set` counterparts.
|
||||
|
||||
### Tier 3: Closures (needed by programs with nested functions)
|
||||
|
||||
```c
|
||||
// Walk depth levels up the frame chain, read slot.
|
||||
JSValue cell_rt_get_closure(JSContext *ctx, JSValue fp, int depth, int slot);
|
||||
|
||||
// Walk depth levels up, write slot.
|
||||
void cell_rt_put_closure(JSContext *ctx, JSValue fp, JSValue val, int depth, int slot);
|
||||
```
|
||||
|
||||
Closure variables live in outer frames. `depth` is how many `caller` links to follow; `slot` is the register index in that frame.
|
||||
|
||||
### Tier 4: Object Construction (needed by programs creating arrays/records/functions)
|
||||
|
||||
```c
|
||||
// Create a function object from a compiled function index.
|
||||
// The native code loader must maintain a function table.
|
||||
JSValue cell_rt_make_function(JSContext *ctx, int fn_id);
|
||||
```
|
||||
|
||||
Array and record literals are currently compiled as intrinsic calls (`array(...)`, direct `{...}` construction) which go through the frame/invoke path. A future optimization could add:
|
||||
|
||||
```c
|
||||
// Fast paths (optional, not yet needed)
|
||||
JSValue cell_rt_new_array(JSContext *ctx, int len);
|
||||
JSValue cell_rt_new_record(JSContext *ctx);
|
||||
```
|
||||
|
||||
### Tier 5: Collection Operations
|
||||
|
||||
```c
|
||||
// a[] = val (push) and var v = a[] (pop)
|
||||
void cell_rt_push(JSContext *ctx, JSValue arr, JSValue val);
|
||||
JSValue cell_rt_pop(JSContext *ctx, JSValue arr);
|
||||
```
|
||||
|
||||
### Tier 6: Error Handling
|
||||
|
||||
```c
|
||||
// Trigger disruption. Jumps to the disrupt handler or unwinds.
|
||||
void cell_rt_disrupt(JSContext *ctx);
|
||||
```
|
||||
|
||||
### Tier 7: Miscellaneous
|
||||
|
||||
```c
|
||||
JSValue cell_rt_delete(JSContext *ctx, JSValue obj, JSValue key);
|
||||
JSValue cell_rt_typeof(JSContext *ctx, JSValue val);
|
||||
```
|
||||
|
||||
### Tier 8: String and Float Helpers (called from QBE inline code, not from qbe_emit)
|
||||
|
||||
These are called from the QBE IL that `qbe.cm` generates inline for arithmetic and comparison operations. They're not `cell_rt_` prefixed — they're lower-level:
|
||||
|
||||
```c
|
||||
// Float arithmetic (when operands aren't both ints)
|
||||
JSValue qbe_float_add(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue qbe_float_sub(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue qbe_float_mul(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue qbe_float_div(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue qbe_float_mod(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue qbe_float_pow(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue qbe_float_neg(JSContext *ctx, JSValue v);
|
||||
JSValue qbe_float_inc(JSContext *ctx, JSValue v);
|
||||
JSValue qbe_float_dec(JSContext *ctx, JSValue v);
|
||||
|
||||
// Float comparison (returns C int 0/1 for QBE branching)
|
||||
int qbe_float_cmp(JSContext *ctx, int op, JSValue a, JSValue b);
|
||||
|
||||
// Bitwise ops on non-int values (convert to int32 first)
|
||||
JSValue qbe_bnot(JSContext *ctx, JSValue v);
|
||||
JSValue qbe_bitwise_and(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue qbe_bitwise_or(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue qbe_bitwise_xor(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue qbe_shift_shl(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue qbe_shift_sar(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue qbe_shift_shr(JSContext *ctx, JSValue a, JSValue b);
|
||||
|
||||
// String operations
|
||||
JSValue JS_ConcatString(JSContext *ctx, JSValue a, JSValue b);
|
||||
int js_string_compare_value(JSContext *ctx, JSValue a, JSValue b, int eq_only);
|
||||
JSValue JS_NewString(JSContext *ctx, const char *str);
|
||||
JSValue __JS_NewFloat64(JSContext *ctx, double d);
|
||||
int JS_ToBool(JSContext *ctx, JSValue v);
|
||||
|
||||
// String/number type tests (inline-able but currently calls)
|
||||
int JS_IsText(JSValue v);
|
||||
int JS_IsNumber(JSValue v);
|
||||
|
||||
// Tolerant equality (== on mixed types)
|
||||
JSValue cell_rt_eq_tol(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue cell_rt_ne_tol(JSContext *ctx, JSValue a, JSValue b);
|
||||
|
||||
// Text ordering comparisons
|
||||
JSValue cell_rt_lt_text(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue cell_rt_le_text(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue cell_rt_gt_text(JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue cell_rt_ge_text(JSContext *ctx, JSValue a, JSValue b);
|
||||
```
|
||||
|
||||
## What Exists vs What Needs Writing
|
||||
|
||||
### Already exists (in qbe_helpers.c)
|
||||
|
||||
All `qbe_float_*`, `qbe_bnot`, `qbe_bitwise_*`, `qbe_shift_*`, and `qbe_to_bool` helpers are implemented and working.
|
||||
|
||||
### Already exists (in runtime.c / quickjs.c) but not yet wrapped
|
||||
|
||||
The underlying operations exist but aren't exposed with the `cell_rt_` names:
|
||||
|
||||
| Runtime function | Underlying implementation |
|
||||
|---|---|
|
||||
| `cell_rt_load_field` | `JS_GetPropertyStr(ctx, obj, name)` |
|
||||
| `cell_rt_load_index` | `JS_GetPropertyNumber(ctx, obj, JS_VALUE_GET_INT(idx))` |
|
||||
| `cell_rt_load_dynamic` | `JS_GetProperty(ctx, obj, key)` |
|
||||
| `cell_rt_store_field` | `JS_SetPropertyStr(ctx, obj, name, val)` |
|
||||
| `cell_rt_store_index` | `JS_SetPropertyNumber(ctx, obj, JS_VALUE_GET_INT(idx), val)` |
|
||||
| `cell_rt_store_dynamic` | `JS_SetProperty(ctx, obj, key, val)` |
|
||||
| `cell_rt_delete` | `JS_DeleteProperty(ctx, obj, key)` |
|
||||
| `cell_rt_push` | `JS_ArrayPush(ctx, &arr, val)` |
|
||||
| `cell_rt_pop` | `JS_ArrayPop(ctx, arr)` |
|
||||
| `cell_rt_typeof` | type tag switch → `JS_NewString` |
|
||||
| `cell_rt_disrupt` | `JS_Throw(ctx, ...)` |
|
||||
| `cell_rt_eq_tol` / `cell_rt_ne_tol` | comparison logic in mcode.c `eq_tol`/`ne_tol` handler |
|
||||
| `cell_rt_lt_text` etc. | `js_string_compare_value` + wrap result |
|
||||
|
||||
### Needs new code
|
||||
|
||||
| Runtime function | What's needed |
|
||||
|---|---|
|
||||
| `cell_rt_get_intrinsic` | Look up intrinsic by name string, return JSValue function. Currently scattered across `js_cell_intrinsic_get` and the mcode handler. Needs a clean single entry point. |
|
||||
| `cell_rt_frame` | Allocate `JSFrameRegister`, set function slot, set argc. Exists in mcode.c `frame` handler but not as a callable function. |
|
||||
| `cell_rt_setarg` | Write to frame slot. Trivial: `frame->slots[idx + 1] = val` (slot 0 is `this`). |
|
||||
| `cell_rt_invoke` | Call the function in the frame. Must dispatch on function kind: native C function vs. Mach bytecode vs. mcode. This is the critical piece — it has to handle all function types. |
|
||||
| `cell_rt_goframe` / `cell_rt_goinvoke` | Tail call variants. Similar to frame/invoke but reuse caller frame. |
|
||||
| `cell_rt_make_function` | Create function object from index. Needs a function table (populated by the native loader). |
|
||||
| `cell_rt_get_closure` / `cell_rt_put_closure` | Walk frame chain. Exists inline in mcode.c `get`/`put` handlers. |
|
||||
|
||||
## Recommended C File Organization
|
||||
|
||||
```
|
||||
source/
|
||||
cell_runtime.c — NEW: all cell_rt_* functions (the native code API)
|
||||
qbe_helpers.c — existing: float/bitwise/shift helpers for inline QBE
|
||||
runtime.c — existing: JS_GetProperty, JS_SetProperty, etc.
|
||||
quickjs.c — existing: core VM, GC, value representation
|
||||
mcode.c — existing: mcode interpreter (can delegate to cell_runtime.c)
|
||||
```
|
||||
|
||||
**`cell_runtime.c`** is the single file that defines the native code contract. It should:
|
||||
|
||||
1. Include `quickjs-internal.h` for access to value representation and heap types
|
||||
2. Export all `cell_rt_*` functions with C linkage (no `static`)
|
||||
3. Keep each function thin — delegate to existing `JS_*` functions where possible
|
||||
4. Handle GC safety: after any allocation (frame, string, array), callers' frames may have moved
|
||||
|
||||
### Implementation Priority
|
||||
|
||||
**Phase 1** — Get "hello world" running natively:
|
||||
- `cell_rt_get_intrinsic` (to find `print` and `text`)
|
||||
- `cell_rt_frame`, `cell_rt_setarg`, `cell_rt_invoke` (to call them)
|
||||
- A loader that takes QBE output → assembles → links → calls `cell_main`
|
||||
|
||||
**Phase 2** — Variables and arithmetic:
|
||||
- All property access (`load_field`, `load_index`, `store_*`, `load_dynamic`)
|
||||
- `cell_rt_make_function`, `cell_rt_get_closure`, `cell_rt_put_closure`
|
||||
|
||||
**Phase 3** — Full language:
|
||||
- `cell_rt_push`, `cell_rt_pop`, `cell_rt_delete`, `cell_rt_typeof`
|
||||
- `cell_rt_disrupt`
|
||||
- `cell_rt_goframe`, `cell_rt_goinvoke`
|
||||
- Text comparison wrappers (`cell_rt_lt_text`, etc.)
|
||||
- Tolerant equality (`cell_rt_eq_tol`, `cell_rt_ne_tol`)
|
||||
|
||||
## Calling Convention
|
||||
|
||||
All `cell_rt_*` functions follow the same pattern:
|
||||
|
||||
- First argument is always `JSContext *ctx`
|
||||
- Values are passed/returned as `JSValue` (64-bit, by value)
|
||||
- Frame pointers are `JSValue` (tagged pointer to `JSFrameRegister`)
|
||||
- String names are `const char *` (pointer to data section label)
|
||||
- Integer constants (slot indices, arg counts) are `int` / `long`
|
||||
|
||||
Native code maintains `%ctx` (JSContext) and `%fp` (current frame pointer) as persistent values across the function body. All slot reads/writes go through `%fp` + offset.
|
||||
|
||||
## What Should NOT Be in the C Runtime
|
||||
|
||||
These are handled entirely by QBE-generated code:
|
||||
|
||||
- **Integer arithmetic and comparisons** — bit operations on NaN-boxed values
|
||||
- **Control flow** — branches, loops, labels, jumps
|
||||
- **Boolean logic** — `and`/`or`/`not` on tagged values
|
||||
- **Constant loading** — integer constants are immediate, strings are data labels
|
||||
- **Type guard branches** — the `is_int`/`is_text`/`is_null` checks are inline bit tests; the branch to the float or text path is just a QBE `jnz`
|
||||
|
||||
The `qbe.cm` macros already handle all of this. The arithmetic path looks like:
|
||||
|
||||
```
|
||||
check both ints? → yes → inline int add → done
|
||||
→ no → call qbe_float_add (or JS_ConcatString for text)
|
||||
```
|
||||
|
||||
The C runtime is only called on the slow paths (float, text, dynamic dispatch). The fast path (integer arithmetic, comparisons, branching) is fully native.
|
||||
@@ -1,11 +1,13 @@
|
||||
---
|
||||
title: "Register VM"
|
||||
description: "Register-based virtual machine (Mach)"
|
||||
description: "Binary encoding of the Mach bytecode interpreter"
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
The Mach VM is a register-based virtual machine using 32-bit instructions. It is modeled after Lua's register VM — operands are register indices rather than stack positions, reducing instruction count and improving performance.
|
||||
The Mach VM is a register-based virtual machine that directly interprets the [Mcode IR](mcode.md) instruction set as compact 32-bit binary bytecode. It is modeled after Lua's register VM — operands are register indices rather than stack positions, reducing instruction count and improving performance.
|
||||
|
||||
The Mach serializer (`mach.c`) converts streamlined mcode JSON into binary instructions. Since the Mach bytecode is a direct encoding of the mcode, the [Mcode IR](mcode.md) reference is the authoritative instruction set documentation.
|
||||
|
||||
## Instruction Formats
|
||||
|
||||
@@ -45,95 +47,12 @@ Used for unconditional jumps with a 24-bit signed offset.
|
||||
|
||||
## Registers
|
||||
|
||||
Each function frame has a fixed number of register slots, determined at compile time. Registers hold:
|
||||
Each function frame has a fixed number of register slots, determined at compile time:
|
||||
|
||||
- **R(0)** — `this` binding
|
||||
- **R(1)..R(arity)** — function arguments
|
||||
- **R(arity+1)..** — local variables and temporaries
|
||||
|
||||
## Instruction Set
|
||||
|
||||
### Loading
|
||||
|
||||
| Opcode | Format | Description |
|
||||
|--------|--------|-------------|
|
||||
| `LOADK` | iABx | `R(A) = K(Bx)` — load from constant pool |
|
||||
| `LOADI` | iAsBx | `R(A) = sBx` — load small integer |
|
||||
| `LOADNULL` | iA | `R(A) = null` |
|
||||
| `LOADTRUE` | iA | `R(A) = true` |
|
||||
| `LOADFALSE` | iA | `R(A) = false` |
|
||||
| `MOVE` | iABC | `R(A) = R(B)` — register copy |
|
||||
|
||||
### Arithmetic
|
||||
|
||||
| Opcode | Format | Description |
|
||||
|--------|--------|-------------|
|
||||
| `ADD` | iABC | `R(A) = R(B) + R(C)` |
|
||||
| `SUB` | iABC | `R(A) = R(B) - R(C)` |
|
||||
| `MUL` | iABC | `R(A) = R(B) * R(C)` |
|
||||
| `DIV` | iABC | `R(A) = R(B) / R(C)` |
|
||||
| `MOD` | iABC | `R(A) = R(B) % R(C)` |
|
||||
| `POW` | iABC | `R(A) = R(B) ^ R(C)` |
|
||||
| `NEG` | iABC | `R(A) = -R(B)` |
|
||||
| `INC` | iABC | `R(A) = R(B) + 1` |
|
||||
| `DEC` | iABC | `R(A) = R(B) - 1` |
|
||||
|
||||
### Comparison
|
||||
|
||||
| Opcode | Format | Description |
|
||||
|--------|--------|-------------|
|
||||
| `EQ` | iABC | `R(A) = R(B) == R(C)` |
|
||||
| `NEQ` | iABC | `R(A) = R(B) != R(C)` |
|
||||
| `LT` | iABC | `R(A) = R(B) < R(C)` |
|
||||
| `LE` | iABC | `R(A) = R(B) <= R(C)` |
|
||||
| `GT` | iABC | `R(A) = R(B) > R(C)` |
|
||||
| `GE` | iABC | `R(A) = R(B) >= R(C)` |
|
||||
|
||||
### Property Access
|
||||
|
||||
| Opcode | Format | Description |
|
||||
|--------|--------|-------------|
|
||||
| `GETFIELD` | iABC | `R(A) = R(B)[K(C)]` — named property |
|
||||
| `SETFIELD` | iABC | `R(A)[K(B)] = R(C)` — set named property |
|
||||
| `GETINDEX` | iABC | `R(A) = R(B)[R(C)]` — computed property |
|
||||
| `SETINDEX` | iABC | `R(A)[R(B)] = R(C)` — set computed property |
|
||||
|
||||
### Variable Resolution
|
||||
|
||||
| Opcode | Format | Description |
|
||||
|--------|--------|-------------|
|
||||
| `GETNAME` | iABx | Unresolved variable (compiler placeholder) |
|
||||
| `GETINTRINSIC` | iABx | Global intrinsic / built-in |
|
||||
| `GETENV` | iABx | Module environment variable |
|
||||
| `GETUP` | iABC | `R(A) = UpFrame(B).slots[C]` — closure upvalue |
|
||||
| `SETUP` | iABC | `UpFrame(A).slots[B] = R(C)` — set closure upvalue |
|
||||
|
||||
### Control Flow
|
||||
|
||||
| Opcode | Format | Description |
|
||||
|--------|--------|-------------|
|
||||
| `JMP` | isJ | Unconditional jump |
|
||||
| `JMPTRUE` | iAsBx | Jump if `R(A)` is true |
|
||||
| `JMPFALSE` | iAsBx | Jump if `R(A)` is false |
|
||||
| `JMPNULL` | iAsBx | Jump if `R(A)` is null |
|
||||
|
||||
### Function Calls
|
||||
|
||||
| Opcode | Format | Description |
|
||||
|--------|--------|-------------|
|
||||
| `CALL` | iABC | Call `R(A)` with `B` args starting at `R(A+1)`, `C`=keep result |
|
||||
| `RETURN` | iA | Return `R(A)` |
|
||||
| `RETNIL` | — | Return null |
|
||||
| `CLOSURE` | iABx | Create closure from function pool entry `Bx` |
|
||||
|
||||
### Object / Array
|
||||
|
||||
| Opcode | Format | Description |
|
||||
|--------|--------|-------------|
|
||||
| `NEWOBJECT` | iA | `R(A) = {}` |
|
||||
| `NEWARRAY` | iABC | `R(A) = array(B)` |
|
||||
| `PUSH` | iABC | Push `R(B)` to array `R(A)` |
|
||||
|
||||
## JSCodeRegister
|
||||
|
||||
The compiled output for a function:
|
||||
@@ -149,8 +68,17 @@ struct JSCodeRegister {
|
||||
uint32_t func_count; // nested function count
|
||||
JSCodeRegister **functions; // nested function table
|
||||
JSValue name; // function name
|
||||
uint16_t disruption_pc; // exception handler offset
|
||||
uint16_t disruption_pc; // disruption handler offset
|
||||
};
|
||||
```
|
||||
|
||||
The constant pool holds all non-immediate values referenced by `LOADK` instructions: strings, large numbers, and other constants.
|
||||
|
||||
### Constant Pool Index Overflow
|
||||
|
||||
Named property instructions (`LOAD_FIELD`, `STORE_FIELD`, `DELETE`) use the iABC format where the constant pool key index occupies an 8-bit field (max 255). When a function references more than 256 unique property names, the serializer automatically falls back to a two-instruction sequence:
|
||||
|
||||
1. `LOADK tmp, key_index` — load the key string into a temporary register (iABx, 16-bit index)
|
||||
2. `LOAD_DYNAMIC` / `STORE_DYNAMIC` / `DELETEINDEX` — use the register-based variant
|
||||
|
||||
This is transparent to the mcode compiler and streamline optimizer.
|
||||
|
||||
@@ -1,29 +1,313 @@
|
||||
---
|
||||
title: "Mcode IR"
|
||||
description: "JSON-based intermediate representation"
|
||||
description: "Instruction set reference for the JSON-based intermediate representation"
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
Mcode is a JSON-based intermediate representation that can be interpreted directly. It represents the same operations as the Mach register VM but uses string-based instruction dispatch rather than binary opcodes. Mcode is intended as an intermediate step toward native code compilation.
|
||||
|
||||
## Pipeline
|
||||
Mcode is the intermediate representation at the center of the ƿit compilation pipeline. All source code is lowered to mcode before execution or native compilation. The mcode instruction set is the **authoritative reference** for the operations supported by the ƿit runtime — the Mach VM bytecode is a direct binary encoding of these same instructions.
|
||||
|
||||
```
|
||||
Source → Tokenize → Parse (AST) → Mcode (JSON) → Interpret
|
||||
→ Compile to Mach (planned)
|
||||
→ Compile to native (planned)
|
||||
Source → Tokenize → Parse → Fold → Mcode → Streamline → Machine
|
||||
```
|
||||
|
||||
Mcode is produced by the `JS_Mcode` compiler pass, which emits a cJSON tree. The mcode interpreter walks this tree directly, dispatching on instruction name strings.
|
||||
Mcode is produced by `mcode.cm`, optimized by `streamline.cm`, then either serialized to 32-bit bytecode for the Mach VM (`mach.c`), or lowered to QBE/LLVM IL for native compilation (`qbe_emit.cm`). See [Compilation Pipeline](pipeline.md) for the full overview.
|
||||
|
||||
## JSMCode Structure
|
||||
## Instruction Format
|
||||
|
||||
Each instruction is a JSON array. The first element is the instruction name (string), followed by operands. The last two elements are line and column numbers for source mapping:
|
||||
|
||||
```json
|
||||
["add_int", dest, a, b, line, col]
|
||||
["load_field", dest, obj, "key", line, col]
|
||||
["jump", "label_name"]
|
||||
```
|
||||
|
||||
Operands are register slot numbers (integers), constant values (strings, numbers), or label names (strings).
|
||||
|
||||
## Instruction Reference
|
||||
|
||||
### Loading and Constants
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `access` | `dest, name` | Load variable by name (intrinsic or environment) |
|
||||
| `int` | `dest, value` | Load integer constant |
|
||||
| `true` | `dest` | Load boolean `true` |
|
||||
| `false` | `dest` | Load boolean `false` |
|
||||
| `null` | `dest` | Load `null` |
|
||||
| `move` | `dest, src` | Copy register value |
|
||||
| `function` | `dest, id` | Load nested function by index |
|
||||
| `regexp` | `dest, pattern` | Create regexp object |
|
||||
|
||||
### Arithmetic — Integer
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `add_int` | `dest, a, b` | `dest = a + b` (integer) |
|
||||
| `sub_int` | `dest, a, b` | `dest = a - b` (integer) |
|
||||
| `mul_int` | `dest, a, b` | `dest = a * b` (integer) |
|
||||
| `div_int` | `dest, a, b` | `dest = a / b` (integer) |
|
||||
| `mod_int` | `dest, a, b` | `dest = a % b` (integer) |
|
||||
| `neg_int` | `dest, src` | `dest = -src` (integer) |
|
||||
|
||||
### Arithmetic — Float
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `add_float` | `dest, a, b` | `dest = a + b` (float) |
|
||||
| `sub_float` | `dest, a, b` | `dest = a - b` (float) |
|
||||
| `mul_float` | `dest, a, b` | `dest = a * b` (float) |
|
||||
| `div_float` | `dest, a, b` | `dest = a / b` (float) |
|
||||
| `mod_float` | `dest, a, b` | `dest = a % b` (float) |
|
||||
| `neg_float` | `dest, src` | `dest = -src` (float) |
|
||||
|
||||
### Arithmetic — Generic
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `pow` | `dest, a, b` | `dest = a ^ b` (exponentiation) |
|
||||
|
||||
### Text
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `concat` | `dest, a, b` | `dest = a ~ b` (text concatenation) |
|
||||
|
||||
### Comparison — Integer
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `eq_int` | `dest, a, b` | `dest = a == b` (integer) |
|
||||
| `ne_int` | `dest, a, b` | `dest = a != b` (integer) |
|
||||
| `lt_int` | `dest, a, b` | `dest = a < b` (integer) |
|
||||
| `le_int` | `dest, a, b` | `dest = a <= b` (integer) |
|
||||
| `gt_int` | `dest, a, b` | `dest = a > b` (integer) |
|
||||
| `ge_int` | `dest, a, b` | `dest = a >= b` (integer) |
|
||||
|
||||
### Comparison — Float
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `eq_float` | `dest, a, b` | `dest = a == b` (float) |
|
||||
| `ne_float` | `dest, a, b` | `dest = a != b` (float) |
|
||||
| `lt_float` | `dest, a, b` | `dest = a < b` (float) |
|
||||
| `le_float` | `dest, a, b` | `dest = a <= b` (float) |
|
||||
| `gt_float` | `dest, a, b` | `dest = a > b` (float) |
|
||||
| `ge_float` | `dest, a, b` | `dest = a >= b` (float) |
|
||||
|
||||
### Comparison — Text
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `eq_text` | `dest, a, b` | `dest = a == b` (text) |
|
||||
| `ne_text` | `dest, a, b` | `dest = a != b` (text) |
|
||||
| `lt_text` | `dest, a, b` | `dest = a < b` (lexicographic) |
|
||||
| `le_text` | `dest, a, b` | `dest = a <= b` (lexicographic) |
|
||||
| `gt_text` | `dest, a, b` | `dest = a > b` (lexicographic) |
|
||||
| `ge_text` | `dest, a, b` | `dest = a >= b` (lexicographic) |
|
||||
|
||||
### Comparison — Boolean
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `eq_bool` | `dest, a, b` | `dest = a == b` (boolean) |
|
||||
| `ne_bool` | `dest, a, b` | `dest = a != b` (boolean) |
|
||||
|
||||
### Comparison — Special
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `is_identical` | `dest, a, b` | Object identity check (same reference) |
|
||||
| `eq_tol` | `dest, a, b` | Equality with tolerance |
|
||||
| `ne_tol` | `dest, a, b` | Inequality with tolerance |
|
||||
|
||||
### Type Checks
|
||||
|
||||
These instructions are inlined from calls to the intrinsic type-check functions. Each sets `dest` to `true` or `false`.
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `is_int` | `dest, src` | Check if integer |
|
||||
| `is_num` | `dest, src` | Check if number (integer or float) |
|
||||
| `is_text` | `dest, src` | Check if text |
|
||||
| `is_bool` | `dest, src` | Check if logical |
|
||||
| `is_null` | `dest, src` | Check if null |
|
||||
| `is_array` | `dest, src` | Check if array |
|
||||
| `is_func` | `dest, src` | Check if function |
|
||||
| `is_record` | `dest, src` | Check if record (object) |
|
||||
| `is_stone` | `dest, src` | Check if stone (immutable) |
|
||||
| `is_proxy` | `dest, src` | Check if function proxy (arity 2) |
|
||||
|
||||
### Logical
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `not` | `dest, src` | Logical NOT |
|
||||
| `and` | `dest, a, b` | Logical AND |
|
||||
| `or` | `dest, a, b` | Logical OR |
|
||||
|
||||
### Bitwise
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `bitand` | `dest, a, b` | Bitwise AND |
|
||||
| `bitor` | `dest, a, b` | Bitwise OR |
|
||||
| `bitxor` | `dest, a, b` | Bitwise XOR |
|
||||
| `bitnot` | `dest, src` | Bitwise NOT |
|
||||
| `shl` | `dest, a, b` | Shift left |
|
||||
| `shr` | `dest, a, b` | Arithmetic shift right |
|
||||
| `ushr` | `dest, a, b` | Unsigned shift right |
|
||||
|
||||
### Property Access
|
||||
|
||||
Memory operations come in typed variants. The compiler selects the appropriate variant based on `type_tag` and `access_kind` annotations from parse and fold.
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `load_field` | `dest, obj, key` | Load record property by string key |
|
||||
| `store_field` | `obj, val, key` | Store record property by string key |
|
||||
| `load_index` | `dest, obj, idx` | Load array element by integer index |
|
||||
| `store_index` | `obj, val, idx` | Store array element by integer index |
|
||||
| `load_dynamic` | `dest, obj, key` | Load property (dispatches at runtime) |
|
||||
| `store_dynamic` | `obj, val, key` | Store property (dispatches at runtime) |
|
||||
| `delete` | `obj, key` | Delete property |
|
||||
| `in` | `dest, obj, key` | Check if property exists |
|
||||
| `length` | `dest, src` | Get length of array or text |
|
||||
|
||||
### Object and Array Construction
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `record` | `dest` | Create empty record `{}` |
|
||||
| `array` | `dest, n` | Create empty array (elements added via `push`) |
|
||||
| `push` | `arr, val` | Push value to array |
|
||||
| `pop` | `dest, arr` | Pop value from array |
|
||||
|
||||
### Function Calls
|
||||
|
||||
Function calls are decomposed into three instructions:
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `frame` | `dest, fn, argc` | Allocate call frame for `fn` with `argc` arguments |
|
||||
| `setarg` | `frame, idx, val` | Set argument `idx` in call frame |
|
||||
| `invoke` | `frame, result` | Execute the call, store result |
|
||||
| `goframe` | `dest, fn, argc` | Allocate frame for async/concurrent call |
|
||||
| `goinvoke` | `frame, result` | Invoke async/concurrent call |
|
||||
|
||||
### Variable Resolution
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `access` | `dest, name` | Load variable (intrinsic or module environment) |
|
||||
| `set_var` | `name, src` | Set top-level variable by name |
|
||||
| `get` | `dest, level, slot` | Get closure variable from parent scope |
|
||||
| `put` | `level, slot, src` | Set closure variable in parent scope |
|
||||
|
||||
### Control Flow
|
||||
|
||||
| Instruction | Operands | Description |
|
||||
|-------------|----------|-------------|
|
||||
| `LABEL` | `name` | Define a named label (not executed) |
|
||||
| `jump` | `label` | Unconditional jump |
|
||||
| `jump_true` | `cond, label` | Jump if `cond` is true |
|
||||
| `jump_false` | `cond, label` | Jump if `cond` is false |
|
||||
| `jump_not_null` | `val, label` | Jump if `val` is not null |
|
||||
| `return` | `src` | Return value from function |
|
||||
| `disrupt` | — | Trigger disruption (error) |
|
||||
|
||||
## Typed Instruction Design
|
||||
|
||||
A key design principle of mcode is that **every type check is an explicit instruction**. Arithmetic and comparison operations come in type-specialized variants (`add_int`, `add_float`, `eq_text`, etc.) rather than a single polymorphic instruction.
|
||||
|
||||
When type information is available from the fold stage, the compiler emits the typed variant directly. When the type is unknown, the compiler emits a type-check/dispatch pattern:
|
||||
|
||||
```json
|
||||
["is_int", check, a]
|
||||
["jump_false", check, "float_path"]
|
||||
["add_int", dest, a, b]
|
||||
["jump", "done"]
|
||||
["LABEL", "float_path"]
|
||||
["add_float", dest, a, b]
|
||||
["LABEL", "done"]
|
||||
```
|
||||
|
||||
The [Streamline Optimizer](streamline.md) eliminates dead branches when types are statically known, collapsing the dispatch to a single typed instruction.
|
||||
|
||||
## Intrinsic Inlining
|
||||
|
||||
The mcode compiler recognizes calls to built-in intrinsic functions and emits direct opcodes instead of the generic frame/setarg/invoke call sequence:
|
||||
|
||||
| Source call | Emitted instruction |
|
||||
|-------------|-------------------|
|
||||
| `is_array(x)` | `is_array dest, src` |
|
||||
| `is_function(x)` | `is_func dest, src` |
|
||||
| `is_object(x)` | `is_record dest, src` |
|
||||
| `is_stone(x)` | `is_stone dest, src` |
|
||||
| `is_integer(x)` | `is_int dest, src` |
|
||||
| `is_text(x)` | `is_text dest, src` |
|
||||
| `is_number(x)` | `is_num dest, src` |
|
||||
| `is_logical(x)` | `is_bool dest, src` |
|
||||
| `is_null(x)` | `is_null dest, src` |
|
||||
| `length(x)` | `length dest, src` |
|
||||
| `push(arr, val)` | `push arr, val` |
|
||||
|
||||
## Function Proxy Decomposition
|
||||
|
||||
When the compiler encounters a method call `obj.method(args)`, it emits a branching pattern to handle ƿit's function proxy protocol. An arity-2 function used as a proxy target receives the method name and argument array instead of a normal method call:
|
||||
|
||||
```json
|
||||
["is_proxy", check, obj]
|
||||
["jump_false", check, "record_path"]
|
||||
|
||||
["access", name_slot, "method"]
|
||||
["array", args_arr, N, arg0, arg1]
|
||||
["null", null_slot]
|
||||
["frame", f, obj, 2]
|
||||
["setarg", f, 0, null_slot]
|
||||
["setarg", f, 1, name_slot]
|
||||
["setarg", f, 2, args_arr]
|
||||
["invoke", f, dest]
|
||||
["jump", "done"]
|
||||
|
||||
["LABEL", "record_path"]
|
||||
["load_field", method, obj, "method"]
|
||||
["frame", f2, method, N]
|
||||
["setarg", f2, 0, obj]
|
||||
["setarg", f2, 1, arg0]
|
||||
["invoke", f2, dest]
|
||||
|
||||
["LABEL", "done"]
|
||||
```
|
||||
|
||||
## Labels and Control Flow
|
||||
|
||||
Control flow uses named labels instead of numeric offsets:
|
||||
|
||||
```json
|
||||
["LABEL", "loop_start"]
|
||||
["add_int", 1, 1, 2]
|
||||
["jump_false", 3, "loop_end"]
|
||||
["jump", "loop_start"]
|
||||
["LABEL", "loop_end"]
|
||||
```
|
||||
|
||||
Labels are collected into a name-to-index map during loading, enabling O(1) jump resolution. The Mach serializer converts label names to numeric offsets in the binary bytecode.
|
||||
|
||||
## Nop Convention
|
||||
|
||||
The streamline optimizer replaces eliminated instructions with nop strings (e.g., `_nop_tc_1`, `_nop_bl_2`). Nop strings are skipped during interpretation and native code emission but preserved in the instruction array to maintain positional stability for jump targets.
|
||||
|
||||
## Internal Structures
|
||||
|
||||
### JSMCode (Mcode Interpreter)
|
||||
|
||||
```c
|
||||
struct JSMCode {
|
||||
uint16_t nr_args; // argument count
|
||||
uint16_t nr_slots; // register count
|
||||
cJSON **instrs; // pre-flattened instruction array
|
||||
uint32_t instr_count; // number of instructions
|
||||
|
||||
struct {
|
||||
// ... (additional fields elided) ...
|
||||
cJSON *json_root; // keeps JSON alive
|
||||
const char *name; // function name
|
||||
const char *filename; // source file
|
||||
uint16_t disruption_pc; // disruption handler offset
|
||||
};
|
||||
```
|
||||
|
||||
## Instruction Format
|
||||
### JSCodeRegister (Mach VM Bytecode)
|
||||
|
||||
Each instruction is a JSON array. The first element is the instruction name (string), followed by operands:
|
||||
|
||||
```json
|
||||
["LOADK", 0, 42]
|
||||
["ADD", 2, 0, 1]
|
||||
["JMPFALSE", 3, "else_label"]
|
||||
["CALL", 0, 2, 1]
```
|
||||
```c
|
||||
struct JSCodeRegister {
|
||||
uint16_t arity; // argument count
|
||||
uint16_t nr_slots; // total register count
|
||||
uint32_t cpool_count; // constant pool size
|
||||
JSValue *cpool; // constant pool
|
||||
uint32_t instr_count; // instruction count
|
||||
MachInstr32 *instructions; // 32-bit instruction array
|
||||
uint32_t func_count; // nested function count
|
||||
JSCodeRegister **functions; // nested function table
|
||||
JSValue name; // function name
|
||||
uint16_t disruption_pc; // disruption handler offset
|
||||
};
|
||||
```
|
||||
|
||||
The instruction set mirrors the Mach VM opcodes — same operations, same register semantics, but with string dispatch instead of numeric opcodes.
|
||||
|
||||
## Labels
|
||||
|
||||
Control flow uses named labels instead of numeric offsets:
|
||||
|
||||
```json
|
||||
["LABEL", "loop_start"]
|
||||
["ADD", 1, 1, 2]
|
||||
["JMPFALSE", 3, "loop_end"]
|
||||
["JMP", "loop_start"]
|
||||
["LABEL", "loop_end"]
|
||||
```
|
||||
|
||||
Labels are collected into a name-to-index map during loading, enabling O(1) jump resolution.
|
||||
|
||||
## Differences from Mach
|
||||
|
||||
| Property | Mcode | Mach |
|
||||
|----------|-------|------|
|
||||
| Instructions | cJSON arrays | 32-bit binary |
|
||||
| Dispatch | String comparison | Switch on opcode byte |
|
||||
| Constants | Inline in JSON | Separate constant pool |
|
||||
| Jump targets | Named labels | Numeric offsets |
|
||||
| Memory | Heap (cJSON nodes) | Off-heap (malloc) |
|
||||
|
||||
## Purpose
|
||||
|
||||
Mcode serves as an inspectable, debuggable intermediate format:
|
||||
|
||||
- **Human-readable** — the JSON representation can be printed and examined
|
||||
- **Language-independent** — any tool that produces the correct JSON can target the ƿit runtime
|
||||
- **Compilation target** — the Mach compiler can consume mcode as input, and future native code generators can work from the same representation
|
||||
|
||||
The cost of string-based dispatch makes mcode slower than the binary Mach VM, so it is primarily useful during development and as a compilation intermediate rather than for production execution.
|
||||
The Mach serializer (`mach.c`) converts the JSON mcode into compact 32-bit instructions with a constant pool. See [Register VM](mach.md) for the binary encoding formats.
|
||||
|
||||
<!-- new file: docs/spec/pipeline.md (127 lines) -->
|
||||
---
|
||||
title: "Compilation Pipeline"
|
||||
description: "Overview of the compilation stages and optimizations"
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
The compilation pipeline transforms source code through several stages, each adding information or lowering the representation toward execution. All backends share the same path through mcode and streamline.
|
||||
|
||||
```
|
||||
Source → Tokenize → Parse → Fold → Mcode → Streamline → Machine
|
||||
```
|
||||
|
||||
The final **machine** stage has two targets:
|
||||
|
||||
- **Mach VM** — a register-based bytecode interpreter that directly executes the mcode instruction set as compact 32-bit binary
|
||||
- **Native code** — lowers mcode to QBE or LLVM intermediate language, then compiles to machine code for the target CPU architecture
|
||||
|
||||
## Stages
|
||||
|
||||
### Tokenize (`tokenize.cm`)
|
||||
|
||||
Splits source text into tokens. Handles string interpolation by re-tokenizing template literal contents. Produces a token array with position information (line, column).
|
||||
|
||||
### Parse (`parse.cm`)
|
||||
|
||||
Converts tokens into an AST. Also performs semantic analysis:
|
||||
|
||||
- **Scope records**: For each scope (global, function), builds a record mapping variable names to their metadata: `make` (var/def/function/input), `function_nr`, `nr_uses`, `closure` flag, and `level`.
|
||||
- **Type tags**: When the right-hand side of a `def` is a syntactically obvious type, stamps `type_tag` on the scope record entry. Derivable types: `"integer"`, `"number"`, `"text"`, `"array"`, `"record"`, `"function"`, `"logical"`. For `def` variables, type tags are also inferred from usage patterns: push (`x[] = v`) implies array, property access (`x.foo = v`) implies record, integer key implies array, text key implies record.
|
||||
- **Type error detection**: For `def` variables with known type tags, provably wrong operations are reported as compile errors: property access on arrays, push on non-arrays, text keys on arrays, integer keys on records. Only `def` variables are checked because `var` can be reassigned.
|
||||
- **Intrinsic resolution**: Names used but not locally bound are recorded in `ast.intrinsics`. Name nodes referencing intrinsics get `intrinsic: true`.
|
||||
- **Access kind**: Subscript (`[`) nodes get `access_kind`: `"index"` for numeric subscripts, `"field"` for string subscripts, omitted otherwise.
|
||||
- **Tail position**: Return statements where the expression is a call get `tail: true`.
|
||||
|
||||
### Fold (`fold.cm`)
|
||||
|
||||
Operates on the AST. Performs constant folding and type analysis:
|
||||
|
||||
- **Constant folding**: Evaluates arithmetic on known constants at compile time (e.g., `5 + 10` becomes `15`).
|
||||
- **Constant propagation**: Tracks `def` bindings whose values are known constants.
|
||||
- **Type propagation**: Extends `type_tag` through operations. When both operands of an arithmetic op have known types, the result type is known. Propagates type tags to reference sites.
|
||||
- **Intrinsic specialization**: When an intrinsic call's argument types are known, stamps a `hint` on the call node. For example, `length(x)` where x is a known array gets `hint: "array_length"`. Type checks like `is_array(known_array)` are folded to `true`.
|
||||
- **Purity analysis**: Expressions with no side effects are marked pure (literals, name references, arithmetic on pure operands, calls to pure intrinsics). The pure intrinsic set contains only `is_*` sensory functions — they are the only intrinsics guaranteed to never disrupt regardless of argument types. Other intrinsics like `text`, `number`, and `length` can disrupt on wrong argument types and are excluded.
|
||||
- **Dead code elimination**: Removes unreachable branches when conditions are known constants. Removes unused `var`/`def` declarations with pure initializers. Removes standalone calls to pure intrinsics where the result is discarded.
|
||||
|
||||
### Mcode (`mcode.cm`)
|
||||
|
||||
Lowers the AST to a JSON-based intermediate representation with explicit operations. Key design principle: **every type check is an explicit instruction** so downstream optimizers can see and eliminate them.
|
||||
|
||||
- **Typed load/store**: Emits `load_index` (array by integer), `load_field` (record by string), or `load_dynamic` (unknown) based on type information from fold.
|
||||
- **Decomposed calls**: Function calls are split into `frame` (create call frame) + `setarg` (set arguments) + `invoke` (execute call).
|
||||
- **Intrinsic access**: Intrinsic functions are loaded via `access` with an intrinsic marker rather than global lookup.
|
||||
- **Intrinsic inlining**: Type-check intrinsics (`is_array`, `is_text`, `is_number`, `is_integer`, `is_logical`, `is_null`, `is_function`, `is_object`, `is_stone`), `length`, and `push` are emitted as direct opcodes instead of frame/setarg/invoke call sequences.
|
||||
- **Disruption handler labels**: When a function has a disruption handler, a label is emitted before the handler code. This allows the streamline optimizer's unreachable code elimination to safely nop dead code after `return` without accidentally eliminating the handler.
|
||||
- **Tail call marking**: When a return statement's expression is a call and the function has no disruption handler, the final `invoke` is renamed to `tail_invoke`. This marks the call site for future tail call optimization. Functions with disruption handlers cannot use TCO because the handler frame must remain on the stack.
|
||||
|
||||
See [Mcode IR](mcode.md) for the instruction format and complete instruction reference.
|
||||
|
||||
### Streamline (`streamline.cm`)
|
||||
|
||||
Optimizes the Mcode IR through a series of independent passes. Operates per-function:
|
||||
|
||||
1. **Backward type inference**: Infers parameter types from how they are used in typed operators (`subtract`, `store_index`, `load_field`, `push`, `pop`, etc.). Immutable `def` parameters keep their inferred type across label join points.
|
||||
2. **Type-check elimination**: When a slot's type is known, eliminates `is_<type>` + conditional jump pairs. Narrows `load_dynamic`/`store_dynamic` to typed variants.
|
||||
3. **Algebraic simplification**: Rewrites identity operations (add 0, multiply 1, divide 1) and folds same-slot comparisons.
|
||||
4. **Boolean simplification**: Fuses `not` + conditional jump into a single jump with inverted condition.
|
||||
5. **Move elimination**: Removes self-moves (`move a, a`).
|
||||
6. **Unreachable elimination**: Nops dead code after `return` until the next label.
|
||||
7. **Dead jump elimination**: Removes jumps to the immediately following label.
|
||||
|
||||
See [Streamline Optimizer](streamline.md) for detailed pass descriptions.
|
||||
|
||||
### Machine
|
||||
|
||||
The streamlined mcode is lowered to a machine target for execution.
|
||||
|
||||
#### Mach VM (default)
|
||||
|
||||
The Mach VM is a register-based virtual machine that directly interprets the mcode instruction set as 32-bit binary bytecode. The Mach serializer (`mach.c`) converts streamlined mcode JSON into compact 32-bit instructions with a constant pool. Since the mach bytecode is a direct encoding of the mcode, the [Mcode IR](mcode.md) reference serves as the authoritative instruction set documentation.
|
||||
|
||||
```
|
||||
pit script.ce
|
||||
```
|
||||
|
||||
#### Native Code (QBE / LLVM)
|
||||
|
||||
Lowers the streamlined mcode to QBE or LLVM intermediate language for compilation to native machine code. Each mcode function becomes a native function that calls into the ƿit runtime (`cell_rt_*` functions) for operations that require the runtime (allocation, intrinsic dispatch, etc.).
|
||||
|
||||
String constants are interned in a data section. Integer constants are encoded inline.
|
||||
|
||||
```
|
||||
pit --emit-qbe script.ce > output.ssa
|
||||
```
|
||||
|
||||
## Files
|
||||
|
||||
| File | Role |
|
||||
|------|------|
|
||||
| `tokenize.cm` | Lexer |
|
||||
| `parse.cm` | Parser + semantic analysis |
|
||||
| `fold.cm` | Constant folding + type analysis |
|
||||
| `mcode.cm` | AST → Mcode IR lowering |
|
||||
| `streamline.cm` | Mcode IR optimizer |
|
||||
| `qbe_emit.cm` | Mcode IR → QBE IL emitter |
|
||||
| `qbe.cm` | QBE IL operation templates |
|
||||
| `internal/bootstrap.cm` | Pipeline orchestrator |
|
||||
|
||||
## Debug Tools
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `dump_mcode.cm` | Print raw Mcode IR before streamlining |
|
||||
| `dump_stream.cm` | Print IR after streamlining with before/after stats |
|
||||
| `dump_types.cm` | Print streamlined IR with type annotations |
|
||||
|
||||
## Test Files
|
||||
|
||||
| File | Tests |
|
||||
|------|-------|
|
||||
| `parse_test.ce` | Type tags, access_kind, intrinsic resolution |
|
||||
| `fold_test.ce` | Type propagation, purity, intrinsic hints |
|
||||
| `mcode_test.ce` | Typed load/store, decomposed calls |
|
||||
| `streamline_test.ce` | Optimization counts, IR before/after |
|
||||
| `qbe_test.ce` | End-to-end QBE IL generation |
|
||||
| `test_intrinsics.cm` | Inlined intrinsic opcodes (is_array, length, push, etc.) |
|
||||
| `test_backward.cm` | Backward type propagation for parameters |
|
||||
<!-- new file: docs/spec/streamline.md (361 lines) -->
|
||||
---
|
||||
title: "Streamline Optimizer"
|
||||
description: "Mcode IR optimization passes"
|
||||
---
|
||||
|
||||
## Overview
|
||||
|
||||
The streamline optimizer (`streamline.cm`) runs a series of independent passes over the Mcode IR to eliminate redundant operations. Each pass is a standalone function that can be enabled, disabled, or reordered. Passes communicate only through the instruction array they mutate in place, replacing eliminated instructions with nop strings (e.g., `_nop_tc_1`).
|
||||
|
||||
The optimizer runs after `mcode.cm` generates the IR and before the result is lowered to the Mach VM or emitted as QBE IL.
|
||||
|
||||
```
|
||||
Fold (AST) → Mcode (JSON IR) → Streamline → Mach VM / QBE
|
||||
```
|
||||
|
||||
## Type Lattice
|
||||
|
||||
The optimizer tracks a type for each slot in the register file:
|
||||
|
||||
| Type | Meaning |
|
||||
|------|---------|
|
||||
| `unknown` | No type information |
|
||||
| `int` | Integer |
|
||||
| `float` | Floating-point |
|
||||
| `num` | Number (subsumes int and float) |
|
||||
| `text` | String |
|
||||
| `bool` | Logical (true/false) |
|
||||
| `null` | Null value |
|
||||
| `array` | Array |
|
||||
| `record` | Record (object) |
|
||||
| `function` | Function |
|
||||
| `blob` | Binary blob |
|
||||
|
||||
Subsumption: `int` and `float` both satisfy a `num` check.
|
||||
|
||||
## Passes
|
||||
|
||||
### 1. infer_param_types (backward type inference)
|
||||
|
||||
Scans typed operators and generic arithmetic to determine what types their operands must be. For example, `subtract dest, a, b` implies both `a` and `b` are numbers.
|
||||
|
||||
When a parameter slot (1..nr_args) is consistently inferred as a single type, that type is recorded. Since parameters are immutable (`def`), the inferred type holds for the entire function and persists across label join points (loop headers, branch targets).
|
||||
|
||||
Backward inference rules:
|
||||
|
||||
| Operator class | Operand type inferred |
|
||||
|---|---|
|
||||
| `subtract`, `multiply`, `divide`, `modulo`, `pow`, `negate` | T_NUM |
|
||||
| `eq_int`, `ne_int`, `lt_int`, `gt_int`, `le_int`, `ge_int`, bitwise ops | T_INT |
|
||||
| `eq_float`, `ne_float`, `lt_float`, `gt_float`, `le_float`, `ge_float` | T_FLOAT |
|
||||
| `concat`, text comparisons | T_TEXT |
|
||||
| `eq_bool`, `ne_bool`, `not`, `and`, `or` | T_BOOL |
|
||||
| `store_index` (object operand) | T_ARRAY |
|
||||
| `store_index` (index operand) | T_INT |
|
||||
| `store_field` (object operand) | T_RECORD |
|
||||
| `push` (array operand) | T_ARRAY |
|
||||
| `load_index` (object operand) | T_ARRAY |
|
||||
| `load_index` (index operand) | T_INT |
|
||||
| `load_field` (object operand) | T_RECORD |
|
||||
| `pop` (array operand) | T_ARRAY |
|
||||
|
||||
Note: `add` is excluded from backward inference because it is polymorphic — it handles both numeric addition and text concatenation. Only operators that are unambiguously numeric can infer T_NUM.
|
||||
|
||||
When a slot appears with conflicting type inferences, the result is `unknown`. INT + FLOAT conflicts produce `num`.
|
||||
|
||||
**Nop prefix:** none (analysis only, does not modify instructions)
|
||||
|
||||
### 2. infer_slot_write_types (slot write-type invariance)
|
||||
|
||||
Scans all instructions to determine which non-parameter slots have a consistent write type. If every instruction that writes to a given slot produces the same type, that type is globally invariant and can safely persist across label join points.
|
||||
|
||||
This analysis is sound because:
|
||||
- `alloc_slot()` in mcode.cm is monotonically increasing — temp slots are never reused
|
||||
- All local variable declarations must be at function body level and initialized — slots are written before any backward jumps to loop headers
|
||||
- `move` is conservatively treated as T_UNKNOWN, avoiding unsound transitive assumptions
|
||||
|
||||
Write type mapping:
|
||||
|
||||
| Instruction class | Write type |
|
||||
|---|---|
|
||||
| `int` | T_INT |
|
||||
| `true`, `false` | T_BOOL |
|
||||
| `null` | T_NULL |
|
||||
| `access` | type of literal value |
|
||||
| `array` | T_ARRAY |
|
||||
| `record` | T_RECORD |
|
||||
| `function` | T_FUNCTION |
|
||||
| `length` | T_INT |
|
||||
| bitwise ops | T_INT |
|
||||
| `concat` | T_TEXT |
|
||||
| bool ops, comparisons, `in` | T_BOOL |
|
||||
| generic arithmetic (`add`, `subtract`, `negate`, etc.) | T_UNKNOWN |
|
||||
| `move`, `load_field`, `load_index`, `load_dynamic`, `pop`, `get` | T_UNKNOWN |
|
||||
| `invoke`, `tail_invoke` | T_UNKNOWN |
|
||||
|
||||
The result is a map of slot→type for slots where all writes agree on a single known type. Parameter slots (1..nr_args) and slot 0 are excluded.
|
||||
|
||||
Common patterns this enables:
|
||||
|
||||
- **Length variables** (`var len = length(arr)`): written by `length` (T_INT) only → invariant T_INT
|
||||
- **Boolean flags** (`var found = false; ... found = true`): written by `false` and `true` → invariant T_BOOL
|
||||
- **Locally-created containers** (`var arr = []`): written by `array` only → invariant T_ARRAY
|
||||
|
||||
Note: Loop counters (`var i = 0; i = i + 1`) are NOT invariant because `add` produces T_UNKNOWN. However, if `i` is a function parameter used in arithmetic, backward inference from `subtract`/`multiply`/etc. will infer T_NUM for it, which persists across labels.
|
||||
|
||||
**Nop prefix:** none (analysis only, does not modify instructions)
|
||||
|
||||
### 3. eliminate_type_checks (type-check + jump elimination)
|
||||
|
||||
Forward pass that tracks the known type of each slot. When a type check (`is_int`, `is_text`, `is_num`, etc.) is followed by a conditional jump, and the slot's type is already known, the check and jump can be eliminated or converted to an unconditional jump.
|
||||
|
||||
Three cases:
|
||||
|
||||
- **Known match** (e.g., `is_int` on a slot known to be `int`): both the check and the conditional jump are eliminated (nop'd).
|
||||
- **Known mismatch** (e.g., `is_text` on a slot known to be `int`): the check is nop'd and the conditional jump is rewritten to an unconditional `jump`.
|
||||
- **Unknown**: the check remains, but on fallthrough, the slot's type is narrowed to the checked type (enabling downstream eliminations).
|
||||
|
||||
This pass also reduces `load_dynamic`/`store_dynamic` to `load_field`/`store_field` or `load_index`/`store_index` when the key slot's type is known.
|
||||
|
||||
At label join points, all type information is reset except for parameter types from backward inference and write-invariant types from slot write-type analysis.
|
||||
|
||||
**Nop prefix:** `_nop_tc_`
|
||||
|
||||
### 4. simplify_algebra (same-slot comparison folding)
|
||||
|
||||
Tracks known constant values. Folds same-slot comparisons:
|
||||
|
||||
| Pattern | Rewrite |
|
||||
|---------|---------|
|
||||
| `eq_* dest, x, x` | `true dest` |
|
||||
| `le_* dest, x, x` | `true dest` |
|
||||
| `ge_* dest, x, x` | `true dest` |
|
||||
| `is_identical dest, x, x` | `true dest` |
|
||||
| `ne_* dest, x, x` | `false dest` |
|
||||
| `lt_* dest, x, x` | `false dest` |
|
||||
| `gt_* dest, x, x` | `false dest` |
|
||||
|
||||
**Nop prefix:** none (rewrites in place, does not create nops)
|
||||
|
||||
### 5. simplify_booleans (not + jump fusion)
|
||||
|
||||
Peephole pass that eliminates unnecessary `not` instructions:
|
||||
|
||||
| Pattern | Rewrite |
|
||||
|---------|---------|
|
||||
| `not d, x; jump_false d, L` | nop; `jump_true x, L` |
|
||||
| `not d, x; jump_true d, L` | nop; `jump_false x, L` |
|
||||
| `not d1, x; not d2, d1` | nop; `move d2, x` |
|
||||
|
||||
This is particularly effective on `if (!cond)` patterns, which the compiler generates as `not; jump_false`. After this pass, they become a single `jump_true`.
|
||||
|
||||
**Nop prefix:** `_nop_bl_`
|
||||
|
||||
### 6. eliminate_moves (self-move elimination)
|
||||
|
||||
Removes `move a, a` instructions where the source and destination are the same slot. These can arise from earlier passes rewriting binary operations into moves.
|
||||
|
||||
**Nop prefix:** `_nop_mv_`
|
||||
|
||||
### 7. eliminate_unreachable (dead code after return)
|
||||
|
||||
Nops instructions after `return` until the next real label. Only `return` is treated as a terminal instruction; `disrupt` is not, because the disruption handler code immediately follows `disrupt` and must remain reachable.
|
||||
|
||||
The mcode compiler emits a label at disruption handler entry points (see `emit_label(gen_label("disruption"))` in mcode.cm), which provides the label boundary that stops this pass from eliminating handler code.
|
||||
|
||||
**Nop prefix:** `_nop_ur_`
|
||||
|
||||
### 8. eliminate_dead_jumps (jump-to-next-label elimination)
|
||||
|
||||
Removes `jump L` instructions where `L` is the immediately following label (skipping over any intervening nop strings). These are common after other passes eliminate conditional branches, leaving behind jumps that fall through naturally.
|
||||
|
||||
**Nop prefix:** `_nop_dj_`
|
||||
|
||||
## Pass Composition
|
||||
|
||||
All passes run in sequence in `optimize_function`:
|
||||
|
||||
```
|
||||
infer_param_types → returns param_types map
|
||||
infer_slot_write_types → returns write_types map
|
||||
eliminate_type_checks → uses param_types + write_types
|
||||
simplify_algebra
|
||||
simplify_booleans
|
||||
eliminate_moves
|
||||
eliminate_unreachable
|
||||
eliminate_dead_jumps
|
||||
```
|
||||
|
||||
Each pass is independent and can be commented out for testing or benchmarking.
|
||||
|
||||
## Intrinsic Inlining
|
||||
|
||||
Before streamlining, `mcode.cm` recognizes calls to built-in intrinsic functions and emits direct opcodes instead of the generic frame/setarg/invoke call sequence. This reduces a 6-instruction call pattern to a single instruction:
|
||||
|
||||
| Call | Emitted opcode |
|
||||
|------|---------------|
|
||||
| `is_array(x)` | `is_array dest, src` |
|
||||
| `is_function(x)` | `is_func dest, src` |
|
||||
| `is_object(x)` | `is_record dest, src` |
|
||||
| `is_stone(x)` | `is_stone dest, src` |
|
||||
| `is_integer(x)` | `is_int dest, src` |
|
||||
| `is_text(x)` | `is_text dest, src` |
|
||||
| `is_number(x)` | `is_num dest, src` |
|
||||
| `is_logical(x)` | `is_bool dest, src` |
|
||||
| `is_null(x)` | `is_null dest, src` |
|
||||
| `length(x)` | `length dest, src` |
|
||||
| `push(arr, val)` | `push arr, val` |
|
||||
|
||||
These inlined opcodes have corresponding Mach VM implementations in `mach.c`.
|
||||
|
||||
## Unified Arithmetic
|
||||
|
||||
Arithmetic operations use generic opcodes: `add`, `subtract`, `multiply`, `divide`, `modulo`, `pow`, `negate`. There are no type-dispatched variants (e.g., no `add_int`/`add_float`).
|
||||
|
||||
The Mach VM dispatches at runtime with an int-first fast path via `reg_vm_binop()`: it checks `JS_VALUE_IS_BOTH_INT` first for fast integer arithmetic, then falls back to float conversion, text concatenation (for `add` only), or type error.
|
||||
|
||||
Bitwise operations (`shl`, `shr`, `ushr`, `bitand`, `bitor`, `bitxor`, `bitnot`) remain integer-only and disrupt if operands are not integers.
|
||||
|
||||
The QBE/native backend maps generic arithmetic to helper calls (`qbe.add`, `qbe.sub`, etc.). The vision for the native path is that with sufficient type inference, the backend can unbox proven-numeric values to raw registers, operate directly, and only rebox at boundaries (returns, calls, stores).
|
||||
|
||||
## Debugging Tools
|
||||
|
||||
Three dump tools inspect the IR at different stages:
|
||||
|
||||
- **`dump_mcode.cm`** — prints the raw Mcode IR after `mcode.cm`, before streamlining
|
||||
- **`dump_stream.cm`** — prints the IR after streamlining, with before/after instruction counts
|
||||
- **`dump_types.cm`** — prints the streamlined IR with type annotations on each instruction
|
||||
|
||||
Usage:
|
||||
```
|
||||
./cell --core . dump_mcode.cm <file.ce|file.cm>
|
||||
./cell --core . dump_stream.cm <file.ce|file.cm>
|
||||
./cell --core . dump_types.cm <file.ce|file.cm>
|
||||
```
|
||||
|
||||
## Tail Call Marking
|
||||
|
||||
When a function's return expression is a call (`stmt.tail == true` from the parser) and the function has no disruption handler, mcode.cm renames the final `invoke` instruction to `tail_invoke`. This is semantically identical to `invoke` in the current Mach VM, but marks the call site for future tail call optimization.
|
||||
|
||||
The disruption handler restriction exists because TCO would discard the current frame, but the handler must remain on the stack to catch disruptions from the callee.
|
||||
|
||||
`tail_invoke` is handled by the same passes as `invoke` in streamline (type tracking, algebraic simplification) and executes identically in the VM.
|
||||
|
||||
## Type Propagation Architecture
|
||||
|
||||
Type information flows through three compilation stages, each building on the previous:
|
||||
|
||||
### Stage 1: Parse-time type tags (parse.cm)
|
||||
|
||||
The parser assigns `type_tag` strings to scope variable entries when the type is syntactically obvious:
|
||||
|
||||
- **From initializers**: `def a = []` → `type_tag: "array"`, `def n = 42` → `type_tag: "integer"`, `def r = {}` → `type_tag: "record"`
|
||||
- **From usage patterns** (def only): `def x = null; x[] = v` infers `type_tag: "array"` from the push. `def x = null; x.foo = v` infers `type_tag: "record"` from property access.
|
||||
- **Type error detection** (def only): When a `def` variable has a known type_tag, provably wrong operations are compile errors:
|
||||
- Property access (`.`) on array
|
||||
- Push (`[]`) on non-array
|
||||
- Text key on array
|
||||
- Integer key on record
|
||||
|
||||
Only `def` (constant) variables participate in type inference and error detection. `var` variables can be reassigned, making their initializer type unreliable.
|
||||
|
||||
### Stage 2: Fold-time type propagation (fold.cm)
|
||||
|
||||
The fold pass extends type information through the AST:
|
||||
|
||||
- **Intrinsic folding**: `is_array(known_array)` folds to `true`. `length(known_array)` gets `hint: "array_length"`.
|
||||
- **Purity analysis**: Expressions involving only `is_*` intrinsic calls with pure arguments are considered pure. This enables dead code elimination for unused `var`/`def` bindings with pure initializers, and elimination of standalone pure call statements.
|
||||
- **Dead code**: Unused pure `var`/`def` declarations are removed. Standalone calls to pure intrinsics (where the result is discarded) are removed. Unreachable branches with constant conditions are removed.
|
||||
|
||||
The `pure_intrinsics` set currently contains only `is_*` sensory functions (`is_array`, `is_text`, `is_number`, `is_integer`, `is_function`, `is_logical`, `is_null`, `is_object`, `is_stone`). Other intrinsics like `text`, `number`, and `length` can disrupt on wrong argument types, so they are excluded — removing a call that would disrupt changes observable behavior.
|
||||
|
||||
### Stage 3: Streamline-time type tracking (streamline.cm)
|
||||
|
||||
The streamline optimizer uses a numeric type lattice (`T_INT`, `T_FLOAT`, `T_TEXT`, etc.) for fine-grained per-instruction tracking:
|
||||
|
||||
- **Backward inference** (pass 1): Scans typed operators to infer parameter types. Since parameters are `def` (immutable), inferred types persist across label boundaries.
|
||||
- **Write-type invariance** (pass 2): Scans all instructions to find local slots where every write produces the same type. These invariant types persist across label boundaries alongside parameter types.
|
||||
- **Forward tracking** (pass 3): `track_types` follows instruction execution order, tracking the type of each slot. Known-type operations set their destination type (e.g., `concat` → T_TEXT, `length` → T_INT). Generic arithmetic produces T_UNKNOWN. Type checks on unknown slots narrow the type on fallthrough.
|
||||
- **Type check elimination** (pass 3): When a slot's type is already known, `is_<type>` + conditional jump pairs are eliminated or converted to unconditional jumps.
|
||||
- **Dynamic access narrowing** (pass 3): `load_dynamic`/`store_dynamic` are narrowed to `load_field`/`store_field` or `load_index`/`store_index` when the key type is known.
|
||||
|
||||
Type information resets at label join points (since control flow merges could bring different types), except for parameter types from backward inference and write-invariant types from slot write-type analysis.
|
||||
|
||||
## Future Work
|
||||
|
||||
### Copy Propagation
|
||||
|
||||
A basic-block-local copy propagation pass would replace uses of a copied variable with its source, enabling further move elimination. An implementation was attempted but encountered an unsolved bug where 2-position instruction operand replacement produces incorrect code during self-hosting (the replacement logic for 3-position instructions works correctly). The root cause is not yet understood. See the project memory files for detailed notes.
|
||||
|
||||
### Expanded Purity Analysis
|
||||
|
||||
The current purity set is conservative (only `is_*`). It could be expanded by:
|
||||
|
||||
- **Argument-type-aware purity**: If all arguments to an intrinsic are known to be the correct types (via type_tag or slot_types), the call cannot disrupt and is safe to eliminate. For example, `length(known_array)` is pure but `length(unknown)` is not.
|
||||
- **User function purity**: Analyze user-defined function bodies during pre_scan. A function is pure if its body contains only pure expressions and calls to known-pure functions. This requires fixpoint iteration for mutual recursion.
|
||||
- **Callback-aware purity**: Intrinsics like `filter`, `find`, `reduce`, `some`, `every` are pure if their callback argument is pure.
|
||||
|
||||
### Forward Type Narrowing from Typed Operations
|
||||
|
||||
With unified arithmetic (generic `add`/`subtract`/`multiply`/`divide`/`modulo`/`negate` instead of typed variants), this approach is no longer applicable. Typed comparisons (`eq_int`, `lt_float`, etc.) still exist and their operands have known types, but these are already handled by backward inference.
|
||||
|
||||
### Guard Hoisting for Parameters
|
||||
|
||||
When a type check on a parameter passes (falls through), the parameter's type could be promoted to `param_types` so it persists across label boundaries. This would allow the first type check on a parameter to prove its type for the entire function. However, this is unsound for polymorphic parameters — if a function is called with different argument types, the first check would wrongly eliminate checks for subsequent types.
|
||||
|
||||
A safe version would require proving that a parameter is monomorphic (called with only one type across all call sites), which requires interprocedural analysis.
|
||||
|
||||
**Note:** For local variables (non-parameters), the write-type invariance analysis (pass 2) achieves a similar effect safely — if every write to a slot produces the same type, that type persists across labels without needing to hoist any guard.
|
||||
|
||||
### Tail Call Optimization
|
||||
|
||||
`tail_invoke` instructions are currently marked but execute identically to `invoke`. Actual TCO would reuse the current call frame instead of creating a new one. This requires:
|
||||
|
||||
- Ensuring argument count matches (or the frame can be resized)
|
||||
- No live locals needed after the call (guaranteed by tail position)
|
||||
- No disruption handler on the current function (already enforced by the marking)
|
||||
- VM support in mach.c to rewrite the frame in place
|
||||
|
||||
### Interprocedural Type Inference
|
||||
|
||||
Currently all type inference is intraprocedural (within a single function). Cross-function analysis could:
|
||||
|
||||
- Infer return types from function bodies
|
||||
- Propagate argument types from call sites to callees
|
||||
- Specialize functions for known argument types (cloning)
|
||||
|
||||
### Strength Reduction
|
||||
|
||||
Common patterns that could be lowered to cheaper operations when operand types are known:
|
||||
|
||||
- `multiply x, 2` with proven-int operands → shift left
|
||||
- `divide x, 2` with proven-int → arithmetic shift right
|
||||
- `modulo x, power_of_2` with proven-int → bitwise and
|
||||
|
||||
### Numeric Unboxing (QBE/native path)
|
||||
|
||||
With unified arithmetic and backward type inference, the native backend can identify regions where numeric values remain in registers without boxing/unboxing:
|
||||
|
||||
1. **Guard once**: When backward inference proves a parameter is T_NUM, emit a single type guard at function entry.
|
||||
2. **Unbox**: Convert the tagged JSValue to a raw double register.
|
||||
3. **Operate**: Use native FP/int instructions directly (no function calls, no tag checks).
|
||||
4. **Rebox**: Convert back to tagged JSValue only at rebox points (function returns, calls, stores to arrays/records).
|
||||
|
||||
This requires inserting `unbox`/`rebox` IR annotations (no-ops in the Mach VM, meaningful only to QBE).
|
||||
|
||||
### Loop-Invariant Code Motion
|
||||
|
||||
Type checks that are invariant across loop iterations (checking a variable that doesn't change in the loop body) could be hoisted above the loop. This would require identifying loop boundaries and proving invariance.
|
||||
|
||||
### Algebraic Identity Optimization
|
||||
|
||||
With unified arithmetic, algebraic identities (x+0→x, x*1→x, x*0→0, x/1→x) require knowing operand values at compile time. Since generic `add`/`multiply` operate on any numeric type, the constant-tracking logic in `simplify_algebra` could be extended to handle these for known-constant slots.
|
||||
|
||||
## Nop Convention
|
||||
|
||||
Eliminated instructions are replaced with strings matching `_nop_<prefix>_<counter>`. The prefix identifies which pass created the nop. Nop strings are:
|
||||
|
||||
- Skipped during interpretation (the VM ignores them)
|
||||
- Skipped during QBE emission
|
||||
- Not counted in instruction statistics
|
||||
- Preserved in the instruction array to maintain positional stability for jump targets
|
||||
<!-- new file: docs/testing.md (170 lines) -->
|
||||
---
|
||||
title: "Testing"
|
||||
description: "Writing and running tests in ƿit"
|
||||
weight: 45
|
||||
type: "docs"
|
||||
---
|
||||
|
||||
ƿit has built-in support for writing and running tests. Tests live in the `tests/` directory of a package and are `.cm` modules that return a record of test functions.
|
||||
|
||||
## Writing Tests
|
||||
|
||||
A test file returns a record where each key starting with `test_` is a test function. A test passes if it returns `null` (or nothing). It fails if it returns a text string describing the failure.
|
||||
|
||||
```javascript
|
||||
// tests/math.cm
|
||||
return {
|
||||
test_addition: function() {
|
||||
if (1 + 2 != 3) return "expected 3"
|
||||
},
|
||||
|
||||
test_division: function() {
|
||||
if (10 / 3 != 3.333333333333333333) return "unexpected result"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Test functions take no arguments. Use early returns with a failure message to report errors:
|
||||
|
||||
```javascript
|
||||
test_array_push: function() {
|
||||
var a = [1, 2]
|
||||
a[] = 3
|
||||
if (length(a) != 3) return "expected length 3, got " + text(length(a))
|
||||
if (a[2] != 3) return "expected a[2] to be 3"
|
||||
}
|
||||
```
|
||||
|
||||
## Running Tests
|
||||
|
||||
```bash
|
||||
pit test # run all tests in current package
|
||||
pit test suite # run a specific test file (tests/suite.cm)
|
||||
pit test tests/math # same, with explicit path
|
||||
pit test all # run all tests in current package
|
||||
pit test package <name> # run all tests in a named package
|
||||
pit test package <name> <test> # run a specific test in a named package
|
||||
pit test package all # run tests from all installed packages
|
||||
```
|
||||
|
||||
### Flags
|
||||
|
||||
```bash
|
||||
pit test suite -g # run GC after each test (useful for detecting leaks)
|
||||
pit test suite --verify # enable IR verification during compilation
|
||||
pit test suite --diff # run each test optimized and unoptimized, compare results
|
||||
```
|
||||
|
||||
`--verify` and `--diff` can be combined:
|
||||
|
||||
```bash
|
||||
pit test suite --verify --diff
|
||||
```
|
||||
|
||||
## IR Verification
|
||||
|
||||
The `--verify` flag enables structural validation of the compiler's intermediate representation after each optimizer pass. This catches bugs like invalid slot references, broken jump targets, and malformed instructions.
|
||||
|
||||
When verification fails, errors are printed with the pass name that introduced them:
|
||||
|
||||
```
|
||||
[verify_ir] slot_bounds: slot 12 out of range 0..9 in instruction add_int
|
||||
[verify_ir] 1 errors after dead_code_elimination
|
||||
```
|
||||
|
||||
IR verification adds overhead and is intended for development, not production use.
|
||||
|
||||
## Differential Testing
|
||||
|
||||
Differential testing runs each test through two paths — with the optimizer enabled and with it disabled — and compares results. Any mismatch between the two indicates an optimizer bug.
|
||||
|
||||
### Inline Mode
|
||||
|
||||
The `--diff` flag on `pit test` runs each test module through both paths during a normal test run:
|
||||
|
||||
```bash
|
||||
pit test suite --diff
|
||||
```
|
||||
|
||||
Output includes a mismatch count at the end:
|
||||
|
||||
```
|
||||
Tests: 493 passed, 0 failed, 493 total
|
||||
Diff mismatches: 0
|
||||
```
|
||||
|
||||
### Standalone Mode
|
||||
|
||||
`pit diff` is a dedicated differential testing tool with detailed mismatch reporting:
|
||||
|
||||
```bash
|
||||
pit diff # diff all test files in current package
|
||||
pit diff suite # diff a specific test file
|
||||
pit diff tests/math # same, with explicit path
|
||||
```
|
||||
|
||||
For each test function, it reports whether the optimized and unoptimized results match:
|
||||
|
||||
```
|
||||
tests/suite.cm: 493 passed, 0 failed
|
||||
----------------------------------------
|
||||
Diff: 493 passed, 0 failed, 493 total
|
||||
```
|
||||
|
||||
When a mismatch is found:
|
||||
|
||||
```
|
||||
tests/suite.cm: 492 passed, 1 failed
|
||||
MISMATCH: test_foo: result mismatch opt=42 noopt=43
|
||||
```
|
||||
|
||||
## Fuzz Testing
|
||||
|
||||
The fuzzer generates random self-checking programs, compiles them, and runs them through both optimized and unoptimized paths. Each generated program contains test functions that validate their own expected results, so failures catch both correctness bugs and optimizer mismatches.
|
||||
|
||||
```bash
|
||||
pit fuzz # 100 iterations, random seed
|
||||
pit fuzz 500 # 500 iterations, random seed
|
||||
pit fuzz --seed 42 # 100 iterations, deterministic seed
|
||||
pit fuzz 1000 --seed 42 # 1000 iterations, deterministic seed
|
||||
```
|
||||
|
||||
The fuzzer generates programs that exercise:
|
||||
|
||||
- Integer and float arithmetic with known expected results
|
||||
- Control flow (if/else, while loops)
|
||||
- Closures and captured variable mutation
|
||||
- Records and property access
|
||||
- Arrays and iteration
|
||||
- Higher-order functions
|
||||
- Disruption handling
|
||||
- Text concatenation
|
||||
|
||||
On failure, the generated source is saved to `tests/fuzz_failures/` for reproduction:
|
||||
|
||||
```
|
||||
Fuzzing: 1000 iterations, starting seed=42
|
||||
FAIL seed=57: diff fuzz_3: opt=10 noopt=11
|
||||
saved to tests/fuzz_failures/seed_57.cm
|
||||
----------------------------------------
|
||||
Fuzz: 999 passed, 1 failed, 1000 total
|
||||
Failures saved to tests/fuzz_failures/
|
||||
```
|
||||
|
||||
Saved failure files are valid `.cm` modules that can be run directly or added to the test suite.
|
||||
|
||||
## Test File Organization
|
||||
|
||||
Tests live in the `tests/` directory of a package:
|
||||
|
||||
```
|
||||
mypackage/
|
||||
├── pit.toml
|
||||
├── math.cm
|
||||
└── tests/
|
||||
├── suite.cm # main test suite
|
||||
├── math.cm # math-specific tests
|
||||
└── disrupt.cm # disruption tests
|
||||
```
|
||||
|
||||
All `.cm` files under `tests/` are discovered automatically by `pit test`.
|
||||
16
dump_ast.cm
Normal file
16
dump_ast.cm
Normal file
@@ -0,0 +1,16 @@
|
||||
// dump_ast.cm — pretty-print the folded AST as JSON
|
||||
//
|
||||
// Usage: ./cell --core . dump_ast.cm <file.ce|file.cm>
|
||||
|
||||
var fd = use("fd")
|
||||
var json = use("json")
|
||||
var tokenize = use("tokenize")
|
||||
var parse = use("parse")
|
||||
var fold = use("fold")
|
||||
|
||||
var filename = args[0]
|
||||
var src = text(fd.slurp(filename))
|
||||
var tok = tokenize(src, filename)
|
||||
var ast = parse(tok.tokens, src, filename, tokenize)
|
||||
var folded = fold(ast)
|
||||
print(json.encode(folded))
|
||||
117
dump_mcode.cm
Normal file
117
dump_mcode.cm
Normal file
@@ -0,0 +1,117 @@
|
||||
// dump_mcode.cm — pretty-print mcode IR (before streamlining)
|
||||
//
|
||||
// Usage: ./cell --core . dump_mcode.cm <file.ce|file.cm>
|
||||
|
||||
var fd = use("fd")
|
||||
var json = use("json")
|
||||
var tokenize = use("tokenize")
|
||||
var parse = use("parse")
|
||||
var fold = use("fold")
|
||||
var mcode = use("mcode")
|
||||
|
||||
if (length(args) < 1) {
|
||||
print("usage: cell --core . dump_mcode.cm <file>")
|
||||
return
|
||||
}
|
||||
|
||||
var filename = args[0]
|
||||
var src = text(fd.slurp(filename))
|
||||
var tok = tokenize(src, filename)
|
||||
var ast = parse(tok.tokens, src, filename, tokenize)
|
||||
var folded = fold(ast)
|
||||
var compiled = mcode(folded)
|
||||
|
||||
var pad_right = function(s, w) {
    // Right-pad s with spaces to a minimum width of w.
    var out = s
    while (length(out) < w) {
        out = out + " "
    }
    return out
}
|
||||
|
||||
var fmt_val = function(v) {
    // Format one instruction operand for the listing: literal spellings
    // for null/booleans, decimal for numbers, quoted text, JSON for
    // objects, and a plain text() rendering for anything else.
    if (is_null(v)) return "null"
    if (is_number(v)) return text(v)
    if (is_text(v)) return `"${v}"`
    if (is_object(v)) return json.encode(v)
    if (is_logical(v)) return v ? "true" : "false"
    return text(v)
}
|
||||
|
||||
var dump_function = function(func, name) {
    // Pretty-print one compiled function: a header line, then one line
    // per instruction. Text entries are labels and print flush-left with
    // a trailing colon; "_nop_*" placeholders are suppressed; pc numbers
    // only the real (array-shaped) instructions.
    var nr_args = func.nr_args != null ? func.nr_args : 0
    var nr_slots = func.nr_slots != null ? func.nr_slots : 0
    var nr_close = func.nr_close_slots != null ? func.nr_close_slots : 0
    var instrs = func.instructions
    var idx = 0
    var pc = 0
    var instr = null
    var op = null
    var count = 0
    var pieces = null
    var k = 0
    print(`\n=== ${name} (args=${text(nr_args)}, slots=${text(nr_slots)}, closures=${text(nr_close)}) ===`)
    if (instrs == null || length(instrs) == 0) {
        print(" (empty)")
        return null
    }
    while (idx < length(instrs)) {
        instr = instrs[idx]
        if (is_text(instr)) {
            // Labels print; nop placeholders are skipped silently.
            if (!starts_with(instr, "_nop_")) print(`${instr}:`)
        } else if (is_array(instr)) {
            op = instr[0]
            count = length(instr)
            pieces = []
            k = 1
            // The last two array elements are metadata, not operands.
            while (k < count - 2) {
                push(pieces, fmt_val(instr[k]))
                k = k + 1
            }
            print(` ${pad_right(text(pc), 5)} ${pad_right(op, 14)} ${text(pieces, ", ")}`)
            pc = pc + 1
        }
        idx = idx + 1
    }
    return null
}
|
||||
|
||||
// Driver: dump the top-level function first, then every sub-function.
var top_label = null
var fn_index = 0
var sub = null
var sub_label = null

if (compiled.main != null) {
    top_label = "<main>"
    if (compiled.name != null) {
        top_label = compiled.name
    }
    dump_function(compiled.main, top_label)
}

if (compiled.functions != null) {
    fn_index = 0
    while (fn_index < length(compiled.functions)) {
        sub = compiled.functions[fn_index]
        sub_label = sub.name != null ? sub.name : `<func_${text(fn_index)}>`
        dump_function(sub, `[${text(fn_index)}] ${sub_label}`)
        fn_index = fn_index + 1
    }
}
|
||||
166
dump_stream.cm
Normal file
166
dump_stream.cm
Normal file
@@ -0,0 +1,166 @@
|
||||
// dump_stream.cm — show mcode IR before and after streamlining
|
||||
//
|
||||
// Usage: ./cell --core . dump_stream.cm <file.ce|file.cm>
|
||||
|
||||
var fd = use("fd")
|
||||
var json = use("json")
|
||||
var tokenize = use("tokenize")
|
||||
var parse = use("parse")
|
||||
var fold = use("fold")
|
||||
var mcode = use("mcode")
|
||||
var streamline = use("streamline")
|
||||
|
||||
if (length(args) < 1) {
|
||||
print("usage: cell --core . dump_stream.cm <file>")
|
||||
return
|
||||
}
|
||||
|
||||
var filename = args[0]
|
||||
var src = text(fd.slurp(filename))
|
||||
var tok = tokenize(src, filename)
|
||||
var ast = parse(tok.tokens, src, filename, tokenize)
|
||||
var folded = fold(ast)
|
||||
var compiled = mcode(folded)
|
||||
|
||||
// Deep copy IR for before snapshot
|
||||
var before = json.decode(json.encode(compiled))
|
||||
|
||||
var optimized = streamline(compiled)
|
||||
|
||||
var pad_right = function(s, w) {
    // Append spaces to s until it is at least w characters wide.
    var result = s
    while (length(result) < w) {
        result = result + " "
    }
    return result
}
|
||||
|
||||
var fmt_val = function(value) {
    // Render one operand for display. Checks run in a fixed order:
    // null, number, text, object, logical, then a text() fallback.
    if (is_null(value)) {
        return "null"
    }
    if (is_number(value)) {
        return text(value)
    }
    if (is_text(value)) {
        return `"${value}"`
    }
    if (is_object(value)) {
        return json.encode(value)
    }
    if (is_logical(value)) {
        if (value) {
            return "true"
        }
        return "false"
    }
    return text(value)
}
|
||||
|
||||
var count_stats = function(func) {
    // Summarize a function's instruction stream:
    //   total - instruction positions: live instructions plus nop placeholders
    //   nops  - "_nop_*" placeholder strings left behind by the optimizer
    //   real  - live (array-shaped) instructions, i.e. total - nops
    //   calls - "invoke" instructions among the live ones
    // Label strings are not counted.
    var instrs = func.instructions
    var total = 0
    var nops = 0
    var calls = 0
    var i = 0
    var instr = null
    if (instrs == null) {
        return {total: 0, nops: 0, real: 0, calls: 0}
    }
    while (i < length(instrs)) {
        instr = instrs[i]
        if (is_text(instr)) {
            if (starts_with(instr, "_nop_")) {
                // A nop replaces an eliminated instruction in place, so it
                // still occupies an instruction position and must count
                // toward total. Previously it was only counted in nops,
                // which made real (total - nops) undercount the live
                // instructions by the number of nops.
                total = total + 1
                nops = nops + 1
            }
        } else if (is_array(instr)) {
            total = total + 1
            if (instr[0] == "invoke") {
                calls = calls + 1
            }
        }
        i = i + 1
    }
    return {total: total, nops: nops, real: total - nops, calls: calls}
}
|
||||
|
||||
var dump_function = function(func, show_nops) {
    // Print the instruction listing for one function. Labels print
    // flush-left with a trailing colon. "_nop_*" placeholders are shown
    // (and take a pc number) only when show_nops is true; pc otherwise
    // advances only for real (array-shaped) instructions.
    var instrs = func.instructions
    var idx = 0
    var pc = 0
    var instr = null
    var op = null
    var count = 0
    var pieces = null
    var k = 0
    if (instrs == null || length(instrs) == 0) {
        return null
    }
    while (idx < length(instrs)) {
        instr = instrs[idx]
        if (is_text(instr)) {
            if (starts_with(instr, "_nop_")) {
                if (show_nops) {
                    print(` ${pad_right(text(pc), 5)} --- nop ---`)
                    pc = pc + 1
                }
            } else {
                print(`${instr}:`)
            }
        } else if (is_array(instr)) {
            op = instr[0]
            count = length(instr)
            pieces = []
            k = 1
            // The last two array elements are metadata, not operands.
            while (k < count - 2) {
                push(pieces, fmt_val(instr[k]))
                k = k + 1
            }
            print(` ${pad_right(text(pc), 5)} ${pad_right(op, 14)} ${text(pieces, ", ")}`)
            pc = pc + 1
        }
        idx = idx + 1
    }
    return null
}
|
||||
|
||||
var dump_pair = function(before_func, after_func, name) {
    // Compare one function before/after streamlining: print summary
    // statistics for both versions, then the streamlined listing with
    // nops hidden.
    var arg_count = after_func.nr_args != null ? after_func.nr_args : 0
    var slot_count = after_func.nr_slots != null ? after_func.nr_slots : 0
    var stats_before = count_stats(before_func)
    var stats_after = count_stats(after_func)
    print(`\n=== ${name} (args=${text(arg_count)}, slots=${text(slot_count)}) ===`)
    print(` before: ${text(stats_before.total)} instructions, ${text(stats_before.calls)} invokes`)
    print(` after: ${text(stats_after.real)} instructions (${text(stats_after.nops)} eliminated), ${text(stats_after.calls)} invokes`)
    print("\n -- streamlined --")
    dump_function(after_func, false)
    return null
}
|
||||
|
||||
// Driver: dump the main function, then each sub-function, pairing the
// pre-streamline snapshot with the optimized version by index.
var top_label = null
var fn_index = 0
var after_fn = null
var before_fn = null
var fn_label = null

if (optimized.main != null && before.main != null) {
    top_label = "<main>"
    if (optimized.name != null) {
        top_label = optimized.name
    }
    dump_pair(before.main, optimized.main, top_label)
}

if (optimized.functions != null && before.functions != null) {
    fn_index = 0
    while (fn_index < length(optimized.functions)) {
        after_fn = optimized.functions[fn_index]
        before_fn = before.functions[fn_index]
        fn_label = after_fn.name != null ? after_fn.name : `<func_${text(fn_index)}>`
        dump_pair(before_fn, after_fn, `[${text(fn_index)}] ${fn_label}`)
        fn_index = fn_index + 1
    }
}
|
||||
237
dump_types.cm
Normal file
237
dump_types.cm
Normal file
@@ -0,0 +1,237 @@
|
||||
// dump_types.cm — show streamlined IR with type annotations
|
||||
//
|
||||
// Usage: ./cell --core . dump_types.cm <file.ce|file.cm>
|
||||
|
||||
var fd = use("fd")
|
||||
var json = use("json")
|
||||
var tokenize = use("tokenize")
|
||||
var parse = use("parse")
|
||||
var fold = use("fold")
|
||||
var mcode = use("mcode")
|
||||
var streamline = use("streamline")
|
||||
|
||||
if (length(args) < 1) {
|
||||
print("usage: cell --core . dump_types.cm <file>")
|
||||
return
|
||||
}
|
||||
|
||||
var filename = args[0]
|
||||
var src = text(fd.slurp(filename))
|
||||
var tok = tokenize(src, filename)
|
||||
var ast = parse(tok.tokens, src, filename, tokenize)
|
||||
var folded = fold(ast)
|
||||
var compiled = mcode(folded)
|
||||
var optimized = streamline(compiled)
|
||||
|
||||
// Type constants
|
||||
def T_UNKNOWN = "unknown"
|
||||
def T_INT = "int"
|
||||
def T_FLOAT = "float"
|
||||
def T_NUM = "num"
|
||||
def T_TEXT = "text"
|
||||
def T_BOOL = "bool"
|
||||
def T_NULL = "null"
|
||||
def T_ARRAY = "array"
|
||||
def T_RECORD = "record"
|
||||
def T_FUNCTION = "function"
|
||||
|
||||
def int_result_ops = {
|
||||
bitnot: true, bitand: true, bitor: true,
|
||||
bitxor: true, shl: true, shr: true, ushr: true
|
||||
}
|
||||
def bool_result_ops = {
|
||||
eq_int: true, ne_int: true, lt_int: true, gt_int: true,
|
||||
le_int: true, ge_int: true,
|
||||
eq_float: true, ne_float: true, lt_float: true, gt_float: true,
|
||||
le_float: true, ge_float: true,
|
||||
eq_text: true, ne_text: true, lt_text: true, gt_text: true,
|
||||
le_text: true, ge_text: true,
|
||||
eq_bool: true, ne_bool: true,
|
||||
not: true, and: true, or: true,
|
||||
is_int: true, is_text: true, is_num: true,
|
||||
is_bool: true, is_null: true, is_identical: true,
|
||||
is_array: true, is_func: true, is_record: true, is_stone: true
|
||||
}
|
||||
|
||||
var access_value_type = function(val) {
    // Classify the constant carried by an "access" instruction.
    if (is_number(val)) {
        if (is_integer(val)) return T_INT
        return T_FLOAT
    }
    if (is_text(val)) return T_TEXT
    return T_UNKNOWN
}
|
||||
|
||||
var track_types = function(slot_types, instr) {
    // Forward type propagation for a single instruction: record the type
    // each destination slot is known to hold after executing it. Branches
    // test disjoint opcode sets, so like-typed cases are merged.
    var opcode = instr[0]
    var moved = null
    if (opcode == "access") {
        slot_types[text(instr[1])] = access_value_type(instr[2])
    } else if (opcode == "int" || opcode == "length" || int_result_ops[opcode] == true) {
        slot_types[text(instr[1])] = T_INT
    } else if (opcode == "true" || opcode == "false" || bool_result_ops[opcode] == true) {
        slot_types[text(instr[1])] = T_BOOL
    } else if (opcode == "null") {
        slot_types[text(instr[1])] = T_NULL
    } else if (opcode == "move") {
        // Copy the source slot's type; unknown if never seen.
        moved = slot_types[text(instr[2])]
        slot_types[text(instr[1])] = moved != null ? moved : T_UNKNOWN
    } else if (opcode == "concat" || opcode == "typeof") {
        slot_types[text(instr[1])] = T_TEXT
    } else if (opcode == "array") {
        slot_types[text(instr[1])] = T_ARRAY
    } else if (opcode == "record") {
        slot_types[text(instr[1])] = T_RECORD
    } else if (opcode == "function") {
        slot_types[text(instr[1])] = T_FUNCTION
    } else if (opcode == "invoke" || opcode == "tail_invoke") {
        // Note: the invoke result slot is operand 2, not operand 1.
        slot_types[text(instr[2])] = T_UNKNOWN
    } else if (opcode == "load_field" || opcode == "load_index" ||
            opcode == "load_dynamic" || opcode == "pop" || opcode == "get") {
        slot_types[text(instr[1])] = T_UNKNOWN
    } else if (opcode == "add" || opcode == "subtract" || opcode == "multiply" ||
            opcode == "divide" || opcode == "modulo" || opcode == "pow" || opcode == "negate") {
        // Unified arithmetic: the result may be int or float, so unknown.
        slot_types[text(instr[1])] = T_UNKNOWN
    }
    return null
}
|
||||
|
||||
var pad_right = function(s, w) {
    // Space-pad s on the right to width w (no-op if already wide enough).
    var padded = s
    while (length(padded) < w) {
        padded = padded + " "
    }
    return padded
}
|
||||
|
||||
var fmt_val = function(value) {
    // Format one operand for the annotated listing; order of the checks
    // matches the other dump tools.
    if (is_null(value)) return "null"
    if (is_number(value)) return text(value)
    if (is_text(value)) return `"${value}"`
    if (is_object(value)) return json.encode(value)
    if (is_logical(value)) {
        return value ? "true" : "false"
    }
    return text(value)
}
|
||||
|
||||
// Build the "; sN:type ..." annotation for one instruction.
var type_annotation = function(slot_types, instr) {
    // Collect an "sN:type" note for every numeric operand whose slot
    // currently has a known (non-unknown) type. Returns "" when nothing
    // is known. The last two array elements are metadata, not operands.
    var pieces = []
    var limit = length(instr) - 2
    var k = 1
    var operand = null
    var known = null
    while (k < limit) {
        operand = instr[k]
        if (is_number(operand)) {
            known = slot_types[text(operand)]
            if (known != null && known != T_UNKNOWN) {
                push(pieces, `s${text(operand)}:${known}`)
            }
        }
        k = k + 1
    }
    if (length(pieces) == 0) return ""
    return text(pieces, " ")
}
|
||||
|
||||
var dump_function_typed = function(func, name) {
    // Print one streamlined function with per-instruction type notes.
    // Slot types are tracked forward within a run of instructions and
    // cleared at every label, since control may enter there from
    // elsewhere. Nop placeholders are skipped entirely.
    var nr_args = func.nr_args != null ? func.nr_args : 0
    var nr_slots = func.nr_slots != null ? func.nr_slots : 0
    var instrs = func.instructions
    var slot_types = {}
    var idx = 0
    var pc = 0
    var instr = null
    var opcode = null
    var count = 0
    var notes = null
    var pieces = null
    var k = 0
    var listing = null
    print(`\n=== ${name} (args=${text(nr_args)}, slots=${text(nr_slots)}) ===`)
    if (instrs == null || length(instrs) == 0) {
        print(" (empty)")
        return null
    }
    while (idx < length(instrs)) {
        instr = instrs[idx]
        if (is_text(instr)) {
            if (starts_with(instr, "_nop_")) {
                idx = idx + 1
                continue
            }
            // Label: invalidate all tracked slot types.
            slot_types = {}
            print(`${instr}:`)
        } else if (is_array(instr)) {
            opcode = instr[0]
            count = length(instr)
            // Annotate with the types known BEFORE this instruction runs.
            notes = type_annotation(slot_types, instr)
            pieces = []
            k = 1
            while (k < count - 2) {
                push(pieces, fmt_val(instr[k]))
                k = k + 1
            }
            listing = pad_right(` ${pad_right(text(pc), 5)} ${pad_right(opcode, 14)} ${text(pieces, ", ")}`, 50)
            if (length(notes) > 0) {
                print(`${listing} ; ${notes}`)
            } else {
                print(listing)
            }
            track_types(slot_types, instr)
            pc = pc + 1
        }
        idx = idx + 1
    }
    return null
}
|
||||
|
||||
// Driver: dump the top-level function, then every sub-function, each
// with type annotations.
var top_label = null
var fn_index = 0
var sub = null
var sub_label = null

if (optimized.main != null) {
    top_label = "<main>"
    if (optimized.name != null) {
        top_label = optimized.name
    }
    dump_function_typed(optimized.main, top_label)
}

if (optimized.functions != null) {
    fn_index = 0
    while (fn_index < length(optimized.functions)) {
        sub = optimized.functions[fn_index]
        sub_label = sub.name != null ? sub.name : `<func_${text(fn_index)}>`
        dump_function_typed(sub, `[${text(fn_index)}] ${sub_label}`)
        fn_index = fn_index + 1
    }
}
|
||||
34
fd.c
34
fd.c
@@ -504,7 +504,7 @@ JSC_SCALL(fd_readdir,
|
||||
ret = JS_NewArray(js);
|
||||
do {
|
||||
if (strcmp(ffd.cFileName, ".") == 0 || strcmp(ffd.cFileName, "..") == 0) continue;
|
||||
JS_ArrayPush(js, ret,JS_NewString(js, ffd.cFileName));
|
||||
JS_ArrayPush(js, &ret, JS_NewString(js, ffd.cFileName));
|
||||
} while (FindNextFile(hFind, &ffd) != 0);
|
||||
FindClose(hFind);
|
||||
}
|
||||
@@ -516,7 +516,7 @@ JSC_SCALL(fd_readdir,
|
||||
ret = JS_NewArray(js);
|
||||
while ((dir = readdir(d)) != NULL) {
|
||||
if (strcmp(dir->d_name, ".") == 0 || strcmp(dir->d_name, "..") == 0) continue;
|
||||
JS_ArrayPush(js, ret, JS_NewString(js, dir->d_name));
|
||||
JS_ArrayPush(js, &ret, JS_NewString(js, dir->d_name));
|
||||
}
|
||||
closedir(d);
|
||||
} else {
|
||||
@@ -565,18 +565,22 @@ JSC_CCALL(fd_slurpwrite,
|
||||
if (!str) return JS_EXCEPTION;
|
||||
int fd = open(str, O_WRONLY | O_CREAT | O_TRUNC, 0644);
|
||||
if (fd < 0) {
|
||||
ret = JS_ThrowInternalError(js, "open failed for %s: %s", str, strerror(errno));
|
||||
JS_FreeCString(js, str);
|
||||
return JS_ThrowInternalError(js, "open failed for %s: %s", str, strerror(errno));
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
ssize_t written = write(fd, data, len);
|
||||
close(fd);
|
||||
|
||||
if (written != (ssize_t)len) {
|
||||
ret = JS_ThrowInternalError(js, "write failed for %s: %s", str, strerror(errno));
|
||||
JS_FreeCString(js, str);
|
||||
return ret;
|
||||
}
|
||||
|
||||
JS_FreeCString(js, str);
|
||||
|
||||
if (written != (ssize_t)len)
|
||||
return JS_ThrowInternalError(js, "write failed for %s: %s", str, strerror(errno));
|
||||
|
||||
return JS_NULL;
|
||||
)
|
||||
|
||||
@@ -598,7 +602,7 @@ static void visit_directory(JSContext *js, JSValue results, int *result_count, c
|
||||
} else {
|
||||
strcpy(item_rel, ffd.cFileName);
|
||||
}
|
||||
JS_SetPropertyUint32(js, results, (*result_count)++, JS_NewString(js, item_rel));
|
||||
JS_SetPropertyNumber(js, results, (*result_count)++, JS_NewString(js, item_rel));
|
||||
|
||||
if (recurse) {
|
||||
struct stat st;
|
||||
@@ -623,7 +627,7 @@ static void visit_directory(JSContext *js, JSValue results, int *result_count, c
|
||||
} else {
|
||||
strcpy(item_rel, dir->d_name);
|
||||
}
|
||||
JS_SetPropertyUint32(js, results, (*result_count)++, JS_NewString(js, item_rel));
|
||||
JS_SetPropertyNumber(js, results, (*result_count)++, JS_NewString(js, item_rel));
|
||||
|
||||
if (recurse) {
|
||||
struct stat st;
|
||||
@@ -664,17 +668,21 @@ JSC_CCALL(fd_realpath,
|
||||
#ifdef _WIN32
|
||||
char resolved[PATH_MAX];
|
||||
DWORD len = GetFullPathNameA(path, PATH_MAX, resolved, NULL);
|
||||
JS_FreeCString(js, path);
|
||||
if (len == 0 || len >= PATH_MAX) {
|
||||
return JS_ThrowInternalError(js, "realpath failed for %s: %s", path, strerror(errno));
|
||||
JSValue err = JS_ThrowInternalError(js, "realpath failed for %s: %s", path, strerror(errno));
|
||||
JS_FreeCString(js, path);
|
||||
return err;
|
||||
}
|
||||
JS_FreeCString(js, path);
|
||||
return JS_NewString(js, resolved);
|
||||
#else
|
||||
char *resolved = realpath(path, NULL);
|
||||
JS_FreeCString(js, path);
|
||||
if (!resolved) {
|
||||
return JS_ThrowInternalError(js, "realpath failed for %s: %s", path, strerror(errno));
|
||||
JSValue err = JS_ThrowInternalError(js, "realpath failed for %s: %s", path, strerror(errno));
|
||||
JS_FreeCString(js, path);
|
||||
return err;
|
||||
}
|
||||
JS_FreeCString(js, path);
|
||||
JSValue result = JS_NewString(js, resolved);
|
||||
free(resolved);
|
||||
return result;
|
||||
|
||||
19
fd.cm
19
fd.cm
@@ -1,4 +1,4 @@
|
||||
var fd = this
|
||||
var fd = native
|
||||
var wildstar = use('wildstar')
|
||||
|
||||
function last_pos(str, sep) {
|
||||
@@ -12,11 +12,11 @@ function last_pos(str, sep) {
|
||||
|
||||
// Helper to join paths
|
||||
function join_paths(base, rel) {
|
||||
base = replace(base, /\/+$/, "")
|
||||
rel = replace(rel, /^\/+/, "")
|
||||
if (!base) return rel
|
||||
if (!rel) return base
|
||||
return base + "/" + rel
|
||||
var b = replace(base, /\/+$/, "")
|
||||
var r = replace(rel, /^\/+/, "")
|
||||
if (!b) return r
|
||||
if (!r) return b
|
||||
return b + "/" + r
|
||||
}
|
||||
|
||||
fd.join_paths = join_paths
|
||||
@@ -39,7 +39,8 @@ fd.stem = function stem(path) {
|
||||
}
|
||||
|
||||
fd.globfs = function(globs, dir) {
|
||||
if (dir == null) dir = "."
|
||||
var _dir = dir
|
||||
if (_dir == null) _dir = "."
|
||||
var results = []
|
||||
|
||||
function check_neg(path) {
|
||||
@@ -88,9 +89,9 @@ fd.globfs = function(globs, dir) {
|
||||
});
|
||||
}
|
||||
|
||||
var st = fd.stat(dir)
|
||||
var st = fd.stat(_dir)
|
||||
if (st && st.isDirectory) {
|
||||
visit(dir, "")
|
||||
visit(_dir, "")
|
||||
}
|
||||
|
||||
return results
|
||||
|
||||
@@ -324,7 +324,7 @@ static void listfiles_cb(const char *path, void *userdata) {
|
||||
// Playdate listfiles returns just the name, but sometimes with slash for dir?
|
||||
// Docs say "names of files".
|
||||
|
||||
JS_SetPropertyUint32(ctx->js, ctx->array, ctx->index++, JS_NewString(ctx->js, path));
|
||||
JS_SetPropertyNumber(ctx->js, ctx->array, ctx->index++, JS_NewString(ctx->js, path));
|
||||
}
|
||||
|
||||
JSC_SCALL(fd_readdir,
|
||||
@@ -427,7 +427,7 @@ static void enum_cb(const char *name, void *userdata) {
|
||||
strcpy(item_rel, name);
|
||||
}
|
||||
|
||||
JS_SetPropertyUint32(ctx->js, ctx->results, (*ctx->count)++, JS_NewString(ctx->js, item_rel));
|
||||
JS_SetPropertyNumber(ctx->js, ctx->results, (*ctx->count)++, JS_NewString(ctx->js, item_rel));
|
||||
|
||||
if (ctx->recurse) {
|
||||
// Check if directory
|
||||
|
||||
70
fold.cm
70
fold.cm
@@ -15,10 +15,18 @@ var fold = function(ast) {
|
||||
return k == "number" || k == "text" || k == "true" || k == "false" || k == "null"
|
||||
}
|
||||
|
||||
// Only intrinsics that can NEVER disrupt regardless of argument types
|
||||
var pure_intrinsics = {
|
||||
is_array: true, is_text: true, is_number: true, is_integer: true,
|
||||
is_function: true, is_logical: true, is_null: true, is_object: true,
|
||||
is_stone: true
|
||||
}
|
||||
|
||||
var is_pure = function(expr) {
|
||||
if (expr == null) return true
|
||||
var k = expr.kind
|
||||
var i = 0
|
||||
var target = null
|
||||
if (k == "number" || k == "text" || k == "true" || k == "false" ||
|
||||
k == "null" || k == "name" || k == "this") return true
|
||||
if (k == "function") return true
|
||||
@@ -47,6 +55,17 @@ var fold = function(ast) {
|
||||
if (k == "==" || k == "!=" || k == "&&" || k == "||") {
|
||||
return is_pure(expr.left) && is_pure(expr.right)
|
||||
}
|
||||
if (k == "(") {
|
||||
target = expr.expression
|
||||
if (target != null && target.intrinsic == true && pure_intrinsics[target.name] == true) {
|
||||
i = 0
|
||||
while (i < length(expr.list)) {
|
||||
if (!is_pure(expr.list[i])) return false
|
||||
i = i + 1
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
@@ -158,6 +177,7 @@ var fold = function(ast) {
|
||||
var name = null
|
||||
var sv = null
|
||||
var item = null
|
||||
var rhs_target = null
|
||||
while (i < length(stmts)) {
|
||||
stmt = stmts[i]
|
||||
kind = stmt.kind
|
||||
@@ -169,6 +189,19 @@ var fold = function(ast) {
|
||||
register_const(fn_nr, name, stmt.right)
|
||||
}
|
||||
}
|
||||
if (name != null && stmt.right != null && stmt.right.kind == "(") {
|
||||
rhs_target = stmt.right.expression
|
||||
if (rhs_target != null && rhs_target.intrinsic == true) {
|
||||
sv = scope_var(fn_nr, name)
|
||||
if (sv != null && sv.type_tag == null) {
|
||||
if (rhs_target.name == "array") sv.type_tag = "array"
|
||||
else if (rhs_target.name == "record") sv.type_tag = "record"
|
||||
else if (rhs_target.name == "text") sv.type_tag = "text"
|
||||
else if (rhs_target.name == "number") sv.type_tag = "number"
|
||||
else if (rhs_target.name == "blob") sv.type_tag = "blob"
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (kind == "function") {
|
||||
name = stmt.name
|
||||
if (name != null && stmt.arity != null) {
|
||||
@@ -320,6 +353,8 @@ var fold = function(ast) {
|
||||
var ar = null
|
||||
var akey = null
|
||||
var tv = null
|
||||
var att = null
|
||||
var arg = null
|
||||
|
||||
// Recurse into children first (bottom-up)
|
||||
if (k == "+" || k == "-" || k == "*" || k == "/" || k == "%" ||
|
||||
@@ -385,6 +420,10 @@ var fold = function(ast) {
|
||||
return copy_loc(expr, {kind: lit.kind, value: lit.value, number: lit.number})
|
||||
}
|
||||
}
|
||||
sv = scope_var(fn_nr, expr.name)
|
||||
if (sv != null && sv.type_tag != null) {
|
||||
expr.type_tag = sv.type_tag
|
||||
}
|
||||
return expr
|
||||
}
|
||||
|
||||
@@ -497,7 +536,7 @@ var fold = function(ast) {
|
||||
return expr
|
||||
}
|
||||
|
||||
// Call: stamp arity
|
||||
// Call: stamp arity and fold intrinsic type checks
|
||||
if (k == "(") {
|
||||
target = expr.expression
|
||||
if (target != null && target.kind == "name" && target.level == 0) {
|
||||
@@ -506,6 +545,30 @@ var fold = function(ast) {
|
||||
if (fn_arities[akey] != null) ar = fn_arities[akey][target.name]
|
||||
if (ar != null) expr.arity = ar
|
||||
}
|
||||
if (target != null && target.intrinsic == true && length(expr.list) == 1) {
|
||||
arg = expr.list[0]
|
||||
att = null
|
||||
if (arg.type_tag != null) {
|
||||
att = arg.type_tag
|
||||
} else if (arg.kind == "name" && arg.level == 0) {
|
||||
sv = scope_var(fn_nr, arg.name)
|
||||
if (sv != null) att = sv.type_tag
|
||||
}
|
||||
if (att != null) {
|
||||
if (target.name == "is_array") return make_bool(att == "array", expr)
|
||||
if (target.name == "is_text") return make_bool(att == "text", expr)
|
||||
if (target.name == "is_number") return make_bool(att == "number" || att == "integer", expr)
|
||||
if (target.name == "is_integer") return make_bool(att == "integer", expr)
|
||||
if (target.name == "is_function") return make_bool(att == "function", expr)
|
||||
if (target.name == "is_logical") return make_bool(att == "logical", expr)
|
||||
if (target.name == "is_null") return make_bool(att == "null", expr)
|
||||
if (target.name == "is_object") return make_bool(att == "record", expr)
|
||||
if (target.name == "length") {
|
||||
if (att == "array") expr.hint = "array_length"
|
||||
else if (att == "text") expr.hint = "text_length"
|
||||
}
|
||||
}
|
||||
}
|
||||
return expr
|
||||
}
|
||||
|
||||
@@ -525,6 +588,7 @@ var fold = function(ast) {
|
||||
|
||||
if (k == "var" || k == "def") {
|
||||
stmt.right = fold_expr(stmt.right, fn_nr)
|
||||
if (is_pure(stmt.right)) stmt.pure = true
|
||||
return stmt
|
||||
}
|
||||
if (k == "var_list") {
|
||||
@@ -631,6 +695,10 @@ var fold = function(ast) {
|
||||
}
|
||||
}
|
||||
}
|
||||
// Dead pure call elimination: standalone pure calls with no result
|
||||
if (stmt.kind == "call" && is_pure(stmt.expression)) {
|
||||
stmt.dead = true
|
||||
}
|
||||
// Dead function elimination
|
||||
if (stmt.kind == "function" && stmt.name != null) {
|
||||
sv = scope_var(fn_nr, stmt.name)
|
||||
|
||||
22823
fold.cm.mcode
Normal file
22823
fold.cm.mcode
Normal file
File diff suppressed because it is too large
Load Diff
BIN
fold_new.mach
Normal file
BIN
fold_new.mach
Normal file
Binary file not shown.
278
fuzz.ce
Normal file
278
fuzz.ce
Normal file
@@ -0,0 +1,278 @@
|
||||
// fuzz.ce — fuzzer driver: generates random programs, runs differential, saves failures
|
||||
//
|
||||
// Usage:
|
||||
// cell fuzz - run 100 iterations with a random seed
|
||||
// cell fuzz 500 - run 500 iterations with a random seed
|
||||
// cell fuzz --seed 42 - run 100 iterations starting at seed 42
|
||||
// cell fuzz 500 --seed 42 - run 500 iterations starting at seed 42
|
||||
//
|
||||
// Each iteration generates a random self-checking program, compiles it,
|
||||
// runs it through both optimized and unoptimized paths, and compares results.
|
||||
// Failures are saved to tests/fuzz_failures/ for reproduction.
|
||||
var fd = use('fd')
|
||||
var time = use('time')
|
||||
var json = use('json')
|
||||
|
||||
var os_ref = use('os')
|
||||
var analyze = os_ref.analyze
|
||||
var run_ast_fn = os_ref.run_ast_fn
|
||||
var run_ast_noopt_fn = os_ref.run_ast_noopt_fn
|
||||
|
||||
var fuzzgen = use('fuzzgen')
|
||||
|
||||
var _args = args == null ? [] : args
|
||||
|
||||
// Parse arguments: fuzz [iterations] [--seed N]
|
||||
var iterations = 100
|
||||
var start_seed = null
|
||||
var i = 0
|
||||
var n = null
|
||||
var run_err = null
|
||||
var _run_one = null
|
||||
|
||||
while (i < length(_args)) {
|
||||
if (_args[i] == '--seed' && i + 1 < length(_args)) {
|
||||
start_seed = number(_args[i + 1])
|
||||
i = i + 2
|
||||
} else {
|
||||
n = number(_args[i])
|
||||
if (n != null && n > 0) iterations = n
|
||||
i = i + 1
|
||||
}
|
||||
}
|
||||
|
||||
if (start_seed == null) {
|
||||
start_seed = floor(time.number() * 1000) % 1000000
|
||||
}
|
||||
|
||||
if (!run_ast_noopt_fn) {
|
||||
log.console("error: run_ast_noopt_fn not available (rebuild bootstrap)")
|
||||
$stop()
|
||||
return
|
||||
}
|
||||
|
||||
// Ensure failures directory exists
|
||||
var failures_dir = "tests/fuzz_failures"
|
||||
|
||||
function ensure_dir(path) {
|
||||
if (fd.is_dir(path)) return
|
||||
var parts = array(path, '/')
|
||||
var current = ''
|
||||
var j = 0
|
||||
while (j < length(parts)) {
|
||||
if (parts[j] != '') {
|
||||
current = current + parts[j] + '/'
|
||||
if (!fd.is_dir(current)) {
|
||||
fd.mkdir(current)
|
||||
}
|
||||
}
|
||||
j = j + 1
|
||||
}
|
||||
}
|
||||
|
||||
// Deep comparison
|
||||
function values_equal(a, b) {
|
||||
var j = 0
|
||||
if (a == b) return true
|
||||
if (is_null(a) && is_null(b)) return true
|
||||
if (is_null(a) || is_null(b)) return false
|
||||
if (is_array(a) && is_array(b)) {
|
||||
if (length(a) != length(b)) return false
|
||||
j = 0
|
||||
while (j < length(a)) {
|
||||
if (!values_equal(a[j], b[j])) return false
|
||||
j = j + 1
|
||||
}
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
function describe(val) {
|
||||
if (is_null(val)) return "null"
|
||||
if (is_text(val)) return `"${val}"`
|
||||
if (is_number(val)) return text(val)
|
||||
if (is_logical(val)) return text(val)
|
||||
if (is_function(val)) return "<function>"
|
||||
return "<other>"
|
||||
}
|
||||
|
||||
// Run a single fuzz iteration
|
||||
function run_fuzz(seed_val) {
|
||||
var src = fuzzgen.generate(seed_val)
|
||||
var name = "fuzz_" + text(seed_val)
|
||||
var ast = null
|
||||
var mod_opt = null
|
||||
var mod_noopt = null
|
||||
var opt_err = null
|
||||
var noopt_err = null
|
||||
var errors = []
|
||||
var keys = null
|
||||
var k = 0
|
||||
var key = null
|
||||
var ret = null
|
||||
var _run = null
|
||||
var run_err = null
|
||||
var keys2 = null
|
||||
var k2 = 0
|
||||
var key2 = null
|
||||
var opt_result = null
|
||||
var noopt_result = null
|
||||
var opt_fn_err = null
|
||||
var noopt_fn_err = null
|
||||
var _run_opt = null
|
||||
var _run_noopt = null
|
||||
|
||||
// Parse
|
||||
var _parse = function() {
|
||||
ast = analyze(src, name + ".cm")
|
||||
} disruption {
|
||||
push(errors, "parse error")
|
||||
}
|
||||
_parse()
|
||||
if (length(errors) > 0) return {seed: seed_val, errors: errors, src: src}
|
||||
|
||||
// Run optimized
|
||||
var _opt = function() {
|
||||
mod_opt = run_ast_fn(name, ast, {use: function(p) { return use(p) }})
|
||||
} disruption {
|
||||
opt_err = "disrupted"
|
||||
}
|
||||
_opt()
|
||||
|
||||
// Run unoptimized
|
||||
var _noopt = function() {
|
||||
mod_noopt = run_ast_noopt_fn(name + "_noopt", ast, {use: function(p) { return use(p) }})
|
||||
} disruption {
|
||||
noopt_err = "disrupted"
|
||||
}
|
||||
_noopt()
|
||||
|
||||
// Check module-level behavior
|
||||
if (opt_err != noopt_err) {
|
||||
push(errors, `module load: opt=${opt_err != null ? opt_err : "ok"} noopt=${noopt_err != null ? noopt_err : "ok"}`)
|
||||
return {seed: seed_val, errors: errors, src: src}
|
||||
}
|
||||
if (opt_err != null) {
|
||||
// Both failed to load — consistent
|
||||
return {seed: seed_val, errors: errors, src: src}
|
||||
}
|
||||
|
||||
// Run self-checks (optimized module)
|
||||
if (is_object(mod_opt)) {
|
||||
keys = array(mod_opt)
|
||||
k = 0
|
||||
while (k < length(keys)) {
|
||||
key = keys[k]
|
||||
if (is_function(mod_opt[key])) {
|
||||
ret = null
|
||||
run_err = null
|
||||
_run = function() {
|
||||
ret = mod_opt[key]()
|
||||
} disruption {
|
||||
run_err = "disrupted"
|
||||
}
|
||||
_run()
|
||||
|
||||
if (is_text(ret)) {
|
||||
push(errors, `self-check ${key}: ${ret}`)
|
||||
}
|
||||
if (run_err != null) {
|
||||
push(errors, `self-check ${key}: unexpected disruption`)
|
||||
}
|
||||
}
|
||||
k = k + 1
|
||||
}
|
||||
}
|
||||
|
||||
// Differential check on each function
|
||||
if (is_object(mod_opt) && is_object(mod_noopt)) {
|
||||
keys2 = array(mod_opt)
|
||||
k2 = 0
|
||||
while (k2 < length(keys2)) {
|
||||
key2 = keys2[k2]
|
||||
if (is_function(mod_opt[key2]) && is_function(mod_noopt[key2])) {
|
||||
opt_result = null
|
||||
noopt_result = null
|
||||
opt_fn_err = null
|
||||
noopt_fn_err = null
|
||||
|
||||
_run_opt = function() {
|
||||
opt_result = mod_opt[key2]()
|
||||
} disruption {
|
||||
opt_fn_err = "disrupted"
|
||||
}
|
||||
_run_opt()
|
||||
|
||||
_run_noopt = function() {
|
||||
noopt_result = mod_noopt[key2]()
|
||||
} disruption {
|
||||
noopt_fn_err = "disrupted"
|
||||
}
|
||||
_run_noopt()
|
||||
|
||||
if (opt_fn_err != noopt_fn_err) {
|
||||
push(errors, `diff ${key2}: opt=${opt_fn_err != null ? opt_fn_err : "ok"} noopt=${noopt_fn_err != null ? noopt_fn_err : "ok"}`)
|
||||
} else if (!values_equal(opt_result, noopt_result)) {
|
||||
push(errors, `diff ${key2}: opt=${describe(opt_result)} noopt=${describe(noopt_result)}`)
|
||||
}
|
||||
}
|
||||
k2 = k2 + 1
|
||||
}
|
||||
}
|
||||
|
||||
return {seed: seed_val, errors: errors, src: src}
|
||||
}
|
||||
|
||||
// Main loop
|
||||
log.console(`Fuzzing: ${text(iterations)} iterations, starting seed=${text(start_seed)}`)
|
||||
var total_pass = 0
|
||||
var total_fail = 0
|
||||
var result = null
|
||||
var j = 0
|
||||
var current_seed = 0
|
||||
var fail_path = null
|
||||
|
||||
i = 0
|
||||
while (i < iterations) {
|
||||
current_seed = start_seed + i
|
||||
run_err = null
|
||||
_run_one = function() {
|
||||
result = run_fuzz(current_seed)
|
||||
} disruption {
|
||||
run_err = "generator crashed"
|
||||
}
|
||||
_run_one()
|
||||
|
||||
if (run_err != null) {
|
||||
result = {seed: current_seed, errors: [run_err], src: "// generator crashed"}
|
||||
}
|
||||
|
||||
if (length(result.errors) > 0) {
|
||||
total_fail = total_fail + 1
|
||||
log.console(` FAIL seed=${text(current_seed)}: ${result.errors[0]}`)
|
||||
|
||||
// Save failure source for reproduction
|
||||
ensure_dir(failures_dir)
|
||||
fail_path = failures_dir + "/seed_" + text(current_seed) + ".cm"
|
||||
fd.slurpwrite(fail_path, stone(blob(result.src)))
|
||||
log.console(` saved to ${fail_path}`)
|
||||
} else {
|
||||
total_pass = total_pass + 1
|
||||
}
|
||||
|
||||
// Progress report every 100 iterations
|
||||
if ((i + 1) % 100 == 0) {
|
||||
log.console(` progress: ${text(i + 1)}/${text(iterations)} (${text(total_pass)} passed, ${text(total_fail)} failed)`)
|
||||
}
|
||||
|
||||
i = i + 1
|
||||
}
|
||||
|
||||
log.console(`----------------------------------------`)
|
||||
log.console(`Fuzz: ${text(total_pass)} passed, ${text(total_fail)} failed, ${text(iterations)} total`)
|
||||
if (total_fail > 0) {
|
||||
log.console(`Failures saved to ${failures_dir}/`)
|
||||
}
|
||||
|
||||
$stop()
|
||||
348
fuzzgen.cm
Normal file
348
fuzzgen.cm
Normal file
@@ -0,0 +1,348 @@
|
||||
// fuzzgen.cm — generates self-checking .cm programs for fuzz testing
|
||||
// Each generated program returns a record of test functions that
|
||||
// validate their own expected results.
|
||||
|
||||
// Newline constant — backtick strings don't interpret \n as escape
|
||||
var NL = "\n"
|
||||
|
||||
// Simple seeded PRNG (xorshift32)
|
||||
var _seed = 1
|
||||
function seed(s) {
|
||||
_seed = s != 0 ? s : 1
|
||||
}
|
||||
|
||||
function rand() {
|
||||
_seed = _seed ^ (_seed << 13)
|
||||
_seed = _seed ^ (_seed >> 17)
|
||||
_seed = _seed ^ (_seed << 5)
|
||||
if (_seed < 0) _seed = -_seed
|
||||
return _seed
|
||||
}
|
||||
|
||||
function rand_int(lo, hi) {
|
||||
return lo + (rand() % (hi - lo + 1))
|
||||
}
|
||||
|
||||
function rand_float() {
|
||||
return rand_int(-10000, 10000) / 100
|
||||
}
|
||||
|
||||
function rand_bool() {
|
||||
return rand() % 2 == 0
|
||||
}
|
||||
|
||||
function pick(arr) {
|
||||
return arr[rand() % length(arr)]
|
||||
}
|
||||
|
||||
// Expression generators — each returns {src: "code", val: expected_value}
|
||||
// depth is decremented to prevent infinite recursion
|
||||
|
||||
function gen_int_literal() {
|
||||
var v = rand_int(-10000, 10000)
|
||||
return {src: text(v), val: v}
|
||||
}
|
||||
|
||||
function gen_float_literal() {
|
||||
var v = rand_float()
|
||||
return {src: text(v), val: v}
|
||||
}
|
||||
|
||||
function gen_bool_literal() {
|
||||
var v = rand_bool()
|
||||
var s = "false"
|
||||
if (v) s = "true"
|
||||
return {src: s, val: v}
|
||||
}
|
||||
|
||||
function gen_text_literal() {
|
||||
var words = ["alpha", "beta", "gamma", "delta", "epsilon"]
|
||||
var w = pick(words)
|
||||
return {src: `"${w}"`, val: w}
|
||||
}
|
||||
|
||||
function gen_null_literal() {
|
||||
return {src: "null", val: null}
|
||||
}
|
||||
|
||||
function gen_int_expr(depth) {
|
||||
var a = null
|
||||
var b = null
|
||||
var op = null
|
||||
var result = null
|
||||
|
||||
if (depth <= 0) return gen_int_literal()
|
||||
|
||||
a = gen_int_expr(depth - 1)
|
||||
b = gen_int_expr(depth - 1)
|
||||
|
||||
// Avoid division by zero
|
||||
if (b.val == 0) b = {src: "1", val: 1}
|
||||
|
||||
op = pick(["+", "-", "*"])
|
||||
if (op == "+") {
|
||||
result = a.val + b.val
|
||||
} else if (op == "-") {
|
||||
result = a.val - b.val
|
||||
} else {
|
||||
result = a.val * b.val
|
||||
}
|
||||
|
||||
// Guard against overflow beyond safe integer range
|
||||
if (result > 9007199254740991 || result < -9007199254740991) {
|
||||
return gen_int_literal()
|
||||
}
|
||||
|
||||
return {src: `(${a.src} ${op} ${b.src})`, val: result}
|
||||
}
|
||||
|
||||
function gen_float_expr(depth) {
|
||||
var a = null
|
||||
var b = null
|
||||
var op = null
|
||||
var result = null
|
||||
|
||||
if (depth <= 0) return gen_float_literal()
|
||||
|
||||
a = gen_float_expr(depth - 1)
|
||||
b = gen_float_expr(depth - 1)
|
||||
|
||||
if (b.val == 0) b = {src: "1.0", val: 1.0}
|
||||
|
||||
op = pick(["+", "-", "*"])
|
||||
if (op == "+") {
|
||||
result = a.val + b.val
|
||||
} else if (op == "-") {
|
||||
result = a.val - b.val
|
||||
} else {
|
||||
result = a.val * b.val
|
||||
}
|
||||
|
||||
return {src: `(${a.src} ${op} ${b.src})`, val: result}
|
||||
}
|
||||
|
||||
function gen_text_expr(depth) {
|
||||
var a = null
|
||||
var b = null
|
||||
|
||||
if (depth <= 0) return gen_text_literal()
|
||||
|
||||
a = gen_text_literal()
|
||||
b = gen_text_literal()
|
||||
|
||||
return {src: `(${a.src} + ${b.src})`, val: a.val + b.val}
|
||||
}
|
||||
|
||||
function gen_comparison_expr(depth) {
|
||||
var a = null
|
||||
var b = null
|
||||
var op = null
|
||||
var result = null
|
||||
|
||||
a = gen_int_expr(depth > 0 ? depth - 1 : 0)
|
||||
b = gen_int_expr(depth > 0 ? depth - 1 : 0)
|
||||
|
||||
op = pick(["==", "!=", "<", ">", "<=", ">="])
|
||||
if (op == "==") {
|
||||
result = a.val == b.val
|
||||
} else if (op == "!=") {
|
||||
result = a.val != b.val
|
||||
} else if (op == "<") {
|
||||
result = a.val < b.val
|
||||
} else if (op == ">") {
|
||||
result = a.val > b.val
|
||||
} else if (op == "<=") {
|
||||
result = a.val <= b.val
|
||||
} else {
|
||||
result = a.val >= b.val
|
||||
}
|
||||
|
||||
return {src: `(${a.src} ${op} ${b.src})`, val: result}
|
||||
}
|
||||
|
||||
// Generate an if-else expression test
|
||||
function gen_if_else_test() {
|
||||
var cond = gen_comparison_expr(1)
|
||||
var then_val = gen_int_literal()
|
||||
var else_val = gen_int_literal()
|
||||
var expected = cond.val ? then_val.val : else_val.val
|
||||
|
||||
var body = "var result = null" + NL
|
||||
body = body + " if (" + cond.src + ") {" + NL
|
||||
body = body + " result = " + then_val.src + NL
|
||||
body = body + " } else {" + NL
|
||||
body = body + " result = " + else_val.src + NL
|
||||
body = body + " }" + NL
|
||||
body = body + " if (result != " + text(expected) + ") return \"if_else: expected " + text(expected) + " got \" + text(result)"
|
||||
|
||||
return body
|
||||
}
|
||||
|
||||
// Generate a loop accumulator test
|
||||
function gen_loop_test() {
|
||||
var count = rand_int(1, 50)
|
||||
var step = rand_int(1, 10)
|
||||
var expected = 0
|
||||
var i = 0
|
||||
while (i < count) {
|
||||
expected = expected + step
|
||||
i = i + 1
|
||||
}
|
||||
|
||||
var body = "var acc = 0" + NL
|
||||
body = body + " var i = 0" + NL
|
||||
body = body + " while (i < " + text(count) + ") {" + NL
|
||||
body = body + " acc = acc + " + text(step) + NL
|
||||
body = body + " i = i + 1" + NL
|
||||
body = body + " }" + NL
|
||||
body = body + " if (acc != " + text(expected) + ") return \"loop: expected " + text(expected) + " got \" + text(acc)"
|
||||
|
||||
return body
|
||||
}
|
||||
|
||||
// Generate a closure test
|
||||
function gen_closure_test() {
|
||||
var init_val = rand_int(1, 100)
|
||||
var inc = rand_int(1, 10)
|
||||
var calls = rand_int(1, 10)
|
||||
var expected = init_val + (inc * calls)
|
||||
|
||||
var body = "var counter = " + text(init_val) + NL
|
||||
body = body + " var inc = function() { counter = counter + " + text(inc) + " }" + NL
|
||||
body = body + " var i = 0" + NL
|
||||
body = body + " while (i < " + text(calls) + ") {" + NL
|
||||
body = body + " inc()" + NL
|
||||
body = body + " i = i + 1" + NL
|
||||
body = body + " }" + NL
|
||||
body = body + " if (counter != " + text(expected) + ") return \"closure: expected " + text(expected) + " got \" + text(counter)"
|
||||
|
||||
return body
|
||||
}
|
||||
|
||||
// Generate a record property test
|
||||
function gen_record_test() {
|
||||
var a = gen_int_literal()
|
||||
var b = gen_int_literal()
|
||||
var sum = a.val + b.val
|
||||
|
||||
var body = "var r = {a: " + a.src + ", b: " + b.src + "}" + NL
|
||||
body = body + " var result = r.a + r.b" + NL
|
||||
body = body + " if (result != " + text(sum) + ") return \"record: expected " + text(sum) + " got \" + text(result)"
|
||||
|
||||
return body
|
||||
}
|
||||
|
||||
// Generate an array test
|
||||
function gen_array_test() {
|
||||
var n = rand_int(2, 10)
|
||||
var vals = []
|
||||
var i = 0
|
||||
var sum = 0
|
||||
var v = 0
|
||||
while (i < n) {
|
||||
v = rand_int(-100, 100)
|
||||
push(vals, v)
|
||||
sum = sum + v
|
||||
i = i + 1
|
||||
}
|
||||
|
||||
var val_strs = []
|
||||
i = 0
|
||||
while (i < n) {
|
||||
push(val_strs, text(vals[i]))
|
||||
i = i + 1
|
||||
}
|
||||
|
||||
var body = "var a = [" + text(val_strs, ", ") + "]" + NL
|
||||
body = body + " var _sum = 0" + NL
|
||||
body = body + " var i = 0" + NL
|
||||
body = body + " while (i < length(a)) {" + NL
|
||||
body = body + " _sum = _sum + a[i]" + NL
|
||||
body = body + " i = i + 1" + NL
|
||||
body = body + " }" + NL
|
||||
body = body + " if (_sum != " + text(sum) + ") return \"array_sum: expected " + text(sum) + " got \" + text(_sum)"
|
||||
|
||||
return body
|
||||
}
|
||||
|
||||
// Generate a nested function / higher-order test
|
||||
function gen_higher_order_test() {
|
||||
var mul = rand_int(2, 10)
|
||||
var input = rand_int(1, 100)
|
||||
var expected = input * mul
|
||||
|
||||
var body = "var make_mul = function(m) {" + NL
|
||||
body = body + " return function(x) { return x * m }" + NL
|
||||
body = body + " }" + NL
|
||||
body = body + " var fn = make_mul(" + text(mul) + ")" + NL
|
||||
body = body + " var result = fn(" + text(input) + ")" + NL
|
||||
body = body + " if (result != " + text(expected) + ") return \"higher_order: expected " + text(expected) + " got \" + text(result)"
|
||||
|
||||
return body
|
||||
}
|
||||
|
||||
// Generate a disruption handling test
|
||||
function gen_disrupt_test() {
|
||||
var body = "var caught = false" + NL
|
||||
body = body + " var _fn = function() { disrupt } disruption { caught = true }" + NL
|
||||
body = body + " _fn()" + NL
|
||||
body = body + " if (!caught) return \"disrupt: expected to catch disruption\""
|
||||
|
||||
return body
|
||||
}
|
||||
|
||||
// Generate a text operation test
|
||||
function gen_text_op_test() {
|
||||
var words = ["hello", "world", "foo", "bar", "baz"]
|
||||
var w1 = pick(words)
|
||||
var w2 = pick(words)
|
||||
var expected = w1 + w2
|
||||
|
||||
var body = "var a = \"" + w1 + "\"" + NL
|
||||
body = body + " var b = \"" + w2 + "\"" + NL
|
||||
body = body + " var c = a + b" + NL
|
||||
body = body + " if (c != \"" + expected + "\") return \"text_op: expected " + expected + " got \" + c"
|
||||
|
||||
return body
|
||||
}
|
||||
|
||||
// All generators
|
||||
var generators = [
|
||||
gen_if_else_test,
|
||||
gen_loop_test,
|
||||
gen_closure_test,
|
||||
gen_record_test,
|
||||
gen_array_test,
|
||||
gen_higher_order_test,
|
||||
gen_disrupt_test,
|
||||
gen_text_op_test
|
||||
]
|
||||
|
||||
// Generate a complete self-checking .cm program
|
||||
function generate(s) {
|
||||
seed(s)
|
||||
|
||||
var num_tests = rand_int(5, 15)
|
||||
var src = "// Auto-generated fuzz test (seed=" + text(s) + ")\nreturn {\n"
|
||||
var i = 0
|
||||
var gen = null
|
||||
var body = null
|
||||
|
||||
while (i < num_tests) {
|
||||
gen = pick(generators)
|
||||
body = gen()
|
||||
if (i > 0) src = src + ",\n"
|
||||
src = src + " fuzz_" + text(i) + ": function() {\n"
|
||||
src = src + " " + body + "\n"
|
||||
src = src + " }"
|
||||
i = i + 1
|
||||
}
|
||||
|
||||
src = src + "\n}\n"
|
||||
return src
|
||||
}
|
||||
|
||||
return {
|
||||
generate: generate,
|
||||
seed: seed
|
||||
}
|
||||
@@ -1,4 +1,6 @@
|
||||
// Hidden vars (os, args, core_path, use_mcode) come from env
|
||||
// Hidden vars come from env:
|
||||
// CLI mode (cell_init): os, args, core_path, shop_path
|
||||
// Actor spawn (script_startup): os, json, nota, wota, actorsym, init, core_path, shop_path
|
||||
// args[0] = script name, args[1..] = user args
|
||||
var load_internal = os.load_internal
|
||||
function use_embed(name) {
|
||||
@@ -22,28 +24,86 @@ function use_basic(path) {
|
||||
return result
|
||||
}
|
||||
|
||||
// Load a module from .mach bytecode, falling back to .ast.json
|
||||
// Load a module from .mach/.mcode bytecode (bootstrap modules have no source fallback)
|
||||
function boot_load(name, env) {
|
||||
var mach_path = name + ".mach"
|
||||
var mach_path = core_path + '/' + name + ".cm.mach"
|
||||
var mcode_path = core_path + '/' + name + ".cm.mcode"
|
||||
var data = null
|
||||
var mcode_json = null
|
||||
if (fd.is_file(mach_path)) {
|
||||
data = fd.slurp(mach_path)
|
||||
return mach_load(data, env)
|
||||
}
|
||||
data = text(fd.slurp(name + ".ast.json"))
|
||||
return mach_eval_ast(name, data, env)
|
||||
if (fd.is_file(mcode_path)) {
|
||||
mcode_json = text(fd.slurp(mcode_path))
|
||||
return mach_eval_mcode(name, mcode_json, env)
|
||||
}
|
||||
print("error: missing bootstrap bytecode: " + name + "\n")
|
||||
disrupt
|
||||
}
|
||||
|
||||
var boot_env = {use: use_basic}
|
||||
var tokenize_mod = boot_load("tokenize", boot_env)
|
||||
var parse_mod = boot_load("parse", boot_env)
|
||||
var fold_mod = boot_load("fold", boot_env)
|
||||
use_cache['tokenize'] = tokenize_mod
|
||||
use_cache['parse'] = parse_mod
|
||||
use_cache['fold'] = fold_mod
|
||||
|
||||
// Optionally load mcode compiler module
|
||||
var mcode_mod = null
|
||||
if (use_mcode) {
|
||||
mcode_mod = boot_load("mcode", boot_env)
|
||||
// Always load mcode compiler module
|
||||
var mcode_mod = boot_load("mcode", boot_env)
|
||||
use_cache['mcode'] = mcode_mod
|
||||
var streamline_mod = null
|
||||
|
||||
// Warn if any .cm source is newer than its compiled bytecode
|
||||
function check_mach_stale() {
|
||||
var sources = [
|
||||
"tokenize.cm",
|
||||
"parse.cm",
|
||||
"fold.cm",
|
||||
"mcode.cm",
|
||||
"streamline.cm",
|
||||
"qbe.cm",
|
||||
"qbe_emit.cm",
|
||||
"internal/bootstrap.cm",
|
||||
"internal/engine.cm"
|
||||
]
|
||||
var stale = []
|
||||
var _i = 0
|
||||
var cm_path = null
|
||||
var mach_path = null
|
||||
var mcode_path = null
|
||||
var cm_stat = null
|
||||
var compiled_stat = null
|
||||
var best_mtime = null
|
||||
while (_i < length(sources)) {
|
||||
cm_path = core_path + '/' + sources[_i]
|
||||
mach_path = cm_path + '.mach'
|
||||
mcode_path = cm_path + '.mcode'
|
||||
best_mtime = null
|
||||
if (fd.is_file(mach_path)) {
|
||||
best_mtime = fd.stat(mach_path).mtime
|
||||
}
|
||||
if (fd.is_file(mcode_path)) {
|
||||
compiled_stat = fd.stat(mcode_path)
|
||||
if (best_mtime == null || compiled_stat.mtime > best_mtime) {
|
||||
best_mtime = compiled_stat.mtime
|
||||
}
|
||||
}
|
||||
if (best_mtime != null && fd.is_file(cm_path)) {
|
||||
cm_stat = fd.stat(cm_path)
|
||||
if (cm_stat.mtime > best_mtime) {
|
||||
push(stale, sources[_i])
|
||||
}
|
||||
}
|
||||
_i = _i + 1
|
||||
}
|
||||
if (length(stale) > 0) {
|
||||
print("warning: bytecode is stale for: " + text(stale, ", ") + "\n")
|
||||
print("run 'make regen' to update\n")
|
||||
}
|
||||
}
|
||||
check_mach_stale()
|
||||
|
||||
// analyze: tokenize + parse, check for errors
|
||||
function analyze(src, filename) {
|
||||
@@ -80,33 +140,109 @@ function analyze(src, filename) {
|
||||
return ast
|
||||
}
|
||||
|
||||
// Run AST through either mcode or mach pipeline
|
||||
function run_ast(name, ast, env) {
|
||||
// Load a module from .mach/.mcode bytecode, falling back to source compilation
|
||||
function load_module(name, env) {
|
||||
var mach_path = core_path + '/' + name + ".cm.mach"
|
||||
var mcode_path = core_path + '/' + name + ".cm.mcode"
|
||||
var data = null
|
||||
var mcode_json = null
|
||||
var src_path = null
|
||||
var src = null
|
||||
var ast = null
|
||||
var compiled = null
|
||||
if (use_mcode) {
|
||||
compiled = mcode_mod(ast)
|
||||
return mcode_run(name, json.encode(compiled), env)
|
||||
var optimized = null
|
||||
if (fd.is_file(mach_path)) {
|
||||
data = fd.slurp(mach_path)
|
||||
return mach_load(data, env)
|
||||
}
|
||||
return mach_eval_ast(name, json.encode(ast), env)
|
||||
if (fd.is_file(mcode_path)) {
|
||||
mcode_json = text(fd.slurp(mcode_path))
|
||||
return mach_eval_mcode(name, mcode_json, env)
|
||||
}
|
||||
src_path = core_path + '/' + name + ".cm"
|
||||
src = text(fd.slurp(src_path))
|
||||
ast = analyze(src, src_path)
|
||||
compiled = mcode_mod(ast)
|
||||
optimized = streamline_mod(compiled)
|
||||
return mach_eval_mcode(name, json.encode(optimized), env)
|
||||
}
|
||||
|
||||
// Load optimization pipeline modules (needs analyze to be defined)
|
||||
streamline_mod = load_module("streamline", boot_env)
|
||||
use_cache['streamline'] = streamline_mod
|
||||
|
||||
// Lazy-loaded verify_ir module (loaded on first use via use_fn)
|
||||
var _verify_ir_mod = null
|
||||
|
||||
// Run AST through mcode pipeline → register VM
|
||||
function run_ast(name, ast, env) {
|
||||
var compiled = mcode_mod(ast)
|
||||
if (os._verify_ir) {
|
||||
if (_verify_ir_mod == null) {
|
||||
_verify_ir_mod = use_fn('verify_ir')
|
||||
}
|
||||
compiled._verify = true
|
||||
compiled._verify_mod = _verify_ir_mod
|
||||
}
|
||||
var optimized = streamline_mod(compiled)
|
||||
// Clean up verify properties before JSON encoding
|
||||
if (optimized._verify) {
|
||||
delete optimized._verify
|
||||
delete optimized._verify_mod
|
||||
}
|
||||
return mach_eval_mcode(name, json.encode(optimized), env)
|
||||
}
|
||||
|
||||
// Run AST through mcode pipeline WITHOUT optimization → register VM
|
||||
function run_ast_noopt(name, ast, env) {
|
||||
var compiled = mcode_mod(ast)
|
||||
return mach_eval_mcode(name, json.encode(compiled), env)
|
||||
}
|
||||
|
||||
// use() with ƿit pipeline for .cm modules
|
||||
function use(path) {
|
||||
var file_path = path + '.cm'
|
||||
function use_fn(path) {
|
||||
var file_path = null
|
||||
var mach_path = null
|
||||
var mcode_path = null
|
||||
var mcode_json = null
|
||||
var data = null
|
||||
var script = null
|
||||
var ast = null
|
||||
var result = null
|
||||
if (use_cache[path])
|
||||
return use_cache[path]
|
||||
|
||||
// Check CWD first, then core_path
|
||||
// Try .cm.mach bytecode first (CWD then core_path)
|
||||
mach_path = path + '.cm.mach'
|
||||
if (!fd.is_file(mach_path))
|
||||
mach_path = core_path + '/' + path + '.cm.mach'
|
||||
if (fd.is_file(mach_path)) {
|
||||
data = fd.slurp(mach_path)
|
||||
result = mach_load(data, {use: use_fn})
|
||||
use_cache[path] = result
|
||||
return result
|
||||
}
|
||||
|
||||
// Try .cm.mcode JSON IR (CWD then core_path)
|
||||
mcode_path = path + '.cm.mcode'
|
||||
if (!fd.is_file(mcode_path))
|
||||
mcode_path = core_path + '/' + path + '.cm.mcode'
|
||||
if (fd.is_file(mcode_path)) {
|
||||
mcode_json = text(fd.slurp(mcode_path))
|
||||
result = mach_eval_mcode(path, mcode_json, {use: use_fn})
|
||||
use_cache[path] = result
|
||||
return result
|
||||
}
|
||||
|
||||
// Try .cm source (CWD then core_path)
|
||||
file_path = path + '.cm'
|
||||
if (!fd.is_file(file_path))
|
||||
file_path = core_path + '/' + path + '.cm'
|
||||
|
||||
if (fd.is_file(file_path)) {
|
||||
script = text(fd.slurp(file_path))
|
||||
ast = analyze(script, file_path)
|
||||
result = run_ast(path, ast, {use: use})
|
||||
result = run_ast(path, ast, {use: use_fn})
|
||||
use_cache[path] = result
|
||||
return result
|
||||
}
|
||||
@@ -117,21 +253,58 @@ function use(path) {
|
||||
return result
|
||||
}
|
||||
|
||||
// Load and run the user's program
|
||||
var program = args[0]
|
||||
var script_file = program
|
||||
|
||||
// Add .ce extension if not already present
|
||||
if (!ends_with(script_file, '.ce') && !ends_with(script_file, '.cm'))
|
||||
script_file = program + '.ce'
|
||||
|
||||
var user_args = []
|
||||
var _j = 1
|
||||
while (_j < length(args)) {
|
||||
push(user_args, args[_j])
|
||||
_j = _j + 1
|
||||
// Helper to load engine.cm and run it with given env
|
||||
function load_engine(env) {
|
||||
var engine_path = core_path + '/internal/engine.cm.mach'
|
||||
var mcode_path = core_path + '/internal/engine.cm.mcode'
|
||||
var data = null
|
||||
var mcode_json = null
|
||||
var engine_src = null
|
||||
var engine_ast = null
|
||||
if (fd.is_file(engine_path)) {
|
||||
data = fd.slurp(engine_path)
|
||||
return mach_load(data, env)
|
||||
}
|
||||
if (fd.is_file(mcode_path)) {
|
||||
mcode_json = text(fd.slurp(mcode_path))
|
||||
return mach_eval_mcode('engine', mcode_json, env)
|
||||
}
|
||||
engine_path = core_path + '/internal/engine.cm'
|
||||
engine_src = text(fd.slurp(engine_path))
|
||||
engine_ast = analyze(engine_src, engine_path)
|
||||
return run_ast('engine', engine_ast, env)
|
||||
}
|
||||
|
||||
var script = text(fd.slurp(script_file))
|
||||
var ast = analyze(script, script_file)
|
||||
run_ast(program, ast, {use: use, args: user_args, json: json})
|
||||
// Detect mode and route
|
||||
// CLI mode has 'args'; actor spawn mode has 'init'
|
||||
var program = null
|
||||
var user_args = []
|
||||
var _j = 0
|
||||
|
||||
if (args != null) {
|
||||
// CLI mode — always run as actor program (.ce)
|
||||
program = args[0]
|
||||
if (!program) {
|
||||
print("error: no program specified\n")
|
||||
disrupt
|
||||
}
|
||||
_j = 1
|
||||
while (_j < length(args)) {
|
||||
push(user_args, args[_j])
|
||||
_j = _j + 1
|
||||
}
|
||||
|
||||
load_engine({
|
||||
os: os, actorsym: actorsym,
|
||||
init: {program: program, arg: user_args},
|
||||
core_path: core_path, shop_path: shop_path, json: json,
|
||||
analyze: analyze, run_ast_fn: run_ast, run_ast_noopt_fn: run_ast_noopt
|
||||
})
|
||||
} else {
|
||||
// Actor spawn mode — load engine.cm with full actor env
|
||||
load_engine({
|
||||
os: os, actorsym: actorsym, init: init,
|
||||
core_path: core_path, shop_path: shop_path, json: json, nota: nota, wota: wota,
|
||||
analyze: analyze, run_ast_fn: run_ast, run_ast_noopt_fn: run_ast_noopt
|
||||
})
|
||||
}
|
||||
|
||||
2900
internal/bootstrap.cm.mcode
Normal file
2900
internal/bootstrap.cm.mcode
Normal file
File diff suppressed because it is too large
Load Diff
Binary file not shown.
@@ -1,21 +1,18 @@
|
||||
// Hidden vars (os, actorsym, init, core_path) come from env
|
||||
// Hidden vars (os, actorsym, init, core_path, shop_path, analyze, run_ast_fn, run_ast_noopt_fn, json) come from env
|
||||
// In actor spawn mode, also: nota, wota
|
||||
var ACTORDATA = actorsym
|
||||
var SYSYM = '__SYSTEM__'
|
||||
|
||||
var _cell = {}
|
||||
var need_stop = false
|
||||
|
||||
var dylib_ext
|
||||
|
||||
var cases = {
|
||||
Windows: '.dll',
|
||||
macOS: '.dylib',
|
||||
Linux: '.so'
|
||||
}
|
||||
|
||||
print(os.platform())
|
||||
|
||||
dylib_ext = cases[os.platform()]
|
||||
var dylib_ext = cases[os.platform()]
|
||||
|
||||
var MOD_EXT = '.cm'
|
||||
var ACTOR_EXT = '.ce'
|
||||
@@ -49,52 +46,70 @@ function ends_with(str, suffix) {
|
||||
return search(str, suffix, -length(suffix)) != null
|
||||
}
|
||||
|
||||
var js = use_embed('js')
|
||||
var fd = use_embed('fd')
|
||||
var js = use_embed('js')
|
||||
|
||||
// Get the shop path from HOME environment
|
||||
var home = os.getenv('HOME') || os.getenv('USERPROFILE')
|
||||
if (!home) {
|
||||
os.print('Could not determine home directory\n')
|
||||
os.exit(1)
|
||||
}
|
||||
var shop_path = home + '/.cell'
|
||||
var packages_path = shop_path + '/packages'
|
||||
var core_path = packages_path + '/core'
|
||||
|
||||
if (!fd.is_dir(core_path)) {
|
||||
os.print('Cell shop not found at ' + shop_path + '. Run "cell install" to set up.\n')
|
||||
os.exit(1)
|
||||
}
|
||||
// core_path and shop_path come from env (bootstrap.cm passes them through)
|
||||
// shop_path may be null if --core was used without --shop
|
||||
var packages_path = shop_path ? shop_path + '/packages' : null
|
||||
|
||||
var use_cache = {}
|
||||
use_cache['core/os'] = os
|
||||
|
||||
// Extra env properties added as engine initializes (log, runtime fns, etc.)
|
||||
var core_extras = {}
|
||||
|
||||
// Load a core module from the file system
|
||||
function use_core(path) {
|
||||
var cache_key = 'core/' + path
|
||||
var env = null
|
||||
if (use_cache[cache_key])
|
||||
return use_cache[cache_key];
|
||||
return use_cache[cache_key]
|
||||
|
||||
var sym = use_embed(replace(path, '/', '_'))
|
||||
var result = null
|
||||
var script = null
|
||||
var ast = null
|
||||
|
||||
// Core scripts are in packages/core/
|
||||
var file_path = core_path + '/' + path + MOD_EXT
|
||||
// Build env: merge core_extras, include C embed as 'native' if available
|
||||
env = {use: use_core}
|
||||
arrfor(array(core_extras), function(k) { env[k] = core_extras[k] })
|
||||
if (sym) env.native = sym
|
||||
|
||||
if (fd.is_file(file_path)) {
|
||||
var script_blob = fd.slurp(file_path)
|
||||
var script = text(script_blob)
|
||||
var mod = `(function setup_module(use){${script}})`
|
||||
var fn = mach_eval('core:' + path, mod)
|
||||
var result = call(fn,sym, [use_core])
|
||||
use_cache[cache_key] = result;
|
||||
return result;
|
||||
// Check for pre-compiled .cm.mach file first
|
||||
var mach_path = core_path + '/' + path + '.cm.mach'
|
||||
if (fd.is_file(mach_path)) {
|
||||
result = mach_load(fd.slurp(mach_path), env)
|
||||
use_cache[cache_key] = result
|
||||
return result
|
||||
}
|
||||
|
||||
use_cache[cache_key] = sym;
|
||||
return sym;
|
||||
// Check for .cm.mcode JSON IR
|
||||
var mcode_path = core_path + '/' + path + '.cm.mcode'
|
||||
if (fd.is_file(mcode_path)) {
|
||||
result = mach_eval_mcode('core:' + path, text(fd.slurp(mcode_path)), env)
|
||||
use_cache[cache_key] = result
|
||||
return result
|
||||
}
|
||||
|
||||
// Fall back to source .cm file — compile at runtime
|
||||
var file_path = core_path + '/' + path + MOD_EXT
|
||||
if (fd.is_file(file_path)) {
|
||||
script = text(fd.slurp(file_path))
|
||||
ast = analyze(script, file_path)
|
||||
result = run_ast_fn('core:' + path, ast, env)
|
||||
use_cache[cache_key] = result
|
||||
return result
|
||||
}
|
||||
|
||||
// Embedded C module only
|
||||
use_cache[cache_key] = sym
|
||||
return sym
|
||||
}
|
||||
|
||||
// Load full modules via use_core (extends C embeds with .cm additions, and caches)
|
||||
fd = use_core('fd')
|
||||
use_core('js')
|
||||
var blob = use_core('blob')
|
||||
|
||||
function actor() {
|
||||
@@ -112,22 +127,9 @@ function is_actor(value) {
|
||||
var ENETSERVICE = 0.1
|
||||
var REPLYTIMEOUT = 60 // seconds before replies are ignored
|
||||
|
||||
function caller_data(depth = 0)
|
||||
function caller_data(depth)
|
||||
{
|
||||
var file = "nofile"
|
||||
var line = 0
|
||||
|
||||
var caller = array(Error().stack, "\n")[1+depth]
|
||||
if (caller) {
|
||||
var md = extract(caller, /\((.*)\:/)
|
||||
var m = md ? md[1] : "SCRIPT"
|
||||
if (m) file = m
|
||||
md = extract(caller, /\:(\d*)\)/)
|
||||
m = md ? md[1] : 0
|
||||
if (m) line = m
|
||||
}
|
||||
|
||||
return {file,line}
|
||||
return {file: "nofile", line: 0}
|
||||
}
|
||||
|
||||
function console_rec(line, file, msg) {
|
||||
@@ -142,9 +144,7 @@ function log(name, args) {
|
||||
if (name == 'console') {
|
||||
os.print(console_rec(caller.line, caller.file, msg))
|
||||
} else if (name == 'error') {
|
||||
if (msg == null) msg = Error()
|
||||
if (is_proto(msg, Error))
|
||||
msg = msg.name + ": " + msg.message + "\n" + msg.stack
|
||||
if (msg == null) msg = "error"
|
||||
os.print(console_rec(caller.line, caller.file, msg))
|
||||
} else if (name == 'system') {
|
||||
msg = "[SYSTEM] " + msg
|
||||
@@ -156,6 +156,9 @@ function log(name, args) {
|
||||
|
||||
function actor_die(err)
|
||||
{
|
||||
var reason = null
|
||||
var unders = null
|
||||
|
||||
if (err && is_function(err.toString)) {
|
||||
os.print(err.toString())
|
||||
os.print("\n")
|
||||
@@ -165,14 +168,14 @@ function actor_die(err)
|
||||
if (overling) {
|
||||
if (err) {
|
||||
// with an err, this is a forceful disrupt
|
||||
var reason = (is_proto(err, Error)) ? err.stack : err
|
||||
reason = err
|
||||
report_to_overling({type:'disrupt', reason})
|
||||
} else
|
||||
report_to_overling({type:'stop'})
|
||||
}
|
||||
|
||||
if (underlings) {
|
||||
var unders = array(underlings)
|
||||
unders = array(underlings)
|
||||
arrfor(unders, function(id, index) {
|
||||
log.console(`calling on ${id} to disrupt too`)
|
||||
$_.stop(create_actor({id}))
|
||||
@@ -191,14 +194,15 @@ function actor_die(err)
|
||||
|
||||
|
||||
|
||||
actor_mod.on_exception(actor_die)
|
||||
//actor_mod.on_exception(actor_die)
|
||||
|
||||
_cell.args = init != null ? init : {}
|
||||
_cell.id = "newguy"
|
||||
|
||||
function create_actor(desc = {id:guid()}) {
|
||||
function create_actor(desc) {
|
||||
var _desc = desc == null ? {id:guid()} : desc
|
||||
var actor = {}
|
||||
actor[ACTORDATA] = desc
|
||||
actor[ACTORDATA] = _desc
|
||||
return actor
|
||||
}
|
||||
|
||||
@@ -208,10 +212,13 @@ $_.self = create_actor()
|
||||
os.use_cache = use_cache
|
||||
os.global_shop_path = shop_path
|
||||
os.$_ = $_
|
||||
os.analyze = analyze
|
||||
os.run_ast_fn = run_ast_fn
|
||||
os.run_ast_noopt_fn = run_ast_noopt_fn
|
||||
os.json = json
|
||||
use_cache['core/json'] = json
|
||||
|
||||
var shop = use_core('internal/shop')
|
||||
|
||||
var json = use_core('json')
|
||||
var time = use_core('time')
|
||||
|
||||
var pronto = use_core('pronto')
|
||||
@@ -237,6 +244,9 @@ var runtime_env = {
|
||||
sequence: sequence
|
||||
}
|
||||
|
||||
// Make runtime functions available to modules loaded via use_core
|
||||
arrfor(array(runtime_env), function(k) { core_extras[k] = runtime_env[k] })
|
||||
|
||||
// Pass to os for shop to access
|
||||
os.runtime_env = runtime_env
|
||||
|
||||
@@ -296,8 +306,8 @@ $_.time_limit = function(requestor, seconds)
|
||||
callback(val, reason)
|
||||
}, value)
|
||||
} disruption {
|
||||
cancel(Error('requestor failed'))
|
||||
callback(null, Error('requestor failed'))
|
||||
cancel('requestor failed')
|
||||
callback(null, 'requestor failed')
|
||||
}
|
||||
do_request()
|
||||
|
||||
@@ -345,9 +355,10 @@ REPLYTIMEOUT = config.reply_timeout
|
||||
}
|
||||
*/
|
||||
|
||||
function guid(bits = 256)
|
||||
function guid(bits)
|
||||
{
|
||||
var guid = blob(bits, os.random)
|
||||
var _bits = bits == null ? 256 : bits
|
||||
var guid = blob(_bits, os.random)
|
||||
stone(guid)
|
||||
return text(guid,'h')
|
||||
}
|
||||
@@ -429,13 +440,16 @@ $_.portal = function(fn, port) {
|
||||
}
|
||||
|
||||
function handle_host(e) {
|
||||
var queue = null
|
||||
var data = null
|
||||
|
||||
if (e.type == "connect") {
|
||||
log.system(`connected a new peer: ${e.peer.address}:${e.peer.port}`)
|
||||
peers[`${e.peer.address}:${e.peer.port}`] = e.peer
|
||||
var queue = peer_queue.get(e.peer)
|
||||
queue = peer_queue.get(e.peer)
|
||||
if (queue) {
|
||||
arrfor(queue, (msg, index) => e.peer.send(nota.encode(msg)))
|
||||
log.system(`sent ${msg} out of queue`)
|
||||
log.system(`sent queue out of queue`)
|
||||
peer_queue.delete(e.peer)
|
||||
}
|
||||
} else if (e.type == "disconnect") {
|
||||
@@ -445,27 +459,28 @@ function handle_host(e) {
|
||||
})
|
||||
log.system('portal got disconnect from ' + e.peer.address + ":" + e.peer.port)
|
||||
} else if (e.type == "receive") {
|
||||
var data = nota.decode(e.data)
|
||||
data = nota.decode(e.data)
|
||||
if (data.replycc && !data.replycc.address) {
|
||||
data.replycc[ACTORDATA].address = e.peer.address
|
||||
data.replycc[ACTORDATA].port = e.peer.port
|
||||
}
|
||||
function populate_actor_addresses(obj) {
|
||||
if (!is_object(obj)) return
|
||||
if (obj[ACTORDATA] && !obj[ACTORDATA].address) {
|
||||
obj[ACTORDATA].address = e.peer.address
|
||||
obj[ACTORDATA].port = e.peer.port
|
||||
}
|
||||
arrfor(array(obj), function(key, index) {
|
||||
if (key in obj)
|
||||
populate_actor_addresses(obj[key])
|
||||
})
|
||||
}
|
||||
if (data.data) populate_actor_addresses(data.data)
|
||||
if (data.data) populate_actor_addresses(data.data, e)
|
||||
turn(data)
|
||||
}
|
||||
}
|
||||
|
||||
function populate_actor_addresses(obj, e) {
|
||||
if (!is_object(obj)) return
|
||||
if (obj[ACTORDATA] && !obj[ACTORDATA].address) {
|
||||
obj[ACTORDATA].address = e.peer.address
|
||||
obj[ACTORDATA].port = e.peer.port
|
||||
}
|
||||
arrfor(array(obj), function(key, index) {
|
||||
if (key in obj)
|
||||
populate_actor_addresses(obj[key], e)
|
||||
})
|
||||
}
|
||||
|
||||
// takes a callback function, an actor object, and a configuration record for getting information about the status of a connection to the actor. The configuration record is used to request the sort of information that needs to be communicated. This can include latency, bandwidth, activity, congestion, cost, partitions. The callback is given a record containing the requested information.
|
||||
$_.contact = function(callback, record) {
|
||||
send(create_actor(record), record, callback)
|
||||
@@ -514,12 +529,13 @@ $_.unneeded = function unneeded(fn, seconds) {
|
||||
}
|
||||
|
||||
// schedules the invocation of a function after a specified amount of time.
|
||||
$_.delay = function delay(fn, seconds = 0) {
|
||||
$_.delay = function delay(fn, seconds) {
|
||||
var _seconds = seconds == null ? 0 : seconds
|
||||
function delay_turn() {
|
||||
fn()
|
||||
send_messages()
|
||||
}
|
||||
var id = actor_mod.delay(delay_turn, seconds)
|
||||
var id = actor_mod.delay(delay_turn, _seconds)
|
||||
return function() { actor_mod.removetimer(id) }
|
||||
}
|
||||
|
||||
@@ -544,6 +560,9 @@ function actor_send_immediate(actor, send) {
|
||||
}
|
||||
|
||||
function actor_send(actor, message) {
|
||||
var wota_blob = null
|
||||
var peer = null
|
||||
|
||||
if (actor[HEADER] && !actor[HEADER].replycc) // attempting to respond to a message but sender is not expecting; silently drop
|
||||
return
|
||||
|
||||
@@ -562,22 +581,21 @@ function actor_send(actor, message) {
|
||||
if (receive_fn) receive_fn(message.data)
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
// message to actor in same flock
|
||||
if (actor[ACTORDATA].id && actor_mod.mailbox_exist(actor[ACTORDATA].id)) {
|
||||
var wota_blob = wota.encode(message)
|
||||
// log.console(`sending wota blob of ${length(wota_blob)/8} bytes`)
|
||||
wota_blob = wota.encode(message)
|
||||
actor_mod.mailbox_push(actor[ACTORDATA].id, wota_blob)
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
if (actor[ACTORDATA].address) {
|
||||
if (actor[ACTORDATA].id)
|
||||
message.target = actor[ACTORDATA].id
|
||||
else
|
||||
message.type = "contact"
|
||||
|
||||
var peer = peers[actor[ACTORDATA].address + ":" + actor[ACTORDATA].port]
|
||||
|
||||
peer = peers[actor[ACTORDATA].address + ":" + actor[ACTORDATA].port]
|
||||
if (!peer) {
|
||||
if (!portal) {
|
||||
log.system(`creating a contactor ...`)
|
||||
@@ -621,6 +639,11 @@ function send_messages() {
|
||||
var replies = {}
|
||||
|
||||
function send(actor, message, reply) {
|
||||
var send_msg = null
|
||||
var target = null
|
||||
var header = null
|
||||
var id = null
|
||||
|
||||
if (!is_object(actor)) {
|
||||
log.error(`Must send to an actor object. Provided: ${actor}`)
|
||||
disrupt
|
||||
@@ -630,11 +653,11 @@ function send(actor, message, reply) {
|
||||
log.error('Message must be an object')
|
||||
disrupt
|
||||
}
|
||||
var send_msg = {type:"user", data: message}
|
||||
var target = actor
|
||||
send_msg = {type:"user", data: message}
|
||||
target = actor
|
||||
|
||||
if (actor[HEADER] && actor[HEADER].replycc) {
|
||||
var header = actor[HEADER]
|
||||
header = actor[HEADER]
|
||||
if (!header.replycc || !is_actor(header.replycc)) {
|
||||
log.error(`Supplied actor had a return, but it's not a valid actor! ${actor[HEADER]}`)
|
||||
disrupt
|
||||
@@ -645,7 +668,7 @@ function send(actor, message, reply) {
|
||||
}
|
||||
|
||||
if (reply) {
|
||||
var id = guid()
|
||||
id = guid()
|
||||
replies[id] = reply
|
||||
$_.delay(_ => {
|
||||
if (replies[id]) {
|
||||
@@ -714,7 +737,7 @@ function report_to_overling(msg)
|
||||
var program = _cell.args.program
|
||||
|
||||
if (!program) {
|
||||
log.error('No program specified. Usage: cell <program.ce> [args...]')
|
||||
log.error('No program specified. Usage: cell <program> [args...]')
|
||||
os.exit(1)
|
||||
}
|
||||
|
||||
@@ -730,18 +753,21 @@ function handle_actor_disconnect(id) {
|
||||
|
||||
function handle_sysym(msg)
|
||||
{
|
||||
var from
|
||||
var from = null
|
||||
var greeter = null
|
||||
var letter2 = null
|
||||
|
||||
if (msg.kind == 'stop') {
|
||||
actor_die("got stop message")
|
||||
} else if (msg.kind == 'underling') {
|
||||
from = msg.from
|
||||
var greeter = greeters[from[ACTORDATA].id]
|
||||
greeter = greeters[from[ACTORDATA].id]
|
||||
if (greeter) greeter(msg.message)
|
||||
if (msg.message.type == 'disrupt')
|
||||
delete underlings[from[ACTORDATA].id]
|
||||
} else if (msg.kind == 'contact') {
|
||||
if (portal_fn) {
|
||||
var letter2 = msg.data
|
||||
letter2 = msg.data
|
||||
letter2[HEADER] = msg
|
||||
delete msg.data
|
||||
portal_fn(letter2)
|
||||
@@ -758,13 +784,16 @@ function handle_sysym(msg)
|
||||
}
|
||||
|
||||
function handle_message(msg) {
|
||||
var letter = null
|
||||
var fn = null
|
||||
|
||||
if (msg[SYSYM]) {
|
||||
handle_sysym(msg[SYSYM], msg.from)
|
||||
return
|
||||
}
|
||||
|
||||
if (msg.type == "user") {
|
||||
var letter = msg.data // what the sender really sent
|
||||
letter = msg.data // what the sender really sent
|
||||
_ObjectDefineProperty(letter, HEADER, {
|
||||
value: msg, enumerable: false
|
||||
})
|
||||
@@ -773,7 +802,7 @@ function handle_message(msg) {
|
||||
})
|
||||
|
||||
if (msg.return) {
|
||||
var fn = replies[msg.return]
|
||||
fn = replies[msg.return]
|
||||
if (fn) fn(letter)
|
||||
delete replies[msg.return]
|
||||
return
|
||||
@@ -798,44 +827,68 @@ function enet_check()
|
||||
actor_mod.setname(_cell.args.program)
|
||||
|
||||
var prog = _cell.args.program
|
||||
if (ends_with(prog, '.cm')) {
|
||||
os.print(`error: ${prog} is a module (.cm), not a program (.ce)\n`)
|
||||
os.exit(1)
|
||||
}
|
||||
if (ends_with(prog, '.ce')) prog = text(prog, 0, -3)
|
||||
|
||||
var package = use_core('package')
|
||||
|
||||
var locator = shop.resolve_locator(_cell.args.program + ".ce", null)
|
||||
|
||||
if (!locator) {
|
||||
var pkg = package.find_package_dir(_cell.args.program + ".ce")
|
||||
locator = shop.resolve_locator(_cell.args.program + ".ce", pkg)
|
||||
// Find the .ce file
|
||||
var prog_path = prog + ".ce"
|
||||
var pkg_dir = null
|
||||
var core_dir = null
|
||||
if (!fd.is_file(prog_path)) {
|
||||
pkg_dir = package.find_package_dir(prog_path)
|
||||
if (pkg_dir)
|
||||
prog_path = pkg_dir + '/' + prog + '.ce'
|
||||
}
|
||||
|
||||
if (!locator) {
|
||||
os.print(`Main program ${_cell.args.program} could not be found\n`)
|
||||
if (!fd.is_file(prog_path)) {
|
||||
// Check core packages
|
||||
core_dir = core_path
|
||||
prog_path = core_dir + '/' + prog + '.ce'
|
||||
}
|
||||
if (!fd.is_file(prog_path)) {
|
||||
os.print(`Main program ${prog} could not be found\n`)
|
||||
os.exit(1)
|
||||
}
|
||||
|
||||
$_.clock(_ => {
|
||||
// Get capabilities for the main program
|
||||
var file_info = shop.file_info ? shop.file_info(locator.path) : null
|
||||
var file_info = shop.file_info ? shop.file_info(prog_path) : null
|
||||
var inject = shop.script_inject_for ? shop.script_inject_for(file_info) : []
|
||||
|
||||
// Build env object for injection
|
||||
// Build env with runtime functions + capability injections
|
||||
var env = {}
|
||||
for (var i = 0; i < length(inject); i++) {
|
||||
var key = inject[i]
|
||||
arrfor(array(runtime_env), function(k) { env[k] = runtime_env[k] })
|
||||
var _ki = 0
|
||||
var inj = null
|
||||
var key = null
|
||||
while (_ki < length(inject)) {
|
||||
inj = inject[_ki]
|
||||
key = inj
|
||||
if (key && key[0] == '$') key = text(key, 1)
|
||||
if (key == 'fd') env[key] = fd
|
||||
else env[key] = $_[key]
|
||||
if (key == 'fd') env['$fd'] = fd
|
||||
else env['$' + key] = $_[key]
|
||||
_ki = _ki + 1
|
||||
}
|
||||
|
||||
// Create use function bound to the program's package
|
||||
var pkg = file_info ? file_info.package : null
|
||||
var use_fn = function(path) { return shop.use(path, pkg) }
|
||||
env.use = function(path) {
|
||||
var ck = 'core/' + path
|
||||
if (use_cache[ck]) return use_cache[ck]
|
||||
var core_mod = use_core(path)
|
||||
if (core_mod) return core_mod
|
||||
return shop.use(path, pkg)
|
||||
}
|
||||
env.args = _cell.args.arg
|
||||
env.log = log
|
||||
|
||||
// Call with signature: setup_module(args, use, env)
|
||||
// The script wrapper binds $delay, $start, etc. from env
|
||||
var val = call(locator.symbol, null, [_cell.args.arg, use_fn, env])
|
||||
|
||||
if (val)
|
||||
var script = text(fd.slurp(prog_path))
|
||||
var ast = analyze(script, prog_path)
|
||||
var val = run_ast_fn(prog, ast, env)
|
||||
if (val) {
|
||||
log.error('Program must not return anything')
|
||||
disrupt
|
||||
}
|
||||
})
|
||||
|
||||
7443
internal/engine.cm.mcode
Normal file
7443
internal/engine.cm.mcode
Normal file
File diff suppressed because it is too large
Load Diff
449
internal/shop.cm
449
internal/shop.cm
@@ -5,7 +5,6 @@ var fd = use('fd')
|
||||
var http = use('http')
|
||||
var miniz = use('miniz')
|
||||
var time = use('time')
|
||||
var js = use('js')
|
||||
var crypto = use('crypto')
|
||||
var blob = use('blob')
|
||||
|
||||
@@ -13,6 +12,10 @@ var pkg_tools = use('package')
|
||||
var os = use('os')
|
||||
var link = use('link')
|
||||
|
||||
var analyze = os.analyze
|
||||
var run_ast_fn = os.run_ast_fn
|
||||
var shop_json = os.json
|
||||
|
||||
var core = "core"
|
||||
|
||||
function pull_from_cache(content)
|
||||
@@ -32,9 +35,10 @@ function ensure_dir(path) {
|
||||
if (fd.stat(path).isDirectory) return
|
||||
var parts = array(path, '/')
|
||||
var current = starts_with(path, '/') ? '/' : ''
|
||||
for (var i = 0; i < length(parts); i++) {
|
||||
var i = 0
|
||||
for (i = 0; i < length(parts); i++) {
|
||||
if (parts[i] == '') continue
|
||||
current += parts[i] + '/'
|
||||
current = current + parts[i] + '/'
|
||||
if (!fd.stat(current).isDirectory) {
|
||||
fd.mkdir(current)
|
||||
}
|
||||
@@ -76,12 +80,12 @@ function get_packages_dir() {
|
||||
}
|
||||
|
||||
// Get the core directory (in the global shop)
|
||||
var core_package = 'core'
|
||||
|
||||
Shop.get_core_dir = function() {
|
||||
return get_packages_dir() + '/' + core_package
|
||||
}
|
||||
|
||||
var core_package = 'core'
|
||||
|
||||
// Get the links file path (in the global shop)
|
||||
function get_links_path() {
|
||||
return global_shop_path + '/link.toml'
|
||||
@@ -116,12 +120,16 @@ function split_explicit_package_import(path)
|
||||
if (!looks_explicit) return null
|
||||
|
||||
// Find the longest prefix that is an installed package
|
||||
for (var i = length(parts) - 1; i >= 1; i--) {
|
||||
var pkg_candidate = text(array(parts, 0, i), '/')
|
||||
var mod_path = text(array(parts, i), '/')
|
||||
var i = 0
|
||||
var pkg_candidate = null
|
||||
var mod_path = null
|
||||
var candidate_dir = null
|
||||
for (i = length(parts) - 1; i >= 1; i--) {
|
||||
pkg_candidate = text(array(parts, 0, i), '/')
|
||||
mod_path = text(array(parts, i), '/')
|
||||
if (!mod_path || length(mod_path) == 0) continue
|
||||
|
||||
var candidate_dir = get_packages_dir() + '/' + safe_package_path(pkg_candidate)
|
||||
candidate_dir = get_packages_dir() + '/' + safe_package_path(pkg_candidate)
|
||||
if (fd.is_file(candidate_dir + '/cell.toml'))
|
||||
return {package: pkg_candidate, path: mod_path}
|
||||
|
||||
@@ -142,8 +150,10 @@ function package_in_shop(package) {
|
||||
|
||||
function abs_path_to_package(package_dir)
|
||||
{
|
||||
if (!fd.is_file(package_dir + '/cell.toml'))
|
||||
throw Error('Not a valid package directory (no cell.toml): ' + package_dir)
|
||||
if (!fd.is_file(package_dir + '/cell.toml')) {
|
||||
print('Not a valid package directory (no cell.toml): ' + package_dir)
|
||||
disrupt
|
||||
}
|
||||
|
||||
var packages_prefix = get_packages_dir() + '/'
|
||||
var core_dir = packages_prefix + core_package
|
||||
@@ -153,8 +163,9 @@ function abs_path_to_package(package_dir)
|
||||
return 'core'
|
||||
}
|
||||
// Also check if core_dir is a symlink pointing to package_dir
|
||||
var core_target = null
|
||||
if (fd.is_link(core_dir)) {
|
||||
var core_target = fd.readlink(core_dir)
|
||||
core_target = fd.readlink(core_dir)
|
||||
if (core_target == package_dir || fd.realpath(core_dir) == package_dir) {
|
||||
return 'core'
|
||||
}
|
||||
@@ -175,13 +186,14 @@ function abs_path_to_package(package_dir)
|
||||
return package_dir
|
||||
|
||||
// For local directories (e.g., linked targets), read the package name from cell.toml
|
||||
try {
|
||||
var content = text(fd.slurp(package_dir + '/cell.toml'))
|
||||
var cfg = toml.decode(content)
|
||||
var _toml_path = package_dir + '/cell.toml'
|
||||
var content = null
|
||||
var cfg = null
|
||||
if (fd.is_file(_toml_path)) {
|
||||
content = text(fd.slurp(_toml_path))
|
||||
cfg = toml.decode(content)
|
||||
if (cfg.package)
|
||||
return cfg.package
|
||||
} catch (e) {
|
||||
// Fall through
|
||||
}
|
||||
|
||||
return null
|
||||
@@ -299,23 +311,29 @@ Shop.resolve_package_info = function(pkg) {
|
||||
|
||||
// Verify if a package name is valid and return status
|
||||
Shop.verify_package_name = function(pkg) {
|
||||
if (!pkg) throw Error("Empty package name")
|
||||
if (pkg == 'local') throw Error("local is not a valid package name")
|
||||
if (pkg == 'core') throw Error("core is not a valid package name")
|
||||
|
||||
if (search(pkg, '://') != null)
|
||||
throw Error(`Invalid package name: ${pkg}; did you mean ${array(pkg, '://')[1]}?`)
|
||||
if (!pkg) { print("Empty package name"); disrupt }
|
||||
if (pkg == 'local') { print("local is not a valid package name"); disrupt }
|
||||
if (pkg == 'core') { print("core is not a valid package name"); disrupt }
|
||||
|
||||
if (search(pkg, '://') != null) {
|
||||
print(`Invalid package name: ${pkg}; did you mean ${array(pkg, '://')[1]}?`)
|
||||
disrupt
|
||||
}
|
||||
}
|
||||
|
||||
// Convert module package to download URL
|
||||
Shop.get_download_url = function(pkg, commit_hash) {
|
||||
var info = Shop.resolve_package_info(pkg)
|
||||
var parts = null
|
||||
var host = null
|
||||
var user = null
|
||||
var repo = null
|
||||
|
||||
if (info == 'gitea') {
|
||||
var parts = array(pkg, '/')
|
||||
var host = parts[0]
|
||||
var user = parts[1]
|
||||
var repo = parts[2]
|
||||
parts = array(pkg, '/')
|
||||
host = parts[0]
|
||||
user = parts[1]
|
||||
repo = parts[2]
|
||||
|
||||
return 'https://' + host + '/' + user + '/' + repo + '/archive/' + commit_hash + '.zip'
|
||||
}
|
||||
@@ -326,12 +344,16 @@ Shop.get_download_url = function(pkg, commit_hash) {
|
||||
// Get the API URL for checking remote git commits
|
||||
Shop.get_api_url = function(pkg) {
|
||||
var info = Shop.resolve_package_info(pkg)
|
||||
var parts = null
|
||||
var host = null
|
||||
var user = null
|
||||
var repo = null
|
||||
|
||||
if (info == 'gitea') {
|
||||
var parts = array(pkg, '/')
|
||||
var host = parts[0]
|
||||
var user = parts[1]
|
||||
var repo = parts[2]
|
||||
parts = array(pkg, '/')
|
||||
host = parts[0]
|
||||
user = parts[1]
|
||||
repo = parts[2]
|
||||
return 'https://' + host + '/api/v1/repos/' + user + '/' + repo + '/branches/'
|
||||
}
|
||||
|
||||
@@ -378,116 +400,112 @@ Shop.get_script_capabilities = function(path) {
|
||||
return Shop.script_inject_for(file_info)
|
||||
}
|
||||
|
||||
// Build the env object for a module, with runtime fns and $-prefixed capabilities.
|
||||
// Matches engine.cm's approach: env properties become free variables in the module.
|
||||
function inject_env(inject) {
|
||||
// Start with runtime functions from engine
|
||||
var env = {}
|
||||
var rt = my$_.os ? my$_.os.runtime_env : null
|
||||
if (rt) {
|
||||
for (var k in rt) {
|
||||
env[k] = rt[k]
|
||||
}
|
||||
arrfor(array(rt), function(k) { env[k] = rt[k] })
|
||||
}
|
||||
|
||||
// Add capability injections
|
||||
for (var i = 0; i < length(inject); i++) {
|
||||
var inj = inject[i]
|
||||
var key = trim(inj, '$')
|
||||
if (key == 'fd') env[key] = fd
|
||||
else env[key] = my$_[key]
|
||||
// Add capability injections with $ prefix
|
||||
var i = 0
|
||||
var inj = null
|
||||
var key = null
|
||||
for (i = 0; i < length(inject); i++) {
|
||||
inj = inject[i]
|
||||
key = inj
|
||||
if (key && key[0] == '$') key = text(key, 1)
|
||||
if (key == 'fd') env['$fd'] = fd
|
||||
else env['$' + key] = my$_[key]
|
||||
}
|
||||
return env
|
||||
}
|
||||
|
||||
function inject_bindings_code(inject) {
|
||||
var lines = []
|
||||
// Lazy-loaded compiler modules for on-the-fly compilation
|
||||
var _mcode_mod = null
|
||||
var _streamline_mod = null
|
||||
|
||||
// Runtime function bindings
|
||||
var runtime_fns = ['logical', 'some', 'every', 'starts_with', 'ends_with',
|
||||
'actor', 'is_actor', 'log', 'send',
|
||||
'fallback', 'parallel', 'race', 'sequence']
|
||||
for (var i = 0; i < length(runtime_fns); i++) {
|
||||
var fn = runtime_fns[i]
|
||||
push(lines, `var ${fn} = env["${fn}"];`)
|
||||
}
|
||||
|
||||
// Capability bindings ($delay, $start, etc.)
|
||||
for (var i = 0; i < length(inject); i++) {
|
||||
var inj = inject[i]
|
||||
var key = trim(inj, '$')
|
||||
push(lines, `var $${key} = env["${key}"];`)
|
||||
}
|
||||
return text(lines, '\n')
|
||||
}
|
||||
|
||||
// Build the use function for a specific package context
|
||||
function make_use_fn_code(pkg_arg) {
|
||||
return `function(path) { return globalThis.use(path, ${pkg_arg}); }`
|
||||
}
|
||||
|
||||
// for script forms, path is the canonical path of the module
|
||||
var script_form = function(path, script, pkg, inject) {
|
||||
var pkg_arg = pkg ? `'${pkg}'` : 'null'
|
||||
var binds = inject_bindings_code(inject)
|
||||
|
||||
var fn = `(function setup_module(args, use, env){
|
||||
def arg = args;
|
||||
def PACKAGE = ${pkg_arg};
|
||||
${binds}
|
||||
${script}
|
||||
})`
|
||||
return fn
|
||||
}
|
||||
|
||||
// Resolve module function, hashing it in the process
|
||||
// path is the exact path to the script file
|
||||
// Compile a module and return its bytecode blob.
|
||||
// The bytecode is cached on disk by content hash.
|
||||
function resolve_mod_fn(path, pkg) {
|
||||
if (!fd.is_file(path)) throw Error(`path ${path} is not a file`)
|
||||
if (!fd.is_file(path)) { print(`path ${path} is not a file`); disrupt }
|
||||
|
||||
var file_info = Shop.file_info(path)
|
||||
var file_pkg = file_info.package
|
||||
var inject = Shop.script_inject_for(file_info)
|
||||
var content = text(fd.slurp(path))
|
||||
var script = script_form(path, content, file_pkg, inject);
|
||||
var cached = pull_from_cache(stone(blob(content)))
|
||||
var ast = null
|
||||
var compiled = null
|
||||
var mach_path = null
|
||||
var mach_blob = null
|
||||
var mcode_path = null
|
||||
var ir = null
|
||||
var optimized = null
|
||||
|
||||
var obj = pull_from_cache(stone(blob(script)))
|
||||
if (obj) {
|
||||
var fn = js.compile_unblob(obj)
|
||||
return js.integrate(fn, null)
|
||||
// Check cache for pre-compiled .mach blob
|
||||
if (cached) {
|
||||
return cached
|
||||
}
|
||||
|
||||
// Compile name is just for debug/stack traces
|
||||
var compile_name = path
|
||||
// Check for pre-compiled .mach or .mcode file alongside .cm source
|
||||
if (ends_with(path, '.cm')) {
|
||||
mach_path = text(path, 0, length(path) - 3) + '.mach'
|
||||
if (fd.is_file(mach_path)) {
|
||||
mach_blob = fd.slurp(mach_path)
|
||||
put_into_cache(stone(blob(content)), mach_blob)
|
||||
return mach_blob
|
||||
}
|
||||
mcode_path = path + '.mcode'
|
||||
if (fd.is_file(mcode_path)) {
|
||||
compiled = mach_compile_mcode_bin(path, text(fd.slurp(mcode_path)))
|
||||
put_into_cache(stone(blob(content)), compiled)
|
||||
return compiled
|
||||
}
|
||||
}
|
||||
|
||||
var fn = js.compile(compile_name, script)
|
||||
// Compile via full pipeline: analyze → mcode → streamline → serialize
|
||||
if (!_mcode_mod) _mcode_mod = Shop.use("mcode", null)
|
||||
if (!_streamline_mod) _streamline_mod = Shop.use("streamline", null)
|
||||
ast = analyze(content, path)
|
||||
ir = _mcode_mod(ast)
|
||||
optimized = _streamline_mod(ir)
|
||||
compiled = mach_compile_mcode_bin(path, shop_json.encode(optimized))
|
||||
put_into_cache(stone(blob(content)), compiled)
|
||||
|
||||
put_into_cache(stone(blob(script)), js.compile_blob(fn))
|
||||
|
||||
return js.integrate(fn, null)
|
||||
return compiled
|
||||
}
|
||||
|
||||
// given a path and a package context
|
||||
// return module info about where it was found
|
||||
function resolve_locator(path, ctx)
|
||||
{
|
||||
{
|
||||
var explicit = split_explicit_package_import(path)
|
||||
var explicit_path = null
|
||||
var fn = null
|
||||
var core_dir = null
|
||||
var core_file_path = null
|
||||
var is_core = null
|
||||
var scope = null
|
||||
var alias_path = null
|
||||
|
||||
if (explicit) {
|
||||
if (is_internal_path(explicit.path) && ctx && explicit.package != ctx)
|
||||
explicit = null
|
||||
}
|
||||
if (explicit) {
|
||||
var explicit_path = get_packages_dir() + '/' + safe_package_path(explicit.package) + '/' + explicit.path
|
||||
explicit_path = get_packages_dir() + '/' + safe_package_path(explicit.package) + '/' + explicit.path
|
||||
if (fd.is_file(explicit_path)) {
|
||||
var fn = resolve_mod_fn(explicit_path, explicit.package)
|
||||
fn = resolve_mod_fn(explicit_path, explicit.package)
|
||||
return {path: explicit_path, scope: SCOPE_PACKAGE, symbol: fn}
|
||||
}
|
||||
}
|
||||
|
||||
// 1. If no context, resolve from core only
|
||||
if (!ctx) {
|
||||
var core_dir = Shop.get_core_dir()
|
||||
var core_file_path = core_dir + '/' + path
|
||||
core_dir = Shop.get_core_dir()
|
||||
core_file_path = core_dir + '/' + path
|
||||
if (fd.is_file(core_file_path)) {
|
||||
var fn = resolve_mod_fn(core_file_path, 'core')
|
||||
fn = resolve_mod_fn(core_file_path, 'core')
|
||||
return {path: core_file_path, scope: SCOPE_CORE, symbol: fn}
|
||||
}
|
||||
return null
|
||||
@@ -496,7 +514,7 @@ function resolve_locator(path, ctx)
|
||||
// check in ctx package
|
||||
// If ctx is an absolute path (starts with /), use it directly
|
||||
// Otherwise, look it up in the packages directory
|
||||
var ctx_dir
|
||||
var ctx_dir = null
|
||||
if (starts_with(ctx, '/')) {
|
||||
ctx_dir = ctx
|
||||
} else {
|
||||
@@ -505,10 +523,10 @@ function resolve_locator(path, ctx)
|
||||
var ctx_path = ctx_dir + '/' + path
|
||||
|
||||
if (fd.is_file(ctx_path)) {
|
||||
var fn = resolve_mod_fn(ctx_path, ctx)
|
||||
fn = resolve_mod_fn(ctx_path, ctx)
|
||||
// Check if ctx is the core package (either by name or by path)
|
||||
var is_core = (ctx == 'core') || (ctx_dir == Shop.get_core_dir())
|
||||
var scope = is_core ? SCOPE_CORE : SCOPE_LOCAL
|
||||
is_core = (ctx == 'core') || (ctx_dir == Shop.get_core_dir())
|
||||
scope = is_core ? SCOPE_CORE : SCOPE_LOCAL
|
||||
return {path: ctx_path, scope: scope, symbol: fn}
|
||||
}
|
||||
|
||||
@@ -518,24 +536,24 @@ function resolve_locator(path, ctx)
|
||||
// check for aliased dependency
|
||||
var alias = pkg_tools.split_alias(ctx, path)
|
||||
if (alias) {
|
||||
var alias_path = get_packages_dir() + '/' + safe_package_path(alias.package) + '/' + alias.path
|
||||
alias_path = get_packages_dir() + '/' + safe_package_path(alias.package) + '/' + alias.path
|
||||
if (fd.is_file(alias_path)) {
|
||||
var fn = resolve_mod_fn(alias_path, ctx)
|
||||
fn = resolve_mod_fn(alias_path, ctx)
|
||||
return {path: alias_path, scope:SCOPE_PACKAGE, symbol:fn}
|
||||
}
|
||||
}
|
||||
|
||||
var package_path = get_packages_dir() + '/' + safe_package_path(path)
|
||||
var package_path = get_packages_dir() + '/' + safe_package_path(path)
|
||||
if (fd.is_file(package_path)) {
|
||||
var fn = resolve_mod_fn(package_path, ctx)
|
||||
fn = resolve_mod_fn(package_path, ctx)
|
||||
return {path: package_path, scope: SCOPE_PACKAGE, symbol: fn}
|
||||
}
|
||||
|
||||
// 4. Check core as fallback
|
||||
var core_dir = Shop.get_core_dir()
|
||||
var core_file_path = core_dir + '/' + path
|
||||
core_dir = Shop.get_core_dir()
|
||||
core_file_path = core_dir + '/' + path
|
||||
if (fd.is_file(core_file_path)) {
|
||||
var fn = resolve_mod_fn(core_file_path, 'core')
|
||||
fn = resolve_mod_fn(core_file_path, 'core')
|
||||
return {path: core_file_path, scope: SCOPE_CORE, symbol: fn}
|
||||
}
|
||||
|
||||
@@ -567,7 +585,7 @@ Shop.open_package_dylib = function(pkg) {
|
||||
var link_target = link.get_target(pkg)
|
||||
var resolved_pkg = link_target ? link_target : pkg
|
||||
|
||||
var pkg_dir;
|
||||
var pkg_dir = null
|
||||
if (starts_with(resolved_pkg, '/')) {
|
||||
pkg_dir = resolved_pkg
|
||||
} else {
|
||||
@@ -575,34 +593,23 @@ Shop.open_package_dylib = function(pkg) {
|
||||
}
|
||||
|
||||
var toml_path = pkg_dir + '/cell.toml'
|
||||
var content = null
|
||||
var cfg = null
|
||||
if (fd.is_file(toml_path)) {
|
||||
try {
|
||||
var content = text(fd.slurp(toml_path))
|
||||
var cfg = toml.decode(content)
|
||||
if (cfg.dependencies) {
|
||||
arrfor(array(cfg.dependencies), function(alias, i) {
|
||||
var dep_pkg = cfg.dependencies[alias]
|
||||
try {
|
||||
Shop.open_package_dylib(dep_pkg)
|
||||
} catch (dep_e) {
|
||||
// Dependency dylib load failed, continue with others
|
||||
}
|
||||
})
|
||||
}
|
||||
} catch (e) {
|
||||
// Error reading toml, continue
|
||||
content = text(fd.slurp(toml_path))
|
||||
cfg = toml.decode(content)
|
||||
if (cfg.dependencies) {
|
||||
arrfor(array(cfg.dependencies), function(alias, i) {
|
||||
var dep_pkg = cfg.dependencies[alias]
|
||||
Shop.open_package_dylib(dep_pkg)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
var dl_path = get_lib_path(pkg)
|
||||
if (fd.is_file(dl_path)) {
|
||||
if (!open_dls[dl_path]) {
|
||||
try {
|
||||
open_dls[dl_path] = os.dylib_open(dl_path)
|
||||
} catch (e) {
|
||||
dylib_visited[pkg] = false
|
||||
throw e
|
||||
}
|
||||
open_dls[dl_path] = os.dylib_open(dl_path)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -613,12 +620,19 @@ Shop.open_package_dylib = function(pkg) {
|
||||
// Core is never loaded as a dynamic library via dlopen
|
||||
function resolve_c_symbol(path, package_context) {
|
||||
var explicit = split_explicit_package_import(path)
|
||||
var sym = null
|
||||
var dl_path = null
|
||||
var _path = null
|
||||
var core_sym = null
|
||||
var canon_pkg = null
|
||||
var mod_name = null
|
||||
|
||||
if (explicit) {
|
||||
if (is_internal_path(explicit.path) && package_context && explicit.package != package_context)
|
||||
explicit = null
|
||||
}
|
||||
if (explicit) {
|
||||
var sym = make_c_symbol(explicit.package, explicit.path)
|
||||
sym = make_c_symbol(explicit.package, explicit.path)
|
||||
if (os.internal_exists(sym)) {
|
||||
return {
|
||||
symbol: function() { return os.load_internal(sym) },
|
||||
@@ -629,7 +643,7 @@ function resolve_c_symbol(path, package_context) {
|
||||
}
|
||||
|
||||
Shop.open_package_dylib(explicit.package)
|
||||
var dl_path = get_lib_path(explicit.package)
|
||||
dl_path = get_lib_path(explicit.package)
|
||||
if (open_dls[dl_path] && os.dylib_has_symbol(open_dls[dl_path], sym)) {
|
||||
return {
|
||||
symbol: function() { return os.dylib_symbol(open_dls[dl_path], sym) },
|
||||
@@ -642,8 +656,8 @@ function resolve_c_symbol(path, package_context) {
|
||||
|
||||
// If no package context, only check core internal symbols
|
||||
if (!package_context || package_context == 'core') {
|
||||
path = replace(path, '/', '_')
|
||||
var core_sym = `js_${path}_use`
|
||||
_path = replace(path, '/', '_')
|
||||
core_sym = `js_${_path}_use`
|
||||
if (os.internal_exists(core_sym)) {
|
||||
return {
|
||||
symbol: function() { return os.load_internal(core_sym) },
|
||||
@@ -655,7 +669,7 @@ function resolve_c_symbol(path, package_context) {
|
||||
}
|
||||
|
||||
// 1. Check own package first (internal, then dylib)
|
||||
var sym = make_c_symbol(package_context, path)
|
||||
sym = make_c_symbol(package_context, path)
|
||||
if (os.internal_exists(sym)) {
|
||||
return {
|
||||
symbol: function() { return os.load_internal(sym) },
|
||||
@@ -665,7 +679,7 @@ function resolve_c_symbol(path, package_context) {
|
||||
}
|
||||
|
||||
Shop.open_package_dylib(package_context)
|
||||
var dl_path = get_lib_path(package_context)
|
||||
dl_path = get_lib_path(package_context)
|
||||
|
||||
if (open_dls[dl_path] && os.dylib_has_symbol(open_dls[dl_path], sym)) {
|
||||
return {
|
||||
@@ -681,10 +695,10 @@ function resolve_c_symbol(path, package_context) {
|
||||
// 2. Check aliased package imports (e.g. 'prosperon/sprite')
|
||||
var pkg_alias = get_import_package(path)
|
||||
if (pkg_alias) {
|
||||
var canon_pkg = get_aliased_package(path, package_context)
|
||||
canon_pkg = get_aliased_package(path, package_context)
|
||||
if (canon_pkg) {
|
||||
var mod_name = get_import_name(path)
|
||||
var sym = make_c_symbol(canon_pkg, mod_name)
|
||||
mod_name = get_import_name(path)
|
||||
sym = make_c_symbol(canon_pkg, mod_name)
|
||||
|
||||
// Check internal first
|
||||
if (os.internal_exists(sym)) {
|
||||
@@ -698,7 +712,7 @@ function resolve_c_symbol(path, package_context) {
|
||||
|
||||
// Then check dylib
|
||||
Shop.open_package_dylib(canon_pkg)
|
||||
var dl_path = get_lib_path(canon_pkg)
|
||||
dl_path = get_lib_path(canon_pkg)
|
||||
if (open_dls[dl_path] && os.dylib_has_symbol(open_dls[dl_path], sym)) {
|
||||
return {
|
||||
symbol: function() { return os.dylib_symbol(open_dls[dl_path], sym) },
|
||||
@@ -711,7 +725,7 @@ function resolve_c_symbol(path, package_context) {
|
||||
}
|
||||
|
||||
// 3. Check core internal symbols (core is never a dynamic library)
|
||||
var core_sym = `js_${replace(path, '/', '_')}_use`
|
||||
core_sym = `js_${replace(path, '/', '_')}_use`
|
||||
if (os.internal_exists(core_sym)) {
|
||||
return {
|
||||
symbol: function() { return os.load_internal(core_sym) },
|
||||
@@ -739,31 +753,37 @@ function resolve_module_info(path, package_context) {
|
||||
if (min_scope == 999)
|
||||
return null
|
||||
|
||||
var cache_key
|
||||
var cache_key = null
|
||||
var real_path = null
|
||||
var real_info = null
|
||||
var pkg_alias = null
|
||||
var canon_pkg = null
|
||||
var mod_name = null
|
||||
|
||||
if (mod_resolve.scope == SCOPE_CORE) {
|
||||
cache_key = 'core/' + path
|
||||
} else if (mod_resolve.scope < 900 && mod_resolve.path) {
|
||||
var real_path = fd.realpath(mod_resolve.path)
|
||||
real_path = fd.realpath(mod_resolve.path)
|
||||
if (real_path) {
|
||||
var real_info = Shop.file_info(real_path)
|
||||
real_info = Shop.file_info(real_path)
|
||||
if (real_info.package && real_info.name)
|
||||
cache_key = real_info.package + '/' + real_info.name
|
||||
else
|
||||
cache_key = real_path
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (!cache_key) {
|
||||
if (min_scope == SCOPE_CORE)
|
||||
cache_key = 'core/' + path
|
||||
else if (min_scope == SCOPE_LOCAL && package_context)
|
||||
cache_key = package_context + '/' + path
|
||||
else if (min_scope == SCOPE_PACKAGE) {
|
||||
var pkg_alias = get_import_package(path)
|
||||
pkg_alias = get_import_package(path)
|
||||
if (pkg_alias) {
|
||||
var canon_pkg = get_canonical_package(pkg_alias, package_context)
|
||||
canon_pkg = get_canonical_package(pkg_alias, package_context)
|
||||
if (canon_pkg) {
|
||||
var mod_name = get_import_name(path)
|
||||
mod_name = get_import_name(path)
|
||||
cache_key = canon_pkg + '/' + mod_name
|
||||
} else
|
||||
cache_key = path
|
||||
@@ -814,54 +834,50 @@ function execute_module(info)
|
||||
var c_resolve = info.c_resolve
|
||||
var mod_resolve = info.mod_resolve
|
||||
|
||||
var used
|
||||
var used = null
|
||||
var file_info = null
|
||||
var inject = null
|
||||
var env = null
|
||||
var pkg = null
|
||||
|
||||
if (mod_resolve.scope < 900) {
|
||||
var context = null
|
||||
// Build env with runtime fns, capabilities, and use function
|
||||
file_info = Shop.file_info(mod_resolve.path)
|
||||
inject = Shop.script_inject_for(file_info)
|
||||
env = inject_env(inject)
|
||||
pkg = file_info.package
|
||||
env.use = make_use_fn(pkg)
|
||||
|
||||
// Add C module as native context if available
|
||||
if (c_resolve.scope < 900) {
|
||||
context = call_c_module(c_resolve)
|
||||
env.native = call_c_module(c_resolve)
|
||||
}
|
||||
|
||||
// Get file info to determine inject list
|
||||
var file_info = Shop.file_info(mod_resolve.path)
|
||||
var inject = Shop.script_inject_for(file_info)
|
||||
var env = inject_env(inject)
|
||||
var pkg = file_info.package
|
||||
var use_fn = make_use_fn(pkg)
|
||||
|
||||
// Call with signature: setup_module(args, use, env)
|
||||
// args is null for module loading
|
||||
used = call(mod_resolve.symbol, context, [null, use_fn, env])
|
||||
// Load compiled bytecode with env
|
||||
used = mach_load(mod_resolve.symbol, env)
|
||||
} else if (c_resolve.scope < 900) {
|
||||
// C only
|
||||
used = call_c_module(c_resolve)
|
||||
} else {
|
||||
throw Error(`Module ${info.path} could not be found`)
|
||||
print(`Module ${info.path} could not be found`); disrupt
|
||||
}
|
||||
|
||||
// if (is_function(used))
|
||||
// throw Error('C module loader returned a function; did you forget to call it?')
|
||||
if (!used) { print(`Module ${info} returned null`); disrupt }
|
||||
|
||||
if (!used)
|
||||
throw Error(`Module ${info} returned null`)
|
||||
|
||||
// stone(used)
|
||||
return used
|
||||
}
|
||||
|
||||
function get_module(path, package_context) {
|
||||
var info = resolve_module_info(path, package_context)
|
||||
|
||||
if (!info)
|
||||
throw Error(`Module ${path} could not be found in ${package_context}`)
|
||||
if (!info) { print(`Module ${path} could not be found in ${package_context}`); disrupt }
|
||||
|
||||
return execute_module(info)
|
||||
}
|
||||
|
||||
Shop.use = function use(path, package_context) {
|
||||
var info = resolve_module_info(path, package_context)
|
||||
if (!info)
|
||||
throw Error(`Module ${path} could not be found in ${package_context}`)
|
||||
if (!info) { print(`Module ${path} could not be found in ${package_context}`); disrupt }
|
||||
|
||||
if (use_cache[info.cache_key])
|
||||
return use_cache[info.cache_key]
|
||||
@@ -889,13 +905,13 @@ function fetch_remote_hash(pkg) {
|
||||
if (!api_url) return null
|
||||
|
||||
|
||||
try {
|
||||
var _fetch_hash = function() {
|
||||
var resp = http.fetch(api_url)
|
||||
return Shop.extract_commit_hash(pkg, text(resp))
|
||||
} catch (e) {
|
||||
log.console("Warning: Could not check for updates for " + pkg)
|
||||
} disruption {
|
||||
return null
|
||||
}
|
||||
return _fetch_hash()
|
||||
}
|
||||
|
||||
// Download a zip for a package at a specific commit and cache it
|
||||
@@ -909,14 +925,14 @@ function download_zip(pkg, commit_hash) {
|
||||
return null
|
||||
}
|
||||
|
||||
try {
|
||||
var _download = function() {
|
||||
var zip_blob = http.fetch(download_url)
|
||||
fd.slurpwrite(cache_path, zip_blob)
|
||||
return zip_blob
|
||||
} catch (e) {
|
||||
log.error("Download failed for " + pkg + ": " + e)
|
||||
} disruption {
|
||||
return null
|
||||
}
|
||||
return _download()
|
||||
}
|
||||
|
||||
// Get zip from cache, returns null if not cached
|
||||
@@ -952,17 +968,18 @@ Shop.fetch = function(pkg) {
|
||||
// Check if we have the zip cached
|
||||
var zip_blob = get_cached_zip(pkg, commit)
|
||||
|
||||
var actual_hash = null
|
||||
if (zip_blob) {
|
||||
// If we have a hash on record, verify it
|
||||
if (expected_hash) {
|
||||
var actual_hash = text(crypto.blake2(zip_blob), 'h')
|
||||
actual_hash = text(crypto.blake2(zip_blob), 'h')
|
||||
if (actual_hash == expected_hash) {
|
||||
return { status: 'cached' }
|
||||
}
|
||||
log.console("Zip hash mismatch for " + pkg + ", re-fetching...")
|
||||
} else {
|
||||
// No hash stored yet - compute and store it
|
||||
var actual_hash = text(crypto.blake2(zip_blob), 'h')
|
||||
actual_hash = text(crypto.blake2(zip_blob), 'h')
|
||||
lock_entry.zip_hash = actual_hash
|
||||
Shop.save_lock(lock)
|
||||
return { status: 'cached' }
|
||||
@@ -1014,10 +1031,12 @@ Shop.extract = function(pkg) {
|
||||
// Check if already extracted at correct commit
|
||||
var lock = Shop.load_lock()
|
||||
var lock_entry = lock[pkg]
|
||||
var extracted_commit_file = null
|
||||
var extracted_commit = null
|
||||
if (lock_entry && lock_entry.commit) {
|
||||
var extracted_commit_file = target_dir + '/.cell_commit'
|
||||
extracted_commit_file = target_dir + '/.cell_commit'
|
||||
if (fd.is_file(extracted_commit_file)) {
|
||||
var extracted_commit = trim(text(fd.slurp(extracted_commit_file)))
|
||||
extracted_commit = trim(text(fd.slurp(extracted_commit_file)))
|
||||
if (extracted_commit == lock_entry.commit) {
|
||||
// Already extracted at this commit, skip
|
||||
return true
|
||||
@@ -1028,7 +1047,7 @@ Shop.extract = function(pkg) {
|
||||
var zip_blob = get_package_zip(pkg)
|
||||
|
||||
if (!zip_blob)
|
||||
throw Error("No zip blob available for " + pkg)
|
||||
print("No zip blob available for " + pkg); disrupt
|
||||
|
||||
// Extract zip for remote package
|
||||
install_zip(zip_blob, target_dir)
|
||||
@@ -1069,6 +1088,7 @@ Shop.update = function(pkg) {
|
||||
|
||||
log.console(`checking ${pkg}`)
|
||||
|
||||
var new_entry = null
|
||||
if (info == 'local') {
|
||||
// Check if local path exists
|
||||
if (!fd.is_dir(pkg)) {
|
||||
@@ -1076,7 +1096,7 @@ Shop.update = function(pkg) {
|
||||
return null
|
||||
}
|
||||
// Local packages always get a lock entry
|
||||
var new_entry = {
|
||||
new_entry = {
|
||||
type: 'local',
|
||||
updated: time.number()
|
||||
}
|
||||
@@ -1099,7 +1119,7 @@ Shop.update = function(pkg) {
|
||||
if (local_commit == remote_commit)
|
||||
return null
|
||||
|
||||
var new_entry = {
|
||||
new_entry = {
|
||||
type: info,
|
||||
commit: remote_commit,
|
||||
updated: time.number()
|
||||
@@ -1113,7 +1133,7 @@ Shop.update = function(pkg) {
|
||||
|
||||
function install_zip(zip_blob, target_dir) {
|
||||
var zip = miniz.read(zip_blob)
|
||||
if (!zip) throw Error("Failed to read zip archive")
|
||||
if (!zip) { print("Failed to read zip archive"); disrupt }
|
||||
|
||||
if (fd.is_link(target_dir)) fd.unlink(target_dir)
|
||||
if (fd.is_dir(target_dir)) fd.rmdir(target_dir, 1)
|
||||
@@ -1124,21 +1144,28 @@ function install_zip(zip_blob, target_dir) {
|
||||
var count = zip.count()
|
||||
var created_dirs = {}
|
||||
|
||||
for (var i = 0; i < count; i++) {
|
||||
var i = 0
|
||||
var filename = null
|
||||
var slash_pos = null
|
||||
var rel_path = null
|
||||
var full_path = null
|
||||
var dir_path = null
|
||||
var file_data = null
|
||||
for (i = 0; i < count; i++) {
|
||||
if (zip.is_directory(i)) continue
|
||||
var filename = zip.get_filename(i)
|
||||
var slash_pos = search(filename, '/')
|
||||
filename = zip.get_filename(i)
|
||||
slash_pos = search(filename, '/')
|
||||
if (slash_pos == null) continue
|
||||
if (slash_pos + 1 >= length(filename)) continue
|
||||
var rel_path = text(filename, slash_pos + 1)
|
||||
var full_path = target_dir + '/' + rel_path
|
||||
var dir_path = fd.dirname(full_path)
|
||||
rel_path = text(filename, slash_pos + 1)
|
||||
full_path = target_dir + '/' + rel_path
|
||||
dir_path = fd.dirname(full_path)
|
||||
|
||||
if (!created_dirs[dir_path]) {
|
||||
ensure_dir(dir_path)
|
||||
created_dirs[dir_path] = true
|
||||
}
|
||||
var file_data = zip.slurp(filename)
|
||||
file_data = zip.slurp(filename)
|
||||
|
||||
stone(file_data)
|
||||
|
||||
@@ -1161,18 +1188,20 @@ Shop.remove = function(pkg) {
|
||||
|
||||
Shop.get = function(pkg) {
|
||||
var lock = Shop.load_lock()
|
||||
|
||||
var info = null
|
||||
var commit = null
|
||||
|
||||
if (!lock[pkg]) {
|
||||
var info = Shop.resolve_package_info(pkg)
|
||||
info = Shop.resolve_package_info(pkg)
|
||||
if (!info) {
|
||||
throw Error("Invalid package: " + pkg)
|
||||
print("Invalid package: " + pkg); disrupt
|
||||
}
|
||||
|
||||
var commit = null
|
||||
|
||||
commit = null
|
||||
if (info != 'local') {
|
||||
commit = fetch_remote_hash(pkg)
|
||||
if (!commit) {
|
||||
throw Error("Could not resolve commit for " + pkg)
|
||||
print("Could not resolve commit for " + pkg); disrupt
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1188,8 +1217,6 @@ Shop.get = function(pkg) {
|
||||
// Compile a module
|
||||
// List all files in a package
|
||||
|
||||
var debug = use('debug')
|
||||
|
||||
Shop.file_reload = function(file)
|
||||
{
|
||||
var info = Shop.file_info(file)
|
||||
@@ -1228,9 +1255,11 @@ function get_package_scripts(package)
|
||||
{
|
||||
var files = pkg_tools.list_files(package)
|
||||
var scripts = []
|
||||
|
||||
for (var i = 0; i < length(files); i++) {
|
||||
var file = files[i]
|
||||
|
||||
var i = 0
|
||||
var file = null
|
||||
for (i = 0; i < length(files); i++) {
|
||||
file = files[i]
|
||||
if (ends_with(file, '.cm') || ends_with(file, '.ce')) {
|
||||
push(scripts, file)
|
||||
}
|
||||
|
||||
@@ -4,19 +4,21 @@ var pkg = use('package')
|
||||
|
||||
// Check if current directory is a valid cell package
|
||||
function is_valid_package(dir) {
|
||||
if (!dir) dir = '.'
|
||||
return fd.is_file(dir + '/cell.toml')
|
||||
var _dir = dir == null ? '.' : dir
|
||||
if (!_dir) _dir = '.'
|
||||
return fd.is_file(_dir + '/cell.toml')
|
||||
}
|
||||
|
||||
// Get current package name from cell.toml or null
|
||||
function get_current_package_name() {
|
||||
if (!is_valid_package('.')) return null
|
||||
try {
|
||||
var _load = function() {
|
||||
var config = pkg.load_config(null)
|
||||
return config.package || 'local'
|
||||
} catch (e) {
|
||||
} disruption {
|
||||
return 'local'
|
||||
}
|
||||
return _load()
|
||||
}
|
||||
|
||||
// Get the directory for a package
|
||||
@@ -37,9 +39,10 @@ function ensure_dir(path) {
|
||||
|
||||
var parts = array(path, '/')
|
||||
var current = starts_with(path, '/') ? '/' : ''
|
||||
for (var i = 0; i < length(parts); i++) {
|
||||
var i = 0
|
||||
for (i = 0; i < length(parts); i++) {
|
||||
if (parts[i] == '') continue
|
||||
current += parts[i] + '/'
|
||||
current = current + parts[i] + '/'
|
||||
if (!fd.is_dir(current)) {
|
||||
fd.mkdir(current)
|
||||
}
|
||||
|
||||
200
ir_report.ce
Normal file
200
ir_report.ce
Normal file
@@ -0,0 +1,200 @@
|
||||
// ir_report.ce — optimizer flight recorder CLI
|
||||
//
|
||||
// Usage: ./cell --core . ir_report.ce [options] <file.cm|file.ce>
|
||||
//
|
||||
// Options:
|
||||
// --summary Per-pass JSON summaries (default)
|
||||
// --events Include rewrite events
|
||||
// --types Include type deltas
|
||||
// --ir-before=PASS Print canonical IR before PASS
|
||||
// --ir-after=PASS Print canonical IR after PASS
|
||||
// --ir-all Print canonical IR before/after every pass
|
||||
// --full Everything (summary + events + types + ir-all)
|
||||
|
||||
var fd = use("fd")
|
||||
var json = use("json")
|
||||
var tokenize = use("tokenize")
|
||||
var parse = use("parse")
|
||||
var fold = use("fold")
|
||||
var mcode = use("mcode")
|
||||
var streamline = use("streamline")
|
||||
var ir_stats = use("ir_stats")
|
||||
|
||||
// --- Parse arguments ---
|
||||
|
||||
var filename = null
|
||||
var opt_events = false
|
||||
var opt_types = false
|
||||
var opt_ir_before = null
|
||||
var opt_ir_after = null
|
||||
var opt_ir_all = false
|
||||
var i = 0
|
||||
var arg = null
|
||||
var p = null
|
||||
var e = null
|
||||
var td = null
|
||||
|
||||
while (i < length(args)) {
|
||||
arg = args[i]
|
||||
if (arg == "--events") {
|
||||
opt_events = true
|
||||
} else if (arg == "--types") {
|
||||
opt_types = true
|
||||
} else if (arg == "--ir-all") {
|
||||
opt_ir_all = true
|
||||
} else if (arg == "--full") {
|
||||
opt_events = true
|
||||
opt_types = true
|
||||
opt_ir_all = true
|
||||
} else if (arg == "--summary") {
|
||||
// default, no-op
|
||||
} else if (starts_with(arg, "--ir-before=")) {
|
||||
opt_ir_before = text(arg, 12)
|
||||
} else if (starts_with(arg, "--ir-after=")) {
|
||||
opt_ir_after = text(arg, 11)
|
||||
} else if (!starts_with(arg, "--")) {
|
||||
filename = arg
|
||||
} else {
|
||||
print(`unknown option: ${arg}\n`)
|
||||
print("usage: cell --core . ir_report.ce [options] <file>\n")
|
||||
$stop()
|
||||
}
|
||||
i = i + 1
|
||||
}
|
||||
|
||||
if (filename == null) {
|
||||
print("usage: cell --core . ir_report.ce [options] <file.cm|file.ce>\n")
|
||||
print(" --summary per-pass JSON summaries (default)\n")
|
||||
print(" --events include rewrite events\n")
|
||||
print(" --types include type deltas\n")
|
||||
print(" --ir-before=PASS print canonical IR before PASS\n")
|
||||
print(" --ir-after=PASS print canonical IR after PASS\n")
|
||||
print(" --ir-all print canonical IR before/after every pass\n")
|
||||
print(" --full everything\n")
|
||||
$stop()
|
||||
}
|
||||
|
||||
// --- Compile ---
|
||||
|
||||
var src = text(fd.slurp(filename))
|
||||
var tok = tokenize(src, filename)
|
||||
var ast = parse(tok.tokens, src, filename, tokenize)
|
||||
var folded = fold(ast)
|
||||
var compiled = mcode(folded)
|
||||
|
||||
// --- Determine which passes need IR snapshots ---
|
||||
|
||||
var need_snapshots = opt_ir_all || opt_ir_before != null || opt_ir_after != null
|
||||
|
||||
// Deep copy for before snapshot if we need IR printing
|
||||
var before_ir = null
|
||||
if (need_snapshots) {
|
||||
before_ir = json.decode(json.encode(compiled))
|
||||
}
|
||||
|
||||
// --- Set up log ---
|
||||
|
||||
var log = {
|
||||
passes: [],
|
||||
events: null,
|
||||
type_deltas: null
|
||||
}
|
||||
|
||||
if (opt_events) {
|
||||
log.events = []
|
||||
}
|
||||
if (opt_types) {
|
||||
log.type_deltas = []
|
||||
}
|
||||
|
||||
// --- Run optimizer ---
|
||||
|
||||
var optimized = streamline(compiled, log)
|
||||
|
||||
// --- Output ---
|
||||
|
||||
var emit = function(obj) {
|
||||
print(json.encode(obj))
|
||||
print("\n")
|
||||
}
|
||||
|
||||
// Pass summaries (always)
|
||||
i = 0
|
||||
while (i < length(log.passes)) {
|
||||
p = log.passes[i]
|
||||
p.type = "pass"
|
||||
emit(p)
|
||||
i = i + 1
|
||||
}
|
||||
|
||||
// Rewrite events
|
||||
if (opt_events && log.events != null) {
|
||||
i = 0
|
||||
while (i < length(log.events)) {
|
||||
e = log.events[i]
|
||||
e.type = "event"
|
||||
emit(e)
|
||||
i = i + 1
|
||||
}
|
||||
}
|
||||
|
||||
// Type deltas
|
||||
if (opt_types && log.type_deltas != null) {
|
||||
i = 0
|
||||
while (i < length(log.type_deltas)) {
|
||||
td = log.type_deltas[i]
|
||||
td.type = "types"
|
||||
emit(td)
|
||||
i = i + 1
|
||||
}
|
||||
}
|
||||
|
||||
// --- Canonical IR printing ---
|
||||
|
||||
var print_ir = function(ir_obj, when_label, pass_name) {
|
||||
var fname = null
|
||||
var fi = 0
|
||||
var func = null
|
||||
if (ir_obj.main != null) {
|
||||
fname = ir_obj.name != null ? ir_obj.name : "<main>"
|
||||
emit({
|
||||
type: "ir",
|
||||
when: when_label,
|
||||
pass: pass_name,
|
||||
fn: fname,
|
||||
text: ir_stats.canonical_ir(ir_obj.main, fname, {show_nops: true})
|
||||
})
|
||||
}
|
||||
if (ir_obj.functions != null) {
|
||||
fi = 0
|
||||
while (fi < length(ir_obj.functions)) {
|
||||
func = ir_obj.functions[fi]
|
||||
fname = func.name != null ? func.name : `<func_${text(fi)}>`
|
||||
emit({
|
||||
type: "ir",
|
||||
when: when_label,
|
||||
pass: pass_name,
|
||||
fn: fname,
|
||||
text: ir_stats.canonical_ir(func, fname, {show_nops: true})
|
||||
})
|
||||
fi = fi + 1
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
if (need_snapshots) {
|
||||
if (opt_ir_all) {
|
||||
print_ir(before_ir, "before", "all")
|
||||
print_ir(optimized, "after", "all")
|
||||
} else {
|
||||
if (opt_ir_before != null) {
|
||||
print_ir(before_ir, "before", opt_ir_before)
|
||||
}
|
||||
if (opt_ir_after != null) {
|
||||
print_ir(optimized, "after", opt_ir_after)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
$stop()
|
||||
357
ir_stats.cm
Normal file
357
ir_stats.cm
Normal file
@@ -0,0 +1,357 @@
|
||||
// ir_stats.cm — IR statistics, fingerprinting, and canonical printing
|
||||
//
|
||||
// Usage: var ir_stats = use("ir_stats")
|
||||
// ir_stats.detailed_stats(func)
|
||||
// ir_stats.ir_fingerprint(func)
|
||||
// ir_stats.canonical_ir(func, name, opts)
|
||||
// ir_stats.type_snapshot(slot_types)
|
||||
// ir_stats.type_delta(before_types, after_types)
|
||||
|
||||
var json = use("json")
|
||||
|
||||
// --- Category maps ---
|
||||
|
||||
var load_ops = {
|
||||
load_field: true, load_index: true, load_dynamic: true,
|
||||
get: true
|
||||
}
|
||||
var store_ops = {
|
||||
store_field: true, store_index: true, store_dynamic: true,
|
||||
set_var: true, put: true, push: true
|
||||
}
|
||||
var branch_ops = {
|
||||
jump: true, jump_true: true, jump_false: true, jump_not_null: true
|
||||
}
|
||||
var call_ops = {
|
||||
invoke: true, goinvoke: true
|
||||
}
|
||||
var guard_ops = {
|
||||
is_int: true, is_text: true, is_num: true, is_bool: true,
|
||||
is_null: true, is_array: true, is_func: true, is_record: true,
|
||||
is_stone: true
|
||||
}
|
||||
var arith_ops = {
|
||||
add_int: true, sub_int: true, mul_int: true, div_int: true, mod_int: true,
|
||||
add_float: true, sub_float: true, mul_float: true, div_float: true, mod_float: true,
|
||||
concat: true, neg_int: true, neg_float: true,
|
||||
bitnot: true, bitand: true, bitor: true, bitxor: true,
|
||||
shl: true, shr: true, ushr: true
|
||||
}
|
||||
var move_ops = {
|
||||
move: true
|
||||
}
|
||||
var const_ops = {
|
||||
int: true, true: true, false: true, null: true
|
||||
}
|
||||
|
||||
var nop_reasons = {
|
||||
tc: "tc",
|
||||
bl: "bl",
|
||||
mv: "mv",
|
||||
dj: "dj",
|
||||
ur: "ur"
|
||||
}
|
||||
|
||||
var category_tag = function(op) {
|
||||
if (guard_ops[op] == true) { return "guard" }
|
||||
if (branch_ops[op] == true) { return "branch" }
|
||||
if (load_ops[op] == true) { return "load" }
|
||||
if (store_ops[op] == true) { return "store" }
|
||||
if (call_ops[op] == true) { return "call" }
|
||||
if (arith_ops[op] == true) { return "arith" }
|
||||
if (move_ops[op] == true) { return "move" }
|
||||
if (const_ops[op] == true) { return "const" }
|
||||
return null
|
||||
}
|
||||
|
||||
// --- detailed_stats ---
|
||||
|
||||
var detailed_stats = function(func) {
|
||||
var instructions = func.instructions
|
||||
var stats = {
|
||||
instr: 0, nop: 0,
|
||||
load: 0, store: 0, branch: 0, call: 0,
|
||||
guard: 0, arith: 0, move: 0, const: 0,
|
||||
label: 0, other: 0
|
||||
}
|
||||
var i = 0
|
||||
var instr = null
|
||||
var op = null
|
||||
var num = 0
|
||||
|
||||
if (instructions == null) {
|
||||
return stats
|
||||
}
|
||||
|
||||
num = length(instructions)
|
||||
while (i < num) {
|
||||
instr = instructions[i]
|
||||
if (is_text(instr)) {
|
||||
if (starts_with(instr, "_nop_")) {
|
||||
stats.nop = stats.nop + 1
|
||||
stats.instr = stats.instr + 1
|
||||
} else {
|
||||
stats.label = stats.label + 1
|
||||
}
|
||||
} else if (is_array(instr)) {
|
||||
stats.instr = stats.instr + 1
|
||||
op = instr[0]
|
||||
if (op == "access" && !is_number(instr[2]) && !is_logical(instr[2])) {
|
||||
stats.load = stats.load + 1
|
||||
} else if (op == "access") {
|
||||
stats.const = stats.const + 1
|
||||
} else if (load_ops[op] == true) {
|
||||
stats.load = stats.load + 1
|
||||
} else if (store_ops[op] == true) {
|
||||
stats.store = stats.store + 1
|
||||
} else if (branch_ops[op] == true) {
|
||||
stats.branch = stats.branch + 1
|
||||
} else if (call_ops[op] == true) {
|
||||
stats.call = stats.call + 1
|
||||
} else if (guard_ops[op] == true) {
|
||||
stats.guard = stats.guard + 1
|
||||
} else if (arith_ops[op] == true) {
|
||||
stats.arith = stats.arith + 1
|
||||
} else if (move_ops[op] == true) {
|
||||
stats.move = stats.move + 1
|
||||
} else if (const_ops[op] == true) {
|
||||
stats.const = stats.const + 1
|
||||
} else {
|
||||
stats.other = stats.other + 1
|
||||
}
|
||||
}
|
||||
i = i + 1
|
||||
}
|
||||
return stats
|
||||
}
|
||||
|
||||
// --- ir_fingerprint ---
|
||||
// djb2 hash computed over the JSON-encoded instructions
|
||||
|
||||
var djb2 = function(s) {
|
||||
var chars = array(s)
|
||||
var hash = 5381
|
||||
var i = 0
|
||||
var num = length(chars)
|
||||
while (i < num) {
|
||||
hash = ((hash * 33) + number(chars[i])) % 4294967296
|
||||
i = i + 1
|
||||
}
|
||||
return text(hash, 16)
|
||||
}
|
||||
|
||||
var ir_fingerprint = function(func) {
|
||||
return djb2(json.encode(func.instructions))
|
||||
}
|
||||
|
||||
// --- canonical_ir ---
|
||||
|
||||
var pad_right = function(s, w) {
|
||||
var r = s
|
||||
while (length(r) < w) {
|
||||
r = r + " "
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
var nop_reason = function(s) {
|
||||
// extract reason from _nop_XX_NNN
|
||||
var parts = array(s, "_")
|
||||
// parts: ["", "nop", "XX", "NNN"]
|
||||
if (length(parts) >= 3) {
|
||||
return parts[2]
|
||||
}
|
||||
return "?"
|
||||
}
|
||||
|
||||
var fmt_operand = function(v) {
|
||||
if (is_null(v)) {
|
||||
return "null"
|
||||
}
|
||||
if (is_number(v)) {
|
||||
return text(v)
|
||||
}
|
||||
if (is_text(v)) {
|
||||
return `"${v}"`
|
||||
}
|
||||
if (is_logical(v)) {
|
||||
if (v) { return "true" }
|
||||
return "false"
|
||||
}
|
||||
return text(v)
|
||||
}
|
||||
|
||||
var canonical_ir = function(func, name, opts) {
|
||||
var instructions = func.instructions
|
||||
var nr_args = func.nr_args != null ? func.nr_args : 0
|
||||
var nr_slots = func.nr_slots != null ? func.nr_slots : 0
|
||||
var show_nops = false
|
||||
var show_types = false
|
||||
var slot_types = null
|
||||
var lines = []
|
||||
var i = 0
|
||||
var instr = null
|
||||
var op = null
|
||||
var n = 0
|
||||
var parts = null
|
||||
var j = 0
|
||||
var idx_str = null
|
||||
var op_str = null
|
||||
var operands = null
|
||||
var suffix = null
|
||||
var tag = null
|
||||
var typ = null
|
||||
var reason = null
|
||||
var num = 0
|
||||
|
||||
if (opts != null) {
|
||||
if (opts.show_nops == true) { show_nops = true }
|
||||
if (opts.show_types == true) { show_types = true }
|
||||
if (opts.slot_types != null) { slot_types = opts.slot_types }
|
||||
}
|
||||
|
||||
lines[] = `fn ${name != null ? name : "<anon>"} (args=${text(nr_args)}, slots=${text(nr_slots)})`
|
||||
|
||||
if (instructions == null) {
|
||||
return text(lines, "\n")
|
||||
}
|
||||
|
||||
num = length(instructions)
|
||||
while (i < num) {
|
||||
instr = instructions[i]
|
||||
|
||||
if (is_text(instr)) {
|
||||
if (starts_with(instr, "_nop_")) {
|
||||
if (show_nops) {
|
||||
reason = nop_reason(instr)
|
||||
idx_str = pad_right(`@${text(i)}`, 6)
|
||||
lines[] = ` ${idx_str}--- nop (${reason}) ---`
|
||||
}
|
||||
} else {
|
||||
lines[] = ` ${instr}:`
|
||||
}
|
||||
i = i + 1
|
||||
continue
|
||||
}
|
||||
|
||||
if (!is_array(instr)) {
|
||||
i = i + 1
|
||||
continue
|
||||
}
|
||||
|
||||
op = instr[0]
|
||||
n = length(instr)
|
||||
parts = []
|
||||
j = 1
|
||||
while (j < n - 2) {
|
||||
if (is_number(instr[j]) && op != "int" && !(op == "access" && j == 2)) {
|
||||
parts[] = `s${text(instr[j])}`
|
||||
} else {
|
||||
parts[] = fmt_operand(instr[j])
|
||||
}
|
||||
j = j + 1
|
||||
}
|
||||
operands = text(parts, ", ")
|
||||
|
||||
idx_str = pad_right(`@${text(i)}`, 6)
|
||||
op_str = pad_right(op, 16)
|
||||
suffix = ""
|
||||
|
||||
tag = category_tag(op)
|
||||
|
||||
if (show_types && slot_types != null) {
|
||||
// show type for dest slot if known
|
||||
if (is_number(instr[1])) {
|
||||
typ = slot_types[text(instr[1])]
|
||||
if (typ != null) {
|
||||
suffix = `; -> ${typ}`
|
||||
}
|
||||
}
|
||||
if (tag != null) {
|
||||
suffix = suffix + ` [${tag}]`
|
||||
}
|
||||
} else if (tag != null) {
|
||||
suffix = suffix + `; [${tag}]`
|
||||
}
|
||||
|
||||
if (length(suffix) > 0) {
|
||||
lines[] = ` ${idx_str}${op_str}${operands} ${suffix}`
|
||||
} else {
|
||||
lines[] = ` ${idx_str}${op_str}${operands}`
|
||||
}
|
||||
|
||||
i = i + 1
|
||||
}
|
||||
|
||||
return text(lines, "\n")
|
||||
}
|
||||
|
||||
// --- type_snapshot ---
|
||||
|
||||
var type_snapshot = function(slot_types) {
|
||||
if (slot_types == null) {
|
||||
return {}
|
||||
}
|
||||
return stone(record(slot_types))
|
||||
}
|
||||
|
||||
// --- type_delta ---
|
||||
|
||||
var type_delta = function(before_types, after_types) {
|
||||
var result = {
|
||||
added: {},
|
||||
removed: {},
|
||||
strengthened: {},
|
||||
weakened: {}
|
||||
}
|
||||
var bt = before_types != null ? before_types : {}
|
||||
var at = after_types != null ? after_types : {}
|
||||
var keys = null
|
||||
var i = 0
|
||||
var k = null
|
||||
var bv = null
|
||||
var av = null
|
||||
|
||||
// check after for added/changed
|
||||
keys = array(at)
|
||||
i = 0
|
||||
while (i < length(keys)) {
|
||||
k = keys[i]
|
||||
av = at[k]
|
||||
bv = bt[k]
|
||||
if (bv == null) {
|
||||
result.added[k] = av
|
||||
} else if (bv != av) {
|
||||
if (bv == "unknown" || (bv == "num" && (av == "int" || av == "float"))) {
|
||||
result.strengthened[k] = {from: bv, to: av}
|
||||
} else if (av == "unknown" || (av == "num" && (bv == "int" || bv == "float"))) {
|
||||
result.weakened[k] = {from: bv, to: av}
|
||||
} else {
|
||||
result.strengthened[k] = {from: bv, to: av}
|
||||
}
|
||||
}
|
||||
i = i + 1
|
||||
}
|
||||
|
||||
// check before for removed
|
||||
keys = array(bt)
|
||||
i = 0
|
||||
while (i < length(keys)) {
|
||||
k = keys[i]
|
||||
if (at[k] == null) {
|
||||
result.removed[k] = bt[k]
|
||||
}
|
||||
i = i + 1
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
return {
|
||||
detailed_stats: detailed_stats,
|
||||
ir_fingerprint: ir_fingerprint,
|
||||
canonical_ir: canonical_ir,
|
||||
type_snapshot: type_snapshot,
|
||||
type_delta: type_delta,
|
||||
category_tag: category_tag
|
||||
}
|
||||
74
link.cm
74
link.cm
@@ -34,9 +34,10 @@ function ensure_dir(path) {
|
||||
if (fd.stat(path).isDirectory) return
|
||||
var parts = array(path, '/')
|
||||
var current = starts_with(path, '/') ? '/' : ''
|
||||
for (var i = 0; i < length(parts); i++) {
|
||||
var i = 0
|
||||
for (i = 0; i < length(parts); i++) {
|
||||
if (parts[i] == '') continue
|
||||
current += parts[i] + '/'
|
||||
current = current + parts[i] + '/'
|
||||
if (!fd.stat(current).isDirectory) {
|
||||
fd.mkdir(current)
|
||||
}
|
||||
@@ -66,14 +67,16 @@ Link.load = function() {
|
||||
return link_cache
|
||||
}
|
||||
|
||||
try {
|
||||
var _load = function() {
|
||||
var content = text(fd.slurp(path))
|
||||
var cfg = toml.decode(content)
|
||||
link_cache = cfg.links || {}
|
||||
} catch (e) {
|
||||
log.console("Warning: Failed to load link.toml: " + e)
|
||||
if (cfg && cfg.links) link_cache = cfg.links
|
||||
else link_cache = {}
|
||||
} disruption {
|
||||
print("Warning: Failed to load link.toml\n")
|
||||
link_cache = {}
|
||||
}
|
||||
_load()
|
||||
return link_cache
|
||||
}
|
||||
|
||||
@@ -90,14 +93,16 @@ Link.add = function(canonical, target, shop) {
|
||||
// Validate canonical package exists in shop
|
||||
var lock = shop.load_lock()
|
||||
if (!lock[canonical]) {
|
||||
throw Error('Package ' + canonical + ' is not installed. Install it first with: cell get ' + canonical)
|
||||
print('Package ' + canonical + ' is not installed. Install it first with: cell get ' + canonical + '\n')
|
||||
disrupt
|
||||
}
|
||||
|
||||
// Validate target is a valid package
|
||||
if (starts_with(target, '/')) {
|
||||
// Local path - must have cell.toml
|
||||
if (!fd.is_file(target + '/cell.toml')) {
|
||||
throw Error('Target ' + target + ' is not a valid package (no cell.toml)')
|
||||
print('Target ' + target + ' is not a valid package (no cell.toml)\n')
|
||||
disrupt
|
||||
}
|
||||
} else {
|
||||
// Remote package target - ensure it's installed
|
||||
@@ -115,34 +120,36 @@ Link.add = function(canonical, target, shop) {
|
||||
// Read the target's cell.toml to find its dependencies
|
||||
var target_path = starts_with(target, '/') ? target : get_package_abs_dir(target)
|
||||
var toml_path = target_path + '/cell.toml'
|
||||
var _install_deps = null
|
||||
if (fd.is_file(toml_path)) {
|
||||
try {
|
||||
_install_deps = function() {
|
||||
var content = text(fd.slurp(toml_path))
|
||||
var cfg = toml.decode(content)
|
||||
if (cfg.dependencies) {
|
||||
if (cfg && cfg.dependencies) {
|
||||
arrfor(array(cfg.dependencies), function(alias) {
|
||||
var dep_locator = cfg.dependencies[alias]
|
||||
// Skip local dependencies that don't exist
|
||||
if (starts_with(dep_locator, '/') && !fd.is_dir(dep_locator)) {
|
||||
log.console(" Skipping missing local dependency: " + dep_locator)
|
||||
print(" Skipping missing local dependency: " + dep_locator + "\n")
|
||||
return
|
||||
}
|
||||
// Install the dependency if not already in shop
|
||||
try {
|
||||
var _get_dep = function() {
|
||||
shop.get(dep_locator)
|
||||
shop.extract(dep_locator)
|
||||
} catch (e) {
|
||||
log.console(` Warning: Could not install dependency ${dep_locator}: ${e.message}`)
|
||||
log.error(e)
|
||||
} disruption {
|
||||
print(` Warning: Could not install dependency ${dep_locator}\n`)
|
||||
}
|
||||
_get_dep()
|
||||
})
|
||||
}
|
||||
} catch (e) {
|
||||
log.console(` Warning: Could not read dependencies from ${toml_path}`)
|
||||
} disruption {
|
||||
print(` Warning: Could not read dependencies from ${toml_path}\n`)
|
||||
}
|
||||
_install_deps()
|
||||
}
|
||||
|
||||
log.console("Linked " + canonical + " -> " + target)
|
||||
print("Linked " + canonical + " -> " + target + "\n")
|
||||
return true
|
||||
}
|
||||
|
||||
@@ -154,12 +161,12 @@ Link.remove = function(canonical) {
|
||||
var target_dir = get_package_abs_dir(canonical)
|
||||
if (fd.is_link(target_dir)) {
|
||||
fd.unlink(target_dir)
|
||||
log.console("Removed symlink at " + target_dir)
|
||||
print("Removed symlink at " + target_dir + "\n")
|
||||
}
|
||||
|
||||
|
||||
delete links[canonical]
|
||||
Link.save(links)
|
||||
log.console("Unlinked " + canonical)
|
||||
print("Unlinked " + canonical + "\n")
|
||||
return true
|
||||
}
|
||||
|
||||
@@ -174,7 +181,7 @@ Link.clear = function() {
|
||||
})
|
||||
|
||||
Link.save({})
|
||||
log.console("Cleared all links")
|
||||
print("Cleared all links\n")
|
||||
return true
|
||||
}
|
||||
|
||||
@@ -218,7 +225,7 @@ Link.sync_all = function(shop) {
|
||||
|
||||
arrfor(array(links), function(canonical) {
|
||||
var target = links[canonical]
|
||||
try {
|
||||
var _sync = function() {
|
||||
// Validate target exists
|
||||
var link_target = resolve_link_target(target)
|
||||
if (!fd.is_dir(link_target)) {
|
||||
@@ -234,10 +241,10 @@ Link.sync_all = function(shop) {
|
||||
|
||||
// Install dependencies of the linked package
|
||||
var toml_path = link_target + '/cell.toml'
|
||||
try {
|
||||
var _install = function() {
|
||||
var content = text(fd.slurp(toml_path))
|
||||
var cfg = toml.decode(content)
|
||||
if (cfg.dependencies) {
|
||||
if (cfg && cfg.dependencies) {
|
||||
arrfor(array(cfg.dependencies), function(alias) {
|
||||
var dep_locator = cfg.dependencies[alias]
|
||||
// Skip local dependencies that don't exist
|
||||
@@ -245,22 +252,25 @@ Link.sync_all = function(shop) {
|
||||
return
|
||||
}
|
||||
// Install the dependency if not already in shop
|
||||
try {
|
||||
var _get = function() {
|
||||
shop.get(dep_locator)
|
||||
shop.extract(dep_locator)
|
||||
} catch (e) {
|
||||
} disruption {
|
||||
// Silently continue - dependency may already be installed
|
||||
}
|
||||
_get()
|
||||
})
|
||||
}
|
||||
} catch (e) {
|
||||
} disruption {
|
||||
// Could not read dependencies - continue anyway
|
||||
}
|
||||
_install()
|
||||
|
||||
count++
|
||||
} catch (e) {
|
||||
push(errors, canonical + ': ' + e.message)
|
||||
count = count + 1
|
||||
} disruption {
|
||||
push(errors, canonical + ': sync failed')
|
||||
}
|
||||
_sync()
|
||||
})
|
||||
|
||||
return { synced: count, errors: errors }
|
||||
@@ -269,7 +279,7 @@ Link.sync_all = function(shop) {
|
||||
// Check if a package is currently linked
|
||||
Link.is_linked = function(canonical) {
|
||||
var links = Link.load()
|
||||
return canonical in links
|
||||
return links[canonical] != null
|
||||
}
|
||||
|
||||
// Get the link target for a package (or null if not linked)
|
||||
|
||||
6
mcode.ce
6
mcode.ce
@@ -2,10 +2,12 @@ var fd = use("fd")
|
||||
var json = use("json")
|
||||
var tokenize = use("tokenize")
|
||||
var parse = use("parse")
|
||||
var fold = use("fold")
|
||||
var mcode = use("mcode")
|
||||
var filename = args[0]
|
||||
var src = text(fd.slurp(filename))
|
||||
var result = tokenize(src, filename)
|
||||
var ast = parse(result.tokens, src, filename)
|
||||
var compiled = mcode(ast)
|
||||
var ast = parse(result.tokens, src, filename, tokenize)
|
||||
var folded = fold(ast)
|
||||
var compiled = mcode(folded)
|
||||
print(json.encode(compiled))
|
||||
|
||||
24005
mcode.cm.mcode
Normal file
24005
mcode.cm.mcode
Normal file
File diff suppressed because it is too large
Load Diff
BIN
mcode.mach
BIN
mcode.mach
Binary file not shown.
@@ -46,7 +46,6 @@ src += [ # core
|
||||
'miniz.c',
|
||||
'runtime.c',
|
||||
'mach.c',
|
||||
'mcode.c',
|
||||
'libregexp.c', 'libunicode.c', 'cutils.c', 'dtoa.c'
|
||||
]
|
||||
|
||||
|
||||
14
net/enet.c
14
net/enet.c
@@ -14,11 +14,11 @@ static void js_enet_host_finalizer(JSRuntime *rt, JSValue val)
|
||||
if (host) enet_host_destroy(host);
|
||||
}
|
||||
|
||||
static void js_enet_peer_mark(JSRuntime *rt, JSValueConst val, JS_MarkFunc *mark_func)
|
||||
{
|
||||
ENetPeer *peer = JS_GetOpaque(val, enet_peer_class_id);
|
||||
JS_MarkValue(rt, *(JSValue*)peer->data, mark_func);
|
||||
}
|
||||
//static void js_enet_peer_mark(JSRuntime *rt, JSValueConst val, JS_MarkFunc *mark_func)
|
||||
//{
|
||||
// ENetPeer *peer = JS_GetOpaque(val, enet_peer_class_id);
|
||||
// JS_MarkValue(rt, *(JSValue*)peer->data, mark_func);
|
||||
//}
|
||||
|
||||
static void js_enet_peer_finalizer(JSRuntime *rt, JSValue val)
|
||||
{
|
||||
@@ -62,7 +62,7 @@ static JSValue js_enet_host_create(JSContext *ctx, JSValueConst this_val, int ar
|
||||
enet_uint32 outgoing_bandwidth = 0;
|
||||
JSValue obj;
|
||||
|
||||
if (argc < 1 || !JS_IsObject(argv[0])) {
|
||||
if (argc < 1 || !JS_IsRecord(argv[0])) {
|
||||
host = enet_host_create(NULL, peer_count, channel_limit, incoming_bandwidth, outgoing_bandwidth);
|
||||
if (!host) return JS_ThrowInternalError(ctx, "Failed to create ENet client host");
|
||||
goto wrap;
|
||||
@@ -414,7 +414,7 @@ static JSClassDef enet_host = {
|
||||
static JSClassDef enet_peer_class = {
|
||||
"ENetPeer",
|
||||
.finalizer = js_enet_peer_finalizer,
|
||||
.gc_mark = js_enet_peer_mark
|
||||
// .gc_mark = js_enet_peer_mark
|
||||
};
|
||||
|
||||
JSValue js_enet_resolve_hostname(JSContext *js, JSValue self, int argc, JSValue *argv)
|
||||
|
||||
@@ -132,7 +132,7 @@ JSC_CCALL(socket_getaddrinfo,
|
||||
// Store the addrinfo pointer as an internal property
|
||||
// We'll need to handle this differently since we can't wrap it
|
||||
// For now, we'll skip storing the raw addrinfo
|
||||
JS_SetPropertyUint32(js, ret, idx++, info);
|
||||
JS_SetPropertyNumber(js, ret, idx++, info);
|
||||
}
|
||||
|
||||
freeaddrinfo(res);
|
||||
|
||||
31
num_torture.cm
Normal file
31
num_torture.cm
Normal file
@@ -0,0 +1,31 @@
|
||||
// num_torture.cm — integer math torture test
|
||||
// Pure integer arithmetic so it stays on the fast int path.
|
||||
// Returns the final checksum so the caller can verify correctness.
|
||||
|
||||
var n = 5000000
|
||||
var sum = 0
|
||||
var i = 0
|
||||
var a = 0
|
||||
var b = 0
|
||||
|
||||
while (i < n) {
|
||||
a = (i * 7 + 13) % 10007
|
||||
b = (a * a) % 10007
|
||||
sum = (sum + b) % 1000000007
|
||||
i = i + 1
|
||||
}
|
||||
|
||||
return function(n) {
|
||||
var i = 0
|
||||
var a = 0
|
||||
var b = 0
|
||||
var sum = 0
|
||||
while (i < n) {
|
||||
a = (i * 7 + 13) % 10007
|
||||
b = (a * a) % 10007
|
||||
sum = (sum + b) % 1000000007
|
||||
i = i + 1
|
||||
}
|
||||
|
||||
return sum
|
||||
}
|
||||
118
package.cm
118
package.cm
@@ -51,7 +51,7 @@ package.load_config = function(name)
|
||||
return config_cache[config_path]
|
||||
|
||||
if (!fd.is_file(config_path)) {
|
||||
throw Error(`${config_path} does not exist`)
|
||||
print(`${config_path} does not exist`); disrupt
|
||||
}
|
||||
|
||||
var content = text(fd.slurp(config_path))
|
||||
@@ -101,11 +101,12 @@ package.alias_to_package = function(name, alias)
|
||||
}
|
||||
|
||||
// alias is optional
|
||||
package.add_dependency = function(name, locator, alias = locator)
|
||||
package.add_dependency = function(name, locator, alias)
|
||||
{
|
||||
var _alias = alias == null ? locator : alias
|
||||
var config = package.load_config(name)
|
||||
if (!config.dependencies) config.dependencies = {}
|
||||
config.dependencies[alias] = locator
|
||||
config.dependencies[_alias] = locator
|
||||
package.save_config(name, config)
|
||||
}
|
||||
|
||||
@@ -115,10 +116,11 @@ package.remove_dependency = function(name, locator)
|
||||
var config = package.load_config(name)
|
||||
if (!config.dependencies) return
|
||||
|
||||
var alias = null
|
||||
if (config.dependencies[locator])
|
||||
delete config.dependencies[locator]
|
||||
else {
|
||||
var alias = package.find_alias(name, locator)
|
||||
alias = package.find_alias(name, locator)
|
||||
if (alias)
|
||||
delete config.dependencies[alias]
|
||||
}
|
||||
@@ -133,8 +135,9 @@ package.find_package_dir = function(file)
|
||||
if (fd.is_file(dir))
|
||||
dir = fd.dirname(dir)
|
||||
|
||||
var toml_path = null
|
||||
while (dir && length(dir) > 0) {
|
||||
var toml_path = dir + '/cell.toml'
|
||||
toml_path = dir + '/cell.toml'
|
||||
if (fd.is_file(toml_path)) {
|
||||
return dir
|
||||
}
|
||||
@@ -158,21 +161,23 @@ package.split_alias = function(name, path)
|
||||
var parts = array(path, '/')
|
||||
var first_part = parts[0]
|
||||
|
||||
try {
|
||||
var _split = function() {
|
||||
var config = package.load_config(name)
|
||||
if (!config) return null
|
||||
|
||||
var deps = config.dependencies
|
||||
var dep_locator = null
|
||||
var remaining_path = null
|
||||
if (deps && deps[first_part]) {
|
||||
var dep_locator = deps[first_part]
|
||||
var remaining_path = text(array(parts, 1), '/')
|
||||
dep_locator = deps[first_part]
|
||||
remaining_path = text(array(parts, 1), '/')
|
||||
return { package: dep_locator, path: remaining_path }
|
||||
}
|
||||
} catch (e) {
|
||||
// Config doesn't exist or couldn't be loaded
|
||||
return null
|
||||
} disruption {
|
||||
return null
|
||||
}
|
||||
|
||||
return null
|
||||
return _split()
|
||||
}
|
||||
|
||||
package.gather_dependencies = function(name)
|
||||
@@ -208,18 +213,23 @@ package.list_files = function(pkg) {
|
||||
var walk = function(current_dir, current_prefix) {
|
||||
var list = fd.readdir(current_dir)
|
||||
if (!list) return
|
||||
|
||||
for (var i = 0; i < length(list); i++) {
|
||||
var item = list[i]
|
||||
|
||||
var i = 0
|
||||
var item = null
|
||||
var full_path = null
|
||||
var rel_path = null
|
||||
var st = null
|
||||
for (i = 0; i < length(list); i++) {
|
||||
item = list[i]
|
||||
if (item == '.' || item == '..') continue
|
||||
if (starts_with(item, '.')) continue
|
||||
|
||||
if (starts_with(item, '.')) continue
|
||||
|
||||
// Skip build directories in root
|
||||
|
||||
var full_path = current_dir + "/" + item
|
||||
var rel_path = current_prefix ? current_prefix + "/" + item : item
|
||||
|
||||
var st = fd.stat(full_path)
|
||||
full_path = current_dir + "/" + item
|
||||
rel_path = current_prefix ? current_prefix + "/" + item : item
|
||||
|
||||
st = fd.stat(full_path)
|
||||
if (st.isDirectory) {
|
||||
walk(full_path, rel_path)
|
||||
} else {
|
||||
@@ -237,7 +247,8 @@ package.list_files = function(pkg) {
|
||||
package.list_modules = function(name) {
|
||||
var files = package.list_files(name)
|
||||
var modules = []
|
||||
for (var i = 0; i < length(files); i++) {
|
||||
var i = 0
|
||||
for (i = 0; i < length(files); i++) {
|
||||
if (ends_with(files[i], '.cm')) {
|
||||
push(modules, text(files[i], 0, -3))
|
||||
}
|
||||
@@ -248,7 +259,8 @@ package.list_modules = function(name) {
|
||||
package.list_programs = function(name) {
|
||||
var files = package.list_files(name)
|
||||
var programs = []
|
||||
for (var i = 0; i < length(files); i++) {
|
||||
var i = 0
|
||||
for (i = 0; i < length(files); i++) {
|
||||
if (ends_with(files[i], '.ce')) {
|
||||
push(programs, text(files[i], 0, -3))
|
||||
}
|
||||
@@ -265,14 +277,16 @@ package.get_flags = function(name, flag_type, target) {
|
||||
var flags = []
|
||||
|
||||
// Base flags
|
||||
var base = null
|
||||
var target_flags = null
|
||||
if (config.compilation && config.compilation[flag_type]) {
|
||||
var base = config.compilation[flag_type]
|
||||
base = config.compilation[flag_type]
|
||||
flags = array(flags, filter(array(base, /\s+/), function(f) { return length(f) > 0 }))
|
||||
}
|
||||
|
||||
|
||||
// Target-specific flags
|
||||
if (target && config.compilation && config.compilation[target] && config.compilation[target][flag_type]) {
|
||||
var target_flags = config.compilation[target][flag_type]
|
||||
target_flags = config.compilation[target][flag_type]
|
||||
flags = array(flags, filter(array(target_flags, /\s+/), function(f) { return length(f) > 0 }))
|
||||
}
|
||||
|
||||
@@ -290,23 +304,36 @@ package.get_c_files = function(name, target, exclude_main) {
|
||||
// Group files by their base name (without target suffix)
|
||||
var groups = {} // base_key -> { generic: file, variants: { target: file } }
|
||||
|
||||
for (var i = 0; i < length(files); i++) {
|
||||
var file = files[i]
|
||||
var i = 0
|
||||
var file = null
|
||||
var ext = null
|
||||
var base = null
|
||||
var name_part = null
|
||||
var dir_part = null
|
||||
var dir = null
|
||||
var is_variant = null
|
||||
var variant_target = null
|
||||
var generic_name = null
|
||||
var t = 0
|
||||
var suffix = null
|
||||
var group_key = null
|
||||
for (i = 0; i < length(files); i++) {
|
||||
file = files[i]
|
||||
if (!ends_with(file, '.c') && !ends_with(file, '.cpp')) continue
|
||||
|
||||
var ext = ends_with(file, '.cpp') ? '.cpp' : '.c'
|
||||
var base = text(file, 0, -length(ext))
|
||||
var name_part = fd.basename(base)
|
||||
var dir_part = fd.dirname(base)
|
||||
var dir = (dir_part && dir_part != '.') ? dir_part + '/' : ''
|
||||
|
||||
|
||||
ext = ends_with(file, '.cpp') ? '.cpp' : '.c'
|
||||
base = text(file, 0, -length(ext))
|
||||
name_part = fd.basename(base)
|
||||
dir_part = fd.dirname(base)
|
||||
dir = (dir_part && dir_part != '.') ? dir_part + '/' : ''
|
||||
|
||||
// Check for target suffix
|
||||
var is_variant = false
|
||||
var variant_target = null
|
||||
var generic_name = name_part
|
||||
|
||||
for (var t = 0; t < length(known_targets); t++) {
|
||||
var suffix = '_' + known_targets[t]
|
||||
is_variant = false
|
||||
variant_target = null
|
||||
generic_name = name_part
|
||||
|
||||
for (t = 0; t < length(known_targets); t++) {
|
||||
suffix = '_' + known_targets[t]
|
||||
if (ends_with(name_part, suffix)) {
|
||||
is_variant = true
|
||||
variant_target = known_targets[t]
|
||||
@@ -315,7 +342,7 @@ package.get_c_files = function(name, target, exclude_main) {
|
||||
}
|
||||
}
|
||||
|
||||
var group_key = dir + generic_name + ext
|
||||
group_key = dir + generic_name + ext
|
||||
if (!groups[group_key]) {
|
||||
groups[group_key] = { generic: null, variants: {} }
|
||||
}
|
||||
@@ -332,18 +359,19 @@ package.get_c_files = function(name, target, exclude_main) {
|
||||
arrfor(array(groups), function(key) {
|
||||
var group = groups[key]
|
||||
var selected = null
|
||||
|
||||
var basename = null
|
||||
|
||||
// Prefer target-specific variant if available
|
||||
if (target && group.variants[target]) {
|
||||
selected = group.variants[target]
|
||||
} else if (group.generic) {
|
||||
selected = group.generic
|
||||
}
|
||||
|
||||
|
||||
if (selected) {
|
||||
// Skip main.c if requested
|
||||
if (exclude_main) {
|
||||
var basename = fd.basename(selected)
|
||||
basename = fd.basename(selected)
|
||||
if (basename == 'main.c' || starts_with(basename, 'main_')) return
|
||||
}
|
||||
push(result, selected)
|
||||
|
||||
3
parse.ce
3
parse.ce
@@ -1,8 +1,9 @@
|
||||
var fd = use("fd")
|
||||
var json = use("json")
|
||||
var tokenize = use("tokenize")
|
||||
var parse = use("parse")
|
||||
var filename = args[0]
|
||||
var src = text(fd.slurp(filename))
|
||||
var result = tokenize(src, filename)
|
||||
var ast = parse(result.tokens, src, filename)
|
||||
var ast = parse(result.tokens, src, filename, tokenize)
|
||||
print(json.encode(ast))
|
||||
|
||||
488
parse.cm
488
parse.cm
@@ -1,18 +1,5 @@
|
||||
def CP_SLASH = 47
|
||||
def CP_BSLASH = 92
|
||||
|
||||
var is_alpha = function(c) {
|
||||
return (c >= 65 && c <= 90) || (c >= 97 && c <= 122)
|
||||
}
|
||||
|
||||
var parse = function(tokens, src, filename, tokenizer) {
|
||||
var _src_len = length(src)
|
||||
var cp = []
|
||||
var _i = 0
|
||||
while (_i < _src_len) {
|
||||
push(cp, codepoint(src[_i]))
|
||||
_i = _i + 1
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Parser Cursor
|
||||
@@ -22,6 +9,9 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
var tok = null
|
||||
var got_lf = false
|
||||
var prev_tok = null
|
||||
var _control_depth = 0
|
||||
var _control_type = null
|
||||
var _expecting_body = false
|
||||
|
||||
var advance = function() {
|
||||
var t = null
|
||||
@@ -75,7 +65,7 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
|
||||
var errors = []
|
||||
var error_count = 0
|
||||
var function_nr = 1
|
||||
var fn_counter = 1
|
||||
|
||||
var ast_node = function(kind, token) {
|
||||
return {
|
||||
@@ -103,14 +93,18 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
})
|
||||
}
|
||||
|
||||
var _keywords = {
|
||||
"if": true, in: true, "do": true, go: true,
|
||||
"var": true, def: true, "for": true,
|
||||
"else": true, "this": true, "null": true, "true": true,
|
||||
"false": true, "while": true, "break": true,
|
||||
"return": true, "delete": true,
|
||||
disrupt: true, "function": true, "continue": true,
|
||||
disruption: true
|
||||
}
|
||||
|
||||
var is_keyword = function(kind) {
|
||||
return kind == "if" || kind == "in" || kind == "do" || kind == "go" ||
|
||||
kind == "var" || kind == "def" || kind == "for" ||
|
||||
kind == "else" || kind == "this" || kind == "null" || kind == "true" ||
|
||||
kind == "false" || kind == "while" || kind == "break" ||
|
||||
kind == "return" || kind == "delete" ||
|
||||
kind == "disrupt" || kind == "function" || kind == "continue" ||
|
||||
kind == "disruption"
|
||||
return _keywords[kind] == true
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
@@ -165,17 +159,18 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
var params = null
|
||||
var param = null
|
||||
var rpos = 0
|
||||
var pattern_str = ""
|
||||
var flags = ""
|
||||
var pattern_parts = null
|
||||
var flags_parts = null
|
||||
var tv = null
|
||||
var has_interp = false
|
||||
var ti = 0
|
||||
var tpl_list = null
|
||||
var fmt = null
|
||||
var fmt_parts = null
|
||||
var idx = 0
|
||||
var tvi = 0
|
||||
var tvlen = 0
|
||||
var depth = 0
|
||||
var expr_parts = null
|
||||
var expr_str = null
|
||||
var tc = null
|
||||
var tq = null
|
||||
@@ -184,6 +179,9 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
var sub_ast = null
|
||||
var sub_stmt = null
|
||||
var sub_expr = null
|
||||
var meth_old_cd = 0
|
||||
var meth_old_ct = null
|
||||
var meth_old_eb = false
|
||||
|
||||
if (k == "number") {
|
||||
node = ast_node("number", start)
|
||||
@@ -218,52 +216,53 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
node = ast_node("text literal", start)
|
||||
tpl_list = []
|
||||
node.list = tpl_list
|
||||
fmt = ""
|
||||
fmt_parts = []
|
||||
idx = 0
|
||||
tvi = 0
|
||||
tvlen = length(tv)
|
||||
while (tvi < tvlen) {
|
||||
if (tv[tvi] == "\\" && tvi + 1 < tvlen) {
|
||||
esc_ch = tv[tvi + 1]
|
||||
if (esc_ch == "n") { fmt = fmt + "\n" }
|
||||
else if (esc_ch == "t") { fmt = fmt + "\t" }
|
||||
else if (esc_ch == "r") { fmt = fmt + "\r" }
|
||||
else if (esc_ch == "\\") { fmt = fmt + "\\" }
|
||||
else if (esc_ch == "`") { fmt = fmt + "`" }
|
||||
else if (esc_ch == "$") { fmt = fmt + "$" }
|
||||
else if (esc_ch == "0") { fmt = fmt + character(0) }
|
||||
else { fmt = fmt + esc_ch }
|
||||
if (esc_ch == "n") { push(fmt_parts, "\n") }
|
||||
else if (esc_ch == "t") { push(fmt_parts, "\t") }
|
||||
else if (esc_ch == "r") { push(fmt_parts, "\r") }
|
||||
else if (esc_ch == "\\") { push(fmt_parts, "\\") }
|
||||
else if (esc_ch == "`") { push(fmt_parts, "`") }
|
||||
else if (esc_ch == "$") { push(fmt_parts, "$") }
|
||||
else if (esc_ch == "0") { push(fmt_parts, character(0)) }
|
||||
else { push(fmt_parts, esc_ch) }
|
||||
tvi = tvi + 2
|
||||
} else if (tv[tvi] == "$" && tvi + 1 < tvlen && tv[tvi + 1] == "{") {
|
||||
tvi = tvi + 2
|
||||
depth = 1
|
||||
expr_str = ""
|
||||
expr_parts = []
|
||||
while (tvi < tvlen && depth > 0) {
|
||||
tc = tv[tvi]
|
||||
if (tc == "{") { depth = depth + 1; expr_str = expr_str + tc; tvi = tvi + 1 }
|
||||
if (tc == "{") { depth = depth + 1; push(expr_parts, tc); tvi = tvi + 1 }
|
||||
else if (tc == "}") {
|
||||
depth = depth - 1
|
||||
if (depth > 0) { expr_str = expr_str + tc }
|
||||
if (depth > 0) { push(expr_parts, tc) }
|
||||
tvi = tvi + 1
|
||||
}
|
||||
else if (tc == "'" || tc == "\"" || tc == "`") {
|
||||
tq = tc
|
||||
expr_str = expr_str + tc
|
||||
push(expr_parts, tc)
|
||||
tvi = tvi + 1
|
||||
while (tvi < tvlen && tv[tvi] != tq) {
|
||||
if (tv[tvi] == "\\" && tvi + 1 < tvlen) {
|
||||
expr_str = expr_str + tv[tvi]
|
||||
push(expr_parts, tv[tvi])
|
||||
tvi = tvi + 1
|
||||
}
|
||||
expr_str = expr_str + tv[tvi]
|
||||
push(expr_parts, tv[tvi])
|
||||
tvi = tvi + 1
|
||||
}
|
||||
if (tvi < tvlen) { expr_str = expr_str + tv[tvi]; tvi = tvi + 1 }
|
||||
if (tvi < tvlen) { push(expr_parts, tv[tvi]); tvi = tvi + 1 }
|
||||
} else {
|
||||
expr_str = expr_str + tc
|
||||
push(expr_parts, tc)
|
||||
tvi = tvi + 1
|
||||
}
|
||||
}
|
||||
expr_str = text(expr_parts)
|
||||
expr_tokens = tokenizer(expr_str, "<template>").tokens
|
||||
sub_ast = parse(expr_tokens, expr_str, "<template>", tokenizer)
|
||||
if (sub_ast != null && sub_ast.statements != null && length(sub_ast.statements) > 0) {
|
||||
@@ -276,14 +275,16 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
}
|
||||
push(tpl_list, sub_expr)
|
||||
}
|
||||
fmt = fmt + "{" + text(idx) + "}"
|
||||
push(fmt_parts, "{")
|
||||
push(fmt_parts, text(idx))
|
||||
push(fmt_parts, "}")
|
||||
idx = idx + 1
|
||||
} else {
|
||||
fmt = fmt + tv[tvi]
|
||||
push(fmt_parts, tv[tvi])
|
||||
tvi = tvi + 1
|
||||
}
|
||||
}
|
||||
node.value = fmt
|
||||
node.value = text(fmt_parts)
|
||||
advance()
|
||||
ast_node_end(node)
|
||||
return node
|
||||
@@ -390,7 +391,7 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
ast_node_end(param)
|
||||
if (tok.kind == "=" || tok.kind == "|") {
|
||||
advance()
|
||||
param.expression = parse_expr()
|
||||
param.expression = parse_assign_expr()
|
||||
}
|
||||
push(params, param)
|
||||
} else {
|
||||
@@ -404,6 +405,12 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
else if (tok.kind == "eof") parse_error(tok, "unterminated method parameter list")
|
||||
if (length(params) > 4) parse_error(tok, "functions cannot have more than 4 parameters")
|
||||
fn.arity = length(params)
|
||||
meth_old_cd = _control_depth
|
||||
meth_old_ct = _control_type
|
||||
meth_old_eb = _expecting_body
|
||||
_control_depth = 0
|
||||
_control_type = null
|
||||
_expecting_body = false
|
||||
if (tok.kind == "{") {
|
||||
advance()
|
||||
fn.statements = parse_block_statements()
|
||||
@@ -412,11 +419,19 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
} else {
|
||||
parse_error(tok, "expected '{' for method body")
|
||||
}
|
||||
fn.function_nr = function_nr
|
||||
function_nr = function_nr + 1
|
||||
_control_depth = meth_old_cd
|
||||
_control_type = meth_old_ct
|
||||
_expecting_body = meth_old_eb
|
||||
fn.function_nr = fn_counter
|
||||
fn_counter = fn_counter + 1
|
||||
ast_node_end(fn)
|
||||
pair.right = fn
|
||||
} else if (!(is_ident && (tok.kind == "," || tok.kind == "}"))) {
|
||||
} else if (is_ident && (tok.kind == "," || tok.kind == "}")) {
|
||||
right = ast_node("name", pair.left)
|
||||
right.name = pair.left.name
|
||||
ast_node_end(right)
|
||||
pair.right = right
|
||||
} else {
|
||||
parse_error(tok, "expected ':' after property name")
|
||||
}
|
||||
push(list, pair)
|
||||
@@ -445,24 +460,25 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
if (k == "/") {
|
||||
node = ast_node("regexp", start)
|
||||
rpos = tok.at + 1
|
||||
pattern_str = ""
|
||||
flags = ""
|
||||
while (rpos < _src_len && cp[rpos] != CP_SLASH) {
|
||||
if (cp[rpos] == CP_BSLASH && rpos + 1 < _src_len) {
|
||||
pattern_str = pattern_str + character(cp[rpos]) + character(cp[rpos + 1])
|
||||
pattern_parts = []
|
||||
flags_parts = []
|
||||
while (rpos < _src_len && src[rpos] != "/") {
|
||||
if (src[rpos] == "\\" && rpos + 1 < _src_len) {
|
||||
push(pattern_parts, src[rpos])
|
||||
push(pattern_parts, src[rpos + 1])
|
||||
rpos = rpos + 2
|
||||
} else {
|
||||
pattern_str = pattern_str + character(cp[rpos])
|
||||
push(pattern_parts, src[rpos])
|
||||
rpos = rpos + 1
|
||||
}
|
||||
}
|
||||
if (rpos < _src_len) rpos = rpos + 1
|
||||
while (rpos < _src_len && is_alpha(cp[rpos])) {
|
||||
flags = flags + character(cp[rpos])
|
||||
while (rpos < _src_len && is_letter(src[rpos])) {
|
||||
push(flags_parts, src[rpos])
|
||||
rpos = rpos + 1
|
||||
}
|
||||
node.pattern = pattern_str
|
||||
if (length(flags) > 0) node.flags = flags
|
||||
node.pattern = text(pattern_parts)
|
||||
if (length(flags_parts) > 0) node.flags = text(flags_parts)
|
||||
// Skip all tokens consumed by the regex re-scan
|
||||
while (true) {
|
||||
advance()
|
||||
@@ -488,6 +504,9 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
var index = null
|
||||
var arg = null
|
||||
var args_list = null
|
||||
var one_node = null
|
||||
var binop_node = null
|
||||
var op = null
|
||||
if (node == null) return null
|
||||
while (true) {
|
||||
start = tok
|
||||
@@ -563,6 +582,10 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
var node = null
|
||||
var expr = null
|
||||
var k = tok.kind
|
||||
var operand = null
|
||||
var one_node = null
|
||||
var binop_node = null
|
||||
var op = null
|
||||
if (k == "!") {
|
||||
advance()
|
||||
node = ast_node("!", start)
|
||||
@@ -591,19 +614,22 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
ast_node_end(node)
|
||||
return node
|
||||
}
|
||||
if (k == "++") {
|
||||
if (k == "++" || k == "--") {
|
||||
advance()
|
||||
node = ast_node("++", start)
|
||||
node.expression = parse_unary()
|
||||
node.postfix = false
|
||||
ast_node_end(node)
|
||||
return node
|
||||
}
|
||||
if (k == "--") {
|
||||
advance()
|
||||
node = ast_node("--", start)
|
||||
node.expression = parse_unary()
|
||||
node.postfix = false
|
||||
operand = parse_unary()
|
||||
one_node = ast_node("number", start)
|
||||
one_node.number = 1
|
||||
one_node.value = "1"
|
||||
ast_node_end(one_node)
|
||||
op = "+"
|
||||
if (k == "--") op = "-"
|
||||
binop_node = ast_node(op, start)
|
||||
binop_node.left = operand
|
||||
binop_node.right = one_node
|
||||
ast_node_end(binop_node)
|
||||
node = ast_node("assign", start)
|
||||
node.left = operand
|
||||
node.right = binop_node
|
||||
ast_node_end(node)
|
||||
return node
|
||||
}
|
||||
@@ -666,10 +692,10 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
if (tok.kind == "?") {
|
||||
start = tok
|
||||
advance()
|
||||
then_expr = parse_expr()
|
||||
then_expr = parse_assign_expr()
|
||||
if (tok.kind == ":") advance()
|
||||
else parse_error(tok, "expected ':' in ternary expression")
|
||||
else_expr = parse_expr()
|
||||
else_expr = parse_assign_expr()
|
||||
node = ast_node("then", start)
|
||||
node.expression = cond
|
||||
node.then = then_expr
|
||||
@@ -688,6 +714,13 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
"&&=": "&&=", "||=": "||="
|
||||
}
|
||||
|
||||
var compound_binop = {
|
||||
"+=": "+", "-=": "-", "*=": "*", "/=": "/", "%=": "%",
|
||||
"<<=": "<<", ">>=": ">>", ">>>=": ">>>",
|
||||
"&=": "&", "^=": "^", "|=": "|", "**=": "**",
|
||||
"&&=": "&&", "||=": "||"
|
||||
}
|
||||
|
||||
parse_assign = function(unused) {
|
||||
var left_node = parse_ternary()
|
||||
var start = null
|
||||
@@ -696,6 +729,8 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
var node = null
|
||||
var left_kind = null
|
||||
var right_kind = null
|
||||
var binop = null
|
||||
var binop_node = null
|
||||
if (left_node == null) return null
|
||||
start = tok
|
||||
kind = assign_ops[tok.kind]
|
||||
@@ -708,12 +743,23 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
|
||||
advance()
|
||||
right_node = parse_assign()
|
||||
node = ast_node(kind, start)
|
||||
node.left = left_node
|
||||
node.right = right_node
|
||||
|
||||
if (left_node.kind == "[" && left_node.right == null) node.push = true
|
||||
if (right_node != null && right_node.kind == "[" && right_node.right == null) node.pop = true
|
||||
binop = compound_binop[kind]
|
||||
if (binop != null) {
|
||||
binop_node = ast_node(binop, start)
|
||||
binop_node.left = left_node
|
||||
binop_node.right = right_node
|
||||
ast_node_end(binop_node)
|
||||
node = ast_node("assign", start)
|
||||
node.left = left_node
|
||||
node.right = binop_node
|
||||
} else {
|
||||
node = ast_node(kind, start)
|
||||
node.left = left_node
|
||||
node.right = right_node
|
||||
if (left_node.kind == "[" && left_node.right == null) node.push = true
|
||||
if (right_node != null && right_node.kind == "[" && right_node.right == null) node.pop = true
|
||||
}
|
||||
|
||||
ast_node_end(node)
|
||||
return node
|
||||
@@ -791,9 +837,10 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
var param = null
|
||||
var prev_names = null
|
||||
var pname = null
|
||||
var dup = false
|
||||
var j = 0
|
||||
var old_dis = 0
|
||||
var old_cd = _control_depth
|
||||
var old_ct = _control_type
|
||||
var old_eb = _expecting_body
|
||||
|
||||
if (in_disruption) {
|
||||
parse_error(tok, "cannot define function inside disruption clause")
|
||||
@@ -815,13 +862,7 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
param = ast_node("name", tok)
|
||||
param.name = tok.value
|
||||
pname = tok.value
|
||||
dup = false
|
||||
j = 0
|
||||
while (j < length(prev_names)) {
|
||||
if (prev_names[j] == pname) { dup = true; break }
|
||||
j = j + 1
|
||||
}
|
||||
if (dup) parse_error(tok, "duplicate parameter name '" + pname + "'")
|
||||
if (find(prev_names, pname) != null) parse_error(tok, "duplicate parameter name '" + pname + "'")
|
||||
push(prev_names, pname)
|
||||
advance()
|
||||
ast_node_end(param)
|
||||
@@ -846,6 +887,9 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
if (length(params) > 4) parse_error(tok, "functions cannot have more than 4 parameters")
|
||||
node.arity = length(params)
|
||||
|
||||
_control_depth = 0
|
||||
_control_type = null
|
||||
_expecting_body = false
|
||||
if (tok.kind == "{") {
|
||||
advance()
|
||||
stmts = parse_block_statements()
|
||||
@@ -871,8 +915,11 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
}
|
||||
}
|
||||
|
||||
node.function_nr = function_nr
|
||||
function_nr = function_nr + 1
|
||||
_control_depth = old_cd
|
||||
_control_type = old_ct
|
||||
_expecting_body = old_eb
|
||||
node.function_nr = fn_counter
|
||||
fn_counter = fn_counter + 1
|
||||
ast_node_end(node)
|
||||
return node
|
||||
}
|
||||
@@ -887,8 +934,9 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
var expr = null
|
||||
var prev_names = null
|
||||
var pname = null
|
||||
var dup = false
|
||||
var j = 0
|
||||
var old_cd = _control_depth
|
||||
var old_ct = _control_type
|
||||
var old_eb = _expecting_body
|
||||
node.arrow = true
|
||||
|
||||
if (in_disruption) {
|
||||
@@ -911,13 +959,7 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
param = ast_node("name", tok)
|
||||
param.name = tok.value
|
||||
pname = tok.value
|
||||
dup = false
|
||||
j = 0
|
||||
while (j < length(prev_names)) {
|
||||
if (prev_names[j] == pname) { dup = true; break }
|
||||
j = j + 1
|
||||
}
|
||||
if (dup) parse_error(tok, "duplicate parameter name '" + pname + "'")
|
||||
if (find(prev_names, pname) != null) parse_error(tok, "duplicate parameter name '" + pname + "'")
|
||||
push(prev_names, pname)
|
||||
advance()
|
||||
ast_node_end(param)
|
||||
@@ -945,6 +987,9 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
advance()
|
||||
}
|
||||
|
||||
_control_depth = 0
|
||||
_control_type = null
|
||||
_expecting_body = false
|
||||
if (tok.kind == "{") {
|
||||
advance()
|
||||
stmts = parse_block_statements()
|
||||
@@ -960,8 +1005,11 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
node.statements = stmts
|
||||
}
|
||||
|
||||
node.function_nr = function_nr
|
||||
function_nr = function_nr + 1
|
||||
_control_depth = old_cd
|
||||
_control_type = old_ct
|
||||
_expecting_body = old_eb
|
||||
node.function_nr = fn_counter
|
||||
fn_counter = fn_counter + 1
|
||||
ast_node_end(node)
|
||||
return node
|
||||
}
|
||||
@@ -991,8 +1039,25 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
var elif = null
|
||||
var p1_tok = null
|
||||
var labeled_stmt = null
|
||||
var depth = 0
|
||||
var saved_ct = null
|
||||
var saved_cd = 0
|
||||
var saved_eb = false
|
||||
|
||||
if (k == "{") {
|
||||
if (!_expecting_body) {
|
||||
parse_error(start, "bare block '{ ... }' is not a valid statement; use a function, if, while, or for instead")
|
||||
advance()
|
||||
depth = 1
|
||||
while (tok.kind != "eof" && depth > 0) {
|
||||
if (tok.kind == "{") depth = depth + 1
|
||||
else if (tok.kind == "}") depth = depth - 1
|
||||
if (depth > 0) advance()
|
||||
}
|
||||
if (tok.kind == "}") advance()
|
||||
return null
|
||||
}
|
||||
_expecting_body = false
|
||||
node = ast_node("block", start)
|
||||
advance()
|
||||
stmts = parse_block_statements()
|
||||
@@ -1003,6 +1068,9 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
}
|
||||
|
||||
if (k == "var" || k == "def") {
|
||||
if (_control_depth > 0) {
|
||||
parse_error(start, "'" + k + "' declarations must appear at function body level, not inside '" + _control_type + "'; move this declaration before the '" + _control_type + "' statement")
|
||||
}
|
||||
kind_name = k
|
||||
is_def = (k == "def")
|
||||
advance()
|
||||
@@ -1029,6 +1097,8 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
}
|
||||
} else if (is_def) {
|
||||
parse_error(start, "missing initializer for constant '" + var_name + "'")
|
||||
} else {
|
||||
parse_error(start, "'var' declarations must be initialized; use 'var " + var_name + " = null' if no value is needed")
|
||||
}
|
||||
ast_node_end(node)
|
||||
push(decls, node)
|
||||
@@ -1057,6 +1127,11 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
else parse_error(tok, "expected ')' after if condition")
|
||||
then_stmts = []
|
||||
node.then = then_stmts
|
||||
saved_ct = _control_type
|
||||
saved_cd = _control_depth
|
||||
_control_type = "if"
|
||||
_control_depth = _control_depth + 1
|
||||
_expecting_body = true
|
||||
body = parse_statement()
|
||||
if (body != null) push(then_stmts, body)
|
||||
else_ifs = []
|
||||
@@ -1064,15 +1139,22 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
if (tok.kind == "else") {
|
||||
advance()
|
||||
if (tok.kind == "if") {
|
||||
_control_depth = saved_cd
|
||||
_control_type = saved_ct
|
||||
elif = parse_statement()
|
||||
if (elif != null) push(else_ifs, elif)
|
||||
ast_node_end(node)
|
||||
return node
|
||||
} else {
|
||||
else_stmts = []
|
||||
node.else = else_stmts
|
||||
_expecting_body = true
|
||||
body = parse_statement()
|
||||
if (body != null) push(else_stmts, body)
|
||||
}
|
||||
}
|
||||
_control_depth = saved_cd
|
||||
_control_type = saved_ct
|
||||
ast_node_end(node)
|
||||
return node
|
||||
}
|
||||
@@ -1088,8 +1170,15 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
else parse_error(tok, "expected ')' after while condition")
|
||||
stmts = []
|
||||
node.statements = stmts
|
||||
saved_ct = _control_type
|
||||
saved_cd = _control_depth
|
||||
_control_type = "while"
|
||||
_control_depth = _control_depth + 1
|
||||
_expecting_body = true
|
||||
body = parse_statement()
|
||||
if (body != null) push(stmts, body)
|
||||
_control_depth = saved_cd
|
||||
_control_type = saved_ct
|
||||
ast_node_end(node)
|
||||
return node
|
||||
}
|
||||
@@ -1099,8 +1188,15 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
advance()
|
||||
stmts = []
|
||||
node.statements = stmts
|
||||
saved_ct = _control_type
|
||||
saved_cd = _control_depth
|
||||
_control_type = "do"
|
||||
_control_depth = _control_depth + 1
|
||||
_expecting_body = true
|
||||
body = parse_statement()
|
||||
if (body != null) push(stmts, body)
|
||||
_control_depth = saved_cd
|
||||
_control_type = saved_ct
|
||||
if (tok.kind == "while") advance()
|
||||
else parse_error(tok, "expected 'while' after do body")
|
||||
if (tok.kind == "(") advance()
|
||||
@@ -1121,6 +1217,7 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
else parse_error(tok, "expected '(' after for")
|
||||
if (tok.kind != ";") {
|
||||
if (tok.kind == "var" || tok.kind == "def") {
|
||||
parse_error(tok, "'" + tok.kind + "' declarations cannot appear in the for initializer; declare variables before the for loop")
|
||||
init = parse_statement()
|
||||
node.init = init
|
||||
} else {
|
||||
@@ -1144,8 +1241,15 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
else parse_error(tok, "expected ')' after for clauses")
|
||||
stmts = []
|
||||
node.statements = stmts
|
||||
saved_ct = _control_type
|
||||
saved_cd = _control_depth
|
||||
_control_type = "for"
|
||||
_control_depth = _control_depth + 1
|
||||
_expecting_body = true
|
||||
body = parse_statement()
|
||||
if (body != null) push(stmts, body)
|
||||
_control_depth = saved_cd
|
||||
_control_type = saved_ct
|
||||
ast_node_end(node)
|
||||
return node
|
||||
}
|
||||
@@ -1216,6 +1320,36 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
}
|
||||
|
||||
if (k == "name") {
|
||||
if (tok.value == "try" || tok.value == "catch" || tok.value == "finally") {
|
||||
parse_error(start, "'" + tok.value + "' is not supported; use disrupt/disruption instead")
|
||||
sync_to_statement()
|
||||
return null
|
||||
}
|
||||
if (tok.value == "throw") {
|
||||
parse_error(start, "'throw' is not supported; use disrupt instead")
|
||||
sync_to_statement()
|
||||
return null
|
||||
}
|
||||
if (tok.value == "class") {
|
||||
parse_error(start, "'class' is not supported; use meme()/proto() instead")
|
||||
sync_to_statement()
|
||||
return null
|
||||
}
|
||||
if (tok.value == "new") {
|
||||
parse_error(start, "'new' is not supported; use meme()/proto() instead")
|
||||
sync_to_statement()
|
||||
return null
|
||||
}
|
||||
if (tok.value == "switch" || tok.value == "case") {
|
||||
parse_error(start, "'" + tok.value + "' is not supported; use if/else instead")
|
||||
sync_to_statement()
|
||||
return null
|
||||
}
|
||||
if (tok.value == "let" || tok.value == "const") {
|
||||
parse_error(start, "'" + tok.value + "' is not supported; use var/def instead")
|
||||
sync_to_statement()
|
||||
return null
|
||||
}
|
||||
p1_tok = peek_ahead(1)
|
||||
if (p1_tok.kind == ":") {
|
||||
node = ast_node("label", start)
|
||||
@@ -1291,7 +1425,9 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
vars: [],
|
||||
in_loop: opts.in_loop == true,
|
||||
function_nr: fn_nr,
|
||||
is_function_scope: opts.is_func == true
|
||||
is_function_scope: opts.is_func == true,
|
||||
func_node: null,
|
||||
has_inner_func: false
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1344,13 +1480,17 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
return false
|
||||
}
|
||||
|
||||
var sem_add_intrinsic = function(name) {
|
||||
var i = 0
|
||||
while (i < length(intrinsics)) {
|
||||
if (intrinsics[i] == name) return null
|
||||
i = i + 1
|
||||
var sem_find_func_scope = function(scope) {
|
||||
var s = scope
|
||||
while (s != null) {
|
||||
if (s.is_function_scope) return s
|
||||
s = s.parent
|
||||
}
|
||||
push(intrinsics, name)
|
||||
return null
|
||||
}
|
||||
|
||||
var sem_add_intrinsic = function(name) {
|
||||
if (find(intrinsics, name) == null) push(intrinsics, name)
|
||||
}
|
||||
|
||||
var functino_names = {
|
||||
@@ -1364,12 +1504,31 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
return functino_names[name] == true
|
||||
}
|
||||
|
||||
var sem_propagate_vars = function(parent, child) {
|
||||
var i = 0
|
||||
while (i < length(child.vars)) {
|
||||
push(parent.vars, child.vars[i])
|
||||
i = i + 1
|
||||
var derive_type_tag = function(expr) {
|
||||
if (expr == null) return null
|
||||
var k = expr.kind
|
||||
if (k == "array") return "array"
|
||||
if (k == "record") return "record"
|
||||
if (k == "function") return "function"
|
||||
if (k == "text" || k == "text literal") return "text"
|
||||
if (k == "number") {
|
||||
if (is_integer(expr.number)) return "integer"
|
||||
return "number"
|
||||
}
|
||||
if (k == "true" || k == "false") return "logical"
|
||||
if (k == "null") return "null"
|
||||
return null
|
||||
}
|
||||
|
||||
var _assign_kinds = {
|
||||
assign: true, "+=": true, "-=": true, "*=": true, "/=": true, "%=": true,
|
||||
"<<=": true, ">>=": true, ">>>=": true,
|
||||
"&=": true, "^=": true, "|=": true, "**=": true,
|
||||
"&&=": true, "||=": true
|
||||
}
|
||||
|
||||
var sem_propagate_vars = function(parent, child) {
|
||||
parent.vars = array(parent.vars, child.vars)
|
||||
}
|
||||
|
||||
var sem_build_scope_record = function(scope) {
|
||||
@@ -1385,7 +1544,8 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
function_nr: v.function_nr,
|
||||
nr_uses: v.nr_uses,
|
||||
closure: v.closure == 1,
|
||||
level: 0
|
||||
level: 0,
|
||||
type_tag: v.type_tag
|
||||
}
|
||||
slots = slots + 1
|
||||
if (v.closure) close_slots = close_slots + 1
|
||||
@@ -1467,6 +1627,46 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
if (kind == "[" && left_node.right != null) {
|
||||
sem_check_expr(scope, left_node.right)
|
||||
}
|
||||
// Type error detection for known-type constant objects
|
||||
if (obj_expr != null && obj_expr.kind == "name" && obj_expr.name != null) {
|
||||
v = sem_find_var(scope, obj_expr.name)
|
||||
if (v != null && v.is_const && v.type_tag != null) {
|
||||
if (kind == ".") {
|
||||
if (v.type_tag == "array") {
|
||||
sem_error(left_node, "cannot set property on array '" + obj_expr.name + "'")
|
||||
}
|
||||
} else if (kind == "[") {
|
||||
if (left_node.right == null) {
|
||||
// Push: a[] = val
|
||||
if (v.type_tag != "array") {
|
||||
sem_error(left_node, "push only works on arrays, not " + v.type_tag + " '" + obj_expr.name + "'")
|
||||
}
|
||||
} else if (v.type_tag == "array") {
|
||||
if (left_node.right.kind == "text") {
|
||||
sem_error(left_node, "cannot use text key on array '" + obj_expr.name + "'")
|
||||
}
|
||||
} else if (v.type_tag == "record") {
|
||||
if (left_node.right.kind == "number" && is_integer(left_node.right.number)) {
|
||||
sem_error(left_node, "cannot use integer key on record '" + obj_expr.name + "'; use text key")
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (v != null && v.is_const && v.type_tag == null) {
|
||||
// Infer type_tag from usage pattern (def only)
|
||||
if (kind == ".") {
|
||||
v.type_tag = "record"
|
||||
} else if (kind == "[") {
|
||||
if (left_node.right == null) {
|
||||
// Push: a[] = val → array
|
||||
v.type_tag = "array"
|
||||
} else if (left_node.right.kind == "number" && is_integer(left_node.right.number)) {
|
||||
v.type_tag = "array"
|
||||
} else if (left_node.right.kind == "text") {
|
||||
v.type_tag = "record"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1486,11 +1686,9 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
var pname = null
|
||||
var def_val = null
|
||||
var sr = null
|
||||
var enclosing = null
|
||||
|
||||
if (kind == "assign" || kind == "+=" || kind == "-=" || kind == "*=" ||
|
||||
kind == "/=" || kind == "%=" || kind == "<<=" || kind == ">>=" ||
|
||||
kind == ">>>=" || kind == "&=" || kind == "^=" || kind == "|=" ||
|
||||
kind == "**=" || kind == "&&=" || kind == "||=") {
|
||||
if (_assign_kinds[kind] == true) {
|
||||
sem_check_assign_target(scope, expr.left)
|
||||
sem_check_expr(scope, expr.right)
|
||||
return null
|
||||
@@ -1515,6 +1713,21 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
operand.level = -1
|
||||
}
|
||||
}
|
||||
} else if (operand != null) {
|
||||
sem_check_assign_target(scope, operand)
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
if (kind == "[") {
|
||||
sem_check_expr(scope, expr.left)
|
||||
sem_check_expr(scope, expr.right)
|
||||
if (expr.right != null) {
|
||||
if (expr.right.kind == "number" && is_integer(expr.right.number)) {
|
||||
expr.access_kind = "index"
|
||||
} else if (expr.right.kind == "text") {
|
||||
expr.access_kind = "field"
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
@@ -1525,7 +1738,7 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
kind == "&&" || kind == "||" || kind == "&" ||
|
||||
kind == "|" || kind == "^" || kind == "<<" || kind == ">>" ||
|
||||
kind == ">>>" || kind == "**" || kind == "in" ||
|
||||
kind == "." || kind == "[") {
|
||||
kind == ".") {
|
||||
sem_check_expr(scope, expr.left)
|
||||
sem_check_expr(scope, expr.right)
|
||||
return null
|
||||
@@ -1575,9 +1788,12 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
}
|
||||
|
||||
if (kind == "function") {
|
||||
enclosing = sem_find_func_scope(scope)
|
||||
if (enclosing != null) enclosing.has_inner_func = true
|
||||
fn_nr_val = expr.function_nr
|
||||
if (fn_nr_val == null) fn_nr_val = scope.function_nr
|
||||
fn_scope = make_scope(scope, fn_nr_val, {is_func: true})
|
||||
fn_scope.func_node = expr
|
||||
expr.outer = scope.function_nr
|
||||
i = 0
|
||||
while (i < length(expr.list)) {
|
||||
@@ -1634,6 +1850,7 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
if (r.level > 0) r.v.closure = 1
|
||||
} else {
|
||||
expr.level = -1
|
||||
expr.intrinsic = true
|
||||
sem_add_intrinsic(name)
|
||||
}
|
||||
}
|
||||
@@ -1657,6 +1874,9 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
var pname = null
|
||||
var def_val = null
|
||||
var sr = null
|
||||
var enclosing = null
|
||||
var func_scope = null
|
||||
var tt = null
|
||||
|
||||
if (kind == "var_list") {
|
||||
i = 0
|
||||
@@ -1696,6 +1916,13 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
}
|
||||
}
|
||||
sem_check_expr(scope, stmt.right)
|
||||
if (name != null) {
|
||||
tt = derive_type_tag(stmt.right)
|
||||
if (tt != null && tt != "null") {
|
||||
existing = sem_find_var(scope, name)
|
||||
if (existing != null) existing.type_tag = tt
|
||||
}
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -1771,8 +1998,30 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
return null
|
||||
}
|
||||
|
||||
if (kind == "return" || kind == "go") {
|
||||
if (kind == "go") {
|
||||
sem_check_expr(scope, stmt.expression)
|
||||
if (stmt.expression == null || stmt.expression.kind != "(") {
|
||||
sem_error(stmt, "'go' must be followed by a function call")
|
||||
} else {
|
||||
func_scope = sem_find_func_scope(scope)
|
||||
if (func_scope != null && func_scope.func_node != null) {
|
||||
if (func_scope.func_node.disruption != null) {
|
||||
sem_error(stmt, "cannot use 'go' in a function with a disruption clause")
|
||||
}
|
||||
if (func_scope.has_inner_func) {
|
||||
sem_error(stmt, "cannot use 'go' in a function that defines inner functions")
|
||||
}
|
||||
}
|
||||
stmt.tail = true
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
if (kind == "return") {
|
||||
sem_check_expr(scope, stmt.expression)
|
||||
if (stmt.expression != null && stmt.expression.kind == "(") {
|
||||
stmt.tail = true
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
@@ -1809,11 +2058,14 @@ var parse = function(tokens, src, filename, tokenizer) {
|
||||
}
|
||||
|
||||
if (kind == "function") {
|
||||
enclosing = sem_find_func_scope(scope)
|
||||
if (enclosing != null) enclosing.has_inner_func = true
|
||||
name = stmt.name
|
||||
if (name != null) sem_add_var(scope, name, {make: "function", fn_nr: scope.function_nr})
|
||||
fn_nr_val = stmt.function_nr
|
||||
if (fn_nr_val == null) fn_nr_val = scope.function_nr
|
||||
fn_scope = make_scope(scope, fn_nr_val, {is_func: true})
|
||||
fn_scope.func_node = stmt
|
||||
stmt.outer = scope.function_nr
|
||||
i = 0
|
||||
while (i < length(stmt.list)) {
|
||||
|
||||
28245
parse.cm.mcode
Normal file
28245
parse.cm.mcode
Normal file
File diff suppressed because it is too large
Load Diff
BIN
parse.mach
BIN
parse.mach
Binary file not shown.
@@ -128,7 +128,7 @@ struct listfiles_ctx {
|
||||
|
||||
static void listfiles_cb(const char *path, void *userdata) {
|
||||
struct listfiles_ctx *ctx = (struct listfiles_ctx*)userdata;
|
||||
JS_SetPropertyUint32(ctx->js, ctx->array, ctx->index++, JS_NewString(ctx->js, path));
|
||||
JS_SetPropertyNumber(ctx->js, ctx->array, ctx->index++, JS_NewString(ctx->js, path));
|
||||
}
|
||||
|
||||
JSC_SCALL(file_listfiles,
|
||||
|
||||
@@ -79,7 +79,7 @@ static void json_decode_array_value(json_decoder *decoder, int pos, json_value v
|
||||
case kJSONString: jsval = JS_NewString(ctx->js, value.data.stringval); break;
|
||||
default: jsval = JS_NULL; break;
|
||||
}
|
||||
JS_SetPropertyUint32(ctx->js, container, pos, jsval);
|
||||
JS_SetPropertyNumber(ctx->js, container, pos, jsval);
|
||||
}
|
||||
|
||||
// --- JSON Encoder Context ---
|
||||
@@ -128,7 +128,7 @@ static void encode_js_array(json_encoder *enc, JSContext *js, JSValue arr) {
|
||||
JS_FreeValue(js, lenVal);
|
||||
for (int i = 0; i < len; i++) {
|
||||
enc->addArrayMember(enc);
|
||||
JSValue val = JS_GetPropertyUint32(js, arr, i);
|
||||
JSValue val = JS_GetPropertyNumber(js, arr, i);
|
||||
encode_js_value(enc, js, val);
|
||||
JS_FreeValue(js, val);
|
||||
}
|
||||
|
||||
@@ -57,7 +57,7 @@ static void boards_list_cb(PDBoardsList *boards, const char *errorMessage) {
|
||||
boards->boards[i].boardID ? JS_NewString(g_scoreboard_js, boards->boards[i].boardID) : JS_NULL);
|
||||
JS_SetPropertyStr(g_scoreboard_js, board, "name",
|
||||
boards->boards[i].name ? JS_NewString(g_scoreboard_js, boards->boards[i].name) : JS_NULL);
|
||||
JS_SetPropertyUint32(g_scoreboard_js, arr, i, board);
|
||||
JS_SetPropertyNumber(g_scoreboard_js, arr, i, board);
|
||||
}
|
||||
args[0] = arr;
|
||||
} else {
|
||||
@@ -83,7 +83,7 @@ static void scores_cb(PDScoresList *scores, const char *errorMessage) {
|
||||
JS_SetPropertyStr(g_scoreboard_js, obj, "limit", JS_NewInt32(g_scoreboard_js, scores->limit));
|
||||
JSValue arr = JS_NewArray(g_scoreboard_js);
|
||||
for (unsigned int i = 0; i < scores->count; i++) {
|
||||
JS_SetPropertyUint32(g_scoreboard_js, arr, i, score_to_js(g_scoreboard_js, &scores->scores[i]));
|
||||
JS_SetPropertyNumber(g_scoreboard_js, arr, i, score_to_js(g_scoreboard_js, &scores->scores[i]));
|
||||
}
|
||||
JS_SetPropertyStr(g_scoreboard_js, obj, "scores", arr);
|
||||
args[0] = obj;
|
||||
|
||||
206
pronto.cm
206
pronto.cm
@@ -4,9 +4,9 @@
|
||||
// Time is in seconds.
|
||||
|
||||
function make_reason(factory, excuse, evidence) {
|
||||
def reason = Error(`pronto.${factory}${excuse ? ': ' + excuse : ''}`)
|
||||
reason.evidence = evidence
|
||||
return reason
|
||||
var msg = 'pronto.' + factory
|
||||
if (excuse) msg = msg + ': ' + excuse
|
||||
return { message: msg, evidence: evidence }
|
||||
}
|
||||
|
||||
function is_requestor(fn) {
|
||||
@@ -14,21 +14,27 @@ function is_requestor(fn) {
|
||||
}
|
||||
|
||||
function check_requestors(list, factory) {
|
||||
if (!is_array(list) || some(list, r => !is_requestor(r)))
|
||||
throw make_reason(factory, 'Bad requestor array.', list)
|
||||
if (!is_array(list) || some(list, r => !is_requestor(r))) {
|
||||
print(make_reason(factory, 'Bad requestor array.', list).message + '\n')
|
||||
disrupt
|
||||
}
|
||||
}
|
||||
|
||||
function check_callback(cb, factory) {
|
||||
if (!is_function(cb) || length(cb) != 2)
|
||||
throw make_reason(factory, 'Not a callback.', cb)
|
||||
if (!is_function(cb) || length(cb) != 2) {
|
||||
print(make_reason(factory, 'Not a callback.', cb).message + '\n')
|
||||
disrupt
|
||||
}
|
||||
}
|
||||
|
||||
// fallback(requestor_array)
|
||||
// Tries each requestor in order until one succeeds.
|
||||
function fallback(requestor_array) {
|
||||
def factory = 'fallback'
|
||||
if (!is_array(requestor_array) || length(requestor_array) == 0)
|
||||
throw make_reason(factory, 'Empty requestor array.')
|
||||
if (!is_array(requestor_array) || length(requestor_array) == 0) {
|
||||
print(make_reason(factory, 'Empty requestor array.').message + '\n')
|
||||
disrupt
|
||||
}
|
||||
check_requestors(requestor_array, factory)
|
||||
|
||||
return function fallback_requestor(callback, value) {
|
||||
@@ -38,9 +44,11 @@ function fallback(requestor_array) {
|
||||
var cancelled = false
|
||||
|
||||
function cancel(reason) {
|
||||
var _c = null
|
||||
cancelled = true
|
||||
if (current_cancel) {
|
||||
try { current_cancel(reason) } catch (_) {}
|
||||
_c = function() { current_cancel(reason) } disruption {}
|
||||
_c()
|
||||
current_cancel = null
|
||||
}
|
||||
}
|
||||
@@ -53,9 +61,9 @@ function fallback(requestor_array) {
|
||||
}
|
||||
|
||||
def requestor = requestor_array[index]
|
||||
index += 1
|
||||
index = index + 1
|
||||
|
||||
try {
|
||||
var _run = function() {
|
||||
current_cancel = requestor(function(val, reason) {
|
||||
if (cancelled) return
|
||||
current_cancel = null
|
||||
@@ -65,9 +73,10 @@ function fallback(requestor_array) {
|
||||
try_next()
|
||||
}
|
||||
}, value)
|
||||
} catch (ex) {
|
||||
} disruption {
|
||||
try_next()
|
||||
}
|
||||
_run()
|
||||
}
|
||||
|
||||
try_next()
|
||||
@@ -79,25 +88,32 @@ function fallback(requestor_array) {
|
||||
// Runs requestors in parallel, collecting all results.
|
||||
function parallel(requestor_array, throttle, need) {
|
||||
def factory = 'parallel'
|
||||
if (!is_array(requestor_array))
|
||||
throw make_reason(factory, 'Not an array.', requestor_array)
|
||||
if (!is_array(requestor_array)) {
|
||||
print(make_reason(factory, 'Not an array.', requestor_array).message + '\n')
|
||||
disrupt
|
||||
}
|
||||
check_requestors(requestor_array, factory)
|
||||
|
||||
def length = length(requestor_array)
|
||||
if (length == 0)
|
||||
def len = length(requestor_array)
|
||||
if (len == 0)
|
||||
return function(callback, value) { callback([]) }
|
||||
|
||||
if (need == null) need = length
|
||||
if (!is_number(need) || need < 0 || need > length)
|
||||
throw make_reason(factory, 'Bad need.', need)
|
||||
var _need = need
|
||||
if (_need == null) _need = len
|
||||
if (!is_number(_need) || _need < 0 || _need > len) {
|
||||
print(make_reason(factory, 'Bad need.', _need).message + '\n')
|
||||
disrupt
|
||||
}
|
||||
|
||||
if (throttle != null && (!is_number(throttle) || throttle < 1))
|
||||
throw make_reason(factory, 'Bad throttle.', throttle)
|
||||
if (throttle != null && (!is_number(throttle) || throttle < 1)) {
|
||||
print(make_reason(factory, 'Bad throttle.', throttle).message + '\n')
|
||||
disrupt
|
||||
}
|
||||
|
||||
return function parallel_requestor(callback, value) {
|
||||
check_callback(callback, factory)
|
||||
def results = array(length)
|
||||
def cancel_list = array(length)
|
||||
def results = array(len)
|
||||
def cancel_list = array(len)
|
||||
var next_index = 0
|
||||
var successes = 0
|
||||
var failures = 0
|
||||
@@ -107,33 +123,34 @@ function parallel(requestor_array, throttle, need) {
|
||||
if (finished) return
|
||||
finished = true
|
||||
arrfor(cancel_list, c => {
|
||||
try { if (is_function(c)) c(reason) } catch (_) {}
|
||||
var _c = function() { if (is_function(c)) c(reason) } disruption {}
|
||||
_c()
|
||||
})
|
||||
}
|
||||
|
||||
function start_one() {
|
||||
if (finished || next_index >= length) return
|
||||
if (finished || next_index >= len) return
|
||||
def idx = next_index
|
||||
next_index += 1
|
||||
next_index = next_index + 1
|
||||
def requestor = requestor_array[idx]
|
||||
|
||||
try {
|
||||
var _run = function() {
|
||||
cancel_list[idx] = requestor(function(val, reason) {
|
||||
if (finished) return
|
||||
cancel_list[idx] = null
|
||||
|
||||
if (val != null) {
|
||||
results[idx] = val
|
||||
successes += 1
|
||||
if (successes >= need) {
|
||||
successes = successes + 1
|
||||
if (successes >= _need) {
|
||||
finished = true
|
||||
cancel(make_reason(factory, 'Finished.'))
|
||||
callback(results)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
failures += 1
|
||||
if (failures > length - need) {
|
||||
failures = failures + 1
|
||||
if (failures > len - _need) {
|
||||
cancel(reason)
|
||||
callback(null, reason || make_reason(factory, 'Too many failures.'))
|
||||
return
|
||||
@@ -142,20 +159,21 @@ function parallel(requestor_array, throttle, need) {
|
||||
|
||||
start_one()
|
||||
}, value)
|
||||
} catch (ex) {
|
||||
failures += 1
|
||||
if (failures > length - need) {
|
||||
cancel(ex)
|
||||
callback(null, ex)
|
||||
} disruption {
|
||||
failures = failures + 1
|
||||
if (failures > len - _need) {
|
||||
cancel(make_reason(factory, 'Requestor threw.'))
|
||||
callback(null, make_reason(factory, 'Requestor threw.'))
|
||||
return
|
||||
}
|
||||
start_one()
|
||||
}
|
||||
_run()
|
||||
}
|
||||
|
||||
|
||||
def concurrent = throttle ? min(throttle, length) : length
|
||||
for (var i = 0; i < concurrent; i++) start_one()
|
||||
def concurrent = throttle ? min(throttle, len) : len
|
||||
var i = 0
|
||||
while (i < concurrent) { start_one(); i = i + 1 }
|
||||
|
||||
return cancel
|
||||
}
|
||||
@@ -165,22 +183,29 @@ function parallel(requestor_array, throttle, need) {
|
||||
// Runs requestors in parallel, returns first success(es).
|
||||
function race(requestor_array, throttle, need) {
|
||||
def factory = 'race'
|
||||
if (!is_array(requestor_array) || length(requestor_array) == 0)
|
||||
throw make_reason(factory, 'Empty requestor array.')
|
||||
if (!is_array(requestor_array) || length(requestor_array) == 0) {
|
||||
print(make_reason(factory, 'Empty requestor array.').message + '\n')
|
||||
disrupt
|
||||
}
|
||||
check_requestors(requestor_array, factory)
|
||||
|
||||
def length = length(requestor_array)
|
||||
if (need == null) need = 1
|
||||
if (!is_number(need) || need < 1 || need > length)
|
||||
throw make_reason(factory, 'Bad need.', need)
|
||||
def len = length(requestor_array)
|
||||
var _need = need
|
||||
if (_need == null) _need = 1
|
||||
if (!is_number(_need) || _need < 1 || _need > len) {
|
||||
print(make_reason(factory, 'Bad need.', _need).message + '\n')
|
||||
disrupt
|
||||
}
|
||||
|
||||
if (throttle != null && (!is_number(throttle) || throttle < 1))
|
||||
throw make_reason(factory, 'Bad throttle.', throttle)
|
||||
if (throttle != null && (!is_number(throttle) || throttle < 1)) {
|
||||
print(make_reason(factory, 'Bad throttle.', throttle).message + '\n')
|
||||
disrupt
|
||||
}
|
||||
|
||||
return function race_requestor(callback, value) {
|
||||
check_callback(callback, factory)
|
||||
def results = array(length)
|
||||
def cancel_list = array(length)
|
||||
def results = array(len)
|
||||
def cancel_list = array(len)
|
||||
var next_index = 0
|
||||
var successes = 0
|
||||
var failures = 0
|
||||
@@ -190,27 +215,28 @@ function race(requestor_array, throttle, need) {
|
||||
if (finished) return
|
||||
finished = true
|
||||
arrfor(cancel_list, c => {
|
||||
try { if (is_function(c)) c(reason) } catch (_) {}
|
||||
var _c = function() { if (is_function(c)) c(reason) } disruption {}
|
||||
_c()
|
||||
})
|
||||
}
|
||||
|
||||
function start_one() {
|
||||
if (finished || next_index >= length) return
|
||||
if (finished || next_index >= len) return
|
||||
def idx = next_index
|
||||
next_index += 1
|
||||
next_index = next_index + 1
|
||||
def requestor = requestor_array[idx]
|
||||
|
||||
try {
|
||||
var _run = function() {
|
||||
cancel_list[idx] = requestor(function(val, reason) {
|
||||
if (finished) return
|
||||
cancel_list[idx] = null
|
||||
|
||||
if (val != null) {
|
||||
results[idx] = val
|
||||
successes += 1
|
||||
if (successes >= need) {
|
||||
successes = successes + 1
|
||||
if (successes >= _need) {
|
||||
cancel(make_reason(factory, 'Winner.'))
|
||||
if (need == 1) {
|
||||
if (_need == 1) {
|
||||
callback(val)
|
||||
} else {
|
||||
callback(results)
|
||||
@@ -218,8 +244,8 @@ function race(requestor_array, throttle, need) {
|
||||
return
|
||||
}
|
||||
} else {
|
||||
failures += 1
|
||||
if (failures > length - need) {
|
||||
failures = failures + 1
|
||||
if (failures > len - _need) {
|
||||
cancel(reason)
|
||||
callback(null, reason || make_reason(factory, 'All failed.'))
|
||||
return
|
||||
@@ -228,19 +254,21 @@ function race(requestor_array, throttle, need) {
|
||||
|
||||
start_one()
|
||||
}, value)
|
||||
} catch (ex) {
|
||||
failures += 1
|
||||
if (failures > length - need) {
|
||||
cancel(ex)
|
||||
callback(null, ex)
|
||||
} disruption {
|
||||
failures = failures + 1
|
||||
if (failures > len - _need) {
|
||||
cancel(make_reason(factory, 'Requestor threw.'))
|
||||
callback(null, make_reason(factory, 'Requestor threw.'))
|
||||
return
|
||||
}
|
||||
start_one()
|
||||
}
|
||||
_run()
|
||||
}
|
||||
|
||||
def concurrent = throttle ? min(throttle, length) : length
|
||||
for (var i = 0; i < concurrent; i++) start_one()
|
||||
def concurrent = throttle ? min(throttle, len) : len
|
||||
var i = 0
|
||||
while (i < concurrent) { start_one(); i = i + 1 }
|
||||
|
||||
return cancel
|
||||
}
|
||||
@@ -250,8 +278,10 @@ function race(requestor_array, throttle, need) {
|
||||
// Runs requestors one at a time, passing result to next.
|
||||
function sequence(requestor_array) {
|
||||
def factory = 'sequence'
|
||||
if (!is_array(requestor_array))
|
||||
throw make_reason(factory, 'Not an array.', requestor_array)
|
||||
if (!is_array(requestor_array)) {
|
||||
print(make_reason(factory, 'Not an array.', requestor_array).message + '\n')
|
||||
disrupt
|
||||
}
|
||||
check_requestors(requestor_array, factory)
|
||||
|
||||
if (length(requestor_array) == 0)
|
||||
@@ -264,9 +294,11 @@ function sequence(requestor_array) {
|
||||
var cancelled = false
|
||||
|
||||
function cancel(reason) {
|
||||
var _c = null
|
||||
cancelled = true
|
||||
if (current_cancel) {
|
||||
try { current_cancel(reason) } catch (_) {}
|
||||
_c = function() { current_cancel(reason) } disruption {}
|
||||
_c()
|
||||
current_cancel = null
|
||||
}
|
||||
}
|
||||
@@ -279,9 +311,9 @@ function sequence(requestor_array) {
|
||||
}
|
||||
|
||||
def requestor = requestor_array[index]
|
||||
index += 1
|
||||
index = index + 1
|
||||
|
||||
try {
|
||||
var _run = function() {
|
||||
current_cancel = requestor(function(result, reason) {
|
||||
if (cancelled) return
|
||||
current_cancel = null
|
||||
@@ -291,9 +323,10 @@ function sequence(requestor_array) {
|
||||
run_next(result)
|
||||
}
|
||||
}, val)
|
||||
} catch (ex) {
|
||||
callback(null, ex)
|
||||
} disruption {
|
||||
callback(null, make_reason(factory, 'Requestor threw.'))
|
||||
}
|
||||
_run()
|
||||
}
|
||||
|
||||
run_next(value)
|
||||
@@ -305,26 +338,29 @@ function sequence(requestor_array) {
|
||||
// Converts a unary function into a requestor.
|
||||
function requestorize(unary) {
|
||||
def factory = 'requestorize'
|
||||
if (!is_function(unary))
|
||||
throw make_reason(factory, 'Not a function.', unary)
|
||||
if (!is_function(unary)) {
|
||||
print(make_reason(factory, 'Not a function.', unary).message + '\n')
|
||||
disrupt
|
||||
}
|
||||
|
||||
return function requestorized(callback, value) {
|
||||
check_callback(callback, factory)
|
||||
try {
|
||||
var _run = function() {
|
||||
def result = unary(value)
|
||||
callback(result == null ? true : result)
|
||||
} catch (ex) {
|
||||
callback(null, ex)
|
||||
} disruption {
|
||||
callback(null, make_reason(factory, 'Function threw.'))
|
||||
}
|
||||
_run()
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
fallback,
|
||||
parallel,
|
||||
race,
|
||||
sequence,
|
||||
requestorize,
|
||||
is_requestor,
|
||||
check_callback
|
||||
fallback: fallback,
|
||||
parallel: parallel,
|
||||
race: race,
|
||||
sequence: sequence,
|
||||
requestorize: requestorize,
|
||||
is_requestor: is_requestor,
|
||||
check_callback: check_callback
|
||||
}
|
||||
|
||||
18
qbe.ce
Normal file
18
qbe.ce
Normal file
@@ -0,0 +1,18 @@
|
||||
var fd = use("fd")
|
||||
var json = use("json")
|
||||
var tokenize = use("tokenize")
|
||||
var parse = use("parse")
|
||||
var fold = use("fold")
|
||||
var mcode = use("mcode")
|
||||
var streamline = use("streamline")
|
||||
var qbe_macros = use("qbe")
|
||||
var qbe_emit = use("qbe_emit")
|
||||
var filename = args[0]
|
||||
var src = text(fd.slurp(filename))
|
||||
var result = tokenize(src, filename)
|
||||
var ast = parse(result.tokens, src, filename, tokenize)
|
||||
var folded = fold(ast)
|
||||
var compiled = mcode(folded)
|
||||
var optimized = streamline(compiled)
|
||||
var il = qbe_emit(optimized, qbe_macros)
|
||||
print(il)
|
||||
189
qbe.cm
189
qbe.cm
@@ -13,6 +13,11 @@ def js_true = 35
|
||||
def js_exception = 15
|
||||
def js_empty_text = 27
|
||||
|
||||
// Shared closure vars for functions with >4 params
|
||||
var _qop = null
|
||||
var _qop2 = null
|
||||
var _qflags = null
|
||||
|
||||
def int32_min = -2147483648
|
||||
def int32_max = 2147483647
|
||||
def mantissa_mask = 4503599627370495
|
||||
@@ -398,18 +403,20 @@ var mod = function(p, ctx, a, b) {
|
||||
// ============================================================
|
||||
|
||||
// Helper: generate comparison for a given op string and int comparison QBE op
|
||||
// null_true: whether null==null returns true (eq, le, ge) or false (ne, lt, gt)
|
||||
var cmp = function(p, ctx, a, b, int_cmp_op, float_cmp_op_id, is_eq, is_ne, null_true) {
|
||||
// reads _qflags = {int_cmp_op, float_id, is_eq, is_ne, null_true} from closure
|
||||
var cmp = function(p, ctx, a, b) {
|
||||
var int_cmp_op = _qflags.int_cmp_op
|
||||
var float_cmp_op_id = _qflags.float_id
|
||||
var eq_only = 0
|
||||
if (is_eq || is_ne) {
|
||||
var mismatch_val = js_false
|
||||
var null_val = js_false
|
||||
if (_qflags.is_eq || _qflags.is_ne) {
|
||||
eq_only = 1
|
||||
}
|
||||
var mismatch_val = js_false
|
||||
if (is_ne) {
|
||||
if (_qflags.is_ne) {
|
||||
mismatch_val = js_true
|
||||
}
|
||||
var null_val = js_false
|
||||
if (null_true) {
|
||||
if (_qflags.null_true) {
|
||||
null_val = js_true
|
||||
}
|
||||
return `@${p}.start
|
||||
@@ -485,27 +492,32 @@ var cmp = function(p, ctx, a, b, int_cmp_op, float_cmp_op_id, is_eq, is_ne, null
|
||||
// MACH_EQ=0, NEQ=1, LT=2, LE=3, GT=4, GE=5
|
||||
// null_true: eq, le, ge return true for null==null; ne, lt, gt return false
|
||||
var eq = function(p, ctx, a, b) {
|
||||
return cmp(p, ctx, a, b, "ceqw", 0, true, false, true)
|
||||
_qflags = {int_cmp_op: "ceqw", float_id: 0, is_eq: true, is_ne: false, null_true: true}
|
||||
return cmp(p, ctx, a, b)
|
||||
}
|
||||
|
||||
var ne = function(p, ctx, a, b) {
|
||||
return cmp(p, ctx, a, b, "cnew", 1, false, true, false)
|
||||
_qflags = {int_cmp_op: "cnew", float_id: 1, is_eq: false, is_ne: true, null_true: false}
|
||||
return cmp(p, ctx, a, b)
|
||||
}
|
||||
|
||||
var lt = function(p, ctx, a, b) {
|
||||
return cmp(p, ctx, a, b, "csltw", 2, false, false, false)
|
||||
_qflags = {int_cmp_op: "csltw", float_id: 2, is_eq: false, is_ne: false, null_true: false}
|
||||
return cmp(p, ctx, a, b)
|
||||
}
|
||||
|
||||
var le = function(p, ctx, a, b) {
|
||||
return cmp(p, ctx, a, b, "cslew", 3, false, false, true)
|
||||
_qflags = {int_cmp_op: "cslew", float_id: 3, is_eq: false, is_ne: false, null_true: true}
|
||||
return cmp(p, ctx, a, b)
|
||||
}
|
||||
|
||||
var gt = function(p, ctx, a, b) {
|
||||
return cmp(p, ctx, a, b, "csgtw", 4, false, false, false)
|
||||
_qflags = {int_cmp_op: "csgtw", float_id: 4, is_eq: false, is_ne: false, null_true: false}
|
||||
return cmp(p, ctx, a, b)
|
||||
}
|
||||
|
||||
var ge = function(p, ctx, a, b) {
|
||||
return cmp(p, ctx, a, b, "csgew", 5, false, false, true)
|
||||
_qflags = {int_cmp_op: "csgew", float_id: 5, is_eq: false, is_ne: false, null_true: true}
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
@@ -627,7 +639,9 @@ var bnot = function(p, ctx, v) {
|
||||
// Both operands must be numeric. Int fast path, float -> convert to int32.
|
||||
// ============================================================
|
||||
|
||||
var bitwise_op = function(p, ctx, a, b, qbe_op) {
|
||||
// reads _qop from closure
|
||||
var bitwise_op = function(p, ctx, a, b) {
|
||||
var qbe_op = _qop
|
||||
return `@${p}.start
|
||||
%${p}.at =l and ${a}, 1
|
||||
%${p}.bt =l and ${b}, 1
|
||||
@@ -654,19 +668,24 @@ var bitwise_op = function(p, ctx, a, b, qbe_op) {
|
||||
}
|
||||
|
||||
var band = function(p, ctx, a, b) {
|
||||
return bitwise_op(p, ctx, a, b, "and")
|
||||
_qop = "and"
|
||||
return bitwise_op(p, ctx, a, b)
|
||||
}
|
||||
|
||||
var bor = function(p, ctx, a, b) {
|
||||
return bitwise_op(p, ctx, a, b, "or")
|
||||
_qop = "or"
|
||||
return bitwise_op(p, ctx, a, b)
|
||||
}
|
||||
|
||||
var bxor = function(p, ctx, a, b) {
|
||||
return bitwise_op(p, ctx, a, b, "xor")
|
||||
_qop = "xor"
|
||||
return bitwise_op(p, ctx, a, b)
|
||||
}
|
||||
|
||||
// Shift ops: mask shift amount to 5 bits (& 31)
|
||||
var shift_op = function(p, ctx, a, b, qbe_op) {
|
||||
// reads _qop from closure
|
||||
var shift_op = function(p, ctx, a, b) {
|
||||
var qbe_op = _qop
|
||||
return `@${p}.start
|
||||
%${p}.at =l and ${a}, 1
|
||||
%${p}.bt =l and ${b}, 1
|
||||
@@ -694,15 +713,113 @@ var shift_op = function(p, ctx, a, b, qbe_op) {
|
||||
}
|
||||
|
||||
var shl = function(p, ctx, a, b) {
|
||||
return shift_op(p, ctx, a, b, "shl")
|
||||
_qop = "shl"
|
||||
return shift_op(p, ctx, a, b)
|
||||
}
|
||||
|
||||
var shr = function(p, ctx, a, b) {
|
||||
return shift_op(p, ctx, a, b, "sar")
|
||||
_qop = "sar"
|
||||
return shift_op(p, ctx, a, b)
|
||||
}
|
||||
|
||||
var ushr = function(p, ctx, a, b) {
|
||||
return shift_op(p, ctx, a, b, "shr")
|
||||
_qop = "shr"
|
||||
return shift_op(p, ctx, a, b)
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Decomposed per-type-path operations
|
||||
// These map directly to the new IR ops emitted by mcode.cm.
|
||||
// ============================================================
|
||||
|
||||
// --- Text concat ---
|
||||
var concat = function(p, ctx, a, b) {
|
||||
return ` %${p} =l call $JS_ConcatString(l ${ctx}, l ${a}, l ${b})
|
||||
`
|
||||
}
|
||||
|
||||
// --- Comparisons (int path) ---
|
||||
var cmp_int = function(p, a, b, qbe_op) {
|
||||
return ` %${p}.ia =l sar ${a}, 1
|
||||
%${p}.ib =l sar ${b}, 1
|
||||
%${p}.iaw =w copy %${p}.ia
|
||||
%${p}.ibw =w copy %${p}.ib
|
||||
%${p}.cr =w ${qbe_op} %${p}.iaw, %${p}.ibw
|
||||
%${p}.crext =l extuw %${p}.cr
|
||||
%${p}.sh =l shl %${p}.crext, 5
|
||||
%${p} =l or %${p}.sh, 3
|
||||
`
|
||||
}
|
||||
|
||||
var eq_int = function(p, ctx, a, b) { return cmp_int(p, a, b, "ceqw") }
|
||||
var ne_int = function(p, ctx, a, b) { return cmp_int(p, a, b, "cnew") }
|
||||
var lt_int = function(p, ctx, a, b) { return cmp_int(p, a, b, "csltw") }
|
||||
var le_int = function(p, ctx, a, b) { return cmp_int(p, a, b, "cslew") }
|
||||
var gt_int = function(p, ctx, a, b) { return cmp_int(p, a, b, "csgtw") }
|
||||
var ge_int = function(p, ctx, a, b) { return cmp_int(p, a, b, "csgew") }
|
||||
|
||||
// --- Comparisons (float path) ---
|
||||
// reads _qop from closure (op_id)
|
||||
var cmp_float = function(p, ctx, a, b) {
|
||||
var op_id = _qop
|
||||
return ` %${p}.fcr =w call $qbe_float_cmp(l ${ctx}, w ${op_id}, l ${a}, l ${b})
|
||||
%${p}.fcrext =l extuw %${p}.fcr
|
||||
%${p}.fsh =l shl %${p}.fcrext, 5
|
||||
%${p} =l or %${p}.fsh, 3
|
||||
`
|
||||
}
|
||||
|
||||
var eq_float = function(p, ctx, a, b) { _qop = 0; return cmp_float(p, ctx, a, b) }
|
||||
var ne_float = function(p, ctx, a, b) { _qop = 1; return cmp_float(p, ctx, a, b) }
|
||||
var lt_float = function(p, ctx, a, b) { _qop = 2; return cmp_float(p, ctx, a, b) }
|
||||
var le_float = function(p, ctx, a, b) { _qop = 3; return cmp_float(p, ctx, a, b) }
|
||||
var gt_float = function(p, ctx, a, b) { _qop = 4; return cmp_float(p, ctx, a, b) }
|
||||
var ge_float = function(p, ctx, a, b) { _qop = 5; return cmp_float(p, ctx, a, b) }
|
||||
|
||||
// --- Comparisons (text path) ---
|
||||
// reads _qop (qbe_op) and _qop2 (eq_only) from closure
|
||||
var cmp_text = function(p, ctx, a, b) {
|
||||
var qbe_op = _qop
|
||||
var eq_only = _qop2
|
||||
return ` %${p}.scmp =w call $js_string_compare_value(l ${ctx}, l ${a}, l ${b}, w ${eq_only})
|
||||
%${p}.tcr =w ${qbe_op} %${p}.scmp, 0
|
||||
%${p}.tcrext =l extuw %${p}.tcr
|
||||
%${p}.tsh =l shl %${p}.tcrext, 5
|
||||
%${p} =l or %${p}.tsh, 3
|
||||
`
|
||||
}
|
||||
|
||||
var eq_text = function(p, ctx, a, b) { _qop = "ceqw"; _qop2 = 1; return cmp_text(p, ctx, a, b) }
|
||||
var ne_text = function(p, ctx, a, b) { _qop = "cnew"; _qop2 = 1; return cmp_text(p, ctx, a, b) }
|
||||
var lt_text = function(p, ctx, a, b) { _qop = "csltw"; _qop2 = 0; return cmp_text(p, ctx, a, b) }
|
||||
var le_text = function(p, ctx, a, b) { _qop = "cslew"; _qop2 = 0; return cmp_text(p, ctx, a, b) }
|
||||
var gt_text = function(p, ctx, a, b) { _qop = "csgtw"; _qop2 = 0; return cmp_text(p, ctx, a, b) }
|
||||
var ge_text = function(p, ctx, a, b) { _qop = "csgew"; _qop2 = 0; return cmp_text(p, ctx, a, b) }
|
||||
|
||||
// --- Comparisons (bool path) ---
|
||||
var eq_bool = function(p, a, b) {
|
||||
return ` %${p}.cr =w ceql ${a}, ${b}
|
||||
%${p}.crext =l extuw %${p}.cr
|
||||
%${p}.sh =l shl %${p}.crext, 5
|
||||
%${p} =l or %${p}.sh, 3
|
||||
`
|
||||
}
|
||||
|
||||
var ne_bool = function(p, a, b) {
|
||||
return ` %${p}.cr =w cnel ${a}, ${b}
|
||||
%${p}.crext =l extuw %${p}.cr
|
||||
%${p}.sh =l shl %${p}.crext, 5
|
||||
%${p} =l or %${p}.sh, 3
|
||||
`
|
||||
}
|
||||
|
||||
// --- Type guard: is_identical ---
|
||||
var is_identical = function(p, a, b) {
|
||||
return ` %${p}.cr =w ceql ${a}, ${b}
|
||||
%${p}.crext =l extuw %${p}.cr
|
||||
%${p}.sh =l shl %${p}.crext, 5
|
||||
%${p} =l or %${p}.sh, 3
|
||||
`
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
@@ -760,5 +877,33 @@ return {
|
||||
bxor: bxor,
|
||||
shl: shl,
|
||||
shr: shr,
|
||||
ushr: ushr
|
||||
ushr: ushr,
|
||||
// text concat
|
||||
concat: concat,
|
||||
// decomposed comparisons (int)
|
||||
eq_int: eq_int,
|
||||
ne_int: ne_int,
|
||||
lt_int: lt_int,
|
||||
le_int: le_int,
|
||||
gt_int: gt_int,
|
||||
ge_int: ge_int,
|
||||
// decomposed comparisons (float)
|
||||
eq_float: eq_float,
|
||||
ne_float: ne_float,
|
||||
lt_float: lt_float,
|
||||
le_float: le_float,
|
||||
gt_float: gt_float,
|
||||
ge_float: ge_float,
|
||||
// decomposed comparisons (text)
|
||||
eq_text: eq_text,
|
||||
ne_text: ne_text,
|
||||
lt_text: lt_text,
|
||||
le_text: le_text,
|
||||
gt_text: gt_text,
|
||||
ge_text: ge_text,
|
||||
// decomposed comparisons (bool)
|
||||
eq_bool: eq_bool,
|
||||
ne_bool: ne_bool,
|
||||
// type guard
|
||||
is_identical: is_identical
|
||||
}
|
||||
|
||||
3201
qbe.cm.mcode
Normal file
3201
qbe.cm.mcode
Normal file
File diff suppressed because it is too large
Load Diff
727
qbe_emit.cm
Normal file
727
qbe_emit.cm
Normal file
@@ -0,0 +1,727 @@
|
||||
// qbe_emit.cm — mcode IR → QBE IL compiler
|
||||
// Takes mcode IR (from mcode.cm) and uses qbe.cm macros to produce
|
||||
// a complete QBE IL program ready for the qbe compiler.
|
||||
// qbe module is passed via env as 'qbe'
|
||||
|
||||
var qbe_emit = function(ir, qbe) {
|
||||
var out = []
|
||||
var data_out = []
|
||||
var str_table = {}
|
||||
var str_id = 0
|
||||
var uid = 0
|
||||
|
||||
// ============================================================
|
||||
// Output helpers
|
||||
// ============================================================
|
||||
|
||||
var emit = function(s) {
|
||||
push(out, s)
|
||||
}
|
||||
|
||||
var fresh = function() {
|
||||
uid = uid + 1
|
||||
return "u" + text(uid)
|
||||
}
|
||||
|
||||
var s = function(n) {
|
||||
return "%s" + text(n)
|
||||
}
|
||||
|
||||
var sanitize = function(lbl) {
|
||||
var r = replace(lbl, ".", "_")
|
||||
r = replace(r, "-", "_")
|
||||
r = replace(r, " ", "_")
|
||||
r = replace(r, "/", "_")
|
||||
r = replace(r, "<", "")
|
||||
r = replace(r, ">", "")
|
||||
r = replace(r, "(", "")
|
||||
r = replace(r, ")", "")
|
||||
return r
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// String interning — emit data section entries
|
||||
// ============================================================
|
||||
|
||||
var intern_str = function(val) {
|
||||
if (str_table[val] != null) return str_table[val]
|
||||
var label = "$d_str_" + text(str_id)
|
||||
str_id = str_id + 1
|
||||
var escaped = replace(val, "\\", "\\\\")
|
||||
escaped = replace(escaped, "\"", "\\\"")
|
||||
var line = "data " + label + ' = ' + '{ b "' + escaped + '", b 0 }'
|
||||
push(data_out, line)
|
||||
str_table[val] = label
|
||||
return label
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Extract property name from mcode operand
|
||||
// ============================================================
|
||||
|
||||
var prop_name = function(a) {
|
||||
if (is_text(a)) return a
|
||||
if (is_object(a)) {
|
||||
if (a.name != null) return a.name
|
||||
if (a.value != null) return a.value
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Compile one function's instructions
|
||||
// ============================================================
|
||||
|
||||
var compile_fn = function(fn, fn_idx, is_main) {
|
||||
var instrs = fn.instructions
|
||||
var nr_slots = fn.nr_slots
|
||||
var nr_args = fn.nr_args
|
||||
var name = is_main ? "cell_main" : "cell_fn_" + text(fn_idx)
|
||||
name = sanitize(name)
|
||||
var i = 0
|
||||
var instr = null
|
||||
var op = null
|
||||
var a1 = null
|
||||
var a2 = null
|
||||
var a3 = null
|
||||
var a4 = null
|
||||
var p = null
|
||||
var pn = null
|
||||
var sl = null
|
||||
var fop_id = 0
|
||||
var nr_elems = 0
|
||||
var ei = 0
|
||||
var elem_slot = 0
|
||||
|
||||
// Function signature: (ctx, frame_ptr) → JSValue
|
||||
emit(`export function l $${name}(l %ctx, l %fp) {`)
|
||||
emit("@entry")
|
||||
|
||||
// Load all slots from frame into SSA variables
|
||||
// Each slot is a JSValue (8 bytes) at fp + slot*8
|
||||
var off = 0
|
||||
i = 0
|
||||
while (i < nr_slots) {
|
||||
off = i * 8
|
||||
emit(` %p${text(i)} =l add %fp, ${text(off)}`)
|
||||
emit(` ${s(i)} =l loadl %p${text(i)}`)
|
||||
i = i + 1
|
||||
}
|
||||
|
||||
// Write-back: store SSA var to frame slot so closures see updates
|
||||
var wb = function(slot) {
|
||||
emit(` storel ${s(slot)}, %p${text(slot)}`)
|
||||
}
|
||||
|
||||
// Walk instructions
|
||||
i = 0
|
||||
while (i < length(instrs)) {
|
||||
instr = instrs[i]
|
||||
i = i + 1
|
||||
|
||||
// Labels are plain strings
|
||||
if (is_text(instr)) {
|
||||
emit("@" + sanitize(instr))
|
||||
continue
|
||||
}
|
||||
|
||||
op = instr[0]
|
||||
a1 = instr[1]
|
||||
a2 = instr[2]
|
||||
a3 = instr[3]
|
||||
|
||||
// --- Constants ---
|
||||
|
||||
if (op == "int") {
|
||||
emit(` ${s(a1)} =l copy ${text(a2 * 2)}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "null") {
|
||||
emit(` ${s(a1)} =l copy ${text(qbe.js_null)}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "true") {
|
||||
emit(` ${s(a1)} =l copy ${text(qbe.js_true)}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "false") {
|
||||
emit(` ${s(a1)} =l copy ${text(qbe.js_false)}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "access") {
|
||||
if (is_number(a2)) {
|
||||
if (is_integer(a2)) {
|
||||
emit(` ${s(a1)} =l copy ${text(a2 * 2)}`)
|
||||
} else {
|
||||
emit(` ${s(a1)} =l call $__JS_NewFloat64(l %ctx, d d_${text(a2)})`)
|
||||
}
|
||||
} else if (is_text(a2)) {
|
||||
sl = intern_str(a2)
|
||||
emit(` ${s(a1)} =l call $JS_NewString(l %ctx, l ${sl})`)
|
||||
} else if (is_object(a2)) {
|
||||
if (a2.make == "intrinsic") {
|
||||
sl = intern_str(a2.name)
|
||||
emit(` ${s(a1)} =l call $cell_rt_get_intrinsic(l %ctx, l ${sl})`)
|
||||
} else if (a2.kind == "number") {
|
||||
if (a2.number != null && is_integer(a2.number)) {
|
||||
emit(` ${s(a1)} =l copy ${text(a2.number * 2)}`)
|
||||
} else if (a2.number != null) {
|
||||
emit(` ${s(a1)} =l call $__JS_NewFloat64(l %ctx, d d_${text(a2.number)})`)
|
||||
} else {
|
||||
emit(` ${s(a1)} =l copy ${text(qbe.js_null)}`)
|
||||
}
|
||||
} else if (a2.kind == "text") {
|
||||
sl = intern_str(a2.value)
|
||||
emit(` ${s(a1)} =l call $JS_NewString(l %ctx, l ${sl})`)
|
||||
} else if (a2.kind == "true") {
|
||||
emit(` ${s(a1)} =l copy ${text(qbe.js_true)}`)
|
||||
} else if (a2.kind == "false") {
|
||||
emit(` ${s(a1)} =l copy ${text(qbe.js_false)}`)
|
||||
} else if (a2.kind == "null") {
|
||||
emit(` ${s(a1)} =l copy ${text(qbe.js_null)}`)
|
||||
} else {
|
||||
emit(` ${s(a1)} =l copy ${text(qbe.js_null)}`)
|
||||
}
|
||||
} else {
|
||||
emit(` ${s(a1)} =l copy ${text(qbe.js_null)}`)
|
||||
}
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Movement ---
|
||||
|
||||
if (op == "move") {
|
||||
emit(` ${s(a1)} =l copy ${s(a2)}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Generic arithmetic (VM dispatches int/float) ---
|
||||
|
||||
if (op == "add") {
|
||||
p = fresh()
|
||||
emit(qbe.add(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "subtract") {
|
||||
p = fresh()
|
||||
emit(qbe.sub(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "multiply") {
|
||||
p = fresh()
|
||||
emit(qbe.mul(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "divide") {
|
||||
p = fresh()
|
||||
emit(qbe.div(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "modulo") {
|
||||
p = fresh()
|
||||
emit(qbe.mod(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "negate") {
|
||||
p = fresh()
|
||||
emit(qbe.neg(p, "%ctx", s(a2)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- String concat ---
|
||||
|
||||
if (op == "concat") {
|
||||
p = fresh()
|
||||
emit(qbe.concat(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Type checks — use qbe.cm macros ---
|
||||
|
||||
if (op == "is_int") {
|
||||
p = fresh()
|
||||
emit(qbe.is_int(p, s(a2)))
|
||||
emit(qbe.new_bool(p + ".r", "%" + p))
|
||||
emit(` ${s(a1)} =l copy %${p}.r`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "is_text") {
|
||||
p = fresh()
|
||||
emit(qbe.is_imm_text(p, s(a2)))
|
||||
emit(qbe.new_bool(p + ".r", "%" + p))
|
||||
emit(` ${s(a1)} =l copy %${p}.r`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "is_num") {
|
||||
p = fresh()
|
||||
emit(qbe.is_number(p, s(a2)))
|
||||
emit(qbe.new_bool(p + ".r", "%" + p))
|
||||
emit(` ${s(a1)} =l copy %${p}.r`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "is_bool") {
|
||||
p = fresh()
|
||||
emit(qbe.is_bool(p, s(a2)))
|
||||
emit(qbe.new_bool(p + ".r", "%" + p))
|
||||
emit(` ${s(a1)} =l copy %${p}.r`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "is_null") {
|
||||
p = fresh()
|
||||
emit(qbe.is_null(p, s(a2)))
|
||||
emit(qbe.new_bool(p + ".r", "%" + p))
|
||||
emit(` ${s(a1)} =l copy %${p}.r`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "is_identical") {
|
||||
p = fresh()
|
||||
emit(qbe.is_identical(p, s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Comparisons (int path) ---
|
||||
|
||||
if (op == "eq_int") {
|
||||
p = fresh()
|
||||
emit(qbe.eq_int(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "ne_int") {
|
||||
p = fresh()
|
||||
emit(qbe.ne_int(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "lt_int") {
|
||||
p = fresh()
|
||||
emit(qbe.lt_int(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "gt_int") {
|
||||
p = fresh()
|
||||
emit(qbe.gt_int(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "le_int") {
|
||||
p = fresh()
|
||||
emit(qbe.le_int(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "ge_int") {
|
||||
p = fresh()
|
||||
emit(qbe.ge_int(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Comparisons (float/text/bool) ---
|
||||
|
||||
if (op == "eq_float") {
|
||||
p = fresh()
|
||||
emit(qbe.eq_float(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "ne_float") {
|
||||
p = fresh()
|
||||
emit(qbe.ne_float(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "lt_float" || op == "gt_float" || op == "le_float" || op == "ge_float") {
|
||||
p = fresh()
|
||||
fop_id = 0
|
||||
if (op == "lt_float") fop_id = 2
|
||||
else if (op == "le_float") fop_id = 3
|
||||
else if (op == "gt_float") fop_id = 4
|
||||
else if (op == "ge_float") fop_id = 5
|
||||
emit(qbe.cmp_float != null ? qbe.cmp_float(p, "%ctx", s(a2), s(a3), fop_id) : ` %${p} =l call $qbe_float_cmp(l %ctx, w ${text(fop_id)}, l ${s(a2)}, l ${s(a3)})`)
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "eq_text") {
|
||||
p = fresh()
|
||||
emit(qbe.eq_text(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "ne_text") {
|
||||
p = fresh()
|
||||
emit(qbe.ne_text(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "lt_text" || op == "gt_text" || op == "le_text" || op == "ge_text") {
|
||||
p = fresh()
|
||||
emit(` ${s(a1)} =l call $cell_rt_${op}(l %ctx, l ${s(a2)}, l ${s(a3)})`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "eq_bool") {
|
||||
p = fresh()
|
||||
emit(qbe.eq_bool(p, s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "ne_bool") {
|
||||
p = fresh()
|
||||
emit(qbe.ne_bool(p, s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "eq_tol" || op == "ne_tol") {
|
||||
emit(` ${s(a1)} =l call $cell_rt_${op}(l %ctx, l ${s(a2)}, l ${s(a3)})`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Boolean ops ---
|
||||
|
||||
if (op == "not") {
|
||||
p = fresh()
|
||||
emit(qbe.lnot(p, "%ctx", s(a2)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "and") {
|
||||
emit(` ${s(a1)} =l and ${s(a2)}, ${s(a3)}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "or") {
|
||||
emit(` ${s(a1)} =l or ${s(a2)}, ${s(a3)}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Bitwise ops — use qbe.cm macros ---
|
||||
|
||||
if (op == "bitnot") {
|
||||
p = fresh()
|
||||
emit(qbe.bnot(p, "%ctx", s(a2)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "bitand") {
|
||||
p = fresh()
|
||||
emit(qbe.band(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "bitor") {
|
||||
p = fresh()
|
||||
emit(qbe.bor(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "bitxor") {
|
||||
p = fresh()
|
||||
emit(qbe.bxor(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "shl") {
|
||||
p = fresh()
|
||||
emit(qbe.shl(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "shr") {
|
||||
p = fresh()
|
||||
emit(qbe.shr(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "ushr") {
|
||||
p = fresh()
|
||||
emit(qbe.ushr(p, "%ctx", s(a2), s(a3)))
|
||||
emit(` ${s(a1)} =l copy %${p}`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Property access — runtime calls ---
|
||||
|
||||
if (op == "load_field") {
|
||||
pn = prop_name(a3)
|
||||
if (pn != null) {
|
||||
sl = intern_str(pn)
|
||||
emit(` ${s(a1)} =l call $cell_rt_load_field(l %ctx, l ${s(a2)}, l ${sl})`)
|
||||
} else {
|
||||
emit(` ${s(a1)} =l call $cell_rt_load_dynamic(l %ctx, l ${s(a2)}, l ${s(a3)})`)
|
||||
}
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "load_index") {
|
||||
emit(` ${s(a1)} =l call $cell_rt_load_index(l %ctx, l ${s(a2)}, l ${s(a3)})`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "load_dynamic") {
|
||||
emit(` ${s(a1)} =l call $cell_rt_load_dynamic(l %ctx, l ${s(a2)}, l ${s(a3)})`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "store_field") {
|
||||
// IR: ["store_field", obj, val, prop] → C: (ctx, val, obj, name)
|
||||
pn = prop_name(a3)
|
||||
if (pn != null) {
|
||||
sl = intern_str(pn)
|
||||
emit(` call $cell_rt_store_field(l %ctx, l ${s(a2)}, l ${s(a1)}, l ${sl})`)
|
||||
} else {
|
||||
emit(` call $cell_rt_store_dynamic(l %ctx, l ${s(a2)}, l ${s(a1)}, l ${s(a3)})`)
|
||||
}
|
||||
continue
|
||||
}
|
||||
if (op == "store_index") {
|
||||
// IR: ["store_index", obj, val, idx] → C: (ctx, val, obj, idx)
|
||||
emit(` call $cell_rt_store_index(l %ctx, l ${s(a2)}, l ${s(a1)}, l ${s(a3)})`)
|
||||
continue
|
||||
}
|
||||
if (op == "store_dynamic") {
|
||||
// IR: ["store_dynamic", obj, val, key] → C: (ctx, val, obj, key)
|
||||
emit(` call $cell_rt_store_dynamic(l %ctx, l ${s(a2)}, l ${s(a1)}, l ${s(a3)})`)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Closure access ---
|
||||
|
||||
if (op == "get") {
|
||||
emit(` ${s(a1)} =l call $cell_rt_get_closure(l %ctx, l %fp, l ${text(a2)}, l ${text(a3)})`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "put") {
|
||||
emit(` call $cell_rt_put_closure(l %ctx, l %fp, l ${s(a1)}, l ${text(a2)}, l ${text(a3)})`)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Control flow ---
|
||||
|
||||
if (op == "jump") {
|
||||
emit(` jmp @${sanitize(a1)}`)
|
||||
continue
|
||||
}
|
||||
if (op == "jump_true") {
|
||||
p = fresh()
|
||||
emit(` %${p} =w call $JS_ToBool(l %ctx, l ${s(a1)})`)
|
||||
emit(` jnz %${p}, @${sanitize(a2)}, @${p}_f`)
|
||||
emit(`@${p}_f`)
|
||||
continue
|
||||
}
|
||||
if (op == "jump_false") {
|
||||
p = fresh()
|
||||
emit(` %${p} =w call $JS_ToBool(l %ctx, l ${s(a1)})`)
|
||||
emit(` jnz %${p}, @${p}_t, @${sanitize(a2)}`)
|
||||
emit(`@${p}_t`)
|
||||
continue
|
||||
}
|
||||
if (op == "jump_null") {
|
||||
p = fresh()
|
||||
emit(` %${p} =w ceql ${s(a1)}, ${text(qbe.js_null)}`)
|
||||
emit(` jnz %${p}, @${sanitize(a2)}, @${p}_nn`)
|
||||
emit(`@${p}_nn`)
|
||||
continue
|
||||
}
|
||||
if (op == "jump_not_null") {
|
||||
p = fresh()
|
||||
emit(` %${p} =w cnel ${s(a1)}, ${text(qbe.js_null)}`)
|
||||
emit(` jnz %${p}, @${sanitize(a2)}, @${p}_n`)
|
||||
emit(`@${p}_n`)
|
||||
continue
|
||||
}
|
||||
if (op == "wary_true") {
|
||||
p = fresh()
|
||||
emit(` %${p} =w call $JS_ToBool(l %ctx, l ${s(a1)})`)
|
||||
emit(` jnz %${p}, @${sanitize(a2)}, @${p}_f`)
|
||||
emit(`@${p}_f`)
|
||||
continue
|
||||
}
|
||||
if (op == "wary_false") {
|
||||
p = fresh()
|
||||
emit(` %${p} =w call $JS_ToBool(l %ctx, l ${s(a1)})`)
|
||||
emit(` jnz %${p}, @${p}_t, @${sanitize(a2)}`)
|
||||
emit(`@${p}_t`)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Function calls ---
|
||||
|
||||
if (op == "frame") {
|
||||
emit(` ${s(a1)} =l call $cell_rt_frame(l %ctx, l ${s(a2)}, l ${text(a3)})`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "setarg") {
|
||||
emit(` call $cell_rt_setarg(l ${s(a1)}, l ${text(a2)}, l ${s(a3)})`)
|
||||
continue
|
||||
}
|
||||
if (op == "invoke") {
|
||||
emit(` ${s(a2)} =l call $cell_rt_invoke(l %ctx, l ${s(a1)})`)
|
||||
wb(a2)
|
||||
continue
|
||||
}
|
||||
if (op == "goframe") {
|
||||
emit(` ${s(a1)} =l call $cell_rt_goframe(l %ctx, l ${s(a2)}, l ${text(a3)})`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "goinvoke") {
|
||||
emit(` %_goret =l call $cell_rt_goinvoke(l %ctx, l ${s(a1)})`)
|
||||
emit(` ret %_goret`)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Function object creation ---
|
||||
|
||||
if (op == "function") {
|
||||
emit(` ${s(a1)} =l call $cell_rt_make_function(l %ctx, l ${text(a2)}, l %fp)`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Record/Array creation ---
|
||||
|
||||
if (op == "record") {
|
||||
emit(` ${s(a1)} =l call $JS_NewObject(l %ctx)`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "array") {
|
||||
nr_elems = a2 != null ? a2 : 0
|
||||
emit(` ${s(a1)} =l call $JS_NewArray(l %ctx)`)
|
||||
ei = 0
|
||||
while (ei < nr_elems) {
|
||||
elem_slot = instr[3 + ei]
|
||||
emit(` call $JS_SetPropertyNumber(l %ctx, l ${s(a1)}, l ${text(ei)}, l ${s(elem_slot)})`)
|
||||
ei = ei + 1
|
||||
}
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Array push/pop ---
|
||||
|
||||
if (op == "push") {
|
||||
emit(` call $cell_rt_push(l %ctx, l ${s(a1)}, l ${s(a2)})`)
|
||||
continue
|
||||
}
|
||||
if (op == "pop") {
|
||||
emit(` ${s(a1)} =l call $cell_rt_pop(l %ctx, l ${s(a2)})`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Misc ---
|
||||
|
||||
if (op == "return") {
|
||||
emit(` ret ${s(a1)}`)
|
||||
continue
|
||||
}
|
||||
if (op == "disrupt") {
|
||||
emit(` call $cell_rt_disrupt(l %ctx)`)
|
||||
emit(` ret ${text(qbe.js_null)}`)
|
||||
continue
|
||||
}
|
||||
if (op == "delete") {
|
||||
emit(` ${s(a1)} =l call $cell_rt_delete(l %ctx, l ${s(a2)}, l ${s(a3)})`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
if (op == "typeof") {
|
||||
emit(` ${s(a1)} =l call $cell_rt_typeof(l %ctx, l ${s(a2)})`)
|
||||
wb(a1)
|
||||
continue
|
||||
}
|
||||
|
||||
// --- Unknown opcode ---
|
||||
emit(` # unknown: ${op}`)
|
||||
}
|
||||
|
||||
emit("}")
|
||||
emit("")
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// Main: compile all functions then main
|
||||
// ============================================================
|
||||
|
||||
var fi = 0
|
||||
while (fi < length(ir.functions)) {
|
||||
compile_fn(ir.functions[fi], fi, false)
|
||||
fi = fi + 1
|
||||
}
|
||||
|
||||
compile_fn(ir.main, -1, true)
|
||||
|
||||
// Assemble: data section first, then function bodies
|
||||
var result = []
|
||||
var di = 0
|
||||
while (di < length(data_out)) {
|
||||
push(result, data_out[di])
|
||||
di = di + 1
|
||||
}
|
||||
if (length(data_out) > 0) push(result, "")
|
||||
|
||||
di = 0
|
||||
while (di < length(out)) {
|
||||
push(result, out[di])
|
||||
di = di + 1
|
||||
}
|
||||
|
||||
return text(result, "\n")
|
||||
}
|
||||
|
||||
return qbe_emit
|
||||
11792
qbe_emit.cm.mcode
Normal file
11792
qbe_emit.cm.mcode
Normal file
File diff suppressed because it is too large
Load Diff
82
qbe_rt.c
Normal file
82
qbe_rt.c
Normal file
@@ -0,0 +1,82 @@
|
||||
/*
|
||||
* qbe_rt.c - Non-inline wrappers for QBE-compiled ƿit modules
|
||||
*
|
||||
* Provides non-inline versions of static-inline quickjs functions
|
||||
* (which QBE-generated code calls as external symbols).
|
||||
*
|
||||
* All cell_rt_* runtime functions are implemented in source/qbe_helpers.c
|
||||
* (compiled into the cell binary) and resolved via -undefined dynamic_lookup.
|
||||
*/
|
||||
|
||||
#include <stdint.h>
|
||||
#include <string.h>
|
||||
#include <math.h>
|
||||
|
||||
typedef uint64_t JSValue;
|
||||
typedef struct JSContext JSContext;
|
||||
|
||||
#define JS_TAG_SHORT_FLOAT 5
|
||||
#define JS_TAG_NULL 7
|
||||
#define JS_VAL_NULL 7
|
||||
|
||||
/* ============================================================
|
||||
Non-inline wrappers for static-inline quickjs functions
|
||||
============================================================ */
|
||||
|
||||
/*
|
||||
* __JS_NewFloat64 — encode double as tagged JSValue
|
||||
* Short float: [sign:1][exp:8][mantissa:52][tag:3]
|
||||
* Returns tagged int if value is an exact integer in int32 range
|
||||
*/
|
||||
JSValue __JS_NewFloat64(JSContext *ctx, double d) {
|
||||
union { double d; uint64_t u; } u;
|
||||
u.d = d;
|
||||
|
||||
uint64_t sign = u.u >> 63;
|
||||
int exp = (u.u >> 52) & 0x7FF;
|
||||
uint64_t mantissa = u.u & ((1ULL << 52) - 1);
|
||||
|
||||
/* Zero */
|
||||
if (exp == 0 && mantissa == 0)
|
||||
return JS_TAG_SHORT_FLOAT;
|
||||
/* NaN/Inf → null */
|
||||
if (exp == 0x7FF)
|
||||
return JS_VAL_NULL;
|
||||
/* Subnormals → zero */
|
||||
if (exp == 0)
|
||||
return (sign << 63) | JS_TAG_SHORT_FLOAT;
|
||||
|
||||
int short_exp = exp - 1023 + 127;
|
||||
if (short_exp < 1 || short_exp > 254)
|
||||
return JS_VAL_NULL;
|
||||
|
||||
/* Prefer integer if exact */
|
||||
if (d >= (double)(-2147483647 - 1) && d <= (double)2147483647) {
|
||||
int32_t i = (int32_t)d;
|
||||
if ((double)i == d)
|
||||
return (uint64_t)(uint32_t)i << 1;
|
||||
}
|
||||
|
||||
return (sign << 63)
|
||||
| ((uint64_t)short_exp << 55)
|
||||
| (mantissa << 3)
|
||||
| JS_TAG_SHORT_FLOAT;
|
||||
}
|
||||
|
||||
/*
|
||||
* JS_IsNumber — check if value is tagged int or short float
|
||||
*/
|
||||
int JS_IsNumber(JSValue v) {
|
||||
int is_int = (v & 1) == 0;
|
||||
int is_float = (v & 7) == JS_TAG_SHORT_FLOAT;
|
||||
return is_int || is_float;
|
||||
}
|
||||
|
||||
/*
 * JS_NewString — create string from C string (wraps JS_NewStringLen)
 */

/* Forward declaration: implemented by the host runtime (quickjs). */
extern JSValue JS_NewStringLen(JSContext *ctx, const char *str, size_t len);

/* Build a JS string from a NUL-terminated C string. Length is computed
   with strlen, so `str` cannot contain embedded NUL bytes. */
JSValue JS_NewString(JSContext *ctx, const char *str) {
    return JS_NewStringLen(ctx, str, strlen(str));
}
|
||||
2
qop.c
2
qop.c
@@ -267,7 +267,7 @@ static JSValue js_qop_list(JSContext *js, JSValue self, int argc, JSValue *argv)
|
||||
|
||||
JSValue str = JS_NewStringLen(js, path, len - 1); // -1 for null terminator
|
||||
js_free(js, path);
|
||||
JS_SetPropertyUint32(js, arr, count++, str);
|
||||
JS_SetPropertyNumber(js, arr, count++, str);
|
||||
}
|
||||
|
||||
return arr;
|
||||
|
||||
155
regen.ce
Normal file
155
regen.ce
Normal file
@@ -0,0 +1,155 @@
|
||||
// regen.ce — regenerate .mach bytecode files
// Run with: ./cell --core . regen
//
// Pipeline per module: read source → tokenize → parse → fold →
// mcode compile → streamline → JSON-encode → write <src>.mcode.

var fd = use("fd")
var json = use("json")
var tokenize = use("tokenize")
var parse = use("parse")
var fold = use("fold")
var mcode = use("mcode")
var streamline = use("streamline")

// Core modules to regenerate: `src` is the source file, `out` the
// .mcode file written next to it. NOTE(review): `name` is not read by
// the loop below — possibly kept for tooling; confirm before removing.
var files = [
    {src: "tokenize.cm", name: "tokenize", out: "tokenize.cm.mcode"},
    {src: "parse.cm", name: "parse", out: "parse.cm.mcode"},
    {src: "fold.cm", name: "fold", out: "fold.cm.mcode"},
    {src: "mcode.cm", name: "mcode", out: "mcode.cm.mcode"},
    {src: "streamline.cm", name: "streamline", out: "streamline.cm.mcode"},
    {src: "qbe.cm", name: "qbe", out: "qbe.cm.mcode"},
    {src: "qbe_emit.cm", name: "qbe_emit", out: "qbe_emit.cm.mcode"},
    {src: "internal/bootstrap.cm", name: "bootstrap", out: "internal/bootstrap.cm.mcode"},
    {src: "internal/engine.cm", name: "engine", out: "internal/engine.cm.mcode"}
]

// All locals declared up front (house style throughout this codebase).
var i = 0
var entry = null
var src = null
var tok_result = null
var ast = null
var folded = null
var compiled = null
var optimized = null
var mcode_text = null
var f = null
var errs = null          // parse/semantic errors for the current module
var ei = 0
var e = null
var had_errors = false   // set if any module fails; reported at the end
|
||||
|
||||
// Collapse leaf arrays (instruction arrays) onto single lines
// so the emitted .mcode JSON stays readable but compact.
// A "leaf" array is one whose collected lines contain no nested "[" or
// "{" openers; any other group is re-emitted verbatim.
// NOTE(review): collection is one level deep — the first "]"/"]," line
// ends the current group. That holds for mcode output, where only leaf
// instruction arrays appear inside bodies; confirm if the JSON shape
// ever changes.
var compact_arrays = function(json_text) {
    var lines = array(json_text, "\n")
    var result = []        // output lines
    var i = 0
    var line = null
    var trimmed = null
    var collecting = false // inside a candidate "[ ... ]" group
    var collected = null   // raw lines gathered since the opening "["
    var indent = null      // leading spaces of the opening "[" line
    var is_leaf = null
    var j = 0
    var inner = null
    var parts = null       // collected elements with trailing commas stripped
    var trailing = null    // "," if the closing "]" carried one
    var chars = null
    var k = 0

    while (i < length(lines)) {
        line = lines[i]
        trimmed = trim(line)
        // A bare "[" line starts a candidate group; remember its indent.
        if (collecting == false && trimmed == "[") {
            collecting = true
            chars = array(line)
            k = 0
            // Count leading spaces to reproduce the indent on output.
            while (k < length(chars) && chars[k] == " ") {
                k = k + 1
            }
            indent = text(line, 0, k)
            collected = []
            i = i + 1
            continue
        }
        if (collecting) {
            if (trimmed == "]" || trimmed == "],") {
                // Group closed: leaf only if no nested array/object opens.
                is_leaf = true
                j = 0
                while (j < length(collected)) {
                    inner = trim(collected[j])
                    if (starts_with(inner, "[") || starts_with(inner, "{")) {
                        is_leaf = false
                    }
                    j = j + 1
                }
                if (is_leaf && length(collected) > 0) {
                    // Join the elements onto one line, re-adding commas.
                    parts = []
                    j = 0
                    while (j < length(collected)) {
                        inner = trim(collected[j])
                        if (ends_with(inner, ",")) {
                            inner = text(inner, 0, length(inner) - 1)
                        }
                        parts[] = inner
                        j = j + 1
                    }
                    // Preserve a trailing comma from the closing "],".
                    trailing = ""
                    if (ends_with(trimmed, ",")) {
                        trailing = ","
                    }
                    result[] = `${indent}[${text(parts, ", ")}]${trailing}`
                } else {
                    // Not a leaf (or empty): emit the group unchanged.
                    result[] = `${indent}[`
                    j = 0
                    while (j < length(collected)) {
                        result[] = collected[j]
                        j = j + 1
                    }
                    result[] = line
                }
                collecting = false
            } else {
                collected[] = line
            }
            i = i + 1
            continue
        }
        // Outside any group: pass the line through untouched.
        result[] = line
        i = i + 1
    }
    return text(result, "\n")
}
|
||||
|
||||
// Compile every module; on error, report it and keep going so one
// broken file does not hide errors in the others.
while (i < length(files)) {
    entry = files[i]
    src = text(fd.slurp(entry.src))
    tok_result = tokenize(src, entry.src)
    ast = parse(tok_result.tokens, src, entry.src, tokenize)
    // Check for parse/semantic errors
    errs = ast.errors
    if (errs != null && length(errs) > 0) {
        // Print each error in file:line:col form when positions exist.
        ei = 0
        while (ei < length(errs)) {
            e = errs[ei]
            if (e.line != null) {
                print(`${entry.src}:${text(e.line)}:${text(e.column)}: error: ${e.message}`)
            } else {
                print(`${entry.src}: error: ${e.message}`)
            }
            ei = ei + 1
        }
        had_errors = true
        i = i + 1
        continue
    }
    // Source is clean: run the remaining pipeline stages.
    folded = fold(ast)
    compiled = mcode(folded)
    optimized = streamline(compiled)
    // Pretty-print with 2-space indent, then compact leaf arrays.
    mcode_text = compact_arrays(json.encode(optimized, null, 2))
    f = fd.open(entry.out, "w")
    fd.write(f, mcode_text)
    fd.close(f)
    print(`wrote ${entry.out}`)
    i = i + 1
}
// Modules that did compile were still written above; this is a summary.
if (had_errors) {
    print("regen aborted: fix errors above")
}
|
||||
91
run_native.ce
Normal file
91
run_native.ce
Normal file
@@ -0,0 +1,91 @@
|
||||
// run_native.ce — load a module both interpreted and native, compare speed
//
// Usage:
//   cell --core . run_native.ce <module>
//
// Loads <module>.cm via use() (interpreted) and <module>.dylib (native),
// runs both and compares results and timing.

var os = use('os')

// Require at least the module name.
if (length(args) < 1) {
    print('usage: cell --core . run_native.ce <module>')
    print(' e.g. cell --core . run_native.ce num_torture')
    return
}

// Strip a trailing ".cm" so both "foo" and "foo.cm" work.
var name = args[0]
if (ends_with(name, '.cm')) {
    name = text(name, 0, length(name) - 3)
}

// Derive the dylib entry-point symbol: path separators and dashes are
// not legal in C identifiers, so they become underscores.
var safe = replace(replace(name, '/', '_'), '-', '_')
var symbol = 'js_' + safe + '_use'
var dylib_path = './' + name + '.dylib'
var fd = use('fd')

// --- Test argument for function-returning modules ---
var test_arg = 5000000
if (length(args) > 1) {
    test_arg = number(args[1])
}

// --- Interpreted run ---
print('--- interpreted ---')
var t1 = os.now()
var mod_interp = use(name)
var t2 = os.now()
var result_interp = null
// If the module exports a function, time the call instead of the load.
if (is_function(mod_interp)) {
    print('module returns a function, calling with ' + text(test_arg))
    t1 = os.now()
    result_interp = mod_interp(test_arg)
    t2 = os.now()
}
result_interp = result_interp != null ? result_interp : mod_interp
// Divides by 1e6, so os.now() is presumably nanoseconds — TODO confirm.
var ms_interp = (t2 - t1) / 1000000
print('result: ' + text(result_interp))
print('time: ' + text(ms_interp) + ' ms')

// --- Native run ---
if (!fd.is_file(dylib_path)) {
    print('\nno ' + dylib_path + ' found — run compile.ce first')
    return
}

print('\n--- native ---')
var t3 = os.now()
var lib = os.dylib_open(dylib_path)
var t4 = os.now()
var mod_native = os.dylib_symbol(lib, symbol)
var t5 = os.now()
var result_native = null
// Same shape as the interpreted path: time the call when it's a function.
if (is_function(mod_native)) {
    print('module returns a function, calling with ' + text(test_arg))
    t4 = os.now()
    result_native = mod_native(test_arg)
    t5 = os.now()
}
result_native = result_native != null ? result_native : mod_native
var ms_load = (t4 - t3) / 1000000
var ms_exec = (t5 - t4) / 1000000
var ms_native = (t5 - t3) / 1000000
print('result: ' + text(result_native))
print('load: ' + text(ms_load) + ' ms')
print('exec: ' + text(ms_exec) + ' ms')
print('total: ' + text(ms_native) + ' ms')

// --- Comparison ---
// NOTE(review): == on module results may compare by identity when a
// module returns a record/array rather than a scalar — confirm.
print('\n--- comparison ---')
var match = result_interp == result_native
var speedup = 0
var speedup_exec = 0
print('match: ' + text(match))
if (ms_native > 0) {
    speedup = ms_interp / ms_native
    print('speedup: ' + text(speedup) + 'x (total)')
}
if (ms_exec > 0) {
    speedup_exec = ms_interp / ms_exec
    print('speedup: ' + text(speedup_exec) + 'x (exec only)')
}
|
||||
245
source/cell.c
245
source/cell.c
@@ -11,8 +11,9 @@
|
||||
#include "cell_internal.h"
|
||||
#include "cJSON.h"
|
||||
|
||||
#define BOOTSTRAP_MACH "internal/bootstrap.mach"
|
||||
#define BOOTSTRAP_AST "internal/bootstrap.ast.json"
|
||||
#define BOOTSTRAP_MACH "internal/bootstrap.cm.mach"
|
||||
#define BOOTSTRAP_MCODE "internal/bootstrap.cm.mcode"
|
||||
#define BOOTSTRAP_SRC "internal/bootstrap.cm"
|
||||
#define CELL_SHOP_DIR ".cell"
|
||||
#define CELL_CORE_DIR "packages/core"
|
||||
|
||||
@@ -26,6 +27,7 @@ int run_c_test_suite(JSContext *ctx);
|
||||
static int run_test_suite(size_t heap_size);
|
||||
|
||||
cell_rt *root_cell = NULL;
|
||||
static char *shop_path = NULL;
|
||||
static char *core_path = NULL;
|
||||
static JSRuntime *g_runtime = NULL;
|
||||
|
||||
@@ -38,31 +40,57 @@ static const char* get_home_dir(void) {
|
||||
return home;
|
||||
}
|
||||
|
||||
// Find and verify the cell shop at ~/.cell
|
||||
int find_cell_shop(void)
|
||||
// Resolve shop_path and core_path
|
||||
// core: --core flag > CELL_CORE env > derived from shop
|
||||
// shop: --shop flag > CELL_SHOP env > ~/.cell
|
||||
int find_cell_shop(const char *shop_override, const char *core_override)
|
||||
{
|
||||
const char *home = get_home_dir();
|
||||
if (!home) {
|
||||
printf("ERROR: Could not determine home directory. Set HOME environment variable.\n");
|
||||
return 0;
|
||||
// Resolve shop_path
|
||||
if (shop_override) {
|
||||
shop_path = strdup(shop_override);
|
||||
} else {
|
||||
const char *env = getenv("CELL_SHOP");
|
||||
if (env) {
|
||||
shop_path = strdup(env);
|
||||
} else {
|
||||
const char *home = get_home_dir();
|
||||
if (!home && !core_override && !getenv("CELL_CORE")) {
|
||||
printf("ERROR: Could not determine home directory. Set HOME environment variable.\n");
|
||||
return 0;
|
||||
}
|
||||
if (home) {
|
||||
size_t path_len = strlen(home) + strlen("/" CELL_SHOP_DIR) + 1;
|
||||
shop_path = malloc(path_len);
|
||||
if (shop_path)
|
||||
snprintf(shop_path, path_len, "%s/" CELL_SHOP_DIR, home);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve core_path
|
||||
if (core_override) {
|
||||
core_path = strdup(core_override);
|
||||
} else {
|
||||
const char *env = getenv("CELL_CORE");
|
||||
if (env) {
|
||||
core_path = strdup(env);
|
||||
} else if (shop_path) {
|
||||
size_t core_len = strlen(shop_path) + strlen("/" CELL_CORE_DIR) + 1;
|
||||
core_path = malloc(core_len);
|
||||
if (core_path)
|
||||
snprintf(core_path, core_len, "%s/" CELL_CORE_DIR, shop_path);
|
||||
}
|
||||
}
|
||||
|
||||
// Build path to ~/.cell/core
|
||||
size_t path_len = strlen(home) + strlen("/" CELL_SHOP_DIR "/" CELL_CORE_DIR) + 1;
|
||||
core_path = malloc(path_len);
|
||||
if (!core_path) {
|
||||
printf("ERROR: Could not allocate memory for core path\n");
|
||||
printf("ERROR: No core path. Use --core <path> or set CELL_CORE.\n");
|
||||
return 0;
|
||||
}
|
||||
snprintf(core_path, path_len, "%s/" CELL_SHOP_DIR "/" CELL_CORE_DIR, home);
|
||||
|
||||
// Check if the core directory exists
|
||||
struct stat st;
|
||||
if (stat(core_path, &st) != 0 || !S_ISDIR(st.st_mode)) {
|
||||
printf("ERROR: Cell shop not found at %s/" CELL_SHOP_DIR "\n", home);
|
||||
printf("Run 'cell install' to set up the cell environment.\n");
|
||||
free(core_path);
|
||||
core_path = NULL;
|
||||
printf("ERROR: Core not found at %s\n", core_path);
|
||||
return 0;
|
||||
}
|
||||
|
||||
@@ -151,13 +179,13 @@ void script_startup(cell_rt *prt)
|
||||
cell_rt *crt = JS_GetContextOpaque(js);
|
||||
JS_FreeValue(js, js_blob_use(js));
|
||||
|
||||
// Load pre-compiled bootstrap bytecode (.mach), fall back to AST JSON
|
||||
// Load pre-compiled bootstrap (.cm.mach or .cm.mcode)
|
||||
size_t boot_size;
|
||||
int boot_is_bin = 1;
|
||||
char *boot_data = load_core_file(BOOTSTRAP_MACH, &boot_size);
|
||||
int boot_is_mcode = 0;
|
||||
if (!boot_data) {
|
||||
boot_is_bin = 0;
|
||||
boot_data = load_core_file(BOOTSTRAP_AST, &boot_size);
|
||||
boot_data = load_core_file(BOOTSTRAP_MCODE, &boot_size);
|
||||
boot_is_mcode = 1;
|
||||
}
|
||||
if (!boot_data) {
|
||||
printf("ERROR: Could not load bootstrap from %s!\n", core_path);
|
||||
@@ -183,26 +211,23 @@ void script_startup(cell_rt *prt)
|
||||
JS_SetPropertyStr(js, hidden_env, "init", JS_NULL);
|
||||
}
|
||||
|
||||
if (core_path) {
|
||||
// Set args to null for actor spawn (not CLI mode)
|
||||
JS_SetPropertyStr(js, hidden_env, "args", JS_NULL);
|
||||
|
||||
if (core_path)
|
||||
JS_SetPropertyStr(js, hidden_env, "core_path", JS_NewString(js, core_path));
|
||||
}
|
||||
JS_SetPropertyStr(js, hidden_env, "shop_path",
|
||||
shop_path ? JS_NewString(js, shop_path) : JS_NULL);
|
||||
|
||||
// Stone the environment
|
||||
hidden_env = JS_Stone(js, hidden_env);
|
||||
|
||||
// Run through MACH VM
|
||||
crt->state = ACTOR_RUNNING;
|
||||
JSValue v;
|
||||
if (boot_is_bin) {
|
||||
v = JS_RunMachBin(js, (const uint8_t *)boot_data, boot_size, hidden_env);
|
||||
free(boot_data);
|
||||
} else {
|
||||
cJSON *ast = cJSON_Parse(boot_data);
|
||||
free(boot_data);
|
||||
if (!ast) { printf("ERROR: Failed to parse bootstrap AST\n"); return; }
|
||||
v = JS_RunMachTree(js, ast, hidden_env);
|
||||
cJSON_Delete(ast);
|
||||
}
|
||||
JSValue v = boot_is_mcode
|
||||
? JS_RunMachMcode(js, boot_data, boot_size, hidden_env)
|
||||
: JS_RunMachBin(js, (const uint8_t *)boot_data, boot_size, hidden_env);
|
||||
free(boot_data);
|
||||
uncaught_exception(js, v);
|
||||
crt->state = ACTOR_IDLE;
|
||||
set_actor_state(crt);
|
||||
@@ -252,12 +277,17 @@ static int run_test_suite(size_t heap_size)
|
||||
|
||||
static void print_usage(const char *prog)
|
||||
{
|
||||
printf("Usage: %s [options] <script> [args...]\n\n", prog);
|
||||
printf("Run a cell script (.ce actor or .cm module).\n\n");
|
||||
printf("Usage: %s [options] <program> [args...]\n\n", prog);
|
||||
printf("Run a cell program (.ce actor).\n\n");
|
||||
printf("Options:\n");
|
||||
printf(" --mcode <script> [args] Run through mcode compilation pipeline\n");
|
||||
printf(" --core <path> Set core path directly (overrides CELL_CORE)\n");
|
||||
printf(" --shop <path> Set shop path (overrides CELL_SHOP)\n");
|
||||
printf(" --dev Dev mode (shop=.cell, core=.)\n");
|
||||
printf(" --test [heap_size] Run C test suite\n");
|
||||
printf(" -h, --help Show this help message\n");
|
||||
printf("\nEnvironment:\n");
|
||||
printf(" CELL_CORE Core path (default: <shop>/packages/core)\n");
|
||||
printf(" CELL_SHOP Shop path (default: ~/.cell)\n");
|
||||
printf("\nRecompile after changes: make\n");
|
||||
printf("Bootstrap from scratch: make bootstrap\n");
|
||||
}
|
||||
@@ -284,46 +314,119 @@ int cell_init(int argc, char **argv)
|
||||
}
|
||||
|
||||
/* Default: run script through bootstrap pipeline */
|
||||
int use_mcode = 0;
|
||||
int arg_start = 1;
|
||||
if (argc >= 3 && strcmp(argv[1], "--mcode") == 0) {
|
||||
use_mcode = 1;
|
||||
arg_start = 2;
|
||||
const char *shop_override = NULL;
|
||||
const char *core_override = NULL;
|
||||
|
||||
// Parse flags (order-independent)
|
||||
while (arg_start < argc && argv[arg_start][0] == '-') {
|
||||
if (strcmp(argv[arg_start], "--shop") == 0) {
|
||||
if (arg_start + 1 >= argc) {
|
||||
printf("ERROR: --shop requires a path argument\n");
|
||||
return 1;
|
||||
}
|
||||
shop_override = argv[arg_start + 1];
|
||||
arg_start += 2;
|
||||
} else if (strcmp(argv[arg_start], "--core") == 0) {
|
||||
if (arg_start + 1 >= argc) {
|
||||
printf("ERROR: --core requires a path argument\n");
|
||||
return 1;
|
||||
}
|
||||
core_override = argv[arg_start + 1];
|
||||
arg_start += 2;
|
||||
} else if (strcmp(argv[arg_start], "--dev") == 0) {
|
||||
shop_override = ".cell";
|
||||
core_override = ".";
|
||||
mkdir(".cell", 0755);
|
||||
mkdir(".cell/build", 0755);
|
||||
mkdir(".cell/packages", 0755);
|
||||
/* Ensure .cell/packages/core -> . symlink exists */
|
||||
struct stat lst;
|
||||
if (lstat(".cell/packages/core", &lst) != 0)
|
||||
symlink("../..", ".cell/packages/core");
|
||||
arg_start++;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!find_cell_shop()) return 1;
|
||||
if (arg_start >= argc) {
|
||||
print_usage(argv[0]);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (!find_cell_shop(shop_override, core_override)) return 1;
|
||||
|
||||
actor_initialize();
|
||||
|
||||
size_t boot_size;
|
||||
int boot_is_bin = 1;
|
||||
char *boot_data = load_core_file(BOOTSTRAP_MACH, &boot_size);
|
||||
int boot_is_mcode = 0;
|
||||
if (!boot_data) {
|
||||
boot_is_bin = 0;
|
||||
boot_data = load_core_file(BOOTSTRAP_AST, &boot_size);
|
||||
boot_data = load_core_file(BOOTSTRAP_MCODE, &boot_size);
|
||||
boot_is_mcode = 1;
|
||||
}
|
||||
if (!boot_data) {
|
||||
printf("ERROR: Could not load bootstrap from %s\n", core_path);
|
||||
return 1;
|
||||
}
|
||||
|
||||
JSRuntime *rt = JS_NewRuntime();
|
||||
if (!rt) {
|
||||
g_runtime = JS_NewRuntime();
|
||||
if (!g_runtime) {
|
||||
printf("Failed to create JS runtime\n");
|
||||
free(boot_data);
|
||||
return 1;
|
||||
}
|
||||
JSContext *ctx = JS_NewContextWithHeapSize(rt, 16 * 1024 * 1024);
|
||||
JSContext *ctx = JS_NewContextWithHeapSize(g_runtime, 1024 * 1024);
|
||||
if (!ctx) {
|
||||
printf("Failed to create JS context\n");
|
||||
free(boot_data); JS_FreeRuntime(rt);
|
||||
free(boot_data); JS_FreeRuntime(g_runtime);
|
||||
return 1;
|
||||
}
|
||||
|
||||
/* Create a cell_rt for the CLI context so JS-C bridge functions work */
|
||||
cell_rt *cli_rt = calloc(sizeof(*cli_rt), 1);
|
||||
cli_rt->mutex = malloc(sizeof(pthread_mutex_t));
|
||||
pthread_mutexattr_t mattr;
|
||||
pthread_mutexattr_init(&mattr);
|
||||
pthread_mutexattr_settype(&mattr, PTHREAD_MUTEX_RECURSIVE);
|
||||
pthread_mutex_init(cli_rt->mutex, &mattr);
|
||||
cli_rt->msg_mutex = malloc(sizeof(pthread_mutex_t));
|
||||
pthread_mutex_init(cli_rt->msg_mutex, &mattr);
|
||||
pthread_mutexattr_destroy(&mattr);
|
||||
|
||||
cli_rt->context = ctx;
|
||||
JS_SetContextOpaque(ctx, cli_rt);
|
||||
JS_SetInterruptHandler(ctx, (JSInterruptHandler *)actor_interrupt_cb, cli_rt);
|
||||
|
||||
JS_AddGCRef(ctx, &cli_rt->idx_buffer_ref);
|
||||
JS_AddGCRef(ctx, &cli_rt->on_exception_ref);
|
||||
JS_AddGCRef(ctx, &cli_rt->message_handle_ref);
|
||||
JS_AddGCRef(ctx, &cli_rt->unneeded_ref);
|
||||
JS_AddGCRef(ctx, &cli_rt->actor_sym_ref);
|
||||
cli_rt->idx_buffer_ref.val = JS_NULL;
|
||||
cli_rt->on_exception_ref.val = JS_NULL;
|
||||
cli_rt->message_handle_ref.val = JS_NULL;
|
||||
cli_rt->unneeded_ref.val = JS_NULL;
|
||||
cli_rt->actor_sym_ref.val = JS_NewObject(ctx);
|
||||
|
||||
root_cell = cli_rt;
|
||||
|
||||
JS_FreeValue(ctx, js_blob_use(ctx));
|
||||
|
||||
JSValue hidden_env = JS_NewObject(ctx);
|
||||
JS_SetPropertyStr(ctx, hidden_env, "os", js_os_use(ctx));
|
||||
JS_SetPropertyStr(ctx, hidden_env, "core_path", JS_NewString(ctx, core_path));
|
||||
JS_SetPropertyStr(ctx, hidden_env, "use_mcode", JS_NewBool(ctx, use_mcode));
|
||||
JS_SetPropertyStr(ctx, hidden_env, "shop_path",
|
||||
shop_path ? JS_NewString(ctx, shop_path) : JS_NULL);
|
||||
/* TODO: remove after next 'make regen' — old bootstrap.mach reads these */
|
||||
JS_SetPropertyStr(ctx, hidden_env, "emit_qbe", JS_FALSE);
|
||||
JS_SetPropertyStr(ctx, hidden_env, "dump_mach", JS_FALSE);
|
||||
JS_SetPropertyStr(ctx, hidden_env, "actorsym", JS_DupValue(ctx, cli_rt->actor_sym_ref.val));
|
||||
JS_SetPropertyStr(ctx, hidden_env, "json", js_json_use(ctx));
|
||||
JS_SetPropertyStr(ctx, hidden_env, "nota", js_nota_use(ctx));
|
||||
JS_SetPropertyStr(ctx, hidden_env, "wota", js_wota_use(ctx));
|
||||
JS_SetPropertyStr(ctx, hidden_env, "init", JS_NULL);
|
||||
JSValue args_arr = JS_NewArray(ctx);
|
||||
for (int i = arg_start; i < argc; i++) {
|
||||
JSValue str = JS_NewString(ctx, argv[i]);
|
||||
@@ -332,17 +435,10 @@ int cell_init(int argc, char **argv)
|
||||
JS_SetPropertyStr(ctx, hidden_env, "args", args_arr);
|
||||
hidden_env = JS_Stone(ctx, hidden_env);
|
||||
|
||||
JSValue result;
|
||||
if (boot_is_bin) {
|
||||
result = JS_RunMachBin(ctx, (const uint8_t *)boot_data, boot_size, hidden_env);
|
||||
free(boot_data);
|
||||
} else {
|
||||
cJSON *ast = cJSON_Parse(boot_data);
|
||||
free(boot_data);
|
||||
if (!ast) { printf("Failed to parse bootstrap AST\n"); JS_FreeContext(ctx); JS_FreeRuntime(rt); return 1; }
|
||||
result = JS_RunMachTree(ctx, ast, hidden_env);
|
||||
cJSON_Delete(ast);
|
||||
}
|
||||
JSValue result = boot_is_mcode
|
||||
? JS_RunMachMcode(ctx, boot_data, boot_size, hidden_env)
|
||||
: JS_RunMachBin(ctx, (const uint8_t *)boot_data, boot_size, hidden_env);
|
||||
free(boot_data);
|
||||
|
||||
int exit_code = 0;
|
||||
if (JS_IsException(result)) {
|
||||
@@ -356,8 +452,33 @@ int cell_init(int argc, char **argv)
|
||||
}
|
||||
}
|
||||
|
||||
if (scheduler_actor_count() > 0) {
|
||||
scheduler_enable_quiescence();
|
||||
actor_loop();
|
||||
exit_handler();
|
||||
exit(0);
|
||||
}
|
||||
|
||||
/* No actors spawned — clean up CLI context */
|
||||
JS_DeleteGCRef(ctx, &cli_rt->idx_buffer_ref);
|
||||
JS_DeleteGCRef(ctx, &cli_rt->on_exception_ref);
|
||||
JS_DeleteGCRef(ctx, &cli_rt->message_handle_ref);
|
||||
JS_DeleteGCRef(ctx, &cli_rt->unneeded_ref);
|
||||
JS_DeleteGCRef(ctx, &cli_rt->actor_sym_ref);
|
||||
JS_SetInterruptHandler(ctx, NULL, NULL);
|
||||
|
||||
pthread_mutex_destroy(cli_rt->mutex);
|
||||
free(cli_rt->mutex);
|
||||
pthread_mutex_destroy(cli_rt->msg_mutex);
|
||||
free(cli_rt->msg_mutex);
|
||||
free(cli_rt);
|
||||
root_cell = NULL;
|
||||
|
||||
JS_FreeContext(ctx);
|
||||
JS_FreeRuntime(rt);
|
||||
JS_FreeRuntime(g_runtime);
|
||||
g_runtime = NULL;
|
||||
|
||||
exit_handler();
|
||||
return exit_code;
|
||||
}
|
||||
|
||||
|
||||
@@ -78,8 +78,7 @@ void cell_trace_sethook(cell_hook);
|
||||
#define QJSCLASS(TYPE, ...)\
|
||||
JSClassID js_##TYPE##_id;\
|
||||
static void js_##TYPE##_finalizer(JSRuntime *rt, JSValue val){\
|
||||
JSContext *js = JS_GetContext(rt);\
|
||||
TYPE *n = JS_GetOpaque2(js, val, js_##TYPE##_id); \
|
||||
TYPE *n = JS_GetOpaque(val, js_##TYPE##_id); \
|
||||
TYPE##_free(rt,n);}\
|
||||
static JSClassDef js_##TYPE##_class = {\
|
||||
.class_name = #TYPE,\
|
||||
|
||||
@@ -54,6 +54,7 @@ typedef struct cell_rt {
|
||||
double ar_secs; // time for unneeded
|
||||
|
||||
int disrupt;
|
||||
int is_quiescent; // tracked by scheduler for quiescence detection
|
||||
int main_thread_only;
|
||||
int affinity;
|
||||
|
||||
@@ -81,6 +82,8 @@ int actor_interrupt_cb(JSRuntime *rt, cell_rt *crt);
|
||||
void actor_loop();
|
||||
void actor_initialize(void);
|
||||
void actor_free(cell_rt *actor);
|
||||
int scheduler_actor_count(void);
|
||||
void scheduler_enable_quiescence(void);
|
||||
|
||||
uint64_t cell_ns();
|
||||
void cell_sleep(double seconds);
|
||||
|
||||
@@ -1327,9 +1327,6 @@ static int re_parse_nested_class(REParseState *s, REStringList *cr, const uint8_
|
||||
REStringList cr1_s, *cr1 = &cr1_s;
|
||||
BOOL invert, is_first;
|
||||
|
||||
if (lre_check_stack_overflow(s->opaque, 0))
|
||||
return re_parse_error(s, "stack overflow");
|
||||
|
||||
re_string_list_init(s, cr);
|
||||
p = *pp;
|
||||
p++; /* skip '[' */
|
||||
@@ -2356,9 +2353,6 @@ static int re_parse_disjunction(REParseState *s, BOOL is_backward_dir)
|
||||
{
|
||||
int start, len, pos;
|
||||
|
||||
if (lre_check_stack_overflow(s->opaque, 0))
|
||||
return re_parse_error(s, "stack overflow");
|
||||
|
||||
start = s->byte_code.size;
|
||||
if (re_parse_alternative(s, is_backward_dir))
|
||||
return -1;
|
||||
@@ -3205,11 +3199,6 @@ const char *lre_get_groupnames(const uint8_t *bc_buf)
|
||||
|
||||
#ifdef TEST
|
||||
|
||||
BOOL lre_check_stack_overflow(void *opaque, size_t alloca_size)
|
||||
{
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
void *lre_realloc(void *opaque, void *ptr, size_t size)
|
||||
{
|
||||
return realloc(ptr, size);
|
||||
|
||||
@@ -52,8 +52,6 @@ int lre_exec(uint8_t **capture,
|
||||
|
||||
int lre_parse_escape(const uint8_t **pp, int allow_utf16);
|
||||
|
||||
/* must be provided by the user, return non zero if overflow */
|
||||
int lre_check_stack_overflow(void *opaque, size_t alloca_size);
|
||||
/* must be provided by the user, return non zero if time out */
|
||||
int lre_check_timeout(void *opaque);
|
||||
void *lre_realloc(void *opaque, void *ptr, size_t size);
|
||||
|
||||
3588
source/mach.c
3588
source/mach.c
File diff suppressed because it is too large
Load Diff
1824
source/mcode.c
1824
source/mcode.c
File diff suppressed because it is too large
Load Diff
@@ -192,3 +192,258 @@ JSValue qbe_shift_shr(JSContext *ctx, JSValue a, JSValue b) {
|
||||
JS_ToInt32(ctx, &ib, b);
|
||||
return JS_NewInt32(ctx, (uint32_t)ia >> (ib & 31));
|
||||
}
|
||||
|
||||
/* ============================================================
|
||||
cell_rt_* — Runtime support for QBE-compiled code
|
||||
============================================================ */
|
||||
|
||||
#include <dlfcn.h>
|
||||
#include <stdio.h>
|
||||
|
||||
/* --- Property access --- */
|
||||
|
||||
JSValue cell_rt_load_field(JSContext *ctx, JSValue obj, const char *name) {
|
||||
return JS_GetPropertyStr(ctx, obj, name);
|
||||
}
|
||||
|
||||
void cell_rt_store_field(JSContext *ctx, JSValue val, JSValue obj,
|
||||
const char *name) {
|
||||
JS_SetPropertyStr(ctx, obj, name, val);
|
||||
}
|
||||
|
||||
JSValue cell_rt_load_dynamic(JSContext *ctx, JSValue obj, JSValue key) {
|
||||
if (JS_IsInt(key))
|
||||
return JS_GetPropertyNumber(ctx, obj, (uint32_t)JS_VALUE_GET_INT(key));
|
||||
return JS_GetProperty(ctx, obj, key);
|
||||
}
|
||||
|
||||
void cell_rt_store_dynamic(JSContext *ctx, JSValue val, JSValue obj,
|
||||
JSValue key) {
|
||||
if (JS_IsInt(key))
|
||||
JS_SetPropertyNumber(ctx, obj, (uint32_t)JS_VALUE_GET_INT(key), val);
|
||||
else
|
||||
JS_SetProperty(ctx, obj, key, val);
|
||||
}
|
||||
|
||||
JSValue cell_rt_load_index(JSContext *ctx, JSValue arr, JSValue idx) {
|
||||
if (JS_IsInt(idx))
|
||||
return JS_GetPropertyNumber(ctx, arr, (uint32_t)JS_VALUE_GET_INT(idx));
|
||||
return JS_GetProperty(ctx, arr, idx);
|
||||
}
|
||||
|
||||
void cell_rt_store_index(JSContext *ctx, JSValue val, JSValue arr,
|
||||
JSValue idx) {
|
||||
if (JS_IsInt(idx))
|
||||
JS_SetPropertyNumber(ctx, arr, (uint32_t)JS_VALUE_GET_INT(idx), val);
|
||||
else
|
||||
JS_SetProperty(ctx, arr, idx, val);
|
||||
}
|
||||
|
||||
/* --- Intrinsic/global lookup --- */
|
||||
|
||||
JSValue cell_rt_get_intrinsic(JSContext *ctx, const char *name) {
|
||||
return JS_GetPropertyStr(ctx, ctx->global_obj, name);
|
||||
}
|
||||
|
||||
/* --- Closure access ---
|
||||
Slot 511 in each frame stores a pointer to the enclosing frame.
|
||||
Walking depth levels up the chain gives the target frame. */
|
||||
|
||||
#define QBE_FRAME_OUTER_SLOT 511
|
||||
|
||||
JSValue cell_rt_get_closure(JSContext *ctx, void *fp, int64_t depth,
|
||||
int64_t slot) {
|
||||
JSValue *frame = (JSValue *)fp;
|
||||
for (int64_t d = 0; d < depth; d++) {
|
||||
void *outer = (void *)(uintptr_t)frame[QBE_FRAME_OUTER_SLOT];
|
||||
if (!outer) return JS_NULL;
|
||||
frame = (JSValue *)outer;
|
||||
}
|
||||
return frame[slot];
|
||||
}
|
||||
|
||||
void cell_rt_put_closure(JSContext *ctx, void *fp, JSValue val, int64_t depth,
|
||||
int64_t slot) {
|
||||
JSValue *frame = (JSValue *)fp;
|
||||
for (int64_t d = 0; d < depth; d++) {
|
||||
void *outer = (void *)(uintptr_t)frame[QBE_FRAME_OUTER_SLOT];
|
||||
if (!outer) return;
|
||||
frame = (JSValue *)outer;
|
||||
}
|
||||
frame[slot] = val;
|
||||
}
|
||||
|
||||
/* --- Function creation and calling --- */
|
||||
|
||||
typedef JSValue (*cell_compiled_fn)(JSContext *ctx, void *fp);
|
||||
|
||||
/* Table mapping fn_idx → outer_fp at creation time.
|
||||
Valid for single-threaded, non-recursive closure patterns. */
|
||||
#define MAX_QBE_FUNCTIONS 256
|
||||
static void *g_outer_fp[MAX_QBE_FUNCTIONS];
|
||||
|
||||
static JSValue cell_fn_trampoline(JSContext *ctx, JSValue this_val,
|
||||
int argc, JSValue *argv, int magic) {
|
||||
char name[64];
|
||||
snprintf(name, sizeof(name), "cell_fn_%d", magic);
|
||||
|
||||
cell_compiled_fn fn = (cell_compiled_fn)dlsym(RTLD_DEFAULT, name);
|
||||
if (!fn)
|
||||
return JS_ThrowTypeError(ctx, "native function %s not found", name);
|
||||
|
||||
/* Allocate frame: slot 0 = this, slots 1..argc = args */
|
||||
JSValue frame[512];
|
||||
memset(frame, 0, sizeof(frame));
|
||||
frame[0] = this_val;
|
||||
for (int i = 0; i < argc && i < 510; i++)
|
||||
frame[1 + i] = argv[i];
|
||||
|
||||
/* Link to outer frame for closure access */
|
||||
if (magic >= 0 && magic < MAX_QBE_FUNCTIONS)
|
||||
frame[QBE_FRAME_OUTER_SLOT] = (JSValue)(uintptr_t)g_outer_fp[magic];
|
||||
|
||||
return fn(ctx, frame);
|
||||
}
|
||||
|
||||
JSValue cell_rt_make_function(JSContext *ctx, int64_t fn_idx, void *outer_fp) {
|
||||
if (fn_idx >= 0 && fn_idx < MAX_QBE_FUNCTIONS)
|
||||
g_outer_fp[fn_idx] = outer_fp;
|
||||
return JS_NewCFunction2(ctx, (JSCFunction *)cell_fn_trampoline, "native_fn",
|
||||
255, JS_CFUNC_generic_magic, (int)fn_idx);
|
||||
}
|
||||
|
||||
/* --- Frame-based function calling --- */
|
||||
|
||||
JSValue cell_rt_frame(JSContext *ctx, JSValue fn, int64_t nargs) {
|
||||
if (!JS_IsFunction(fn)) {
|
||||
JS_ThrowTypeError(ctx, "not a function");
|
||||
return JS_EXCEPTION;
|
||||
}
|
||||
int nr_slots = (int)nargs + 2;
|
||||
JSFrameRegister *new_frame = alloc_frame_register(ctx, nr_slots);
|
||||
if (!new_frame) return JS_EXCEPTION;
|
||||
new_frame->function = fn;
|
||||
return JS_MKPTR(new_frame);
|
||||
}
|
||||
|
||||
void cell_rt_setarg(JSValue frame_val, int64_t idx, JSValue val) {
|
||||
JSFrameRegister *fr = (JSFrameRegister *)JS_VALUE_GET_PTR(frame_val);
|
||||
fr->slots[idx] = val;
|
||||
}
|
||||
|
||||
JSValue cell_rt_invoke(JSContext *ctx, JSValue frame_val) {
|
||||
JSFrameRegister *fr = (JSFrameRegister *)JS_VALUE_GET_PTR(frame_val);
|
||||
int nr_slots = (int)objhdr_cap56(fr->header);
|
||||
int c_argc = (nr_slots >= 2) ? nr_slots - 2 : 0;
|
||||
|
||||
/* Copy args to C stack */
|
||||
JSValue args[c_argc > 0 ? c_argc : 1];
|
||||
for (int i = 0; i < c_argc; i++)
|
||||
args[i] = fr->slots[i + 1];
|
||||
|
||||
JSValue result = JS_Call(ctx, fr->function, fr->slots[0], c_argc, args);
|
||||
if (JS_IsException(result))
|
||||
return JS_EXCEPTION;
|
||||
return result;
|
||||
}
|
||||
|
||||
JSValue cell_rt_goframe(JSContext *ctx, JSValue fn, int64_t nargs) {
|
||||
return cell_rt_frame(ctx, fn, nargs);
|
||||
}
|
||||
|
||||
JSValue cell_rt_goinvoke(JSContext *ctx, JSValue frame_val) {
|
||||
return cell_rt_invoke(ctx, frame_val);
|
||||
}
|
||||
|
||||
/* --- Array push/pop --- */
|
||||
|
||||
void cell_rt_push(JSContext *ctx, JSValue arr, JSValue val) {
|
||||
JS_ArrayPush(ctx, &arr, val);
|
||||
}
|
||||
|
||||
JSValue cell_rt_pop(JSContext *ctx, JSValue arr) {
|
||||
return JS_ArrayPop(ctx, arr);
|
||||
}
|
||||
|
||||
/* --- Delete --- */
|
||||
|
||||
JSValue cell_rt_delete(JSContext *ctx, JSValue obj, JSValue key) {
|
||||
int ret = JS_DeleteProperty(ctx, obj, key);
|
||||
return JS_NewBool(ctx, ret >= 0);
|
||||
}
|
||||
|
||||
/* --- Typeof --- */
|
||||
|
||||
JSValue cell_rt_typeof(JSContext *ctx, JSValue val) {
|
||||
if (JS_IsNull(val)) return JS_NewString(ctx, "null");
|
||||
if (JS_IsInt(val) || JS_IsNumber(val)) return JS_NewString(ctx, "number");
|
||||
if (JS_IsBool(val)) return JS_NewString(ctx, "logical");
|
||||
if (JS_IsText(val)) return JS_NewString(ctx, "text");
|
||||
if (JS_IsFunction(val)) return JS_NewString(ctx, "function");
|
||||
if (JS_IsArray(val)) return JS_NewString(ctx, "array");
|
||||
if (JS_IsRecord(val)) return JS_NewString(ctx, "object");
|
||||
return JS_NewString(ctx, "unknown");
|
||||
}
|
||||
|
||||
/* --- Text comparison stubs (called from QBE type-dispatch branches) --- */
|
||||
|
||||
JSValue cell_rt_lt_text(JSContext *ctx, JSValue a, JSValue b) {
|
||||
const char *sa = JS_ToCString(ctx, a);
|
||||
const char *sb = JS_ToCString(ctx, b);
|
||||
int r = (sa && sb) ? strcmp(sa, sb) < 0 : 0;
|
||||
return JS_NewBool(ctx, r);
|
||||
}
|
||||
|
||||
JSValue cell_rt_gt_text(JSContext *ctx, JSValue a, JSValue b) {
|
||||
const char *sa = JS_ToCString(ctx, a);
|
||||
const char *sb = JS_ToCString(ctx, b);
|
||||
int r = (sa && sb) ? strcmp(sa, sb) > 0 : 0;
|
||||
return JS_NewBool(ctx, r);
|
||||
}
|
||||
|
||||
JSValue cell_rt_le_text(JSContext *ctx, JSValue a, JSValue b) {
|
||||
const char *sa = JS_ToCString(ctx, a);
|
||||
const char *sb = JS_ToCString(ctx, b);
|
||||
int r = (sa && sb) ? strcmp(sa, sb) <= 0 : 0;
|
||||
return JS_NewBool(ctx, r);
|
||||
}
|
||||
|
||||
JSValue cell_rt_ge_text(JSContext *ctx, JSValue a, JSValue b) {
|
||||
const char *sa = JS_ToCString(ctx, a);
|
||||
const char *sb = JS_ToCString(ctx, b);
|
||||
int r = (sa && sb) ? strcmp(sa, sb) >= 0 : 0;
|
||||
return JS_NewBool(ctx, r);
|
||||
}
|
||||
|
||||
JSValue cell_rt_eq_tol(JSContext *ctx, JSValue a, JSValue b) {
|
||||
return JS_NewBool(ctx, a == b);
|
||||
}
|
||||
|
||||
JSValue cell_rt_ne_tol(JSContext *ctx, JSValue a, JSValue b) {
|
||||
return JS_NewBool(ctx, a != b);
|
||||
}
|
||||
|
||||
/* --- Disruption --- */
|
||||
|
||||
void cell_rt_disrupt(JSContext *ctx) {
|
||||
JS_ThrowTypeError(ctx, "type error in native code");
|
||||
}
|
||||
|
||||
/* --- Module entry point ---
|
||||
Called as symbol(ctx) by os.dylib_symbol. Looks up cell_main
|
||||
in the loaded dylib, builds a heap-allocated frame (so closures
|
||||
can reference it after the module returns), and runs the module body. */
|
||||
|
||||
JSValue cell_rt_module_entry(JSContext *ctx) {
|
||||
cell_compiled_fn fn = (cell_compiled_fn)dlsym(RTLD_DEFAULT, "cell_main");
|
||||
if (!fn)
|
||||
return JS_ThrowTypeError(ctx, "cell_main not found in loaded dylib");
|
||||
|
||||
/* Heap-allocate so closures created in cell_main can reference
|
||||
this frame after the module entry returns. */
|
||||
JSValue *frame = calloc(512, sizeof(JSValue));
|
||||
if (!frame)
|
||||
return JS_ThrowTypeError(ctx, "frame allocation failed");
|
||||
|
||||
return fn(ctx, frame);
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,296 +0,0 @@
|
||||
/*
|
||||
* QuickJS opcode definitions
|
||||
*
|
||||
* Copyright (c) 2017-2018 Fabrice Bellard
|
||||
* Copyright (c) 2017-2018 Charlie Gordon
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
||||
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
* THE SOFTWARE.
|
||||
*/
|
||||
|
||||
#ifdef FMT
|
||||
FMT(none)
|
||||
FMT(none_int)
|
||||
FMT(none_loc)
|
||||
FMT(none_arg)
|
||||
FMT(u8)
|
||||
FMT(i8)
|
||||
FMT(loc8)
|
||||
FMT(const8)
|
||||
FMT(label8)
|
||||
FMT(u16)
|
||||
FMT(i16)
|
||||
FMT(label16)
|
||||
FMT(npop)
|
||||
FMT(npopx)
|
||||
FMT(npop_u16)
|
||||
FMT(loc)
|
||||
FMT(arg)
|
||||
FMT(u32)
|
||||
FMT(i32)
|
||||
FMT(const)
|
||||
FMT(label)
|
||||
FMT(label_u16)
|
||||
FMT(key)
|
||||
FMT(key_u8)
|
||||
FMT(key_u16)
|
||||
FMT(key_label_u16)
|
||||
FMT(u8_u16) /* 1 byte + 2 bytes for upvalue access */
|
||||
#undef FMT
|
||||
#endif /* FMT */
|
||||
|
||||
#ifdef DEF
|
||||
|
||||
#ifndef def
|
||||
#define def(id, size, n_pop, n_push, f) DEF(id, size, n_pop, n_push, f)
|
||||
#endif
|
||||
|
||||
DEF(invalid, 1, 0, 0, none) /* never emitted */
|
||||
|
||||
/* push values */
|
||||
DEF( push_i32, 5, 0, 1, i32)
|
||||
DEF( push_const, 5, 0, 1, const)
|
||||
DEF( fclosure, 5, 0, 1, const) /* must follow push_const */
|
||||
DEF( null, 1, 0, 1, none)
|
||||
DEF( push_this, 1, 0, 1, none) /* only used at the start of a function */
|
||||
DEF( push_false, 1, 0, 1, none)
|
||||
DEF( push_true, 1, 0, 1, none)
|
||||
DEF( object, 1, 0, 1, none)
|
||||
DEF( special_object, 2, 0, 1, u8) /* only used at the start of a function */
|
||||
|
||||
DEF( drop, 1, 1, 0, none) /* a -> */
|
||||
DEF( nip, 1, 2, 1, none) /* a b -> b */
|
||||
DEF( nip1, 1, 3, 2, none) /* a b c -> b c */
|
||||
DEF( dup, 1, 1, 2, none) /* a -> a a */
|
||||
DEF( dup1, 1, 2, 3, none) /* a b -> a a b */
|
||||
DEF( dup2, 1, 2, 4, none) /* a b -> a b a b */
|
||||
DEF( dup3, 1, 3, 6, none) /* a b c -> a b c a b c */
|
||||
DEF( insert2, 1, 2, 3, none) /* obj a -> a obj a (dup_x1) */
|
||||
DEF( insert3, 1, 3, 4, none) /* obj prop a -> a obj prop a (dup_x2) */
|
||||
DEF( insert4, 1, 4, 5, none) /* this obj prop a -> a this obj prop a */
|
||||
DEF( perm3, 1, 3, 3, none) /* obj a b -> a obj b */
|
||||
DEF( perm4, 1, 4, 4, none) /* obj prop a b -> a obj prop b */
|
||||
DEF( perm5, 1, 5, 5, none) /* this obj prop a b -> a this obj prop b */
|
||||
DEF( swap, 1, 2, 2, none) /* a b -> b a */
|
||||
DEF( swap2, 1, 4, 4, none) /* a b c d -> c d a b */
|
||||
DEF( rot3l, 1, 3, 3, none) /* x a b -> a b x */
|
||||
DEF( rot3r, 1, 3, 3, none) /* a b x -> x a b */
|
||||
DEF( rot4l, 1, 4, 4, none) /* x a b c -> a b c x */
|
||||
DEF( rot5l, 1, 5, 5, none) /* x a b c d -> a b c d x */
|
||||
|
||||
DEF( call, 3, 1, 1, npop) /* arguments are not counted in n_pop */
|
||||
DEF( tail_call, 3, 1, 0, npop) /* arguments are not counted in n_pop */
|
||||
DEF( call_method, 3, 2, 1, npop) /* arguments are not counted in n_pop */
|
||||
DEF(tail_call_method, 3, 2, 0, npop) /* arguments are not counted in n_pop */
|
||||
DEF( array_from, 3, 0, 1, npop) /* arguments are not counted in n_pop */
|
||||
DEF( return, 1, 1, 0, none)
|
||||
DEF( return_undef, 1, 0, 0, none)
|
||||
DEF( throw, 1, 1, 0, none)
|
||||
DEF( throw_error, 6, 0, 0, key_u8)
|
||||
DEF( regexp, 1, 2, 1, none) /* create a RegExp object from the pattern and a
|
||||
bytecode string */
|
||||
|
||||
/* Global variable access - resolved by linker to get/set_global_slot */
|
||||
DEF( check_var, 5, 0, 1, key) /* check if a variable exists - resolved by linker */
|
||||
DEF( get_var_undef, 5, 0, 1, key) /* resolved by linker to get_global_slot */
|
||||
DEF( get_var, 5, 0, 1, key) /* resolved by linker to get_global_slot */
|
||||
DEF( put_var, 5, 1, 0, key) /* resolved by linker to set_global_slot */
|
||||
DEF( put_var_init, 5, 1, 0, key) /* resolved by linker to set_global_slot */
|
||||
DEF( put_var_strict, 5, 2, 0, key) /* resolved by linker to set_global_slot */
|
||||
|
||||
/* Global variable opcodes - resolved by linker to get/set_global_slot */
|
||||
DEF( define_var, 6, 0, 0, key_u8)
|
||||
DEF(check_define_var, 6, 0, 0, key_u8)
|
||||
DEF( define_func, 6, 1, 0, key_u8)
|
||||
DEF( get_field, 5, 1, 1, key)
|
||||
DEF( get_field2, 5, 1, 2, key)
|
||||
DEF( put_field, 5, 2, 0, key)
|
||||
DEF( get_array_el, 1, 2, 1, none)
|
||||
DEF( get_array_el2, 1, 2, 2, none) /* obj prop -> obj value */
|
||||
DEF( get_array_el3, 1, 2, 3, none) /* obj prop -> obj prop1 value */
|
||||
DEF( put_array_el, 1, 3, 0, none)
|
||||
DEF( define_field, 5, 2, 1, key)
|
||||
DEF( set_name, 5, 1, 1, key)
|
||||
DEF(set_name_computed, 1, 2, 2, none)
|
||||
DEF(define_array_el, 1, 3, 2, none)
|
||||
DEF(copy_data_properties, 2, 3, 3, u8)
|
||||
DEF( define_method, 6, 2, 1, key_u8)
|
||||
DEF(define_method_computed, 2, 3, 1, u8) /* must come after define_method */
|
||||
DEF( define_class, 6, 2, 2, key_u8) /* parent ctor -> ctor proto */
|
||||
DEF( define_class_computed, 6, 3, 3, key_u8) /* field_name parent ctor -> field_name ctor proto (class with computed name) */
|
||||
|
||||
DEF( get_loc, 3, 0, 1, loc)
|
||||
DEF( put_loc, 3, 1, 0, loc) /* must come after get_loc */
|
||||
DEF( set_loc, 3, 1, 1, loc) /* must come after put_loc */
|
||||
DEF( get_arg, 3, 0, 1, arg)
|
||||
DEF( put_arg, 3, 1, 0, arg) /* must come after get_arg */
|
||||
DEF( set_arg, 3, 1, 1, arg) /* must come after put_arg */
|
||||
DEF(set_loc_uninitialized, 3, 0, 0, loc)
|
||||
DEF( get_loc_check, 3, 0, 1, loc)
|
||||
DEF( put_loc_check, 3, 1, 0, loc) /* must come after get_loc_check */
|
||||
DEF( put_loc_check_init, 3, 1, 0, loc)
|
||||
DEF(get_loc_checkthis, 3, 0, 1, loc)
|
||||
DEF( if_false, 5, 1, 0, label)
|
||||
DEF( if_true, 5, 1, 0, label) /* must come after if_false */
|
||||
DEF( goto, 5, 0, 0, label) /* must come after if_true */
|
||||
DEF( catch, 5, 0, 1, label)
|
||||
DEF( gosub, 5, 0, 0, label) /* used to execute the finally block */
|
||||
DEF( ret, 1, 1, 0, none) /* used to return from the finally block */
|
||||
DEF( nip_catch, 1, 2, 1, none) /* catch ... a -> a */
|
||||
|
||||
DEF( to_propkey, 1, 1, 1, none)
|
||||
|
||||
/* arithmetic/logic operations */
|
||||
DEF( neg, 1, 1, 1, none)
|
||||
DEF( plus, 1, 1, 1, none)
|
||||
DEF( dec, 1, 1, 1, none)
|
||||
DEF( inc, 1, 1, 1, none)
|
||||
DEF( post_dec, 1, 1, 2, none)
|
||||
DEF( post_inc, 1, 1, 2, none)
|
||||
DEF( dec_loc, 2, 0, 0, loc8)
|
||||
DEF( inc_loc, 2, 0, 0, loc8)
|
||||
DEF( add_loc, 2, 1, 0, loc8)
|
||||
DEF( not, 1, 1, 1, none)
|
||||
DEF( lnot, 1, 1, 1, none)
|
||||
DEF( delete, 1, 2, 1, none)
|
||||
DEF( delete_var, 5, 0, 1, key) /* deprecated - global object is immutable */
|
||||
|
||||
DEF( mul, 1, 2, 1, none)
|
||||
DEF( mul_float, 1, 2, 1, none)
|
||||
DEF( div, 1, 2, 1, none)
|
||||
DEF( div_float, 1, 2, 1, none)
|
||||
DEF( mod, 1, 2, 1, none)
|
||||
DEF( add, 1, 2, 1, none)
|
||||
DEF( add_float, 1, 2, 1, none)
|
||||
DEF( sub, 1, 2, 1, none)
|
||||
DEF( sub_float, 1, 2, 1, none)
|
||||
DEF( pow, 1, 2, 1, none)
|
||||
DEF( shl, 1, 2, 1, none)
|
||||
DEF( sar, 1, 2, 1, none)
|
||||
DEF( shr, 1, 2, 1, none)
|
||||
DEF( lt, 1, 2, 1, none)
|
||||
DEF( lte, 1, 2, 1, none)
|
||||
DEF( gt, 1, 2, 1, none)
|
||||
DEF( gte, 1, 2, 1, none)
|
||||
DEF( in, 1, 2, 1, none)
|
||||
DEF( strict_eq, 1, 2, 1, none)
|
||||
DEF( strict_neq, 1, 2, 1, none)
|
||||
DEF( and, 1, 2, 1, none)
|
||||
DEF( xor, 1, 2, 1, none)
|
||||
DEF( or, 1, 2, 1, none)
|
||||
/* format template - format_string_cpool_idx(u32), expr_count(u16)
|
||||
Note: n_push=2 ensures stack has room for temp [format_str, arr] pair,
|
||||
even though we only leave 1 value (the result) on the stack. */
|
||||
DEF(format_template, 7, 0, 1, npop_u16)
|
||||
|
||||
/* Upvalue access (closures via outer_frame chain) */
|
||||
DEF( get_up, 4, 0, 1, u8_u16) /* depth:u8, slot:u16 -> value */
|
||||
DEF( set_up, 4, 1, 0, u8_u16) /* value, depth:u8, slot:u16 -> */
|
||||
|
||||
/* Name resolution with bytecode patching */
|
||||
DEF( get_name, 5, 0, 1, const) /* cpool_idx -> value, patches itself */
|
||||
DEF( get_env_slot, 3, 0, 1, u16) /* slot -> value (patched from get_name) */
|
||||
DEF( set_env_slot, 3, 1, 0, u16) /* value -> slot (patched from put_var) */
|
||||
DEF(get_global_slot, 3, 0, 1, u16) /* slot -> value (patched from get_var) */
|
||||
DEF(set_global_slot, 3, 1, 0, u16) /* value -> slot (patched from put_var) */
|
||||
|
||||
/* must be the last non short and non temporary opcode */
|
||||
DEF( nop, 1, 0, 0, none)
|
||||
|
||||
/* temporary opcodes: never emitted in the final bytecode */
|
||||
|
||||
def( enter_scope, 3, 0, 0, u16) /* emitted in phase 1, removed in phase 2 */
|
||||
def( leave_scope, 3, 0, 0, u16) /* emitted in phase 1, removed in phase 2 */
|
||||
|
||||
def( label, 5, 0, 0, label) /* emitted in phase 1, removed in phase 3 */
|
||||
|
||||
/* the following opcodes must be in the same order as the 'with_x' and
|
||||
get_var_undef, get_var and put_var opcodes */
|
||||
def(scope_get_var_undef, 7, 0, 1, key_u16) /* emitted in phase 1, removed in phase 2 */
|
||||
def( scope_get_var, 7, 0, 1, key_u16) /* emitted in phase 1, removed in phase 2 */
|
||||
def( scope_put_var, 7, 1, 0, key_u16) /* emitted in phase 1, removed in phase 2 */
|
||||
def(scope_delete_var, 7, 0, 1, key_u16) /* emitted in phase 1, removed in phase 2 */
|
||||
def(scope_put_var_init, 7, 0, 2, key_u16) /* emitted in phase 1, removed in phase 2 */
|
||||
def(scope_get_var_checkthis, 7, 0, 1, key_u16) /* emitted in phase 1, removed in phase 2, only used to return 'this' in derived class constructors */
|
||||
def(get_field_opt_chain, 5, 1, 1, key) /* emitted in phase 1, removed in phase 2 */
|
||||
def(get_array_el_opt_chain, 1, 2, 1, none) /* emitted in phase 1, removed in phase 2 */
|
||||
def( set_class_name, 5, 1, 1, u32) /* emitted in phase 1, removed in phase 2 */
|
||||
|
||||
def( line_num, 5, 0, 0, u32) /* emitted in phase 1, removed in phase 3 */
|
||||
|
||||
#if SHORT_OPCODES
|
||||
DEF( push_minus1, 1, 0, 1, none_int)
|
||||
DEF( push_0, 1, 0, 1, none_int)
|
||||
DEF( push_1, 1, 0, 1, none_int)
|
||||
DEF( push_2, 1, 0, 1, none_int)
|
||||
DEF( push_3, 1, 0, 1, none_int)
|
||||
DEF( push_4, 1, 0, 1, none_int)
|
||||
DEF( push_5, 1, 0, 1, none_int)
|
||||
DEF( push_6, 1, 0, 1, none_int)
|
||||
DEF( push_7, 1, 0, 1, none_int)
|
||||
DEF( push_i8, 2, 0, 1, i8)
|
||||
DEF( push_i16, 3, 0, 1, i16)
|
||||
DEF( push_const8, 2, 0, 1, const8)
|
||||
DEF( fclosure8, 2, 0, 1, const8) /* must follow push_const8 */
|
||||
DEF(push_empty_string, 1, 0, 1, none)
|
||||
|
||||
DEF( get_loc8, 2, 0, 1, loc8)
|
||||
DEF( put_loc8, 2, 1, 0, loc8)
|
||||
DEF( set_loc8, 2, 1, 1, loc8)
|
||||
|
||||
DEF( get_loc0, 1, 0, 1, none_loc)
|
||||
DEF( get_loc1, 1, 0, 1, none_loc)
|
||||
DEF( get_loc2, 1, 0, 1, none_loc)
|
||||
DEF( get_loc3, 1, 0, 1, none_loc)
|
||||
DEF( put_loc0, 1, 1, 0, none_loc)
|
||||
DEF( put_loc1, 1, 1, 0, none_loc)
|
||||
DEF( put_loc2, 1, 1, 0, none_loc)
|
||||
DEF( put_loc3, 1, 1, 0, none_loc)
|
||||
DEF( set_loc0, 1, 1, 1, none_loc)
|
||||
DEF( set_loc1, 1, 1, 1, none_loc)
|
||||
DEF( set_loc2, 1, 1, 1, none_loc)
|
||||
DEF( set_loc3, 1, 1, 1, none_loc)
|
||||
DEF( get_arg0, 1, 0, 1, none_arg)
|
||||
DEF( get_arg1, 1, 0, 1, none_arg)
|
||||
DEF( get_arg2, 1, 0, 1, none_arg)
|
||||
DEF( get_arg3, 1, 0, 1, none_arg)
|
||||
DEF( put_arg0, 1, 1, 0, none_arg)
|
||||
DEF( put_arg1, 1, 1, 0, none_arg)
|
||||
DEF( put_arg2, 1, 1, 0, none_arg)
|
||||
DEF( put_arg3, 1, 1, 0, none_arg)
|
||||
DEF( set_arg0, 1, 1, 1, none_arg)
|
||||
DEF( set_arg1, 1, 1, 1, none_arg)
|
||||
DEF( set_arg2, 1, 1, 1, none_arg)
|
||||
DEF( set_arg3, 1, 1, 1, none_arg)
|
||||
|
||||
DEF( if_false8, 2, 1, 0, label8)
|
||||
DEF( if_true8, 2, 1, 0, label8) /* must come after if_false8 */
|
||||
DEF( goto8, 2, 0, 0, label8) /* must come after if_true8 */
|
||||
DEF( goto16, 3, 0, 0, label16)
|
||||
|
||||
DEF( call0, 1, 1, 1, npopx)
|
||||
DEF( call1, 1, 1, 1, npopx)
|
||||
DEF( call2, 1, 1, 1, npopx)
|
||||
DEF( call3, 1, 1, 1, npopx)
|
||||
|
||||
DEF( is_null, 1, 1, 1, none)
|
||||
#endif
|
||||
|
||||
#undef DEF
|
||||
#undef def
|
||||
#endif /* DEF */
|
||||
455
source/quickjs.h
455
source/quickjs.h
@@ -55,13 +55,13 @@ enum mist_obj_type {
|
||||
OBJ_FORWARD = 7
|
||||
};
|
||||
|
||||
typedef uint64_t JSValue;
|
||||
|
||||
#define OBJHDR_S_BIT 3u
|
||||
#define OBJHDR_P_BIT 4u
|
||||
#define OBJHDR_A_BIT 5u
|
||||
#define OBJHDR_R_BIT 7u
|
||||
|
||||
|
||||
#define OBJHDR_FLAG(bit) ((objhdr_t)1ull << (bit))
|
||||
#define OBJHDR_S_MASK OBJHDR_FLAG (OBJHDR_S_BIT)
|
||||
#define OBJHDR_P_MASK OBJHDR_FLAG (OBJHDR_P_BIT)
|
||||
@@ -92,7 +92,6 @@ static inline int objhdr_s (objhdr_t h) { return (h & OBJHDR_S_MASK) != 0; }
|
||||
typedef struct JSRuntime JSRuntime; // the entire VM
|
||||
typedef struct JSContext JSContext; // Each actor
|
||||
typedef struct JSClass JSClass;
|
||||
typedef struct JSFunctionBytecode JSFunctionBytecode;
|
||||
typedef uint32_t JSClassID;
|
||||
|
||||
/* Forward declaration - JSGCRef moved after JSValue definition */
|
||||
@@ -115,39 +114,24 @@ struct JSGCRef;
|
||||
|
||||
============================================================ */
|
||||
|
||||
#if INTPTR_MAX >= INT64_MAX
|
||||
#define JS_PTR64
|
||||
#define JS_PTR64_DEF(a) a
|
||||
typedef uint64_t JSValue;
|
||||
#define JSW 8
|
||||
#else
|
||||
typedef uint32_t JSValue;
|
||||
#define JSW 4
|
||||
#define JS_PTR64_DEF(a)
|
||||
#endif
|
||||
|
||||
#define JSValue JSValue
|
||||
|
||||
/* JSValueConst is just JSValue (const is not needed in value semantics) */
|
||||
typedef JSValue JSValueConst;
|
||||
|
||||
#define JSW 8
|
||||
|
||||
/* LSB-based tags */
|
||||
enum {
|
||||
/* Primary tags (low bits) */
|
||||
JS_TAG_INT = 0, /* LSB = 0 */
|
||||
JS_TAG_PTR = 1, /* LSB = 01 */
|
||||
#ifdef JS_PTR64
|
||||
JS_TAG_SHORT_FLOAT = 5, /* LSB = 101 */
|
||||
#endif
|
||||
JS_TAG_SPECIAL = 3, /* LSB = 11 */
|
||||
|
||||
/* Special subtypes (5 bits: xxxx11) */
|
||||
JS_TAG_BOOL = 0x03, /* 00011 */
|
||||
JS_TAG_NULL = 0x07, /* 00111 */
|
||||
JS_TAG_EXCEPTION = 0x0F, /* 01111 */
|
||||
JS_TAG_UNINITIALIZED = 0x17, /* 10111 */
|
||||
JS_TAG_STRING_IMM = 0x1B, /* 11011 - immediate ASCII (up to 7 chars) */
|
||||
JS_TAG_CATCH_OFFSET = 0x1F, /* 11111 */
|
||||
JS_TAG_STRING_IMM = 0x0B, /* 01011 - immediate ASCII (up to 7 chars) */
|
||||
};
|
||||
|
||||
/* Compatibility tag aliases for external code */
|
||||
@@ -180,16 +164,10 @@ void JS_DeleteGCRef(JSContext *ctx, JSGCRef *ref);
|
||||
/* Get primary tag (low 2-3 bits) */
|
||||
static inline int
|
||||
JS_VALUE_GET_TAG (JSValue v) {
|
||||
#ifdef JS_PTR64
|
||||
if ((v & 1) == 0) return JS_TAG_INT;
|
||||
if ((v & 7) == JS_TAG_SHORT_FLOAT) return JS_TAG_SHORT_FLOAT;
|
||||
if ((v & 3) == JS_TAG_PTR) return JS_TAG_PTR;
|
||||
return (int)(v & 0x1F); /* special tag */
|
||||
#else
|
||||
if ((v & 1) == 0) return JS_TAG_INT;
|
||||
if ((v & 3) == JS_TAG_PTR) return JS_TAG_PTR;
|
||||
return (int)(v & 0x1F);
|
||||
#endif
|
||||
}
|
||||
|
||||
#define JS_VALUE_GET_NORM_TAG(v) JS_VALUE_GET_TAG (v)
|
||||
@@ -220,7 +198,6 @@ static inline JSValue _JS_MkVal (int tag, int32_t val) {
|
||||
Out of range → JS_NULL
|
||||
============================================================ */
|
||||
|
||||
#ifdef JS_PTR64
|
||||
static inline JSValue
|
||||
__JS_NewFloat64 (JSContext *ctx, double d) {
|
||||
union {
|
||||
@@ -280,17 +257,6 @@ static inline double JS_VALUE_GET_FLOAT64 (JSValue v) {
|
||||
#define JS_TAG_IS_FLOAT64(tag) ((tag) == JS_TAG_SHORT_FLOAT)
|
||||
#define JS_NAN JS_MKVAL (JS_TAG_NULL, 0)
|
||||
|
||||
#else /* 32-bit: no short float, use boxed double */
|
||||
|
||||
static inline JSValue __JS_NewFloat64 (JSContext *ctx,
|
||||
double d); /* forward decl */
|
||||
static inline double JS_VALUE_GET_FLOAT64 (JSValue v);
|
||||
|
||||
#define JS_TAG_IS_FLOAT64(tag) (0)
|
||||
#define JS_NAN JS_MKVAL (JS_TAG_NULL, 0)
|
||||
|
||||
#endif /* JS_PTR64 */
|
||||
|
||||
/* ============================================================
|
||||
Type Checks
|
||||
============================================================ */
|
||||
@@ -299,14 +265,10 @@ static inline JS_BOOL JS_IsInt (JSValue v) { return (v & 1) == 0; }
|
||||
static inline JS_BOOL JS_IsPtr (JSValue v) { return (v & 7) == JS_TAG_PTR; }
|
||||
static inline JS_BOOL JS_IsSpecial (JSValue v) { return (v & 3) == JS_TAG_SPECIAL; }
|
||||
|
||||
|
||||
|
||||
#ifdef JS_PTR64
|
||||
static inline JS_BOOL
|
||||
JS_IsShortFloat (JSValue v) {
|
||||
return (v & 7) == JS_TAG_SHORT_FLOAT;
|
||||
}
|
||||
#endif
|
||||
|
||||
#define JS_VALUE_IS_BOTH_INT(v1, v2) (((v1) & 1) == 0 && ((v2) & 1) == 0)
|
||||
#define JS_VALUE_IS_BOTH_FLOAT(v1, v2) \
|
||||
@@ -320,14 +282,6 @@ JS_IsShortFloat (JSValue v) {
|
||||
#define JS_FALSE ((JSValue)JS_TAG_BOOL)
|
||||
#define JS_TRUE ((JSValue)(JS_TAG_BOOL | (1 << 5)))
|
||||
#define JS_EXCEPTION ((JSValue)JS_TAG_EXCEPTION)
|
||||
#define JS_UNINITIALIZED ((JSValue)JS_TAG_UNINITIALIZED)
|
||||
|
||||
/* flags for object properties - simplified model:
|
||||
- No per-property writable/configurable (use stone() for immutability)
|
||||
- Text keys are enumerable, object keys are not */
|
||||
#define JS_PROP_TMASK (3 << 4) /* mask for NORMAL, VARREF */
|
||||
#define JS_PROP_NORMAL (0 << 4)
|
||||
#define JS_PROP_VARREF (2 << 4) /* used internally for closures */
|
||||
|
||||
#ifndef JS_DEFAULT_STACK_SIZE
|
||||
#define JS_DEFAULT_STACK_SIZE (1024 * 1024)
|
||||
@@ -344,74 +298,35 @@ typedef JSValue JSCFunctionData (JSContext *ctx, JSValue this_val,
|
||||
int argc, JSValue *argv, int magic,
|
||||
JSValue *data);
|
||||
|
||||
typedef struct JSMallocState {
|
||||
size_t malloc_count;
|
||||
size_t malloc_size;
|
||||
size_t malloc_limit;
|
||||
void *opaque; /* user opaque */
|
||||
} JSMallocState;
|
||||
|
||||
typedef struct JSMallocFunctions {
|
||||
void *(*js_malloc) (JSMallocState *s, size_t size);
|
||||
void (*js_free) (JSMallocState *s, void *ptr);
|
||||
void *(*js_realloc) (JSMallocState *s, void *ptr, size_t size);
|
||||
size_t (*js_malloc_usable_size) (const void *ptr);
|
||||
} JSMallocFunctions;
|
||||
|
||||
typedef struct JSGCObjectHeader JSGCObjectHeader;
|
||||
|
||||
JSValue JS_Stone (JSContext *ctx, JSValue this_val);
|
||||
/* ============================================================
|
||||
1. Runtime / Context Lifecycle
|
||||
============================================================ */
|
||||
|
||||
JSRuntime *JS_NewRuntime (void);
|
||||
/* info lifetime must exceed that of rt */
|
||||
void JS_SetRuntimeInfo (JSRuntime *rt, const char *info);
|
||||
void JS_FreeRuntime (JSRuntime *rt);
|
||||
void JS_SetMemoryLimit (JSRuntime *rt, size_t limit);
|
||||
|
||||
JSContext *JS_NewContext (JSRuntime *rt);
|
||||
JSContext *JS_NewContextWithHeapSize (JSRuntime *rt, size_t heap_size);
|
||||
void JS_FreeContext (JSContext *s);
|
||||
void *JS_GetContextOpaque (JSContext *ctx);
|
||||
void JS_SetContextOpaque (JSContext *ctx, void *opaque);
|
||||
JSRuntime *JS_GetRuntime (JSContext *ctx);
|
||||
/* use 0 to disable maximum stack size check */
|
||||
void JS_SetMaxStackSize (JSContext *ctx, size_t stack_size);
|
||||
/* should be called when changing thread to update the stack top value
|
||||
used to check stack overflow. */
|
||||
void JS_UpdateStackTop (JSContext *ctx);
|
||||
void JS_FreeRuntime (JSRuntime *rt);
|
||||
void *JS_GetRuntimeOpaque (JSRuntime *rt);
|
||||
void JS_SetRuntimeOpaque (JSRuntime *rt, void *opaque);
|
||||
typedef void JS_MarkFunc (JSRuntime *rt, JSGCObjectHeader *gp);
|
||||
/* JS_MarkValue is a no-op with copying GC (values are traced from roots) */
|
||||
void JS_MarkValue (JSRuntime *rt, JSValue val, JS_MarkFunc *mark_func);
|
||||
|
||||
/* return != 0 if the JS code needs to be interrupted */
|
||||
typedef int JSInterruptHandler (JSRuntime *rt, void *opaque);
|
||||
void JS_SetInterruptHandler (JSContext *ctx, JSInterruptHandler *cb,
|
||||
void *opaque);
|
||||
|
||||
JS_BOOL JS_IsLiveObject (JSRuntime *rt, JSValue obj);
|
||||
|
||||
JSContext *JS_NewContext (JSRuntime *rt);
|
||||
void JS_FreeContext (JSContext *s);
|
||||
JSContext *JS_DupContext (JSContext *ctx);
|
||||
JSContext *JS_GetContext (JSRuntime *rt);
|
||||
void *JS_GetContextOpaque (JSContext *ctx);
|
||||
void JS_SetContextOpaque (JSContext *ctx, void *opaque);
|
||||
JSRuntime *JS_GetRuntime (JSContext *ctx);
|
||||
void JS_SetClassProto (JSContext *ctx, JSClassID class_id, JSValue obj);
|
||||
JSValue JS_GetClassProto (JSContext *ctx, JSClassID class_id);
|
||||
|
||||
JSContext *JS_NewContextWithHeapSize (JSRuntime *rt, size_t heap_size);
|
||||
|
||||
typedef struct JSMemoryUsage {
|
||||
int64_t malloc_size, malloc_limit, memory_used_size;
|
||||
int64_t malloc_count;
|
||||
int64_t memory_used_count;
|
||||
int64_t str_count, str_size;
|
||||
int64_t obj_count, obj_size;
|
||||
int64_t prop_count, prop_size;
|
||||
int64_t shape_count, shape_size;
|
||||
int64_t js_func_count, js_func_size, js_func_code_size;
|
||||
int64_t js_func_pc2line_count, js_func_pc2line_size;
|
||||
int64_t c_func_count, array_count;
|
||||
int64_t fast_array_count, fast_array_elements;
|
||||
int64_t binary_object_count, binary_object_size;
|
||||
} JSMemoryUsage;
|
||||
|
||||
void JS_ComputeMemoryUsage (JSRuntime *rt, JSMemoryUsage *s);
|
||||
void JS_DumpMemoryUsage (FILE *fp, const JSMemoryUsage *s, JSRuntime *rt);
|
||||
|
||||
/* Class system */
|
||||
typedef void JSClassFinalizer (JSRuntime *rt, JSValue val);
|
||||
typedef void JSClassGCMark (JSRuntime *rt, JSValue val,
|
||||
JS_MarkFunc *mark_func);
|
||||
typedef JSValue JSClassCall (JSContext *ctx, JSValue func_obj,
|
||||
JSValue this_val, int argc,
|
||||
JSValue *argv, int flags);
|
||||
@@ -419,12 +334,11 @@ typedef JSValue JSClassCall (JSContext *ctx, JSValue func_obj,
|
||||
typedef struct JSClassDef {
|
||||
const char *class_name;
|
||||
JSClassFinalizer *finalizer;
|
||||
JSClassGCMark *gc_mark;
|
||||
/* if call != NULL, the object is a function */
|
||||
JSClassCall *call;
|
||||
} JSClassDef;
|
||||
|
||||
#define JS_INVALID_CLASS_ID 0
|
||||
extern JSClassID js_class_id_alloc;
|
||||
JSClassID JS_NewClassID (JSClassID *pclass_id);
|
||||
/* Returns the class ID if `v` is an object, otherwise returns
|
||||
* JS_INVALID_CLASS_ID. */
|
||||
@@ -432,22 +346,12 @@ JSClassID JS_GetClassID (JSValue v);
|
||||
int JS_NewClass (JSContext *ctx, JSClassID class_id,
|
||||
const JSClassDef *class_def);
|
||||
int JS_IsRegisteredClass (JSContext *ctx, JSClassID class_id);
|
||||
|
||||
extern JSClassID js_class_id_alloc;
|
||||
void JS_SetClassProto (JSContext *ctx, JSClassID class_id, JSValue obj);
|
||||
JSValue JS_GetClassProto (JSContext *ctx, JSClassID class_id);
|
||||
|
||||
/* ============================================================
|
||||
Copying GC - No Reference Counting Needed
|
||||
============================================================
|
||||
With a copying GC, reference counting is not needed since all live
|
||||
objects are discovered by tracing from roots. These macros make
|
||||
existing DupValue/FreeValue calls into no-ops.
|
||||
2. Value Creation and Type Checks
|
||||
============================================================ */
|
||||
#define JS_DupValue(ctx, v) (v)
|
||||
#define JS_FreeValue(ctx, v) ((void)0)
|
||||
#define JS_DupValueRT(rt, v) (v)
|
||||
#define JS_FreeValueRT(rt, v) ((void)0)
|
||||
|
||||
/* value handling */
|
||||
|
||||
static inline JSValue
|
||||
JS_NewBool (JSContext *ctx, JS_BOOL val) {
|
||||
@@ -459,11 +363,6 @@ JS_NewInt32 (JSContext *ctx, int32_t val) {
|
||||
return JS_MKVAL (JS_TAG_INT, val);
|
||||
}
|
||||
|
||||
static inline JSValue
|
||||
JS_NewCatchOffset (JSContext *ctx, int32_t val) {
|
||||
return JS_MKVAL (JS_TAG_CATCH_OFFSET, val);
|
||||
}
|
||||
|
||||
static inline JSValue
|
||||
JS_NewInt64 (JSContext *ctx, int64_t val) {
|
||||
JSValue v;
|
||||
@@ -504,6 +403,7 @@ JS_NewFloat64 (JSContext *ctx, double d) {
|
||||
return __JS_NewFloat64 (ctx, d);
|
||||
}
|
||||
|
||||
/* Inline type checks (immediate tags) */
|
||||
static inline JS_BOOL JS_IsNumber (JSValue v) {
|
||||
int tag = JS_VALUE_GET_TAG (v);
|
||||
return tag == JS_TAG_INT || JS_TAG_IS_FLOAT64 (tag);
|
||||
@@ -521,11 +421,7 @@ static inline JS_BOOL JS_IsException (JSValue v) {
|
||||
return (JS_VALUE_GET_TAG (v) == JS_TAG_EXCEPTION);
|
||||
}
|
||||
|
||||
static inline JS_BOOL JS_IsUninitialized (JSValue v) {
|
||||
return (JS_VALUE_GET_TAG (v) == JS_TAG_UNINITIALIZED);
|
||||
}
|
||||
|
||||
/* Immediate String Helpers */
|
||||
/* Immediate ASCII string helpers */
|
||||
#define MIST_ASCII_MAX_LEN 7
|
||||
|
||||
static inline JS_BOOL
|
||||
@@ -538,13 +434,11 @@ MIST_GetImmediateASCIILen (JSValue v) {
|
||||
return (int)((v >> 5) & 0x7);
|
||||
}
|
||||
|
||||
static inline int
|
||||
MIST_GetImmediateASCIIChar (JSValue v, int idx) {
|
||||
static inline int MIST_GetImmediateASCIIChar (JSValue v, int idx) {
|
||||
return (int)((v >> (8 + idx * 8)) & 0xFF);
|
||||
}
|
||||
|
||||
static inline JSValue
|
||||
MIST_TryNewImmediateASCII (const char *str, size_t len) {
|
||||
static inline JSValue MIST_TryNewImmediateASCII (const char *str, size_t len) {
|
||||
if (len > MIST_ASCII_MAX_LEN) return JS_NULL;
|
||||
JSValue v = (JSValue)JS_TAG_STRING_IMM | ((JSValue)len << 5);
|
||||
for (size_t i = 0; i < len; i++) {
|
||||
@@ -555,27 +449,100 @@ MIST_TryNewImmediateASCII (const char *str, size_t len) {
|
||||
return v;
|
||||
}
|
||||
|
||||
static inline JS_BOOL JS_IsInteger (JSValue v) {
|
||||
return JS_VALUE_GET_TAG (v) == JS_TAG_INT;
|
||||
}
|
||||
|
||||
static inline JS_BOOL JS_IsObject (JSValue v) {
|
||||
return JS_IsPtr (v);
|
||||
}
|
||||
|
||||
/* Heap object type checks (non-inline — see mist_is_* in quickjs-internal.h
|
||||
for inline versions used by the VM dispatch loop) */
|
||||
JS_BOOL JS_IsArray(JSValue v);
|
||||
JS_BOOL JS_IsRecord(JSValue v);
|
||||
#define JS_IsObject JS_IsRecord
|
||||
JS_BOOL JS_IsFunction(JSValue v);
|
||||
JS_BOOL JS_IsCode(JSValue v);
|
||||
JS_BOOL JS_IsForwarded(JSValue v);
|
||||
JS_BOOL JS_IsFrame(JSValue v);
|
||||
JS_BOOL JS_IsBlob(JSValue v);
|
||||
JS_BOOL JS_IsText(JSValue v);
|
||||
static JS_BOOL JS_IsStone(JSValue v);
|
||||
JS_BOOL JS_IsStone(JSValue v);
|
||||
|
||||
// Fundamental
|
||||
/* ============================================================
|
||||
3. GC References
|
||||
============================================================
|
||||
With a copying GC, reference counting is not needed since all live
|
||||
objects are discovered by tracing from roots. These macros make
|
||||
existing DupValue/FreeValue calls into no-ops.
|
||||
============================================================ */
|
||||
#define JS_DupValue(ctx, v) (v)
|
||||
#define JS_FreeValue(ctx, v) ((void)0)
|
||||
#define JS_DupValueRT(rt, v) (v)
|
||||
#define JS_FreeValueRT(rt, v) ((void)0)
|
||||
|
||||
/* ============================================================
|
||||
4. Property Access
|
||||
============================================================ */
|
||||
|
||||
JSValue JS_GetProperty (JSContext *ctx, JSValue this_obj, JSValue prop);
|
||||
int JS_SetProperty (JSContext *ctx, JSValue this_obj, JSValue prop, JSValue val);
|
||||
|
||||
JSValue JS_GetPropertyStr (JSContext *ctx, JSValue this_obj, const char *prop);
|
||||
int JS_SetPropertyStr (JSContext *ctx, JSValue this_obj, const char *prop, JSValue val);
|
||||
|
||||
JSValue JS_GetPropertyNumber (JSContext *ctx, JSValue this_obj, int idx);
|
||||
JSValue JS_SetPropertyNumber (JSContext *ctx, JSValue obj, int idx, JSValue val);
|
||||
|
||||
JSValue JS_GetPrototype (JSContext *ctx, JSValue val);
|
||||
JSValue JS_GetOwnPropertyNames (JSContext *ctx, JSValue obj);
|
||||
int JS_GetLength (JSContext *ctx, JSValue obj, int64_t *pres);
|
||||
|
||||
void JS_SetOpaque (JSValue obj, void *opaque);
|
||||
void *JS_GetOpaque (JSValue obj, JSClassID class_id);
|
||||
void *JS_GetOpaque2 (JSContext *ctx, JSValue obj, JSClassID class_id);
|
||||
void *JS_GetAnyOpaque (JSValue obj, JSClassID *class_id);
|
||||
|
||||
/* ============================================================
|
||||
5. Object / Array / String Creation
|
||||
============================================================ */
|
||||
|
||||
JSValue JS_NewObjectProtoClass (JSContext *ctx, JSValue proto, JSClassID class_id);
|
||||
JSValue JS_NewObjectClass (JSContext *ctx, int class_id);
|
||||
JSValue JS_NewObjectProto (JSContext *ctx, JSValue proto);
|
||||
JSValue JS_NewObject (JSContext *ctx);
|
||||
|
||||
JSValue JS_NewArray (JSContext *ctx);
|
||||
JSValue JS_NewArrayLen (JSContext *ctx, uint32_t len);
|
||||
JSValue JS_NewArrayFrom (JSContext *ctx, int count, JSValue *values);
|
||||
int JS_ArrayPush (JSContext *ctx, JSValue *arr_ptr, JSValue val);
|
||||
JSValue JS_ArrayPop (JSContext *ctx, JSValue obj);
|
||||
|
||||
JSValue JS_NewStringLen (JSContext *ctx, const char *str1, size_t len1);
|
||||
static inline JSValue JS_NewString (JSContext *ctx, const char *str) {
|
||||
return JS_NewStringLen (ctx, str, strlen (str));
|
||||
}
|
||||
|
||||
/* ============================================================
|
||||
6. Type Conversion
|
||||
============================================================ */
|
||||
|
||||
int JS_ToBool (JSContext *ctx, JSValue val); /* return -1 for JS_EXCEPTION */
|
||||
int JS_ToInt32 (JSContext *ctx, int32_t *pres, JSValue val);
|
||||
static inline int JS_ToUint32 (JSContext *ctx, uint32_t *pres, JSValue val) {
|
||||
return JS_ToInt32 (ctx, (int32_t *)pres, val);
|
||||
}
|
||||
int JS_ToInt64 (JSContext *ctx, int64_t *pres, JSValue val);
|
||||
int JS_ToFloat64 (JSContext *ctx, double *pres, JSValue val);
|
||||
|
||||
JSValue JS_ToString (JSContext *ctx, JSValue val);
|
||||
JSValue JS_ToPropertyKey (JSContext *ctx, JSValue val);
|
||||
|
||||
const char *JS_ToCStringLen2 (JSContext *ctx, size_t *plen, JSValue val1, JS_BOOL cesu8);
|
||||
static inline const char * JS_ToCStringLen (JSContext *ctx, size_t *plen, JSValue val1) {
|
||||
return JS_ToCStringLen2 (ctx, plen, val1, 0);
|
||||
}
|
||||
static inline const char * JS_ToCString (JSContext *ctx, JSValue val1) {
|
||||
return JS_ToCStringLen2 (ctx, NULL, val1, 0);
|
||||
}
|
||||
void JS_FreeCString (JSContext *ctx, const char *ptr);
|
||||
|
||||
JS_BOOL JS_StrictEq (JSContext *ctx, JSValue op1, JSValue op2);
|
||||
|
||||
/* ============================================================
|
||||
7. Error Handling
|
||||
============================================================ */
|
||||
|
||||
JSValue JS_Throw (JSContext *ctx, JSValue obj);
|
||||
JSValue JS_GetException (JSContext *ctx);
|
||||
JS_BOOL JS_HasException (JSContext *ctx);
|
||||
@@ -591,45 +558,28 @@ JSValue __js_printf_like (2, 3)
|
||||
JS_ThrowInternalError (JSContext *ctx, const char *fmt, ...);
|
||||
JSValue JS_ThrowOutOfMemory (JSContext *ctx);
|
||||
|
||||
JS_BOOL JS_StrictEq (JSContext *ctx, JSValue op1, JSValue op2);
|
||||
/* ============================================================
|
||||
8. Function Creation and Invocation
|
||||
============================================================ */
|
||||
|
||||
int JS_ToBool (JSContext *ctx, JSValue val); /* return -1 for JS_EXCEPTION */
|
||||
int JS_ToInt32 (JSContext *ctx, int32_t *pres, JSValue val);
|
||||
static inline int JS_ToUint32 (JSContext *ctx, uint32_t *pres, JSValue val) {
|
||||
return JS_ToInt32 (ctx, (int32_t *)pres, val);
|
||||
}
|
||||
int JS_ToInt64 (JSContext *ctx, int64_t *pres, JSValue val);
|
||||
int JS_ToFloat64 (JSContext *ctx, double *pres, JSValue val);
|
||||
/* return an exception if 'val' is a Number */
|
||||
JSValue JS_Call (JSContext *ctx, JSValue func_obj, JSValue this_obj, int argc, JSValue *argv);
|
||||
JSValue JS_Stone (JSContext *ctx, JSValue this_val);
|
||||
|
||||
JSValue JS_NewStringLen (JSContext *ctx, const char *str1, size_t len1);
|
||||
static inline JSValue JS_NewString (JSContext *ctx, const char *str) {
|
||||
return JS_NewStringLen (ctx, str, strlen (str));
|
||||
}
|
||||
JSValue JS_ToString (JSContext *ctx, JSValue val);
|
||||
JSValue JS_ToPropertyKey (JSContext *ctx, JSValue val);
|
||||
const char *JS_ToCStringLen2 (JSContext *ctx, size_t *plen, JSValue val1, JS_BOOL cesu8);
|
||||
static inline const char * JS_ToCStringLen (JSContext *ctx, size_t *plen, JSValue val1) {
|
||||
return JS_ToCStringLen2 (ctx, plen, val1, 0);
|
||||
}
|
||||
static inline const char * JS_ToCString (JSContext *ctx, JSValue val1) {
|
||||
return JS_ToCStringLen2 (ctx, NULL, val1, 0);
|
||||
}
|
||||
void JS_FreeCString (JSContext *ctx, const char *ptr);
|
||||
/* JSON */
|
||||
/* 'buf' must be zero terminated i.e. buf[buf_len] = '\0'. */
|
||||
JSValue JS_ParseJSON (JSContext *ctx, const char *buf, size_t buf_len,
|
||||
const char *filename);
|
||||
#define JS_PARSE_JSON_EXT (1 << 0) /* allow extended JSON */
|
||||
JSValue JS_ParseJSON2 (JSContext *ctx, const char *buf, size_t buf_len,
|
||||
const char *filename, int flags);
|
||||
JSValue JS_JSONStringify (JSContext *ctx, JSValue obj,
|
||||
JSValue replacer, JSValue space0);
|
||||
|
||||
JSValue JS_NewObjectProtoClass (JSContext *ctx, JSValue proto, JSClassID class_id);
|
||||
JSValue JS_NewObjectClass (JSContext *ctx, int class_id);
|
||||
JSValue JS_NewObjectProto (JSContext *ctx, JSValue proto);
|
||||
JSValue JS_NewObject (JSContext *ctx);
|
||||
/* ============================================================
|
||||
9. Intrinsic Wrappers (JS_Cell* / JS_Array*)
|
||||
============================================================ */
|
||||
|
||||
JSValue JS_NewArray (JSContext *ctx);
|
||||
JSValue JS_NewArrayLen (JSContext *ctx, uint32_t len);
|
||||
|
||||
/* GC-safe push: takes pointer to array, updates it if array grows */
|
||||
int JS_ArrayPush (JSContext *ctx, JSValue *arr_ptr, JSValue val);
|
||||
JSValue JS_ArrayPop (JSContext *ctx, JSValue obj);
|
||||
|
||||
/* Intrinsic array operations - signatures match internal functions */
|
||||
/* Intrinsic array operations */
|
||||
JSValue JS_Array (JSContext *ctx, JSValue arg0, JSValue arg1, JSValue arg2, JSValue arg3);
|
||||
JSValue JS_ArrayFilter (JSContext *ctx, JSValue arr, JSValue fn);
|
||||
JSValue JS_ArraySort (JSContext *ctx, JSValue arr, JSValue selector);
|
||||
@@ -637,9 +587,7 @@ JSValue JS_ArrayFind (JSContext *ctx, JSValue arr, JSValue target_or_fn, JSValue
|
||||
JSValue JS_ArrFor (JSContext *ctx, JSValue arr, JSValue fn, JSValue reverse, JSValue exit_val);
|
||||
JSValue JS_ArrayReduce (JSContext *ctx, JSValue arr, JSValue fn, JSValue initial, JSValue reverse);
|
||||
|
||||
/* Cell intrinsic functions - C API wrappers */
|
||||
|
||||
/* Core functions */
|
||||
/* Core cell functions */
|
||||
JSValue JS_CellStone (JSContext *ctx, JSValue val);
|
||||
JSValue JS_CellLength (JSContext *ctx, JSValue val);
|
||||
JSValue JS_CellReverse (JSContext *ctx, JSValue val);
|
||||
@@ -652,7 +600,7 @@ JSValue JS_CellModulo (JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue JS_CellNeg (JSContext *ctx, JSValue val);
|
||||
JSValue JS_CellNot (JSContext *ctx, JSValue val);
|
||||
|
||||
/* Text functions */
|
||||
/* Text cell functions */
|
||||
JSValue JS_CellText (JSContext *ctx, JSValue val);
|
||||
JSValue JS_CellLower (JSContext *ctx, JSValue text);
|
||||
JSValue JS_CellUpper (JSContext *ctx, JSValue text);
|
||||
@@ -663,7 +611,7 @@ JSValue JS_CellSearch (JSContext *ctx, JSValue text, JSValue pattern, JSValue fr
|
||||
JSValue JS_CellExtract (JSContext *ctx, JSValue text, JSValue from, JSValue to);
|
||||
JSValue JS_CellCharacter (JSContext *ctx, JSValue codepoint);
|
||||
|
||||
/* Number functions */
|
||||
/* Number cell functions */
|
||||
JSValue JS_CellNumber (JSContext *ctx, JSValue val);
|
||||
JSValue JS_CellAbs (JSContext *ctx, JSValue num);
|
||||
JSValue JS_CellSign (JSContext *ctx, JSValue num);
|
||||
@@ -677,70 +625,20 @@ JSValue JS_CellMin (JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue JS_CellMax (JSContext *ctx, JSValue a, JSValue b);
|
||||
JSValue JS_CellRemainder (JSContext *ctx, JSValue a, JSValue b);
|
||||
|
||||
/* Object functions */
|
||||
/* Object cell functions */
|
||||
JSValue JS_CellObject (JSContext *ctx, JSValue proto, JSValue props);
|
||||
|
||||
/* Format function */
|
||||
/* Format */
|
||||
JSValue JS_CellFormat (JSContext *ctx, JSValue text, JSValue collection, JSValue transformer);
|
||||
|
||||
/* Helper functions */
|
||||
JSValue JS_NewArrayFrom (JSContext *ctx, int count, JSValue *values);
|
||||
/* Output helpers */
|
||||
void JS_PrintText (JSContext *ctx, JSValue val);
|
||||
void JS_PrintTextLn (JSContext *ctx, JSValue val);
|
||||
void JS_PrintFormatted (JSContext *ctx, const char *fmt, int count, JSValue *values);
|
||||
|
||||
JSValue JS_GetProperty (JSContext *ctx, JSValue this_obj, JSValue prop);
|
||||
|
||||
// For records
|
||||
JSValue JS_GetPropertyStr (JSContext *ctx, JSValue this_obj, const char *prop);
|
||||
int JS_SetPropertyStr (JSContext *ctx, JSValue this_obj, const char *prop, JSValue val);
|
||||
|
||||
// Set property on the global object
|
||||
int JS_SetGlobalStr (JSContext *ctx, const char *prop, JSValue val);
|
||||
int JS_SetProperty (JSContext *ctx, JSValue this_obj, JSValue prop, JSValue val);
|
||||
JSValue JS_GetPrototype (JSContext *ctx, JSValue val);
|
||||
|
||||
// Must be an array
|
||||
JSValue JS_GetPropertyNumber (JSContext *ctx, JSValue this_obj, int idx);
|
||||
JSValue JS_SetPropertyNumber (JSContext *ctx, JSValue obj, int idx, JSValue val);
|
||||
|
||||
// Indexed property access (works with arrays and objects)
|
||||
JSValue JS_GetPropertyUint32 (JSContext *ctx, JSValue this_obj, uint32_t idx);
|
||||
int JS_SetPropertyUint32 (JSContext *ctx, JSValue this_obj, uint32_t idx, JSValue val);
|
||||
int JS_SetPropertyInt64 (JSContext *ctx, JSValue this_obj, int64_t idx, JSValue val);
|
||||
|
||||
/* Get property keys as array of text */
|
||||
JSValue JS_GetOwnPropertyNames (JSContext *ctx, JSValue obj);
|
||||
|
||||
JSValue JS_Call (JSContext *ctx, JSValue func_obj, JSValue this_obj, int argc, JSValue *argv);
|
||||
|
||||
void JS_SetOpaque (JSValue obj, void *opaque);
|
||||
void *JS_GetOpaque (JSValue obj, JSClassID class_id);
|
||||
void *JS_GetOpaque2 (JSContext *ctx, JSValue obj, JSClassID class_id);
|
||||
void *JS_GetAnyOpaque (JSValue obj, JSClassID *class_id);
|
||||
|
||||
/* 'buf' must be zero terminated i.e. buf[buf_len] = '\0'. */
|
||||
JSValue JS_ParseJSON (JSContext *ctx, const char *buf, size_t buf_len,
|
||||
const char *filename);
|
||||
#define JS_PARSE_JSON_EXT (1 << 0) /* allow extended JSON */
|
||||
JSValue JS_ParseJSON2 (JSContext *ctx, const char *buf, size_t buf_len,
|
||||
const char *filename, int flags);
|
||||
JSValue JS_JSONStringify (JSContext *ctx, JSValue obj,
|
||||
JSValue replacer, JSValue space0);
|
||||
|
||||
/* return != 0 if the JS code needs to be interrupted */
|
||||
typedef int JSInterruptHandler (JSRuntime *rt, void *opaque);
|
||||
void JS_SetInterruptHandler (JSContext *ctx, JSInterruptHandler *cb,
|
||||
void *opaque);
|
||||
/* select which debug info is stripped from the compiled code */
|
||||
#define JS_STRIP_SOURCE (1 << 0) /* strip source code */
|
||||
#define JS_STRIP_DEBUG \
|
||||
(1 << 1) /* strip all debug info including source code */
|
||||
void JS_SetStripInfo (JSRuntime *rt, int flags);
|
||||
int JS_GetStripInfo (JSRuntime *rt);
|
||||
|
||||
|
||||
/* C function definition */
|
||||
/* ============================================================
|
||||
10. C Function Definition
|
||||
============================================================ */
|
||||
typedef enum JSCFunctionEnum {
|
||||
JS_CFUNC_generic,
|
||||
JS_CFUNC_generic_magic,
|
||||
@@ -1000,7 +898,27 @@ typedef struct JSCFunctionListEntry {
|
||||
int JS_SetPropertyFunctionList (JSContext *ctx, JSValue obj,
|
||||
const JSCFunctionListEntry *tab, int len);
|
||||
|
||||
/* debug value output */
|
||||
/* ============================================================
|
||||
11. Debug / Dump Utilities
|
||||
============================================================ */
|
||||
|
||||
typedef struct JSMemoryUsage {
|
||||
int64_t malloc_size, malloc_limit, memory_used_size;
|
||||
int64_t malloc_count;
|
||||
int64_t memory_used_count;
|
||||
int64_t str_count, str_size;
|
||||
int64_t obj_count, obj_size;
|
||||
int64_t prop_count, prop_size;
|
||||
int64_t shape_count, shape_size;
|
||||
int64_t js_func_count, js_func_size, js_func_code_size;
|
||||
int64_t js_func_pc2line_count, js_func_pc2line_size;
|
||||
int64_t c_func_count, array_count;
|
||||
int64_t fast_array_count, fast_array_elements;
|
||||
int64_t binary_object_count, binary_object_size;
|
||||
} JSMemoryUsage;
|
||||
|
||||
void JS_ComputeMemoryUsage (JSRuntime *rt, JSMemoryUsage *s);
|
||||
void JS_DumpMemoryUsage (FILE *fp, const JSMemoryUsage *s, JSRuntime *rt);
|
||||
|
||||
typedef struct {
|
||||
JS_BOOL show_hidden : 8; /* only show enumerable properties */
|
||||
@@ -1046,17 +964,19 @@ typedef void (*js_hook) (JSContext *, int type, js_debug *dbg, void *user);
|
||||
void js_debug_sethook (JSContext *ctx, js_hook, int type, void *user);
|
||||
|
||||
uint32_t js_debugger_stack_depth (JSContext *ctx);
|
||||
JSValue js_debugger_backtrace_fns (JSContext *ctx, const uint8_t *cur_pc);
|
||||
JSValue js_debugger_backtrace_fns (JSContext *ctx);
|
||||
JSValue js_debugger_closure_variables (JSContext *ctx, JSValue fn);
|
||||
JSValue js_debugger_local_variables (JSContext *ctx, int stack_index);
|
||||
void js_debugger_set_closure_variable (JSContext *js, JSValue fn,
|
||||
JSValue var_name, JSValue val);
|
||||
JSValue js_debugger_build_backtrace (JSContext *ctx, const uint8_t *cur_pc);
|
||||
JSValue js_debugger_build_backtrace (JSContext *ctx);
|
||||
JSValue js_debugger_fn_info (JSContext *ctx, JSValue fn);
|
||||
JSValue js_debugger_fn_bytecode (JSContext *js, JSValue fn);
|
||||
void *js_debugger_val_address (JSContext *js, JSValue val);
|
||||
|
||||
/* Memory allocation functions (bump allocator) */
|
||||
/* ============================================================
|
||||
12. Memory Allocation
|
||||
============================================================ */
|
||||
void *js_malloc (JSContext *ctx, size_t size);
|
||||
void *js_mallocz (JSContext *ctx, size_t size);
|
||||
void *js_realloc (JSContext *ctx, void *ptr, size_t size);
|
||||
@@ -1068,17 +988,13 @@ void *js_malloc_rt (size_t size);
|
||||
void *js_mallocz_rt (size_t size);
|
||||
void js_free_rt (void *ptr);
|
||||
|
||||
/* ============================================================
|
||||
13. Compilation and Bytecode
|
||||
============================================================ */
|
||||
|
||||
struct cJSON;
|
||||
|
||||
/* Compiled bytecode (context-free, serializable) */
|
||||
typedef struct MachCode MachCode;
|
||||
|
||||
/* Compile AST cJSON tree to context-free MachCode. */
|
||||
MachCode *JS_CompileMachTree(struct cJSON *ast);
|
||||
|
||||
/* Compile AST JSON string to context-free MachCode. */
|
||||
MachCode *JS_CompileMach(const char *ast_json);
|
||||
|
||||
/* Free a compiled MachCode tree. */
|
||||
void JS_FreeMachCode(MachCode *mc);
|
||||
|
||||
@@ -1091,30 +1007,17 @@ MachCode *JS_DeserializeMachCode(const uint8_t *data, size_t size);
|
||||
/* Load compiled MachCode into a JSContext, materializing JSValues. */
|
||||
struct JSCodeRegister *JS_LoadMachCode(JSContext *ctx, MachCode *mc, JSValue env);
|
||||
|
||||
/* Dump MACH bytecode to stdout. Takes AST cJSON tree. */
|
||||
void JS_DumpMachTree (JSContext *ctx, struct cJSON *ast, JSValue env);
|
||||
|
||||
/* Dump MACH bytecode to stdout. Takes AST JSON string. */
|
||||
void JS_DumpMach (JSContext *ctx, const char *ast_json, JSValue env);
|
||||
|
||||
/* Compile and execute MACH bytecode from AST cJSON tree. */
|
||||
JSValue JS_RunMachTree (JSContext *ctx, struct cJSON *ast, JSValue env);
|
||||
|
||||
/* Compile and execute MACH bytecode from AST JSON string. */
|
||||
JSValue JS_RunMach (JSContext *ctx, const char *ast_json, JSValue env);
|
||||
|
||||
/* Deserialize and execute pre-compiled MACH binary bytecode. */
|
||||
JSValue JS_RunMachBin(JSContext *ctx, const uint8_t *data, size_t size, JSValue env);
|
||||
|
||||
/* Execute MCODE from cJSON tree. Takes ownership of root. */
|
||||
JSValue JS_CallMcodeTree (JSContext *ctx, struct cJSON *root);
|
||||
/* Parse mcode JSON IR, compile, and execute via register VM. */
|
||||
JSValue JS_RunMachMcode(JSContext *ctx, const char *json_str, size_t len, JSValue env);
|
||||
|
||||
/* Execute MCODE from cJSON tree with hidden env. Takes ownership of root. */
|
||||
JSValue JS_CallMcodeTreeEnv (JSContext *ctx, struct cJSON *root, JSValue env);
|
||||
/* Dump disassembly of pre-compiled MACH binary bytecode. */
|
||||
void JS_DumpMachBin(JSContext *ctx, const uint8_t *data, size_t size, JSValue env);
|
||||
|
||||
/* Parse and execute MCODE JSON string.
|
||||
Returns result of execution, or JS_EXCEPTION on error. */
|
||||
JSValue JS_CallMcode (JSContext *ctx, const char *mcode_json);
|
||||
/* Compile mcode JSON IR to MachCode binary. */
|
||||
MachCode *mach_compile_mcode(struct cJSON *mcode_json);
|
||||
|
||||
/* Get stack trace as cJSON array of frame objects.
|
||||
Returns NULL if no register VM frame is active.
|
||||
|
||||
2808
source/runtime.c
2808
source/runtime.c
File diff suppressed because it is too large
Load Diff
@@ -47,9 +47,11 @@ static struct {
|
||||
actor_node *main_head; // Main Thread Queue Head
|
||||
actor_node *main_tail; // Main Thread Queue Tail
|
||||
|
||||
int shutting_down;
|
||||
|
||||
pthread_t *worker_threads;
|
||||
int shutting_down;
|
||||
int quiescence_enabled; // set after bootstrap, before actor_loop
|
||||
_Atomic int quiescent_count; // actors idle with no messages and no timers
|
||||
|
||||
pthread_t *worker_threads;
|
||||
int num_workers;
|
||||
pthread_t timer_thread;
|
||||
} engine;
|
||||
@@ -258,6 +260,10 @@ void actor_initialize(void) {
|
||||
|
||||
void actor_free(cell_rt *actor)
|
||||
{
|
||||
if (actor->is_quiescent) {
|
||||
actor->is_quiescent = 0;
|
||||
atomic_fetch_sub(&engine.quiescent_count, 1);
|
||||
}
|
||||
lockless_shdel(actors, actor->id);
|
||||
|
||||
// Do not go forward with actor destruction until the actor is completely free
|
||||
@@ -303,14 +309,45 @@ void actor_free(cell_rt *actor)
|
||||
free(actor);
|
||||
|
||||
int actor_count = lockless_shlen(actors);
|
||||
if (actor_count == 0) exit(0);
|
||||
if (actor_count == 0) {
|
||||
fprintf(stderr, "all actors are dead\n");
|
||||
pthread_mutex_lock(&engine.lock);
|
||||
engine.shutting_down = 1;
|
||||
pthread_cond_broadcast(&engine.wake_cond);
|
||||
pthread_cond_broadcast(&engine.timer_cond);
|
||||
pthread_cond_broadcast(&engine.main_cond);
|
||||
pthread_mutex_unlock(&engine.lock);
|
||||
}
|
||||
}
|
||||
|
||||
int scheduler_actor_count(void) {
|
||||
return (int)lockless_shlen(actors);
|
||||
}
|
||||
|
||||
void scheduler_enable_quiescence(void) {
|
||||
engine.quiescence_enabled = 1;
|
||||
// Check if all actors are already quiescent
|
||||
int qc = atomic_load(&engine.quiescent_count);
|
||||
int total = (int)lockless_shlen(actors);
|
||||
if (qc >= total && total > 0) {
|
||||
pthread_mutex_lock(&engine.lock);
|
||||
engine.shutting_down = 1;
|
||||
pthread_cond_broadcast(&engine.wake_cond);
|
||||
pthread_cond_broadcast(&engine.timer_cond);
|
||||
pthread_cond_broadcast(&engine.main_cond);
|
||||
pthread_mutex_unlock(&engine.lock);
|
||||
}
|
||||
}
|
||||
|
||||
void exit_handler(void) {
|
||||
static int already_exiting = 0;
|
||||
if (already_exiting) return;
|
||||
already_exiting = 1;
|
||||
|
||||
pthread_mutex_lock(&engine.lock);
|
||||
engine.shutting_down = 1;
|
||||
pthread_cond_broadcast(&engine.wake_cond);
|
||||
pthread_cond_broadcast(&engine.timer_cond);
|
||||
pthread_cond_broadcast(&engine.wake_cond);
|
||||
pthread_cond_broadcast(&engine.timer_cond);
|
||||
pthread_cond_broadcast(&engine.main_cond);
|
||||
pthread_mutex_unlock(&engine.lock);
|
||||
|
||||
@@ -319,19 +356,17 @@ void exit_handler(void) {
|
||||
for (int i=0; i < engine.num_workers; i++) {
|
||||
pthread_join(engine.worker_threads[i], NULL);
|
||||
}
|
||||
|
||||
|
||||
free(engine.worker_threads);
|
||||
pthread_mutex_destroy(&engine.lock);
|
||||
pthread_cond_destroy(&engine.wake_cond);
|
||||
pthread_cond_destroy(&engine.timer_cond);
|
||||
pthread_cond_destroy(&engine.main_cond);
|
||||
|
||||
|
||||
pthread_mutex_destroy(actors_mutex);
|
||||
free(actors_mutex);
|
||||
|
||||
|
||||
arrfree(timer_heap);
|
||||
|
||||
exit(0);
|
||||
}
|
||||
|
||||
int actor_exists(const char *id)
|
||||
@@ -357,9 +392,13 @@ void set_actor_state(cell_rt *actor)
|
||||
|
||||
case ACTOR_IDLE:
|
||||
if (arrlen(actor->letters)) {
|
||||
if (actor->is_quiescent) {
|
||||
actor->is_quiescent = 0;
|
||||
atomic_fetch_sub(&engine.quiescent_count, 1);
|
||||
}
|
||||
actor->state = ACTOR_READY;
|
||||
actor->ar = 0;
|
||||
|
||||
|
||||
actor_node *n = malloc(sizeof(actor_node));
|
||||
n->actor = actor;
|
||||
n->next = NULL;
|
||||
@@ -384,21 +423,46 @@ void set_actor_state(cell_rt *actor)
|
||||
}
|
||||
pthread_mutex_unlock(&engine.lock);
|
||||
|
||||
} else if (!arrlen(actor->letters) && !hmlen(actor->timers)) {
|
||||
// Schedule remove timer
|
||||
static uint32_t global_timer_id = 1;
|
||||
uint32_t id = global_timer_id++;
|
||||
actor->ar = id;
|
||||
|
||||
uint64_t now = cell_ns();
|
||||
uint64_t execute_at = now + (uint64_t)(actor->ar_secs * 1e9);
|
||||
|
||||
pthread_mutex_lock(&engine.lock);
|
||||
heap_push(execute_at, actor, id, TIMER_NATIVE_REMOVE);
|
||||
if (timer_heap[0].timer_id == id) {
|
||||
pthread_cond_signal(&engine.timer_cond);
|
||||
} else if (!hmlen(actor->timers)) {
|
||||
// No messages AND no timers
|
||||
// Only count as quiescent if no $unneeded callback registered
|
||||
int has_unneeded = !JS_IsNull(actor->unneeded_ref.val);
|
||||
if (!actor->is_quiescent && actor->id && !has_unneeded) {
|
||||
actor->is_quiescent = 1;
|
||||
int qc = atomic_fetch_add(&engine.quiescent_count, 1) + 1;
|
||||
int total = (int)lockless_shlen(actors);
|
||||
if (qc >= total && total > 0 && engine.quiescence_enabled) {
|
||||
pthread_mutex_lock(&engine.lock);
|
||||
engine.shutting_down = 1;
|
||||
pthread_cond_broadcast(&engine.wake_cond);
|
||||
pthread_cond_broadcast(&engine.timer_cond);
|
||||
pthread_cond_broadcast(&engine.main_cond);
|
||||
pthread_mutex_unlock(&engine.lock);
|
||||
}
|
||||
}
|
||||
|
||||
if (!engine.shutting_down) {
|
||||
// Schedule remove timer
|
||||
static uint32_t global_timer_id = 1;
|
||||
uint32_t id = global_timer_id++;
|
||||
actor->ar = id;
|
||||
|
||||
uint64_t now = cell_ns();
|
||||
uint64_t execute_at = now + (uint64_t)(actor->ar_secs * 1e9);
|
||||
|
||||
pthread_mutex_lock(&engine.lock);
|
||||
heap_push(execute_at, actor, id, TIMER_NATIVE_REMOVE);
|
||||
if (timer_heap[0].timer_id == id) {
|
||||
pthread_cond_signal(&engine.timer_cond);
|
||||
}
|
||||
pthread_mutex_unlock(&engine.lock);
|
||||
}
|
||||
} else {
|
||||
// Has timers but no letters — waiting, not quiescent
|
||||
if (actor->is_quiescent) {
|
||||
actor->is_quiescent = 0;
|
||||
atomic_fetch_sub(&engine.quiescent_count, 1);
|
||||
}
|
||||
pthread_mutex_unlock(&engine.lock);
|
||||
}
|
||||
break;
|
||||
}
|
||||
@@ -520,11 +584,17 @@ const char *register_actor(const char *id, cell_rt *actor, int mainthread, doubl
|
||||
actor->main_thread_only = mainthread;
|
||||
actor->id = strdup(id);
|
||||
actor->ar_secs = ar;
|
||||
int added = lockless_shput_unique(actors, id, actor);
|
||||
int added = lockless_shput_unique(actors, actor->id, actor);
|
||||
if (!added) {
|
||||
free(actor->id);
|
||||
return "Actor with given ID already exists.";
|
||||
}
|
||||
// Now that actor is in the registry, track its quiescent state
|
||||
if (actor->state == ACTOR_IDLE && !arrlen(actor->letters)
|
||||
&& !hmlen(actor->timers) && JS_IsNull(actor->unneeded_ref.val)) {
|
||||
actor->is_quiescent = 1;
|
||||
atomic_fetch_add(&engine.quiescent_count, 1);
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
@@ -578,20 +648,22 @@ void actor_turn(cell_rt *actor)
|
||||
arrdel(actor->letters, 0); // O(N) but we kept array as requested
|
||||
pthread_mutex_unlock(actor->msg_mutex);
|
||||
|
||||
if (l.type == LETTER_BLOB) {
|
||||
if (l.type == LETTER_BLOB) {
|
||||
// Create a JS blob from the C blob
|
||||
size_t size = blob_length(l.blob_data) / 8; // Convert bits to bytes
|
||||
JSValue arg = js_new_blob_stoned_copy(actor->context, (void*)blob_data(l.blob_data), size);
|
||||
blob_destroy(l.blob_data);
|
||||
result = JS_Call(actor->context, actor->message_handle_ref.val, JS_NULL, 1, &arg);
|
||||
uncaught_exception(actor->context, result);
|
||||
if (!uncaught_exception(actor->context, result))
|
||||
actor->disrupt = 1;
|
||||
JS_FreeValue(actor->context, arg);
|
||||
} else if (l.type == LETTER_CALLBACK) {
|
||||
result = JS_Call(actor->context, l.callback, JS_NULL, 0, NULL);
|
||||
uncaught_exception(actor->context, result);
|
||||
if (!uncaught_exception(actor->context, result))
|
||||
actor->disrupt = 1;
|
||||
JS_FreeValue(actor->context, l.callback);
|
||||
}
|
||||
|
||||
|
||||
if (actor->disrupt) goto ENDTURN;
|
||||
|
||||
ENDTURN:
|
||||
@@ -599,9 +671,17 @@ void actor_turn(cell_rt *actor)
|
||||
|
||||
if (actor->trace_hook)
|
||||
actor->trace_hook(actor->name, CELL_HOOK_EXIT);
|
||||
|
||||
|
||||
if (actor->disrupt) {
|
||||
/* Actor must die. Unlock before freeing so actor_free can
|
||||
lock/unlock/destroy the mutex without use-after-free. */
|
||||
pthread_mutex_unlock(actor->mutex);
|
||||
actor_free(actor);
|
||||
return;
|
||||
}
|
||||
|
||||
set_actor_state(actor);
|
||||
|
||||
|
||||
pthread_mutex_unlock(actor->mutex);
|
||||
}
|
||||
|
||||
|
||||
105
source/suite.c
105
source/suite.c
@@ -21,7 +21,6 @@ static const char *js_type_name(JSValue v) {
|
||||
if (JS_IsText(v)) return "string";
|
||||
if (JS_IsArray(v)) return "array";
|
||||
if (JS_IsRecord(v)) return "object";
|
||||
if (JS_IsObject(v)) return "object";
|
||||
return "unknown";
|
||||
}
|
||||
|
||||
@@ -310,7 +309,6 @@ TEST(string_heap_to_cstring) {
|
||||
|
||||
TEST(object_create) {
|
||||
JSValue obj = JS_NewObject(ctx);
|
||||
ASSERT(JS_IsObject(obj));
|
||||
ASSERT(JS_IsRecord(obj));
|
||||
return 1;
|
||||
}
|
||||
@@ -435,7 +433,6 @@ TEST(object_many_properties_resize) {
|
||||
|
||||
TEST(array_create) {
|
||||
JSValue arr = JS_NewArray(ctx);
|
||||
ASSERT(JS_IsObject(arr));
|
||||
ASSERT(JS_IsArray(arr));
|
||||
return 1;
|
||||
}
|
||||
@@ -465,9 +462,9 @@ TEST(array_get_by_index) {
|
||||
JS_ArrayPush(ctx, &arr_ref.val, JS_NewInt32(ctx, 200));
|
||||
JS_ArrayPush(ctx, &arr_ref.val, JS_NewInt32(ctx, 300));
|
||||
|
||||
JSValue v0 = JS_GetPropertyUint32(ctx, arr_ref.val, 0);
|
||||
JSValue v1 = JS_GetPropertyUint32(ctx, arr_ref.val, 1);
|
||||
JSValue v2 = JS_GetPropertyUint32(ctx, arr_ref.val, 2);
|
||||
JSValue v0 = JS_GetPropertyNumber(ctx, arr_ref.val, 0);
|
||||
JSValue v1 = JS_GetPropertyNumber(ctx, arr_ref.val, 1);
|
||||
JSValue v2 = JS_GetPropertyNumber(ctx, arr_ref.val, 2);
|
||||
JS_PopGCRef(ctx, &arr_ref);
|
||||
|
||||
ASSERT_INT(v0, 100);
|
||||
@@ -486,12 +483,12 @@ TEST(array_set_by_index) {
|
||||
|
||||
/* Create values first, then read arr_ref.val */
|
||||
JSValue v55 = JS_NewInt32(ctx, 55);
|
||||
JS_SetPropertyUint32(ctx, arr_ref.val, 0, v55);
|
||||
JS_SetPropertyNumber(ctx, arr_ref.val, 0, v55);
|
||||
JSValue v66 = JS_NewInt32(ctx, 66);
|
||||
JS_SetPropertyUint32(ctx, arr_ref.val, 1, v66);
|
||||
JS_SetPropertyNumber(ctx, arr_ref.val, 1, v66);
|
||||
|
||||
JSValue v0 = JS_GetPropertyUint32(ctx, arr_ref.val, 0);
|
||||
JSValue v1 = JS_GetPropertyUint32(ctx, arr_ref.val, 1);
|
||||
JSValue v0 = JS_GetPropertyNumber(ctx, arr_ref.val, 0);
|
||||
JSValue v1 = JS_GetPropertyNumber(ctx, arr_ref.val, 1);
|
||||
JS_PopGCRef(ctx, &arr_ref);
|
||||
|
||||
ASSERT_INT(v0, 55);
|
||||
@@ -525,7 +522,7 @@ TEST(array_out_of_bounds_is_null) {
|
||||
arr_ref.val = JS_NewArray(ctx);
|
||||
JS_ArrayPush(ctx, &arr_ref.val, JS_NewInt32(ctx, 1));
|
||||
|
||||
JSValue val = JS_GetPropertyUint32(ctx, arr_ref.val, 999);
|
||||
JSValue val = JS_GetPropertyNumber(ctx, arr_ref.val, 999);
|
||||
JS_PopGCRef(ctx, &arr_ref);
|
||||
ASSERT(JS_IsNull(val));
|
||||
return 1;
|
||||
@@ -544,10 +541,10 @@ TEST(array_mixed_types) {
|
||||
JS_ArrayPush(ctx, &arr_ref.val, JS_TRUE);
|
||||
JS_ArrayPush(ctx, &arr_ref.val, JS_NULL);
|
||||
|
||||
JSValue v0 = JS_GetPropertyUint32(ctx, arr_ref.val, 0);
|
||||
JSValue v1 = JS_GetPropertyUint32(ctx, arr_ref.val, 1);
|
||||
JSValue v2 = JS_GetPropertyUint32(ctx, arr_ref.val, 2);
|
||||
JSValue v3 = JS_GetPropertyUint32(ctx, arr_ref.val, 3);
|
||||
JSValue v0 = JS_GetPropertyNumber(ctx, arr_ref.val, 0);
|
||||
JSValue v1 = JS_GetPropertyNumber(ctx, arr_ref.val, 1);
|
||||
JSValue v2 = JS_GetPropertyNumber(ctx, arr_ref.val, 2);
|
||||
JSValue v3 = JS_GetPropertyNumber(ctx, arr_ref.val, 3);
|
||||
JS_PopGCRef(ctx, &str_ref);
|
||||
JS_PopGCRef(ctx, &arr_ref);
|
||||
|
||||
@@ -571,9 +568,9 @@ TEST(array_many_elements_resize) {
|
||||
JS_GetLength(ctx, arr_ref.val, &len);
|
||||
|
||||
/* Verify some values */
|
||||
JSValue v0 = JS_GetPropertyUint32(ctx, arr_ref.val, 0);
|
||||
JSValue v500 = JS_GetPropertyUint32(ctx, arr_ref.val, 500);
|
||||
JSValue v999 = JS_GetPropertyUint32(ctx, arr_ref.val, 999);
|
||||
JSValue v0 = JS_GetPropertyNumber(ctx, arr_ref.val, 0);
|
||||
JSValue v500 = JS_GetPropertyNumber(ctx, arr_ref.val, 500);
|
||||
JSValue v999 = JS_GetPropertyNumber(ctx, arr_ref.val, 999);
|
||||
|
||||
/* Pop BEFORE assertions */
|
||||
JS_PopGCRef(ctx, &arr_ref);
|
||||
@@ -716,9 +713,9 @@ TEST(array_slice_basic) {
|
||||
JS_GetLength(ctx, sliced, &len);
|
||||
ASSERT(len == 3);
|
||||
|
||||
JSValue v0 = JS_GetPropertyUint32(ctx, sliced, 0);
|
||||
JSValue v1 = JS_GetPropertyUint32(ctx, sliced, 1);
|
||||
JSValue v2 = JS_GetPropertyUint32(ctx, sliced, 2);
|
||||
JSValue v0 = JS_GetPropertyNumber(ctx, sliced, 0);
|
||||
JSValue v1 = JS_GetPropertyNumber(ctx, sliced, 1);
|
||||
JSValue v2 = JS_GetPropertyNumber(ctx, sliced, 2);
|
||||
ASSERT_INT(v0, 10);
|
||||
ASSERT_INT(v1, 20);
|
||||
ASSERT_INT(v2, 30);
|
||||
@@ -747,10 +744,10 @@ TEST(array_concat_basic) {
|
||||
JS_GetLength(ctx, result, &len);
|
||||
ASSERT(len == 4);
|
||||
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, result, 0), 1);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, result, 1), 2);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, result, 2), 3);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, result, 3), 4);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, result, 0), 1);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, result, 1), 2);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, result, 2), 3);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, result, 3), 4);
|
||||
return 1;
|
||||
}
|
||||
|
||||
@@ -772,11 +769,11 @@ TEST(array_sort_numbers) {
|
||||
JS_GetLength(ctx, sorted, &len);
|
||||
ASSERT(len == 5);
|
||||
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, sorted, 0), 10);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, sorted, 1), 20);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, sorted, 2), 30);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, sorted, 3), 40);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, sorted, 4), 50);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, sorted, 0), 10);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, sorted, 1), 20);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, sorted, 2), 30);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, sorted, 3), 40);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, sorted, 4), 50);
|
||||
return 1;
|
||||
}
|
||||
|
||||
@@ -840,8 +837,8 @@ TEST(array_filter_basic) {
|
||||
JS_GetLength(ctx, filtered, &len);
|
||||
ASSERT(len == 5); /* 6, 7, 8, 9, 10 */
|
||||
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, filtered, 0), 6);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, filtered, 4), 10);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, filtered, 0), 6);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, filtered, 4), 10);
|
||||
return 1;
|
||||
}
|
||||
|
||||
@@ -865,11 +862,11 @@ TEST(array_filter_even) {
|
||||
JS_GetLength(ctx, filtered, &len);
|
||||
ASSERT(len == 5); /* 2, 4, 6, 8, 10 */
|
||||
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, filtered, 0), 2);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, filtered, 1), 4);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, filtered, 2), 6);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, filtered, 3), 8);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, filtered, 4), 10);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, filtered, 0), 2);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, filtered, 1), 4);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, filtered, 2), 6);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, filtered, 3), 8);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, filtered, 4), 10);
|
||||
return 1;
|
||||
}
|
||||
|
||||
@@ -893,9 +890,9 @@ TEST(array_map_double) {
|
||||
JS_GetLength(ctx, mapped, &len);
|
||||
ASSERT(len == 3);
|
||||
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, mapped, 0), 2);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, mapped, 1), 4);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, mapped, 2), 6);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, mapped, 0), 2);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, mapped, 1), 4);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, mapped, 2), 6);
|
||||
return 1;
|
||||
}
|
||||
|
||||
@@ -1356,9 +1353,9 @@ TEST(cell_reverse_array) {
|
||||
JSValue reversed = JS_CellReverse(ctx, arr_ref.val);
|
||||
JS_PopGCRef(ctx, &arr_ref);
|
||||
ASSERT(JS_IsArray(reversed));
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, reversed, 0), 3);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, reversed, 1), 2);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, reversed, 2), 1);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, reversed, 0), 3);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, reversed, 1), 2);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, reversed, 2), 1);
|
||||
return 1;
|
||||
}
|
||||
|
||||
@@ -1408,9 +1405,9 @@ TEST(parse_json_array) {
|
||||
int64_t len;
|
||||
JS_GetLength(ctx, arr, &len);
|
||||
ASSERT(len == 3);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, arr, 0), 1);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, arr, 1), 2);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, arr, 2), 3);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, arr, 0), 1);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, arr, 1), 2);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, arr, 2), 3);
|
||||
return 1;
|
||||
}
|
||||
|
||||
@@ -1565,12 +1562,12 @@ TEST(property_type_restrictions) {
|
||||
|
||||
/* Setting numeric properties on non-arrays should throw */
|
||||
JSValue v100 = JS_NewInt32(ctx, 100);
|
||||
int ret1 = JS_SetPropertyUint32(ctx, obj_ref.val, 0, v100);
|
||||
int ret1 = JS_IsException(JS_SetPropertyNumber(ctx, obj_ref.val, 0, v100)) ? -1 : 0;
|
||||
int has_exc1 = JS_HasException(ctx);
|
||||
JS_GetException(ctx); /* Clear the exception */
|
||||
|
||||
/* Getting numeric properties on objects should return null */
|
||||
JSValue v0 = JS_GetPropertyUint32(ctx, obj_ref.val, 0);
|
||||
JSValue v0 = JS_GetPropertyNumber(ctx, obj_ref.val, 0);
|
||||
int v0_is_null = JS_IsNull(v0);
|
||||
|
||||
/* Getting text keys from arrays should return null */
|
||||
@@ -1646,8 +1643,8 @@ TEST(new_array_from) {
|
||||
int64_t len;
|
||||
JS_GetLength(ctx, arr, &len);
|
||||
ASSERT(len == 4);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, arr, 0), 10);
|
||||
ASSERT_INT(JS_GetPropertyUint32(ctx, arr, 3), 40);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, arr, 0), 10);
|
||||
ASSERT_INT(JS_GetPropertyNumber(ctx, arr, 3), 40);
|
||||
return 1;
|
||||
}
|
||||
|
||||
@@ -1833,9 +1830,7 @@ TEST(is_function_check) {
|
||||
TEST(is_integer_vs_number) {
|
||||
JSValue i = JS_NewInt32(ctx, 42);
|
||||
JSValue f = JS_NewFloat64(ctx, 3.14);
|
||||
ASSERT(JS_IsInteger(i));
|
||||
ASSERT(JS_IsInt(i));
|
||||
ASSERT(!JS_IsInteger(f));
|
||||
ASSERT(!JS_IsInt(f));
|
||||
ASSERT(JS_IsNumber(i));
|
||||
ASSERT(JS_IsNumber(f));
|
||||
@@ -1983,9 +1978,9 @@ TEST(wota_encode_nested_array) {
|
||||
int is_arr = JS_IsArray(decoded);
|
||||
int64_t len;
|
||||
JS_GetLength(ctx, decoded, &len);
|
||||
JSValue v0 = JS_GetPropertyUint32(ctx, decoded, 0);
|
||||
JSValue v2 = JS_GetPropertyUint32(ctx, decoded, 2);
|
||||
JSValue inner = JS_GetPropertyUint32(ctx, decoded, 1);
|
||||
JSValue v0 = JS_GetPropertyNumber(ctx, decoded, 0);
|
||||
JSValue v2 = JS_GetPropertyNumber(ctx, decoded, 2);
|
||||
JSValue inner = JS_GetPropertyNumber(ctx, decoded, 1);
|
||||
int inner_is_arr = JS_IsArray(inner);
|
||||
int64_t inner_len;
|
||||
JS_GetLength(ctx, inner, &inner_len);
|
||||
|
||||
19
streamline.ce
Normal file
19
streamline.ce
Normal file
@@ -0,0 +1,19 @@
|
||||
// streamline.ce — run the full compile + optimize pipeline, output JSON
|
||||
//
|
||||
// Usage: ./cell --core . streamline.ce <file.ce|file.cm>
|
||||
|
||||
var fd = use("fd")
|
||||
var json = use("json")
|
||||
var tokenize = use("tokenize")
|
||||
var parse = use("parse")
|
||||
var fold = use("fold")
|
||||
var mcode = use("mcode")
|
||||
var streamline = use("streamline")
|
||||
var filename = args[0]
|
||||
var src = text(fd.slurp(filename))
|
||||
var result = tokenize(src, filename)
|
||||
var ast = parse(result.tokens, src, filename, tokenize)
|
||||
var folded = fold(ast)
|
||||
var compiled = mcode(folded)
|
||||
var optimized = streamline(compiled)
|
||||
print(json.encode(optimized))
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user