81 Commits

Author SHA1 Message Date
John Alanbrook
a1b41d5ecf rm push/pop 2026-02-26 08:13:18 -06:00
John Alanbrook
eb19b18594 slow messags 2026-02-26 00:56:43 -06:00
John Alanbrook
e203700d37 Merge branch 'fix_log_err' 2026-02-25 23:30:36 -06:00
John Alanbrook
c56444556d fix log in engine err 2026-02-25 23:30:32 -06:00
John Alanbrook
080e675d18 better update output 2026-02-25 23:29:37 -06:00
John Alanbrook
957b964d9d better disrupt message on fd 2026-02-25 20:38:34 -06:00
John Alanbrook
fa9d2609b1 fix fd bug 2026-02-25 20:25:36 -06:00
John Alanbrook
e38c2f07bf Merge branch 'async_fd' 2026-02-25 17:43:12 -06:00
John Alanbrook
ecc1777b24 async cellfs 2026-02-25 17:43:01 -06:00
John Alanbrook
1cfd5b8133 add hot reload to util 2026-02-25 17:28:11 -06:00
John Alanbrook
9c1141f408 Merge branch 'async_http' 2026-02-25 16:39:16 -06:00
John Alanbrook
696cca530b internal pronto 2026-02-25 16:39:12 -06:00
John Alanbrook
c92a4087a6 Merge branch 'async_fd' 2026-02-25 16:24:20 -06:00
John Alanbrook
01637c49b0 fix sendmessage 2026-02-25 16:24:08 -06:00
John Alanbrook
f9e660ebaa better log 2026-02-25 16:07:39 -06:00
John Alanbrook
4f8fada57d Merge branch 'audit_build' 2026-02-25 16:06:17 -06:00
John Alanbrook
adcaa92bea better logging for compiling 2026-02-25 16:05:37 -06:00
John Alanbrook
fc36707b39 Merge branch 'async_fd' 2026-02-25 16:03:55 -06:00
John Alanbrook
7cb8ce7945 async fd 2026-02-25 16:03:52 -06:00
John Alanbrook
bb7997a751 fix sendmessage 2026-02-25 15:26:20 -06:00
John Alanbrook
327b990442 Merge branch 'master' into async_http 2026-02-25 14:48:46 -06:00
John Alanbrook
51c0a0b306 tls and http 2026-02-25 14:48:37 -06:00
John Alanbrook
8ac82016dd api for sending wota messages direct 2026-02-25 12:45:02 -06:00
John Alanbrook
d0bf757d91 remove dupavlue and freevalue 2026-02-25 09:40:58 -06:00
John Alanbrook
c77f1f8639 move general http business to http and out of the probe cli 2026-02-24 21:06:43 -06:00
John Alanbrook
2b877e6b0c add http.cm and probe 2026-02-24 21:04:03 -06:00
John Alanbrook
3d4c0ec3d3 more comprehensive C suite 2026-02-24 17:51:36 -06:00
John Alanbrook
33d9013409 fix tests; add comprehensive tests for functions and fix bugs in the mach VM regarding them. 2026-02-24 17:41:18 -06:00
John Alanbrook
c2f57d1dae fix tests 2026-02-24 16:55:07 -06:00
John Alanbrook
7bd17c6476 Merge branch 'native_boot' 2026-02-23 19:07:07 -06:00
John Alanbrook
5ac1620b48 Merge branch 'fix_missing_stop' 2026-02-23 19:02:03 -06:00
John Alanbrook
124c9536b4 no more implicit returning in programs and mdouels 2026-02-23 19:01:58 -06:00
John Alanbrook
940807c37a parallel compiling; no more var hoisting; audit reports function hoisting 2026-02-23 18:57:47 -06:00
John Alanbrook
70f560550f rm tests 2026-02-23 18:16:07 -06:00
John Alanbrook
060a494f47 Merge branch 'fix_missing_stop' 2026-02-23 18:09:41 -06:00
John Alanbrook
6812d3edbc enet portal works 2026-02-23 18:09:35 -06:00
John Alanbrook
4da15d2a3e refactor 2026-02-23 18:08:13 -06:00
John Alanbrook
a34566a0c1 further refactor 2026-02-23 17:22:23 -06:00
John Alanbrook
9f7d861932 Merge branch 'audit_c_api' 2026-02-23 16:54:25 -06:00
John Alanbrook
c5536697ff merge cell 2026-02-23 16:54:19 -06:00
John Alanbrook
d066ab03cd correct caching 2026-02-23 13:37:11 -06:00
John Alanbrook
9c1cb43c7d fix repeated loads 2026-02-23 12:39:55 -06:00
John Alanbrook
99fb575c9c Merge branch 'native_boot' 2026-02-23 11:20:41 -06:00
John Alanbrook
193991c532 canonical paths 2026-02-23 11:18:08 -06:00
John Alanbrook
76552c6854 faster boot by refactor qbe_emit 2026-02-23 11:16:13 -06:00
John Alanbrook
f26b6e853d fix string indexing in native 2026-02-23 10:39:49 -06:00
John Alanbrook
94c28f0e17 more native without fallback 2026-02-23 10:20:18 -06:00
John Alanbrook
a18584afd3 Merge branch 'native_boot' 2026-02-23 09:18:31 -06:00
John Alanbrook
3f6cfad7ef fix gc scanning for env on native fns 2026-02-23 09:18:25 -06:00
John Alanbrook
b03edb0d90 Merge branch 'native_boot' 2026-02-22 20:48:19 -06:00
John Alanbrook
62440d3ed6 trace 2026-02-22 20:48:17 -06:00
John Alanbrook
4edc4b7cc5 native boot 2026-02-22 20:47:26 -06:00
John Alanbrook
012b507415 add -e flag 2026-02-22 11:24:27 -06:00
John Alanbrook
ee6398ada9 Merge branch 'master' into fuse_bug 2026-02-22 10:51:07 -06:00
John Alanbrook
173438e8bc add tests 2026-02-22 10:48:09 -06:00
John Alanbrook
7ac5ac63d2 Merge branch 'array_push_bug' 2026-02-22 10:35:00 -06:00
John Alanbrook
a05f180356 fix fuse bug 2026-02-22 10:34:55 -06:00
John Alanbrook
d88692cd30 fix inline issue 2026-02-22 10:31:15 -06:00
John Alanbrook
b0ac5de7e2 more comprehensive vm_suite 2026-02-22 10:08:40 -06:00
John Alanbrook
1d4fc11772 Merge remote-tracking branch 'origin/master' 2026-02-22 09:04:03 -06:00
John Alanbrook
7372b80e07 wary jumps 2026-02-21 20:58:24 -06:00
John Alanbrook
d27047dd82 Merge branch 'optimize_mcode' into fix_aot 2026-02-21 20:42:25 -06:00
John Alanbrook
8e96379377 wary booleans 2026-02-21 20:42:17 -06:00
John Alanbrook
8f415fea80 Merge branch 'optimize_mcode' 2026-02-21 19:45:14 -06:00
John Alanbrook
cec0b99207 correct apply check and add apply opcode 2026-02-21 19:43:05 -06:00
John Alanbrook
2d4645da9c Merge branch 'optimize_mcode' 2026-02-21 19:42:19 -06:00
John Alanbrook
017b63ba80 inline intrinsics 2026-02-21 19:23:53 -06:00
John Alanbrook
99fa86a09c asserts only for frame gets 2026-02-21 19:06:41 -06:00
John Alanbrook
6d6b53009f hash fingerprint for copmile chain 2026-02-21 17:25:14 -06:00
John Alanbrook
cc7fc6b667 fix inlining default params issue 2026-02-21 17:03:00 -06:00
John Alanbrook
bbeb757e40 faster 2026-02-21 16:23:44 -06:00
John Alanbrook
2ac446f7cf inline fns 2026-02-21 15:05:57 -06:00
John Alanbrook
517bd64275 Merge branch 'optimize_mcode' into fix_aot 2026-02-21 14:13:51 -06:00
John Alanbrook
f7e3c0803c Merge branch 'master' into fix_aot 2026-02-21 13:31:33 -06:00
John Alanbrook
e7ed6bd8b2 Merge branch 'optimize_mcode' 2026-02-21 04:13:29 -06:00
John Alanbrook
071aa33153 Merge branch 'optimize_mcode' 2026-02-21 03:38:38 -06:00
John Alanbrook
d041c49972 Merge branch 'optimize_mcode' 2026-02-21 03:38:34 -06:00
John Alanbrook
eadad194be doc gc bugs 2026-02-21 03:01:26 -06:00
John Alanbrook
81c88f9439 gx fices 2026-02-21 02:37:30 -06:00
John Alanbrook
20c2576fa7 working link 2026-02-21 02:18:42 -06:00
John Alanbrook
65fa37cc03 fix 2026-02-19 03:12:58 -06:00
138 changed files with 95347 additions and 51294 deletions

5
.gitignore vendored
View File

@@ -1,7 +1,12 @@
.git/
.obj/
website/public/
website/site/
website/.hugo_build.lock
.cache
.cell
cell
libcell_runtime*
bin/
build/
*.zip

View File

@@ -57,7 +57,7 @@ The creator functions are **polymorphic** — behavior depends on argument types
- `record(record, another)` — merge
- `record(array_of_keys)` — create record from keys
Other key intrinsics: `length()`, `stone()`, `is_stone()`, `print()`, `filter()`, `find()`, `reduce()`, `sort()`, `reverse()`, `some()`, `every()`, `starts_with()`, `ends_with()`, `meme()`, `proto()`, `isa()`, `splat()`, `apply()`, `extract()`, `replace()`, `search()`, `format()`, `lower()`, `upper()`, `trim()`
Other key intrinsics: `object()`, `length()`, `stone()`, `is_stone()`, `filter()`, `find()`, `reduce()`, `sort()`, `reverse()`, `some()`, `every()`, `starts_with()`, `ends_with()`, `meme()`, `proto()`, `isa()`, `splat()`, `apply()`, `extract()`, `replace()`, `search()`, `format()`, `lower()`, `upper()`, `trim()`
Sensory functions: `is_array()`, `is_text()`, `is_number()`, `is_object()`, `is_function()`, `is_null()`, `is_logical()`, `is_integer()`, `is_stone()`, etc.
@@ -124,16 +124,19 @@ This project uses a **copying garbage collector**. ANY JS allocation (`JS_NewObj
JS_FRAME(js);
JS_ROOT(obj, JS_NewObject(js));
JS_SetPropertyStr(js, obj.val, "x", JS_NewInt32(js, 42));
JS_SetPropertyStr(js, obj.val, "name", JS_NewString(js, "hello"));
JSValue name = JS_NewString(js, "hello");
JS_SetPropertyStr(js, obj.val, "name", name);
JS_RETURN(obj.val);
```
**Pattern — array with loop:**
**Pattern — array with loop (declare root BEFORE the loop):**
```c
JS_FRAME(js);
JS_ROOT(arr, JS_NewArray(js));
JSGCRef item = { .val = JS_NULL, .prev = NULL };
JS_PushGCRef(js, &item);
for (int i = 0; i < count; i++) {
JS_ROOT(item, JS_NewObject(js));
item.val = JS_NewObject(js);
JS_SetPropertyStr(js, item.val, "v", JS_NewInt32(js, i));
JS_SetPropertyNumber(js, arr.val, i, item.val);
}
@@ -142,18 +145,28 @@ JS_RETURN(arr.val);
**Rules:**
- Access rooted values via `.val` (e.g., `obj.val`, not `obj`)
- NEVER put `JS_ROOT` inside a loop — it pushes the same stack address twice, corrupting the GC chain
- Error returns before `JS_FRAME` use plain `return`
- Error returns after `JS_FRAME` must use `JS_RETURN_EX()` or `JS_RETURN_NULL()`
- When calling a helper that itself returns a JSValue, that return value is safe to pass directly into `JS_SetPropertyStr` — no need to root temporaries that aren't stored in a local
**Common mistake — UNSAFE (will crash under GC pressure):**
**CRITICAL — C argument evaluation order bug:**
Allocating functions (`JS_NewString`, `JS_NewFloat64`, `js_new_blob_stoned_copy`, etc.) used as arguments to `JS_SetPropertyStr` can crash because C evaluates arguments in unspecified order. The compiler may read `obj.val` BEFORE the allocating call, then GC moves the object, leaving a stale pointer.
```c
JSValue obj = JS_NewObject(js); // NOT rooted
JS_SetPropertyStr(js, obj, "pixels", js_new_blob_stoned_copy(js, data, len));
// ^^^ blob allocation can GC, invalidating obj
return obj; // obj may be a dangling pointer
// UNSAFE — intermittent crash:
JS_SetPropertyStr(js, obj.val, "format", JS_NewString(js, "rgba32"));
JS_SetPropertyStr(js, obj.val, "pixels", js_new_blob_stoned_copy(js, data, len));
// SAFE — separate the allocation:
JSValue fmt = JS_NewString(js, "rgba32");
JS_SetPropertyStr(js, obj.val, "format", fmt);
JSValue pixels = js_new_blob_stoned_copy(js, data, len);
JS_SetPropertyStr(js, obj.val, "pixels", pixels);
```
`JS_NewInt32`, `JS_NewUint32`, and `JS_NewBool` do NOT allocate and are safe inline.
See `docs/c-modules.md` for the full GC safety reference.
## Project Layout
@@ -167,17 +180,19 @@ See `docs/c-modules.md` for the full GC safety reference.
## Package Management (Shop CLI)
When running locally with `./cell --dev`, these commands manage packages:
**Two shops:** `cell <cmd>` uses the global shop at `~/.cell/packages/`. `cell --dev <cmd>` uses the local shop at `.cell/packages/`. Linked packages (via `cell link`) are symlinked into the shop — edit the source directory directly.
```
./cell --dev add <path> # add a package (local path or remote)
./cell --dev remove <path> # remove a package (cleans lock, symlink, dylibs)
./cell --dev build <path> # build C modules for a package
./cell --dev test package <path> # run tests for a package
./cell --dev list # list installed packages
cell add <path> # add a package (local path or remote)
cell remove <path> # remove a package (cleans lock, symlink, dylibs)
cell build <path> # build C modules for a package
cell build <path> --force # force rebuild (ignore stat cache)
cell test package <path> # run tests for a package
cell list # list installed packages
cell link # list linked packages
```
Local paths are symlinked into `.cell/packages/`. The build step compiles C files to content-addressed dylibs in `~/.cell/build/<hash>` and writes a per-package manifest so the runtime can find them. C files in `src/` are support files linked into module dylibs, not standalone modules.
The build step compiles C files to content-addressed dylibs in `~/.cell/build/<hash>` and writes a per-package manifest so the runtime can find them. C files in `src/` are support files linked into module dylibs, not standalone modules.
## Debugging Compiler Issues

View File

@@ -24,7 +24,7 @@ $(BUILD_DBG)/build.ninja:
install: all $(CELL_SHOP)
cp cell $(INSTALL_BIN)/cell
cp libcell_runtime.dylib $(INSTALL_LIB)/
cp source/cell.h source/quickjs.h source/wota.h $(INSTALL_INC)/
cp source/cell.h $(INSTALL_INC)/
rm -rf $(CELL_SHOP)/packages/core
ln -s $(CURDIR) $(CELL_SHOP)/packages/core
@echo "Installed cell to $(INSTALL_BIN) and $(INSTALL_LIB)"
@@ -32,7 +32,7 @@ install: all $(CELL_SHOP)
install_debug: debug $(CELL_SHOP)
cp cell $(INSTALL_BIN)/cell
cp libcell_runtime.dylib $(INSTALL_LIB)/
cp source/cell.h source/quickjs.h source/wota.h $(INSTALL_INC)/
cp source/cell.h $(INSTALL_INC)/
rm -rf $(CELL_SHOP)/packages/core
ln -s $(CURDIR) $(CELL_SHOP)/packages/core
@echo "Installed cell (debug+asan) to $(INSTALL_BIN) and $(INSTALL_LIB)"

View File

@@ -1,6 +1,5 @@
#include "quickjs.h"
#include "miniz.h"
#include "cell.h"
#include "miniz.h"
static JSClassID js_reader_class_id;
static JSClassID js_writer_class_id;
@@ -319,7 +318,6 @@ JSValue js_reader_list(JSContext *js, JSValue self, int argc, JSValue *argv)
JSValue filename = JS_NewString(js, file_stat.m_filename);
if (JS_IsException(filename)) {
JS_FreeValue(js, arr);
return filename;
}
JS_SetPropertyNumber(js, arr, arr_index++, filename);

118
audit.ce
View File

@@ -4,90 +4,146 @@
// cell audit Audit all packages
// cell audit <locator> Audit specific package
// cell audit . Audit current directory package
// cell audit --function-hoist [<locator>] Report function hoisting usage
//
// Compiles every script in the package(s) to check for errors.
// Continues past failures and reports all issues at the end.
var shop = use('internal/shop')
var pkg = use('package')
var fd = use('fd')
var target_package = null
var function_hoist = false
var i = 0
var run = function() {
var packages = null
var tokenize_mod = null
var parse_mod = null
var hoist_files = 0
var hoist_refs = 0
var total_ok = 0
var total_errors = 0
var total_scripts = 0
var all_failures = []
var all_unresolved = []
var summary = null
for (i = 0; i < length(args); i++) {
if (args[i] == '--help' || args[i] == '-h') {
log.console("Usage: cell audit [<locator>]")
log.console("Usage: cell audit [--function-hoist] [<locator>]")
log.console("")
log.console("Test-compile all .ce and .cm scripts in package(s).")
log.console("Reports all errors without stopping at the first failure.")
log.console("")
log.console("Flags:")
log.console(" --function-hoist Report files that rely on function hoisting")
return
} else if (args[i] == '--function-hoist') {
function_hoist = true
} else if (!starts_with(args[i], '-')) {
target_package = args[i]
}
}
// Resolve local paths
if (target_package) {
// Resolve local paths
if (target_package) {
target_package = shop.resolve_locator(target_package)
}
}
var packages = null
var total_ok = 0
var total_errors = 0
var total_scripts = 0
var all_failures = []
var all_unresolved = []
if (target_package) {
if (target_package) {
packages = [target_package]
} else {
} else {
packages = shop.list_packages()
}
}
arrfor(packages, function(p) {
if (function_hoist) {
tokenize_mod = use('tokenize')
parse_mod = use('parse')
arrfor(packages, function(p) {
var scripts = shop.get_package_scripts(p)
var pkg_dir = shop.get_package_dir(p)
if (length(scripts) == 0) return
arrfor(scripts, function(script) {
var src_path = pkg_dir + '/' + script
var src = null
var tok_result = null
var ast = null
var scan = function() {
if (!fd.is_file(src_path)) return
src = text(fd.slurp(src_path))
tok_result = tokenize_mod(src, script)
ast = parse_mod(tok_result.tokens, src, script, tokenize_mod)
if (ast._hoisted_fns != null && length(ast._hoisted_fns) > 0) {
log.console(p + '/' + script + ":")
hoist_files = hoist_files + 1
arrfor(ast._hoisted_fns, function(ref) {
var msg = " " + ref.name
if (ref.line != null) msg = msg + " (ref line " + text(ref.line)
if (ref.decl_line != null) msg = msg + ", declared line " + text(ref.decl_line)
if (ref.line != null) msg = msg + ")"
log.console(msg)
hoist_refs = hoist_refs + 1
})
}
} disruption {
// skip files that fail to parse
}
scan()
})
})
log.console("")
log.console("Summary: " + text(hoist_files) + " files with function hoisting, " + text(hoist_refs) + " total forward references")
return
}
arrfor(packages, function(p) {
var scripts = shop.get_package_scripts(p)
var result = null
var resolution = null
if (length(scripts) == 0) return
log.console("Auditing " + p + " (" + text(length(scripts)) + " scripts)...")
var result = shop.build_package_scripts(p)
result = shop.build_package_scripts(p)
total_ok = total_ok + result.ok
total_errors = total_errors + length(result.errors)
total_scripts = total_scripts + result.total
arrfor(result.errors, function(e) {
push(all_failures, p + ": " + e)
all_failures[] = p + ": " + e
})
// Check use() resolution
var resolution = shop.audit_use_resolution(p)
resolution = shop.audit_use_resolution(p)
arrfor(resolution.unresolved, function(u) {
push(all_unresolved, p + '/' + u.script + ": use('" + u.module + "') cannot be resolved")
all_unresolved[] = p + '/' + u.script + ": use('" + u.module + "') cannot be resolved"
})
})
})
log.console("")
if (length(all_failures) > 0) {
log.console("")
if (length(all_failures) > 0) {
log.console("Failed scripts:")
arrfor(all_failures, function(f) {
log.console(" " + f)
})
log.console("")
}
}
if (length(all_unresolved) > 0) {
if (length(all_unresolved) > 0) {
log.console("Unresolved modules:")
arrfor(all_unresolved, function(u) {
log.console(" " + u)
})
log.console("")
}
}
var summary = "Audit complete: " + text(total_ok) + "/" + text(total_scripts) + " scripts compiled"
if (total_errors > 0) summary = summary + ", " + text(total_errors) + " failed"
if (length(all_unresolved) > 0) summary = summary + ", " + text(length(all_unresolved)) + " unresolved use() calls"
log.console(summary)
summary = "Audit complete: " + text(total_ok) + "/" + text(total_scripts) + " scripts compiled"
if (total_errors > 0) summary = summary + ", " + text(total_errors) + " failed"
if (length(all_unresolved) > 0) summary = summary + ", " + text(length(all_unresolved)) + " unresolved use() calls"
log.console(summary)
}
run()
$stop()

View File

@@ -28,7 +28,7 @@ function strip_mode_flags() {
} else if (a == '--compare') {
bench_mode = "compare"
} else {
push(filtered, a)
filtered[] = a
}
})
_args = filtered
@@ -197,7 +197,7 @@ function collect_benches(package_name, specific_bench) {
match_base = ends_with(match_name, '.cm') ? text(match_name, 0, -3) : match_name
if (bench_name != match_base) return
}
push(bench_files, f)
bench_files[] = f
}
})
return bench_files
@@ -355,7 +355,7 @@ function run_single_bench(bench_fn, bench_name) {
if (teardown_fn) teardown_fn(state)
ns_per_op = is_batch ? duration / batch_size : duration
push(timings_per_op, ns_per_op)
timings_per_op[] = ns_per_op
} else {
start = os.now()
if (is_batch) {
@@ -366,7 +366,7 @@ function run_single_bench(bench_fn, bench_name) {
duration = os.now() - start
ns_per_op = is_batch ? duration / batch_size : duration
push(timings_per_op, ns_per_op)
timings_per_op[] = ns_per_op
}
}
@@ -442,11 +442,11 @@ function load_bench_module(f, package_name, mode) {
function collect_bench_fns(bench_mod) {
var benches = []
if (is_function(bench_mod)) {
push(benches, {name: 'main', fn: bench_mod})
benches[] = {name: 'main', fn: bench_mod}
} else if (is_object(bench_mod)) {
arrfor(array(bench_mod), function(k) {
if (is_function(bench_mod[k]))
push(benches, {name: k, fn: bench_mod[k]})
benches[] = {name: k, fn: bench_mod[k]}
})
}
return benches
@@ -524,7 +524,7 @@ function run_benchmarks(package_name, specific_bench) {
result = run_single_bench(b.fn, b.name)
result.package = pkg_result.package
result.mode = bench_mode == "compare" ? "bytecode" : bench_mode
push(file_result.benchmarks, result)
file_result.benchmarks[] = result
pkg_result.total++
log.console(` ${result.name}`)
@@ -538,7 +538,7 @@ function run_benchmarks(package_name, specific_bench) {
nat_result = run_single_bench(native_benches[nat_b].fn, b.name)
nat_result.package = pkg_result.package
nat_result.mode = "native"
push(file_result.benchmarks, nat_result)
file_result.benchmarks[] = nat_result
pkg_result.total++
print_bench_result(nat_result, "native ")
@@ -570,7 +570,7 @@ function run_benchmarks(package_name, specific_bench) {
name: b.name,
error: "benchmark disrupted"
}
push(file_result.benchmarks, error_result)
file_result.benchmarks[] = error_result
pkg_result.total++
}
})
@@ -586,12 +586,12 @@ function run_benchmarks(package_name, specific_bench) {
name: "load_module",
error: "error loading module"
}
push(file_result.benchmarks, error_result)
file_result.benchmarks[] = error_result
pkg_result.total++
}
if (length(file_result.benchmarks) > 0) {
push(pkg_result.files, file_result)
pkg_result.files[] = file_result
}
})
@@ -604,15 +604,15 @@ var packages = null
if (all_pkgs) {
if (testlib.is_valid_package('.')) {
push(all_results, run_benchmarks(null, null))
all_results[] = run_benchmarks(null, null)
}
packages = shop.list_packages()
arrfor(packages, function(p) {
push(all_results, run_benchmarks(p, null))
all_results[] = run_benchmarks(p, null)
})
} else {
push(all_results, run_benchmarks(target_pkg, target_bench))
all_results[] = run_benchmarks(target_pkg, target_bench)
}
// Calculate totals
@@ -688,7 +688,7 @@ Total benchmarks: ${total_benches}
var pkg_benches = []
arrfor(pkg_res.files, function(f) {
arrfor(f.benchmarks, function(benchmark) {
push(pkg_benches, benchmark)
pkg_benches[] = benchmark
})
})

View File

@@ -1,86 +0,0 @@
// bench_arith.ce — arithmetic and number crunching benchmark
// Tests: integer add/mul, float ops, loop counter overhead, conditionals
var time = use('time')
def iterations = 2000000
// 1. Integer sum in tight loop
function bench_int_sum() {
var i = 0
var s = 0
for (i = 0; i < iterations; i++) {
s = s + i
}
return s
}
// 2. Integer multiply + mod (sieve-like)
function bench_int_mul_mod() {
var i = 0
var s = 0
for (i = 1; i < iterations; i++) {
s = s + (i * 7 % 1000)
}
return s
}
// 3. Float math — accumulate with division
function bench_float_arith() {
var i = 0
var s = 0.5
for (i = 1; i < iterations; i++) {
s = s + 1.0 / i
}
return s
}
// 4. Nested loop with branch (fizzbuzz-like counter)
function bench_branch() {
var i = 0
var fizz = 0
var buzz = 0
var fizzbuzz = 0
for (i = 1; i <= iterations; i++) {
if (i % 15 == 0) {
fizzbuzz = fizzbuzz + 1
} else if (i % 3 == 0) {
fizz = fizz + 1
} else if (i % 5 == 0) {
buzz = buzz + 1
}
}
return fizz + buzz + fizzbuzz
}
// 5. Nested loop (small inner)
function bench_nested() {
var i = 0
var j = 0
var s = 0
def outer = 5000
def inner = 5000
for (i = 0; i < outer; i++) {
for (j = 0; j < inner; j++) {
s = s + 1
}
}
return s
}
// Run each and print timing
function run(name, fn) {
var start = time.number()
var result = fn()
var elapsed = time.number() - start
var ms = whole(elapsed * 100000) / 100
log.console(` ${name}: ${ms} ms (result: ${result})`)
}
log.console("=== Arithmetic Benchmark ===")
log.console(` iterations: ${iterations}`)
run("int_sum ", bench_int_sum)
run("int_mul_mod ", bench_int_mul_mod)
run("float_arith ", bench_float_arith)
run("branch ", bench_branch)
run("nested_loop ", bench_nested)

View File

@@ -1,67 +0,0 @@
// bench_arith.js — arithmetic and number crunching benchmark (QuickJS)
const iterations = 2000000;
function bench_int_sum() {
let s = 0;
for (let i = 0; i < iterations; i++) {
s = s + i;
}
return s;
}
function bench_int_mul_mod() {
let s = 0;
for (let i = 1; i < iterations; i++) {
s = s + (i * 7 % 1000);
}
return s;
}
function bench_float_arith() {
let s = 0.5;
for (let i = 1; i < iterations; i++) {
s = s + 1.0 / i;
}
return s;
}
function bench_branch() {
let fizz = 0, buzz = 0, fizzbuzz = 0;
for (let i = 1; i <= iterations; i++) {
if (i % 15 === 0) {
fizzbuzz = fizzbuzz + 1;
} else if (i % 3 === 0) {
fizz = fizz + 1;
} else if (i % 5 === 0) {
buzz = buzz + 1;
}
}
return fizz + buzz + fizzbuzz;
}
function bench_nested() {
let s = 0;
const outer = 5000, inner = 5000;
for (let i = 0; i < outer; i++) {
for (let j = 0; j < inner; j++) {
s = s + 1;
}
}
return s;
}
function run(name, fn) {
const start = performance.now();
const result = fn();
const elapsed = performance.now() - start;
console.log(` ${name}: ${elapsed.toFixed(2)} ms (result: ${result})`);
}
console.log("=== Arithmetic Benchmark ===");
console.log(` iterations: ${iterations}`);
run("int_sum ", bench_int_sum);
run("int_mul_mod ", bench_int_mul_mod);
run("float_arith ", bench_float_arith);
run("branch ", bench_branch);
run("nested_loop ", bench_nested);

View File

@@ -1,68 +0,0 @@
-- bench_arith.lua — arithmetic and number crunching benchmark (Lua)
local iterations = 2000000
local clock = os.clock
local function bench_int_sum()
local s = 0
for i = 0, iterations - 1 do
s = s + i
end
return s
end
local function bench_int_mul_mod()
local s = 0
for i = 1, iterations - 1 do
s = s + (i * 7 % 1000)
end
return s
end
local function bench_float_arith()
local s = 0.5
for i = 1, iterations - 1 do
s = s + 1.0 / i
end
return s
end
local function bench_branch()
local fizz, buzz, fizzbuzz = 0, 0, 0
for i = 1, iterations do
if i % 15 == 0 then
fizzbuzz = fizzbuzz + 1
elseif i % 3 == 0 then
fizz = fizz + 1
elseif i % 5 == 0 then
buzz = buzz + 1
end
end
return fizz + buzz + fizzbuzz
end
local function bench_nested()
local s = 0
local outer, inner = 5000, 5000
for i = 0, outer - 1 do
for j = 0, inner - 1 do
s = s + 1
end
end
return s
end
local function run(name, fn)
local start = clock()
local result = fn()
local elapsed = (clock() - start) * 1000
print(string.format(" %s: %.2f ms (result: %s)", name, elapsed, tostring(result)))
end
print("=== Arithmetic Benchmark ===")
print(string.format(" iterations: %d", iterations))
run("int_sum ", bench_int_sum)
run("int_mul_mod ", bench_int_mul_mod)
run("float_arith ", bench_float_arith)
run("branch ", bench_branch)
run("nested_loop ", bench_nested)

View File

@@ -1,113 +0,0 @@
// bench_array.ce — array operation benchmark
// Tests: sequential access, push/build, index write, sum reduction, sort
var time = use('time')
def size = 100000
// 1. Build array with push
function bench_push() {
var a = []
var i = 0
for (i = 0; i < size; i++) {
a[] = i
}
return length(a)
}
// 2. Index write into preallocated array
function bench_index_write() {
var a = array(size, 0)
var i = 0
for (i = 0; i < size; i++) {
a[i] = i
}
return a[size - 1]
}
// 3. Sequential read and sum
function bench_seq_read() {
var a = array(size, 0)
var i = 0
for (i = 0; i < size; i++) {
a[i] = i
}
var s = 0
for (i = 0; i < size; i++) {
s = s + a[i]
}
return s
}
// 4. Reverse array in-place
function bench_reverse() {
var a = array(size, 0)
var i = 0
for (i = 0; i < size; i++) {
a[i] = i
}
var lo = 0
var hi = size - 1
var tmp = 0
while (lo < hi) {
tmp = a[lo]
a[lo] = a[hi]
a[hi] = tmp
lo = lo + 1
hi = hi - 1
}
return a[0]
}
// 5. Nested array access (matrix-like, 300x300)
function bench_matrix() {
def n = 300
var mat = array(n, null)
var i = 0
var j = 0
for (i = 0; i < n; i++) {
mat[i] = array(n, 0)
for (j = 0; j < n; j++) {
mat[i][j] = i * n + j
}
}
// sum diagonal
var s = 0
for (i = 0; i < n; i++) {
s = s + mat[i][i]
}
return s
}
// 6. filter-like: count evens
function bench_filter_count() {
var a = array(size, 0)
var i = 0
for (i = 0; i < size; i++) {
a[i] = i
}
var count = 0
for (i = 0; i < size; i++) {
if (a[i] % 2 == 0) {
count = count + 1
}
}
return count
}
function run(name, fn) {
var start = time.number()
var result = fn()
var elapsed = time.number() - start
var ms = whole(elapsed * 100000) / 100
log.console(` ${name}: ${ms} ms (result: ${result})`)
}
log.console("=== Array Benchmark ===")
log.console(` size: ${size}`)
run("push ", bench_push)
run("index_write ", bench_index_write)
run("seq_read_sum ", bench_seq_read)
run("reverse ", bench_reverse)
run("matrix_300 ", bench_matrix)
run("filter_count ", bench_filter_count)

View File

@@ -1,93 +0,0 @@
// bench_array.js — array operation benchmark (QuickJS)
const size = 100000;
function bench_push() {
let a = [];
for (let i = 0; i < size; i++) {
a.push(i);
}
return a.length;
}
function bench_index_write() {
let a = new Array(size).fill(0);
for (let i = 0; i < size; i++) {
a[i] = i;
}
return a[size - 1];
}
function bench_seq_read() {
let a = new Array(size).fill(0);
for (let i = 0; i < size; i++) {
a[i] = i;
}
let s = 0;
for (let i = 0; i < size; i++) {
s = s + a[i];
}
return s;
}
function bench_reverse() {
let a = new Array(size).fill(0);
for (let i = 0; i < size; i++) {
a[i] = i;
}
let lo = 0, hi = size - 1, tmp;
while (lo < hi) {
tmp = a[lo];
a[lo] = a[hi];
a[hi] = tmp;
lo = lo + 1;
hi = hi - 1;
}
return a[0];
}
function bench_matrix() {
const n = 300;
let mat = new Array(n);
for (let i = 0; i < n; i++) {
mat[i] = new Array(n).fill(0);
for (let j = 0; j < n; j++) {
mat[i][j] = i * n + j;
}
}
let s = 0;
for (let i = 0; i < n; i++) {
s = s + mat[i][i];
}
return s;
}
function bench_filter_count() {
let a = new Array(size).fill(0);
for (let i = 0; i < size; i++) {
a[i] = i;
}
let count = 0;
for (let i = 0; i < size; i++) {
if (a[i] % 2 === 0) {
count = count + 1;
}
}
return count;
}
function run(name, fn) {
const start = performance.now();
const result = fn();
const elapsed = performance.now() - start;
console.log(` ${name}: ${elapsed.toFixed(2)} ms (result: ${result})`);
}
console.log("=== Array Benchmark ===");
console.log(` size: ${size}`);
run("push ", bench_push);
run("index_write ", bench_index_write);
run("seq_read_sum ", bench_seq_read);
run("reverse ", bench_reverse);
run("matrix_300 ", bench_matrix);
run("filter_count ", bench_filter_count);

View File

@@ -1,93 +0,0 @@
-- bench_array.lua — array operation benchmark (Lua)
local size = 100000
local clock = os.clock
local function bench_push()
local a = {}
for i = 0, size - 1 do
a[#a + 1] = i
end
return #a
end
local function bench_index_write()
local a = {}
for i = 1, size do a[i] = 0 end
for i = 1, size do
a[i] = i - 1
end
return a[size]
end
local function bench_seq_read()
local a = {}
for i = 1, size do
a[i] = i - 1
end
local s = 0
for i = 1, size do
s = s + a[i]
end
return s
end
local function bench_reverse()
local a = {}
for i = 1, size do
a[i] = i - 1
end
local lo, hi = 1, size
while lo < hi do
a[lo], a[hi] = a[hi], a[lo]
lo = lo + 1
hi = hi - 1
end
return a[1]
end
local function bench_matrix()
local n = 300
local mat = {}
for i = 1, n do
mat[i] = {}
for j = 1, n do
mat[i][j] = (i - 1) * n + (j - 1)
end
end
local s = 0
for i = 1, n do
s = s + mat[i][i]
end
return s
end
local function bench_filter_count()
local a = {}
for i = 1, size do
a[i] = i - 1
end
local count = 0
for i = 1, size do
if a[i] % 2 == 0 then
count = count + 1
end
end
return count
end
local function run(name, fn)
local start = clock()
local result = fn()
local elapsed = (clock() - start) * 1000
print(string.format(" %s: %.2f ms (result: %s)", name, elapsed, tostring(result)))
end
print("=== Array Benchmark ===")
print(string.format(" size: %d", size))
run("push ", bench_push)
run("index_write ", bench_index_write)
run("seq_read_sum ", bench_seq_read)
run("reverse ", bench_reverse)
run("matrix_300 ", bench_matrix)
run("filter_count ", bench_filter_count)

View File

@@ -1,21 +0,0 @@
var time = use('time')
function fib(n) {
if (n < 2) {
return n
}
return fib(n - 1) + fib(n - 2)
}
function run(name, fn) {
var start = time.number()
var result = fn()
var elapsed = time.number() - start
var ms = whole(elapsed * 100000) / 100
log.console(` ${name}: ${ms} ms (result: ${result})`)
}
log.console("=== Cell fib ===")
run("fib(25)", function() { return fib(25) })
run("fib(30)", function() { return fib(30) })
run("fib(35)", function() { return fib(35) })

View File

@@ -1,194 +0,0 @@
// bench_native.ce — compare VM vs native execution speed
//
// Usage:
// cell --dev bench_native.ce <module.cm> [iterations]
//
// Compiles (if needed) and benchmarks a module via both VM and native dylib.
// Reports median/mean timing per benchmark + speedup ratio.
var os = use('internal/os')
var fd = use('fd')
if (length(args) < 1) {
log.bench('usage: cell --dev bench_native.ce <module.cm> [iterations]')
return
}
var file = args[0]
var name = file
if (ends_with(name, '.cm')) {
name = text(name, 0, length(name) - 3)
}
var iterations = 11
if (length(args) > 1) {
iterations = number(args[1])
}
def WARMUP = 3
var safe = replace(replace(name, '/', '_'), '-', '_')
var symbol = 'js_' + safe + '_use'
var dylib_path = './' + file + '.dylib'
// --- Statistics ---
var stat_sort = function(arr) {
return sort(arr)
}
var stat_median = function(arr) {
if (length(arr) == 0) return 0
var sorted = stat_sort(arr)
var mid = floor(length(arr) / 2)
if (length(arr) % 2 == 0) {
return (sorted[mid - 1] + sorted[mid]) / 2
}
return sorted[mid]
}
var stat_mean = function(arr) {
if (length(arr) == 0) return 0
var sum = reduce(arr, function(a, b) { return a + b })
return sum / length(arr)
}
var format_ns = function(ns) {
if (ns < 1000) return text(round(ns)) + 'ns'
if (ns < 1000000) return text(round(ns / 1000 * 100) / 100) + 'us'
if (ns < 1000000000) return text(round(ns / 1000000 * 100) / 100) + 'ms'
return text(round(ns / 1000000000 * 100) / 100) + 's'
}
// --- Collect benchmarks from module ---
var collect_benches = function(mod) {
var benches = []
var keys = null
var i = 0
var k = null
if (is_function(mod)) {
push(benches, {name: 'main', fn: mod})
} else if (is_object(mod)) {
keys = array(mod)
i = 0
while (i < length(keys)) {
k = keys[i]
if (is_function(mod[k])) {
push(benches, {name: k, fn: mod[k]})
}
i = i + 1
}
}
return benches
}
// --- Run one benchmark function ---
var run_bench = function(fn, label) {
var samples = []
var i = 0
var t1 = 0
var t2 = 0
// warmup
i = 0
while (i < WARMUP) {
fn(1)
i = i + 1
}
// collect samples
i = 0
while (i < iterations) {
t1 = os.now()
fn(1)
t2 = os.now()
push(samples, t2 - t1)
i = i + 1
}
return {
label: label,
median: stat_median(samples),
mean: stat_mean(samples)
}
}
// --- Load VM module ---
log.bench('loading VM module: ' + file)
var vm_mod = use(name)
var vm_benches = collect_benches(vm_mod)
if (length(vm_benches) == 0) {
log.bench('no benchmarkable functions found in ' + file)
return
}
// --- Load native module ---
var native_mod = null
var native_benches = []
var has_native = fd.is_file(dylib_path)
var lib = null
if (has_native) {
log.bench('loading native module: ' + dylib_path)
lib = os.dylib_open(dylib_path)
native_mod = os.dylib_symbol(lib, symbol)
native_benches = collect_benches(native_mod)
} else {
log.bench('no ' + dylib_path + ' found -- VM-only benchmarking')
log.bench(' hint: cell --dev compile.ce ' + file)
}
// --- Run benchmarks ---
log.bench('')
log.bench('samples: ' + text(iterations) + ' (warmup: ' + text(WARMUP) + ')')
log.bench('')
var pad = function(s, n) {
var result = s
while (length(result) < n) result = result + ' '
return result
}
var i = 0
var b = null
var vm_result = null
var j = 0
var found = false
var nat_result = null
var speedup = 0
while (i < length(vm_benches)) {
b = vm_benches[i]
vm_result = run_bench(b.fn, 'vm')
log.bench(pad(b.name, 20) + ' VM: ' + pad(format_ns(vm_result.median), 12) + ' (median) ' + format_ns(vm_result.mean) + ' (mean)')
// find matching native bench
j = 0
found = false
while (j < length(native_benches)) {
if (native_benches[j].name == b.name) {
nat_result = run_bench(native_benches[j].fn, 'native')
log.bench(pad('', 20) + ' NT: ' + pad(format_ns(nat_result.median), 12) + ' (median) ' + format_ns(nat_result.mean) + ' (mean)')
if (nat_result.median > 0) {
speedup = vm_result.median / nat_result.median
log.bench(pad('', 20) + ' speedup: ' + text(round(speedup * 100) / 100) + 'x')
}
found = true
}
j = j + 1
}
if (has_native && !found) {
log.bench(pad('', 20) + ' NT: (no matching function)')
}
log.bench('')
i = i + 1
}

View File

@@ -1,118 +0,0 @@
// bench_object.ce — object/record and string benchmark
// Tests: property read/write, string concat, string interpolation, method-like dispatch
var time = use('time')
def iterations = 200000
// 1. Record create + property write
function bench_record_create() {
var i = 0
var r = null
for (i = 0; i < iterations; i++) {
r = {x: i, y: i + 1, z: i + 2}
}
return r.z
}
// 2. Property read in loop
function bench_prop_read() {
var obj = {x: 10, y: 20, z: 30, w: 40}
var i = 0
var s = 0
for (i = 0; i < iterations; i++) {
s = s + obj.x + obj.y + obj.z + obj.w
}
return s
}
// 3. Dynamic property access (computed keys)
function bench_dynamic_prop() {
var obj = {a: 1, b: 2, c: 3, d: 4, e: 5}
var keys = ["a", "b", "c", "d", "e"]
var i = 0
var j = 0
var s = 0
for (i = 0; i < iterations; i++) {
for (j = 0; j < 5; j++) {
s = s + obj[keys[j]]
}
}
return s
}
// 4. String concatenation
function bench_string_concat() {
var i = 0
var s = ""
def n = 10000
for (i = 0; i < n; i++) {
s = s + "x"
}
return length(s)
}
// 5. String interpolation
function bench_interpolation() {
var i = 0
var s = ""
def n = 50000
for (i = 0; i < n; i++) {
s = `item_${i}`
}
return s
}
// 6. Prototype chain / method-like call
function make_point(x, y) {
return {
x: x,
y: y,
sum: function(self) {
return self.x + self.y
}
}
}
function bench_method_call() {
var p = make_point(3, 4)
var i = 0
var s = 0
for (i = 0; i < iterations; i++) {
s = s + p.sum(p)
}
return s
}
// 7. Function call overhead (simple recursion depth)
function fib(n) {
if (n <= 1) return n
return fib(n - 1) + fib(n - 2)
}
function bench_fncall() {
var i = 0
var s = 0
for (i = 0; i < 20; i++) {
s = s + fib(25)
}
return s
}
function run(name, fn) {
var start = time.number()
var result = fn()
var elapsed = time.number() - start
var ms = whole(elapsed * 100000) / 100
log.console(` ${name}: ${ms} ms (result: ${result})`)
}
log.console("=== Object / String / Call Benchmark ===")
log.console(` iterations: ${iterations}`)
run("record_create ", bench_record_create)
run("prop_read ", bench_prop_read)
run("dynamic_prop ", bench_dynamic_prop)
run("string_concat ", bench_string_concat)
run("interpolation ", bench_interpolation)
run("method_call ", bench_method_call)
run("fncall_fib25 ", bench_fncall)

View File

@@ -1,99 +0,0 @@
// bench_object.js — object/string/call benchmark (QuickJS)
const iterations = 200000;
function bench_record_create() {
let r;
for (let i = 0; i < iterations; i++) {
r = {x: i, y: i + 1, z: i + 2};
}
return r.z;
}
function bench_prop_read() {
const obj = {x: 10, y: 20, z: 30, w: 40};
let s = 0;
for (let i = 0; i < iterations; i++) {
s = s + obj.x + obj.y + obj.z + obj.w;
}
return s;
}
function bench_dynamic_prop() {
const obj = {a: 1, b: 2, c: 3, d: 4, e: 5};
const keys = ["a", "b", "c", "d", "e"];
let s = 0;
for (let i = 0; i < iterations; i++) {
for (let j = 0; j < 5; j++) {
s = s + obj[keys[j]];
}
}
return s;
}
function bench_string_concat() {
let s = "";
const n = 10000;
for (let i = 0; i < n; i++) {
s = s + "x";
}
return s.length;
}
function bench_interpolation() {
let s = "";
const n = 50000;
for (let i = 0; i < n; i++) {
s = `item_${i}`;
}
return s;
}
function make_point(x, y) {
return {
x: x,
y: y,
sum: function(self) {
return self.x + self.y;
}
};
}
function bench_method_call() {
const p = make_point(3, 4);
let s = 0;
for (let i = 0; i < iterations; i++) {
s = s + p.sum(p);
}
return s;
}
function fib(n) {
if (n <= 1) return n;
return fib(n - 1) + fib(n - 2);
}
function bench_fncall() {
let s = 0;
for (let i = 0; i < 20; i++) {
s = s + fib(25);
}
return s;
}
function run(name, fn) {
const start = performance.now();
const result = fn();
const elapsed = performance.now() - start;
console.log(` ${name}: ${elapsed.toFixed(2)} ms (result: ${result})`);
}
console.log("=== Object / String / Call Benchmark ===");
console.log(` iterations: ${iterations}`);
run("record_create ", bench_record_create);
run("prop_read ", bench_prop_read);
run("dynamic_prop ", bench_dynamic_prop);
run("string_concat ", bench_string_concat);
run("interpolation ", bench_interpolation);
run("method_call ", bench_method_call);
run("fncall_fib25 ", bench_fncall);

View File

@@ -1,101 +0,0 @@
-- bench_object.lua — object/string/call benchmark (Lua)
local iterations = 200000
local clock = os.clock
local function bench_record_create()
local r
for i = 0, iterations - 1 do
r = {x = i, y = i + 1, z = i + 2}
end
return r.z
end
local function bench_prop_read()
local obj = {x = 10, y = 20, z = 30, w = 40}
local s = 0
for i = 0, iterations - 1 do
s = s + obj.x + obj.y + obj.z + obj.w
end
return s
end
local function bench_dynamic_prop()
local obj = {a = 1, b = 2, c = 3, d = 4, e = 5}
local keys = {"a", "b", "c", "d", "e"}
local s = 0
for i = 0, iterations - 1 do
for j = 1, 5 do
s = s + obj[keys[j]]
end
end
return s
end
local function bench_string_concat()
local parts = {}
local n = 10000
for i = 1, n do
parts[i] = "x"
end
local s = table.concat(parts)
return #s
end
local function bench_interpolation()
local s = ""
local n = 50000
for i = 0, n - 1 do
s = string.format("item_%d", i)
end
return s
end
local function make_point(x, y)
return {
x = x,
y = y,
sum = function(self)
return self.x + self.y
end
}
end
local function bench_method_call()
local p = make_point(3, 4)
local s = 0
for i = 0, iterations - 1 do
s = s + p.sum(p)
end
return s
end
local function fib(n)
if n <= 1 then return n end
return fib(n - 1) + fib(n - 2)
end
local function bench_fncall()
local s = 0
for i = 0, 19 do
s = s + fib(25)
end
return s
end
local function run(name, fn)
local start = clock()
local result = fn()
local elapsed = (clock() - start) * 1000
print(string.format(" %s: %.2f ms (result: %s)", name, elapsed, tostring(result)))
end
print("=== Object / String / Call Benchmark ===")
print(string.format(" iterations: %d", iterations))
run("record_create ", bench_record_create)
run("prop_read ", bench_prop_read)
run("dynamic_prop ", bench_dynamic_prop)
run("string_concat ", bench_string_concat)
run("interpolation ", bench_interpolation)
run("method_call ", bench_method_call)
run("fncall_fib25 ", bench_fncall)

View File

@@ -27,13 +27,13 @@ function send(mailbox, msg) {
function receive(mailbox) {
if (length(mailbox.queue) == 0) return null
mailbox.delivered++
return pop(mailbox.queue)
return mailbox.queue[]
}
function drain(mailbox) {
var count = 0
while (length(mailbox.queue) > 0) {
pop(mailbox.queue)
mailbox.queue[]
count++
}
return count

View File

@@ -13,13 +13,13 @@ function generate_records(n) {
var dept_vals = ["eng", "sales", "ops", "hr", "marketing"]
for (i = 0; i < n; i++) {
x = ((x * 1103515245 + 12345) & 0x7FFFFFFF) | 0
push(records, {
records[] = {
id: i + 1,
name: `user_${i}`,
score: (x % 1000) / 10,
status: status_vals[i % 4],
department: dept_vals[i % 5]
})
}
}
return records
}
@@ -30,7 +30,7 @@ function filter_records(records, field, value) {
var i = 0
for (i = 0; i < length(records); i++) {
if (records[i][field] == value) {
push(result, records[i])
result[] = records[i]
}
}
return result
@@ -45,7 +45,7 @@ function group_by(records, field) {
key = records[i][field]
if (!key) key = "unknown"
if (!groups[key]) groups[key] = []
push(groups[key], records[i])
groups[key][] = records[i]
}
return groups
}
@@ -70,13 +70,13 @@ function aggregate(groups) {
if (grp[j].score < mn) mn = grp[j].score
if (grp[j].score > mx) mx = grp[j].score
}
push(result, {
result[] = {
group: keys[i],
count: length(grp),
average: total / length(grp),
low: mn,
high: mx
})
}
}
return result
}

View File

@@ -57,7 +57,7 @@ function build_chain(n) {
var constraints = []
var i = 0
for (i = 0; i < n; i++) {
push(vars, make_variable(`v${i}`, 0))
vars[] = make_variable(`v${i}`, 0)
}
// Set first variable
@@ -69,8 +69,8 @@ function build_chain(n) {
self.variables[1].value = self.variables[0].value + 1
self.output = self.variables[1]
})
push(constraints, c)
push(vars[i].constraints, c)
constraints[] = c
vars[i].constraints[] = c
}
return {vars: vars, constraints: constraints}
@@ -83,8 +83,8 @@ function build_projection(n) {
var constraints = []
var i = 0
for (i = 0; i < n; i++) {
push(src, make_variable(`src${i}`, i * 10))
push(dst, make_variable(`dst${i}`, 0))
src[] = make_variable(`src${i}`, i * 10)
dst[] = make_variable(`dst${i}`, 0)
}
var scale_c = null
@@ -93,8 +93,8 @@ function build_projection(n) {
self.variables[1].value = self.variables[0].value * 2 + 1
self.output = self.variables[1]
})
push(constraints, scale_c)
push(dst[i].constraints, scale_c)
constraints[] = scale_c
dst[i].constraints[] = scale_c
}
return {src: src, dst: dst, constraints: constraints}

View File

@@ -12,7 +12,7 @@ function make_words(count) {
var words = []
var i = 0
for (i = 0; i < count; i++) {
push(words, base_words[i % length(base_words)])
words[] = base_words[i % length(base_words)]
}
return words
}
@@ -39,7 +39,7 @@ function top_n(freq, n) {
var pairs = []
var i = 0
for (i = 0; i < length(keys); i++) {
push(pairs, {word: keys[i], count: freq[keys[i]]})
pairs[] = {word: keys[i], count: freq[keys[i]]}
}
var sorted = sort(pairs, "count")
// Return last N (highest counts)
@@ -47,7 +47,7 @@ function top_n(freq, n) {
var start = length(sorted) - n
if (start < 0) start = 0
for (i = start; i < length(sorted); i++) {
push(result, sorted[i])
result[] = sorted[i]
}
return result
}
@@ -62,7 +62,7 @@ function group_by_length(words) {
w = words[i]
k = text(length(w))
if (!groups[k]) groups[k] = []
push(groups[k], w)
groups[k][] = w
}
return groups
}

View File

@@ -27,13 +27,13 @@ function make_array_data(size) {
var arr = []
var i = 0
for (i = 0; i < size; i++) {
push(arr, {
arr[] = {
id: i,
name: `item_${i}`,
active: i % 2 == 0,
score: i * 1.5,
tags: [`tag_${i % 5}`, `tag_${(i + 1) % 5}`]
})
}
}
return arr
}

302
benches/micro_core.cm Normal file
View File

@@ -0,0 +1,302 @@
// micro_core.cm — direct microbenchmarks for core ops
// Fold x (coerced to int32) into the running sink so every benchmark's
// result is observed and cannot be dead-code-eliminated.
function blackhole(sink, x) {
  var xi = x | 0
  return (sink + xi) | 0
}
// Build a point whose properties are inserted x-first.
function make_obj_xy(x, y) {
  var pt = {x: x, y: y}
  return pt
}
// Same logical point as make_obj_xy but with reversed insertion order
// (y before x), so the runtime sees a distinct object shape.
function make_obj_yx(x, y) {
  var pt = {y: y, x: x}
  return pt
}
// Build a dense (packed, hole-free) array [0, 1, ..., n-1].
function make_packed_array(n) {
  var out = []
  var idx = 0
  while (idx < n) {
    out[] = idx
    idx = idx + 1
  }
  return out
}
// Build a sparse array: only even indices below n are populated, leaving
// holes at the odd indices.
function make_holey_array(n) {
  var out = []
  var k = 0
  while (k < n) {
    out[k] = k
    k = k + 2
  }
  return out
}
return {
  // Exported microbenchmark table.  Every entry is fn(n): run roughly n
  // iterations of one core operation and fold the result through
  // blackhole() so the work is observed and cannot be optimized away.

  // --- loops & arithmetic ---
  loop_empty: function(n) {
    var sink = 0
    var i = 0
    for (i = 0; i < n; i++) {}
    return blackhole(sink, n)
  },
  i32_add: function(n) {
    var sink = 0
    var x = 1
    var i = 0
    for (i = 0; i < n; i++) x = (x + 3) | 0
    return blackhole(sink, x)
  },
  f64_add: function(n) {
    var sink = 0
    var x = 1.0
    var i = 0
    for (i = 0; i < n; i++) x = x + 3.14159
    return blackhole(sink, x | 0)
  },
  mixed_add: function(n) {
    // Starts from an int but accumulates a fractional step: exercises the
    // int -> float transition path.
    var sink = 0
    var x = 1
    var i = 0
    for (i = 0; i < n; i++) x = x + 0.25
    return blackhole(sink, x | 0)
  },
  bit_ops: function(n) {
    var sink = 0
    var x = 0x12345678
    var i = 0
    for (i = 0; i < n; i++) x = ((x << 5) ^ (x >>> 3)) | 0
    return blackhole(sink, x)
  },
  overflow_path: function(n) {
    // Additions that repeatedly leave int32 range; exercises the
    // overflow/renormalization path behind `| 0`.
    var sink = 0
    var x = 0x70000000
    var i = 0
    for (i = 0; i < n; i++) x = (x + 0x10000000) | 0
    return blackhole(sink, x)
  },

  // --- calls & closures ---
  call_direct: function(n) {
    var sink = 0
    var f = function(a) { return (a + 1) | 0 }
    var x = 0
    var i = 0
    for (i = 0; i < n; i++) x = f(x)
    return blackhole(sink, x)
  },
  call_indirect: function(n) {
    // Same callee reached through a second binding.
    var sink = 0
    var f = function(a) { return (a + 1) | 0 }
    var g = f
    var x = 0
    var i = 0
    for (i = 0; i < n; i++) x = g(x)
    return blackhole(sink, x)
  },
  call_closure: function(n) {
    // Callee captures `k` from its defining scope.
    var sink = 0
    var make_adder = function(k) {
      return function(a) { return (a + k) | 0 }
    }
    var add3 = make_adder(3)
    var x = 0
    var i = 0
    for (i = 0; i < n; i++) x = add3(x)
    return blackhole(sink, x)
  },

  // --- array access patterns ---
  array_read_packed: function(n) {
    var sink = 0
    var a = make_packed_array(1024)
    var x = 0
    var i = 0
    for (i = 0; i < n; i++) x = (x + a[i & 1023]) | 0
    return blackhole(sink, x)
  },
  array_write_packed: function(n) {
    var sink = 0
    var a = make_packed_array(1024)
    var i = 0
    for (i = 0; i < n; i++) a[i & 1023] = i
    return blackhole(sink, a[17] | 0)
  },
  array_read_holey: function(n) {
    // Reads hit holes on odd indices; the truthiness check skips them.
    var sink = 0
    var a = make_holey_array(2048)
    var x = 0
    var i = 0
    var v = null
    for (i = 0; i < n; i++) {
      v = a[(i & 2047)]
      if (v) x = (x + v) | 0
    }
    return blackhole(sink, x)
  },
  array_push_steady: function(n) {
    // Appends 256 elements into a fresh array per outer iteration.
    var sink = 0
    var x = 0
    var j = 0
    var i = 0
    var a = null
    for (j = 0; j < n; j++) {
      a = []
      for (i = 0; i < 256; i++) a[] = i
      x = (x + length(a)) | 0
    }
    return blackhole(sink, x)
  },
  array_indexed_sum: function(n) {
    var sink = 0
    var a = make_packed_array(1024)
    var x = 0
    var j = 0
    var i = 0
    for (j = 0; j < n; j++) {
      x = 0
      for (i = 0; i < 1024; i++) {
        x = (x + a[i]) | 0
      }
    }
    return blackhole(sink, x)
  },

  // --- property access (mono/polymorphic shapes) ---
  prop_read_mono: function(n) {
    // Single receiver shape: monomorphic site.
    var sink = 0
    var o = make_obj_xy(1, 2)
    var x = 0
    var i = 0
    for (i = 0; i < n; i++) x = (x + o.x) | 0
    return blackhole(sink, x)
  },
  prop_read_poly_2: function(n) {
    // Alternates between two shapes ({x,y} vs {y,x}).
    var sink = 0
    var a = make_obj_xy(1, 2)
    var b = make_obj_yx(1, 2)
    var x = 0
    var i = 0
    var o = null
    for (i = 0; i < n; i++) {
      o = (i & 1) == 0 ? a : b
      x = (x + o.x) | 0
    }
    return blackhole(sink, x)
  },
  prop_read_poly_4: function(n) {
    // Cycles through four distinct shapes that all carry `x`.
    var sink = 0
    var shapes = [
      {x: 1, y: 2},
      {y: 2, x: 1},
      {x: 1, z: 3, y: 2},
      {w: 0, x: 1, y: 2}
    ]
    var x = 0
    var i = 0
    for (i = 0; i < n; i++) {
      x = (x + shapes[i & 3].x) | 0
    }
    return blackhole(sink, x)
  },

  // --- strings ---
  string_concat_small: function(n) {
    var sink = 0
    var x = 0
    var j = 0
    var i = 0
    var s = null
    for (j = 0; j < n; j++) {
      s = ""
      for (i = 0; i < 16; i++) s = s + "x"
      x = (x + length(s)) | 0
    }
    return blackhole(sink, x)
  },
  string_concat_medium: function(n) {
    var sink = 0
    var x = 0
    var j = 0
    var i = 0
    var s = null
    for (j = 0; j < n; j++) {
      s = ""
      for (i = 0; i < 100; i++) s = s + "abcdefghij"
      x = (x + length(s)) | 0
    }
    return blackhole(sink, x)
  },
  string_slice: function(n) {
    // Slices a sliding 10-char window.  NOTE(review): assumes
    // text(base, start, end) returns the [start, end) substring — confirm
    // against the text() builtin.
    var sink = 0
    var base = "the quick brown fox jumps over the lazy dog"
    var x = 0
    var i = 0
    var s = null
    for (i = 0; i < n; i++) {
      s = text(base, i % 10, i % 10 + 10)
      x = (x + length(s)) | 0
    }
    return blackhole(sink, x)
  },

  // --- type guards ---
  guard_hot_number: function(n) {
    // Operand type never changes: guards should stay hot.
    var sink = 0
    var x = 1
    var i = 0
    for (i = 0; i < n; i++) x = x + 1
    return blackhole(sink, x | 0)
  },
  guard_mixed_types: function(n) {
    // Values alternate number/string, forcing a type check per access.
    var sink = 0
    var vals = [1, "a", 2, "b", 3, "c", 4, "d"]
    var x = 0
    var i = 0
    for (i = 0; i < n; i++) {
      if (is_number(vals[i & 7])) x = (x + vals[i & 7]) | 0
    }
    return blackhole(sink, x)
  },

  // --- higher-order builtins ---
  reduce_sum: function(n) {
    var sink = 0
    var a = make_packed_array(256)
    var x = 0
    var i = 0
    for (i = 0; i < n; i++) {
      x = (x + reduce(a, function(acc, v) { return acc + v }, 0)) | 0
    }
    return blackhole(sink, x)
  },
  filter_evens: function(n) {
    var sink = 0
    var a = make_packed_array(256)
    var x = 0
    var i = 0
    for (i = 0; i < n; i++) {
      x = (x + length(filter(a, function(v) { return v % 2 == 0 }))) | 0
    }
    return blackhole(sink, x)
  },
  arrfor_sum: function(n) {
    // Accumulates through a captured variable mutated by the callback.
    var sink = 0
    var a = make_packed_array(256)
    var x = 0
    var i = 0
    var sum = 0
    for (i = 0; i < n; i++) {
      sum = 0
      arrfor(a, function(v) { sum += v })
      x = (x + sum) | 0
    }
    return blackhole(sink, x)
  }
}

View File

@@ -272,7 +272,7 @@ return {
for (i = 0; i < n; i++) {
push(a, i)
if (length(a) > 64) {
v = pop(a)
v = a[]
x = (x + v) | 0
}
}

View File

@@ -16,21 +16,21 @@ function tokenize(src) {
ch = chars[i]
if (ch == " " || ch == "\n" || ch == "\t") {
if (length(buf) > 0) {
push(tokens, buf)
tokens[] = buf
buf = ""
}
} else if (ch == "(" || ch == ")" || ch == "+" || ch == "-"
|| ch == "*" || ch == "=" || ch == ";" || ch == ",") {
if (length(buf) > 0) {
push(tokens, buf)
tokens[] = buf
buf = ""
}
push(tokens, ch)
tokens[] = ch
} else {
buf = buf + ch
}
}
if (length(buf) > 0) push(tokens, buf)
if (length(buf) > 0) tokens[] = buf
return tokens
}
@@ -49,21 +49,21 @@ function parse_tokens(tokens) {
i++ // skip =
i++
if (i < length(tokens)) node.value = tokens[i]
push(ast, node)
ast[] = node
} else if (tok == "return") {
node = {type: "return", value: null}
i++
if (i < length(tokens)) node.value = tokens[i]
push(ast, node)
ast[] = node
} else if (tok == "function") {
node = {type: "func", name: null, body: []}
i++
if (i < length(tokens)) node.name = tokens[i]
// Skip to matching )
while (i < length(tokens) && tokens[i] != ")") i++
push(ast, node)
ast[] = node
} else {
push(ast, {type: "expr", value: tok})
ast[] = {type: "expr", value: tok}
}
}
return ast
@@ -121,7 +121,7 @@ function simulate_build(n_modules, deps_per_module) {
// Generate all module sources
for (i = 0; i < n_modules; i++) {
src = generate_module(i, deps_per_module)
push(modules, src)
modules[] = src
}
// "Load" each module: tokenize → parse → evaluate
@@ -173,7 +173,7 @@ function topo_sort(n_modules, deps_per_module) {
for (j = 0; j < deps_per_module; j++) {
if (j < i) {
dep = "mod_" + text(j)
push(adj[dep], name)
adj[dep][] = name
in_degree[name] = in_degree[name] + 1
}
}
@@ -183,7 +183,7 @@ function topo_sort(n_modules, deps_per_module) {
var queue = []
var keys = array(in_degree)
for (i = 0; i < length(keys); i++) {
if (in_degree[keys[i]] == 0) push(queue, keys[i])
if (in_degree[keys[i]] == 0) queue[] = keys[i]
}
var order = []
@@ -193,12 +193,12 @@ function topo_sort(n_modules, deps_per_module) {
while (qi < length(queue)) {
current = queue[qi]
qi++
push(order, current)
order[] = current
neighbors = adj[current]
if (neighbors) {
for (i = 0; i < length(neighbors); i++) {
in_degree[neighbors[i]] = in_degree[neighbors[i]] - 1
if (in_degree[neighbors[i]] == 0) push(queue, neighbors[i])
if (in_degree[neighbors[i]] == 0) queue[] = neighbors[i]
}
}
}

View File

@@ -7,7 +7,7 @@ function make_random_array(n, seed) {
var i = 0
for (i = 0; i < n; i++) {
x = ((x * 1103515245 + 12345) & 0x7FFFFFFF) | 0
push(a, x % 10000)
a[] = x % 10000
}
return a
}
@@ -15,7 +15,7 @@ function make_random_array(n, seed) {
function make_descending(n) {
var a = []
var i = 0
for (i = n - 1; i >= 0; i--) push(a, i)
for (i = n - 1; i >= 0; i--) a[] = i
return a
}
@@ -58,19 +58,19 @@ function merge(a, b) {
var j = 0
while (i < length(a) && j < length(b)) {
if (a[i] <= b[j]) {
push(result, a[i])
result[] = a[i]
i++
} else {
push(result, b[j])
result[] = b[j]
j++
}
}
while (i < length(a)) {
push(result, a[i])
result[] = a[i]
i++
}
while (j < length(b)) {
push(result, b[j])
result[] = b[j]
j++
}
return result
@@ -97,7 +97,7 @@ function sort_records(n) {
var i = 0
for (i = 0; i < n; i++) {
x = ((x * 1103515245 + 12345) & 0x7FFFFFFF) | 0
push(records, {id: i, score: x % 10000, name: `item_${i}`})
records[] = {id: i, score: x % 10000, name: `item_${i}`}
}
return sort(records, "score")
}

View File

@@ -23,7 +23,7 @@ function build_index(txt) {
if (!index[w]) {
index[w] = []
}
push(index[w], i)
index[w][] = i
}
return index
}

View File

@@ -48,7 +48,7 @@ function tree_map(node, fn) {
function tree_flatten(node, result) {
if (!node) return null
tree_flatten(node.left, result)
push(result, node.val)
result[] = node.val
tree_flatten(node.right, result)
return null
}
@@ -126,7 +126,7 @@ return {
// Build a balanced BST of 1024 elements
var data = []
var i = 0
for (i = 0; i < 1024; i++) push(data, i)
for (i = 0; i < 1024; i++) data[] = i
var bst = build_balanced(data, 0, 1023)
var found = 0
for (i = 0; i < n; i++) {

View File

@@ -1,20 +0,0 @@
#!/bin/bash
# Run hyperfine with parameter lists
# This will create a cross-product of all libraries × all scenarios
hyperfine \
--warmup 3 \
--runs 20 \
-i \
--export-csv wota_vs_nota_vs_json.csv \
--export-json wota_vs_nota_vs_json.json \
--export-markdown wota_vs_nota_vs_json.md \
--parameter-list lib wota,nota,json \
--parameter-list scen empty,integers,floats,strings,objects,nested,large_array \
'cell benchmarks/wota_nota_json {lib} {scen}'
echo "Benchmark complete! Results saved to:"
echo " - wota_vs_nota_vs_json.csv"
echo " - wota_vs_nota_vs_json.json"
echo " - wota_vs_nota_vs_json.md"

View File

@@ -95,12 +95,12 @@ function benchArrayOps() {
var arr = [];
var j = 0
for (j = 0; j < iterations.medium; j++) {
push(arr, j);
arr[] = j;
}
});
var arr = [];
for (i = 0; i < 10000; i++) push(arr, i);
for (i = 0; i < 10000; i++) arr[] = i;
var accessTime = measureTime(function() {
var sum = 0;
@@ -188,7 +188,7 @@ function benchStringOps() {
});
for (i = 0; i < 1000; i++) {
push(strings, "string" + i);
strings[] = "string" + i;
}
var joinTime = measureTime(function() {
@@ -261,13 +261,13 @@ function benchClosures() {
var funcs = [];
var j = 0
for (j = 0; j < iterations.medium; j++) {
push(funcs, makeAdder(j));
funcs[] = makeAdder(j);
}
});
var adders = [];
for (i = 0; i < 1000; i++) {
push(adders, makeAdder(i));
adders[] = makeAdder(i);
}
var closureCallTime = measureTime(function() {

View File

@@ -15,7 +15,7 @@ var nll = null
var oll = null
for (i = 0; i < 10000; i++) {
accstr += i;
push(newarr, text(i))
newarr[] = text(i)
}
var jsonDecodeTimes = [];
var jsonEncodeTimes = [];
@@ -26,19 +26,19 @@ var notaSizes = [];
for (i = 0; i < 100; i++) {
start = os.now();
jll = json.decode(ll);
push(jsonDecodeTimes, (os.now() - start) * 1000);
jsonDecodeTimes[] = (os.now() - start) * 1000;
start = os.now();
jsonStr = JSON.stringify(jll);
push(jsonEncodeTimes, (os.now() - start) * 1000);
jsonEncodeTimes[] = (os.now() - start) * 1000;
start = os.now();
nll = nota.encode(jll);
push(notaEncodeTimes, (os.now() - start) * 1000);
notaEncodeTimes[] = (os.now() - start) * 1000;
start = os.now();
oll = nota.decode(nll);
push(notaDecodeTimes, (os.now() - start) * 1000);
notaDecodeTimes[] = (os.now() - start) * 1000;
}
function getStats(arr) {

File diff suppressed because it is too large Load Diff

View File

@@ -99,7 +99,7 @@ function runBenchmarkForLibrary(lib, bench) {
for (j = 0; j < length(bench.data); j++) {
e = lib.encode(bench.data[j]);
if (i == 0) {
push(encodedList, e);
encodedList[] = e;
totalSize += lib.getSize(e);
}
}

192
boot.ce Normal file
View File

@@ -0,0 +1,192 @@
// cell boot [--native] <program> - Pre-compile all module dependencies in parallel
//
// Discovers all transitive module dependencies for a program,
// checks which are not yet cached, and compiles uncached ones
// in parallel using worker actors composed via parallel() requestors.
//
// Also used as a child actor by engine.cm for auto-boot.
// Module handles and run-mode state shared by both entry modes.
var shop = use('internal/shop')
var fd = use('fd')
var pkg_tools = use('package')
var build = use('build')
var is_native = false
var target_prog = null
var target_pkg = null
var i = 0
// Child actor mode: receive message from engine.cm
var _child_mode = false
var run_boot = null
// When spawned as a child actor, configuration arrives in a message instead
// of CLI args.  run_boot is assigned further down this file; messages are
// presumably only delivered after the script body runs — TODO confirm.
$receiver(function(msg) {
  _child_mode = true
  is_native = msg.native || false
  target_prog = msg.program
  target_pkg = msg.package
  run_boot()
})
// CLI mode: parse arguments
// Recognizes --native, --help/-h (prints usage and stops), and takes the
// first non-flag argument as the program name.
if (args && length(args) > 0) {
  for (i = 0; i < length(args); i = i + 1) {
    if (args[i] == '--native') {
      is_native = true
    } else if (args[i] == '--help' || args[i] == '-h') {
      log.console("Usage: cell boot [--native] <program>")
      log.console("")
      log.console("Pre-compile all module dependencies for a program.")
      log.console("Uncached modules are compiled in parallel.")
      $stop()
    } else if (!starts_with(args[i], '-')) {
      // Last non-flag argument wins if several are given.
      target_prog = args[i]
    }
  }
  if (!target_prog) {
    log.error("boot: no program specified")
    $stop()
  }
}
// Discover every transitive module dependency of file_path by delegating
// to the shop's dependency tracer.
function discover_deps(file_path) {
  var traced = shop.trace_deps(file_path)
  return traced
}
// Filter out already-cached modules
// Builds a flat work list of {type, ...} items covering everything that
// still needs compiling: scripts (VM or native flavor) plus the individual
// C files of each non-core C package.
function filter_uncached(deps) {
  var uncached = []
  var j = 0
  var s = null
  j = 0
  while (j < length(deps.scripts)) {
    s = deps.scripts[j]
    if (is_native) {
      // Native caching is keyed by (path, package).
      if (!shop.is_native_cached(s.path, s.package)) {
        uncached[] = {type: 'native_script', path: s.path, package: s.package}
      }
    } else {
      if (!shop.is_cached(s.path)) {
        uncached[] = {type: 'script', path: s.path, package: s.package}
      }
    }
    j = j + 1
  }
  // Expand C packages into individual files for parallel compilation
  var target = build.detect_host_target()
  var pkg = null
  var c_files = null
  var k = 0
  j = 0
  while (j < length(deps.c_packages)) {
    pkg = deps.c_packages[j]
    if (pkg != 'core') {
      // NOTE(review): 'core' is skipped here — presumably it is always
      // prebuilt; confirm against the shop/build pipeline.
      c_files = pkg_tools.get_c_files(pkg, target, true)
      k = 0
      while (k < length(c_files)) {
        uncached[] = {type: 'c_file', package: pkg, file: c_files[k]}
        k = k + 1
      }
    }
    j = j + 1
  }
  return uncached
}
// Human-readable label for one work item: prefer the script path, then
// package/file for a C file, then the bare package name.
function item_name(item) {
  var label = item.package
  if (item.file) label = item.package + '/' + item.file
  if (item.path) label = item.path
  return label
}
// Create a requestor that spawns a compile_worker actor for one item
// The returned requestor follows the (callback, value) convention used by
// parallel(): it reports callback(name) when the worker stops normally, or
// callback(null, reason) on disruption, and returns a cancel function that
// stops the worker if it was started.
function make_compile_requestor(item) {
  var worker = null
  var name = item_name(item)
  return function(callback, value) {
    log.console('boot: spawning worker for ' + name)
    $start(function(event) {
      if (event.type == 'greet') {
        // Worker is up: remember it for cancellation, then send the job.
        worker = event.actor
        send(event.actor, {
          type: item.type,
          path: item.path,
          package: item.package,
          file: item.file
        })
      }
      if (event.type == 'stop') {
        callback(name)
      }
      if (event.type == 'disrupt') {
        log.error('boot: worker failed for ' + name)
        callback(null, {message: 'compile failed: ' + name})
      }
    }, 'compile_worker')
    return function cancel(reason) {
      if (worker) $stop(worker)
    }
  }
}
run_boot = function() {
  // Main entry for both CLI and child-actor mode: resolve the program
  // path, trace its dependencies, compile anything uncached in parallel,
  // and finish with $stop() on every exit path.
  var prog_path = null
  var prog_info = null
  var deps = null
  var uncached = null
  var requestors = null
  var p = null
  // Resolve the program path
  if (target_prog) {
    p = target_prog
    // Strip a trailing '.ce'.  NOTE(review): uses text(p, 0, -3) (negative
    // end index) while sibling scripts use length(p) - 3 — confirm both
    // forms are equivalent in this runtime.
    if (ends_with(p, '.ce')) p = text(p, 0, -3)
    prog_info = shop.resolve_program ? shop.resolve_program(p, target_pkg) : null
    if (prog_info) {
      prog_path = prog_info.path
      if (!target_pkg && prog_info.pkg) target_pkg = prog_info.pkg
    } else {
      // Fall back to treating the argument as a literal file path.
      prog_path = p + '.ce'
      if (!fd.is_file(prog_path)) {
        prog_path = null
      }
    }
  }
  if (!prog_path || !fd.is_file(prog_path)) {
    log.error('boot: could not find program: ' + text(target_prog || ''))
    $stop()
    return
  }
  // Discover all transitive deps
  deps = discover_deps(prog_path)
  uncached = filter_uncached(deps)
  if (length(uncached) == 0) {
    log.console('boot: all modules cached')
    $stop()
    return
  }
  // Compile uncached modules in parallel using worker actors
  log.console('boot: ' + text(length(uncached)) + ' modules to compile')
  requestors = array(uncached, make_compile_requestor)
  parallel(requestors)(function(results, reason) {
    if (reason) {
      log.error('boot: ' + (reason.message || text(reason)))
    } else {
      log.console('boot: compiled ' + text(length(results)) + ' modules')
    }
    $stop()
  }, null)
}
// CLI mode: start immediately
// (In child-actor mode the $receiver handler calls run_boot instead.)
if (!_child_mode && target_prog) {
  run_boot()
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

2764
boot/qbe.cm.mcode Normal file

File diff suppressed because it is too large Load Diff

34991
boot/qbe_emit.cm.mcode Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

217
build.cm
View File

@@ -65,7 +65,7 @@ function replace_sigils(str, pkg_dir) {
function replace_sigils_array(flags, pkg_dir) {
var result = []
arrfor(flags, function(flag) {
push(result, replace_sigils(flag, pkg_dir))
result[] = replace_sigils(flag, pkg_dir)
})
return result
}
@@ -179,7 +179,7 @@ function bmfst_save(cmd_str, src_path, deps, obj_path) {
arrfor(deps, function(dep_path) {
var st = memo_stat(dep_path)
if (st)
push(entries, {p: dep_path, m: st.m, s: st.s})
entries[] = {p: dep_path, m: st.m, s: st.s}
})
var mf = {o: obj_path, d: entries}
var mf_path = bmfst_path(cmd_str, src_path)
@@ -191,16 +191,16 @@ function bmfst_save(cmd_str, src_path, deps, obj_path) {
function bmfst_dl_key(setup, link_info) {
var parts = [setup.cmd_str, setup.src_path]
push(parts, 'target:' + text(link_info.target))
push(parts, 'cc:' + text(link_info.cc))
parts[] = 'target:' + text(link_info.target)
parts[] = 'cc:' + text(link_info.cc)
arrfor(link_info.extra_objects, function(obj) {
if (obj != null) push(parts, 'extra:' + text(obj))
if (obj != null) parts[] = 'extra:' + text(obj)
})
arrfor(link_info.ldflags, function(flag) {
push(parts, 'ldflag:' + text(flag))
parts[] = 'ldflag:' + text(flag)
})
arrfor(link_info.target_ldflags, function(flag) {
push(parts, 'target_ldflag:' + text(flag))
parts[] = 'target_ldflag:' + text(flag)
})
return text(parts, '\n')
}
@@ -227,7 +227,7 @@ function bmfst_dl_save(setup, link_info, deps, dylib_path) {
arrfor(deps, function(dep_path) {
var st = memo_stat(dep_path)
if (st)
push(entries, {p: dep_path, m: st.m, s: st.s})
entries[] = {p: dep_path, m: st.m, s: st.s}
})
var mf = {dylib: dylib_path, d: entries}
var mf_path = cache_path(bmfst_dl_key(setup, link_info), SALT_BMFST_DL)
@@ -259,7 +259,7 @@ function get_c_deps(cc, flags, src_path) {
var dep_file = '/tmp/cell_deps_' + content_hash(src_path) + '.d'
var dep_cmd = [cc, '-MM', '-MG', '-MF', '"' + dep_file + '"']
dep_cmd = array(dep_cmd, flags)
push(dep_cmd, '"' + src_path + '"')
dep_cmd[] = '"' + src_path + '"'
var ret = os.system(text(dep_cmd, ' ') + ' 2>/dev/null')
if (ret != 0) return [src_path]
if (!fd.is_file(dep_file)) return [src_path]
@@ -274,9 +274,9 @@ function hash_all_deps(cmd_str, deps) {
arrfor(deps, function(dep_path) {
var content = memo_read(dep_path)
if (content != null)
push(parts, dep_path + '\n' + content)
parts[] = dep_path + '\n' + content
else
push(parts, dep_path + '\n<missing>')
parts[] = dep_path + '\n<missing>'
})
return text(parts, '\n')
}
@@ -310,16 +310,16 @@ function compile_setup(pkg, file, target, opts) {
common_flags = array(common_flags, ['-Os', '-DNDEBUG'])
}
push(common_flags, '-DCELL_USE_NAME=' + sym_name)
push(common_flags, '-I"' + pkg_dir + '"')
common_flags[] = '-DCELL_USE_NAME=' + sym_name
common_flags[] = '-I"' + pkg_dir + '"'
if (fd.is_dir(pkg_dir + '/include')) {
push(common_flags, '-I"' + pkg_dir + '/include"')
common_flags[] = '-I"' + pkg_dir + '/include"'
}
if (pkg != 'core') {
core_dir = shop.get_package_dir('core')
push(common_flags, '-I"' + core_dir + '/source"')
common_flags[] = '-I"' + core_dir + '/source"'
}
arrfor(cflags, function(flag) {
@@ -331,16 +331,16 @@ function compile_setup(pkg, file, target, opts) {
f = '-I"' + pkg_dir + '/' + ipath + '"'
}
}
push(common_flags, f)
common_flags[] = f
})
arrfor(target_cflags, function(flag) {
push(common_flags, flag)
common_flags[] = flag
})
var cmd_parts = [cc, '-c', '-fPIC']
cmd_parts = array(cmd_parts, common_flags)
push(cmd_parts, '"' + src_path + '"')
cmd_parts[] = '"' + src_path + '"'
return {
cmd_str: text(cmd_parts, ' '),
@@ -390,11 +390,13 @@ Build.compile_file = function(pkg, file, target, opts) {
// Layer 2: stat-based manifest probe (zero file reads on warm cache)
var mf_obj = null
var _linked = fd.is_link(setup.pkg_dir)
var _tag = _linked ? ' [linked]' : ''
if (!_opts.force) {
mf_obj = bmfst_probe(setup.cmd_str, setup.src_path)
if (mf_obj) {
if (_opts.verbose) log.build('[verbose] manifest hit: ' + file)
log.shop('manifest hit ' + file)
if (_opts.verbose) log.build(`[verbose] manifest hit: ${pkg}/${file}${_tag}`)
log.shop(`manifest hit ${pkg}/${file}${_tag}`)
return mf_obj
}
}
@@ -462,7 +464,7 @@ Build.compile_file = function(pkg, file, target, opts) {
// Compile
log.shop('compiling ' + file)
log.console('Compiling ' + file)
log.build('Compiling ' + file)
err_path = '/tmp/cell_build_err_' + content_hash(setup.src_path) + '.log'
full_cmd = setup.cmd_str + ' -o "' + obj_path + '" 2>"' + err_path + '"'
ret = os.system(full_cmd)
@@ -511,7 +513,7 @@ Build.build_package = function(pkg, target, exclude_main, buildtype) {
arrfor(c_files, function(file) {
var obj = Build.compile_file(pkg, file, _target, {buildtype: _buildtype, cflags: cached_cflags})
push(objects, obj)
objects[] = obj
})
return objects
@@ -525,16 +527,16 @@ Build.build_package = function(pkg, target, exclude_main, buildtype) {
// link_opts: {extra_objects, ldflags, target_ldflags, target, cc}
function compute_dylib_content(full_content, link_opts) {
var parts = [full_content]
push(parts, 'target:' + text(link_opts.target))
push(parts, 'cc:' + text(link_opts.cc))
parts[] = 'target:' + text(link_opts.target)
parts[] = 'cc:' + text(link_opts.cc)
arrfor(link_opts.extra_objects, function(obj) {
if (obj != null) push(parts, 'extra:' + text(obj))
if (obj != null) parts[] = 'extra:' + text(obj)
})
arrfor(link_opts.ldflags, function(flag) {
push(parts, 'ldflag:' + text(flag))
parts[] = 'ldflag:' + text(flag)
})
arrfor(link_opts.target_ldflags, function(flag) {
push(parts, 'target_ldflag:' + text(flag))
parts[] = 'target_ldflag:' + text(flag)
})
return text(parts, '\n')
}
@@ -568,7 +570,7 @@ Build.build_module_dylib = function(pkg, file, target, opts) {
f = '-L"' + setup.pkg_dir + '/' + lpath + '"'
}
}
push(resolved_ldflags, f)
resolved_ldflags[] = f
})
var build_dir = get_build_dir()
@@ -578,11 +580,13 @@ Build.build_module_dylib = function(pkg, file, target, opts) {
// Stat-based dylib manifest — zero file reads on warm cache
var mf_dylib = null
var _linked = fd.is_link(setup.pkg_dir)
var _tag = _linked ? ' [linked]' : ''
if (!_opts.force) {
mf_dylib = bmfst_dl_probe(setup, link_info)
if (mf_dylib) {
if (_opts.verbose) log.build('[verbose] manifest hit: ' + file)
log.shop('manifest hit ' + file)
if (_opts.verbose) log.build(`[verbose] manifest hit: ${pkg}/${file}${_tag}`)
log.shop(`manifest hit ${pkg}/${file}${_tag}`)
return mf_dylib
}
}
@@ -599,6 +603,8 @@ Build.build_module_dylib = function(pkg, file, target, opts) {
var post_probe = null
var fallback_probe = null
var _fail_msg2 = null
var link_err_path = null
var link_err_text = null
if (probe && probe.fail) {
_fail_msg2 = probe.fail_path ? text(fd.slurp(probe.fail_path)) : null
@@ -677,25 +683,31 @@ Build.build_module_dylib = function(pkg, file, target, opts) {
'-Wl,-rpath,' + local_dir
])
} else if (tc.system == 'windows') {
push(cmd_parts, '-Wl,--allow-shlib-undefined')
cmd_parts[] = '-Wl,--allow-shlib-undefined'
}
push(cmd_parts, '-L"' + local_dir + '"')
push(cmd_parts, '"' + text(obj) + '"')
cmd_parts[] = '-L"' + local_dir + '"'
cmd_parts[] = '"' + text(obj) + '"'
arrfor(_extra, function(extra_obj) {
if (extra_obj != null) push(cmd_parts, '"' + text(extra_obj) + '"')
if (extra_obj != null) cmd_parts[] = '"' + text(extra_obj) + '"'
})
cmd_parts = array(cmd_parts, resolved_ldflags)
cmd_parts = array(cmd_parts, target_ldflags)
push(cmd_parts, '-o')
push(cmd_parts, '"' + dylib_path + '"')
cmd_parts[] = '-o'
cmd_parts[] = '"' + dylib_path + '"'
cmd_str = text(cmd_parts, ' ')
if (_opts.verbose) log.build('[verbose] link: ' + cmd_str)
log.shop('linking ' + file)
log.console('Linking module ' + file + ' -> ' + fd.basename(dylib_path))
ret = os.system(cmd_str)
log.build('Linking module ' + file + ' -> ' + fd.basename(dylib_path))
link_err_path = '/tmp/cell_link_err_' + content_hash(file) + '.log'
ret = os.system(cmd_str + ' 2>"' + link_err_path + '"')
if (ret != 0) {
if (fd.is_file(link_err_path))
link_err_text = text(fd.slurp(link_err_path))
if (link_err_text)
log.error('Linking failed: ' + file + '\n' + link_err_text)
else
log.error('Linking failed: ' + file)
return null
}
@@ -710,6 +722,28 @@ Build.build_module_dylib = function(pkg, file, target, opts) {
return dylib_path
}
// Compile a single C module file for a package (support objects + one dylib).
// Used by parallel boot workers. No manifest writing — the runtime handles that.
Build.compile_c_module = function(pkg, file, target, opts) {
var _target = target || Build.detect_host_target()
var _opts = opts || {}
var _buildtype = _opts.buildtype || 'release'
var pkg_dir = shop.get_package_dir(pkg)
var cached_cflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'CFLAGS', _target), pkg_dir)
// Compile support sources to cached objects (content-addressed, safe for concurrent workers)
var sources = pkg_tools.get_sources(pkg)
var support_objects = []
if (pkg != 'core') {
arrfor(sources, function(src_file) {
var obj = Build.compile_file(pkg, src_file, _target, {buildtype: _buildtype, cflags: cached_cflags})
if (obj != null) support_objects[] = obj
})
}
return Build.build_module_dylib(pkg, file, _target, {buildtype: _buildtype, extra_objects: support_objects, cflags: cached_cflags})
}
// Build a dynamic library for a package (one dylib per C file)
// Returns array of {file, symbol, dylib} for each module
// Also writes a manifest mapping symbols to dylib paths
@@ -719,6 +753,9 @@ Build.build_dynamic = function(pkg, target, buildtype, opts) {
var _opts = opts || {}
var c_files = pkg_tools.get_c_files(pkg, _target, true)
var results = []
var total = length(c_files)
var done = 0
var failed = 0
// Pre-fetch cflags once to avoid repeated TOML reads
var pkg_dir = shop.get_package_dir(pkg)
@@ -730,7 +767,7 @@ Build.build_dynamic = function(pkg, target, buildtype, opts) {
if (pkg != 'core') {
arrfor(sources, function(src_file) {
var obj = Build.compile_file(pkg, src_file, _target, {buildtype: _buildtype, cflags: cached_cflags, verbose: _opts.verbose, force: _opts.force})
if (obj != null) push(support_objects, obj)
if (obj != null) support_objects[] = obj
})
}
@@ -738,10 +775,16 @@ Build.build_dynamic = function(pkg, target, buildtype, opts) {
var sym_name = shop.c_symbol_for_file(pkg, file)
var dylib = Build.build_module_dylib(pkg, file, _target, {buildtype: _buildtype, extra_objects: support_objects, cflags: cached_cflags, verbose: _opts.verbose, force: _opts.force})
if (dylib) {
push(results, {file: file, symbol: sym_name, dylib: dylib})
results[] = {file: file, symbol: sym_name, dylib: dylib}
} else {
failed = failed + 1
}
done = done + 1
})
if (total > 0)
log.build(` Building C modules (${text(done)} ok${failed > 0 ? `, ${text(failed)} failed` : ''})`)
// Write manifest so runtime can find dylibs without the build module
var mpath = manifest_path(pkg)
fd.slurpwrite(mpath, stone(blob(json.encode(results))))
@@ -771,7 +814,7 @@ Build.build_static = function(packages, target, output, buildtype) {
var objects = Build.build_package(pkg, _target, !is_core, _buildtype)
arrfor(objects, function(obj) {
push(all_objects, obj)
all_objects[] = obj
})
// Collect LDFLAGS (with sigil replacement)
@@ -791,7 +834,7 @@ Build.build_static = function(packages, target, output, buildtype) {
f = '-L"' + pkg_dir + '/' + lpath + '"'
}
}
push(all_ldflags, f)
all_ldflags[] = f
})
}
})
@@ -813,18 +856,18 @@ Build.build_static = function(packages, target, output, buildtype) {
var cmd_parts = [cc]
arrfor(all_objects, function(obj) {
push(cmd_parts, '"' + obj + '"')
cmd_parts[] = '"' + obj + '"'
})
arrfor(all_ldflags, function(flag) {
push(cmd_parts, flag)
cmd_parts[] = flag
})
arrfor(target_ldflags, function(flag) {
push(cmd_parts, flag)
cmd_parts[] = flag
})
push(cmd_parts, '-o', '"' + out_path + '"')
cmd_parts[] = '-o', '"' + out_path + '"'
var cmd_str = text(cmd_parts, ' ')
@@ -878,7 +921,7 @@ function qbe_insert_dead_labels(il_text) {
line = lines[i]
trimmed = trim(line)
if (need_label && !starts_with(trimmed, '@') && !starts_with(trimmed, '}') && length(trimmed) > 0) {
push(result, "@_dead_" + text(dead_id))
result[] = "@_dead_" + text(dead_id)
dead_id = dead_id + 1
need_label = false
}
@@ -888,7 +931,7 @@ function qbe_insert_dead_labels(il_text) {
if (starts_with(trimmed, 'ret ') || starts_with(trimmed, 'jmp ')) {
need_label = true
}
push(result, line)
result[] = line
i = i + 1
}
return text(result, "\n")
@@ -916,9 +959,17 @@ Build.compile_native = function(src_path, target, buildtype, pkg) {
var qbe_emit = use('qbe_emit')
// Step 2: Generate QBE IL
// Derive package from the file itself (not the caller's context) to ensure correct symbol names
var sym_name = null
if (pkg) {
sym_name = shop.c_symbol_for_file(pkg, fd.basename(src_path))
var _file_info = shop.file_info(src_path)
var _actual_pkg = _file_info.package || pkg
var _sym_stem = null
if (_actual_pkg) {
if (_file_info.name)
_sym_stem = _file_info.name + (_file_info.is_actor ? '.ce' : '.cm')
else
_sym_stem = fd.basename(src_path)
sym_name = shop.c_symbol_for_file(_actual_pkg, _sym_stem)
}
var il_parts = qbe_emit(optimized, qbe_macros, sym_name)
@@ -967,7 +1018,7 @@ Build.compile_native = function(src_path, target, buildtype, pkg) {
log.error('Linking native dylib failed for: ' + src_path); disrupt
}
log.console('Built native: ' + fd.basename(dylib_path))
log.shop('compiled native: ' + src_path)
return dylib_path
}
@@ -989,9 +1040,17 @@ Build.compile_native_ir = function(optimized, src_path, opts) {
var qbe_macros = use('qbe')
var qbe_emit = use('qbe_emit')
// Derive package from the file itself (not the caller's context)
var sym_name = null
if (pkg) {
sym_name = shop.c_symbol_for_file(pkg, fd.basename(src_path))
var _file_info2 = shop.file_info(src_path)
var _actual_pkg2 = _file_info2.package || pkg
var _sym_stem2 = null
if (_actual_pkg2) {
if (_file_info2.name)
_sym_stem2 = _file_info2.name + (_file_info2.is_actor ? '.ce' : '.cm')
else
_sym_stem2 = fd.basename(src_path)
sym_name = shop.c_symbol_for_file(_actual_pkg2, _sym_stem2)
}
var il_parts = qbe_emit(optimized, qbe_macros, sym_name)
@@ -1039,7 +1098,7 @@ Build.compile_native_ir = function(optimized, src_path, opts) {
log.error('Linking native dylib failed for: ' + src_path); disrupt
}
log.console('Built native: ' + fd.basename(dylib_path))
log.shop('compiled native: ' + src_path)
return dylib_path
}
@@ -1063,16 +1122,16 @@ Build.compile_cm_to_mach = function(src_path) {
// output: path to write the generated .c file
Build.generate_module_table = function(modules, output) {
var lines = []
push(lines, '/* Generated module table — do not edit */')
push(lines, '#include <stddef.h>')
push(lines, '#include <string.h>')
push(lines, '')
push(lines, 'struct cell_embedded_entry {')
push(lines, ' const char *name;')
push(lines, ' const unsigned char *data;')
push(lines, ' size_t size;')
push(lines, '};')
push(lines, '')
lines[] = '/* Generated module table — do not edit */'
lines[] = '#include <stddef.h>'
lines[] = '#include <string.h>'
lines[] = ''
lines[] = 'struct cell_embedded_entry {'
lines[] = ' const char *name;'
lines[] = ' const unsigned char *data;'
lines[] = ' size_t size;'
lines[] = '};'
lines[] = ''
var entries = []
arrfor(modules, function(mod) {
@@ -1081,27 +1140,27 @@ Build.generate_module_table = function(modules, output) {
var bytes = array(mach)
var hex = []
arrfor(bytes, function(b) {
push(hex, '0x' + text(b, 'h2'))
hex[] = '0x' + text(b, 'h2')
})
push(lines, 'static const unsigned char mod_' + safe + '_data[] = {')
push(lines, ' ' + text(hex, ', '))
push(lines, '};')
push(lines, '')
push(entries, safe)
lines[] = 'static const unsigned char mod_' + safe + '_data[] = {'
lines[] = ' ' + text(hex, ', ')
lines[] = '};'
lines[] = ''
entries[] = safe
log.console('Embedded: ' + mod.name + ' (' + text(length(bytes)) + ' bytes)')
})
// Lookup function
push(lines, 'const struct cell_embedded_entry *cell_embedded_module_lookup(const char *name) {')
lines[] = 'const struct cell_embedded_entry *cell_embedded_module_lookup(const char *name) {'
arrfor(modules, function(mod, i) {
var safe = entries[i]
push(lines, ' if (strcmp(name, "' + mod.name + '") == 0) {')
push(lines, ' static const struct cell_embedded_entry e = {"' + mod.name + '", mod_' + safe + '_data, sizeof(mod_' + safe + '_data)};')
push(lines, ' return &e;')
push(lines, ' }')
lines[] = ' if (strcmp(name, "' + mod.name + '") == 0) {'
lines[] = ' static const struct cell_embedded_entry e = {"' + mod.name + '", mod_' + safe + '_data, sizeof(mod_' + safe + '_data)};'
lines[] = ' return &e;'
lines[] = ' }'
})
push(lines, ' return (void *)0;')
push(lines, '}')
lines[] = ' return (void *)0;'
lines[] = '}'
var c_text = text(lines, '\n')
fd.slurpwrite(output, stone(blob(c_text)))
@@ -1129,14 +1188,14 @@ Build.build_all_dynamic = function(target, buildtype, opts) {
// Build core first
if (find(packages, function(p) { return p == 'core' }) != null) {
core_mods = Build.build_dynamic('core', _target, _buildtype, _opts)
push(results, {package: 'core', modules: core_mods})
results[] = {package: 'core', modules: core_mods}
}
// Build other packages
arrfor(packages, function(pkg) {
if (pkg == 'core') return
var pkg_mods = Build.build_dynamic(pkg, _target, _buildtype, _opts)
push(results, {package: pkg, modules: pkg_mods})
results[] = {package: pkg, modules: pkg_mods}
})
// Print build report

154
cellfs.cm
View File

@@ -4,10 +4,11 @@ var fd = use('fd')
var miniz = use('miniz')
var qop = use('internal/qop')
var wildstar = use('internal/wildstar')
var blib = use('blob')
var mounts = []
var writepath = "."
var write_mount = null
function normalize_path(path) {
if (!path) return ""
@@ -30,7 +31,7 @@ function mount_exists(mount, path) {
result = mount.handle.stat(path) != null
} disruption {}
_check()
} else {
} else if (mount.type == 'fs') {
full_path = fd.join_paths(mount.source, path)
_check = function() {
st = fd.stat(full_path)
@@ -119,12 +120,12 @@ function resolve(path, must_exist) {
}
function mount(source, name) {
var st = fd.stat(source)
var st = null
var blob = null
var qop_archive = null
var zip = null
var _try_qop = null
var http = null
var mount_info = {
source: source,
@@ -134,6 +135,29 @@ function mount(source, name) {
zip_blob: null
}
if (starts_with(source, 'http://') || starts_with(source, 'https://')) {
http = use('http')
mount_info.type = 'http'
mount_info.handle = {
base_url: source,
get: function(path, callback) {
var url = source + '/' + path
$clock(function(_t) {
var resp = http.request('GET', url, null, null)
if (resp && resp.status == 200) {
callback(resp.body)
} else {
callback(null, "HTTP " + text(resp ? resp.status : 0) + ": " + url)
}
})
}
}
mounts[] = mount_info
return
}
st = fd.stat(source)
if (st.isDirectory) {
mount_info.type = 'fs'
} else if (st.isFile) {
@@ -163,7 +187,7 @@ function mount(source, name) {
log.error("Unsupported mount source type: " + source); disrupt
}
push(mounts, mount_info)
mounts[] = mount_info
}
function unmount(name_or_source) {
@@ -191,11 +215,13 @@ function slurp(path) {
}
function slurpwrite(path, data) {
var full_path = writepath + "/" + path
var f = fd.open(full_path, 'w')
fd.write(f, data)
fd.close(f)
var full_path = null
if (write_mount) {
full_path = fd.join_paths(write_mount.source, path)
} else {
full_path = fd.join_paths(".", path)
}
fd.slurpwrite(full_path, data)
}
function exists(path) {
@@ -276,12 +302,25 @@ function rm(path) {
}
function mkdir(path) {
var full = fd.join_paths(writepath, path)
var full = null
if (write_mount) {
full = fd.join_paths(write_mount.source, path)
} else {
full = fd.join_paths(".", path)
}
fd.mkdir(full)
}
function set_writepath(path) {
writepath = path
function set_writepath(mount_name) {
var found = null
if (mount_name == null) { write_mount = null; return }
arrfor(mounts, function(m) {
if (m.name == mount_name) { found = m; return true }
}, false, true)
if (!found || found.type != 'fs') {
log.error("writepath: must be an fs mount"); disrupt
}
write_mount = found
}
function basedir() {
@@ -317,7 +356,7 @@ function enumerate(_path, recurse) {
arrfor(list, function(item) {
var item_rel = rel_prefix ? rel_prefix + "/" + item : item
var child_st = null
push(results, item_rel)
results[] = item_rel
if (recurse) {
child_st = fd.stat(fd.join_paths(curr_full, item))
@@ -357,7 +396,7 @@ function enumerate(_path, recurse) {
if (!seen[rel]) {
seen[rel] = true
push(results, rel)
results[] = rel
}
}
})
@@ -416,7 +455,7 @@ function globfs(globs, _dir) {
}
} else {
if (!check_neg(item_rel) && check_pos(item_rel)) {
push(results, item_rel)
results[] = item_rel
}
}
})
@@ -440,7 +479,7 @@ function globfs(globs, _dir) {
if (length(rel) == 0) return
if (!check_neg(rel) && check_pos(rel)) {
push(results, rel)
results[] = rel
}
}
})
@@ -449,6 +488,82 @@ function globfs(globs, _dir) {
return results
}
// Requestor factory: returns a requestor for reading a file at path
function get(path) {
return function get_requestor(callback, value) {
var res = resolve(path, false)
var full = null
var f = null
var acc = null
var cancelled = false
var data = null
var _close = null
if (!res) { callback(null, "not found: " + path); return }
if (res.mount.type == 'zip') {
callback(res.mount.handle.slurp(res.path))
return
}
if (res.mount.type == 'qop') {
data = res.mount.handle.read(res.path)
if (data) {
callback(data)
} else {
callback(null, "not found in qop: " + path)
}
return
}
if (res.mount.type == 'http') {
res.mount.handle.get(res.path, callback)
return
}
full = fd.join_paths(res.mount.source, res.path)
f = fd.open(full, 'r')
acc = blob()
function next(_t) {
var chunk = null
if (cancelled) return
chunk = fd.read(f, 65536)
if (length(chunk) == 0) {
fd.close(f)
stone(acc)
callback(acc)
return
}
acc.write_blob(chunk)
$clock(next)
}
next()
return function cancel() {
cancelled = true
_close = function() { fd.close(f) } disruption {}
_close()
}
}
}
// Requestor factory: returns a requestor for writing data to path
function put(path, data) {
return function put_requestor(callback, value) {
var _data = data != null ? data : value
var full = null
var _do = null
if (!write_mount) { callback(null, "no write mount set"); return }
full = fd.join_paths(write_mount.source, path)
_do = function() {
fd.slurpwrite(full, _data)
callback(true)
} disruption {
callback(null, "write failed: " + path)
}
_do()
}
}
cellfs.mount = mount
cellfs.mount_package = mount_package
cellfs.unmount = unmount
@@ -467,7 +582,8 @@ cellfs.writepath = set_writepath
cellfs.basedir = basedir
cellfs.prefdir = prefdir
cellfs.realdir = realdir
cellfs.mount('.')
cellfs.get = get
cellfs.put = put
cellfs.resolve = resolve
return cellfs

18
cfg.ce
View File

@@ -168,7 +168,7 @@ var run = function() {
if (is_array(instr)) {
if (block_start_pcs[text(pc)]) {
if (current_block != null) {
push(blocks, current_block)
blocks[] = current_block
}
current_block = {
id: length(blocks),
@@ -184,7 +184,7 @@ var run = function() {
}
if (current_block != null) {
push(current_block.instrs, {pc: pc, instr: instr})
current_block.instrs[] = {pc: pc, instr: instr}
current_block.end_pc = pc
n = length(instr)
line_num = instr[n - 2]
@@ -200,7 +200,7 @@ var run = function() {
ii = ii + 1
}
if (current_block != null) {
push(blocks, current_block)
blocks[] = current_block
}
// Build block index
@@ -235,19 +235,19 @@ var run = function() {
if (target_bi <= bi) {
edge_type = "loop back-edge"
}
push(blk.edges, {target: target_bi, kind: edge_type})
blk.edges[] = {target: target_bi, kind: edge_type}
}
if (is_conditional_jump(last_op)) {
if (bi + 1 < length(blocks)) {
push(blk.edges, {target: bi + 1, kind: "fallthrough"})
blk.edges[] = {target: bi + 1, kind: "fallthrough"}
}
}
} else if (is_terminator(last_op)) {
push(blk.edges, {target: -1, kind: "EXIT (" + last_op + ")"})
blk.edges[] = {target: -1, kind: "EXIT (" + last_op + ")"}
} else {
if (bi + 1 < length(blocks)) {
push(blk.edges, {target: bi + 1, kind: "fallthrough"})
blk.edges[] = {target: bi + 1, kind: "fallthrough"}
}
}
}
@@ -308,7 +308,7 @@ var run = function() {
parts = []
j = 1
while (j < n - 2) {
push(parts, fmt_val(instr[j]))
parts[] = fmt_val(instr[j])
j = j + 1
}
operands = text(parts, ", ")
@@ -381,7 +381,7 @@ var run = function() {
parts = []
j = 1
while (j < n - 2) {
push(parts, fmt_val(instr[j]))
parts[] = fmt_val(instr[j])
j = j + 1
}
operands = text(parts, ", ")

View File

@@ -93,13 +93,13 @@ if (is_shop_scope) {
packages_to_clean = shop.list_packages()
} else {
// Single package
push(packages_to_clean, scope)
packages_to_clean[] = scope
if (deep) {
_gather = function() {
deps = pkg.gather_dependencies(scope)
arrfor(deps, function(dep) {
push(packages_to_clean, dep)
packages_to_clean[] = dep
})
} disruption {
// Skip if can't read dependencies
@@ -116,11 +116,11 @@ var packages_dir = replace(shop.get_package_dir(''), /\/$/, '') // Get base pack
if (clean_build) {
// Nuke entire build cache (content-addressed, per-package clean impractical)
if (fd.is_dir(build_dir)) {
push(dirs_to_delete, build_dir)
dirs_to_delete[] = build_dir
}
// Clean orphaned lib/ directory if it exists (legacy)
if (fd.is_dir(lib_dir)) {
push(dirs_to_delete, lib_dir)
dirs_to_delete[] = lib_dir
}
}
@@ -128,7 +128,7 @@ if (clean_fetch) {
if (is_shop_scope) {
// Clean entire packages directory (dangerous!)
if (fd.is_dir(packages_dir)) {
push(dirs_to_delete, packages_dir)
dirs_to_delete[] = packages_dir
}
} else {
// Clean specific package directories
@@ -137,7 +137,7 @@ if (clean_fetch) {
var pkg_dir = shop.get_package_dir(p)
if (fd.is_dir(pkg_dir) || fd.is_link(pkg_dir)) {
push(dirs_to_delete, pkg_dir)
dirs_to_delete[] = pkg_dir
}
})
}

40
compile_worker.ce Normal file
View File

@@ -0,0 +1,40 @@
// compile_worker - Worker actor that compiles a single module and replies
//
// Receives a message with:
// {type: 'script', path, package} — bytecode compile
// {type: 'native_script', path, package} — native compile
// {type: 'c_package', package} — C package build
// {type: 'c_file', package, file} — single C module build
//
// Replies with {ok: true/false, path} and stops.
var shop = use('internal/shop')
var build = use('build')
$receiver(function(msg) {
var name = msg.path || (msg.file ? msg.package + '/' + msg.file : msg.package)
var _work = function() {
if (msg.type == 'script') {
log.console('compile_worker: compiling ' + name)
shop.precompile(msg.path, msg.package)
} else if (msg.type == 'native_script') {
log.console('compile_worker: native compiling ' + name)
build.compile_native(msg.path, null, null, msg.package)
} else if (msg.type == 'c_package') {
log.console('compile_worker: building package ' + name)
build.build_dynamic(msg.package, null, null, null)
} else if (msg.type == 'c_file') {
log.console('compile_worker: building ' + name)
build.compile_c_module(msg.package, msg.file)
}
log.console('compile_worker: done ' + name)
send(msg, {ok: true, path: name})
} disruption {
log.error('compile_worker: failed ' + name)
send(msg, {ok: false, error: 'compile failed'})
}
_work()
$stop()
})
var _t = $delay($stop, 120)

View File

@@ -1,4 +1,5 @@
#include "cell.h"
#include "pit_internal.h"
JSC_CCALL(os_mem_limit, JS_SetMemoryLimit(JS_GetRuntime(js), js2number(js,argv[0])))
JSC_CCALL(os_max_stacksize, JS_SetMaxStackSize(js, js2number(js,argv[0])))

24
diff.ce
View File

@@ -55,7 +55,7 @@ function collect_tests(specific_test) {
match_base = ends_with(match_name, '.cm') ? text(match_name, 0, -3) : match_name
if (test_name != match_base) continue
}
push(test_files, f)
test_files[] = f
}
}
return test_files
@@ -100,7 +100,7 @@ function diff_test_file(file_path) {
src = text(fd.slurp(src_path))
ast = analyze(src, src_path)
} disruption {
push(results.errors, `failed to parse ${file_path}`)
results.errors[] = `failed to parse ${file_path}`
return results
}
_read()
@@ -124,14 +124,14 @@ function diff_test_file(file_path) {
// Compare module-level behavior
if (opt_error != noopt_error) {
push(results.errors, `module load mismatch: opt=${opt_error != null ? opt_error : "ok"} noopt=${noopt_error != null ? noopt_error : "ok"}`)
results.errors[] = `module load mismatch: opt=${opt_error != null ? opt_error : "ok"} noopt=${noopt_error != null ? noopt_error : "ok"}`
results.failed = results.failed + 1
return results
}
if (opt_error != null) {
// Both disrupted during load — that's consistent
results.passed = results.passed + 1
push(results.tests, {name: "<module>", status: "passed"})
results.tests[] = {name: "<module>", status: "passed"}
return results
}
@@ -161,15 +161,15 @@ function diff_test_file(file_path) {
_run_one_noopt()
if (opt_err != noopt_err) {
push(results.tests, {name: k, status: "failed"})
push(results.errors, `${k}: disruption mismatch opt=${opt_err != null ? opt_err : "ok"} noopt=${noopt_err != null ? noopt_err : "ok"}`)
results.tests[] = {name: k, status: "failed"}
results.errors[] = `${k}: disruption mismatch opt=${opt_err != null ? opt_err : "ok"} noopt=${noopt_err != null ? noopt_err : "ok"}`
results.failed = results.failed + 1
} else if (!values_equal(opt_result, noopt_result)) {
push(results.tests, {name: k, status: "failed"})
push(results.errors, `${k}: result mismatch opt=${describe(opt_result)} noopt=${describe(noopt_result)}`)
results.tests[] = {name: k, status: "failed"}
results.errors[] = `${k}: result mismatch opt=${describe(opt_result)} noopt=${describe(noopt_result)}`
results.failed = results.failed + 1
} else {
push(results.tests, {name: k, status: "passed"})
results.tests[] = {name: k, status: "passed"}
results.passed = results.passed + 1
}
}
@@ -178,11 +178,11 @@ function diff_test_file(file_path) {
} else {
// Compare direct return values
if (!values_equal(mod_opt, mod_noopt)) {
push(results.tests, {name: "<return>", status: "failed"})
push(results.errors, `return value mismatch: opt=${describe(mod_opt)} noopt=${describe(mod_noopt)}`)
results.tests[] = {name: "<return>", status: "failed"}
results.errors[] = `return value mismatch: opt=${describe(mod_opt)} noopt=${describe(mod_noopt)}`
results.failed = results.failed + 1
} else {
push(results.tests, {name: "<return>", status: "passed"})
results.tests[] = {name: "<return>", status: "passed"}
results.passed = results.passed + 1
}
}

View File

@@ -93,7 +93,7 @@ var run = function() {
var operands = null
var line_str = null
while (j < n - 2) {
push(parts, fmt_val(instr[j]))
parts[] = fmt_val(instr[j])
j = j + 1
}
operands = text(parts, ", ")

View File

@@ -206,7 +206,7 @@ var run = function() {
parts = []
j = 1
while (j < n - 2) {
push(parts, fmt_val(instr[j]))
parts[] = fmt_val(instr[j])
j = j + 1
}
operands = text(parts, ", ")

View File

@@ -453,6 +453,53 @@ JSC_CCALL(mymod_make,
)
```
### C Argument Evaluation Order (critical)
In C, the **order of evaluation of function arguments is unspecified**. This interacts with the copying GC to create intermittent crashes that are extremely difficult to diagnose.
```c
// UNSAFE — crashes intermittently:
JS_FRAME(js);
JS_ROOT(obj, JS_NewObject(js));
JS_SetPropertyStr(js, obj.val, "format", JS_NewString(js, "rgba32"));
// ^^^^^^^ may be evaluated BEFORE JS_NewString runs
// If JS_NewString triggers GC, the already-read obj.val is a dangling pointer.
```
The compiler is free to evaluate `obj.val` into a register, *then* call `JS_NewString`. If `JS_NewString` triggers GC, the object moves to a new address. The rooted `obj` is updated by GC, but the **register copy** is not — it still holds the old address. `JS_SetPropertyStr` then writes to freed memory.
**Fix:** always separate the allocating call into a local variable:
```c
// SAFE:
JS_FRAME(js);
JS_ROOT(obj, JS_NewObject(js));
JSValue fmt = JS_NewString(js, "rgba32");
JS_SetPropertyStr(js, obj.val, "format", fmt);
// obj.val is read AFTER JS_NewString completes — guaranteed correct.
```
This applies to **any** allocating function used as an argument when another argument references a rooted `.val`:
```c
// ALL of these are UNSAFE:
JS_SetPropertyStr(js, obj.val, "pixels", js_new_blob_stoned_copy(js, data, len));
JS_SetPropertyStr(js, obj.val, "x", JS_NewFloat64(js, 3.14));
JS_SetPropertyStr(js, obj.val, "name", JS_NewString(js, name));
// SAFE versions — separate the allocation:
JSValue pixels = js_new_blob_stoned_copy(js, data, len);
JS_SetPropertyStr(js, obj.val, "pixels", pixels);
JSValue x = JS_NewFloat64(js, 3.14);
JS_SetPropertyStr(js, obj.val, "x", x);
JSValue s = JS_NewString(js, name);
JS_SetPropertyStr(js, obj.val, "name", s);
```
**Functions that allocate** (must be separated): `JS_NewString`, `JS_NewFloat64`, `JS_NewInt64`, `JS_NewObject`, `JS_NewArray`, `JS_NewCFunction`, `js_new_blob_stoned_copy`
**Functions that do NOT allocate** (safe inline): `JS_NewInt32`, `JS_NewUint32`, `JS_NewBool`, `JS_NULL`, `JS_TRUE`, `JS_FALSE`
### Macros
| Macro | Purpose |

233
docs/library/probe.md Normal file
View File

@@ -0,0 +1,233 @@
---
title: "probe"
description: "Runtime observability for actors"
weight: 90
type: "docs"
---
Runtime observability for actors. Register named probe functions on any actor and query them over HTTP while the program runs.
```javascript
var probe = use('probe')
```
The probe server starts automatically on the first `register()` call, listening on `127.0.0.1:9000`.
## Registering Probes
### probe.register(target, probes)
Register a group of probe functions under a target name. Each probe is a function that receives an `args` record and returns a value.
```javascript
var probe = use('probe')
var world = {
entities: [
{id: 1, name: "player", x: 10, y: 20, hp: 100},
{id: 2, name: "goblin", x: 55, y: 30, hp: 40}
],
tick: 0
}
probe.register("game", {
state: function(args) {
return world
},
entities: function(args) {
return world.entities
},
entity: function(args) {
return find(world.entities, function(e) {
return e.id == args.id
})
}
})
probe.register("render", {
info: function(args) {
return {fps: 60, draw_calls: 128, batches: 12}
}
})
```
A target is just a namespace — group related probes under the same target name. Register as many targets as you like; the server starts once and serves them all.
## HTTP Endpoints
All responses are JSON with an `ok` field.
### GET /discover
Lists all registered targets and their probe names. Designed for tooling — an LLM or dashboard can call this first to learn what's available, then query specific probes.
```
$ curl http://127.0.0.1:9000/discover
```
```json
{
"ok": true,
"targets": {
"game": ["state", "entities", "entity"],
"render": ["info"]
}
}
```
### POST /probe
Call a single probe function by target and name. Optionally pass arguments.
```
$ curl -X POST -H "Content-Type: application/json" \
-d '{"target":"game","name":"state"}' \
http://127.0.0.1:9000/probe
```
```json
{
"ok": true,
"result": {
"entities": [
{"id": 1, "name": "player", "x": 10, "y": 20, "hp": 100},
{"id": 2, "name": "goblin", "x": 55, "y": 30, "hp": 40}
],
"tick": 4821
}
}
```
With arguments:
```
$ curl -X POST -H "Content-Type: application/json" \
-d '{"target":"game","name":"entity","args":{"id":1}}' \
http://127.0.0.1:9000/probe
```
```json
{
"ok": true,
"result": {"id": 1, "name": "player", "x": 10, "y": 20, "hp": 100}
}
```
### POST /snapshot
Call multiple probes in one request. Returns all results keyed by `target/name`.
```
$ curl -X POST -H "Content-Type: application/json" \
-d '{"probes":[{"target":"game","name":"state"},{"target":"render","name":"info"}]}' \
http://127.0.0.1:9000/snapshot
```
```json
{
"ok": true,
"results": {
"game/state": {
"entities": [
{"id": 1, "name": "player", "x": 10, "y": 20, "hp": 100},
{"id": 2, "name": "goblin", "x": 55, "y": 30, "hp": 40}
],
"tick": 4821
},
"render/info": {
"fps": 60,
"draw_calls": 128,
"batches": 12
}
}
}
```
### Errors
Unknown paths return 404:
```json
{"ok": false, "error": "not found"}
```
Unknown targets or probe names:
```json
{"ok": false, "error": "unknown probe: game/nonexistent"}
```
If a probe function disrupts:
```json
{"ok": false, "error": "probe failed"}
```
## Example
A game actor with a simulation loop and probe observability:
```javascript
// game.ce
var probe = use('probe')
var state = {
entities: [
{id: 1, name: "player", x: 0, y: 0, hp: 100},
{id: 2, name: "enemy", x: 50, y: 50, hp: 60}
],
frame: 0,
paused: false
}
probe.register("game", {
state: function(args) {
return state
},
entities: function(args) {
return state.entities
},
entity: function(args) {
return find(state.entities, function(e) {
return e.id == args.id
})
}
})
// game loop
def tick = function(_) {
if (!state.paused) {
state.frame = state.frame + 1
// ... update entities, physics, AI ...
}
$delay(tick, 0.016)
}
$delay(tick, 0.016)
```
While the game runs, query it from a terminal:
```
$ curl -s http://127.0.0.1:9000/discover | jq .targets
{
"game": ["state", "entities", "entity"]
}
$ curl -s -X POST -d '{"target":"game","name":"state"}' \
-H "Content-Type: application/json" \
http://127.0.0.1:9000/probe | jq .result.frame
7834
$ curl -s -X POST -d '{"target":"game","name":"entity","args":{"id":1}}' \
-H "Content-Type: application/json" \
http://127.0.0.1:9000/probe | jq .result
{
"id": 1,
"name": "player",
"x": 142,
"y": 87,
"hp": 100
}
```
Probes run inside the actor's normal turn, so the values are always consistent — never a half-updated frame.

View File

@@ -9,13 +9,16 @@ Logging in ƿit is channel-based. Any `log.X(value)` call writes to channel `"X"
## Channels
Three channels are conventional:
These channels are conventional:
| Channel | Usage |
|---------|-------|
| `log.console(msg)` | General output |
| `log.error(msg)` | Errors and warnings |
| `log.error(msg)` | Errors |
| `log.warn(msg)` | Compiler diagnostics and warnings |
| `log.system(msg)` | Internal system messages |
| `log.build(msg)` | Per-file compile/link output |
| `log.shop(msg)` | Package management internals |
Any name works. `log.debug(msg)` creates channel `"debug"`, `log.perf(msg)` creates `"perf"`, and so on.
@@ -29,16 +32,18 @@ Non-text values are JSON-encoded automatically.
## Default Behavior
With no configuration, a default sink routes `console`, `error`, and `system` to the terminal in pretty format. The `error` channel includes a stack trace by default:
With no configuration, a default sink routes `console` and `error` to the terminal in clean format. The `error` channel includes a stack trace by default:
```
[a3f12] [console] server started on port 8080
[a3f12] [error] connection refused
server started on port 8080
error: connection refused
at handle_request (server.ce:42:3)
at main (main.ce:5:1)
```
The format is `[actor_id] [channel] message`. Error stack traces are always on unless you explicitly configure a sink without them.
Clean format prints messages without actor ID or channel prefix. Error messages are prefixed with `error:`. Other formats (`pretty`, `bare`) include actor IDs and are available for debugging. Stack traces are always on for errors unless you explicitly configure a sink without them.
Channels like `warn`, `build`, and `shop` are not routed to the terminal by default. Enable them with `pit log enable <channel>`.
## Configuration
@@ -67,7 +72,7 @@ exclude = ["console"]
| Field | Values | Description |
|-------|--------|-------------|
| `type` | `"console"`, `"file"` | Where output goes |
| `format` | `"pretty"`, `"bare"`, `"json"` | How output is formatted |
| `format` | `"clean"`, `"pretty"`, `"bare"`, `"json"` | How output is formatted |
| `channels` | array of names, or `["*"]` | Which channels this sink receives. Quote `'*'` on the CLI to prevent shell glob expansion. |
| `exclude` | array of names | Channels to skip (useful with `"*"`) |
| `stack` | array of channel names | Channels that capture a stack trace |
@@ -75,6 +80,13 @@ exclude = ["console"]
### Formats
**clean** — CLI-friendly. No actor ID or channel prefix. Error channel messages are prefixed with `error:`. This is the default format.
```
server started on port 8080
error: connection refused
```
**pretty** — human-readable, one line per message. Includes actor ID, channel, source location, and message.
```
@@ -158,7 +170,10 @@ The `pit log` command manages sinks and reads log files. See [CLI — pit log](/
```bash
pit log list # show sinks
pit log add terminal console --format=bare --channels=console
pit log channels # list channels with enabled/disabled status
pit log enable warn # enable a channel on the terminal sink
pit log disable warn # disable a channel
pit log add terminal console --format=clean --channels=console
pit log add dump file .cell/logs/dump.jsonl '--channels=*' --exclude=console
pit log add debug console --channels=error,debug --stack=error,debug
pit log remove terminal
@@ -166,6 +181,16 @@ pit log read dump --lines=20 --channel=error
pit log tail dump
```
### Channel toggling
The `enable` and `disable` commands modify the terminal sink's channel list without touching other sink configuration. This is the easiest way to opt in to extra output:
```bash
pit log enable warn # see compiler warnings
pit log enable build # see per-file compile/link output
pit log disable warn # hide warnings again
```
## Examples
### Development setup

View File

@@ -241,7 +241,7 @@ source/
**`cell_runtime.c`** is the single file that defines the native code contract. It should:
1. Include `quickjs-internal.h` for access to value representation and heap types
1. Include `pit_internal.h` for access to value representation and heap types
2. Export all `cell_rt_*` functions with C linkage (no `static`)
3. Keep each function thin — delegate to existing `JS_*` functions where possible
4. Handle GC safety: after any allocation (frame, string, array), callers' frames may have moved

2
fd.cm
View File

@@ -83,7 +83,7 @@ fd.globfs = function(globs, dir) {
}
} else {
if (!check_neg(item_rel) && check_pos(item_rel)) {
push(results, item_rel)
results[] = item_rel
}
}
});

29
fold.cm
View File

@@ -362,6 +362,7 @@ var fold = function(ast) {
var fold_expr = null
var fold_stmt = null
var fold_stmts = null
var fold_fn = null
fold_expr = function(expr, fn_nr) {
if (expr == null) return null
@@ -592,8 +593,6 @@ var fold = function(ast) {
return expr
}
var fold_fn = null
fold_stmt = function(stmt, fn_nr) {
if (stmt == null) return null
var k = stmt.kind
@@ -710,26 +709,26 @@ var fold = function(ast) {
if (sv != null && sv.nr_uses == 0) {
if (is_pure(stmt.right)) stmt.dead = true
if (stmt.right != null && stmt.right.kind == "(" && stmt.right.expression != null && stmt.right.expression.name == "use") {
push(ast._diagnostics, {
ast._diagnostics[] = {
severity: "warning",
line: stmt.left.from_row + 1,
col: stmt.left.from_column + 1,
message: `unused import '${name}'`
})
}
} else if (stmt.kind == "def") {
push(ast._diagnostics, {
ast._diagnostics[] = {
severity: "warning",
line: stmt.left.from_row + 1,
col: stmt.left.from_column + 1,
message: `unused constant '${name}'`
})
}
} else {
push(ast._diagnostics, {
ast._diagnostics[] = {
severity: "warning",
line: stmt.left.from_row + 1,
col: stmt.left.from_column + 1,
message: `unused variable '${name}'`
})
}
}
}
}
@@ -743,15 +742,15 @@ var fold = function(ast) {
sv = scope_var(fn_nr, stmt.name)
if (sv != null && sv.nr_uses == 0) {
stmt.dead = true
push(ast._diagnostics, {
ast._diagnostics[] = {
severity: "warning",
line: stmt.from_row + 1,
col: stmt.from_column + 1,
message: `unused function '${stmt.name}'`
})
}
}
if (stmt.dead != true) push(out, stmt)
}
if (stmt.dead != true) out[] = stmt
i = i + 1
}
return out
@@ -1040,7 +1039,7 @@ var fold = function(ast) {
i = 0
while (i < length(ast.intrinsics)) {
if (used_intrinsics[ast.intrinsics[i]] == true) {
push(new_intrinsics, ast.intrinsics[i])
new_intrinsics[] = ast.intrinsics[i]
}
i = i + 1
}
@@ -1072,16 +1071,16 @@ var fold = function(ast) {
fn_sv = scope_var(0, fn.name)
if (fn_sv != null && fn_sv.nr_uses == 0) {
fn.dead = true
push(ast._diagnostics, {
ast._diagnostics[] = {
severity: "warning",
line: fn.from_row + 1,
col: fn.from_column + 1,
message: `unused function '${fn.name}'`
})
}
}
}
if (fn.dead != true) {
push(live_fns, fn)
live_fns[] = fn
}
fi = fi + 1
}

12
fuzz.ce
View File

@@ -89,7 +89,7 @@ function run_fuzz(seed_val) {
var _parse = function() {
ast = analyze(src, name + ".cm")
} disruption {
push(errors, "parse error")
errors[] = "parse error"
}
_parse()
if (length(errors) > 0) return {seed: seed_val, errors: errors, src: src}
@@ -112,7 +112,7 @@ function run_fuzz(seed_val) {
// Check module-level behavior
if (opt_err != noopt_err) {
push(errors, `module load: opt=${opt_err != null ? opt_err : "ok"} noopt=${noopt_err != null ? noopt_err : "ok"}`)
errors[] = `module load: opt=${opt_err != null ? opt_err : "ok"} noopt=${noopt_err != null ? noopt_err : "ok"}`
return {seed: seed_val, errors: errors, src: src}
}
if (opt_err != null) {
@@ -137,10 +137,10 @@ function run_fuzz(seed_val) {
_run()
if (is_text(ret)) {
push(errors, `self-check ${key}: ${ret}`)
errors[] = `self-check ${key}: ${ret}`
}
if (run_err != null) {
push(errors, `self-check ${key}: unexpected disruption`)
errors[] = `self-check ${key}: unexpected disruption`
}
}
k = k + 1
@@ -174,9 +174,9 @@ function run_fuzz(seed_val) {
_run_noopt()
if (opt_fn_err != noopt_fn_err) {
push(errors, `diff ${key2}: opt=${opt_fn_err != null ? opt_fn_err : "ok"} noopt=${noopt_fn_err != null ? noopt_fn_err : "ok"}`)
errors[] = `diff ${key2}: opt=${opt_fn_err != null ? opt_fn_err : "ok"} noopt=${noopt_fn_err != null ? noopt_fn_err : "ok"}`
} else if (!values_equal(opt_result, noopt_result)) {
push(errors, `diff ${key2}: opt=${describe(opt_result)} noopt=${describe(noopt_result)}`)
errors[] = `diff ${key2}: opt=${describe(opt_result)} noopt=${describe(noopt_result)}`
}
}
k2 = k2 + 1

View File

@@ -241,7 +241,7 @@ function gen_array_test() {
var v = 0
while (i < n) {
v = rand_int(-100, 100)
push(vals, v)
vals[] = v
sum = sum + v
i = i + 1
}
@@ -249,7 +249,7 @@ function gen_array_test() {
var val_strs = []
i = 0
while (i < n) {
push(val_strs, text(vals[i]))
val_strs[] = text(vals[i])
i = i + 1
}

View File

@@ -1,403 +0,0 @@
# Plan: Complete Copying GC Implementation
## Overview
Remove reference counting (DupValue/FreeValue) entirely and complete the Cheney copying garbage collector. Each JSContext will use bump allocation from a heap block, and when out of memory, request a new heap from JSRuntime's buddy allocator and copy live objects to the new heap.
## Target Architecture (from docs/memory.md)
### Object Types (simplified from current):
**Type 0 - Array**: `{ header, length, elements[] }`
**Type 1 - Blob**: `{ header, length, bits[] }`
**Type 2 - Text**: `{ header, length_or_hash, packed_chars[] }`
**Type 3 - Record**: `{ header, prototype, length, key_value_pairs[] }`
**Type 4 - Function**: `{ header, code_ptr, outer_frame_ptr }` - 3 words only, always stone
**Type 5 - Frame**: `{ header, function_ptr, caller_ptr, ret_addr, args[], closure_vars[], local_vars[], temps[] }`
**Type 6 - Code**: Lives in immutable memory only, never copied
**Type 7 - Forward**: Object has moved; cap56 contains new address
### Key Design Points:
- **JSFunction** is just a pointer to code and a pointer to the frame that created it (3 words)
- **Closure variables live in frames** - when a function returns, its frame is "reduced" to just the closure variables
- **Code objects are immutable** - stored in stone memory, never copied during GC
- **Frame reduction**: When a function returns, `caller` is set to zero, signaling the frame can be shrunk
## Current State (needs refactoring)
1. **Partial Cheney GC exists** at `source/quickjs.c:1844-2030`: `ctx_gc`, `gc_copy_value`, `gc_scan_object`
2. **744 calls to JS_DupValue/JS_FreeValue** scattered throughout (currently undefined, causing compilation errors)
3. **Current JSFunction** is bloated (has kind, name, union of cfunc/bytecode/bound) - needs simplification
4. **Current JSVarRef** is a separate object - should be eliminated, closures live in frames
5. **Bump allocator** in `js_malloc` (line 1495) with `heap_base`/`heap_free`/`heap_end`
6. **Buddy allocator** for memory blocks (lines 1727-1837)
7. **Header offset inconsistency** - some structs have header at offset 0, some at offset 8
## Implementation Steps
### Phase 1: Define No-Op DupValue/FreeValue (To Enable Compilation)
Add these near line 100 in `source/quickjs.c`:
```c
/* Copying GC - no reference counting needed */
#define JS_DupValue(ctx, v) (v)
#define JS_FreeValue(ctx, v) ((void)0)
#define JS_DupValueRT(rt, v) (v)
#define JS_FreeValueRT(rt, v) ((void)0)
```
This makes the code compile while keeping existing call sites (they become no-ops).
### Phase 2: Standardize Object Headers (offset 0)
Remove `JSGCObjectHeader` (ref counting remnant) and put `objhdr_t` at offset 0:
```c
typedef struct JSArray {
objhdr_t hdr; // offset 0
word_t length;
JSValue values[];
} JSArray;
typedef struct JSRecord {
objhdr_t hdr; // offset 0
JSRecord *proto;
word_t length;
slot slots[];
} JSRecord;
typedef struct JSText {
objhdr_t hdr; // offset 0
word_t length; // pretext: length, text: hash
word_t packed[];
} JSText;
typedef struct JSBlob {
objhdr_t hdr; // offset 0
word_t length;
uint8_t bits[];
} JSBlob;
/* Simplified JSFunction per memory.md - 3 words */
typedef struct JSFunction {
objhdr_t hdr; // offset 0, always stone
JSCode *code; // pointer to immutable code object
struct JSFrame *outer; // frame that created this function
} JSFunction;
/* JSFrame per memory.md */
typedef struct JSFrame {
objhdr_t hdr; // offset 0
JSFunction *function; // function being executed
struct JSFrame *caller; // calling frame (NULL = reduced/returned)
word_t ret_addr; // return instruction address
JSValue slots[]; // args, closure vars, locals, temps
} JSFrame;
/* JSCode - always in immutable (stone) memory */
typedef struct JSCode {
objhdr_t hdr; // offset 0, always stone
word_t arity; // max number of inputs
word_t frame_size; // capacity of activation frame
word_t closure_size; // reduced capacity for returned frames
word_t entry_point; // address to begin execution
word_t disruption_point;// address of disruption clause
uint8_t bytecode[]; // actual bytecode
} JSCode;
```
### Phase 3: Complete gc_object_size for All Types
Update `gc_object_size` (line 1850) to read header at offset 0:
```c
static size_t gc_object_size(void *ptr) {
objhdr_t hdr = *(objhdr_t*)ptr; // Header at offset 0
uint8_t type = objhdr_type(hdr);
uint64_t cap = objhdr_cap56(hdr);
switch (type) {
case OBJ_ARRAY:
return sizeof(JSArray) + cap * sizeof(JSValue);
case OBJ_BLOB:
return sizeof(JSBlob) + (cap + 7) / 8; // cap is bits
case OBJ_TEXT:
return sizeof(JSText) + ((cap + 1) / 2) * sizeof(uint64_t);
case OBJ_RECORD:
return sizeof(JSRecord) + (cap + 1) * sizeof(slot); // cap is mask
case OBJ_FUNCTION:
return sizeof(JSFunction); // 3 words
case OBJ_FRAME:
return sizeof(JSFrame) + cap * sizeof(JSValue); // cap is slot count
case OBJ_CODE:
return 0; // Code is never copied (immutable)
default:
return 64; // Conservative fallback
}
}
```
### Phase 4: Complete gc_scan_object for All Types
Update `gc_scan_object` (line 1924):
```c
static void gc_scan_object(JSContext *ctx, void *ptr, uint8_t **to_free, uint8_t *to_end) {
objhdr_t hdr = *(objhdr_t*)ptr;
uint8_t type = objhdr_type(hdr);
switch (type) {
case OBJ_ARRAY: {
JSArray *arr = (JSArray*)ptr;
for (uint32_t i = 0; i < arr->length; i++) {
arr->values[i] = gc_copy_value(ctx, arr->values[i], to_free, to_end);
}
break;
}
case OBJ_RECORD: {
JSRecord *rec = (JSRecord*)ptr;
// Copy prototype
if (rec->proto) {
JSValue proto_val = JS_MKPTR(rec->proto);
proto_val = gc_copy_value(ctx, proto_val, to_free, to_end);
rec->proto = (JSRecord*)JS_VALUE_GET_PTR(proto_val);
}
// Copy table entries
uint32_t mask = objhdr_cap56(rec->hdr);
for (uint32_t i = 1; i <= mask; i++) { // Skip slot 0
JSValue k = rec->slots[i].key;
if (!rec_key_is_empty(k) && !rec_key_is_tomb(k)) {
rec->slots[i].key = gc_copy_value(ctx, k, to_free, to_end);
rec->slots[i].value = gc_copy_value(ctx, rec->slots[i].value, to_free, to_end);
}
}
break;
}
case OBJ_FUNCTION: {
JSFunction *func = (JSFunction*)ptr;
// Code is immutable, don't copy - but outer frame needs copying
if (func->outer) {
JSValue outer_val = JS_MKPTR(func->outer);
outer_val = gc_copy_value(ctx, outer_val, to_free, to_end);
func->outer = (JSFrame*)JS_VALUE_GET_PTR(outer_val);
}
break;
}
case OBJ_FRAME: {
JSFrame *frame = (JSFrame*)ptr;
// Copy function pointer
if (frame->function) {
JSValue func_val = JS_MKPTR(frame->function);
func_val = gc_copy_value(ctx, func_val, to_free, to_end);
frame->function = (JSFunction*)JS_VALUE_GET_PTR(func_val);
}
// Copy caller (unless NULL = reduced frame)
if (frame->caller) {
JSValue caller_val = JS_MKPTR(frame->caller);
caller_val = gc_copy_value(ctx, caller_val, to_free, to_end);
frame->caller = (JSFrame*)JS_VALUE_GET_PTR(caller_val);
}
// Copy all slots (args, closure vars, locals, temps)
uint32_t slot_count = objhdr_cap56(frame->hdr);
for (uint32_t i = 0; i < slot_count; i++) {
frame->slots[i] = gc_copy_value(ctx, frame->slots[i], to_free, to_end);
}
break;
}
case OBJ_TEXT:
case OBJ_BLOB:
case OBJ_CODE:
// No internal references to scan
break;
}
}
```
### Phase 5: Fix gc_copy_value Forwarding
Update `gc_copy_value` (line 1883) for offset 0 headers:
```c
static JSValue gc_copy_value(JSContext *ctx, JSValue v, uint8_t **to_free, uint8_t *to_end) {
if (!JS_IsPtr(v)) return v; // Immediate value
void *ptr = JS_VALUE_GET_PTR(v);
// Stone memory - don't copy (includes Code objects)
objhdr_t hdr = *(objhdr_t*)ptr;
if (objhdr_s(hdr)) return v;
// Check if in current heap
if ((uint8_t*)ptr < ctx->heap_base || (uint8_t*)ptr >= ctx->heap_end)
return v; // External allocation
// Already forwarded?
if (objhdr_type(hdr) == OBJ_FORWARD) {
void *new_ptr = (void*)(uintptr_t)objhdr_cap56(hdr);
return JS_MKPTR(new_ptr);
}
// Copy object to new space
size_t size = gc_object_size(ptr);
void *new_ptr = *to_free;
*to_free += size;
memcpy(new_ptr, ptr, size);
// Leave forwarding pointer in old location
*(objhdr_t*)ptr = objhdr_make((uint64_t)(uintptr_t)new_ptr, OBJ_FORWARD, 0, 0, 0, 0);
return JS_MKPTR(new_ptr);
}
```
### Phase 6: Complete GC Root Tracing
Update `ctx_gc` (line 1966) to trace all roots including JSGCRef:
```c
static int ctx_gc(JSContext *ctx) {
// ... existing setup code ...
// Copy roots: global object, class prototypes, etc. (existing)
ctx->global_obj = gc_copy_value(ctx, ctx->global_obj, &to_free, to_end);
ctx->global_var_obj = gc_copy_value(ctx, ctx->global_var_obj, &to_free, to_end);
// ... other existing root copying ...
// Copy GC root stack (JS_PUSH_VALUE/JS_POP_VALUE)
for (JSGCRef *ref = ctx->top_gc_ref; ref; ref = ref->prev) {
ref->val = gc_copy_value(ctx, ref->val, &to_free, to_end);
}
// Copy GC root list (JS_AddGCRef/JS_DeleteGCRef)
for (JSGCRef *ref = ctx->last_gc_ref; ref; ref = ref->prev) {
ref->val = gc_copy_value(ctx, ref->val, &to_free, to_end);
}
// Copy current exception
ctx->current_exception = gc_copy_value(ctx, ctx->current_exception, &to_free, to_end);
// Cheney scan (existing)
// ...
}
```
### Phase 7: Trigger GC on Allocation Failure
Update `js_malloc` (line 1495):
```c
void *js_malloc(JSContext *ctx, size_t size) {
size = (size + 7) & ~7; // Align to 8 bytes
if ((uint8_t*)ctx->heap_free + size > (uint8_t*)ctx->heap_end) {
if (ctx_gc(ctx) < 0) {
JS_ThrowOutOfMemory(ctx);
return NULL;
}
// Retry after GC
if ((uint8_t*)ctx->heap_free + size > (uint8_t*)ctx->heap_end) {
JS_ThrowOutOfMemory(ctx);
return NULL;
}
}
void *ptr = ctx->heap_free;
ctx->heap_free = (uint8_t*)ctx->heap_free + size;
return ptr;
}
```
### Phase 8: Frame Reduction (for closures)
When a function returns, "reduce" its frame to just closure variables:
```c
static void reduce_frame(JSContext *ctx, JSFrame *frame) {
if (frame->caller == NULL) return; // Already reduced
JSCode *code = frame->function->code;
uint32_t closure_size = code->closure_size;
// Shrink capacity to just closure variables
frame->hdr = objhdr_make(closure_size, OBJ_FRAME, 0, 0, 0, 0);
frame->caller = NULL; // Signal: frame is reduced
}
```
### Phase 9: Remove Unused Reference Counting Code
Delete:
- `gc_decref`, `gc_decref_child` functions
- `gc_scan_incref_child`, `gc_scan_incref_child2` functions
- `JS_GCPhaseEnum`, `gc_phase` fields
- `JSGCObjectHeader` struct (merge into objhdr_t)
- `ref_count` fields from any remaining structs
- `mark_function_children_decref` function
- All `free_*` functions that rely on ref counting
## Files to Modify
1. **source/quickjs.c** - Main implementation:
- Add DupValue/FreeValue no-op macros (~line 100)
- Restructure JSArray, JSBlob, JSText, JSRecord (lines 468-499)
- Simplify JSFunction to 3-word struct (line 1205)
- Add JSFrame as heap object (new)
- Restructure JSCode/JSFunctionBytecode (line 1293)
- Fix gc_object_size (line 1850)
- Fix gc_copy_value (line 1883)
- Complete gc_scan_object (line 1924)
- Update ctx_gc for all roots (line 1966)
- Update js_malloc to trigger GC (line 1495)
- Delete ref counting code throughout
2. **source/quickjs.h** - Public API:
- Remove JSGCObjectHeader
- Update JSValue type checks if needed
- Ensure JS_IsStone works with offset 0 headers
## Execution Order
1. **First**: Add DupValue/FreeValue macros (enables compilation)
2. **Second**: Standardize struct layouts (header at offset 0)
3. **Third**: Fix gc_object_size and gc_copy_value
4. **Fourth**: Complete gc_scan_object for all types
5. **Fifth**: Update ctx_gc with complete root tracing
6. **Sixth**: Wire js_malloc to trigger GC
7. **Seventh**: Add frame reduction for closures
8. **Finally**: Remove ref counting dead code
## Verification
1. **Compile test**: `make` should succeed without errors
2. **Basic test**: Run simple scripts:
```js
var a = [1, 2, 3]
log.console(a[1])
```
3. **Stress test**: Allocate many objects to trigger GC:
```js
for (var i = 0; i < 100000; i++) {
var x = { value: i }
}
log.console("done")
```
4. **Closure test**: Test functions with closures survive GC:
```js
fn make_counter() {
var count = 0
fn inc() { count = count + 1; return count }
return inc
}
var c = make_counter()
log.console(c()) // 1
log.console(c()) // 2
```
5. **GC stress with closures**: Create many closures, trigger GC, verify they still work
## Key Design Decisions (Resolved)
1. **JSCode storage**: Lives in stone (immutable) memory, never copied during GC ✓
2. **Header offset**: Standardized to offset 0 for all heap objects ✓
3. **Closure variables**: Live in JSFrame objects; frames are "reduced" when functions return ✓
4. **JSVarRef**: Eliminated - closures reference their outer frame directly ✓

View File

@@ -98,7 +98,7 @@ function gather_graph(locator, visited) {
arrfor(array(deps), function(alias) {
var dep_locator = deps[alias]
add_node(dep_locator)
push(edges, { from: locator, to: dep_locator, alias: alias })
edges[] = { from: locator, to: dep_locator, alias: alias }
gather_graph(dep_locator, visited)
})
}
@@ -117,7 +117,7 @@ if (show_world) {
packages = shop.list_packages()
arrfor(packages, function(p) {
if (p != 'core') {
push(roots, p)
roots[] = p
}
})
} else {
@@ -128,7 +128,7 @@ if (show_world) {
target_locator = shop.resolve_locator(target_locator)
push(roots, target_locator)
roots[] = target_locator
}
arrfor(roots, function(root) {
@@ -164,7 +164,7 @@ if (format == 'tree') {
children = []
arrfor(edges, function(e) {
if (e.from == locator) {
push(children, e)
children[] = e
}
})
@@ -180,7 +180,7 @@ if (format == 'tree') {
children = []
arrfor(edges, function(e) {
if (e.from == roots[i]) {
push(children, e)
children[] = e
}
})
@@ -230,7 +230,7 @@ if (format == 'tree') {
}
arrfor(array(nodes), function(id) {
push(output.nodes, nodes[id])
output.nodes[] = nodes[id]
})
output.edges = edges

783
http.cm Normal file
View File

@@ -0,0 +1,783 @@
var socket = use('socket')
var tls = use('net/tls')
var Blob = use('blob')
// Line terminator used throughout the HTTP/1.1 wire format.
def CRLF = "\r\n"
// Reason phrases for the status codes this module emits, keyed by the
// code rendered as text. Codes not listed here are sent with the reason
// phrase "Unknown" (see respond).
def status_texts = {
  "200": "OK", "201": "Created", "204": "No Content",
  "301": "Moved Permanently", "302": "Found", "307": "Temporary Redirect",
  "400": "Bad Request", "401": "Unauthorized", "403": "Forbidden",
  "404": "Not Found", "405": "Method Not Allowed", "500": "Internal Server Error"
}
// ============================================================
// Server (unchanged)
// ============================================================
// Open a TCP listening socket on 127.0.0.1 at the given port.
// SO_REUSEADDR is set so the port can be rebound immediately after a
// restart. Returns the listening fd; hand it to accept() or on_request().
function serve(port) {
  var listener = socket.socket("AF_INET", "SOCK_STREAM")
  socket.setsockopt(listener, "SOL_SOCKET", "SO_REUSEADDR", true)
  socket.bind(listener, {address: "127.0.0.1", port: port})
  socket.listen(listener, 16)
  return listener
}
// Read one HTTP request from a connected socket and parse it.
// Returns {method, path, url, headers, body, _conn} where header names are
// lowercased, `path` is `url` with any query string stripped, `body` is
// null when absent or empty, and `_conn` is the fd for respond()/sse_open().
// Disrupts if the first read does not contain a complete header block.
function parse_request(conn_fd) {
  // NOTE(review): assumes a single 64 KiB recv covers the entire header
  // block; a request whose headers straddle two reads would disrupt at the
  // CRLFCRLF check below — confirm against socket.recv semantics.
  var data = socket.recv(conn_fd, 65536)
  var raw = text(data)
  var hdr_end = search(raw, CRLF + CRLF)
  if (hdr_end == null) disrupt
  var header_text = text(raw, 0, hdr_end)
  // Whatever followed the blank line in the first read is body prefix.
  var body_text = text(raw, hdr_end + 4)
  var lines = array(header_text, CRLF)
  // Request line: METHOD SP URL SP VERSION.
  var parts = array(lines[0], " ")
  var method = parts[0]
  var url = parts[1]
  // Strip the query string from the routing path, keep it in `url`.
  var qpos = search(url, "?")
  var path = qpos != null ? text(url, 0, qpos) : url
  var headers = {}
  var i = 1
  var colon = null
  var key = null
  var val = null
  // Header lines: "Name: value". Names are lowercased for lookup.
  while (i < length(lines)) {
    colon = search(lines[i], ": ")
    if (colon != null) {
      key = lower(text(lines[i], 0, colon))
      val = text(lines[i], colon + 2)
      headers[key] = val
    }
    i = i + 1
  }
  var cl = headers["content-length"]
  var content_length = null
  var remaining = null
  var more = null
  if (cl != null) content_length = number(cl)
  // If the first read did not deliver the whole body, read the remainder.
  // NOTE(review): a single follow-up recv is assumed sufficient — confirm.
  if (content_length != null && length(body_text) < content_length) {
    remaining = content_length - length(body_text)
    more = socket.recv(conn_fd, remaining)
    body_text = body_text + text(more)
  }
  // Normalize "no body" to null rather than an empty text.
  if (content_length == null || content_length == 0) body_text = null
  return {
    method: method, path: path, url: url,
    headers: headers, body: body_text, _conn: conn_fd
  }
}
// Block until a client connects to the listening fd, then read and parse
// its request. Returns the parsed request record (see parse_request).
function accept(server_fd) {
  var incoming = socket.accept(server_fd)
  return parse_request(incoming.socket)
}
// Asynchronously serve requests: whenever the listening fd becomes
// readable, accept the connection, parse the request, and invoke
// handler(req). A request that fails to parse is dropped silently.
// The readable callback re-arms itself after every accept, so the
// server keeps serving until the fd is closed.
function on_request(server_fd, handler) {
  var _accept = function() {
    var conn = socket.accept(server_fd)
    var req = null
    // Parse inside a disruption guard so one malformed request
    // does not take down the accept loop.
    var _parse = function() {
      req = parse_request(conn.socket)
    } disruption {
      req = null
    }
    _parse()
    if (req != null) handler(req)
    // Re-arm for the next connection.
    socket.on_readable(server_fd, _accept)
  }
  socket.on_readable(server_fd, _accept)
}
// Write a complete HTTP/1.1 response on `conn` and close it.
// `status` is a number (mapped to a reason phrase via status_texts,
// falling back to "Unknown"), `headers` an optional record of extra
// header fields, `body` optional text or a value converted with text().
// Always sends "Connection: close" and a computed Content-Length.
function respond(conn, status, headers, body) {
  var st = status_texts[text(status)]
  if (st == null) st = "Unknown"
  var out = "HTTP/1.1 " + text(status) + " " + st + CRLF
  out = out + "Connection: close" + CRLF
  var body_str = ""
  var keys = null
  var i = 0
  if (body != null) {
    if (is_text(body)) body_str = body
    else body_str = text(body)
  }
  // Caller-supplied headers are emitted verbatim, in record key order.
  if (headers != null) {
    keys = array(headers)
    i = 0
    while (i < length(keys)) {
      out = out + keys[i] + ": " + headers[keys[i]] + CRLF
      i = i + 1
    }
  }
  // NOTE(review): length(body_str) counts text units; if the wire
  // encoding is UTF-8, multi-byte characters would make Content-Length
  // short — confirm how socket.send encodes text.
  out = out + "Content-Length: " + text(length(body_str)) + CRLF
  out = out + CRLF + body_str
  socket.send(conn, out)
  socket.close(conn)
}
// Begin a Server-Sent Events stream on `conn`: write the 200 response
// header with the event-stream content type and keep-alive semantics,
// plus any extra header fields from `headers`. The connection stays
// open; push frames with sse_event() and finish with sse_close().
function sse_open(conn, headers) {
  var head = "HTTP/1.1 200 OK" + CRLF
  head = head + "Content-Type: text/event-stream" + CRLF
  head = head + "Cache-Control: no-cache" + CRLF
  head = head + "Connection: keep-alive" + CRLF
  var names = null
  var n = 0
  if (headers != null) {
    names = array(headers)
    n = 0
    while (n < length(names)) {
      head = head + names[n] + ": " + headers[names[n]] + CRLF
      n = n + 1
    }
  }
  head = head + CRLF
  socket.send(conn, head)
}
// Push one SSE frame ("event:" + "data:" lines) on an open stream.
// Returns true on success, false when the send disrupts (peer gone);
// callers should stop sending and sse_close() on false.
function sse_event(conn, event, data) {
  var payload = "event: " + event + "\ndata: " + data + "\n\n"
  var delivered = true
  var _attempt = function() {
    socket.send(conn, payload)
  } disruption {
    delivered = false
  }
  _attempt()
  return delivered
}
// End a Server-Sent Events stream by closing the connection.
function sse_close(conn) {
  socket.close(conn)
}
// ============================================================
// Blocking client request (kept for compatibility)
// ============================================================
// Blocking HTTP client request (kept for compatibility with callers that
// predate the requestor-based fetch below).
// Sends `method` to `url` with optional `headers` record and `body`
// (text, or converted with text()). Returns {status, headers, body} with
// response header names lowercased, or null on any failure.
// NOTE(review): connects with a plain socket and defaults the port to 80 —
// https URLs are not supported here (unlike the requestor pipeline, which
// consults the scheme); confirm callers only pass http URLs.
function request(method, url, headers, body) {
  // Crude URL split on "/": parts[2] is host[:port], the rest is the path.
  var parts = array(url, "/")
  var host_port = parts[2]
  var path = "/" + text(array(parts, 3, length(parts)), "/")
  var hp = array(host_port, ":")
  var host = hp[0]
  var port = length(hp) > 1 ? number(hp[1]) : 80
  var fd = socket.socket("AF_INET", "SOCK_STREAM")
  var raw = null
  var hdr_end = null
  // All network work happens inside a disruption guard; any failure
  // leaves raw == null and the function returns null.
  var _do = function() {
    socket.connect(fd, {address: host, port: port})
    var body_str = ""
    if (body != null) {
      if (is_text(body)) body_str = body
      else body_str = text(body)
    }
    var keys = null
    var i = 0
    var req = method + " " + path + " HTTP/1.1" + CRLF
    req = req + "Host: " + host_port + CRLF
    req = req + "Connection: close" + CRLF
    if (headers != null) {
      keys = array(headers)
      i = 0
      while (i < length(keys)) {
        req = req + keys[i] + ": " + headers[keys[i]] + CRLF
        i = i + 1
      }
    }
    if (length(body_str) > 0) {
      req = req + "Content-Length: " + text(length(body_str)) + CRLF
    }
    req = req + CRLF + body_str
    socket.send(fd, req)
    // NOTE(review): a single 64 KiB recv is assumed to hold the whole
    // response; larger responses would be truncated — confirm.
    raw = text(socket.recv(fd, 65536))
  } disruption {
    raw = null
  }
  _do()
  socket.close(fd)
  if (raw == null) return null
  hdr_end = search(raw, CRLF + CRLF)
  if (hdr_end == null) return null
  var header_text = text(raw, 0, hdr_end)
  var lines = array(header_text, CRLF)
  // Status line: VERSION SP CODE SP REASON.
  var status_parts = array(lines[0], " ")
  var status = number(status_parts[1])
  var resp_headers = {}
  var hi = 1
  var colon = null
  while (hi < length(lines)) {
    colon = search(lines[hi], ": ")
    if (colon != null) {
      resp_headers[lower(text(lines[hi], 0, colon))] = text(lines[hi], colon + 2)
    }
    hi = hi + 1
  }
  return {
    status: status,
    headers: resp_headers,
    body: text(raw, hdr_end + 4)
  }
}
// ============================================================
// Requestor-based async fetch
// ============================================================
// parse_url requestor — sync, extract {scheme, host, port, path} from URL.
// `value` is either a URL text or a record {url, method, headers, body};
// method defaults to "GET". Calls back with a state record
// {scheme, host, port, path, host_port, method, req_headers, req_body}
// for the next requestor in the fetch pipeline. When no port is given,
// the default follows the scheme (443 for https, otherwise 80).
// (Removed leftover log.console debug tracing from this hot path.)
var parse_url = function(callback, value) {
  var url = null
  var method = "GET"
  var req_headers = null
  var req_body = null
  if (is_text(value)) {
    url = value
  } else {
    url = value.url
    if (value.method != null) method = value.method
    if (value.headers != null) req_headers = value.headers
    if (value.body != null) req_body = value.body
  }
  // strip scheme
  var scheme = "http"
  var rest = url
  var scheme_end = search(url, "://")
  if (scheme_end != null) {
    scheme = lower(text(url, 0, scheme_end))
    rest = text(url, scheme_end + 3, length(url))
  }
  // split host from path
  var slash = search(rest, "/")
  var host_port = rest
  var path = "/"
  if (slash != null) {
    host_port = text(rest, 0, slash)
    path = text(rest, slash, length(rest))
  }
  // split host:port; default port tracks the scheme
  var hp = array(host_port, ":")
  var host = hp[0]
  var port = null
  if (length(hp) > 1) {
    port = number(hp[1])
  } else {
    port = scheme == "https" ? 443 : 80
  }
  callback({
    scheme: scheme, host: host, port: port, path: path,
    host_port: host_port, method: method,
    req_headers: req_headers, req_body: req_body
  })
  return null
}
// resolve_dns requestor — blocking getaddrinfo, swappable for an async
// resolver later. On success passes state extended with {address};
// on failure reports via callback(null, reason).
var resolve_dns = function(callback, state) {
    var ok = true
    var addrs = null
    // getaddrinfo may disrupt (unknown host, no network); trap it so the
    // requestor reports failure through the callback instead of disrupting.
    var _resolve = function() {
        addrs = socket.getaddrinfo(state.host, text(state.port))
    } disruption {
        ok = false
    }
    _resolve()
    if (!ok || addrs == null || length(addrs) == 0) {
        callback(null, "dns resolution failed for " + state.host)
        return null
    }
    // First resolved address wins.
    callback(record(state, {address: addrs[0].address}))
    return null
}
// open_connection requestor — non-blocking connect, plus TLS wrap for https.
// On success passes state extended with {fd, tls} (tls is null for http).
// Returns a cancel function that unwatches/closes the fd at most once.
var open_connection = function(callback, state) {
    var fd = socket.socket("AF_INET", "SOCK_STREAM")
    var cancelled = false
    var cancel = function() {
        var _close = null
        if (!cancelled) {
            cancelled = true
            // Best effort: the fd may already be closed or unwatched.
            _close = function() {
                socket.unwatch(fd)
                socket.close(fd)
            } disruption {}
            _close()
        }
    }
    socket.setnonblock(fd)
    // Wrap in TLS (for https) and hand the connection to the next requestor.
    var finish_connect = function(the_fd) {
        var ctx = null
        if (state.scheme == "https") {
            ctx = tls.wrap(the_fd, state.host)
        }
        callback(record(state, {fd: the_fd, tls: ctx}))
    }
    // non-blocking connect — EINPROGRESS is expected and surfaces as a
    // disruption here; completion is then awaited via on_writable below.
    var connect_err = false
    var _connect = function() {
        socket.connect(fd, {address: state.address, port: state.port})
    } disruption {
        connect_err = true
    }
    _connect()
    // if connect succeeded immediately (localhost, etc) finish synchronously
    var _finish_immediate = null
    if (!connect_err && !cancelled) {
        _finish_immediate = function() {
            finish_connect(fd)
        } disruption {
            cancel()
            callback(null, "connection setup failed")
        }
        _finish_immediate()
        return cancel
    }
    // wait for connect to complete, then check SO_ERROR to distinguish
    // success from an asynchronous connect failure
    socket.on_writable(fd, function() {
        if (cancelled) return
        var err = socket.getsockopt(fd, "SOL_SOCKET", "SO_ERROR")
        if (err != 0) {
            cancel()
            callback(null, "connect failed (errno " + text(err) + ")")
            return
        }
        var _finish = function() {
            finish_connect(fd)
        } disruption {
            cancel()
            callback(null, "connection setup failed")
        }
        _finish()
    })
    return cancel
}
// send_request requestor — format and send the HTTP/1.1 request.
// On success passes state through unchanged; on failure reports once via
// callback(null, reason). Returns a cancel function that suppresses any
// further callback.
// Fix: the disruption handler's `return cancel` only returned from _send,
// so after a send failure execution fell through to the success path and
// the requestor callback fired twice (error, then success). A `failed`
// flag now guards the success callback.
var send_request = function(callback, state) {
    var cancelled = false
    var failed = false
    var cancel = function() {
        cancelled = true
    }
    var _send = function() {
        var body_str = ""
        var keys = null
        var i = 0
        if (state.req_body != null) {
            if (is_text(state.req_body)) body_str = state.req_body
            else body_str = text(state.req_body)
        }
        var req = state.method + " " + state.path + " HTTP/1.1" + CRLF
        req = req + "Host: " + state.host_port + CRLF
        req = req + "Connection: close" + CRLF
        req = req + "User-Agent: cell/1.0" + CRLF
        req = req + "Accept: */*" + CRLF
        if (state.req_headers != null) {
            keys = array(state.req_headers)
            i = 0
            while (i < length(keys)) {
                req = req + keys[i] + ": " + state.req_headers[keys[i]] + CRLF
                i = i + 1
            }
        }
        if (length(body_str) > 0) {
            req = req + "Content-Length: " + text(length(body_str)) + CRLF
        }
        req = req + CRLF + body_str
        if (state.tls != null) {
            tls.send(state.tls, req)
        } else {
            socket.send(state.fd, req)
        }
    } disruption {
        failed = true
        if (!cancelled) callback(null, "send request failed")
    }
    _send()
    if (!cancelled && !failed) callback(state)
    return cancel
}
// Parse the status line and headers out of raw response text.
// Returns {status, headers, body_start} where body_start is the character
// offset of the body, or null if the blank header-terminating line has
// not arrived yet (caller retries with more data).
function parse_headers(raw) {
    var hdr_end = search(raw, CRLF + CRLF)
    if (hdr_end == null) return null
    var header_text = text(raw, 0, hdr_end)
    var lines = array(header_text, CRLF)
    // Status line: "HTTP/1.1 200 OK" — field [1] is the numeric status.
    var status_parts = array(lines[0], " ")
    var status_code = number(status_parts[1])
    var headers = {}
    var i = 1
    var colon = null
    // Header names are lower-cased for case-insensitive lookup.
    while (i < length(lines)) {
        colon = search(lines[i], ": ")
        if (colon != null) {
            headers[lower(text(lines[i], 0, colon))] = text(lines[i], colon + 2)
        }
        i = i + 1
    }
    return {
        status: status_code, headers: headers,
        body_start: hdr_end + 4
    }
}
// Decode chunked transfer encoding (text version, for async responses).
// Walks "size-hex CRLF data CRLF" frames until the 0-length terminator or
// until the input runs out, returning whatever decoded so far.
// NOTE(review): chunk extensions ("size;ext") are not stripped — the hex
// parse would return null on them and decoding would stop; assumed absent.
function decode_chunked(body_text) {
    var result = ""
    var pos = 0
    var chunk_end = null
    var chunk_size = null
    while (pos < length(body_text)) {
        // Find the CRLF ending this chunk's size line. The offset is
        // relative to pos because we search a suffix of body_text.
        chunk_end = search(text(body_text, pos), CRLF)
        if (chunk_end == null) return result
        chunk_size = number(text(body_text, pos, pos + chunk_end), 16)
        // Size 0 is the terminating chunk; an unparsable size ends decoding.
        if (chunk_size == null || chunk_size == 0) return result
        pos = pos + chunk_end + 2
        result = result + text(body_text, pos, pos + chunk_size)
        // Skip the chunk data plus its trailing CRLF.
        pos = pos + chunk_size + 2
    }
    return result
}
// Decode chunked transfer encoding (blob version, preserves binary data).
// buf is the full response blob; body_start_bytes is the byte offset of the
// body. Blob read/length offsets are in bits, hence the * 8 / / 8 math.
function decode_chunked_blob(buf, body_start_bytes) {
    var result = Blob()
    var pos = body_start_bytes
    var total_bytes = length(buf) / 8
    var header_end = null
    var header_blob = null
    var header_text = null
    var crlf_pos = null
    var chunk_size = null
    var chunk_data = null
    while (pos < total_bytes) {
        // Read up to 20 bytes — enough for any realistic "size-hex CRLF"
        // chunk header — and scan that window as text for the CRLF.
        header_end = pos + 20
        if (header_end > total_bytes) header_end = total_bytes
        header_blob = buf.read_blob(pos * 8, header_end * 8)
        stone(header_blob)
        header_text = text(header_blob)
        crlf_pos = search(header_text, CRLF)
        if (crlf_pos == null) break
        chunk_size = number(text(header_text, 0, crlf_pos), 16)
        // Size 0 is the terminator; an unparsable size ends decoding.
        if (chunk_size == null || chunk_size == 0) break
        pos = pos + crlf_pos + 2
        chunk_data = buf.read_blob(pos * 8, (pos + chunk_size) * 8)
        stone(chunk_data)
        result.write_blob(chunk_data)
        // Skip the chunk data plus its trailing CRLF.
        pos = pos + chunk_size + 2
    }
    stone(result)
    return result
}
// receive_response requestor — async incremental receive.
// Accumulates the response as text, parses headers as soon as they arrive,
// and finishes when the body is complete per Content-Length, the chunked
// terminator, or EOF (Connection: close). Returns a cancel function.
var receive_response = function(callback, state) {
    var cancelled = false
    var buffer = ""
    var parsed = null
    var content_length = null
    var is_chunked = false
    var body_complete = false  // NOTE(review): unused — candidate for removal
    // cancel closes the connection at most once (best effort).
    var cancel = function() {
        var _cleanup = null
        if (!cancelled) {
            cancelled = true
            _cleanup = function() {
                if (state.tls != null) {
                    tls.close(state.tls)
                } else {
                    socket.unwatch(state.fd)
                    socket.close(state.fd)
                }
            } disruption {}
            _cleanup()
        }
    }
    // Deliver the final {status, headers, body} and close the connection.
    var finish = function() {
        if (cancelled) return
        var body_text = text(buffer, parsed.body_start)
        if (is_chunked) {
            body_text = decode_chunked(body_text)
        }
        // clean up connection
        var _cleanup = function() {
            if (state.tls != null) {
                tls.close(state.tls)
            } else {
                socket.close(state.fd)
            }
        } disruption {}
        _cleanup()
        callback({
            status: parsed.status,
            headers: parsed.headers,
            body: body_text
        })
    }
    // Returns true once the body is known to be complete. Also lazily
    // parses headers and latches transfer-encoding / content-length.
    var check_complete = function() {
        var te = null
        var cl = null
        var body_text = null
        // still waiting for headers
        if (parsed == null) {
            parsed = parse_headers(buffer)
            if (parsed == null) return false
            te = parsed.headers["transfer-encoding"]
            if (te != null && search(lower(te), "chunked") != null) {
                is_chunked = true
            }
            cl = parsed.headers["content-length"]
            if (cl != null) content_length = number(cl)
        }
        body_text = text(buffer, parsed.body_start)
        if (is_chunked) {
            // chunked: look for the terminating 0-length chunk
            if (search(body_text, CRLF + "0" + CRLF) != null) return true
            if (starts_with(body_text, "0" + CRLF)) return true
            return false
        }
        if (content_length != null) {
            return length(body_text) >= content_length
        }
        // connection: close — we read until EOF (handled by recv returning 0 bytes)
        return false
    }
    // One-shot read handler; re-registers itself until complete or EOF.
    var on_data = function() {
        if (cancelled) return
        var chunk = null
        var got_data = false
        var eof = false
        var _recv = function() {
            if (state.tls != null) {
                chunk = tls.recv(state.tls, 16384)
            } else {
                chunk = socket.recv(state.fd, 16384)
            }
        } disruption {
            // recv error — treat as EOF
            eof = true
        }
        _recv()
        var chunk_text = null
        if (!eof && chunk != null) {
            stone(chunk)
            // NOTE(review): body is accumulated via text conversion —
            // confirm binary payloads survive this path losslessly.
            chunk_text = text(chunk)
            if (length(chunk_text) > 0) {
                buffer = buffer + chunk_text
                got_data = true
            } else {
                eof = true
            }
        }
        if (got_data && check_complete()) {
            finish()
            return
        }
        if (eof) {
            // connection closed — if we have headers, deliver what we have
            if (parsed != null || parse_headers(buffer) != null) {
                if (parsed == null) parsed = parse_headers(buffer)
                finish()
            } else {
                cancel()
                callback(null, "connection closed before headers received")
            }
            return
        }
        // re-register for more data (one-shot watches)
        if (!cancelled) {
            if (state.tls != null) {
                tls.on_readable(state.tls, on_data)
            } else {
                socket.on_readable(state.fd, on_data)
            }
        }
    }
    // start reading
    if (state.tls != null) {
        tls.on_readable(state.tls, on_data)
    } else {
        socket.on_readable(state.fd, on_data)
    }
    return cancel
}
// ============================================================
// fetch — synchronous HTTP(S) GET, returns response body (stoned blob)
// ============================================================
// Synchronous HTTP(S) GET. Returns the response body as a stoned blob,
// null on network failure or malformed response, and disrupts on a
// non-2xx status (redirects are not followed).
var fetch = function(url) {
    var scheme = "http"
    var rest = url
    var scheme_end = search(url, "://")
    var slash = null
    var host_port = null
    var path = "/"
    var hp = null
    var host = null
    var port = null
    var fd = null
    var ctx = null
    var buf = Blob()
    var raw_text = null
    var hdr_end = null
    var header_text = null
    var body_start_bits = null
    var body = null
    var addrs = null
    var address = null
    var ok = true
    var status_line = null
    var status_code = null
    // Strip the scheme, then split host[:port] from the path.
    if (scheme_end != null) {
        scheme = lower(text(url, 0, scheme_end))
        rest = text(url, scheme_end + 3, length(url))
    }
    slash = search(rest, "/")
    host_port = rest
    if (slash != null) {
        host_port = text(rest, 0, slash)
        path = text(rest, slash, length(rest))
    }
    hp = array(host_port, ":")
    host = hp[0]
    port = length(hp) > 1 ? number(hp[1]) : (scheme == "https" ? 443 : 80)
    // Blocking DNS lookup; first resolved address wins.
    addrs = socket.getaddrinfo(host, text(port))
    if (addrs == null || length(addrs) == 0) return null
    address = addrs[0].address
    fd = socket.socket("AF_INET", "SOCK_STREAM")
    // Connect, optionally wrap in TLS, send the request, and read the
    // whole response until EOF. Any disruption marks the fetch failed.
    var _do = function() {
        var req = null
        var chunk = null
        socket.connect(fd, {address: address, port: port})
        if (scheme == "https") ctx = tls.wrap(fd, host)
        req = "GET " + path + " HTTP/1.1" + CRLF
        req = req + "Host: " + host_port + CRLF
        req = req + "Connection: close" + CRLF
        req = req + "User-Agent: cell/1.0" + CRLF
        req = req + "Accept: */*" + CRLF + CRLF
        if (ctx != null) tls.send(ctx, req)
        else socket.send(fd, req)
        while (true) {
            if (ctx != null) chunk = tls.recv(ctx, 16384)
            else chunk = socket.recv(fd, 16384)
            if (chunk == null) break
            stone(chunk)
            if (length(chunk) == 0) break
            buf.write_blob(chunk)
        }
    } disruption {
        ok = false
    }
    _do()
    // Always close the connection, even after a failure.
    var _cleanup = function() {
        if (ctx != null) tls.close(ctx)
        else socket.close(fd)
    } disruption {}
    _cleanup()
    if (!ok) return null
    stone(buf)
    raw_text = text(buf)
    hdr_end = search(raw_text, CRLF + CRLF)
    if (hdr_end == null) return null
    header_text = text(raw_text, 0, hdr_end)
    status_line = text(header_text, 0, search(header_text, CRLF) || length(header_text))
    // Status code sits at a fixed offset in "HTTP/1.1 NNN ...".
    status_code = number(text(status_line, 9, 12))
    if (status_code == null || status_code < 200 || status_code >= 300) {
        log.error("fetch: " + status_line)
        disrupt
    }
    if (search(lower(header_text), "transfer-encoding: chunked") != null) {
        body = decode_chunked_blob(buf, hdr_end + 4)
        return body
    }
    // Headers are ASCII so char offset = byte offset
    body_start_bits = (hdr_end + 4) * 8
    body = buf.read_blob(body_start_bits, length(buf))
    stone(body)
    return body
}
// ============================================================
// fetch_requestor — async requestor pipeline for fetch
// ============================================================
// Async fetch pipeline: URL parse → DNS → connect → send → receive.
// Each stage is a requestor; sequence chains them, threading state through.
var fetch_requestor = sequence([
    parse_url,
    resolve_dns,
    open_connection,
    send_request,
    receive_response
])
// Close a file descriptor (exported so callers can close server/client fds).
function close(fd) {
    socket.close(fd)
}
// Module exports.
return {
    // server
    serve: serve, accept: accept, on_request: on_request,
    respond: respond, close: close,
    sse_open: sse_open, sse_event: sse_event, sse_close: sse_close,
    // client
    fetch: fetch,
    fetch_requestor: fetch_requestor,
    request: request
}

View File

@@ -109,7 +109,7 @@ function trace_imports(file_path, depth) {
all_packages[imp_pkg] = true
push(all_imports, {
all_imports[] = {
from: file_path,
from_pkg: file_pkg,
module_path: mod_path,
@@ -117,7 +117,7 @@ function trace_imports(file_path, depth) {
package: imp_pkg,
type: imp_type,
depth: depth
})
}
// Recurse into resolved scripts
if (resolved && (ends_with(resolved, '.cm') || ends_with(resolved, '.ce'))) {

View File

@@ -1,6 +1,6 @@
// Minimal bootstrap — seeds the content-addressed cache
// Only runs on cold start (C runtime couldn't find engine in cache)
// Hidden vars: os, core_path, shop_path
// Hidden vars: os, core_path, shop_path, native_mode (optional)
var load_internal = os.load_internal
function use_embed(name) {
return load_internal("js_core_" + name + "_use")
@@ -89,20 +89,190 @@ function compile_and_cache(name, source_path) {
}
}
// Seed the cache with everything engine needs
var seed_files = [
// --- Native compilation support ---
function detect_host_target() {
var platform = os.platform()
var arch = os.arch ? os.arch() : 'arm64'
if (platform == 'macOS' || platform == 'darwin')
return arch == 'x86_64' ? 'macos_x86_64' : 'macos_arm64'
if (platform == 'Linux' || platform == 'linux')
return arch == 'x86_64' ? 'linux' : 'linux_arm64'
if (platform == 'Windows' || platform == 'windows')
return 'windows'
return null
}
function detect_cc() {
var platform = os.platform()
if (platform == 'macOS') return 'clang'
return 'cc'
}
// Compute native dylib cache path matching build.cm's scheme:
// cache_path(native_cache_content(src, target, ''), SALT_NATIVE)
function native_dylib_cache_path(src, target) {
var native_key = src + '\n' + target + '\nnative\n'
var full_key = native_key + '\nnative'
return cache_path(content_hash(full_key))
}
// Compile a module to a native dylib and cache it
var _qbe_mod = null
var _qbe_emit_mod = null
var _host_target = null
var _cc = null
var _is_darwin = false
var _rt_compiled = false
function compile_native_cached(name, source_path) {
var source_blob = fd.slurp(source_path)
var src = text(source_blob)
var dylib_path = native_dylib_cache_path(src, _host_target)
var ast = null
var compiled = null
var il_parts = null
var helpers_il = null
var all_fns = null
var full_il = null
var asm_text = null
var tmp = null
var rc = null
var rt_o = null
var qbe_rt_path = null
var link_cmd = null
if (dylib_path && fd.is_file(dylib_path)) {
os.print("bootstrap: native cache hit: " + name + "\n")
return
}
var t0 = null
var t1 = null
os.print("bootstrap: compiling native: " + name + "\n")
t0 = os.now()
ast = analyze(src, source_path)
compiled = streamline_mod(mcode_mod(ast))
t1 = os.now()
os.print(" [" + name + "] pipeline (tok+parse+fold+mcode+streamline): " + text((t1 - t0) / 1000000) + "ms\n")
t0 = os.now()
il_parts = _qbe_emit_mod(compiled, _qbe_mod, null)
t1 = os.now()
os.print(" [" + name + "] qbe_emit: " + text((t1 - t0) / 1000000) + "ms\n")
helpers_il = (il_parts.helpers && length(il_parts.helpers) > 0)
? text(il_parts.helpers, "\n") : ""
all_fns = text(il_parts.functions, "\n")
full_il = il_parts.data + "\n\n" + helpers_il + "\n\n" + all_fns
t0 = os.now()
asm_text = os.qbe(full_il)
t1 = os.now()
os.print(" [" + name + "] os.qbe (QBE compile): " + text((t1 - t0) / 1000000) + "ms\n")
tmp = '/tmp/cell_boot_' + name
fd.slurpwrite(tmp + '.s', stone(blob(asm_text)))
t0 = os.now()
rc = os.system(_cc + ' -c ' + tmp + '.s -o ' + tmp + '.o')
t1 = os.now()
os.print(" [" + name + "] clang -c: " + text((t1 - t0) / 1000000) + "ms\n")
if (rc != 0) {
os.print("error: assembly failed for " + name + "\n")
disrupt
}
// Compile QBE runtime stubs (once)
rt_o = '/tmp/cell_qbe_rt.o'
if (!_rt_compiled && !fd.is_file(rt_o)) {
qbe_rt_path = core_path + '/src/qbe_rt.c'
rc = os.system(_cc + ' -c ' + qbe_rt_path + ' -o ' + rt_o + ' -fPIC')
if (rc != 0) {
os.print("error: qbe_rt compilation failed\n")
disrupt
}
_rt_compiled = true
}
// Link dylib
ensure_build_dir()
link_cmd = _cc + ' -shared -fPIC'
if (_is_darwin)
link_cmd = link_cmd + ' -undefined dynamic_lookup'
link_cmd = link_cmd + ' ' + tmp + '.o ' + rt_o + ' -o ' + dylib_path
t0 = os.now()
rc = os.system(link_cmd)
t1 = os.now()
os.print(" [" + name + "] clang -shared (link): " + text((t1 - t0) / 1000000) + "ms\n")
if (rc != 0) {
os.print("error: linking failed for " + name + "\n")
disrupt
}
}
// --- Main bootstrap logic ---
// Check if native_mode was passed from C runtime
var _native = false
var _check_nm = function() {
if (native_mode) _native = true
} disruption {}
_check_nm()
var _targets = null
var _ti = 0
var _te = null
if (_native) {
// Native path: compile everything to native dylibs
_qbe_mod = boot_load("qbe")
_qbe_emit_mod = boot_load("qbe_emit")
_host_target = detect_host_target()
_cc = detect_cc()
_is_darwin = os.platform() == 'macOS'
if (!_host_target) {
os.print("error: could not detect host target for native compilation\n")
disrupt
}
// Also seed bytecode cache for engine (so non-native path still works)
compile_and_cache("engine", core_path + '/internal/engine.cm')
// Compile pipeline modules + qbe/qbe_emit + engine to native dylibs
_targets = [
{name: "tokenize", path: "tokenize.cm"},
{name: "parse", path: "parse.cm"},
{name: "fold", path: "fold.cm"},
{name: "mcode", path: "mcode.cm"},
{name: "streamline", path: "streamline.cm"},
{name: "qbe", path: "qbe.cm"},
{name: "qbe_emit", path: "qbe_emit.cm"},
{name: "engine", path: "internal/engine.cm"}
]
_ti = 0
while (_ti < length(_targets)) {
_te = _targets[_ti]
compile_native_cached(_te.name, core_path + '/' + _te.path)
_ti = _ti + 1
}
} else {
// Bytecode path: seed cache with everything engine needs
_targets = [
{name: "tokenize", path: "tokenize.cm"},
{name: "parse", path: "parse.cm"},
{name: "fold", path: "fold.cm"},
{name: "mcode", path: "mcode.cm"},
{name: "streamline", path: "streamline.cm"},
{name: "engine", path: "internal/engine.cm"}
]
var _i = 0
var entry = null
while (_i < length(seed_files)) {
entry = seed_files[_i]
compile_and_cache(entry.name, core_path + '/' + entry.path)
_i = _i + 1
]
_ti = 0
while (_ti < length(_targets)) {
_te = _targets[_ti]
compile_and_cache(_te.name, core_path + '/' + _te.path)
_ti = _ti + 1
}
}
os.print("bootstrap: cache seeded\n")

472
internal/enet.c Normal file
View File

@@ -0,0 +1,472 @@
#include "cell.h"
#define ENET_IMPLEMENTATION
#include "enet.h"
#include <stdio.h>
#include <string.h>
#include <math.h>
/* Class IDs for the two opaque wrapper types exposed to scripts. */
static JSClassID enet_host_id;
static JSClassID enet_peer_class_id;
/* Host finalizer: destroy the underlying ENetHost when the wrapper object
   is collected. */
static void js_enet_host_finalizer(JSRuntime *rt, JSValue val)
{
    ENetHost *host = JS_GetOpaque(val, enet_host_id);
    if (host) enet_host_destroy(host);
}
static JSClassDef enet_host_def = {
    "ENetHost",
    .finalizer = js_enet_host_finalizer,
};
/* Peers are owned by their host (no finalizer on the peer class). */
static JSClassDef enet_peer_def = {
    "ENetPeer",
};
/* Helper: create a JS peer wrapper for an ENetPeer pointer.
   Fresh wrapper each time — no caching in peer->data. */
static JSValue peer_wrap(JSContext *ctx, ENetPeer *peer)
{
    JSValue obj = JS_NewObjectClass(ctx, enet_peer_class_id);
    if (JS_IsException(obj)) return obj;
    JS_SetOpaque(obj, peer);
    return obj;
}
/* ── Host functions ─────────────────────────────────────────── */
/* create_host([config]) → host
   config: {address: "any"|"broadcast"|<ip>, port, channels,
            incoming_bandwidth, outgoing_bandwidth}.
   With no config (or a non-record argument) an unbound client host is
   created. Fix: channel_limit is size_t, but JS_ToUint32 was writing
   through it as if it were uint32_t — a type-mismatched store (broken on
   big-endian, UB in general). Converted via a uint32_t temporary. */
static JSValue js_enet_create_host(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    ENetHost *host;
    ENetAddress address;
    ENetAddress *send = &address;
    size_t peer_count = 1000;
    size_t channel_limit = 0;
    enet_uint32 incoming_bandwidth = 0;
    enet_uint32 outgoing_bandwidth = 0;
    uint32_t chan32 = 0;
    JSValue obj;
    if (argc < 1 || !JS_IsRecord(argv[0])) {
        /* No config: client-style host with ENet defaults. */
        host = enet_host_create(NULL, peer_count, channel_limit, incoming_bandwidth, outgoing_bandwidth);
        if (!host) return JS_RaiseDisrupt(ctx, "Failed to create ENet client host");
        goto wrap;
    }
    JSValue config_obj = argv[0];
    JSValue addr_val = JS_GetPropertyStr(ctx, config_obj, "address");
    const char *addr_str = JS_IsText(addr_val) ? JS_ToCString(ctx, addr_val) : NULL;
    if (!addr_str)
        send = NULL;  /* no address given: create an unbound (client) host */
    else {
        JSValue port_val = JS_GetPropertyStr(ctx, config_obj, "port");
        int32_t port32 = 0;
        JS_ToInt32(ctx, &port32, port_val);
        if (strcmp(addr_str, "any") == 0)
            address.host = ENET_HOST_ANY;
        else if (strcmp(addr_str, "broadcast") == 0)
            enet_address_set_host_ip(&address, "255.255.255.255");
        else {
            int err = enet_address_set_host_ip(&address, addr_str);
            if (err != 0) {
                JS_FreeCString(ctx, addr_str);
                return JS_RaiseDisrupt(ctx, "Failed to set host IP. Error: %d", err);
            }
        }
        address.port = (enet_uint16)port32;
        JS_FreeCString(ctx, addr_str);
    }
    /* channel_limit is size_t — convert through a uint32_t so JS_ToUint32
       never writes through a mismatched pointer type. */
    JSValue chan_val = JS_GetPropertyStr(ctx, config_obj, "channels");
    JS_ToUint32(ctx, &chan32, chan_val);
    channel_limit = chan32;
    JSValue in_bw_val = JS_GetPropertyStr(ctx, config_obj, "incoming_bandwidth");
    JS_ToUint32(ctx, &incoming_bandwidth, in_bw_val);
    JSValue out_bw_val = JS_GetPropertyStr(ctx, config_obj, "outgoing_bandwidth");
    JS_ToUint32(ctx, &outgoing_bandwidth, out_bw_val);
    host = enet_host_create(send, peer_count, channel_limit, incoming_bandwidth, outgoing_bandwidth);
    if (!host) return JS_RaiseDisrupt(ctx, "Failed to create ENet host");
wrap:
    obj = JS_NewObjectClass(ctx, enet_host_id);
    if (JS_IsException(obj)) {
        enet_host_destroy(host);
        return obj;
    }
    JS_SetOpaque(obj, host);
    return obj;
}
/* service(host, callback [, timeout_seconds])
   Pump the ENet host: each pending event is delivered to callback as a
   record {type, peer, channelID?, data?}. Every loop iteration waits up
   to the same timeout; the loop exits when a wait elapses with no event. */
static JSValue js_enet_service(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 2) return JS_RaiseDisrupt(ctx, "service: expected (host, callback)");
    ENetHost *host = JS_GetOpaque(argv[0], enet_host_id);
    if (!host) return JS_RaiseDisrupt(ctx, "service: invalid host");
    if (!JS_IsFunction(argv[1]))
        return JS_RaiseDisrupt(ctx, "service: expected callback function");
    enet_uint32 timeout_ms = 0;
    /* Timeout argument is in seconds; ENet wants milliseconds. */
    if (argc >= 3 && !JS_IsNull(argv[2])) {
        double secs = 0;
        JS_ToFloat64(ctx, &secs, argv[2]);
        if (secs > 0) timeout_ms = (enet_uint32)(secs * 1000.0);
    }
    /* Root the event object so the GC sees it across the JS_Call. */
    JS_FRAME(ctx);
    JS_ROOT(event_obj, JS_NULL);
    ENetEvent event;
    while (enet_host_service(host, &event, timeout_ms) > 0) {
        event_obj.val = JS_NewObject(ctx);
        JSValue peer_val = peer_wrap(ctx, event.peer);
        JS_SetPropertyStr(ctx, event_obj.val, "peer", peer_val);
        switch (event.type) {
        case ENET_EVENT_TYPE_CONNECT:
            JS_SetPropertyStr(ctx, event_obj.val, "type", JS_NewString(ctx, "connect"));
            break;
        case ENET_EVENT_TYPE_RECEIVE:
            JS_SetPropertyStr(ctx, event_obj.val, "type", JS_NewString(ctx, "receive"));
            JS_SetPropertyStr(ctx, event_obj.val, "channelID", JS_NewInt32(ctx, event.channelID));
            if (event.packet->dataLength > 0) {
                /* Copy the payload into a stoned blob before the packet
                   is destroyed below. */
                JSValue data_val = js_new_blob_stoned_copy(ctx, event.packet->data, event.packet->dataLength);
                JS_SetPropertyStr(ctx, event_obj.val, "data", data_val);
            }
            enet_packet_destroy(event.packet);
            break;
        case ENET_EVENT_TYPE_DISCONNECT:
            JS_SetPropertyStr(ctx, event_obj.val, "type", JS_NewString(ctx, "disconnect"));
            break;
        case ENET_EVENT_TYPE_DISCONNECT_TIMEOUT:
            JS_SetPropertyStr(ctx, event_obj.val, "type", JS_NewString(ctx, "disconnect_timeout"));
            break;
        case ENET_EVENT_TYPE_NONE:
            JS_SetPropertyStr(ctx, event_obj.val, "type", JS_NewString(ctx, "none"));
            break;
        }
        JS_Call(ctx, argv[1], JS_NULL, 1, &event_obj.val);
    }
    JS_RETURN_NULL();
}
/* connect(host, address, port) → peer
   Initiates an outgoing connection (2 channels, no user data). The peer
   wrapper is returned immediately; the "connect" event arrives later via
   service(). Fix: enet_address_set_host's return value was ignored, so a
   failed name resolution silently connected to a garbage address. */
static JSValue js_enet_connect(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 3) return JS_RaiseDisrupt(ctx, "connect: expected (host, address, port)");
    ENetHost *host = JS_GetOpaque(argv[0], enet_host_id);
    if (!host) return JS_RaiseDisrupt(ctx, "connect: invalid host");
    const char *hostname = JS_ToCString(ctx, argv[1]);
    if (!hostname) return JS_EXCEPTION;
    int32_t port = 0;
    JS_ToInt32(ctx, &port, argv[2]);
    ENetAddress address;
    if (enet_address_set_host(&address, hostname) != 0) {
        JS_FreeCString(ctx, hostname);
        return JS_RaiseDisrupt(ctx, "connect: could not resolve address");
    }
    JS_FreeCString(ctx, hostname);
    address.port = (enet_uint16)port;
    ENetPeer *peer = enet_host_connect(host, &address, 2, 0);
    if (!peer) return JS_RaiseDisrupt(ctx, "No available peers for connection");
    return peer_wrap(ctx, peer);
}
/* flush(host) — push any queued outgoing packets onto the wire without
   waiting for the next service() call. */
static JSValue js_enet_flush(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 1)
        return JS_RaiseDisrupt(ctx, "flush: expected (host)");
    ENetHost *host = JS_GetOpaque(argv[0], enet_host_id);
    if (host == NULL)
        return JS_RaiseDisrupt(ctx, "flush: invalid host");
    enet_host_flush(host);
    return JS_NULL;
}
/* broadcast(host, data) — queue a reliable packet on channel 0 to every
   connected peer. data may be text or a blob. */
static JSValue js_enet_broadcast(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 2) return JS_RaiseDisrupt(ctx, "broadcast: expected (host, data)");
    ENetHost *host = JS_GetOpaque(argv[0], enet_host_id);
    if (!host) return JS_RaiseDisrupt(ctx, "broadcast: invalid host");
    const char *data_str = NULL;
    size_t data_len = 0;
    uint8_t *buf = NULL;
    if (JS_IsText(argv[1])) {
        data_str = JS_ToCStringLen(ctx, &data_len, argv[1]);
        if (!data_str) return JS_EXCEPTION;
    } else if (js_is_blob(ctx, argv[1])) {
        buf = js_get_blob_data(ctx, &data_len, argv[1]);
        if (!buf) return JS_EXCEPTION;
    } else {
        return JS_RaiseDisrupt(ctx, "broadcast: data must be string or blob");
    }
    /* enet_packet_create copies the payload, so the C string can be freed
       right away. */
    ENetPacket *packet = enet_packet_create(data_str ? (const void *)data_str : (const void *)buf, data_len, ENET_PACKET_FLAG_RELIABLE);
    if (data_str) JS_FreeCString(ctx, data_str);
    if (!packet) return JS_RaiseDisrupt(ctx, "Failed to create ENet packet");
    /* enet_host_broadcast takes ownership of the packet. */
    enet_host_broadcast(host, 0, packet);
    return JS_NULL;
}
/* host_port(host) → number — the local port the host is bound to. */
static JSValue js_enet_host_port(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 1)
        return JS_RaiseDisrupt(ctx, "host_port: expected (host)");
    ENetHost *host = JS_GetOpaque(argv[0], enet_host_id);
    if (host == NULL)
        return JS_RaiseDisrupt(ctx, "host_port: invalid host");
    return JS_NewInt32(ctx, host->address.port);
}
/* host_address(host) → string — the host's bound IP formatted as text,
   or null if ENet cannot format it. */
static JSValue js_enet_host_address(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 1)
        return JS_RaiseDisrupt(ctx, "host_address: expected (host)");
    ENetHost *host = JS_GetOpaque(argv[0], enet_host_id);
    if (host == NULL)
        return JS_RaiseDisrupt(ctx, "host_address: invalid host");
    char ip_str[128];
    int rc = enet_address_get_host_ip(&host->address, ip_str, sizeof(ip_str));
    return rc == 0 ? JS_NewString(ctx, ip_str) : JS_NULL;
}
/* ── Peer functions ─────────────────────────────────────────── */
/* send(peer, data) — queue a reliable packet on channel 0 to one peer.
   data may be text or a blob. Fix: when enet_peer_send fails it does NOT
   take ownership of the packet, so the packet leaked; it is now destroyed
   on the failure path. */
static JSValue js_enet_send(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 2) return JS_RaiseDisrupt(ctx, "send: expected (peer, data)");
    ENetPeer *peer = JS_GetOpaque(argv[0], enet_peer_class_id);
    if (!peer) return JS_RaiseDisrupt(ctx, "send: invalid peer");
    const char *data_str = NULL;
    size_t data_len = 0;
    uint8_t *buf = NULL;
    if (JS_IsText(argv[1])) {
        data_str = JS_ToCStringLen(ctx, &data_len, argv[1]);
        if (!data_str) return JS_EXCEPTION;
    } else if (js_is_blob(ctx, argv[1])) {
        buf = js_get_blob_data(ctx, &data_len, argv[1]);
        if (!buf) return JS_EXCEPTION;
    } else {
        return JS_RaiseDisrupt(ctx, "send: data must be string or blob");
    }
    /* enet_packet_create copies the payload, so the C string can be freed
       right away. */
    ENetPacket *packet = enet_packet_create(data_str ? (const void *)data_str : (const void *)buf, data_len, ENET_PACKET_FLAG_RELIABLE);
    if (data_str) JS_FreeCString(ctx, data_str);
    if (!packet) return JS_RaiseDisrupt(ctx, "Failed to create ENet packet");
    if (enet_peer_send(peer, 0, packet) < 0) {
        /* On failure ENet does not take ownership — free to avoid a leak. */
        enet_packet_destroy(packet);
        return JS_RaiseDisrupt(ctx, "Unable to send packet");
    }
    return JS_NULL;
}
/* disconnect(peer) — request a graceful disconnect; the matching
   "disconnect" event is delivered later via service(). */
static JSValue js_enet_disconnect(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 1) return JS_RaiseDisrupt(ctx, "disconnect: expected (peer)");
    ENetPeer *peer = JS_GetOpaque(argv[0], enet_peer_class_id);
    if (!peer) return JS_RaiseDisrupt(ctx, "disconnect: invalid peer");
    enet_peer_disconnect(peer, 0);
    return JS_NULL;
}
/* disconnect_now(peer) — immediate disconnect; queued packets may be
   dropped and the remote side gets no guaranteed notification. */
static JSValue js_enet_disconnect_now(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 1) return JS_RaiseDisrupt(ctx, "disconnect_now: expected (peer)");
    ENetPeer *peer = JS_GetOpaque(argv[0], enet_peer_class_id);
    if (!peer) return JS_RaiseDisrupt(ctx, "disconnect_now: invalid peer");
    enet_peer_disconnect_now(peer, 0);
    return JS_NULL;
}
/* disconnect_later(peer) — disconnect once all queued outgoing packets
   have been delivered. */
static JSValue js_enet_disconnect_later(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 1) return JS_RaiseDisrupt(ctx, "disconnect_later: expected (peer)");
    ENetPeer *peer = JS_GetOpaque(argv[0], enet_peer_class_id);
    if (!peer) return JS_RaiseDisrupt(ctx, "disconnect_later: invalid peer");
    enet_peer_disconnect_later(peer, 0);
    return JS_NULL;
}
/* reset(peer) — forcibly reset the peer's connection state locally. */
static JSValue js_enet_reset(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 1) return JS_RaiseDisrupt(ctx, "reset: expected (peer)");
    ENetPeer *peer = JS_GetOpaque(argv[0], enet_peer_class_id);
    if (!peer) return JS_RaiseDisrupt(ctx, "reset: invalid peer");
    enet_peer_reset(peer);
    return JS_NULL;
}
/* ping(peer) — send a ping to refresh round-trip-time measurement. */
static JSValue js_enet_ping(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 1) return JS_RaiseDisrupt(ctx, "ping: expected (peer)");
    ENetPeer *peer = JS_GetOpaque(argv[0], enet_peer_class_id);
    if (!peer) return JS_RaiseDisrupt(ctx, "ping: invalid peer");
    enet_peer_ping(peer);
    return JS_NULL;
}
/* throttle_configure(peer, interval, acceleration, deceleration)
   Tune ENet's dynamic packet throttle (arguments in ENet's native units —
   see the ENet docs for their meaning). */
static JSValue js_enet_throttle_configure(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 4) return JS_RaiseDisrupt(ctx, "throttle_configure: expected (peer, interval, accel, decel)");
    ENetPeer *peer = JS_GetOpaque(argv[0], enet_peer_class_id);
    if (!peer) return JS_RaiseDisrupt(ctx, "throttle_configure: invalid peer");
    int interval, acceleration, deceleration;
    /* JS_ToInt32 returns non-zero on conversion failure. */
    if (JS_ToInt32(ctx, &interval, argv[1]) || JS_ToInt32(ctx, &acceleration, argv[2]) || JS_ToInt32(ctx, &deceleration, argv[3]))
        return JS_RaiseDisrupt(ctx, "throttle_configure: expected integer arguments");
    enet_peer_throttle_configure(peer, interval, acceleration, deceleration);
    return JS_NULL;
}
/* peer_timeout(peer, limit, min, max)
   Set the retry limit and the minimum/maximum timeout window before a
   peer is declared lost (units per ENet's enet_peer_timeout). */
static JSValue js_enet_peer_timeout(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (argc < 4) return JS_RaiseDisrupt(ctx, "peer_timeout: expected (peer, limit, min, max)");
    ENetPeer *peer = JS_GetOpaque(argv[0], enet_peer_class_id);
    if (!peer) return JS_RaiseDisrupt(ctx, "peer_timeout: invalid peer");
    int timeout_limit, timeout_min, timeout_max;
    if (JS_ToInt32(ctx, &timeout_limit, argv[1]) || JS_ToInt32(ctx, &timeout_min, argv[2]) || JS_ToInt32(ctx, &timeout_max, argv[3]))
        return JS_RaiseDisrupt(ctx, "peer_timeout: expected integer arguments");
    enet_peer_timeout(peer, timeout_limit, timeout_min, timeout_max);
    return JS_NULL;
}
/* ── Peer property getters ──────────────────────────────────── */
/* Generate a one-argument getter that validates the peer wrapper and
   returns peer->field converted to a JS value by `convert`. */
#define PEER_GETTER(name, field, convert) \
static JSValue js_enet_##name(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv) { \
if (argc < 1) return JS_RaiseDisrupt(ctx, #name ": expected (peer)"); \
ENetPeer *peer = JS_GetOpaque(argv[0], enet_peer_class_id); \
if (!peer) return JS_RaiseDisrupt(ctx, #name ": invalid peer"); \
return convert(ctx, peer->field); \
}
/* Conversion helpers used by PEER_GETTER. */
static inline JSValue _int32(JSContext *ctx, int v) { return JS_NewInt32(ctx, v); }
static inline JSValue _uint32(JSContext *ctx, unsigned int v) { return JS_NewUint32(ctx, v); }
PEER_GETTER(peer_rtt, roundTripTime, _int32)
PEER_GETTER(peer_rtt_variance, roundTripTimeVariance, _int32)
PEER_GETTER(peer_last_send_time, lastSendTime, _int32)
PEER_GETTER(peer_last_receive_time, lastReceiveTime, _int32)
PEER_GETTER(peer_mtu, mtu, _int32)
PEER_GETTER(peer_outgoing_data_total, outgoingDataTotal, _int32)
PEER_GETTER(peer_incoming_data_total, incomingDataTotal, _int32)
PEER_GETTER(peer_packet_loss, packetLoss, _int32)
PEER_GETTER(peer_state, state, _int32)
PEER_GETTER(peer_reliable_data_in_transit, reliableDataInTransit, _int32)
/* peer_incoming_bandwidth(peer) -> bytes/second, or Infinity when
   unthrottled (ENet encodes "unlimited" as 0). */
static JSValue js_enet_peer_incoming_bandwidth(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  if (argc < 1) return JS_RaiseDisrupt(ctx, "peer_incoming_bandwidth: expected (peer)");
  ENetPeer *peer = JS_GetOpaque(argv[0], enet_peer_class_id);
  if (!peer) return JS_RaiseDisrupt(ctx, "peer_incoming_bandwidth: invalid peer");
  if (peer->incomingBandwidth == 0) return JS_NewFloat64(ctx, INFINITY);
  /* incomingBandwidth is enet_uint32; use an unsigned conversion so
     rates >= 2^31 don't come back negative in JS. */
  return JS_NewUint32(ctx, peer->incomingBandwidth);
}
/* peer_outgoing_bandwidth(peer) -> bytes/second, or Infinity when
   unthrottled (ENet encodes "unlimited" as 0). */
static JSValue js_enet_peer_outgoing_bandwidth(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  if (argc < 1) return JS_RaiseDisrupt(ctx, "peer_outgoing_bandwidth: expected (peer)");
  ENetPeer *peer = JS_GetOpaque(argv[0], enet_peer_class_id);
  if (!peer) return JS_RaiseDisrupt(ctx, "peer_outgoing_bandwidth: invalid peer");
  if (peer->outgoingBandwidth == 0) return JS_NewFloat64(ctx, INFINITY);
  /* outgoingBandwidth is enet_uint32; use an unsigned conversion so
     rates >= 2^31 don't come back negative in JS. */
  return JS_NewUint32(ctx, peer->outgoingBandwidth);
}
/* peer_port(peer) -> the remote UDP port of the peer's address. */
static JSValue js_enet_peer_port(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *p;
  if (argc < 1)
    return JS_RaiseDisrupt(ctx, "peer_port: expected (peer)");
  p = JS_GetOpaque(argv[0], enet_peer_class_id);
  if (p == NULL)
    return JS_RaiseDisrupt(ctx, "peer_port: invalid peer");
  return JS_NewUint32(ctx, p->address.port);
}
/* peer_address(peer) -> the remote IP formatted as a string, or null
   when enet_address_get_host_ip cannot format it. */
static JSValue js_enet_peer_address(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  char buf[128];
  ENetPeer *p;
  if (argc < 1)
    return JS_RaiseDisrupt(ctx, "peer_address: expected (peer)");
  p = JS_GetOpaque(argv[0], enet_peer_class_id);
  if (p == NULL)
    return JS_RaiseDisrupt(ctx, "peer_address: invalid peer");
  if (enet_address_get_host_ip(&p->address, buf, sizeof buf) != 0)
    return JS_NULL;
  return JS_NewString(ctx, buf);
}
/* resolve_hostname(hostname) -> first resolved IP address as a string,
   or null when resolution fails.
   Fixes in this revision: the previous body was a stub that converted
   the argument, freed it and always returned null; it also never
   checked argc and would pass a NULL from a failed JS_ToCString to
   JS_FreeCString while swallowing the pending exception. */
static JSValue js_enet_resolve_hostname(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetAddress addr;
  char ip_str[128];
  const char *hostname;
  int resolved;
  if (argc < 1)
    return JS_RaiseDisrupt(ctx, "resolve_hostname: expected (hostname)");
  hostname = JS_ToCString(ctx, argv[0]);
  if (!hostname) return JS_EXCEPTION;   /* conversion failed; exception pending */
  /* enet_address_set_host performs the DNS lookup; 0 means success. */
  resolved = enet_address_set_host(&addr, hostname) == 0;
  JS_FreeCString(ctx, hostname);
  if (!resolved) return JS_NULL;
  /* Render the resolved binary address back to dotted/numeric text. */
  if (enet_address_get_host_ip(&addr, ip_str, sizeof(ip_str)) != 0)
    return JS_NULL;
  return JS_NewString(ctx, ip_str);
}
/* ── Module export ──────────────────────────────────────────── */
/* Function table exported on the module object by
   js_core_internal_enet_use. The integer in each entry is the declared
   arity (`length`), not an enforced argument count — each binding does
   its own argc check. Grouped by the handle type the first argument
   takes: host-valued vs peer-valued. */
static const JSCFunctionListEntry js_enet_funcs[] = {
/* host */
JS_CFUNC_DEF("create_host", 1, js_enet_create_host),
JS_CFUNC_DEF("service", 2, js_enet_service),
JS_CFUNC_DEF("connect", 3, js_enet_connect),
JS_CFUNC_DEF("flush", 1, js_enet_flush),
JS_CFUNC_DEF("broadcast", 2, js_enet_broadcast),
JS_CFUNC_DEF("host_port", 1, js_enet_host_port),
JS_CFUNC_DEF("host_address", 1, js_enet_host_address),
/* peer */
JS_CFUNC_DEF("send", 2, js_enet_send),
JS_CFUNC_DEF("disconnect", 1, js_enet_disconnect),
JS_CFUNC_DEF("disconnect_now", 1, js_enet_disconnect_now),
JS_CFUNC_DEF("disconnect_later", 1, js_enet_disconnect_later),
JS_CFUNC_DEF("reset", 1, js_enet_reset),
JS_CFUNC_DEF("ping", 1, js_enet_ping),
JS_CFUNC_DEF("throttle_configure", 4, js_enet_throttle_configure),
JS_CFUNC_DEF("peer_timeout", 4, js_enet_peer_timeout),
JS_CFUNC_DEF("peer_address", 1, js_enet_peer_address),
JS_CFUNC_DEF("peer_port", 1, js_enet_peer_port),
JS_CFUNC_DEF("peer_rtt", 1, js_enet_peer_rtt),
JS_CFUNC_DEF("peer_rtt_variance", 1, js_enet_peer_rtt_variance),
JS_CFUNC_DEF("peer_incoming_bandwidth", 1, js_enet_peer_incoming_bandwidth),
JS_CFUNC_DEF("peer_outgoing_bandwidth", 1, js_enet_peer_outgoing_bandwidth),
JS_CFUNC_DEF("peer_last_send_time", 1, js_enet_peer_last_send_time),
JS_CFUNC_DEF("peer_last_receive_time", 1, js_enet_peer_last_receive_time),
JS_CFUNC_DEF("peer_mtu", 1, js_enet_peer_mtu),
JS_CFUNC_DEF("peer_outgoing_data_total", 1, js_enet_peer_outgoing_data_total),
JS_CFUNC_DEF("peer_incoming_data_total", 1, js_enet_peer_incoming_data_total),
JS_CFUNC_DEF("peer_packet_loss", 1, js_enet_peer_packet_loss),
JS_CFUNC_DEF("peer_state", 1, js_enet_peer_state),
JS_CFUNC_DEF("peer_reliable_data_in_transit", 1, js_enet_peer_reliable_data_in_transit),
JS_CFUNC_DEF("resolve_hostname", 1, js_enet_resolve_hostname),
};
/* Module entry point: initializes the ENet library, registers the host
   and peer opaque classes, and returns an object populated with the
   js_enet_funcs table. */
JSValue js_core_internal_enet_use(JSContext *ctx)
{
/* NOTE(review): enet_initialize's return value is ignored here — if it
   can fail on this platform, the bindings below would operate on an
   uninitialized library; confirm whether that's acceptable. */
enet_initialize();
/* JS_FRAME/JS_ROOT/JS_RETURN appear to be this project's GC rooting
   macros — presumably JS_RETURN pops the frame opened by JS_FRAME;
   verify against their definitions. */
JS_FRAME(ctx);
JS_NewClassID(&enet_host_id);
JS_NewClass(ctx, enet_host_id, &enet_host_def);
JS_NewClassID(&enet_peer_class_id);
JS_NewClass(ctx, enet_peer_class_id, &enet_peer_def);
JS_ROOT(export_obj, JS_NewObject(ctx));
JS_SetPropertyFunctionList(ctx, export_obj.val, js_enet_funcs, countof(js_enet_funcs));
JS_RETURN(export_obj.val);
}

File diff suppressed because it is too large Load Diff

View File

@@ -117,10 +117,10 @@ JSC_CCALL(fd_read,
JSC_SCALL(fd_slurp,
struct stat st;
if (stat(str, &st) != 0)
return JS_RaiseDisrupt(js, "stat failed: %s", strerror(errno));
return JS_RaiseDisrupt(js, "stat failed for %s: %s", str, strerror(errno));
if (!S_ISREG(st.st_mode))
return JS_RaiseDisrupt(js, "path is not a regular file");
return JS_RaiseDisrupt(js, "path %s is not a regular file", str);
size_t size = st.st_size;
if (size == 0)
@@ -223,12 +223,10 @@ JSC_SCALL(fd_rmdir,
if (ffd.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) {
JSValue args[2] = { JS_NewString(js, full_path), JS_TRUE };
JSValue result = js_fd_rmdir(js, JS_NULL, 2, args);
JS_FreeValue(js, args[0]);
if (JS_IsException(result)) {
FindClose(hFind);
return result;
}
JS_FreeValue(js, result);
} else {
if (unlink(full_path) != 0) {
FindClose(hFind);
@@ -252,12 +250,10 @@ JSC_SCALL(fd_rmdir,
if (lstat(full_path, &st) == 0 && S_ISDIR(st.st_mode)) {
JSValue args[2] = { JS_NewString(js, full_path), JS_TRUE };
JSValue result = js_fd_rmdir(js, JS_NULL, 2, args);
JS_FreeValue(js, args[0]);
if (JS_IsException(result)) {
closedir(dir);
return result;
}
JS_FreeValue(js, result);
} else {
if (unlink(full_path) != 0) {
closedir(dir);
@@ -640,7 +636,8 @@ static void visit_directory(JSContext *js, JSValue *results, int *result_count,
} else {
strcpy(item_rel, ffd.cFileName);
}
JS_SetPropertyNumber(js, *results, (*result_count)++, JS_NewString(js, item_rel));
JSValue name_str = JS_NewString(js, item_rel);
JS_SetPropertyNumber(js, *results, (*result_count)++, name_str);
if (recurse) {
struct stat st;
@@ -665,7 +662,8 @@ static void visit_directory(JSContext *js, JSValue *results, int *result_count,
} else {
strcpy(item_rel, dir->d_name);
}
JS_SetPropertyNumber(js, *results, (*result_count)++, JS_NewString(js, item_rel));
JSValue name_str = JS_NewString(js, item_rel);
JS_SetPropertyNumber(js, *results, (*result_count)++, name_str);
if (recurse) {
struct stat st;
@@ -765,6 +763,22 @@ JSC_CCALL(fd_readlink,
#endif
)
JSC_CCALL(fd_on_readable,
int fd = js2fd(js, argv[0]);
if (fd < 0) return JS_EXCEPTION;
if (!JS_IsFunction(argv[1]))
return JS_RaiseDisrupt(js, "on_readable: callback must be a function");
actor_watch_readable(js, fd, argv[1]);
return JS_NULL;
)
JSC_CCALL(fd_unwatch,
int fd = js2fd(js, argv[0]);
if (fd < 0) return JS_EXCEPTION;
actor_unwatch(js, fd);
return JS_NULL;
)
static const JSCFunctionListEntry js_fd_funcs[] = {
MIST_FUNC_DEF(fd, open, 2),
MIST_FUNC_DEF(fd, write, 2),
@@ -791,6 +805,8 @@ static const JSCFunctionListEntry js_fd_funcs[] = {
MIST_FUNC_DEF(fd, symlink, 2),
MIST_FUNC_DEF(fd, realpath, 1),
MIST_FUNC_DEF(fd, readlink, 1),
MIST_FUNC_DEF(fd, on_readable, 2),
MIST_FUNC_DEF(fd, unwatch, 1),
};
JSValue js_core_internal_fd_use(JSContext *js) {

View File

@@ -326,7 +326,6 @@ JSC_SCALL(fd_readdir,
if (pd_file->listfiles(str, listfiles_cb, &ctx, 0) != 0) {
const char* err = pd_file->geterr();
JS_FreeValue(js, ret_arr);
return JS_RaiseDisrupt(js, "listfiles failed: %s", err ? err : "unknown error");
}

View File

@@ -1,5 +1,5 @@
#define NOTA_IMPLEMENTATION
#include "quickjs-internal.h"
#include "pit_internal.h"
#include "cell.h"
static int nota_get_arr_len (JSContext *ctx, JSValue arr) {
@@ -24,7 +24,7 @@ typedef struct NotaEncodeContext {
static void nota_stack_push (NotaEncodeContext *enc, JSValueConst val) {
NotaVisitedNode *node = (NotaVisitedNode *)sys_malloc (sizeof (NotaVisitedNode));
JS_PushGCRef (enc->ctx, &node->ref);
node->ref.val = JS_DupValue (enc->ctx, val);
node->ref.val = val;
node->next = enc->visited_list;
enc->visited_list = node;
}
@@ -32,7 +32,6 @@ static void nota_stack_push (NotaEncodeContext *enc, JSValueConst val) {
static void nota_stack_pop (NotaEncodeContext *enc) {
NotaVisitedNode *node = enc->visited_list;
enc->visited_list = node->next;
JS_FreeValue (enc->ctx, node->ref.val);
JS_PopGCRef (enc->ctx, &node->ref);
sys_free (node);
}
@@ -48,14 +47,12 @@ static int nota_stack_has (NotaEncodeContext *enc, JSValueConst val) {
}
static JSValue nota_apply_replacer (NotaEncodeContext *enc, JSValueConst holder, JSValueConst key, JSValueConst val) {
if (!enc->replacer_ref || JS_IsNull (enc->replacer_ref->val)) return JS_DupValue (enc->ctx, val);
if (!enc->replacer_ref || JS_IsNull (enc->replacer_ref->val)) return val;
JSValue args[2] = { JS_DupValue (enc->ctx, key), JS_DupValue (enc->ctx, val) };
JSValue args[2] = { key, val };
JSValue result = JS_Call (enc->ctx, enc->replacer_ref->val, holder, 2, args);
JS_FreeValue (enc->ctx, args[0]);
JS_FreeValue (enc->ctx, args[1]);
if (JS_IsException (result)) return JS_DupValue (enc->ctx, val);
if (JS_IsException (result)) return val;
return result;
}
@@ -140,15 +137,11 @@ static char *js_do_nota_decode (JSContext *js, JSValue *tmp, char *nota, JSValue
}
if (!JS_IsNull (reviver)) {
JSValue args[2] = { JS_DupValue (js, key), JS_DupValue (js, *tmp) };
JSValue args[2] = { key, *tmp };
JSValue revived = JS_Call (js, reviver, holder, 2, args);
JS_FreeValue (js, args[0]);
JS_FreeValue (js, args[1]);
if (!JS_IsException (revived)) {
JS_FreeValue (js, *tmp);
*tmp = revived;
} else {
JS_FreeValue (js, revived);
}
}
@@ -229,10 +222,8 @@ static void nota_encode_value (NotaEncodeContext *enc, JSValueConst val, JSValue
if (!JS_IsNull (adata)) {
nota_write_sym (&enc->nb, NOTA_PRIVATE);
nota_encode_value (enc, adata, replaced_ref.val, JS_NULL);
JS_FreeValue (ctx, adata);
break;
}
JS_FreeValue (ctx, adata);
if (nota_stack_has (enc, replaced_ref.val)) {
enc->cycle = 1;
break;

View File

@@ -1,5 +1,5 @@
#include "cell.h"
#include "cell_internal.h"
#include "pit_internal.h"
#include <sys/stat.h>
#include <sys/types.h>
@@ -308,18 +308,16 @@ JSC_SCALL(os_system,
setting pause_flag = 2. Bump turn_gen so stale timer events are
ignored, and clear the pause flag so the VM doesn't raise
"interrupted" on the next backward branch. */
cell_rt *crt = JS_GetContextOpaque(js);
if (crt) {
atomic_fetch_add_explicit(&crt->turn_gen, 1, memory_order_relaxed);
atomic_fetch_add_explicit(&js->turn_gen, 1, memory_order_relaxed);
JS_SetPauseFlag(js, 0);
crt->turn_start_ns = cell_ns();
}
js->turn_start_ns = cell_ns();
ret = number2js(js, err);
)
JSC_CCALL(os_exit,
int code = 0;
if (argc > 0) JS_ToInt32(js, &code, argv[0]);
if (argc > 0 && !JS_IsNull(argv[0]))
JS_ToInt32(js, &code, argv[0]);
exit(code);
)
@@ -705,6 +703,27 @@ static JSValue js_os_stack(JSContext *js, JSValue self, int argc, JSValue *argv)
JS_RETURN(arr.val);
}
static JSValue js_os_unstone(JSContext *js, JSValue self, int argc, JSValue *argv) {
if (argc < 1) return JS_NULL;
JSValue obj = argv[0];
if (mist_is_blob(obj)) {
JSBlob *bd = (JSBlob *)chase(obj);
bd->mist_hdr = objhdr_set_s(bd->mist_hdr, false);
return obj;
}
if (JS_IsArray(obj)) {
JSArray *arr = JS_VALUE_GET_ARRAY(obj);
arr->mist_hdr = objhdr_set_s(arr->mist_hdr, false);
return obj;
}
if (mist_is_gc_object(obj)) {
JSRecord *rec = JS_VALUE_GET_RECORD(obj);
rec->mist_hdr = objhdr_set_s(rec->mist_hdr, false);
return obj;
}
return JS_NULL;
}
static const JSCFunctionListEntry js_os_funcs[] = {
MIST_FUNC_DEF(os, platform, 0),
MIST_FUNC_DEF(os, arch, 0),
@@ -733,6 +752,7 @@ static const JSCFunctionListEntry js_os_funcs[] = {
MIST_FUNC_DEF(os, getenv, 1),
MIST_FUNC_DEF(os, qbe, 1),
MIST_FUNC_DEF(os, stack, 1),
MIST_FUNC_DEF(os, unstone, 1),
};
JSValue js_core_internal_os_use(JSContext *js) {

View File

@@ -13,8 +13,8 @@ var os = use('internal/os')
var link = use('link')
// These come from env (via core_extras in engine.cm):
// analyze, run_ast_fn, core_json, use_cache, shop_path, actor_api, runtime_env,
// content_hash, cache_path, ensure_build_dir
// analyze, run_ast_fn, core_json, use_cache, core_path, shop_path, actor_api,
// runtime_env, content_hash, cache_path, ensure_build_dir
var shop_json = core_json
var global_shop_path = shop_path
var my$_ = actor_api
@@ -29,21 +29,18 @@ function safe_c_name(name) {
function pull_from_cache(content)
{
var path = hash_path(content)
if (fd.is_file(path))
var path = cache_path(content)
if (fd.is_file(path)) {
log.system('shop: cache hit')
return fd.slurp(path)
}
}
function put_into_cache(content, obj)
{
var path = hash_path(content)
var path = cache_path(content)
fd.slurpwrite(path, obj)
}
function hash_path(content, salt)
{
var s = salt || 'mach'
return global_shop_path + '/build/' + content_hash(stone(blob(text(content) + '\n' + s)))
log.system('shop: cached')
}
var Shop = {}
@@ -139,6 +136,42 @@ function package_in_shop(package) {
return package in lock
}
// Derive canonical lock name from a directory's git origin remote.
// Reads .git/config, extracts the origin url, strips https:// and .git suffix,
// then checks if that name exists in the lock file.
function git_origin_to_lock_name(dir) {
var git_cfg = dir + '/.git/config'
var raw = null
var lines = null
var in_origin = false
var url = null
var candidate = null
if (!fd.is_file(git_cfg)) return null
raw = text(fd.slurp(git_cfg))
if (!raw) return null
lines = array(raw, '\n')
arrfor(lines, function(line) {
var trimmed = trim(line)
if (trimmed == '[remote "origin"]') {
in_origin = true
} else if (starts_with(trimmed, '[')) {
in_origin = false
} else if (in_origin && starts_with(trimmed, 'url = ')) {
url = trim(text(trimmed, 6))
}
})
if (!url) return null
candidate = url
if (starts_with(candidate, 'https://'))
candidate = text(candidate, 8)
else if (starts_with(candidate, 'http://'))
candidate = text(candidate, 7)
if (ends_with(candidate, '.git'))
candidate = text(candidate, 0, length(candidate) - 4)
if (package_in_shop(candidate)) return candidate
return null
}
function abs_path_to_package(package_dir)
{
if (!fd.is_file(package_dir + '/cell.toml')) {
@@ -182,20 +215,21 @@ function abs_path_to_package(package_dir)
if (package_in_shop(package_dir))
return package_dir
// For local directories (e.g., linked targets), read the package name from cell.toml
var _toml_path = package_dir + '/cell.toml'
var content = null
var cfg = null
if (fd.is_file(_toml_path)) {
content = text(fd.slurp(_toml_path))
cfg = toml.decode(content)
if (cfg.package)
return cfg.package
}
// For local directories, try git remote origin to derive canonical name
var _git_name = git_origin_to_lock_name(package_dir)
if (_git_name) return _git_name
return package_dir
}
function safe_canonicalize(pkg) {
if (!pkg || !starts_with(pkg, '/')) return pkg
var _canon = null
var _try = function() { _canon = abs_path_to_package(pkg) } disruption {}
_try()
return (_canon && _canon != pkg) ? _canon : pkg
}
// given a file, find the absolute path, package name, and import name
Shop.file_info = function(file) {
var info = {
@@ -212,16 +246,19 @@ Shop.file_info = function(file) {
info.is_actor = true
// Find package directory and determine package name
// find_package_dir resolves symlinks internally, so we must use the
// resolved file path for substring math to get the correct name.
var pkg_dir = pkg_tools.find_package_dir(file)
var resolved_file = fd.realpath(file) || file
if (pkg_dir) {
info.package = abs_path_to_package(pkg_dir)
if (info.is_actor)
info.name = text(file, length(pkg_dir) + 1, length(file) - length(ACTOR_EXT))
info.name = text(resolved_file, length(pkg_dir) + 1, length(resolved_file) - length(ACTOR_EXT))
else if (info.is_module)
info.name = text(file, length(pkg_dir) + 1, length(file) - length(MOD_EXT))
info.name = text(resolved_file, length(pkg_dir) + 1, length(resolved_file) - length(MOD_EXT))
else
info.name = text(file, length(pkg_dir) + 1)
info.name = text(resolved_file, length(pkg_dir) + 1)
}
return info
@@ -418,6 +455,16 @@ Shop.extract_commit_hash = function(pkg, response) {
var open_dls = {}
var package_dylibs = {} // pkg -> [{file, symbol, dylib}, ...]
var reload_hashes = {} // cache_key -> content hash for reload change detection
function open_dylib_cached(path) {
var handle = open_dls[path]
if (handle) return handle
handle = os.dylib_open(path)
if (!handle) return null
open_dls[path] = handle
return handle
}
// Host target detection for native dylib resolution
function detect_host_target() {
@@ -455,9 +502,14 @@ function try_native_mod_dylib(pkg, stem) {
if (!fd.is_file(build_path)) return null
log.shop('native dylib cache hit: ' + stem)
var handle = os.dylib_open(build_path)
var handle = open_dylib_cached(build_path)
if (!handle) return null
var sym = Shop.c_symbol_for_file(pkg, stem)
// Verify the symbol actually exists in the dylib before returning native descriptor
if (sym && !os.dylib_has_symbol(handle, sym) && !os.dylib_has_symbol(handle, 'cell_main')) {
log.shop('native dylib for ' + stem + ' (dylib=' + build_path + ') missing symbol ' + sym + ' and cell_main, falling back to bytecode')
return null
}
return {_native: true, _handle: handle, _sym: sym}
}
@@ -698,6 +750,8 @@ function resolve_mod_fn(path, pkg) {
var dylib_path = null
var handle = null
var sym = null
var _fi = null
var _fi_pkg = null
policy = get_policy()
@@ -709,8 +763,8 @@ function resolve_mod_fn(path, pkg) {
}
}
// Check for native .cm dylib at deterministic path first
if (policy.allow_dylib && pkg && _stem) {
// Check for native .cm dylib at deterministic path first (only in native mode)
if (policy.native && policy.allow_dylib && pkg && _stem) {
native_result = try_native_mod_dylib(pkg, _stem)
if (native_result != null) return native_result
}
@@ -723,7 +777,10 @@ function resolve_mod_fn(path, pkg) {
if (dylib_path) {
handle = os.dylib_open(dylib_path)
if (handle) {
sym = pkg && _stem ? Shop.c_symbol_for_file(pkg, _stem) : null
// Derive symbol from file_info (authoritative package), not caller's pkg
_fi = Shop.file_info(path)
_fi_pkg = _fi.package || pkg
sym = _fi_pkg ? Shop.c_symbol_for_file(_fi_pkg, (_fi.name ? _fi.name + (_fi.is_actor ? '.ce' : '.cm') : fd.basename(path))) : null
return {_native: true, _handle: handle, _sym: sym}
}
}
@@ -740,11 +797,11 @@ function resolve_mod_fn(path, pkg) {
// Check for cached mcode in content-addressed store
if (policy.allow_compile) {
cached_mcode_path = hash_path(content_key, 'mcode')
cached_mcode_path = cache_path(content_key, 'mcode')
if (fd.is_file(cached_mcode_path)) {
mcode_json = text(fd.slurp(cached_mcode_path))
compiled = mach_compile_mcode_bin(path, mcode_json)
put_into_cache(content_key, compiled)
if (!policy.native) put_into_cache(content_key, compiled)
return compiled
}
}
@@ -765,12 +822,14 @@ function resolve_mod_fn(path, pkg) {
mcode_json = shop_json.encode(optimized)
// Cache mcode (architecture-independent) in content-addressed store
if (!policy.native) {
fd.ensure_dir(global_shop_path + '/build')
fd.slurpwrite(cached_mcode_path, stone(blob(mcode_json)))
}
// Cache mach blob
compiled = mach_compile_mcode_bin(path, mcode_json)
put_into_cache(content_key, compiled)
if (!policy.native) put_into_cache(content_key, compiled)
return compiled
}
@@ -779,6 +838,50 @@ function resolve_mod_fn(path, pkg) {
disrupt
}
// Resolve a module's bytecode only (skip native dylib check).
// Used as fallback when a cached native dylib fails to load.
function resolve_mod_fn_bytecode(path, pkg) {
if (!fd.is_file(path)) return null
var content = text(fd.slurp(path))
if (length(content) == 0) return null
var content_key = stone(blob(content))
var cached = null
var cached_mcode_path = null
var mcode_json = null
var compiled = null
// Check cache for pre-compiled .mach blob
cached = pull_from_cache(content_key)
if (cached) return cached
// Check for cached mcode
cached_mcode_path = cache_path(content_key, 'mcode')
if (fd.is_file(cached_mcode_path)) {
mcode_json = text(fd.slurp(cached_mcode_path))
compiled = mach_compile_mcode_bin(path, mcode_json)
put_into_cache(content_key, compiled)
return compiled
}
// Compile from source
if (!_mcode_mod) _mcode_mod = use_cache['core/mcode'] || use_cache['mcode']
if (!_streamline_mod) _streamline_mod = use_cache['core/streamline'] || use_cache['streamline']
if (!_mcode_mod || !_streamline_mod) return null
var ast = analyze(content, path)
var ir = _mcode_mod(ast)
var optimized = _streamline_mod(ir)
mcode_json = shop_json.encode(optimized)
fd.ensure_dir(global_shop_path + '/build')
fd.slurpwrite(cache_path(content_key, 'mcode'), stone(blob(mcode_json)))
compiled = mach_compile_mcode_bin(path, mcode_json)
put_into_cache(content_key, compiled)
return compiled
}
// given a path and a package context
// return module info about where it was found
// Resolve a module path to {path, scope, pkg} without compiling.
@@ -828,7 +931,7 @@ function resolve_path(path, ctx)
if (fd.is_file(ctx_path)) {
is_core = (ctx == 'core') || is_core_dir(ctx_dir)
scope = is_core ? SCOPE_CORE : SCOPE_LOCAL
return {path: ctx_path, scope: scope, pkg: is_core ? 'core' : ctx}
return {path: ctx_path, scope: scope, pkg: is_core ? 'core' : safe_canonicalize(ctx)}
}
if (is_internal_path(path))
@@ -838,7 +941,7 @@ function resolve_path(path, ctx)
if (alias) {
alias_path = get_packages_dir() + '/' + fd.safe_package_path(alias.package) + '/' + alias.path
if (fd.is_file(alias_path))
return {path: alias_path, scope: SCOPE_PACKAGE, pkg: ctx}
return {path: alias_path, scope: SCOPE_PACKAGE, pkg: alias.package}
}
package_path = get_packages_dir() + '/' + fd.safe_package_path(path)
@@ -907,9 +1010,10 @@ function read_dylib_manifest(pkg) {
// Ensure all C modules for a package are built and loaded.
// Returns the array of {file, symbol, dylib} results, cached per package.
function ensure_package_dylibs(pkg) {
if (package_dylibs[pkg] != null) return package_dylibs[pkg]
if (pkg == 'core') {
package_dylibs[pkg] = []
var _pkg = safe_canonicalize(pkg)
if (package_dylibs[_pkg] != null) return package_dylibs[_pkg]
if (_pkg == 'core') {
package_dylibs[_pkg] = []
return []
}
@@ -917,28 +1021,50 @@ function ensure_package_dylibs(pkg) {
var build_mod = use_cache['core/build']
var target = null
var c_files = null
var _all_ok = true
var _ri = 0
if (build_mod) {
// Fast path: if manifest exists and all dylibs are present, skip build_dynamic
results = read_dylib_manifest(_pkg)
if (results != null) {
_all_ok = true
_ri = 0
while (_ri < length(results)) {
if (results[_ri].dylib && !fd.is_file(results[_ri].dylib)) {
_all_ok = false
break
}
_ri = _ri + 1
}
if (_all_ok) {
log.shop('manifest ok for ' + _pkg + ' (' + text(length(results)) + ' modules)')
} else {
results = null
}
}
if (results == null) {
target = detect_host_target()
if (!target) return null
c_files = pkg_tools.get_c_files(pkg, target, true)
c_files = pkg_tools.get_c_files(_pkg, target, true)
if (!c_files || length(c_files) == 0) {
package_dylibs[pkg] = []
package_dylibs[_pkg] = []
return []
}
log.shop('ensuring C modules for ' + pkg)
results = build_mod.build_dynamic(pkg, target, 'release', {})
log.shop('ensuring C modules for ' + _pkg)
results = build_mod.build_dynamic(_pkg, target, 'release', {})
}
} else {
// No build module at runtime — read manifest from cell build
results = read_dylib_manifest(pkg)
results = read_dylib_manifest(_pkg)
if (!results) return null
log.shop('loaded manifest for ' + pkg + ' (' + text(length(results)) + ' modules)')
log.shop('loaded manifest for ' + _pkg + ' (' + text(length(results)) + ' modules)')
}
if (results == null) results = []
package_dylibs[pkg] = results
package_dylibs[_pkg] = results
// Preload all sibling dylibs with RTLD_LAZY|RTLD_GLOBAL
arrfor(results, function(r) {
@@ -982,7 +1108,7 @@ function try_dylib_symbol(sym, pkg, file_stem) {
// Resolve a C symbol by searching:
// At each scope: check build-cache dylib first, then internal (static)
function resolve_c_symbol(path, _pkg_ctx) {
var package_context = is_core_dir(_pkg_ctx) ? 'core' : _pkg_ctx
var package_context = is_core_dir(_pkg_ctx) ? 'core' : safe_canonicalize(_pkg_ctx)
var explicit = split_explicit_package_import(path)
var sym = null
var loader = null
@@ -1212,8 +1338,17 @@ Shop.is_loaded = function is_loaded(path, package_context) {
}
// Create a use function bound to a specific package context
function make_use_fn(pkg) {
function make_use_fn(pkg, force_native) {
return function(path) {
var _native = null
if (force_native && !native_mode) {
_native = function() {
return Shop.use_native(path, pkg)
} disruption {
return Shop.use(path, pkg)
}
return _native()
}
return Shop.use(path, pkg)
}
}
@@ -1236,6 +1371,8 @@ function execute_module(info)
var inject = null
var env = null
var pkg = null
var _native_load = null
var _bc = null
if (mod_resolve.scope < 900) {
// Check if native dylib was resolved (descriptor with _handle and _sym)
@@ -1244,10 +1381,29 @@ function execute_module(info)
inject = Shop.script_inject_for(file_info)
env = inject_env(inject)
pkg = file_info.package
env.use = make_use_fn(pkg)
env.use = make_use_fn(pkg, true)
env = stone(env)
_native_load = function() {
used = os.native_module_load_named(
mod_resolve.symbol._handle, mod_resolve.symbol._sym, env)
log.shop('loaded ' + info.cache_key + ' [native]')
} disruption {
// Native load failed — fall back to bytecode
log.shop('native load failed for ' + info.cache_key + ' (sym=' + text(mod_resolve.symbol._sym || '') + '), falling back to bytecode')
_bc = resolve_mod_fn_bytecode(mod_resolve.path, file_info.package)
if (_bc) {
// Build a fresh env for bytecode (env is stoned, can't modify)
env = inject_env(inject)
env.use = make_use_fn(pkg)
env = stone(env)
used = mach_load(_bc, env)
log.shop('loaded ' + info.cache_key + ' [bytecode fallback]')
} else {
log.error('native load failed and bytecode fallback also failed for ' + info.cache_key)
disrupt
}
}
_native_load()
} else {
// Build env with runtime fns, capabilities, and use function
file_info = Shop.file_info(mod_resolve.path)
@@ -1259,6 +1415,7 @@ function execute_module(info)
// Load compiled bytecode with env
used = mach_load(mod_resolve.symbol, env)
log.shop('loaded ' + info.cache_key + ' [bytecode]')
}
} else if (c_resolve.scope < 900) {
// C only
@@ -1267,7 +1424,7 @@ function execute_module(info)
log.shop(`Module could not be found (c_resolve scope=${info.c_resolve.scope}, mod_resolve scope=${info.mod_resolve.scope}, cache_key=${info.cache_key})`); disrupt
}
if (!used) { log.error(`Module ${info} returned null`); disrupt }
if (!used) { log.error('Module ' + text(info.cache_key || info) + ' returned null'); disrupt }
return used
}
@@ -1298,7 +1455,7 @@ Shop.use = function use(path, _pkg_ctx) {
log.error("use() expects a text module path, but received a non-text value")
disrupt
}
var package_context = is_core_dir(_pkg_ctx) ? 'core' : _pkg_ctx
var package_context = is_core_dir(_pkg_ctx) ? 'core' : safe_canonicalize(_pkg_ctx)
// Check for embedded module (static builds)
var embed_key = 'embedded:' + path
var embedded = null
@@ -1342,18 +1499,18 @@ Shop.use = function use(path, _pkg_ctx) {
if (use_cache[info.cache_key])
return use_cache[info.cache_key]
push(use_stack, _use_entry)
use_stack[] = _use_entry
var _use_result = null
var _use_ok = false
var _load = function() {
_use_result = execute_module(info)
_use_ok = true
} disruption {
pop(use_stack)
use_stack[]
disrupt
}
_load()
pop(use_stack)
use_stack[]
use_cache[info.cache_key] = _use_result
return _use_result
}
@@ -1487,12 +1644,16 @@ function download_zip(pkg, commit_hash) {
return _download()
}
// Get zip from cache, returns null if not cached
// Get zip from cache, returns null if not cached or empty
function get_cached_zip(pkg, commit_hash) {
var cache_path = get_cache_path(pkg, commit_hash)
if (fd.is_file(cache_path))
return fd.slurp(cache_path)
var data = null
if (fd.is_file(cache_path)) {
data = fd.slurp(cache_path)
stone(data)
if (length(data) > 0) return data
fd.remove(cache_path)
}
return null
}
@@ -1748,6 +1909,7 @@ Shop.sync_with_deps = function(pkg, opts) {
if (visited[current]) continue
visited[current] = true
log.build(' Fetching ' + current + '...')
Shop.sync(current, opts)
_read_deps = function() {
@@ -1761,7 +1923,7 @@ Shop.sync_with_deps = function(pkg, opts) {
arrfor(array(deps), function(alias) {
dep_locator = deps[alias]
if (!visited[dep_locator])
push(queue, dep_locator)
queue[] = dep_locator
})
}
}
@@ -1847,19 +2009,27 @@ Shop.file_reload = function(file)
}
Shop.module_reload = function(path, package) {
if (!Shop.is_loaded(path,package)) return
if (!Shop.is_loaded(path, package)) return false
// Clear the module info cache for this path
var lookup_key = package ? package + ':' + path : ':' + path
module_info_cache[lookup_key] = null
var info = resolve_module_info(path, package)
if (!info) return false
// Invalidate package dylib cache so next resolve triggers rebuild
if (package) {
package_dylibs[package] = null
// Check if source actually changed
var mod_path = null
var source = null
var new_hash = null
if (info.mod_resolve) mod_path = info.mod_resolve.path
if (mod_path && fd.is_file(mod_path)) {
source = fd.slurp(mod_path)
new_hash = content_hash(stone(blob(text(source))))
if (reload_hashes[info.cache_key] == new_hash) return false
reload_hashes[info.cache_key] = new_hash
}
var info = resolve_module_info(path, package)
if (!info) return
// Clear caches
module_info_cache[lookup_key] = null
if (package) package_dylibs[package] = null
var cache_key = info.cache_key
var old = use_cache[cache_key]
@@ -1868,13 +2038,18 @@ Shop.module_reload = function(path, package) {
var newmod = get_module(path, package)
use_cache[cache_key] = newmod
// Smart update: unstone -> merge -> re-stone to preserve references
if (old && is_object(old) && is_object(newmod)) {
os.unstone(old)
arrfor(array(newmod), function(k) { old[k] = newmod[k] })
arrfor(array(old), function(k) {
if (!(k in newmod)) old[k] = null
})
stone(old)
use_cache[cache_key] = old
}
return true
}
function get_package_scripts(package)
@@ -1887,7 +2062,7 @@ function get_package_scripts(package)
for (i = 0; i < length(files); i++) {
file = files[i]
if (ends_with(file, '.cm') || ends_with(file, '.ce')) {
push(scripts, file)
scripts[] = file
}
}
@@ -1909,7 +2084,7 @@ function extract_use_calls(source) {
if (end == null) end = search(text(source, start), '"')
if (end != null) {
arg = text(source, start, start + end)
push(uses, arg)
uses[] = arg
}
idx = search(text(source, idx + 4), "use(")
if (idx != null) idx = idx + (source.length - (source.length - idx))
@@ -1932,12 +2107,18 @@ Shop.build_package_scripts = function(package)
resolve_mod_fn(pkg_dir + '/' + script, package)
ok = ok + 1
} disruption {
push(errors, script)
log.console(" compile error: " + package + '/' + script)
errors[] = script
log.build(" compile error: " + package + '/' + script)
}
_try()
})
if (length(errors) > 0) {
log.build(' Compiling scripts (' + text(ok) + ' ok, ' + text(length(errors)) + ' errors)')
} else if (ok > 0) {
log.build(' Compiling scripts (' + text(ok) + ' ok)')
}
return {ok: ok, errors: errors, total: length(scripts)}
}
@@ -1979,14 +2160,14 @@ Shop.audit_use_resolution = function(package) {
end = search(rest, quote)
if (end == null) continue
arg = text(rest, 0, end)
if (length(arg) > 0) push(uses, arg)
if (length(arg) > 0) uses[] = arg
rest = text(rest, end + 1)
}
arrfor(uses, function(mod) {
var _resolve = function() {
info = resolve_module_info(mod, package)
if (!info) push(unresolved, {script: script, module: mod})
if (!info) unresolved[] = {script: script, module: mod}
} disruption {}
_resolve()
})
@@ -2014,6 +2195,7 @@ Shop.get_lib_dir = function() {
Shop.ensure_dir = fd.ensure_dir
Shop.install_zip = install_zip
Shop.ensure_package_dylibs = ensure_package_dylibs
Shop.resolve_path = resolve_path
Shop.get_local_dir = function() {
return global_shop_path + "/local"
@@ -2080,7 +2262,7 @@ Shop.load_as_mach = function(path, pkg) {
// Try cached mcode -> compile to mach
if (!compiled) {
cached_mcode_path = hash_path(content_key, 'mcode')
cached_mcode_path = cache_path(content_key, 'mcode')
if (fd.is_file(cached_mcode_path)) {
mcode_json = text(fd.slurp(cached_mcode_path))
compiled = mach_compile_mcode_bin(file_path, mcode_json)
@@ -2100,7 +2282,7 @@ Shop.load_as_mach = function(path, pkg) {
ir = _mcode_mod(ast)
optimized = _streamline_mod(ir)
mcode_json = shop_json.encode(optimized)
cached_mcode_path = hash_path(content_key, 'mcode')
cached_mcode_path = cache_path(content_key, 'mcode')
fd.ensure_dir(global_shop_path + '/build')
fd.slurpwrite(cached_mcode_path, stone(blob(mcode_json)))
compiled = mach_compile_mcode_bin(file_path, mcode_json)
@@ -2148,11 +2330,85 @@ Shop.load_as_dylib = function(path, pkg) {
if (!file_info) file_info = Shop.file_info(file_path)
inject = Shop.script_inject_for(file_info)
env = inject_env(inject)
env.use = make_use_fn(real_pkg)
env.use = make_use_fn(real_pkg, true)
env = stone(env)
return os.native_module_load_named(result._handle, result._sym, env)
}
// Trace all transitive module dependencies for a file.
// Returns {scripts: [{path, package}], c_packages: [string]}
// Walks the import graph depth-first starting at file_path. Script
// imports are recursed into; native imports only record their owning
// package name. A visited map keyed by path prevents cycles from
// recursing forever.
Shop.trace_deps = function(file_path) {
var visited = {}
var scripts = []
var c_packages = {}
function trace(fp) {
// Each path is processed at most once (handles import cycles and
// diamond-shaped dependency graphs).
if (visited[fp]) return
visited[fp] = true
var fi = Shop.file_info(fp)
var file_pkg = fi.package
var idx = null
var j = 0
var imp = null
var rinfo = null
// Only .cm script files are reported in the scripts list; other
// file kinds still get their imports traced below.
if (ends_with(fp, '.cm'))
scripts[] = {path: fp, package: file_pkg}
// Indexing can disrupt (e.g. unreadable or unparseable source); the
// empty disruption handler swallows the error so one bad file does
// not abort the whole trace.
var _trace = function() {
idx = Shop.index_file(fp)
if (!idx || !idx.imports) return
j = 0
while (j < length(idx.imports)) {
imp = idx.imports[j]
// Resolve each import relative to the importing file's package.
rinfo = Shop.resolve_import_info(imp.module_path, file_pkg)
if (rinfo) {
if (rinfo.type == 'script' && rinfo.resolved_path)
trace(rinfo.resolved_path)
else if (rinfo.type == 'native' && rinfo.package)
// Native deps are deduplicated via a map of package -> true.
c_packages[rinfo.package] = true
}
j = j + 1
}
} disruption {}
_trace()
}
trace(file_path)
// array(map) yields the map's keys, so c_packages comes back as a
// deduplicated list of package names.
return {scripts: scripts, c_packages: array(c_packages)}
}
// Check if a C package has a build manifest (was previously built).
// The manifest file at dylib_manifest_path(pkg) is taken as evidence
// of a prior successful native build.
Shop.has_c_manifest = function(pkg) {
return fd.is_file(dylib_manifest_path(pkg))
}
// Check if a .cm file has a cached bytecode artifact (mach or mcode).
// Returns false for missing files; otherwise keys the cache lookup by
// the file's content, so any edit to the source invalidates the hit
// naturally.
Shop.is_cached = function(path) {
if (!fd.is_file(path)) return false
var content_key = stone(blob(text(fd.slurp(path))))
// Either artifact kind counts as "cached": 'mach' or 'mcode'.
if (fd.is_file(cache_path(content_key, 'mach'))) return true
if (fd.is_file(cache_path(content_key, 'mcode'))) return true
return false
}
// Check if a .cm file has a cached native dylib artifact.
// Requires the core/build module to already be loaded (it owns the
// native cache key derivation); without it, or when the source file or
// host target cannot be determined, reports "not cached".
Shop.is_native_cached = function(path, pkg) {
var build_mod = use_cache['core/build']
if (!build_mod || !fd.is_file(path)) return false
var src = text(fd.slurp(path))
var host = detect_host_target()
if (!host) return false
// Sanitizer flags participate in the cache key when the build module
// exposes them; older build modules may not have the function.
var san_flags = build_mod.native_sanitize_flags ? build_mod.native_sanitize_flags() : ''
// Prefer the build module's own key derivation; fall back to a plain
// source + host concatenation when it is unavailable.
var native_key = build_mod.native_cache_content ?
build_mod.native_cache_content(src, host, san_flags) :
(src + '\n' + host)
return fd.is_file(build_mod.cache_path(native_key, build_mod.SALT_NATIVE))
}
// Compile + cache a module without executing it.
// Thin wrapper over resolve_mod_fn; its return value is intentionally
// discarded — only the compile/cache side effect is wanted here.
Shop.precompile = function(path, pkg) {
resolve_mod_fn(path, pkg)
}
Shop.audit_packages = function() {
var packages = Shop.list_packages()
@@ -2162,7 +2418,7 @@ Shop.audit_packages = function() {
if (package == 'core') return
if (fd.is_dir(package)) return
if (fetch_remote_hash(package)) return
push(bad, package)
bad[] = package
})
return bad
@@ -2195,18 +2451,40 @@ Shop.parse_package = function(locator) {
Shop.use_native = function(path, package_context) {
var src_path = path
if (!starts_with(path, '/'))
var locator = null
var lookup = null
var cache_key = null
var cfg = null
var old_native = null
if (!starts_with(path, '/') && !fd.is_file(path)) {
lookup = ends_with(path, '.cm') ? path : path + '.cm'
locator = resolve_locator(lookup, package_context)
if (!locator) { log.error('use_native: module not found: ' + path + ' (package: ' + text(package_context || '') + ')'); disrupt }
src_path = locator.path
} else if (!starts_with(path, '/')) {
src_path = fd.realpath(path)
}
if (!fd.is_file(src_path)) { log.error('File not found: ' + path); disrupt }
var file_info = Shop.file_info(src_path)
var pkg = file_info.package || package_context
var pkg = file_info.package || (locator ? locator.pkg : package_context)
var sym_stem = file_info.name ? file_info.name + (file_info.is_actor ? '.ce' : '.cm') : fd.basename(src_path)
cache_key = 'native:' + text(pkg || '') + ':' + src_path
if (use_cache[cache_key]) return use_cache[cache_key]
var sym_name = null
if (pkg)
sym_name = Shop.c_symbol_for_file(pkg, fd.basename(src_path))
if (pkg) {
sym_name = Shop.c_symbol_for_file(pkg, sym_stem)
}
var build = Shop.use('build', 'core')
var build = use_cache['core/build'] || use_cache['build']
if (!build) {
cfg = Shop.load_config()
old_native = cfg.policy.native
cfg.policy.native = false
build = Shop.use('build', 'core')
cfg.policy.native = old_native
}
var dylib_path = build.compile_native(src_path, null, null, pkg)
var handle = os.dylib_open(dylib_path)
@@ -2215,12 +2493,16 @@ Shop.use_native = function(path, package_context) {
// Build env with runtime functions and capabilities
var inject = Shop.script_inject_for(file_info)
var env = inject_env(inject)
env.use = make_use_fn(pkg)
env.use = make_use_fn(pkg, true)
env = stone(env)
var loaded = null
if (sym_name)
return os.native_module_load_named(handle, sym_name, env)
return os.native_module_load(handle, env)
loaded = os.native_module_load_named(handle, sym_name, env)
else
loaded = os.native_module_load(handle, env)
use_cache[cache_key] = loaded
return loaded
}
return Shop

View File

@@ -1,5 +1,5 @@
#define WOTA_IMPLEMENTATION
#include "quickjs-internal.h"
#include "pit_internal.h"
#include "cell.h"
typedef struct ObjectRef {
@@ -41,13 +41,11 @@ static void wota_stack_free (WotaEncodeContext *enc) {
}
static JSValue wota_apply_replacer (WotaEncodeContext *enc, JSValueConst holder, JSValue key, JSValueConst val) {
if (JS_IsNull (enc->replacer)) return JS_DupValue (enc->ctx, val);
JSValue key_val = JS_IsNull (key) ? JS_NULL : JS_DupValue (enc->ctx, key);
JSValue args[2] = { key_val, JS_DupValue (enc->ctx, val) };
if (JS_IsNull (enc->replacer)) return val;
JSValue key_val = JS_IsNull (key) ? JS_NULL : key;
JSValue args[2] = { key_val, val };
JSValue result = JS_Call (enc->ctx, enc->replacer, holder, 2, args);
JS_FreeValue (enc->ctx, args[0]);
JS_FreeValue (enc->ctx, args[1]);
if (JS_IsException (result)) return JS_DupValue (enc->ctx, val);
if (JS_IsException (result)) return val;
return result;
}
@@ -60,20 +58,17 @@ static void encode_object_properties (WotaEncodeContext *enc, JSValueConst val,
JSGCRef val_ref, keys_ref;
JS_PushGCRef (ctx, &val_ref);
JS_PushGCRef (ctx, &keys_ref);
val_ref.val = JS_DupValue (ctx, val);
val_ref.val = val;
keys_ref.val = JS_GetOwnPropertyNames (ctx, val_ref.val);
if (JS_IsException (keys_ref.val)) {
wota_write_sym (&enc->wb, WOTA_NULL);
JS_FreeValue (ctx, val_ref.val);
JS_PopGCRef (ctx, &keys_ref);
JS_PopGCRef (ctx, &val_ref);
return;
}
int64_t plen64;
if (JS_GetLength (ctx, keys_ref.val, &plen64) < 0) {
JS_FreeValue (ctx, keys_ref.val);
JS_FreeValue (ctx, val_ref.val);
wota_write_sym (&enc->wb, WOTA_NULL);
JS_PopGCRef (ctx, &keys_ref);
JS_PopGCRef (ctx, &val_ref);
@@ -105,12 +100,9 @@ static void encode_object_properties (WotaEncodeContext *enc, JSValueConst val,
prop_refs[non_function_count].val = prop_val;
non_function_count++;
} else {
JS_FreeValue (ctx, prop_val);
JS_FreeValue (ctx, key_refs[i].val);
key_refs[i].val = JS_NULL;
}
}
JS_FreeValue (ctx, keys_ref.val);
wota_write_record (&enc->wb, non_function_count);
for (uint32_t i = 0; i < non_function_count; i++) {
size_t klen;
@@ -118,8 +110,6 @@ static void encode_object_properties (WotaEncodeContext *enc, JSValueConst val,
wota_write_text_len (&enc->wb, prop_name ? prop_name : "", prop_name ? klen : 0);
wota_encode_value (enc, prop_refs[i].val, val_ref.val, key_refs[i].val);
JS_FreeCString (ctx, prop_name);
JS_FreeValue (ctx, prop_refs[i].val);
JS_FreeValue (ctx, key_refs[i].val);
}
/* Pop all GC refs in reverse order */
for (int i = plen - 1; i >= 0; i--) {
@@ -128,7 +118,6 @@ static void encode_object_properties (WotaEncodeContext *enc, JSValueConst val,
}
sys_free (prop_refs);
sys_free (key_refs);
JS_FreeValue (ctx, val_ref.val);
JS_PopGCRef (ctx, &keys_ref);
JS_PopGCRef (ctx, &val_ref);
}
@@ -139,7 +128,7 @@ static void wota_encode_value (WotaEncodeContext *enc, JSValueConst val, JSValue
if (!JS_IsNull (enc->replacer) && !JS_IsNull (key))
replaced = wota_apply_replacer (enc, holder, key, val);
else
replaced = JS_DupValue (enc->ctx, val);
replaced = val;
int tag = JS_VALUE_GET_TAG (replaced);
switch (tag) {
@@ -183,7 +172,6 @@ static void wota_encode_value (WotaEncodeContext *enc, JSValueConst val, JSValue
size_t buf_len;
void *buf_data = js_get_blob_data (ctx, &buf_len, replaced);
if (buf_data == (void *)-1) {
JS_FreeValue (ctx, replaced);
return;
}
if (buf_len == 0) {
@@ -205,7 +193,6 @@ static void wota_encode_value (WotaEncodeContext *enc, JSValueConst val, JSValue
for (int64_t i = 0; i < arr_len; i++) {
JSValue elem_val = JS_GetPropertyNumber (ctx, replaced, i);
wota_encode_value (enc, elem_val, replaced, JS_NewInt32 (ctx, (int32_t)i));
JS_FreeValue (ctx, elem_val);
}
wota_stack_pop (enc);
break;
@@ -218,10 +205,8 @@ static void wota_encode_value (WotaEncodeContext *enc, JSValueConst val, JSValue
if (!JS_IsNull (adata)) {
wota_write_sym (&enc->wb, WOTA_PRIVATE);
wota_encode_value (enc, adata, replaced, JS_NULL);
JS_FreeValue (ctx, adata);
break;
}
JS_FreeValue (ctx, adata);
if (wota_stack_has (enc, replaced)) {
enc->cycle = 1;
break;
@@ -230,16 +215,13 @@ static void wota_encode_value (WotaEncodeContext *enc, JSValueConst val, JSValue
JSValue to_json = JS_GetPropertyStr (ctx, replaced, "toJSON");
if (JS_IsFunction (to_json)) {
JSValue result = JS_Call (ctx, to_json, replaced, 0, NULL);
JS_FreeValue (ctx, to_json);
if (!JS_IsException (result)) {
wota_encode_value (enc, result, holder, key);
JS_FreeValue (ctx, result);
} else
wota_write_sym (&enc->wb, WOTA_NULL);
wota_stack_pop (enc);
break;
}
JS_FreeValue (ctx, to_json);
encode_object_properties (enc, replaced, holder);
wota_stack_pop (enc);
break;
@@ -248,7 +230,6 @@ static void wota_encode_value (WotaEncodeContext *enc, JSValueConst val, JSValue
wota_write_sym (&enc->wb, WOTA_NULL);
break;
}
JS_FreeValue (ctx, replaced);
}
static char *decode_wota_value (JSContext *ctx, char *data_ptr, JSValue *out_val, JSValue holder, JSValue key, JSValue reviver) {
@@ -355,16 +336,12 @@ static char *decode_wota_value (JSContext *ctx, char *data_ptr, JSValue *out_val
break;
}
if (!JS_IsNull (reviver)) {
JSValue key_val = JS_IsNull (key) ? JS_NULL : JS_DupValue (ctx, key);
JSValue args[2] = { key_val, JS_DupValue (ctx, *out_val) };
JSValue key_val = JS_IsNull (key) ? JS_NULL : key;
JSValue args[2] = { key_val, *out_val };
JSValue revived = JS_Call (ctx, reviver, holder, 2, args);
JS_FreeValue (ctx, args[0]);
JS_FreeValue (ctx, args[1]);
if (!JS_IsException (revived)) {
JS_FreeValue (ctx, *out_val);
*out_val = revived;
} else
JS_FreeValue (ctx, revived);
}
}
return data_ptr;
}

13
link.ce
View File

@@ -140,17 +140,18 @@ if (cmd == 'list') {
return
}
// Read package name from cell.toml
// Derive canonical package name from the target directory
_read_toml = function() {
content = toml.decode(text(fd.slurp(toml_path)))
if (content.package) {
pkg_name = content.package
var info = shop.file_info(target + '/cell.toml')
if (info && info.package) {
pkg_name = info.package
} else {
log.console("Error: cell.toml at " + target + " does not define 'package'")
log.console("Error: could not determine package name for " + target)
log.console("Ensure it is installed or has a git remote matching a lock entry")
$stop()
}
} disruption {
log.console("Error reading cell.toml")
log.console("Error determining package name for " + target)
$stop()
}
_read_toml()

View File

@@ -208,11 +208,11 @@ Link.sync_all = function(shop) {
// Validate target exists
var link_target = resolve_link_target(target)
if (!fd.is_dir(link_target)) {
push(errors, canonical + ': target ' + link_target + ' does not exist')
errors[] = canonical + ': target ' + link_target + ' does not exist'
return
}
if (!fd.is_file(link_target + '/cell.toml')) {
push(errors, canonical + ': target ' + link_target + ' is not a valid package')
errors[] = canonical + ': target ' + link_target + ' is not a valid package'
return
}
@@ -246,7 +246,7 @@ Link.sync_all = function(shop) {
count = count + 1
} disruption {
push(errors, canonical + ': sync failed')
errors[] = canonical + ': sync failed'
}
_sync()
})

14
list.ce
View File

@@ -89,16 +89,16 @@ var run = function() {
// Add status indicators
status = []
if (link_target) {
push(status, "linked -> " + link_target)
status[] = "linked -> " + link_target
}
if (lock_entry && lock_entry.commit) {
push(status, "@" + text(lock_entry.commit, 0, 8))
status[] = "@" + text(lock_entry.commit, 0, 8)
}
if (lock_entry && lock_entry.type == 'local') {
push(status, "local")
status[] = "local"
}
if (!lock_entry) {
push(status, "not installed")
status[] = "not installed"
}
if (length(status) > 0) {
@@ -136,11 +136,11 @@ if (mode == 'local') {
var link_target = links[p]
if (link_target) {
push(linked_pkgs, p)
linked_pkgs[] = p
} else if (lock_entry && lock_entry.type == 'local') {
push(local_pkgs, p)
local_pkgs[] = p
} else {
push(remote_pkgs, p)
remote_pkgs[] = p
}
})

498
log.ce
View File

@@ -1,15 +1,17 @@
// cell log - Manage and read log sinks
// cell log - Manage log sink configuration
//
// Usage:
// cell log list List configured sinks
// cell log list Show sinks and channel routing
// cell log channels List channels with status
// cell log enable <channel> Enable a channel on terminal
// cell log disable <channel> Disable a channel on terminal
// cell log add <name> console [opts] Add a console sink
// cell log add <name> file <path> [opts] Add a file sink
// cell log remove <name> Remove a sink
// cell log read <sink> [opts] Read from a file sink
// cell log tail <sink> [--lines=N] Follow a file sink
//
// The --stack option controls which channels capture a stack trace.
// Default: --stack=error (errors always show a stack trace).
// cell log route <channel> <sink> Route a channel to a sink
// cell log unroute <channel> <sink> Remove a channel from a sink
// cell log stack <channel> Enable stack traces on a channel
// cell log unstack <channel> Disable stack traces on a channel
var toml = use('toml')
var fd = use('fd')
@@ -18,9 +20,8 @@ var json = use('json')
var log_path = shop_path + '/log.toml'
function load_config() {
if (fd.is_file(log_path)) {
if (fd.is_file(log_path))
return toml.decode(text(fd.slurp(log_path)))
}
return null
}
@@ -45,23 +46,24 @@ function print_help() {
log.console("Usage: cell log <command> [options]")
log.console("")
log.console("Commands:")
log.console(" list List configured sinks")
log.console(" list Show sinks and channel routing")
log.console(" channels List channels with status")
log.console(" enable <channel> Enable a channel on terminal")
log.console(" disable <channel> Disable a channel on terminal")
log.console(" add <name> console [opts] Add a console sink")
log.console(" add <name> file <path> [opts] Add a file sink")
log.console(" remove <name> Remove a sink")
log.console(" read <sink> [opts] Read from a file sink")
log.console(" tail <sink> [--lines=N] Follow a file sink")
log.console(" route <channel> <sink> Route a channel to a sink")
log.console(" unroute <channel> <sink> Remove a channel from a sink")
log.console(" stack <channel> Enable stack traces on a channel")
log.console(" unstack <channel> Disable stack traces on a channel")
log.console("")
log.console("Options for add:")
log.console(" --format=pretty|bare|json Output format (default: pretty for console, json for file)")
log.console(" --channels=ch1,ch2 Channels to subscribe (default: console,error,system)")
log.console(" --exclude=ch1,ch2 Channels to exclude (for wildcard sinks)")
log.console(" --stack=ch1,ch2 Channels that capture a stack trace (default: error)")
log.console("")
log.console("Options for read:")
log.console(" --lines=N Show last N lines (default: all)")
log.console(" --channel=X Filter by channel")
log.console(" --since=timestamp Only show entries after timestamp")
log.console(" --channels=ch1,ch2 Channels to subscribe (default: *)")
log.console(" --exclude=ch1,ch2 Channels to exclude")
log.console(" --mode=append|overwrite File write mode (default: append)")
log.console(" --max_size=N Max file size in bytes before truncation")
}
function parse_opt(arg, prefix) {
@@ -71,36 +73,85 @@ function parse_opt(arg, prefix) {
return null
}
function format_entry(entry) {
var aid = text(entry.actor_id, 0, 5)
var src = ""
var ev = null
if (entry.source && entry.source.file)
src = entry.source.file + ":" + text(entry.source.line)
ev = is_text(entry.event) ? entry.event : json.encode(entry.event)
return "[" + aid + "] [" + entry.channel + "] " + src + " " + ev
// Collect all stack channels across all sinks.
// Returns a map of channel name -> true, merged from every sink's
// 'stack' array; sinks without a stack array are skipped.
function collect_stack_channels(config) {
var stack_chs = {}
var names = array(config.sink)
arrfor(names, function(n) {
var s = config.sink[n]
if (is_array(s.stack)) {
// Map keys deduplicate channels declared on multiple sinks.
arrfor(s.stack, function(ch) { stack_chs[ch] = true })
}
})
return stack_chs
}
// Find which sinks a stack channel is declared on (for modification).
// Returns the name of the first sink (in array(config.sink) order)
// whose 'stack' array contains the channel, or null if none does.
function find_stack_sink(config, channel) {
var names = array(config.sink)
var found = null
arrfor(names, function(n) {
// Once a match is found, skip the remaining sinks.
if (found) return
var s = config.sink[n]
if (is_array(s.stack)) {
arrfor(s.stack, function(ch) {
if (ch == channel) found = n
})
}
})
return found
}
function do_list() {
var config = load_config()
var names = null
var channel_routing = {}
var stack_chs = null
names = (config && config.sink) ? array(config.sink) : []
if (length(names) == 0) {
log.console("No log sinks configured.")
log.console("Default: console pretty for console/error/system (stack traces on error)")
return
}
// Show sinks
log.console("Sinks:")
arrfor(names, function(n) {
var s = config.sink[n]
var ch = is_array(s.channels) ? text(s.channels, ', ') : '(none)'
var ex = is_array(s.exclude) ? " exclude=" + text(s.exclude, ',') : ""
var stk = is_array(s.stack) ? " stack=" + text(s.stack, ',') : ""
var fmt = s.format || (s.type == 'file' ? 'json' : 'pretty')
var mode = s.mode ? " mode=" + s.mode : ""
var maxsz = s.max_size ? " max_size=" + text(s.max_size) : ""
var ex = is_array(s.exclude) ? " exclude=" + text(s.exclude, ',') : ""
if (s.type == 'file')
log.console(" " + n + ": " + s.type + " -> " + s.path + " [" + ch + "] format=" + fmt + ex + stk)
log.console(" " + n + ": file -> " + s.path + " format=" + fmt + mode + maxsz)
else
log.console(" " + n + ": " + s.type + " [" + ch + "] format=" + fmt + ex + stk)
log.console(" " + n + ": console format=" + fmt + ex)
})
// Build channel -> sinks map
arrfor(names, function(n) {
var s = config.sink[n]
var chs = is_array(s.channels) ? s.channels : []
arrfor(chs, function(ch) {
if (!channel_routing[ch]) channel_routing[ch] = []
channel_routing[ch][] = n
})
})
// Show routing
log.console("")
log.console("Routing:")
var channels = array(channel_routing)
arrfor(channels, function(ch) {
log.console(" " + ch + " -> " + text(channel_routing[ch], ', '))
})
// Show stack traces
stack_chs = collect_stack_channels(config)
var stack_list = array(stack_chs)
if (length(stack_list) > 0) {
log.console("")
log.console("Stack traces on: " + text(stack_list, ', '))
}
}
function do_add() {
@@ -108,14 +159,15 @@ function do_add() {
var sink_type = null
var path = null
var format = null
var channels = ["console", "error", "system"]
var channels = ["*"]
var exclude = null
var stack_chs = ["error"]
var mode = null
var max_size = null
var config = null
var val = null
var i = 0
if (length(args) < 3) {
log.error("Usage: cell log add <name> console|file [path] [options]")
log.console("Usage: cell log add <name> console|file [path] [options]")
return
}
name = args[1]
@@ -123,7 +175,7 @@ function do_add() {
if (sink_type == 'file') {
if (length(args) < 4) {
log.error("Usage: cell log add <name> file <path> [options]")
log.console("Usage: cell log add <name> file <path> [options]")
return
}
path = args[3]
@@ -133,7 +185,7 @@ function do_add() {
format = "pretty"
i = 3
} else {
log.error("Unknown sink type: " + sink_type + " (use 'console' or 'file')")
log.console("Unknown sink type: " + sink_type + " (use 'console' or 'file')")
return
}
@@ -144,17 +196,21 @@ function do_add() {
if (val) { channels = array(val, ','); continue }
val = parse_opt(args[i], 'exclude')
if (val) { exclude = array(val, ','); continue }
val = parse_opt(args[i], 'stack')
if (val) { stack_chs = array(val, ','); continue }
val = parse_opt(args[i], 'mode')
if (val) { mode = val; continue }
val = parse_opt(args[i], 'max_size')
if (val) { max_size = number(val); continue }
}
config = load_config()
if (!config) config = {}
if (!config.sink) config.sink = {}
config.sink[name] = {type: sink_type, format: format, channels: channels, stack: stack_chs}
config.sink[name] = {type: sink_type, format: format, channels: channels}
if (path) config.sink[name].path = path
if (exclude) config.sink[name].exclude = exclude
if (mode) config.sink[name].mode = mode
if (max_size) config.sink[name].max_size = max_size
save_config(config)
log.console("Added sink: " + name)
@@ -164,13 +220,13 @@ function do_remove() {
var name = null
var config = null
if (length(args) < 2) {
log.error("Usage: cell log remove <name>")
log.console("Usage: cell log remove <name>")
return
}
name = args[1]
config = load_config()
if (!config || !config.sink || !config.sink[name]) {
log.error("Sink not found: " + name)
log.console("Sink not found: " + name)
return
}
delete config.sink[name]
@@ -178,154 +234,244 @@ function do_remove() {
log.console("Removed sink: " + name)
}
function do_read() {
var name = null
var max_lines = 0
var filter_channel = null
var since = 0
function do_route() {
var channel = null
var sink_name = null
var config = null
var sink = null
var content = null
var lines = null
var entries = []
var entry = null
var val = null
var i = 0
if (length(args) < 2) {
log.error("Usage: cell log read <sink_name> [options]")
var already = false
if (length(args) < 3) {
log.console("Usage: cell log route <channel> <sink>")
return
}
name = args[1]
for (i = 2; i < length(args); i++) {
val = parse_opt(args[i], 'lines')
if (val) { max_lines = number(val); continue }
val = parse_opt(args[i], 'channel')
if (val) { filter_channel = val; continue }
val = parse_opt(args[i], 'since')
if (val) { since = number(val); continue }
}
channel = args[1]
sink_name = args[2]
config = load_config()
if (!config || !config.sink || !config.sink[name]) {
log.error("Sink not found: " + name)
if (!config || !config.sink || !config.sink[sink_name]) {
log.console("Sink not found: " + sink_name)
return
}
sink = config.sink[name]
if (sink.type != 'file') {
log.error("Can only read from file sinks")
return
}
if (!fd.is_file(sink.path)) {
log.console("Log file does not exist yet: " + sink.path)
return
}
content = text(fd.slurp(sink.path))
lines = array(content, '\n')
arrfor(lines, function(line) {
var parse_fn = null
if (length(line) == 0) return
parse_fn = function() {
entry = json.decode(line)
} disruption {
entry = null
}
parse_fn()
if (!entry) return
if (filter_channel && entry.channel != filter_channel) return
if (since > 0 && entry.timestamp < since) return
entries[] = entry
})
if (max_lines > 0 && length(entries) > max_lines)
entries = array(entries, length(entries) - max_lines, length(entries))
arrfor(entries, function(e) {
log.console(format_entry(e))
sink = config.sink[sink_name]
if (!is_array(sink.channels)) sink.channels = []
arrfor(sink.channels, function(ch) {
if (ch == channel) already = true
})
if (already) {
log.console(channel + " already routed to " + sink_name)
return
}
sink.channels[] = channel
save_config(config)
log.console(channel + " -> " + sink_name)
}
function do_tail() {
var name = null
var tail_lines = 10
function do_unroute() {
var channel = null
var sink_name = null
var config = null
var sink = null
var last_size = 0
var val = null
var i = 0
if (length(args) < 2) {
log.error("Usage: cell log tail <sink_name> [--lines=N]")
var found = false
if (length(args) < 3) {
log.console("Usage: cell log unroute <channel> <sink>")
return
}
name = args[1]
for (i = 2; i < length(args); i++) {
val = parse_opt(args[i], 'lines')
if (val) { tail_lines = number(val); continue }
}
channel = args[1]
sink_name = args[2]
config = load_config()
if (!config || !config.sink || !config.sink[name]) {
log.error("Sink not found: " + name)
if (!config || !config.sink || !config.sink[sink_name]) {
log.console("Sink not found: " + sink_name)
return
}
sink = config.sink[name]
if (sink.type != 'file') {
log.error("Can only tail file sinks")
sink = config.sink[sink_name]
if (!is_array(sink.channels)) sink.channels = []
sink.channels = filter(sink.channels, function(ch) { return ch != channel })
save_config(config)
log.console(channel + " removed from " + sink_name)
}
function do_stack() {
var channel = null
var config = null
var names = null
var added = false
if (length(args) < 2) {
log.console("Usage: cell log stack <channel>")
return
}
if (!fd.is_file(sink.path))
log.console("Waiting for log file: " + sink.path)
function poll() {
var st = null
var poll_content = null
var poll_lines = null
var start = 0
var poll_entry = null
var old_line_count = 0
var idx = 0
var parse_fn = null
if (!fd.is_file(sink.path)) {
$delay(poll, 1)
channel = args[1]
config = load_config()
if (!config || !config.sink) {
log.console("No sinks configured")
return
}
st = fd.stat(sink.path)
if (st.size == last_size) {
$delay(poll, 1)
// Add to first sink that already has a stack array, or first sink overall
names = array(config.sink)
arrfor(names, function(n) {
var s = config.sink[n]
var already = false
if (added) return
if (is_array(s.stack)) {
arrfor(s.stack, function(ch) { if (ch == channel) already = true })
if (!already) s.stack[] = channel
added = true
}
})
if (!added && length(names) > 0) {
config.sink[names[0]].stack = [channel]
added = true
}
if (added) {
save_config(config)
log.console("Stack traces enabled on: " + channel)
}
}
function do_unstack() {
var channel = null
var config = null
var names = null
if (length(args) < 2) {
log.console("Usage: cell log unstack <channel>")
return
}
poll_content = text(fd.slurp(sink.path))
poll_lines = array(poll_content, '\n')
if (last_size == 0 && length(poll_lines) > tail_lines) {
start = length(poll_lines) - tail_lines
} else if (last_size > 0) {
old_line_count = length(array(text(poll_content, 0, last_size), '\n'))
start = old_line_count
channel = args[1]
config = load_config()
if (!config || !config.sink) {
log.console("No sinks configured")
return
}
names = array(config.sink)
arrfor(names, function(n) {
var s = config.sink[n]
if (is_array(s.stack))
s.stack = filter(s.stack, function(ch) { return ch != channel })
})
save_config(config)
log.console("Stack traces disabled on: " + channel)
}
last_size = st.size
for (idx = start; idx < length(poll_lines); idx++) {
if (length(poll_lines[idx]) == 0) continue
parse_fn = function() {
poll_entry = json.decode(poll_lines[idx])
} disruption {
poll_entry = null
}
parse_fn()
if (!poll_entry) continue
os.print(format_entry(poll_entry) + "\n")
}
$delay(poll, 1)
}
// Channels recognized by `cell log channels`; also serves as the
// display order and the expansion set for wildcard ("*") sinks.
var known_channels = ["console", "error", "warn", "system", "build", "shop", "compile", "test"]
poll()
// Locate the sink that drives the user's terminal.
// A sink literally named "terminal" wins; otherwise the first sink of
// type "console" is returned. Returns null when there is no config or
// no matching sink.
function find_terminal_sink(config) {
var names = null
var found = null
if (!config || !config.sink) return null
names = array(config.sink)
if (config.sink.terminal) return config.sink.terminal
arrfor(names, function(n) {
// Keep only the first console-type sink encountered.
if (!found && config.sink[n].type == "console")
found = config.sink[n]
})
return found
}
// `cell log enable <channel>` — make a channel visible on the terminal
// sink. Three cases:
//   1. no terminal sink yet: create one pre-routed to console/error
//      plus the requested channel;
//   2. wildcard sink (channels == ["*"]): remove the channel from the
//      exclude list instead of adding it;
//   3. explicit channel list: append the channel if not already there.
function do_enable() {
var channel = null
var config = null
var sink = null
var i = 0
var already = false
var new_exclude = []
if (length(args) < 2) {
log.error("Usage: cell log enable <channel>")
return
}
channel = args[1]
config = load_config()
// Build a minimal config skeleton when none exists yet.
if (!config) config = {sink: {}}
if (!config.sink) config.sink = {}
sink = find_terminal_sink(config)
if (!sink) {
// No terminal sink: create a default one with the channel included.
config.sink.terminal = {type: "console", format: "clean", channels: ["console", "error", channel], stack: ["error"]}
save_config(config)
log.console("Enabled channel: " + channel)
return
}
if (is_array(sink.channels) && length(sink.channels) == 1 && sink.channels[0] == "*") {
// Wildcard sink: enabling means un-excluding.
if (is_array(sink.exclude)) {
new_exclude = []
arrfor(sink.exclude, function(ex) {
if (ex != channel) new_exclude[] = ex
})
sink.exclude = new_exclude
}
} else {
// Explicit list: append only if not already present.
if (!is_array(sink.channels)) sink.channels = ["console", "error"]
arrfor(sink.channels, function(ch) {
if (ch == channel) already = true
})
if (!already) sink.channels[] = channel
}
save_config(config)
log.console("Enabled channel: " + channel)
}
// `cell log disable <channel>` — hide a channel on the terminal sink.
// Mirrors do_enable: on a wildcard sink the channel is added to the
// exclude list; on an explicit list it is filtered out. Errors out
// when no configuration or no terminal sink exists.
function do_disable() {
var channel = null
var config = null
var sink = null
var i = 0
var new_channels = []
var already_excluded = false
if (length(args) < 2) {
log.error("Usage: cell log disable <channel>")
return
}
channel = args[1]
config = load_config()
if (!config || !config.sink) {
log.error("No log configuration found")
return
}
sink = find_terminal_sink(config)
if (!sink) {
log.error("No terminal sink found")
return
}
if (is_array(sink.channels) && length(sink.channels) == 1 && sink.channels[0] == "*") {
// Wildcard sink: disabling means excluding (idempotently).
if (!is_array(sink.exclude)) sink.exclude = []
already_excluded = false
arrfor(sink.exclude, function(ex) {
if (ex == channel) already_excluded = true
})
if (!already_excluded) sink.exclude[] = channel
} else {
// Explicit list: rebuild the list without the channel.
if (is_array(sink.channels)) {
arrfor(sink.channels, function(ch) {
if (ch != channel) new_channels[] = ch
})
sink.channels = new_channels
}
}
save_config(config)
log.console("Disabled channel: " + channel)
}
// `cell log channels` — print each known channel with its
// enabled/disabled status on the terminal sink. A wildcard sink marks
// every known channel active minus its exclude list; no sink at all
// falls back to the implicit console+error defaults.
function do_channels() {
var config = load_config()
var sink = null
// NOTE(review): is_wildcard is set below but never read — candidate
// for removal.
var is_wildcard = false
var active = {}
if (config) sink = find_terminal_sink(config)
if (sink) {
if (is_array(sink.channels) && length(sink.channels) == 1 && sink.channels[0] == "*") {
is_wildcard = true
// Start with everything on, then switch off the excluded ones.
arrfor(known_channels, function(ch) { active[ch] = true })
if (is_array(sink.exclude)) {
arrfor(sink.exclude, function(ex) { active[ex] = false })
}
} else if (is_array(sink.channels)) {
arrfor(sink.channels, function(ch) { active[ch] = true })
}
} else {
// No terminal sink configured: assume the built-in defaults.
active.console = true
active.error = true
}
log.console("Channels:")
arrfor(known_channels, function(ch) {
var status = active[ch] ? "enabled" : "disabled"
log.console(" " + ch + ": " + status)
})
}
// Main dispatch
@@ -335,16 +481,26 @@ if (length(args) == 0) {
print_help()
} else if (args[0] == 'list') {
do_list()
} else if (args[0] == 'channels') {
do_channels()
} else if (args[0] == 'enable') {
do_enable()
} else if (args[0] == 'disable') {
do_disable()
} else if (args[0] == 'add') {
do_add()
} else if (args[0] == 'remove') {
do_remove()
} else if (args[0] == 'read') {
do_read()
} else if (args[0] == 'tail') {
do_tail()
} else if (args[0] == 'route') {
do_route()
} else if (args[0] == 'unroute') {
do_unroute()
} else if (args[0] == 'stack') {
do_stack()
} else if (args[0] == 'unstack') {
do_unstack()
} else {
log.error("Unknown command: " + args[0])
log.console("Unknown command: " + args[0])
print_help()
}

View File

@@ -85,7 +85,7 @@ var dump_function = function(func, name) {
parts = []
j = 1
while (j < n - 2) {
push(parts, fmt_val(instr[j]))
parts[] = fmt_val(instr[j])
j = j + 1
}
operands = text(parts, ", ")

568
mcode.cm
View File

@@ -166,7 +166,7 @@ var mcode = function(ast) {
// Variable tracking
var add_var = function(name, slot, is_const) {
push(s_vars, {name: name, slot: slot, is_const: is_const, is_closure: false})
s_vars[] = {name: name, slot: slot, is_const: is_const, is_closure: false}
}
var find_var = function(name) {
@@ -228,13 +228,13 @@ var mcode = function(ast) {
// Instruction emission
var add_instr = function(instr) {
push(instr, s_cur_line)
push(instr, s_cur_col)
push(s_instructions, instr)
instr[] = s_cur_line
instr[] = s_cur_col
s_instructions[] = instr
}
var emit_label = function(label) {
push(s_instructions, label)
s_instructions[] = label
}
var emit_0 = function(op) {
@@ -362,6 +362,8 @@ var mcode = function(ast) {
s_slot_types[text(dest)] = s_slot_types[text(src)]
}
var emit_numeric_binop = null
// emit_add_decomposed: emit type-dispatched add (text → concat, num → add)
// reads _bp_dest, _bp_left, _bp_right, _bp_ln, _bp_rn from closure
var emit_add_decomposed = function() {
@@ -421,7 +423,7 @@ var mcode = function(ast) {
// emit_numeric_binop: emit type-guarded numeric binary op
// reads _bp_dest, _bp_left, _bp_right, _bp_ln, _bp_rn from closure
var emit_numeric_binop = function(op_str) {
emit_numeric_binop = function(op_str) {
var left_known = is_known_number(_bp_ln) || slot_is_num(_bp_left)
var right_known = is_known_number(_bp_rn) || slot_is_num(_bp_right)
var t0 = null
@@ -741,7 +743,7 @@ var mcode = function(ast) {
slot = alloc_slot()
lit = {kind: "name", name: name, make: "intrinsic"}
add_instr(["access", slot, lit])
push(s_intrinsic_cache, {name: name, slot: slot})
s_intrinsic_cache[] = {name: name, slot: slot}
_i = _i + 1
}
}
@@ -878,10 +880,124 @@ var mcode = function(ast) {
var inline_every = true
var inline_some = true
var inline_reduce = true
var inline_map = true
var inline_map = false
var inline_find = true
// --- Helper: emit arity-dispatched callback invocation ---
// ctx = {fn, fn_arity, result, null_s, frame, zero, one, az, ao, prefix,
// known_arity (optional — compile-time arity of callback literal)}
// args = [slot_for_arg1, slot_for_arg2] — data args (not this)
// max_args = 1 or 2 — how many data args to support
// Emits a call to ctx.fn that passes as many data args as the callback
// accepts: either a single statically-selected call (when known_arity is
// given) or a runtime arity-dispatch chain comparing ctx.fn_arity against
// 0 and 1. Arg slot 0 is always ctx.null_s (the `this` binding); the
// invoke result lands in ctx.result.
var emit_arity_call = function(ctx, args, max_args) {
var call_one = null
var call_two = null
var call_done = null
var ka = ctx.known_arity
// When callback arity is known at compile time, emit only the matching
// call path. This avoids dead branches where parameters are nulled,
// which confuse the type checker after inlining (e.g. push on null).
if (ka != null) {
// Clamp to max_args: a callback declaring more params than we supply
// still only receives max_args data args.
if (ka >= max_args) {
ka = max_args
}
if (ka == 0) {
// Zero-arity callback: frame with no data args, only the null this.
emit_3("frame", ctx.frame, ctx.fn, 0)
emit_3("setarg", ctx.frame, 0, ctx.null_s)
emit_2("invoke", ctx.frame, ctx.result)
} else if (ka == 1 || max_args < 2) {
// One data arg (either the callback takes one, or we only have one).
emit_3("frame", ctx.frame, ctx.fn, 1)
emit_3("setarg", ctx.frame, 0, ctx.null_s)
emit_3("setarg", ctx.frame, 1, args[0])
emit_2("invoke", ctx.frame, ctx.result)
} else {
// Two data args.
emit_3("frame", ctx.frame, ctx.fn, 2)
emit_3("setarg", ctx.frame, 0, ctx.null_s)
emit_3("setarg", ctx.frame, 1, args[0])
emit_3("setarg", ctx.frame, 2, args[1])
emit_2("invoke", ctx.frame, ctx.result)
}
return null
}
// Runtime dispatch: arity is unknown until execution, so emit a chain
// that tests fn_arity == 0, then (optionally) == 1, falling through to
// the two-arg call. Labels are prefixed so generated code is traceable.
call_one = gen_label(ctx.prefix + "_c1")
call_two = gen_label(ctx.prefix + "_c2")
call_done = gen_label(ctx.prefix + "_cd")
emit_3("eq", ctx.az, ctx.fn_arity, ctx.zero)
emit_jump_cond("jump_false", ctx.az, call_one)
emit_3("frame", ctx.frame, ctx.fn, 0)
emit_3("setarg", ctx.frame, 0, ctx.null_s)
emit_2("invoke", ctx.frame, ctx.result)
emit_jump(call_done)
emit_label(call_one)
if (max_args >= 2) {
// Only test for arity 1 when a two-arg path exists to fall through to.
emit_3("eq", ctx.ao, ctx.fn_arity, ctx.one)
emit_jump_cond("jump_false", ctx.ao, call_two)
}
emit_3("frame", ctx.frame, ctx.fn, 1)
emit_3("setarg", ctx.frame, 0, ctx.null_s)
emit_3("setarg", ctx.frame, 1, args[0])
emit_2("invoke", ctx.frame, ctx.result)
if (max_args < 2) {
// No two-arg path: the one-arg call is the last branch.
emit_label(call_done)
return null
}
emit_jump(call_done)
emit_label(call_two)
emit_3("frame", ctx.frame, ctx.fn, 2)
emit_3("setarg", ctx.frame, 0, ctx.null_s)
emit_3("setarg", ctx.frame, 1, args[0])
emit_3("setarg", ctx.frame, 2, args[1])
emit_2("invoke", ctx.frame, ctx.result)
emit_label(call_done)
return null
}
// --- Helper: guard that reverse param is logical (or null) ---
// Disrupts if rev_slot is not null and not a boolean.
// rev_slot — slot holding the caller-supplied "reverse" argument
// msg — error text logged before the disrupt
var emit_reverse_guard = function(rev_slot, msg) {
var ok_label = gen_label("rev_ok")
var g = alloc_slot()
// null is accepted (argument omitted): skip the type check entirely.
emit_jump_cond("jump_null", rev_slot, ok_label)
emit_2("is_bool", g, rev_slot)
emit_jump_cond("jump_true", g, ok_label)
// Neither null nor boolean: report and abort execution.
emit_log_error(msg)
emit_0("disrupt")
emit_label(ok_label)
}
// --- Helper: forward loop scaffolding ---
// L = {arr, len, i, check, item, one, loop_label, done_label}
// body_fn(L) — called between element load and increment
// Emits: i = 0; while (i < len) { item = arr[i]; <body>; i = i + 1 }.
// L.len must already hold the array length; L.one must already hold 1.
var emit_forward_loop = function(L, body_fn) {
emit_2("int", L.i, 0)
emit_label(L.loop_label)
emit_3("lt", L.check, L.i, L.len)
emit_jump_cond("jump_false", L.check, L.done_label)
emit_3("load_index", L.item, L.arr, L.i)
// body_fn runs at compile time, emitting the per-element instructions.
body_fn(L)
emit_3("add", L.i, L.i, L.one)
emit_jump(L.loop_label)
emit_label(L.done_label)
return null
}
// --- Helper: reverse loop scaffolding ---
// Mirror of emit_forward_loop walking from len-1 down to 0:
// i = len - 1; while (i >= 0) { item = arr[i]; <body>; i = i - 1 }.
// Allocates its own zero-constant slot since L carries none.
var emit_reverse_loop = function(L, body_fn) {
var zero = alloc_slot()
emit_2("int", zero, 0)
emit_3("subtract", L.i, L.len, L.one)
emit_label(L.loop_label)
emit_3("ge", L.check, L.i, zero)
emit_jump_cond("jump_false", L.check, L.done_label)
emit_3("load_index", L.item, L.arr, L.i)
// body_fn runs at compile time, emitting the per-element instructions.
body_fn(L)
emit_3("subtract", L.i, L.i, L.one)
emit_jump(L.loop_label)
emit_label(L.done_label)
return null
}
// --- Helper: emit a reduce loop body ---
// r = {acc, i, arr, fn, len, fn_arity}; emits loop updating acc in-place.
// r = {acc, i, arr, fn, len, fn_arity, known_arity}; emits loop updating acc in-place.
// Caller must emit the done_label after calling this.
var emit_reduce_loop = function(r, forward, done_label) {
var acc = r.acc
@@ -895,13 +1011,13 @@ var mcode = function(ast) {
var null_s = alloc_slot()
var one = alloc_slot()
var zero = alloc_slot()
var arity_is_zero = alloc_slot()
var arity_is_one = alloc_slot()
var az = alloc_slot()
var ao = alloc_slot()
var f = alloc_slot()
var loop_label = gen_label("reduce_loop")
var call_one_label = gen_label("reduce_call_one")
var call_two_label = gen_label("reduce_call_two")
var call_done_label = gen_label("reduce_call_done")
var ctx = {fn: fn_slot, fn_arity: fn_arity, result: acc, null_s: null_s,
frame: f, zero: zero, one: one, az: az, ao: ao, prefix: "reduce",
known_arity: r.known_arity}
emit_2("int", one, 1)
emit_2("int", zero, 0)
emit_1("null", null_s)
@@ -913,27 +1029,7 @@ var mcode = function(ast) {
}
emit_jump_cond("jump_false", check, done_label)
emit_3("load_index", item, arr_slot, i)
emit_3("eq", arity_is_zero, fn_arity, zero)
emit_jump_cond("jump_false", arity_is_zero, call_one_label)
emit_3("frame", f, fn_slot, 0)
emit_3("setarg", f, 0, null_s)
emit_2("invoke", f, acc)
emit_jump(call_done_label)
emit_label(call_one_label)
emit_3("eq", arity_is_one, fn_arity, one)
emit_jump_cond("jump_false", arity_is_one, call_two_label)
emit_3("frame", f, fn_slot, 1)
emit_3("setarg", f, 0, null_s)
emit_3("setarg", f, 1, acc)
emit_2("invoke", f, acc)
emit_jump(call_done_label)
emit_label(call_two_label)
emit_3("frame", f, fn_slot, 2)
emit_3("setarg", f, 0, null_s)
emit_3("setarg", f, 1, acc)
emit_3("setarg", f, 2, item)
emit_2("invoke", f, acc)
emit_label(call_done_label)
emit_arity_call(ctx, [acc, item], 2)
if (forward) {
emit_3("add", i, i, one)
} else {
@@ -942,60 +1038,64 @@ var mcode = function(ast) {
emit_jump(loop_label)
}
// --- Inline expansion: arrfor(arr, fn) ---
var expand_inline_arrfor = function(dest, arr_slot, fn_slot) {
// --- Inline expansion: arrfor(arr, fn[, rev[, exit]]) ---
var expand_inline_arrfor = function(dest, args, nargs) {
var arr_slot = args.arr
var fn_slot = args.fn
var len = alloc_slot()
var i = alloc_slot()
var check = alloc_slot()
var item = alloc_slot()
var fn_arity = alloc_slot()
var arity_is_zero = alloc_slot()
var arity_is_one = alloc_slot()
var az = alloc_slot()
var ao = alloc_slot()
var null_s = alloc_slot()
var zero = alloc_slot()
var one = alloc_slot()
var f = alloc_slot()
var discard = alloc_slot()
var loop_label = gen_label("arrfor_loop")
var done_label = gen_label("arrfor_done")
var call_one_label = gen_label("arrfor_call_one")
var call_two_label = gen_label("arrfor_call_two")
var call_done_label = gen_label("arrfor_call_done")
var val = alloc_slot()
var eq_check = alloc_slot()
var early_exit = gen_label("arrfor_exit")
var done_final = gen_label("arrfor_final")
var rev_label = gen_label("arrfor_rev")
var final_label = gen_label("arrfor_fwd_done")
var fwd_L = {arr: arr_slot, len: len, i: i, check: check, item: item, one: one,
loop_label: gen_label("arrfor_fwd"), done_label: gen_label("arrfor_fwd_d")}
var rev_L = {arr: arr_slot, len: len, i: i, check: check, item: item, one: one,
loop_label: gen_label("arrfor_rev_l"), done_label: gen_label("arrfor_rev_d")}
var ctx = {fn: fn_slot, fn_arity: fn_arity, result: val, null_s: null_s,
frame: f, zero: zero, one: one, az: az, ao: ao, prefix: "arrfor"}
var body_fn = function(L) {
emit_arity_call(ctx, [L.item, L.i], 2)
if (nargs >= 4 && args.exit >= 0) {
emit_3("eq", eq_check, val, args.exit)
emit_jump_cond("jump_true", eq_check, early_exit)
}
return null
}
emit_2("length", len, arr_slot)
emit_2("int", i, 0)
emit_2("int", zero, 0)
emit_2("int", one, 1)
emit_1("null", null_s)
emit_2("length", fn_arity, fn_slot)
emit_label(loop_label)
emit_3("lt", check, i, len)
emit_jump_cond("jump_false", check, done_label)
emit_3("load_index", item, arr_slot, i)
emit_3("eq", arity_is_zero, fn_arity, zero)
emit_jump_cond("jump_false", arity_is_zero, call_one_label)
emit_3("frame", f, fn_slot, 0)
emit_3("setarg", f, 0, null_s)
emit_2("invoke", f, discard)
emit_jump(call_done_label)
emit_label(call_one_label)
emit_3("eq", arity_is_one, fn_arity, one)
emit_jump_cond("jump_false", arity_is_one, call_two_label)
emit_3("frame", f, fn_slot, 1)
emit_3("setarg", f, 0, null_s)
emit_3("setarg", f, 1, item)
emit_2("invoke", f, discard)
emit_jump(call_done_label)
emit_label(call_two_label)
emit_3("frame", f, fn_slot, 2)
emit_3("setarg", f, 0, null_s)
emit_3("setarg", f, 1, item)
emit_3("setarg", f, 2, i)
emit_2("invoke", f, discard)
emit_label(call_done_label)
emit_3("add", i, i, one)
emit_jump(loop_label)
emit_label(done_label)
if (nargs <= 2) {
emit_forward_loop(fwd_L, body_fn)
} else {
emit_reverse_guard(args.rev, "arrfor: reverse must be a logical")
emit_jump_cond("wary_true", args.rev, rev_label)
emit_forward_loop(fwd_L, body_fn)
emit_jump(final_label)
emit_label(rev_label)
emit_reverse_loop(rev_L, body_fn)
emit_label(final_label)
}
emit_1("null", dest)
emit_jump(done_final)
if (nargs >= 4 && args.exit >= 0) {
emit_label(early_exit)
emit_2("move", dest, val)
}
emit_label(done_final)
return dest
}
@@ -1040,7 +1140,7 @@ var mcode = function(ast) {
emit_3("setarg", f, 1, item)
emit_2("invoke", f, val)
emit_label(call_done_label)
emit_jump_cond("jump_false", val, ret_false)
emit_jump_cond("wary_false", val, ret_false)
emit_3("add", i, i, one)
emit_jump(loop_label)
emit_label(ret_true)
@@ -1093,7 +1193,7 @@ var mcode = function(ast) {
emit_3("setarg", f, 1, item)
emit_2("invoke", f, val)
emit_label(call_done_label)
emit_jump_cond("jump_true", val, ret_true)
emit_jump_cond("wary_true", val, ret_true)
emit_3("add", i, i, one)
emit_jump(loop_label)
emit_label(ret_true)
@@ -1113,58 +1213,160 @@ var mcode = function(ast) {
var check = alloc_slot()
var item = alloc_slot()
var fn_arity = alloc_slot()
var arity_is_zero = alloc_slot()
var arity_is_one = alloc_slot()
var az = alloc_slot()
var ao = alloc_slot()
var null_s = alloc_slot()
var zero = alloc_slot()
var one = alloc_slot()
var f = alloc_slot()
var val = alloc_slot()
var loop_label = gen_label("filter_loop")
var call_one_label = gen_label("filter_call_one")
var call_two_label = gen_label("filter_call_two")
var call_done_label = gen_label("filter_call_done")
var skip_label = gen_label("filter_skip")
var done_label = gen_label("filter_done")
var skip = gen_label("filter_skip")
var bail = gen_label("filter_bail")
var bool_check = alloc_slot()
var ctx = {fn: fn_slot, fn_arity: fn_arity, result: val, null_s: null_s,
frame: f, zero: zero, one: one, az: az, ao: ao, prefix: "filter"}
var L = {arr: arr_slot, len: len, i: i, check: check, item: item, one: one,
loop_label: gen_label("filter_loop"), done_label: gen_label("filter_done")}
add_instr(["array", result, 0])
emit_2("length", len, arr_slot)
emit_2("int", i, 0)
emit_2("int", zero, 0)
emit_2("int", one, 1)
emit_1("null", null_s)
emit_2("length", fn_arity, fn_slot)
emit_label(loop_label)
emit_3("lt", check, i, len)
emit_jump_cond("jump_false", check, done_label)
emit_3("load_index", item, arr_slot, i)
emit_3("eq", arity_is_zero, fn_arity, zero)
emit_jump_cond("jump_false", arity_is_zero, call_one_label)
emit_3("frame", f, fn_slot, 0)
emit_3("setarg", f, 0, null_s)
emit_2("invoke", f, val)
emit_jump(call_done_label)
emit_label(call_one_label)
emit_3("eq", arity_is_one, fn_arity, one)
emit_jump_cond("jump_false", arity_is_one, call_two_label)
emit_3("frame", f, fn_slot, 1)
emit_3("setarg", f, 0, null_s)
emit_3("setarg", f, 1, item)
emit_2("invoke", f, val)
emit_jump(call_done_label)
emit_label(call_two_label)
emit_3("frame", f, fn_slot, 2)
emit_3("setarg", f, 0, null_s)
emit_3("setarg", f, 1, item)
emit_3("setarg", f, 2, i)
emit_2("invoke", f, val)
emit_label(call_done_label)
emit_jump_cond("jump_false", val, skip_label)
emit_2("push", result, item)
emit_label(skip_label)
emit_3("add", i, i, one)
emit_jump(loop_label)
emit_label(done_label)
emit_forward_loop(L, function(L) {
emit_arity_call(ctx, [L.item, L.i], 2)
emit_2("is_bool", bool_check, val)
emit_jump_cond("jump_false", bool_check, bail)
emit_jump_cond("jump_false", val, skip)
emit_2("push", result, L.item)
emit_label(skip)
return null
})
emit_2("move", dest, result)
var end = gen_label("filter_end")
emit_jump(end)
emit_label(bail)
emit_1("null", dest)
emit_label(end)
return dest
}
// --- Inline expansion: find(arr, target[, rev[, from]]) ---
// Emits inline mcode for find. Two modes chosen at runtime by is_func:
//   value mode    — compare each element to target with eq
//   function mode — invoke target as a predicate via emit_arity_call
// dest receives the matching index, or null when nothing matches.
// args = {arr, target, rev, from}; rev/from are slot ids, -1 when absent.
// nargs is the call-site argument count (2..4).
var expand_inline_find = function(dest, args, nargs) {
var arr_slot = args.arr
var target = args.target
var len = alloc_slot()
var i = alloc_slot()
var check = alloc_slot()
var item = alloc_slot()
var fn_arity = alloc_slot()
var az = alloc_slot()
var ao = alloc_slot()
var null_s = alloc_slot()
var zero = alloc_slot()
var one = alloc_slot()
var f = alloc_slot()
var val = alloc_slot()
var is_fn = alloc_slot()
var eq_check = alloc_slot()
var fn_mode_label = gen_label("find_fn")
var found_label = gen_label("find_found")
var not_found_label = gen_label("find_nf")
var final_label = gen_label("find_final")
var vrev = gen_label("find_vrev")
var vdone = gen_label("find_vdone")
var frev = gen_label("find_frev")
var fdone = gen_label("find_fdone")
// One loop-descriptor per emitted loop; all share the same i/check/item
// slots (the loops are mutually exclusive at runtime) but need distinct
// labels: vL/vrL value fwd/rev, fL plain fn fwd, ffL fn fwd with `from`,
// frL fn rev.
var vL = {arr: arr_slot, len: len, i: i, check: check, item: item, one: one,
loop_label: gen_label("find_vl"), done_label: gen_label("find_vd")}
var vrL = {arr: arr_slot, len: len, i: i, check: check, item: item, one: one,
loop_label: gen_label("find_vrl"), done_label: gen_label("find_vrd")}
var fL = {arr: arr_slot, len: len, i: i, check: check, item: item, one: one,
loop_label: gen_label("find_fl"), done_label: gen_label("find_fd")}
var ffL = {arr: arr_slot, len: len, i: i, check: check, item: item, one: one,
loop_label: gen_label("find_ffl"), done_label: gen_label("find_ffd")}
var frL = {arr: arr_slot, len: len, i: i, check: check, item: item, one: one,
loop_label: gen_label("find_frl"), done_label: gen_label("find_frd")}
var ctx = {fn: target, fn_arity: fn_arity, result: val, null_s: null_s,
frame: f, zero: zero, one: one, az: az, ao: ao, prefix: "find"}
// Loop body for value mode: jump out as soon as item == target.
var val_body = function(L) {
emit_3("eq", eq_check, L.item, target)
emit_jump_cond("jump_true", eq_check, found_label)
return null
}
// Loop body for function mode: call predicate, jump out on truthy result.
var fn_body = function(L) {
emit_arity_call(ctx, [L.item, L.i], 2)
emit_jump_cond("wary_true", val, found_label)
return null
}
emit_2("length", len, arr_slot)
emit_2("int", zero, 0)
emit_2("int", one, 1)
emit_1("null", null_s)
if (nargs > 2) {
emit_reverse_guard(args.rev, "find: reverse must be a logical")
}
// Runtime mode split: predicate target vs plain value target.
emit_2("is_func", is_fn, target)
emit_jump_cond("jump_true", is_fn, fn_mode_label)
// === Value mode ===
if (nargs <= 2) {
emit_forward_loop(vL, val_body)
} else {
emit_jump_cond("wary_true", args.rev, vrev)
if (nargs >= 4 && args.from >= 0) {
emit_2("move", i, args.from)
}
if (nargs >= 4 && args.from >= 0) {
// Manual forward loop: emit_forward_loop would reset i to 0, which
// would discard the `from` start index moved in above.
emit_label(vL.loop_label)
emit_3("lt", vL.check, vL.i, vL.len)
emit_jump_cond("jump_false", vL.check, vL.done_label)
emit_3("load_index", vL.item, vL.arr, vL.i)
val_body(vL)
emit_3("add", vL.i, vL.i, vL.one)
emit_jump(vL.loop_label)
emit_label(vL.done_label)
} else {
emit_forward_loop(vL, val_body)
}
emit_jump(vdone)
emit_label(vrev)
// NOTE(review): `from` is not applied on the reverse path — the
// reverse scan always starts at len-1. Confirm this is intended.
emit_reverse_loop(vrL, val_body)
emit_label(vdone)
}
emit_jump(not_found_label)
// === Function mode ===
emit_label(fn_mode_label)
// Predicate arity is read at runtime (functions report their arity
// through "length") and dispatched inside emit_arity_call.
emit_2("length", fn_arity, target)
if (nargs <= 2) {
emit_forward_loop(fL, fn_body)
} else {
emit_jump_cond("wary_true", args.rev, frev)
if (nargs >= 4 && args.from >= 0) {
emit_2("move", i, args.from)
}
if (nargs >= 4 && args.from >= 0) {
// Manual forward loop honoring `from`, as in value mode.
emit_label(ffL.loop_label)
emit_3("lt", ffL.check, ffL.i, ffL.len)
emit_jump_cond("jump_false", ffL.check, ffL.done_label)
emit_3("load_index", ffL.item, ffL.arr, ffL.i)
fn_body(ffL)
emit_3("add", ffL.i, ffL.i, ffL.one)
emit_jump(ffL.loop_label)
emit_label(ffL.done_label)
} else {
emit_forward_loop(ffL, fn_body)
}
emit_jump(fdone)
emit_label(frev)
emit_reverse_loop(frL, fn_body)
emit_label(fdone)
}
// Fallthrough from every exhausted loop: no match → dest = null.
emit_label(not_found_label)
emit_1("null", dest)
emit_jump(final_label)
// Match: dest = index where the hit occurred.
emit_label(found_label)
emit_2("move", dest, i)
emit_label(final_label)
return dest
}
@@ -1285,7 +1487,8 @@ var mcode = function(ast) {
emit_2("length", fn_arity, fn_slot)
emit_2("int", zero, 0)
emit_2("int", one, 1)
r = {acc: acc, i: i, arr: arr_slot, fn: fn_slot, len: len, fn_arity: fn_arity}
r = {acc: acc, i: i, arr: arr_slot, fn: fn_slot, len: len, fn_arity: fn_arity,
known_arity: args.fn_known_arity}
if (nargs == 2) {
null_label = gen_label("reduce_null")
d1 = gen_label("reduce_d1")
@@ -1337,12 +1540,13 @@ var mcode = function(ast) {
d2 = gen_label("reduce_d2")
d3 = gen_label("reduce_d3")
d4 = gen_label("reduce_d4")
emit_reverse_guard(rev_slot, "reduce: reverse must be a logical")
emit_2("is_null", check, init_slot)
emit_jump_cond("jump_false", check, has_init)
// No initial
emit_3("lt", check, zero, len)
emit_jump_cond("jump_false", check, null_label)
emit_jump_cond("jump_true", rev_slot, no_init_rev)
emit_jump_cond("wary_true", rev_slot, no_init_rev)
// No initial, forward
emit_3("load_index", acc, arr_slot, zero)
emit_2("move", i, one)
@@ -1364,7 +1568,7 @@ var mcode = function(ast) {
emit_jump(final_label)
// Has initial
emit_label(has_init)
emit_jump_cond("jump_true", rev_slot, init_rev)
emit_jump_cond("wary_true", rev_slot, init_rev)
// Has initial, forward
emit_2("move", acc, init_slot)
emit_2("int", i, 0)
@@ -1411,7 +1615,7 @@ var mcode = function(ast) {
left_slot = gen_expr(left, -1)
dest = alloc_slot()
emit_2("move", dest, left_slot)
emit_jump_cond("jump_false", dest, end_label)
emit_jump_cond("wary_false", dest, end_label)
right_slot = gen_expr(right, -1)
emit_2("move", dest, right_slot)
emit_label(end_label)
@@ -1423,7 +1627,7 @@ var mcode = function(ast) {
left_slot = gen_expr(left, -1)
dest = alloc_slot()
emit_2("move", dest, left_slot)
emit_jump_cond("jump_true", dest, end_label)
emit_jump_cond("wary_true", dest, end_label)
right_slot = gen_expr(right, -1)
emit_2("move", dest, right_slot)
emit_label(end_label)
@@ -1734,6 +1938,8 @@ var mcode = function(ast) {
var guard_t = 0
var guard_err = null
var guard_done = null
var cb_known = null
var cb_p = null
if (expr == null) {
return -1
@@ -1768,7 +1974,7 @@ var mcode = function(ast) {
expr_slots = []
_i = 0
while (_i < nexpr) {
push(expr_slots, gen_expr(list[_i], -1))
expr_slots[] = gen_expr(list[_i], -1)
_i = _i + 1
}
// Create array from expression results
@@ -1877,6 +2083,23 @@ var mcode = function(ast) {
if (kind == "[") {
obj = expr.left
idx = expr.right
if (idx == null) {
// arr[] pop expression
obj_slot = gen_expr(obj, -1)
guard_t = alloc_slot()
guard_err = gen_label("pop_err")
guard_done = gen_label("pop_done")
emit_2("is_array", guard_t, obj_slot)
emit_jump_cond("jump_false", guard_t, guard_err)
slot = target >= 0 ? target : alloc_slot()
emit_2("pop", slot, obj_slot)
emit_jump(guard_done)
emit_label(guard_err)
emit_log_error("cannot pop: target must be an array")
emit_0("disrupt")
emit_label(guard_done)
return slot
}
obj_slot = gen_expr(obj, -1)
idx_slot = gen_expr(idx, -1)
slot = alloc_slot()
@@ -1992,12 +2215,22 @@ var mcode = function(ast) {
emit_label(guard_done)
return a1
}
// Callback intrinsics → inline mcode loops
if (nargs == 2 && fname == "arrfor" && inline_arrfor) {
// apply(fn, arr) → direct opcode
if (nargs == 2 && fname == "apply") {
a0 = gen_expr(args_list[0], -1)
a1 = gen_expr(args_list[1], -1)
d = alloc_slot()
return expand_inline_arrfor(d, a0, a1)
emit_3("apply", d, a0, a1)
return d
}
// Callback intrinsics → inline mcode loops
if (fname == "arrfor" && nargs >= 2 && nargs <= 4 && inline_arrfor) {
a0 = gen_expr(args_list[0], -1)
a1 = gen_expr(args_list[1], -1)
a2 = nargs >= 3 ? gen_expr(args_list[2], -1) : -1
a3 = nargs >= 4 ? gen_expr(args_list[3], -1) : -1
d = alloc_slot()
return expand_inline_arrfor(d, {arr: a0, fn: a1, rev: a2, exit: a3}, nargs)
}
if (nargs == 2 && fname == "every" && inline_every) {
a0 = gen_expr(args_list[0], -1)
@@ -2017,33 +2250,30 @@ var mcode = function(ast) {
d = alloc_slot()
return expand_inline_filter(d, a0, a1)
}
if (fname == "find" && nargs >= 2 && nargs <= 4 && inline_find) {
a0 = gen_expr(args_list[0], -1)
a1 = gen_expr(args_list[1], -1)
a2 = nargs >= 3 ? gen_expr(args_list[2], -1) : -1
a3 = nargs >= 4 ? gen_expr(args_list[3], -1) : -1
d = alloc_slot()
return expand_inline_find(d, {arr: a0, target: a1, rev: a2, from: a3}, nargs)
}
if (fname == "reduce" && nargs >= 2 && nargs <= 4 && inline_reduce) {
a0 = gen_expr(args_list[0], -1)
a1 = gen_expr(args_list[1], -1)
a2 = nargs >= 3 ? gen_expr(args_list[2], -1) : -1
a3 = nargs >= 4 ? gen_expr(args_list[3], -1) : -1
d = alloc_slot()
return expand_inline_reduce(d, {arr: a0, fn: a1, init: a2, rev: a3}, nargs)
}
// array(arr, fn) → inline map expansion
// Skip when first arg is a number literal (that's array(N, fn) — creation, not map)
if (nargs == 2 && fname == "array" && inline_map
&& args_list[0].kind != "number") {
// Specialized: array(arr, known_sensory_intrinsic) → direct opcode loop
if (args_list[1].kind == "name" && args_list[1].intrinsic == true
&& sensory_ops[args_list[1].name] != null) {
a0 = gen_expr(args_list[0], -1)
d = alloc_slot()
return expand_inline_map_intrinsic(d, a0, sensory_ops[args_list[1].name])
}
// General: array(arr, fn_literal) → map loop with arity dispatch
cb_known = null
if (args_list[1].kind == "function") {
a0 = gen_expr(args_list[0], -1)
a1 = gen_expr(args_list[1], -1)
d = alloc_slot()
return expand_inline_map(d, a0, a1)
cb_p = args_list[1].list
if (cb_p == null) cb_p = args_list[1].parameters
cb_known = cb_p != null ? length(cb_p) : 0
}
return expand_inline_reduce(d, {arr: a0, fn: a1, init: a2, rev: a3,
fn_known_arity: cb_known}, nargs)
}
// array(arr, fn) inline expansion removed — array() is too complex to inline
}
// Collect arg slots
@@ -2051,7 +2281,7 @@ var mcode = function(ast) {
_i = 0
nargs = args_list != null ? length(args_list) : 0
while (_i < nargs) {
push(arg_slots, gen_expr(args_list[_i], -1))
arg_slots[] = gen_expr(args_list[_i], -1)
_i = _i + 1
}
dest = alloc_slot()
@@ -2179,7 +2409,7 @@ var mcode = function(ast) {
obj = operand.left
prop = operand.right
obj_slot = gen_expr(obj, -1)
push(s_instructions, ["delete", slot, obj_slot, prop])
add_instr(["delete", slot, obj_slot, prop])
} else if (operand_kind == "[") {
obj = operand.left
idx = operand.right
@@ -2200,7 +2430,7 @@ var mcode = function(ast) {
else_label = gen_label("tern_else")
end_label = gen_label("tern_end")
cond_slot = gen_expr(cond, -1)
emit_jump_cond("jump_false", cond_slot, else_label)
emit_jump_cond("wary_false", cond_slot, else_label)
dest = alloc_slot()
then_slot = gen_expr(then_expr, -1)
emit_2("move", dest, then_slot)
@@ -2219,7 +2449,7 @@ var mcode = function(ast) {
elem_slots = []
_i = 0
while (_i < count) {
push(elem_slots, gen_expr(list[_i], -1))
elem_slots[] = gen_expr(list[_i], -1)
_i = _i + 1
}
dest = alloc_slot()
@@ -2236,7 +2466,7 @@ var mcode = function(ast) {
if (kind == "record") {
list = expr.list
dest = alloc_slot()
push(s_instructions, ["record", dest, length(list)])
s_instructions[] = ["record", dest, length(list)]
_i = 0
while (_i < length(list)) {
pair = list[_i]
@@ -2266,7 +2496,7 @@ var mcode = function(ast) {
func = gen_function(expr)
func_id = s_func_counter
s_func_counter = s_func_counter + 1
push(s_functions, func)
s_functions[] = func
dest = alloc_slot()
emit_2("function", dest, func_id)
return dest
@@ -2425,7 +2655,7 @@ var mcode = function(ast) {
else_label = gen_label("if_else")
end_label = gen_label("if_end")
cond_slot = gen_expr(cond, -1)
emit_jump_cond("jump_false", cond_slot, else_label)
emit_jump_cond("wary_false", cond_slot, else_label)
_i = 0
while (_i < length(then_stmts)) {
gen_statement(then_stmts[_i])
@@ -2466,7 +2696,7 @@ var mcode = function(ast) {
}
emit_label(start_label)
cond_slot = gen_expr(cond, -1)
emit_jump_cond("jump_false", cond_slot, end_label)
emit_jump_cond("wary_false", cond_slot, end_label)
_i = 0
while (_i < length(stmts)) {
gen_statement(stmts[_i])
@@ -2501,7 +2731,7 @@ var mcode = function(ast) {
}
emit_label(cond_label)
cond_slot = gen_expr(cond, -1)
emit_jump_cond("jump_true", cond_slot, start_label)
emit_jump_cond("wary_true", cond_slot, start_label)
emit_label(end_label)
s_loop_break = old_break
s_loop_continue = old_continue
@@ -2535,7 +2765,7 @@ var mcode = function(ast) {
emit_label(start_label)
if (test != null) {
test_slot = gen_expr(test, -1)
emit_jump_cond("jump_false", test_slot, end_label)
emit_jump_cond("wary_false", test_slot, end_label)
}
_i = 0
while (_i < length(stmts)) {
@@ -2584,7 +2814,7 @@ var mcode = function(ast) {
_i = 0
nargs = args_list != null ? length(args_list) : 0
while (_i < nargs) {
push(arg_slots, gen_expr(args_list[_i], -1))
arg_slots[] = gen_expr(args_list[_i], -1)
_i = _i + 1
}
callee_kind = callee.kind
@@ -2639,7 +2869,7 @@ var mcode = function(ast) {
case_kind = case_node.kind
if (case_kind == "default") {
default_label = gen_label("switch_default")
push(case_labels, default_label)
case_labels[] = default_label
} else {
case_label = gen_label("switch_case")
case_expr = case_node.expression
@@ -2649,7 +2879,7 @@ var mcode = function(ast) {
_bp_rn = case_expr
emit_binop("eq", cmp_slot, switch_val, case_val)
emit_jump_cond("jump_true", cmp_slot, case_label)
push(case_labels, case_label)
case_labels[] = case_label
}
_i = _i + 1
}
@@ -2681,7 +2911,7 @@ var mcode = function(ast) {
func = gen_function(stmt)
func_id = s_func_counter
s_func_counter = s_func_counter + 1
push(s_functions, func)
s_functions[] = func
local_slot = find_var(name)
dest = alloc_slot()
emit_2("function", dest, func_id)
@@ -2735,7 +2965,7 @@ var mcode = function(ast) {
var saved_func = 0
var captured_this = 0
push(parent_states, saved)
parent_states[] = saved
s_instructions = []
s_vars = []
@@ -2826,7 +3056,7 @@ var mcode = function(ast) {
compiled = gen_function(fn)
func_id = s_func_counter
s_func_counter = s_func_counter + 1
push(s_functions, compiled)
s_functions[] = compiled
local_slot = find_var(fname)
dest = alloc_slot()
emit_2("function", dest, func_id)
@@ -2899,7 +3129,7 @@ var mcode = function(ast) {
saved_func = s_func_counter
// Pop parent state
pop(parent_states)
parent_states[]
restore_state(saved)
s_label_counter = saved_label
s_func_counter = saved_func
@@ -2919,7 +3149,6 @@ var mcode = function(ast) {
var local_slot = 0
var dest = 0
var statements = ast.statements
var last_expr_slot = -1
var stmt = null
var kind = null
var null_slot = 0
@@ -2967,7 +3196,7 @@ var mcode = function(ast) {
compiled = gen_function(fn)
func_id = s_func_counter
s_func_counter = s_func_counter + 1
push(s_functions, compiled)
s_functions[] = compiled
local_slot = find_var(name)
dest = alloc_slot()
emit_2("function", dest, func_id)
@@ -2986,21 +3215,18 @@ var mcode = function(ast) {
kind = stmt.kind
if (kind != null) {
if (kind == "call") {
last_expr_slot = gen_expr(stmt.expression, -1)
gen_expr(stmt.expression, -1)
} else if (kind == "return" || kind == "disrupt" ||
kind == "break" || kind == "continue") {
gen_statement(stmt)
last_expr_slot = -1
} else if (kind == "var" || kind == "def" ||
kind == "break" || kind == "continue" ||
kind == "var" || kind == "def" ||
kind == "var_list" || kind == "def_list" ||
kind == "function" || kind == "block" ||
kind == "if" || kind == "while" ||
kind == "do" || kind == "for" ||
kind == "switch") {
gen_statement(stmt)
last_expr_slot = -1
} else {
last_expr_slot = gen_expr(stmt, -1)
gen_expr(stmt, -1)
}
} else {
gen_statement(stmt)
@@ -3008,13 +3234,9 @@ var mcode = function(ast) {
_i = _i + 1
}
if (last_expr_slot >= 0) {
emit_1("return", last_expr_slot)
} else {
null_slot = alloc_slot()
emit_1("null", null_slot)
emit_1("return", null_slot)
}
result = {}
result.name = filename != null ? filename : "<eval>"

View File

@@ -34,6 +34,7 @@ if host_machine.system() == 'darwin'
fworks = [
'CoreFoundation',
'CFNetwork',
'Security',
]
foreach fkit : fworks
deps += dependency('appleframeworks', modules: fkit)
@@ -82,7 +83,9 @@ scripts = [
'internal/os.c',
'internal/fd.c',
'net/http.c',
'net/enet.c',
'net/tls.c',
'net/socket.c',
'internal/enet.c',
'archive/miniz.c',
'source/cJSON.c'
]
@@ -193,5 +196,3 @@ cell_exe = executable('cell',
# Install headers for building dynamic libraries using Cell
install_headers('source/cell.h')
install_headers('source/quickjs.h')
install_headers('source/wota.h')

View File

@@ -1,588 +0,0 @@
#include "cell.h"
#define ENET_IMPLEMENTATION
#include "enet.h"
#include <stdio.h>
#include <string.h>
#include <math.h>
static JSClassID enet_host_id;
static JSClassID enet_peer_class_id;
// GC finalizer for host objects: destroys the wrapped ENetHost, if any.
// The opaque may be NULL when the JS object was created but the host
// pointer was never attached (e.g. creation failed part-way).
static void js_enet_host_finalizer(JSRuntime *rt, JSValue val)
{
    ENetHost *host = JS_GetOpaque(val, enet_host_id);
    if (host) enet_host_destroy(host);
}
//static void js_enet_peer_mark(JSRuntime *rt, JSValueConst val, JS_MarkFunc *mark_func)
//{
// ENetPeer *peer = JS_GetOpaque(val, enet_peer_class_id);
// JS_MarkValue(rt, *(JSValue*)peer->data, mark_func);
//}
// GC finalizer for peer wrapper objects.
//
// peer_get_value() stashes a heap-allocated JSValue in peer->data; this
// releases that value and the allocation. Guards added:
//  - peer may be NULL if the opaque was never set (defensive; JS_GetOpaque
//    returns NULL on class mismatch).
//  - peer->data is reset to NULL so a peer that outlives its JS wrapper
//    does not keep a dangling pointer — a subsequent peer_get_value()
//    would otherwise dup a freed JSValue (use-after-free).
static void js_enet_peer_finalizer(JSRuntime *rt, JSValue val)
{
    ENetPeer *peer = JS_GetOpaque(val, enet_peer_class_id);
    if (!peer || !peer->data) return;
    JS_FreeValueRT(rt, *(JSValue*)peer->data);
    free(peer->data);
    peer->data = NULL;
}
// Initialize the ENet library. Must be called before using any ENet functionality.
// Raises a disrupt if enet_initialize() reports failure; returns null on success.
static JSValue js_enet_initialize(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    if (enet_initialize() != 0) return JS_RaiseDisrupt(ctx, "Error initializing ENet");
    return JS_NULL;
}
// Deinitialize the ENet library, cleaning up all resources. Call this when you no longer
// need any ENet functionality.
// Always succeeds; returns null.
static JSValue js_enet_deinitialize(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    enet_deinitialize();
    return JS_NULL;
}
// Create an ENet host for either a client-like unbound host or a server bound to a specific
// address and port:
//
// - If no record argument is provided, creates an unbound "client-like" host with the
//   defaults below (up to 1000 peers, channel limit 0 = ENet default, unlimited bandwidth).
// - If you pass a config record, its fields are read:
//     address  — "any", "broadcast", or a dotted IP string; omitted → unbound host
//     port     — port number to bind (used only when address is present)
//     channels — channel limit (0 = ENet default)
//     incoming_bandwidth / outgoing_bandwidth — bytes/sec, 0 = unlimited
//
// Throws an error if the address cannot be resolved or host creation fails.
static JSValue js_enet_host_create(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
    ENetHost *host;
    ENetAddress address;
    ENetAddress *send = &address;
    size_t peer_count = 1000;
    size_t channel_limit = 0;
    enet_uint32 incoming_bandwidth = 0;
    enet_uint32 outgoing_bandwidth = 0;
    uint32_t chan32 = 0;
    JSValue obj;
    if (argc < 1 || !JS_IsRecord(argv[0])) {
        // No config: unbound host with the defaults above.
        host = enet_host_create(NULL, peer_count, channel_limit, incoming_bandwidth, outgoing_bandwidth);
        if (!host) return JS_RaiseDisrupt(ctx, "Failed to create ENet client host");
        goto wrap;
    }
    JSValue config_obj = argv[0];
    JSValue addr_val = JS_GetPropertyStr(ctx, config_obj, "address");
    const char *addr_str = JS_IsText(addr_val) ? JS_ToCString(ctx, addr_val) : NULL;
    JS_FreeValue(ctx, addr_val);
    if (!addr_str)
        send = NULL;  // no address → create an unbound host
    else {
        JSValue port_val = JS_GetPropertyStr(ctx, config_obj, "port");
        int32_t port32 = 0;
        JS_ToInt32(ctx, &port32, port_val);
        JS_FreeValue(ctx, port_val);
        if (strcmp(addr_str, "any") == 0)
            address.host = ENET_HOST_ANY;
        else if (strcmp(addr_str, "broadcast") == 0)
            enet_address_set_host_ip(&address, "255.255.255.255");
        else {
            int err = enet_address_set_host_ip(&address, addr_str);
            if (err != 0) {
                JS_FreeCString(ctx, addr_str);
                return JS_RaiseDisrupt(ctx, "Failed to set host IP from '%s'. Error: %d", addr_str, err);
            }
        }
        address.port = (enet_uint16)port32;
        JS_FreeCString(ctx, addr_str);
    }
    JSValue chan_val = JS_GetPropertyStr(ctx, config_obj, "channels");
    // Read into a uint32_t temporary: channel_limit is size_t, and passing
    // &size_t where uint32_t* is expected only writes 4 of 8 bytes on LP64.
    JS_ToUint32(ctx, &chan32, chan_val);
    channel_limit = chan32;
    JS_FreeValue(ctx, chan_val);
    JSValue in_bw_val = JS_GetPropertyStr(ctx, config_obj, "incoming_bandwidth");
    JS_ToUint32(ctx, &incoming_bandwidth, in_bw_val);
    JS_FreeValue(ctx, in_bw_val);
    JSValue out_bw_val = JS_GetPropertyStr(ctx, config_obj, "outgoing_bandwidth");
    JS_ToUint32(ctx, &outgoing_bandwidth, out_bw_val);
    JS_FreeValue(ctx, out_bw_val);
    host = enet_host_create(send, peer_count, channel_limit, incoming_bandwidth, outgoing_bandwidth);
    if (!host) return JS_RaiseDisrupt(ctx, "Failed to create ENet host");
wrap:
    // Wrap the native host in a JS object; the class finalizer destroys it.
    obj = JS_NewObjectClass(ctx, enet_host_id);
    if (JS_IsException(obj)) {
        enet_host_destroy(host);
        return obj;
    }
    JS_SetOpaque(obj, host);
    return obj;
}
// Helper function to get a JSValue for an ENetPeer.
// Lazily creates a JS wrapper object for the peer the first time it is seen
// and caches it in peer->data, so every event involving the same peer yields
// the SAME JS object (identity-stable peers). Returns a fresh (duped)
// reference that the caller owns.
// NOTE(review): the cached reference in peer->data is presumably released by
// js_enet_peer_finalizer — confirm, and note that malloc's return is unchecked.
static JSValue peer_get_value(JSContext *ctx, ENetPeer *peer)
{
  if (!peer->data) {
    peer->data = malloc(sizeof(JSValue));
    *(JSValue*)peer->data = JS_NewObjectClass(ctx, enet_peer_class_id);
    JS_SetOpaque(*(JSValue*)peer->data, peer);
  }
  return JS_DupValue(ctx, *(JSValue*)peer->data);
}
// Poll for and process any available network events (connect, receive,
// disconnect, ...) from this host, calling the provided callback for each
// event. Loops until no more events are available in the current timeframe.
//
// :param callback: A function called once for each available event, receiving
//                  an event object {type, peer, [channelID], [data]} as its
//                  single argument.
// :param timeout: (optional) Timeout in seconds. Defaults to 0 (non-blocking).
// :return: None
static JSValue js_enet_host_service(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetHost *host = JS_GetOpaque(this_val, enet_host_id);
  if (!host) return JS_EXCEPTION;
  if (argc < 1 || !JS_IsFunction(argv[0])) return JS_RaiseDisrupt(ctx, "Expected a callback function as first argument");
  double secs = 0;  // default to a non-blocking poll; original read argv[1]
                    // unconditionally, leaving secs as NaN when absent
  if (argc > 1) JS_ToFloat64(ctx, &secs, argv[1]);
  ENetEvent event;
  while (enet_host_service(host, &event, (enet_uint32)(secs * 1000.0)) > 0) {
    JSValue event_obj = JS_NewObject(ctx);
    JS_SetPropertyStr(ctx, event_obj, "peer", peer_get_value(ctx, event.peer));
    switch (event.type) {
    case ENET_EVENT_TYPE_CONNECT:
      JS_SetPropertyStr(ctx, event_obj, "type", JS_NewString(ctx, "connect"));
      break;
    case ENET_EVENT_TYPE_RECEIVE:
      JS_SetPropertyStr(ctx, event_obj, "type", JS_NewString(ctx, "receive"));
      JS_SetPropertyStr(ctx, event_obj, "channelID", JS_NewInt32(ctx, event.channelID));
      // Pass raw data as string or ArrayBuffer
      if (event.packet->dataLength > 0) {
        JSValue data_val = js_new_blob_stoned_copy(ctx, event.packet->data, event.packet->dataLength);
        JS_SetPropertyStr(ctx, event_obj, "data", data_val);
      }
      enet_packet_destroy(event.packet);
      break;
    case ENET_EVENT_TYPE_DISCONNECT:
      JS_SetPropertyStr(ctx, event_obj, "type", JS_NewString(ctx, "disconnect"));
      break;
    case ENET_EVENT_TYPE_DISCONNECT_TIMEOUT:
      JS_SetPropertyStr(ctx, event_obj, "type", JS_NewString(ctx, "disconnect_timeout"));
      break;
    case ENET_EVENT_TYPE_NONE:
      JS_SetPropertyStr(ctx, event_obj, "type", JS_NewString(ctx, "none"));
      break;
    }
    // Deliver the event to the user's callback. (Previously the event object
    // was built and immediately freed without the callback ever being called.)
    JSValue ret = JS_Call(ctx, argv[0], JS_UNDEFINED, 1, &event_obj);
    JS_FreeValue(ctx, event_obj);
    if (JS_IsException(ret)) return ret;  // propagate callback errors
    JS_FreeValue(ctx, ret);
  }
  return JS_NULL;
}
// Initiate a connection from this host to a remote server. Throws an error if
// the hostname cannot be resolved or the connection cannot be started.
//
// :param hostname: The hostname or IP address of the remote server (e.g. "example.com" or "127.0.0.1").
// :param port: The port number to connect to.
// :return: An ENetPeer object representing the connection.
static JSValue js_enet_host_connect(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetHost *host = JS_GetOpaque(this_val, enet_host_id);
  if (!host) return JS_EXCEPTION;
  if (argc < 2) return JS_RaiseDisrupt(ctx, "Expected 2 arguments: hostname, port");
  const char *hostname = JS_ToCString(ctx, argv[0]);
  if (!hostname) return JS_EXCEPTION;
  int32_t port = 0;  // initialized: JS_ToInt32 leaves it untouched on failure
  JS_ToInt32(ctx, &port, argv[1]);
  ENetAddress address;
  // The original ignored the resolver result, silently "connecting" to an
  // uninitialized address when resolution failed.
  if (enet_address_set_host(&address, hostname) != 0) {
    JSValue e = JS_RaiseDisrupt(ctx, "Failed to resolve hostname '%s'", hostname);
    JS_FreeCString(ctx, hostname);
    return e;
  }
  JS_FreeCString(ctx, hostname);
  address.port = (enet_uint16)port;
  ENetPeer *peer = enet_host_connect(host, &address, 2, 0);  // 2 channels, no connect data
  if (!peer) return JS_RaiseDisrupt(ctx, "No available peers for initiating an ENet connection");
  return peer_get_value(ctx, peer);
}
// Immediately push out every packet queued on this host without waiting for
// the next service() call.
//
// :return: None
static JSValue js_enet_host_flush(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetHost *h = JS_GetOpaque(this_val, enet_host_id);
  if (h == NULL) return JS_EXCEPTION;
  enet_host_flush(h);
  return JS_NULL;
}
// Broadcast a string or ArrayBuffer to all connected peers on channel 0 as a
// reliable packet.
//
// :param data: A string or ArrayBuffer to broadcast to all peers.
// :return: None
static JSValue js_enet_host_broadcast(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetHost *host = JS_GetOpaque(this_val, enet_host_id);
  if (!host) return JS_EXCEPTION;
  if (argc < 1) return JS_RaiseDisrupt(ctx, "Expected a string or ArrayBuffer to broadcast");
  size_t payload_len = 0;
  const void *payload = NULL;
  const char *owned_str = NULL;  // non-NULL when we must release a C string
  if (JS_IsText(argv[0])) {
    owned_str = JS_ToCStringLen(ctx, &payload_len, argv[0]);
    if (!owned_str) return JS_EXCEPTION;
    payload = owned_str;
  } else if (js_is_blob(ctx, argv[0])) {
    payload = js_get_blob_data(ctx, &payload_len, argv[0]);
    if (!payload) return JS_EXCEPTION;
  } else {
    return JS_RaiseDisrupt(ctx, "broadcast() only accepts a string or ArrayBuffer");
  }
  // enet_packet_create copies the payload, so the C string can be freed here.
  ENetPacket *packet = enet_packet_create(payload, payload_len, ENET_PACKET_FLAG_RELIABLE);
  if (owned_str) JS_FreeCString(ctx, owned_str);
  if (!packet) return JS_RaiseDisrupt(ctx, "Failed to create ENet packet");
  enet_host_broadcast(host, 0, packet);
  return JS_NULL;
}
// Getter: the port this host's address is bound to.
static JSValue js_enet_host_get_port(JSContext *js, JSValueConst self, int argc, JSValueConst *argv)
{
  ENetHost *h = JS_GetOpaque(self, enet_host_id);
  if (h == NULL) return JS_EXCEPTION;
  return JS_NewInt32(js, h->address.port);
}
// Getter: the host's bound address rendered as an IP string, or null when it
// cannot be formatted.
static JSValue js_enet_host_get_address(JSContext *js, JSValueConst self, int argc, JSValueConst *argv)
{
  char ip[128];
  ENetHost *h = JS_GetOpaque(self, enet_host_id);
  if (h == NULL) return JS_EXCEPTION;
  if (enet_address_get_host_ip(&h->address, ip, sizeof(ip)) != 0)
    return JS_NULL;
  return JS_NewString(js, ip);
}
// Peer-level operations
// Request a graceful disconnection from this peer. The connection will close after
// pending data is sent.
//
// :return: None
// Request a graceful disconnect from this peer; the connection closes after
// pending data is delivered.
static JSValue js_enet_peer_disconnect(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  if (p == NULL) return JS_EXCEPTION;
  enet_peer_disconnect(p, 0);
  return JS_NULL;
}
// Send a string or ArrayBuffer to this peer on channel 0 as a reliable packet.
//
// :param data: A string or ArrayBuffer to send.
// :return: None
static JSValue js_enet_peer_send(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *peer = JS_GetOpaque(this_val, enet_peer_class_id);
  if (!peer) return JS_EXCEPTION;
  if (argc < 1) return JS_RaiseDisrupt(ctx, "Expected a string or ArrayBuffer to send");
  const char *data_str = NULL;
  size_t data_len = 0;
  uint8_t *buf = NULL;
  if (JS_IsText(argv[0])) {
    data_str = JS_ToCStringLen(ctx, &data_len, argv[0]);
    if (!data_str) return JS_EXCEPTION;
  } else if (js_is_blob(ctx,argv[0])) {
    buf = js_get_blob_data(ctx, &data_len, argv[0]);
    if (!buf) return JS_EXCEPTION;
  } else {
    return JS_RaiseDisrupt(ctx, "send() only accepts a string or ArrayBuffer");
  }
  // enet_packet_create copies the payload, so the C string can be freed now.
  ENetPacket *packet = enet_packet_create(data_str ? (const void*)data_str : (const void*)buf, data_len, ENET_PACKET_FLAG_RELIABLE);
  if (data_str) JS_FreeCString(ctx, data_str);
  if (!packet) return JS_RaiseDisrupt(ctx, "Failed to create ENet packet");
  if (enet_peer_send(peer, 0, packet) < 0) {
    // ENet does not take ownership of the packet when queuing fails; destroy
    // it here so it isn't leaked (the original leaked it on this path).
    enet_packet_destroy(packet);
    return JS_RaiseDisrupt(ctx, "Unable to send packet");
  }
  return JS_NULL;
}
// Forcibly terminate the connection to this peer right away, dropping any
// data still queued.
//
// :return: None
static JSValue js_enet_peer_disconnect_now(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  if (p == NULL) return JS_EXCEPTION;
  enet_peer_disconnect_now(p, 0);
  return JS_NULL;
}
// Schedule a disconnect from this peer once every queued packet has been
// delivered.
//
// :return: None
static JSValue js_enet_peer_disconnect_later(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  if (p == NULL) return JS_EXCEPTION;
  enet_peer_disconnect_later(p, 0);
  return JS_NULL;
}
// Hard-reset this peer: immediately drop the connection and clear all of its
// internal state.
//
// :return: None
static JSValue js_enet_peer_reset(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  if (p == NULL) return JS_EXCEPTION;
  enet_peer_reset(p);
  return JS_NULL;
}
// Queue a ping to this peer, used by ENet to keep latency measurements fresh.
//
// :return: None
static JSValue js_enet_peer_ping(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  if (p == NULL) return JS_EXCEPTION;
  enet_peer_ping(p);
  return JS_NULL;
}
// Configure this peer's packet throttle, which governs how ENet adapts its
// sending rate to packet loss and congestion.
//
// :param interval: The interval (ms) between throttle adjustments.
// :param acceleration: The factor to increase sending speed when conditions improve.
// :param deceleration: The factor to decrease sending speed when conditions worsen.
// :return: None
static JSValue js_enet_peer_throttle_configure(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  if (p == NULL) return JS_EXCEPTION;
  int interval = 0, accel = 0, decel = 0;
  int bad = argc < 3
         || JS_ToInt32(ctx, &interval, argv[0])
         || JS_ToInt32(ctx, &accel, argv[1])
         || JS_ToInt32(ctx, &decel, argv[2]);
  if (bad)
    return JS_RaiseDisrupt(ctx, "Expected 3 int arguments: interval, acceleration, deceleration");
  enet_peer_throttle_configure(p, interval, accel, decel);
  return JS_NULL;
}
// Set this peer's timeout parameters (all in milliseconds), controlling when
// an unresponsive connection is considered lost.
//
// :param timeout_limit: Retry-multiplier limit before timeout logic kicks in.
// :param timeout_min: Minimum time without acknowledgement before disconnect.
// :param timeout_max: Maximum time without acknowledgement before disconnect.
// :return: None
static JSValue js_enet_peer_timeout(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  if (p == NULL) return JS_EXCEPTION;
  int limit = 0, min_ms = 0, max_ms = 0;
  int bad = argc < 3
         || JS_ToInt32(ctx, &limit, argv[0])
         || JS_ToInt32(ctx, &min_ms, argv[1])
         || JS_ToInt32(ctx, &max_ms, argv[2]);
  if (bad)
    return JS_RaiseDisrupt(ctx, "Expected 3 integer arguments: timeout_limit, timeout_min, timeout_max");
  enet_peer_timeout(p, limit, min_ms, max_ms);
  return JS_NULL;
}
// Class definitions
// JS class backing ENetHost wrapper objects; the finalizer presumably
// destroys the underlying host — body not visible here, confirm.
static JSClassDef enet_host = {
  "ENetHost",
  .finalizer = js_enet_host_finalizer,
};
// JS class backing ENetPeer wrapper objects. NOTE(review): peer wrappers are
// cached in peer->data by peer_get_value; confirm the finalizer releases that
// cached reference.
static JSClassDef enet_peer_class = {
  "ENetPeer",
  .finalizer = js_enet_peer_finalizer,
  // .gc_mark = js_enet_peer_mark
};
// Resolve a hostname to an IP address string using ENet's resolver.
// (Previously a stub that resolved nothing and always returned null.)
//
// :param hostname: The name to resolve (e.g. "example.com").
// :return: The resolved address as a string, or null if the resolved address
//          cannot be formatted. Throws on resolution failure.
JSValue js_enet_resolve_hostname(JSContext *js, JSValue self, int argc, JSValue *argv)
{
  if (argc < 1) return JS_RaiseDisrupt(js, "Expected a hostname to resolve");
  const char *hostname = JS_ToCString(js, argv[0]);
  if (!hostname) return JS_EXCEPTION;
  ENetAddress address;
  if (enet_address_set_host(&address, hostname) != 0) {
    JSValue err = JS_RaiseDisrupt(js, "Failed to resolve hostname '%s'", hostname);
    JS_FreeCString(js, hostname);
    return err;
  }
  JS_FreeCString(js, hostname);
  char ip_str[128];
  if (enet_address_get_host_ip(&address, ip_str, sizeof(ip_str)) != 0)
    return JS_NULL;
  return JS_NewString(js, ip_str);
}
// Module-level functions exported on the enet module object.
static const JSCFunctionListEntry js_enet_funcs[] = {
  JS_CFUNC_DEF("initialize", 0, js_enet_initialize),
  JS_CFUNC_DEF("deinitialize", 0, js_enet_deinitialize),
  JS_CFUNC_DEF("create_host", 1, js_enet_host_create),
  JS_CFUNC_DEF("resolve_hostname", 1, js_enet_resolve_hostname),
};
// Methods installed on the ENetHost prototype.
static const JSCFunctionListEntry js_enet_host_funcs[] = {
  JS_CFUNC_DEF("service", 2, js_enet_host_service),
  JS_CFUNC_DEF("connect", 2, js_enet_host_connect),
  JS_CFUNC_DEF("flush", 0, js_enet_host_flush),
  JS_CFUNC_DEF("broadcast", 1, js_enet_host_broadcast),
  // NOTE(review): the getters below use the 4-argument CFunction signature,
  // not the 2-argument getter signature JS_CGETSET_DEF typically expects —
  // confirm before re-enabling these entries.
  // JS_CGETSET_DEF("port", js_enet_host_get_port, NULL),
  // JS_CGETSET_DEF("address", js_enet_host_get_address, NULL),
};
// Getter: smoothed round-trip time to this peer, in milliseconds.
static JSValue js_enet_peer_get_rtt(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  if (p == NULL) return JS_EXCEPTION;
  return JS_NewInt32(ctx, p->roundTripTime);
}
// Getter: incoming bandwidth limit; 0 means unlimited and is surfaced as
// Infinity.
static JSValue js_enet_peer_get_incoming_bandwidth(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  if (p == NULL) return JS_EXCEPTION;
  return p->incomingBandwidth == 0
       ? JS_NewFloat64(ctx, INFINITY)
       : JS_NewInt32(ctx, p->incomingBandwidth);
}
// Getter: outgoing bandwidth limit; 0 means unlimited and is surfaced as
// Infinity.
static JSValue js_enet_peer_get_outgoing_bandwidth(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  if (p == NULL) return JS_EXCEPTION;
  return p->outgoingBandwidth == 0
       ? JS_NewFloat64(ctx, INFINITY)
       : JS_NewInt32(ctx, p->outgoingBandwidth);
}
// Getter: timestamp of the last packet sent to this peer.
static JSValue js_enet_peer_get_last_send_time(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  if (p == NULL) return JS_EXCEPTION;
  return JS_NewInt32(ctx, p->lastSendTime);
}
// Getter: timestamp of the last packet received from this peer.
static JSValue js_enet_peer_get_last_receive_time(JSContext *ctx, JSValueConst this_val, int argc, JSValueConst *argv)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  if (p == NULL) return JS_EXCEPTION;
  return JS_NewInt32(ctx, p->lastReceiveTime);
}
// Getter: this peer's MTU; yields Infinity when the opaque peer is missing
// (keeps the original's fallback behavior).
static JSValue js_enet_peer_get_mtu(JSContext *ctx, JSValueConst this_val)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  return p ? JS_NewInt32(ctx, p->mtu) : JS_NewFloat64(ctx, INFINITY);
}
// Getter: total outgoing data counter for this peer.
static JSValue js_enet_peer_get_outgoing_data_total(JSContext *ctx, JSValueConst this_val)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  return p ? JS_NewInt32(ctx, p->outgoingDataTotal) : JS_NewFloat64(ctx, INFINITY);
}
// Getter: total incoming data counter for this peer.
static JSValue js_enet_peer_get_incoming_data_total(JSContext *ctx, JSValueConst this_val)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  return p ? JS_NewInt32(ctx, p->incomingDataTotal) : JS_NewFloat64(ctx, INFINITY);
}
// Getter: round-trip-time variance for this peer.
static JSValue js_enet_peer_get_rtt_variance(JSContext *ctx, JSValueConst this_val)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  return p ? JS_NewInt32(ctx, p->roundTripTimeVariance) : JS_NewFloat64(ctx, INFINITY);
}
// Getter: ENet's packet-loss metric for this peer.
static JSValue js_enet_peer_get_packet_loss(JSContext *ctx, JSValueConst this_val)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  return p ? JS_NewInt32(ctx, p->packetLoss) : JS_NewFloat64(ctx, INFINITY);
}
// Getter: numeric ENetPeerState for this peer, or -1 when the opaque peer is
// missing.
static JSValue js_enet_peer_get_state(JSContext *ctx, JSValueConst this_val)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  return p ? JS_NewInt32(ctx, p->state) : JS_NewInt32(ctx, -1);
}
// Getter: bytes of reliable data currently in transit to this peer.
static JSValue js_enet_peer_get_reliable_data_in_transit(JSContext *ctx, JSValueConst this_val)
{
  ENetPeer *p = JS_GetOpaque(this_val, enet_peer_class_id);
  return p ? JS_NewInt32(ctx, p->reliableDataInTransit) : JS_NewFloat64(ctx, INFINITY);
}
// Getter: the remote port of this peer's address.
static JSValue js_enet_peer_get_port(JSContext *js, JSValueConst self)
{
  ENetPeer *peer = JS_GetOpaque(self, enet_peer_class_id);
  // Guard against a missing opaque pointer (the original dereferenced it
  // unconditionally; every sibling getter checks first).
  if (!peer) return JS_EXCEPTION;
  return JS_NewUint32(js, peer->address.port);
}
// Getter: the peer's remote address as an IP string, or null when it cannot
// be formatted.
static JSValue js_enet_peer_get_address(JSContext *js, JSValueConst self)
{
  ENetPeer *peer = JS_GetOpaque(self, enet_peer_class_id);
  // Guard against a missing opaque pointer (the original dereferenced it
  // unconditionally; every sibling getter checks first).
  if (!peer) return JS_EXCEPTION;
  char ip_str[128];
  if (enet_address_get_host_ip(&peer->address, ip_str, sizeof(ip_str)) != 0)
    return JS_NULL;
  return JS_NewString(js, ip_str);
}
// Methods installed on the ENetPeer prototype.
static const JSCFunctionListEntry js_enet_peer_funcs[] = {
  JS_CFUNC_DEF("send", 1, js_enet_peer_send),
  JS_CFUNC_DEF("disconnect", 0, js_enet_peer_disconnect),
  JS_CFUNC_DEF("disconnect_now", 0, js_enet_peer_disconnect_now),
  JS_CFUNC_DEF("disconnect_later", 0, js_enet_peer_disconnect_later),
  JS_CFUNC_DEF("reset", 0, js_enet_peer_reset),
  JS_CFUNC_DEF("ping", 0, js_enet_peer_ping),
  JS_CFUNC_DEF("throttle_configure", 3, js_enet_peer_throttle_configure),
  JS_CFUNC_DEF("timeout", 3, js_enet_peer_timeout),
  // NOTE(review): the getter entries below are disabled; some of the getter
  // functions use the 4-argument CFunction signature and others the
  // 2-argument getter signature — unify before re-enabling.
  // JS_CGETSET_DEF("rtt", js_enet_peer_get_rtt, NULL),
  // JS_CGETSET_DEF("incoming_bandwidth", js_enet_peer_get_incoming_bandwidth, NULL),
  // JS_CGETSET_DEF("outgoing_bandwidth", js_enet_peer_get_outgoing_bandwidth, NULL),
  // JS_CGETSET_DEF("last_send_time", js_enet_peer_get_last_send_time, NULL),
  // JS_CGETSET_DEF("last_receive_time", js_enet_peer_get_last_receive_time, NULL),
  // JS_CGETSET_DEF("mtu", js_enet_peer_get_mtu, NULL),
  // JS_CGETSET_DEF("outgoing_data_total", js_enet_peer_get_outgoing_data_total, NULL),
  // JS_CGETSET_DEF("incoming_data_total", js_enet_peer_get_incoming_data_total, NULL),
  // JS_CGETSET_DEF("rtt_variance", js_enet_peer_get_rtt_variance, NULL),
  // JS_CGETSET_DEF("packet_loss", js_enet_peer_get_packet_loss, NULL),
  // JS_CGETSET_DEF("state", js_enet_peer_get_state, NULL),
  // JS_CGETSET_DEF("reliable_data_in_transit", js_enet_peer_get_reliable_data_in_transit, NULL),
  // JS_CGETSET_DEF("port", js_enet_peer_get_port, NULL),
  // JS_CGETSET_DEF("address", js_enet_peer_get_address, NULL),
};
// Module entry point for the ENet bindings: registers the ENetHost and
// ENetPeer classes, installs their prototype method tables, and returns the
// export object carrying the module-level functions (initialize,
// deinitialize, create_host, resolve_hostname). JS_FRAME/JS_ROOT/JS_RETURN
// are project macros that presumably manage GC rooting of the locals —
// defined elsewhere, confirm semantics there.
JSValue js_core_enet_use(JSContext *ctx)
{
  JS_FRAME(ctx);
  JS_NewClassID(&enet_host_id);
  JS_NewClass(ctx, enet_host_id, &enet_host);
  JS_ROOT(host_proto, JS_NewObject(ctx));
  JS_SetPropertyFunctionList(ctx, host_proto.val, js_enet_host_funcs, countof(js_enet_host_funcs));
  JS_SetClassProto(ctx, enet_host_id, host_proto.val);
  JS_NewClassID(&enet_peer_class_id);
  JS_NewClass(ctx, enet_peer_class_id, &enet_peer_class);
  JS_ROOT(peer_proto, JS_NewObject(ctx));
  JS_SetPropertyFunctionList(ctx, peer_proto.val, js_enet_peer_funcs, countof(js_enet_peer_funcs));
  JS_SetClassProto(ctx, enet_peer_class_id, peer_proto.val);
  JS_ROOT(export_obj, JS_NewObject(ctx));
  JS_SetPropertyFunctionList(ctx, export_obj.val, js_enet_funcs, countof(js_enet_funcs));
  JS_RETURN(export_obj.val);
}

View File

@@ -321,7 +321,7 @@ static const JSCFunctionListEntry js_http_funcs[] = {
JS_CFUNC_DEF("fetch", 2, js_fetch_picoparser),
};
JSValue js_core_http_use(JSContext *js) {
JSValue js_core_net_http_use(JSContext *js) {
JS_FRAME(js);
par_easycurl_init(0); // Initialize platform HTTP backend
JS_ROOT(mod, JS_NewObject(js));

View File

@@ -24,6 +24,9 @@
#include <stdlib.h>
#include <errno.h>
#include <stdio.h>
#ifndef _WIN32
#include <fcntl.h>
#endif
// Helper to convert JS value to file descriptor
static int js2fd(JSContext *ctx, JSValueConst val)
@@ -64,7 +67,6 @@ JSC_CCALL(socket_getaddrinfo,
else if (strcmp(family, "AF_INET6") == 0) hints.ai_family = AF_INET6;
JS_FreeCString(js, family);
}
JS_FreeValue(js, val);
val = JS_GetPropertyStr(js, argv[2], "socktype");
if (!JS_IsNull(val)) {
@@ -73,19 +75,16 @@ JSC_CCALL(socket_getaddrinfo,
else if (strcmp(socktype, "SOCK_DGRAM") == 0) hints.ai_socktype = SOCK_DGRAM;
JS_FreeCString(js, socktype);
}
JS_FreeValue(js, val);
val = JS_GetPropertyStr(js, argv[2], "flags");
if (!JS_IsNull(val)) {
hints.ai_flags = js2number(js, val);
}
JS_FreeValue(js, val);
val = JS_GetPropertyStr(js, argv[2], "passive");
if (JS_ToBool(js, val)) {
hints.ai_flags |= AI_PASSIVE;
}
JS_FreeValue(js, val);
}
int status = getaddrinfo(node, service, &hints, &res);
@@ -570,6 +569,103 @@ JSC_CCALL(socket_close,
return JS_NULL;
)
JSC_CCALL(socket_on_readable,
int sockfd = js2fd(js, argv[0]);
if (sockfd < 0) return JS_EXCEPTION;
if (!JS_IsFunction(argv[1]))
return JS_RaiseDisrupt(js, "on_readable: callback must be a function");
actor_watch_readable(js, sockfd, argv[1]);
return JS_NULL;
)
JSC_CCALL(socket_unwatch,
int sockfd = js2fd(js, argv[0]);
if (sockfd < 0) return JS_EXCEPTION;
actor_unwatch(js, sockfd);
return JS_NULL;
)
JSC_CCALL(socket_on_writable,
int sockfd = js2fd(js, argv[0]);
if (sockfd < 0) return JS_EXCEPTION;
if (!JS_IsFunction(argv[1]))
return JS_RaiseDisrupt(js, "on_writable: callback must be a function");
actor_watch_writable(js, sockfd, argv[1]);
return JS_NULL;
)
JSC_CCALL(socket_setnonblock,
int sockfd = js2fd(js, argv[0]);
if (sockfd < 0) return JS_EXCEPTION;
#ifdef _WIN32
u_long mode = 1;
if (ioctlsocket(sockfd, FIONBIO, &mode) != 0)
return JS_RaiseDisrupt(js, "setnonblock failed");
#else
int flags = fcntl(sockfd, F_GETFL, 0);
if (flags < 0 || fcntl(sockfd, F_SETFL, flags | O_NONBLOCK) < 0)
return JS_RaiseDisrupt(js, "setnonblock failed: %s", strerror(errno));
#endif
return JS_NULL;
)
JSC_CCALL(socket_getsockopt,
int sockfd = js2fd(js, argv[0]);
if (sockfd < 0) return JS_EXCEPTION;
int level = SOL_SOCKET;
int optname = 0;
// Parse level
if (JS_IsText(argv[1])) {
const char *level_str = JS_ToCString(js, argv[1]);
if (strcmp(level_str, "SOL_SOCKET") == 0) level = SOL_SOCKET;
else if (strcmp(level_str, "IPPROTO_TCP") == 0) level = IPPROTO_TCP;
else if (strcmp(level_str, "IPPROTO_IP") == 0) level = IPPROTO_IP;
else if (strcmp(level_str, "IPPROTO_IPV6") == 0) level = IPPROTO_IPV6;
JS_FreeCString(js, level_str);
} else {
level = js2number(js, argv[1]);
}
// Parse option name
if (JS_IsText(argv[2])) {
const char *opt_str = JS_ToCString(js, argv[2]);
if (strcmp(opt_str, "SO_ERROR") == 0) optname = SO_ERROR;
else if (strcmp(opt_str, "SO_REUSEADDR") == 0) optname = SO_REUSEADDR;
else if (strcmp(opt_str, "SO_KEEPALIVE") == 0) optname = SO_KEEPALIVE;
else if (strcmp(opt_str, "SO_BROADCAST") == 0) optname = SO_BROADCAST;
JS_FreeCString(js, opt_str);
} else {
optname = js2number(js, argv[2]);
}
int optval = 0;
socklen_t optlen = sizeof(optval);
if (getsockopt(sockfd, level, optname, &optval, &optlen) < 0)
return JS_RaiseDisrupt(js, "getsockopt failed: %s", strerror(errno));
return JS_NewInt32(js, optval);
)
JSC_CCALL(socket_send_self,
if (argc < 1 || !JS_IsText(argv[0]))
return JS_RaiseDisrupt(js, "send_self: expects a text argument");
const char *msg = JS_ToCString(js, argv[0]);
WotaBuffer wb;
wota_buffer_init(&wb, 16);
wota_write_record(&wb, 1);
wota_write_text(&wb, "text");
wota_write_text(&wb, msg);
JS_FreeCString(js, msg);
const char *err = JS_SendMessage(js, &wb);
if (err) {
wota_buffer_free(&wb);
return JS_RaiseDisrupt(js, "send_self failed: %s", err);
}
return JS_NULL;
)
static const JSCFunctionListEntry js_socket_funcs[] = {
MIST_FUNC_DEF(socket, getaddrinfo, 3),
MIST_FUNC_DEF(socket, socket, 3),
@@ -587,6 +683,12 @@ static const JSCFunctionListEntry js_socket_funcs[] = {
MIST_FUNC_DEF(socket, gai_strerror, 1),
MIST_FUNC_DEF(socket, setsockopt, 4),
MIST_FUNC_DEF(socket, close, 1),
MIST_FUNC_DEF(socket, on_readable, 2),
MIST_FUNC_DEF(socket, on_writable, 2),
MIST_FUNC_DEF(socket, unwatch, 1),
MIST_FUNC_DEF(socket, setnonblock, 1),
MIST_FUNC_DEF(socket, getsockopt, 3),
MIST_FUNC_DEF(socket, send_self, 1),
};
JSValue js_core_socket_use(JSContext *js) {
@@ -611,6 +713,8 @@ JSValue js_core_socket_use(JSContext *js) {
JS_SetPropertyStr(js, mod.val, "SOL_SOCKET", JS_NewInt32(js, SOL_SOCKET));
JS_SetPropertyStr(js, mod.val, "SO_REUSEADDR", JS_NewInt32(js, SO_REUSEADDR));
JS_SetPropertyStr(js, mod.val, "SO_ERROR", JS_NewInt32(js, SO_ERROR));
JS_SetPropertyStr(js, mod.val, "SO_KEEPALIVE", JS_NewInt32(js, SO_KEEPALIVE));
JS_RETURN(mod.val);
}

238
net/tls.c Normal file
View File

@@ -0,0 +1,238 @@
#include "cell.h"
#include <string.h>
#include <stdlib.h>
#include <errno.h>
#include <stdio.h>
#if defined(__APPLE__)
/* SecureTransport — deprecated but functional, no external deps */
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
#include <Security/Security.h>
#include <Security/SecureTransport.h>
#include <sys/socket.h>
#include <unistd.h>
#include <poll.h>
typedef struct {
SSLContextRef ssl;
int fd;
} tls_ctx;
static void tls_ctx_free(JSRuntime *rt, tls_ctx *ctx) {
if (!ctx) return;
if (ctx->ssl) {
SSLClose(ctx->ssl);
CFRelease(ctx->ssl);
}
if (ctx->fd >= 0)
close(ctx->fd);
free(ctx);
}
QJSCLASS(tls_ctx,)
static OSStatus tls_read_cb(SSLConnectionRef conn, void *data, size_t *len) {
int fd = *(const int *)conn;
size_t requested = *len;
size_t total = 0;
while (total < requested) {
ssize_t n = read(fd, (char *)data + total, requested - total);
if (n > 0) {
total += n;
} else if (n == 0) {
*len = total;
return errSSLClosedGraceful;
} else {
if (errno == EAGAIN || errno == EWOULDBLOCK) {
*len = total;
return (total > 0) ? noErr : errSSLWouldBlock;
}
*len = total;
return errSSLClosedAbort;
}
}
*len = total;
return noErr;
}
static OSStatus tls_write_cb(SSLConnectionRef conn, const void *data, size_t *len) {
int fd = *(const int *)conn;
size_t requested = *len;
size_t total = 0;
while (total < requested) {
ssize_t n = write(fd, (const char *)data + total, requested - total);
if (n > 0) {
total += n;
} else if (n == 0) {
*len = total;
return errSSLClosedGraceful;
} else {
if (errno == EAGAIN || errno == EWOULDBLOCK) {
*len = total;
return (total > 0) ? noErr : errSSLWouldBlock;
}
*len = total;
return errSSLClosedAbort;
}
}
*len = total;
return noErr;
}
/* tls.wrap(fd, hostname) -> ctx */
JSC_CCALL(tls_wrap,
int fd = -1;
if (JS_ToInt32(js, &fd, argv[0]) < 0)
return JS_RaiseDisrupt(js, "tls.wrap: fd must be a number");
const char *hostname = JS_ToCString(js, argv[1]);
if (!hostname)
return JS_RaiseDisrupt(js, "tls.wrap: hostname must be a string");
tls_ctx *ctx = calloc(1, sizeof(tls_ctx));
ctx->fd = fd;
ctx->ssl = SSLCreateContext(NULL, kSSLClientSide, kSSLStreamType);
if (!ctx->ssl) {
free(ctx);
JS_FreeCString(js, hostname);
return JS_RaiseDisrupt(js, "tls.wrap: SSLCreateContext failed");
}
SSLSetIOFuncs(ctx->ssl, tls_read_cb, tls_write_cb);
SSLSetConnection(ctx->ssl, &ctx->fd);
SSLSetPeerDomainName(ctx->ssl, hostname, strlen(hostname));
JS_FreeCString(js, hostname);
/* Retry handshake on non-blocking sockets (errSSLWouldBlock) */
OSStatus status;
for (int attempts = 0; attempts < 200; attempts++) {
status = SSLHandshake(ctx->ssl);
if (status == noErr) break;
if (status != errSSLWouldBlock) break;
struct pollfd pfd = { .fd = ctx->fd, .events = POLLIN | POLLOUT };
poll(&pfd, 1, 50);
}
if (status != noErr) {
CFRelease(ctx->ssl);
ctx->ssl = NULL;
ctx->fd = -1; /* don't close caller's fd */
free(ctx);
return JS_RaiseDisrupt(js, "tls.wrap: handshake failed (status %d)", (int)status);
}
return tls_ctx2js(js, ctx);
)
/* tls.send(ctx, data) -> bytes_sent */
JSC_CCALL(tls_send,
tls_ctx *ctx = js2tls_ctx(js, argv[0]);
if (!ctx || !ctx->ssl)
return JS_RaiseDisrupt(js, "tls.send: invalid context");
size_t len;
size_t written = 0;
OSStatus status;
if (JS_IsText(argv[1])) {
const char *data = JS_ToCStringLen(js, &len, argv[1]);
status = SSLWrite(ctx->ssl, data, len, &written);
JS_FreeCString(js, data);
} else {
unsigned char *data = js_get_blob_data(js, &len, argv[1]);
if (!data)
return JS_RaiseDisrupt(js, "tls.send: invalid data");
status = SSLWrite(ctx->ssl, data, len, &written);
}
if (status != noErr && status != errSSLWouldBlock)
return JS_RaiseDisrupt(js, "tls.send: write failed (status %d)", (int)status);
return JS_NewInt64(js, (int64_t)written);
)
/* tls.recv(ctx, len) -> blob */
JSC_CCALL(tls_recv,
tls_ctx *ctx = js2tls_ctx(js, argv[0]);
if (!ctx || !ctx->ssl)
return JS_RaiseDisrupt(js, "tls.recv: invalid context");
size_t len = 4096;
if (argc > 1) len = js2number(js, argv[1]);
void *out;
ret = js_new_blob_alloc(js, len, &out);
if (JS_IsException(ret)) return ret;
size_t received = 0;
OSStatus status = SSLRead(ctx->ssl, out, len, &received);
if (status != noErr && status != errSSLWouldBlock &&
status != errSSLClosedGraceful) {
return JS_RaiseDisrupt(js, "tls.recv: read failed (status %d)", (int)status);
}
js_blob_stone(ret, received);
return ret;
)
/* tls.close(ctx) -> null */
JSC_CCALL(tls_close,
tls_ctx *ctx = js2tls_ctx(js, argv[0]);
if (!ctx) return JS_NULL;
if (ctx->ssl) {
SSLClose(ctx->ssl);
CFRelease(ctx->ssl);
ctx->ssl = NULL;
}
if (ctx->fd >= 0) {
close(ctx->fd);
ctx->fd = -1;
}
return JS_NULL;
)
/* tls.fd(ctx) -> number — get underlying fd for on_readable */
JSC_CCALL(tls_fd,
tls_ctx *ctx = js2tls_ctx(js, argv[0]);
if (!ctx)
return JS_RaiseDisrupt(js, "tls.fd: invalid context");
return JS_NewInt32(js, ctx->fd);
)
/* tls.on_readable(ctx, callback) -> null */
JSC_CCALL(tls_on_readable,
tls_ctx *ctx = js2tls_ctx(js, argv[0]);
if (!ctx)
return JS_RaiseDisrupt(js, "tls.on_readable: invalid context");
if (!JS_IsFunction(argv[1]))
return JS_RaiseDisrupt(js, "tls.on_readable: callback must be a function");
actor_watch_readable(js, ctx->fd, argv[1]);
return JS_NULL;
)
static const JSCFunctionListEntry js_tls_funcs[] = {
MIST_FUNC_DEF(tls, wrap, 2),
MIST_FUNC_DEF(tls, send, 2),
MIST_FUNC_DEF(tls, recv, 2),
MIST_FUNC_DEF(tls, close, 1),
MIST_FUNC_DEF(tls, fd, 1),
MIST_FUNC_DEF(tls, on_readable, 2),
};
JSValue js_core_net_tls_use(JSContext *js) {
JS_FRAME(js);
QJSCLASSPREP_NO_FUNCS(tls_ctx);
JS_ROOT(mod, JS_NewObject(js));
JS_SetPropertyFunctionList(js, mod.val, js_tls_funcs, countof(js_tls_funcs));
JS_RETURN(mod.val);
}
#pragma clang diagnostic pop
#else
/* Stub for non-Apple platforms — TLS not yet implemented */
JSValue js_core_net_tls_use(JSContext *js) {
return JS_RaiseDisrupt(js, "TLS not available on this platform");
}
#endif

View File

@@ -88,9 +88,9 @@ var packages = ['core']
var deps = pkg_tools.gather_dependencies(target_package)
for (i = 0; i < length(deps); i++) {
push(packages, deps[i])
packages[] = deps[i]
}
push(packages, target_package)
packages[] = target_package
// Remove duplicates
var unique_packages = []
@@ -98,7 +98,7 @@ var seen = {}
for (i = 0; i < length(packages); i++) {
if (!seen[packages[i]]) {
seen[packages[i]] = true
push(unique_packages, packages[i])
unique_packages[] = packages[i]
}
}
packages = unique_packages

View File

@@ -198,7 +198,7 @@ package.find_packages = function(dir) {
var list = fd.readdir(dir)
if (!list) return found
if (fd.is_file(dir + '/cell.toml'))
push(found, dir)
found[] = dir
arrfor(list, function(item) {
if (item == '.' || item == '..' || item == '.cell' || item == '.git') return
var full = dir + '/' + item
@@ -207,7 +207,7 @@ package.find_packages = function(dir) {
if (st && st.isDirectory) {
sub = package.find_packages(full)
arrfor(sub, function(p) {
push(found, p)
found[] = p
})
}
})
@@ -227,14 +227,14 @@ package.list_modules = function(name) {
var stem = null
for (i = 0; i < length(files); i++) {
if (ends_with(files[i], '.cm')) {
push(modules, text(files[i], 0, -3))
modules[] = text(files[i], 0, -3)
}
}
var c_files = package.get_c_files(name, null, true)
for (i = 0; i < length(c_files); i++) {
stem = ends_with(c_files[i], '.cpp') ? text(c_files[i], 0, -4) : text(c_files[i], 0, -2)
if (find(modules, function(m) { return m == stem }) == null)
push(modules, stem)
modules[] = stem
}
return modules
}
@@ -245,7 +245,7 @@ package.list_programs = function(name) {
var i = 0
for (i = 0; i < length(files); i++) {
if (ends_with(files[i], '.ce')) {
push(programs, text(files[i], 0, -3))
programs[] = text(files[i], 0, -3)
}
}
return programs
@@ -360,7 +360,7 @@ package.get_c_files = function(name, target, exclude_main) {
basename = fd.basename(selected)
if (basename == 'main.c' || starts_with(basename, 'main_')) return
}
push(result, selected)
result[] = selected
}
})

156
parse.cm
View File

@@ -90,12 +90,12 @@ var parse = function(tokens, src, filename, tokenizer) {
var parse_error = function(token, msg) {
if (error_count >= 5) return null
error_count = error_count + 1
push(errors, {
errors[] = {
message: msg,
line: token.from_row + 1,
column: token.from_column + 1,
offset: token.at
})
}
}
var _keywords = {
@@ -230,8 +230,8 @@ var parse = function(tokens, src, filename, tokenizer) {
if (tv[tvi] == "\\" && tvi + 1 < tvlen) {
esc_ch = tv[tvi + 1]
esc_val = template_escape_map[esc_ch]
if (esc_val != null) { push(fmt_parts, esc_val) }
else { push(fmt_parts, esc_ch) }
if (esc_val != null) { fmt_parts[] = esc_val }
else { fmt_parts[] = esc_ch }
tvi = tvi + 2
} else if (tv[tvi] == "$" && tvi + 1 < tvlen && tv[tvi + 1] == "{") {
tvi = tvi + 2
@@ -239,27 +239,27 @@ var parse = function(tokens, src, filename, tokenizer) {
expr_parts = []
while (tvi < tvlen && depth > 0) {
tc = tv[tvi]
if (tc == "{") { depth = depth + 1; push(expr_parts, tc); tvi = tvi + 1 }
if (tc == "{") { depth = depth + 1; expr_parts[] = tc; tvi = tvi + 1 }
else if (tc == "}") {
depth = depth - 1
if (depth > 0) { push(expr_parts, tc) }
if (depth > 0) { expr_parts[] = tc }
tvi = tvi + 1
}
else if (tc == "'" || tc == "\"" || tc == "`") {
tq = tc
push(expr_parts, tc)
expr_parts[] = tc
tvi = tvi + 1
while (tvi < tvlen && tv[tvi] != tq) {
if (tv[tvi] == "\\" && tvi + 1 < tvlen) {
push(expr_parts, tv[tvi])
expr_parts[] = tv[tvi]
tvi = tvi + 1
}
push(expr_parts, tv[tvi])
expr_parts[] = tv[tvi]
tvi = tvi + 1
}
if (tvi < tvlen) { push(expr_parts, tv[tvi]); tvi = tvi + 1 }
if (tvi < tvlen) { expr_parts[] = tv[tvi]; tvi = tvi + 1 }
} else {
push(expr_parts, tc)
expr_parts[] = tc
tvi = tvi + 1
}
}
@@ -274,14 +274,14 @@ var parse = function(tokens, src, filename, tokenizer) {
} else {
sub_expr = sub_stmt
}
push(tpl_list, sub_expr)
tpl_list[] = sub_expr
}
push(fmt_parts, "{")
push(fmt_parts, text(idx))
push(fmt_parts, "}")
fmt_parts[] = "{"
fmt_parts[] = text(idx)
fmt_parts[] = "}"
idx = idx + 1
} else {
push(fmt_parts, tv[tvi])
fmt_parts[] = tv[tvi]
tvi = tvi + 1
}
}
@@ -332,7 +332,7 @@ var parse = function(tokens, src, filename, tokenizer) {
advance()
while (tok.kind != "]" && tok.kind != "eof") {
elem = parse_assign_expr()
if (elem != null) push(list, elem)
if (elem != null) list[] = elem
if (tok.kind == ",") advance()
else break
}
@@ -395,7 +395,7 @@ var parse = function(tokens, src, filename, tokenizer) {
advance()
param.expression = parse_assign_expr()
}
push(params, param)
params[] = param
} else {
parse_error(tok, "expected parameter name")
break
@@ -436,7 +436,7 @@ var parse = function(tokens, src, filename, tokenizer) {
} else {
parse_error(tok, "expected ':' after property name")
}
push(list, pair)
list[] = pair
if (tok.kind == ",") advance()
else if (tok.kind == "{") {
if (right && right.kind == "(") {
@@ -473,17 +473,17 @@ var parse = function(tokens, src, filename, tokenizer) {
flags_parts = []
while (rpos < _src_len && src[rpos] != "/") {
if (src[rpos] == "\\" && rpos + 1 < _src_len) {
push(pattern_parts, src[rpos])
push(pattern_parts, src[rpos + 1])
pattern_parts[] = src[rpos]
pattern_parts[] = src[rpos + 1]
rpos = rpos + 2
} else {
push(pattern_parts, src[rpos])
pattern_parts[] = src[rpos]
rpos = rpos + 1
}
}
if (rpos < _src_len) rpos = rpos + 1
while (rpos < _src_len && is_letter(src[rpos])) {
push(flags_parts, src[rpos])
flags_parts[] = src[rpos]
rpos = rpos + 1
}
node.pattern = text(pattern_parts)
@@ -557,7 +557,7 @@ var parse = function(tokens, src, filename, tokenizer) {
new_node.list = args_list
while (tok.kind != ")" && tok.kind != "eof") {
arg = parse_assign_expr()
if (arg != null) push(args_list, arg)
if (arg != null) args_list[] = arg
if (tok.kind == ",") advance()
else break
}
@@ -830,7 +830,7 @@ var parse = function(tokens, src, filename, tokenizer) {
before = cursor
stmt = parse_statement()
if (stmt != null) {
push(stmts, stmt)
stmts[] = stmt
} else if (cursor == before) {
sync_to_statement()
}
@@ -872,14 +872,14 @@ var parse = function(tokens, src, filename, tokenizer) {
param.name = tok.value
pname = tok.value
if (find(prev_names, pname) != null) parse_error(tok, "duplicate parameter name '" + pname + "'")
push(prev_names, pname)
prev_names[] = pname
advance()
ast_node_end(param)
if (tok.kind == "=" || tok.kind == "|") {
advance()
param.expression = parse_assign_expr()
}
push(params, param)
params[] = param
} else {
parse_error(tok, "expected parameter name")
break
@@ -959,7 +959,7 @@ var parse = function(tokens, src, filename, tokenizer) {
param.name = tok.value
advance()
ast_node_end(param)
push(params, param)
params[] = param
} else if (tok.kind == "(") {
advance()
prev_names = []
@@ -969,14 +969,14 @@ var parse = function(tokens, src, filename, tokenizer) {
param.name = tok.value
pname = tok.value
if (find(prev_names, pname) != null) parse_error(tok, "duplicate parameter name '" + pname + "'")
push(prev_names, pname)
prev_names[] = pname
advance()
ast_node_end(param)
if (tok.kind == "=" || tok.kind == "|") {
advance()
param.expression = parse_assign_expr()
}
push(params, param)
params[] = param
} else {
parse_error(tok, "expected parameter name")
break
@@ -1010,7 +1010,7 @@ var parse = function(tokens, src, filename, tokenizer) {
expr = parse_assign_expr()
ret.expression = expr
ast_node_end(ret)
push(stmts, ret)
stmts[] = ret
node.statements = stmts
}
@@ -1110,7 +1110,7 @@ var parse = function(tokens, src, filename, tokenizer) {
parse_error(start, "'var' declarations must be initialized; use 'var " + var_name + " = null' if no value is needed")
}
ast_node_end(node)
push(decls, node)
decls[] = node
decl_count = decl_count + 1
if (tok.kind == ",") advance()
else break
@@ -1142,7 +1142,7 @@ var parse = function(tokens, src, filename, tokenizer) {
_control_depth = _control_depth + 1
_expecting_body = true
body = parse_statement()
if (body != null) push(then_stmts, body)
if (body != null) then_stmts[] = body
else_ifs = []
node.list = else_ifs
if (tok.kind == "else") {
@@ -1151,7 +1151,7 @@ var parse = function(tokens, src, filename, tokenizer) {
_control_depth = saved_cd
_control_type = saved_ct
elif = parse_statement()
if (elif != null) push(else_ifs, elif)
if (elif != null) else_ifs[] = elif
ast_node_end(node)
return node
} else {
@@ -1159,7 +1159,7 @@ var parse = function(tokens, src, filename, tokenizer) {
node.else = else_stmts
_expecting_body = true
body = parse_statement()
if (body != null) push(else_stmts, body)
if (body != null) else_stmts[] = body
}
}
_control_depth = saved_cd
@@ -1185,7 +1185,7 @@ var parse = function(tokens, src, filename, tokenizer) {
_control_depth = _control_depth + 1
_expecting_body = true
body = parse_statement()
if (body != null) push(stmts, body)
if (body != null) stmts[] = body
_control_depth = saved_cd
_control_type = saved_ct
ast_node_end(node)
@@ -1203,7 +1203,7 @@ var parse = function(tokens, src, filename, tokenizer) {
_control_depth = _control_depth + 1
_expecting_body = true
body = parse_statement()
if (body != null) push(stmts, body)
if (body != null) stmts[] = body
_control_depth = saved_cd
_control_type = saved_ct
if (tok.kind == "while") advance()
@@ -1256,7 +1256,7 @@ var parse = function(tokens, src, filename, tokenizer) {
_control_depth = _control_depth + 1
_expecting_body = true
body = parse_statement()
if (body != null) push(stmts, body)
if (body != null) stmts[] = body
_control_depth = saved_cd
_control_type = saved_ct
ast_node_end(node)
@@ -1402,9 +1402,9 @@ var parse = function(tokens, src, filename, tokenizer) {
stmt = parse_statement()
if (stmt != null) {
if (stmt.kind == "function") {
push(functions, stmt)
functions[] = stmt
} else {
push(statements, stmt)
statements[] = stmt
}
} else if (cursor == before) {
sync_to_statement()
@@ -1420,12 +1420,13 @@ var parse = function(tokens, src, filename, tokenizer) {
var sem_errors = []
var scopes_array = []
var intrinsics = []
var hoisted_fn_refs = []
var sem_error = function(node, msg) {
var err = {message: msg}
if (node.from_row != null) err.line = node.from_row + 1
if (node.from_column != null) err.column = node.from_column + 1
push(sem_errors, err)
sem_errors[] = err
}
var make_scope = function(parent, fn_nr, opts) {
@@ -1441,14 +1442,17 @@ var parse = function(tokens, src, filename, tokenizer) {
}
var sem_add_var = function(scope, name, make_opts) {
push(scope.vars, {
var entry = {
name: name,
is_const: make_opts.is_const == true,
make: make_opts.make,
function_nr: make_opts.fn_nr,
nr_uses: 0,
closure: 0
})
}
if (make_opts.reached == false) entry.reached = false
if (make_opts.decl_line != null) entry.decl_line = make_opts.decl_line
scope.vars[] = entry
}
var sem_lookup_var = function(scope, name) {
@@ -1499,7 +1503,7 @@ var parse = function(tokens, src, filename, tokenizer) {
}
var sem_add_intrinsic = function(name) {
if (find(intrinsics, name) == null) push(intrinsics, name)
if (find(intrinsics, name) == null) intrinsics[] = name
}
var functino_names = {
@@ -1567,39 +1571,17 @@ var parse = function(tokens, src, filename, tokenizer) {
var sem_check_expr = null
var sem_check_stmt = null
var sem_predeclare_vars = function(scope, stmts) {
var sem_predeclare_fns = function(scope, stmts) {
var i = 0
var stmt = null
var kind = null
var name = null
var item = null
var ik = null
var j = 0
while (i < length(stmts)) {
stmt = stmts[i]
kind = stmt.kind
if (kind == "function") {
if (stmt.kind == "function") {
name = stmt.name
if (name != null && sem_find_var(scope, name) == null) {
sem_add_var(scope, name, {make: "function", fn_nr: scope.function_nr})
}
} else if (kind == "var") {
name = stmt.left.name
if (name != null && sem_find_var(scope, name) == null) {
sem_add_var(scope, name, {make: "var", fn_nr: scope.function_nr})
}
} else if (kind == "var_list") {
j = 0
while (j < length(stmt.list)) {
item = stmt.list[j]
ik = item.kind
if (ik == "var") {
name = item.left.name
if (name != null && sem_find_var(scope, name) == null) {
sem_add_var(scope, name, {make: "var", fn_nr: scope.function_nr})
}
}
j = j + 1
sem_add_var(scope, name, {make: "function", fn_nr: scope.function_nr,
decl_line: stmt.from_row != null ? stmt.from_row + 1 : null, reached: false})
}
}
i = i + 1
@@ -1831,7 +1813,7 @@ var parse = function(tokens, src, filename, tokenizer) {
i = i + 1
}
if (expr.statements != null) {
sem_predeclare_vars(fn_scope, expr.statements)
sem_predeclare_fns(fn_scope, expr.statements)
i = 0
while (i < length(expr.statements)) {
sem_check_stmt(fn_scope, expr.statements[i])
@@ -1846,7 +1828,7 @@ var parse = function(tokens, src, filename, tokenizer) {
}
}
sr = sem_build_scope_record(fn_scope)
push(scopes_array, sr.rec)
scopes_array[] = sr.rec
expr.nr_slots = sr.nr_slots
expr.nr_close_slots = sr.nr_close
return null
@@ -1875,6 +1857,11 @@ var parse = function(tokens, src, filename, tokenizer) {
expr.function_nr = r.def_function_nr
r.v.nr_uses = r.v.nr_uses + 1
if (r.level > 0) r.v.closure = 1
if (r.v.reached == false && r.v.decl_line != null && expr.from_row != null && expr.from_row + 1 < r.v.decl_line) {
hoisted_fn_refs[] = {name: name, line: expr.from_row + 1,
col: expr.from_column != null ? expr.from_column + 1 : null,
decl_line: r.v.decl_line}
}
} else {
expr.level = -1
expr.intrinsic = true
@@ -2088,7 +2075,14 @@ var parse = function(tokens, src, filename, tokenizer) {
enclosing = sem_find_func_scope(scope)
if (enclosing != null) enclosing.has_inner_func = true
name = stmt.name
if (name != null && sem_find_var(scope, name) == null) sem_add_var(scope, name, {make: "function", fn_nr: scope.function_nr})
if (name != null) {
existing = sem_find_var(scope, name)
if (existing != null) {
existing.reached = true
} else {
sem_add_var(scope, name, {make: "function", fn_nr: scope.function_nr})
}
}
fn_nr_val = stmt.function_nr
if (fn_nr_val == null) fn_nr_val = scope.function_nr
fn_scope = make_scope(scope, fn_nr_val, {is_func: true})
@@ -2102,7 +2096,7 @@ var parse = function(tokens, src, filename, tokenizer) {
if (def_val != null) sem_check_expr(fn_scope, def_val)
i = i + 1
}
sem_predeclare_vars(fn_scope, stmt.statements)
sem_predeclare_fns(fn_scope, stmt.statements)
i = 0
while (i < length(stmt.statements)) {
sem_check_stmt(fn_scope, stmt.statements[i])
@@ -2116,7 +2110,7 @@ var parse = function(tokens, src, filename, tokenizer) {
}
}
sr = sem_build_scope_record(fn_scope)
push(scopes_array, sr.rec)
scopes_array[] = sr.rec
stmt.nr_slots = sr.nr_slots
stmt.nr_close_slots = sr.nr_close
return null
@@ -2124,6 +2118,7 @@ var parse = function(tokens, src, filename, tokenizer) {
}
var semantic_check = function(ast) {
hoisted_fn_refs = []
var global_scope = make_scope(null, 0, {is_func: true})
var i = 0
var stmt = null
@@ -2134,7 +2129,11 @@ var parse = function(tokens, src, filename, tokenizer) {
i = 0
while (i < length(ast.functions)) {
name = ast.functions[i].name
if (name != null) sem_add_var(global_scope, name, {make: "function", fn_nr: 0})
if (name != null) {
sem_add_var(global_scope, name, {make: "function", fn_nr: 0,
decl_line: ast.functions[i].from_row != null ? ast.functions[i].from_row + 1 : null,
reached: false})
}
i = i + 1
}
@@ -2154,13 +2153,14 @@ var parse = function(tokens, src, filename, tokenizer) {
new_scopes = [sr.rec]
i = 0
while (i < length(scopes_array)) {
push(new_scopes, scopes_array[i])
new_scopes[] = scopes_array[i]
i = i + 1
}
scopes_array = new_scopes
ast.scopes = scopes_array
ast.intrinsics = intrinsics
if (length(hoisted_fn_refs) > 0) ast._hoisted_fns = hoisted_fn_refs
if (length(sem_errors) > 0) {
ast.errors = sem_errors
}
@@ -2183,7 +2183,7 @@ var parse = function(tokens, src, filename, tokenizer) {
if (ast.errors != null) {
_mi = 0
while (_mi < length(errors)) {
push(ast.errors, errors[_mi])
ast.errors[] = errors[_mi]
_mi = _mi + 1
}
} else {

338
plan.md
View File

@@ -1,338 +0,0 @@
# Cell/QuickJS Refactoring Plan: Remove Atoms, Shapes, and Dual-Encoding
## Overview
Refactor `source/quickjs.c` to match `docs/memory.md` specification:
- Remove JSAtom system (171 references → ~41 remaining)
- Remove JSShape system (94 references) ✓
- Remove IC caches (shape-based inline caches) ✓
- Remove `is_wide_char` dual-encoding (18 locations) ✓
- Use JSValue texts directly as property keys
- Reference: `mquickjs.c` shows the target pattern
## Completed Phases
### Phase 1: Remove is_wide_char Remnants ✓
### Phase 2: Remove IC Caches ✓
### Phase 3: Remove JSShape System ✓
### Phase 4: Complete Property Access with JSValue Keys ✓
Completed:
- Removed JS_GC_OBJ_TYPE_JS_OBJECT fallbacks from OP_get_field
- Removed JS_GC_OBJ_TYPE_JS_OBJECT fallbacks from OP_put_field
- Removed JS_GC_OBJ_TYPE_JS_OBJECT fallbacks from OP_define_field
- Created emit_key() function that adds JSValue to cpool and emits index
---
## Phase 5: Convert JSAtom to JSValue Text (IN PROGRESS)
This is the core transformation. All identifier handling moves from atoms to JSValue.
### Completed Items
**Token and Parser Infrastructure:**
- [x] Change JSToken.u.ident.atom to JSToken.u.ident.str (JSValue)
- [x] Change parse_ident() to return JSValue
- [x] Create emit_key() function (cpool-based)
- [x] Create JS_KEY_* macros for common names (lines ~279-335 in quickjs.c)
- [x] Update all token.u.ident.atom references to .str
- [x] Create keyword lookup table (js_keywords[]) with string comparison
- [x] Rewrite update_token_ident() to use js_keyword_lookup()
- [x] Rewrite is_strict_future_keyword() to use JSValue
- [x] Update token_is_pseudo_keyword() to use JSValue and js_key_equal()
**Function Declaration Parsing:**
- [x] Update js_parse_function_decl() signature to use JSValue func_name
- [x] Update js_parse_function_decl2() to use JSValue func_name throughout
- [x] Update js_parse_function_check_names() to use JSValue
- [x] Convert JS_DupAtom/JS_FreeAtom to JS_DupValue/JS_FreeValue in function parsing
**Variable Definition and Lookup:**
- [x] Update find_global_var() to use JSValue and js_key_equal()
- [x] Update find_lexical_global_var() to use JSValue
- [x] Update find_lexical_decl() to use JSValue and js_key_equal()
- [x] Update js_define_var() to use JSValue
- [x] Update js_parse_check_duplicate_parameter() to use JSValue and js_key_equal()
- [x] Update js_parse_destructuring_var() to return JSValue
- [x] Update js_parse_var() to use JSValue for variable names
**Comparison Helpers:**
- [x] Create js_key_equal_str() for comparing JSValue with C string literals
- [x] Update is_var_in_arg_scope() to use js_key_equal/js_key_equal_str
- [x] Update has_with_scope() to use js_key_equal_str
- [x] Update closure variable comparisons (cv->var_name) to use js_key_equal_str
**Property Access:**
- [x] Fix JS_GetPropertyStr to create proper JSValue keys
- [x] Fix JS_SetPropertyInternal callers to use JS_KEY_* instead of JS_ATOM_*
### JS_KEY_* Macros Added
Compile-time immediate ASCII string constants (≤7 chars):
```c
JS_KEY_empty, JS_KEY_name, JS_KEY_message, JS_KEY_stack,
JS_KEY_errors, JS_KEY_Error, JS_KEY_cause, JS_KEY_length,
JS_KEY_value, JS_KEY_get, JS_KEY_set, JS_KEY_raw,
JS_KEY_flags, JS_KEY_source, JS_KEY_exec, JS_KEY_toJSON,
JS_KEY_eval, JS_KEY_this, JS_KEY_true, JS_KEY_false,
JS_KEY_null, JS_KEY_NaN, JS_KEY_default, JS_KEY_index,
JS_KEY_input, JS_KEY_groups, JS_KEY_indices, JS_KEY_let,
JS_KEY_var, JS_KEY_new, JS_KEY_of, JS_KEY_yield,
JS_KEY_async, JS_KEY_target, JS_KEY_from, JS_KEY_meta,
JS_KEY_as, JS_KEY_with
```
Runtime macro for strings >7 chars:
```c
#define JS_KEY_STR(ctx, str) JS_NewStringLen((ctx), (str), sizeof(str) - 1)
```
Helper function for comparing JSValue with C string literals:
```c
static JS_BOOL js_key_equal_str(JSValue a, const char *str);
```
### Remaining Work
#### 5.3 Update js_parse_property_name() ✓
- [x] Change return type from JSAtom* to JSValue*
- [x] Update all callers (js_parse_object_literal, etc.)
- [x] Updated get_lvalue(), put_lvalue(), js_parse_destructuring_element()
#### 5.4 Replace remaining emit_atom() calls with emit_key() ✓
- [x] Removed emit_atom wrapper function
- [x] Changed last emit_atom(JS_ATOM_this) to emit_key(JS_KEY_this)
#### 5.5 Update Variable Opcode Format in quickjs-opcode.h
- [ ] Change `atom` format opcodes to `key` format
- [ ] Change `atom_u8` and `atom_u16` to `key_u8` and `key_u16`
#### 5.6 Update VM Opcode Handlers ✓
These now read cpool indices and look up JSValue:
- [x] OP_check_var, OP_get_var_undef, OP_get_var
- [x] OP_put_var, OP_put_var_init, OP_put_var_strict
- [x] OP_set_name, OP_make_var_ref, OP_delete_var
- [x] OP_define_var, OP_define_func, OP_throw_error
- [x] OP_make_loc_ref, OP_make_arg_ref
- [x] OP_define_method, OP_define_method_computed
#### 5.7 Update resolve_scope_var() ✓
- [x] Changed signature to use JSValue var_name
- [x] Updated all comparisons to use js_key_equal()/js_key_equal_str()
- [x] Updated var_object_test() to use JSValue
- [x] Updated optimize_scope_make_global_ref() to use JSValue
- [x] Updated resolve_variables() callers to read from cpool
#### 5.8 Convert Remaining JS_ATOM_* Usages
Categories remaining:
- Some debug/print functions still use JSAtom
- Some function signatures not yet converted
- Will be addressed in Phase 7 cleanup
---
## Phase 6: Update Bytecode Serialization ✓
### 6.1 JS_WriteObjectTag Changes ✓
- [x] Changed JS_WriteObjectTag to use bc_put_key() directly for property keys
- [x] Removed JS_ValueToAtom/bc_put_atom path (was broken anyway)
- [x] cpool values serialized via JS_WriteObjectRec()
### 6.2 JS_ReadObject Changes ✓
- [x] Changed JS_ReadObjectTag to use bc_get_key() for property keys
- [x] Uses JS_SetPropertyInternal with JSValue keys
### 6.3 Opcode Format Updates ✓
- [x] Added OP_FMT_key_u8, OP_FMT_key_u16, OP_FMT_key_label_u16 formats
- [x] Updated variable opcodes to use key formats instead of atom formats
- [x] Updated bc_byte_swap() to handle new key formats
- [x] Updated JS_WriteFunctionBytecode() to skip key format opcodes
- [x] Updated JS_ReadFunctionBytecode() to skip key format opcodes
### 6.4 Version Bump ✓
- [x] Incremented BC_VERSION from 5 to 6
---
## Phase 7: Final Cleanup ✓
### 7.1 Additional Parser/Compiler Fixes ✓
- [x] Fixed TOK_IDENT case to use JSValue name, JS_DupValue, emit_key
- [x] Fixed TOK_TRY catch clause to use JSValue name
- [x] Fixed js_parse_statement_or_decl label_name to use JSValue
- [x] Fixed OP_scope_get_var "eval" check to use js_key_equal_str
- [x] Fixed js_parse_delete to use JSValue for name comparison
- [x] Fixed JSON parsing to use js_key_from_string for property names
- [x] Added js_key_from_string() helper function
- [x] Added JS_KEY__ret_, JS_KEY__eval_, JS_KEY__var_ for internal names
- [x] Updated add_closure_var, get_closure_var2, get_closure_var to use JSValue var_name
- [x] Updated set_closure_from_var to use JS_DupValue
- [x] Updated add_closure_variables to use JS_DupValue
- [x] Updated instantiate_hoisted_definitions to use fd_cpool_add for keys
- [x] Updated resolve_variables to use js_key_equal and fd_cpool_add
### 7.1.1 Property Access and Runtime Fixes ✓
- [x] Fixed JS_GetPropertyValue to use js_key_from_string instead of JS_ValueToAtom
- [x] Fixed JS_GetPropertyKey to use js_key_from_string for string keys
- [x] Fixed JS_SetPropertyKey to use js_key_from_string for string keys
- [x] Fixed JS_HasPropertyKey to use js_key_from_string for string keys
- [x] Fixed JS_DeletePropertyKey to use js_key_from_string for string keys
- [x] Updated JS_HasProperty signature to take JSValue prop
- [x] Fixed OP_get_ref_value handler to use JSValue key
- [x] Fixed OP_put_ref_value handler to use JSValue key
- [x] Updated free_func_def to use JS_FreeValue for JSValue fields
### 7.2 Remove JSAtom Type and Functions ✓
- [x] Removed most JS_ATOM_* constants (kept JS_ATOM_NULL, JS_ATOM_END for BC compat)
- [x] JS_NewAtomString now returns JSValue using js_key_new
- [x] JS_FreeAtom, JS_DupAtom are stubs (no-op for backward compat)
- [x] JS_AtomToValue, JS_ValueToAtom are stubs (minimal BC compat)
- [x] Replaced JS_ATOM_* usages with JS_KEY_* or JS_GetPropertyStr
### 7.3 Additional Runtime Fixes ✓
- [x] Fixed free_function_bytecode to use JS_FreeValueRT for JSValue fields
- [x] Fixed JS_SetPropertyFunctionList to use JSValue keys via find_key()
- [x] Fixed JS_InstantiateFunctionListItem to use JSValue keys
- [x] Fixed internalize_json_property to use JSValue names
- [x] Fixed emit_break and push_break_entry to use JSValue label_name
- [x] Implemented JS_Invoke to use JSValue method key
### 7.4 Remaining Stubs (kept for bytecode backward compatibility)
- JSAtom typedef (uint32_t) - used in BC serialization
- JS_ATOM_NULL, JS_ATOM_END - bytecode format markers
- JS_FreeAtom, JS_DupAtom - no-op stubs
- JS_FreeAtomRT, JS_DupAtomRT - no-op stubs
- Legacy BC reader (idx_to_atom array) - for reading old bytecode
---
## Current Build Status
**Build: SUCCEEDS** with warnings (unused variables, labels)
**Statistics:**
- JS_ATOM_* usages: Minimal (only BC serialization compat)
- Property access uses JS_KEY_* macros or JS_GetPropertyStr
- BC_VERSION: 6 (updated for new key-based format)
**What Works:**
- All property access via JSValue keys
- Keyword detection via string comparison
- Function declaration parsing with JSValue names
- Variable definition with JSValue names
- Closure variable tracking with JSValue names
- VM opcode handlers read cpool indices and look up JSValue
- resolve_scope_var() uses JSValue throughout
- js_parse_property_name() returns JSValue
- Bytecode serialization uses bc_put_key/bc_get_key for property keys
- Variable opcodes use key format (cpool indices)
- JSON parsing uses JSValue keys
- Internal variable names use JS_KEY__ret_, JS_KEY__eval_, JS_KEY__var_
- JS_SetPropertyFunctionList uses JSValue keys
- JS_Invoke uses JSValue method keys
- break/continue labels use JSValue
---
## Phase 8: Migrate to New Tagging System (IN PROGRESS)
**Problem**: `JS_VALUE_GET_TAG` returns `JS_TAG_PTR` for all pointers, but ~200 places check for obsolete tags like `JS_TAG_OBJECT`, `JS_TAG_STRING`, `JS_TAG_FUNCTION`, etc., which are never returned. This causes crashes.
**Target Design** (from memory.md):
- JSValue tags: Only `JS_TAG_INT`, `JS_TAG_PTR`, `JS_TAG_SHORT_FLOAT`, `JS_TAG_SPECIAL`
- Pointer types determined by `objhdr_t` header at offset 8 in heap objects
- mist_obj_type: `OBJ_ARRAY(0)`, `OBJ_BLOB(1)`, `OBJ_TEXT(2)`, `OBJ_RECORD(3)`, `OBJ_FUNCTION(4)`, etc.
### 8.1 Unified Heap Object Layout ✓
- [x] Updated mist_text structure to have objhdr_t at offset 8:
```c
typedef struct mist_text {
JSRefCountHeader _dummy_header; /* unused, for offset alignment */
uint32_t _pad; /* padding to align objhdr_t to offset 8 */
objhdr_t hdr; /* NOW at offset 8, like JSString */
word_t length;
word_t packed[];
} mist_text;
```
- [x] JSString already has objhdr_t at offset 8
### 8.2 Type-Checking Helper Functions ✓
Added lowercase internal helpers (to avoid conflict with quickjs.h declarations):
```c
static inline JS_BOOL js_is_gc_object(JSValue v) { return JS_IsPtr(v); }
static inline JSGCObjectTypeEnum js_get_gc_type(JSValue v) {
return ((JSGCObjectHeader *)JS_VALUE_GET_PTR(v))->gc_obj_type;
}
static inline JS_BOOL js_is_record(JSValue v) {
if (!JS_IsPtr(v)) return FALSE;
return js_get_gc_type(v) == JS_GC_OBJ_TYPE_RECORD;
}
static inline JS_BOOL js_is_array(JSValue v) {
if (!JS_IsPtr(v)) return FALSE;
return js_get_gc_type(v) == JS_GC_OBJ_TYPE_ARRAY;
}
static inline JS_BOOL js_is_function(JSValue v) {
if (!JS_IsPtr(v)) return FALSE;
return js_get_gc_type(v) == JS_GC_OBJ_TYPE_FUNCTION;
}
static inline JS_BOOL js_is_object(JSValue v) {
if (!JS_IsPtr(v)) return FALSE;
JSGCObjectTypeEnum t = js_get_gc_type(v);
return t == JS_GC_OBJ_TYPE_RECORD || t == JS_GC_OBJ_TYPE_ARRAY;
}
```
### 8.3 Updated Core Functions ✓
- [x] Updated JS_IsString to read objhdr_t from offset 8
- [x] Updated js_key_hash to read objhdr_t from offset 8
- [x] Updated js_key_equal to read objhdr_t from offset 8
- [x] Updated __JS_FreeValueRT to use objhdr_type for type dispatch
- [x] Updated JS_MarkValue, JS_MarkValueEdgeEx for GC
- [x] Added JS_SetPropertyValue function
- [x] Changed quickjs.h JS_IsFunction/JS_IsObject from inline to extern declarations
### 8.4 Tag Check Migration (PARTIAL)
Updated some critical tag checks:
- [x] Some JS_TAG_OBJECT checks → js_is_object() or js_is_record()
- [ ] Many more JS_TAG_OBJECT checks remain (~200 total)
- [ ] JS_TAG_FUNCTION checks → js_is_function()
- [ ] JS_TAG_STRING checks (some already use JS_IsString)
### 8.5 Remaining Work
- [ ] Fix ASAN memory corruption error (attempting free on address not malloc'd)
- Crash occurs in js_def_realloc during js_realloc_array
- Address is 112 bytes inside a JSFunctionDef allocation
- [ ] Complete remaining ~200 tag check migrations
- [ ] Add mist_hdr to JSFunction (optional, gc_obj_type already works)
- [ ] Remove obsolete tag definitions from quickjs.h:
- JS_TAG_STRING = -8
- JS_TAG_ARRAY = -6
- JS_TAG_FUNCTION = -5
- JS_TAG_FUNCTION_BYTECODE = -2
- JS_TAG_OBJECT = -1
### Current Status
**Build: SUCCEEDS** with warnings
**Runtime: CRASHES** with ASAN error:
```
==16122==ERROR: AddressSanitizer: attempting free on address which was not malloc()-ed
```
The crash occurs during test execution in `js_def_realloc` called from `js_realloc_array`.
Root cause not yet identified - likely a pointer being passed to realloc that wasn't allocated with malloc.
---
## Notes
- JSVarDef.var_name is JSValue
- JSClosureVar.var_name is JSValue
- JSGlobalVar.var_name is JSValue
- JSFunctionDef.func_name is JSValue
- BlockEnv.label_name is JSValue
- OP_get_field/put_field/define_field already use cpool index format
- JSRecord with open addressing is fully implemented
- js_key_hash and js_key_equal work with both immediate and heap text
- js_key_equal_str enables comparison with C string literals for internal names

View File

@@ -135,7 +135,6 @@ JSC_SCALL(file_listfiles,
JSValue arr = JS_NewArray(js);
struct listfiles_ctx ctx = { js, arr, 0 };
if (pd_file->listfiles(str, listfiles_cb, &ctx, showhidden) != 0) {
JS_FreeValue(js, arr);
ret = JS_NULL;
} else {
ret = arr;

View File

@@ -112,9 +112,6 @@ static void encode_js_object(json_encoder *enc, JSContext *js, JSValue obj) {
JSValue val = JS_GetProperty(js, obj, props[i].atom);
enc->addTableMember(enc, key, strlen(key));
encode_js_value(enc, js, val);
JS_FreeValue(js, val);
JS_FreeCString(js, key);
JS_FreeAtom(js, props[i].atom);
}
js_free_rt(props);
}
@@ -125,12 +122,10 @@ static void encode_js_array(json_encoder *enc, JSContext *js, JSValue arr) {
enc->startArray(enc);
JSValue lenVal = JS_GetPropertyStr(js, arr, "length");
int len = (int)js2number(js, lenVal);
JS_FreeValue(js, lenVal);
for (int i = 0; i < len; i++) {
enc->addArrayMember(enc);
JSValue val = JS_GetPropertyNumber(js, arr, i);
encode_js_value(enc, js, val);
JS_FreeValue(js, val);
}
enc->endArray(enc);
}
@@ -149,7 +144,6 @@ static void encode_js_value(json_encoder *enc, JSContext *js, JSValue val) {
size_t len;
const char *str = JS_ToCStringLen(js, &len, val);
enc->writeString(enc, str, len);
JS_FreeCString(js, str);
} else if (JS_IsArray(val)) {
encode_js_array(enc, js, val);
} else if (JS_IsObject(val)) {

View File

@@ -30,9 +30,6 @@ static void add_score_cb(PDScore *score, const char *errorMessage) {
args[0] = score_to_js(g_scoreboard_js, score);
args[1] = errorMessage ? JS_NewString(g_scoreboard_js, errorMessage) : JS_NULL;
JSValue ret = JS_Call(g_scoreboard_js, g_add_score_callback, JS_NULL, 2, args);
JS_FreeValue(g_scoreboard_js, ret);
JS_FreeValue(g_scoreboard_js, args[0]);
JS_FreeValue(g_scoreboard_js, args[1]);
}
static void personal_best_cb(PDScore *score, const char *errorMessage) {
@@ -41,9 +38,6 @@ static void personal_best_cb(PDScore *score, const char *errorMessage) {
args[0] = score_to_js(g_scoreboard_js, score);
args[1] = errorMessage ? JS_NewString(g_scoreboard_js, errorMessage) : JS_NULL;
JSValue ret = JS_Call(g_scoreboard_js, g_personal_best_callback, JS_NULL, 2, args);
JS_FreeValue(g_scoreboard_js, ret);
JS_FreeValue(g_scoreboard_js, args[0]);
JS_FreeValue(g_scoreboard_js, args[1]);
}
static void boards_list_cb(PDBoardsList *boards, const char *errorMessage) {
@@ -65,9 +59,6 @@ static void boards_list_cb(PDBoardsList *boards, const char *errorMessage) {
}
args[1] = errorMessage ? JS_NewString(g_scoreboard_js, errorMessage) : JS_NULL;
JSValue ret = JS_Call(g_scoreboard_js, g_boards_list_callback, JS_NULL, 2, args);
JS_FreeValue(g_scoreboard_js, ret);
JS_FreeValue(g_scoreboard_js, args[0]);
JS_FreeValue(g_scoreboard_js, args[1]);
}
static void scores_cb(PDScoresList *scores, const char *errorMessage) {
@@ -92,9 +83,6 @@ static void scores_cb(PDScoresList *scores, const char *errorMessage) {
}
args[1] = errorMessage ? JS_NewString(g_scoreboard_js, errorMessage) : JS_NULL;
JSValue ret = JS_Call(g_scoreboard_js, g_scores_callback, JS_NULL, 2, args);
JS_FreeValue(g_scoreboard_js, ret);
JS_FreeValue(g_scoreboard_js, args[0]);
JS_FreeValue(g_scoreboard_js, args[1]);
}
// --- API Functions ---
@@ -104,8 +92,7 @@ JSC_SCALL(scoreboards_addScore,
uint32_t value = (uint32_t)js2number(js, argv[1]);
if (argc > 2 && JS_IsFunction(argv[2])) {
g_scoreboard_js = js;
JS_FreeValue(js, g_add_score_callback);
g_add_score_callback = JS_DupValue(js, argv[2]);
g_add_score_callback = argv[2];
}
ret = JS_NewBool(js, pd_scoreboards->addScore(str, value, add_score_cb));
)
@@ -114,8 +101,7 @@ JSC_SCALL(scoreboards_getPersonalBest,
if (!pd_scoreboards) return JS_RaiseDisrupt(js, "scoreboards not initialized");
if (argc > 1 && JS_IsFunction(argv[1])) {
g_scoreboard_js = js;
JS_FreeValue(js, g_personal_best_callback);
g_personal_best_callback = JS_DupValue(js, argv[1]);
g_personal_best_callback = argv[1];
}
ret = JS_NewBool(js, pd_scoreboards->getPersonalBest(str, personal_best_cb));
)
@@ -131,8 +117,7 @@ JSC_CCALL(scoreboards_getScoreboards,
if (!pd_scoreboards) return JS_RaiseDisrupt(js, "scoreboards not initialized");
if (argc > 0 && JS_IsFunction(argv[0])) {
g_scoreboard_js = js;
JS_FreeValue(js, g_boards_list_callback);
g_boards_list_callback = JS_DupValue(js, argv[0]);
g_boards_list_callback = argv[0];
}
return JS_NewBool(js, pd_scoreboards->getScoreboards(boards_list_cb));
)
@@ -147,8 +132,7 @@ JSC_SCALL(scoreboards_getScores,
if (!pd_scoreboards) return JS_RaiseDisrupt(js, "scoreboards not initialized");
if (argc > 1 && JS_IsFunction(argv[1])) {
g_scoreboard_js = js;
JS_FreeValue(js, g_scores_callback);
g_scores_callback = JS_DupValue(js, argv[1]);
g_scores_callback = argv[1];
}
ret = JS_NewBool(js, pd_scoreboards->getScores(str, scores_cb));
)

124
probe.ce Normal file
View File

@@ -0,0 +1,124 @@
// cell probe - Query a running probe server
//
// Usage:
// cell probe List all targets and probes
// cell probe <target> <name> Query a probe
// cell probe <target> <name> k=v ... Query with arguments
// cell probe --port=8080 game state Use a different port
// Module state for the probe CLI.
// `port` defaults to 9000 and may be overridden with --port=N;
// `base` is built from the final port once argument parsing is done.
var http = use('http')
var json = use('json')
var port = 9000
var base = null
// Print every target and its probe names: one line per target,
// each probe name indented beneath it.
// `targets` is the map returned by the server's /discover endpoint
// (target name -> array of probe names — see print usage below).
function print_targets(targets) {
// presumably array(obj) yields the object's keys — TODO confirm
var keys = array(targets)
var j = 0
var p = 0
var probes = null
while (j < length(keys)) {
probes = targets[keys[j]]
log.console(keys[j])
p = 0
while (p < length(probes)) {
log.console(" " + probes[p])
p = p + 1
}
j = j + 1
}
}
// CLI entry point. Parses command-line `args`, queries the probe
// server over HTTP, and prints either the target/probe listing
// (no arguments) or a single probe result (target + name given).
function run() {
var target = null
var name = null
// key=value pairs after <target> <name>, sent as probe arguments
var probe_args = {}
var i = 0
var eq = null
var k = null
var v = null
var n = null
for (i = 0; i < length(args); i++) {
if (args[i] == '--help' || args[i] == '-h') {
log.console("Usage: cell probe [target] [name] [key=value ...]")
log.console("")
log.console(" cell probe List all targets and probes")
log.console(" cell probe game state Query game/state")
log.console(" cell probe game entity id=1 Query with arguments")
log.console("")
log.console("Options:")
log.console(" --port=N Connect to port N (default 9000)")
return
} else if (starts_with(args[i], '--port=')) {
// text(s, 7) presumably takes the substring after '--port=' — TODO confirm
port = number(text(args[i], 7))
} else if (target == null) {
// first bare argument is the target
target = args[i]
} else if (name == null) {
// second bare argument is the probe name
name = args[i]
} else {
// remaining arguments: key=value pairs with lightweight type coercion
eq = search(args[i], "=")
if (eq != null) {
k = text(args[i], 0, eq)
v = text(args[i], eq + 1)
// try number first, then the boolean/null keywords; otherwise keep text
n = number(v)
if (n != null) {
v = n
} else if (v == "true") {
v = true
} else if (v == "false") {
v = false
} else if (v == "null") {
v = null
}
probe_args[k] = v
}
}
}
base = "http://127.0.0.1:" + text(port)
var resp = null
var body = null
var data = null
if (target == null) {
// no target given: list everything the server knows about
resp = http.request("GET", base + "/discover", null, null)
} else {
// query one probe; only attach args when at least one k=v was given
body = {target: target, name: name}
if (length(array(probe_args)) > 0) body.args = probe_args
resp = http.request("POST", base + "/probe",
{"content-type": "application/json"}, json.encode(body, false))
}
if (resp == null) {
log.error("could not connect to probe server on port " + text(port))
return
}
// decode the response body; the disruption handler turns a decode
// failure into data == null so it is reported uniformly below
var _parse = function() {
data = json.decode(resp.body)
} disruption {
data = null
}
_parse()
if (data == null) {
log.error("invalid response from server")
return
}
if (!data.ok) {
log.error(data.error)
return
}
if (target == null) {
print_targets(data.targets)
} else {
// NOTE(review): second argument to json.encode is `false` above but `2`
// here — presumably an indent width for pretty output; confirm
log.console(json.encode(data.result, 2))
}
}
// Execute the CLI, then stop the cell runtime ($stop presumably halts
// the script's event loop — TODO confirm).
run()
$stop()

124
probe.cm Normal file
View File

@@ -0,0 +1,124 @@
// Module state for the probe server.
// `registry` maps target name -> {probe name -> probe function}.
// `server_fd` is null until the first register() lazily starts the server.
var http = use('http')
var json = use('json')
var registry = {}
var server_fd = null
var port = 9000
// Route one HTTP request to a server endpoint and send the response:
//   GET  /discover  -> listing of registered targets/probes
//   POST /probe     -> run a single probe (handle_probe)
//   POST /snapshot  -> run a batch of probes (handle_snapshot)
// Anything else gets a 404. Probe/snapshot execution is wrapped in a
// disruption handler so a failing probe yields an {ok:false} payload
// instead of killing the server.
function handle_request(req) {
var result = null
var _try = null
if (req.method == "GET" && req.path == "/discover") {
result = discover()
http.respond(req._conn, 200, {"content-type": "application/json"},
json.encode(result))
return
}
if (req.method == "POST" && req.path == "/probe") {
_try = function() {
result = handle_probe(req)
} disruption {
result = {ok: false, error: "probe failed"}
}
_try()
http.respond(req._conn, 200, {"content-type": "application/json"},
json.encode(result))
return
}
if (req.method == "POST" && req.path == "/snapshot") {
_try = function() {
result = handle_snapshot(req)
} disruption {
result = {ok: false, error: "snapshot failed"}
}
_try()
http.respond(req._conn, 200, {"content-type": "application/json"},
json.encode(result))
return
}
// unknown method/path combination
http.respond(req._conn, 404, {"content-type": "application/json"},
json.encode({ok: false, error: "not found"}))
}
// Build the /discover payload: {ok: true, targets: {target -> [probe names]}}.
// array(registry[t]) presumably lists the probe-map's keys — TODO confirm.
function discover() {
var targets = {}
var target_keys = array(registry)
var i = 0
while (i < length(target_keys)) {
targets[target_keys[i]] = array(registry[target_keys[i]])
i = i + 1
}
return {ok: true, targets: targets}
}
// Run one probe. Request body is JSON: {target, name, args?}.
// Validates the lookup step by step so the client gets a specific
// error message; a missing args object defaults to {}.
// Returns {ok: true, result} or {ok: false, error}.
function handle_probe(req) {
var body = json.decode(req.body)
var target = body.target
var name = body.name
var args = body.args
if (target == null || name == null) {
return {ok: false, error: "missing target or name"}
}
var target_probes = registry[target]
if (target_probes == null) {
return {ok: false, error: "unknown target: " + target}
}
var probe_fn = target_probes[name]
if (probe_fn == null) {
return {ok: false, error: "unknown probe: " + target + "/" + name}
}
if (args == null) args = {}
var result = probe_fn(args)
return {ok: true, result: result}
}
// Run a batch of probes in one request. Body: {probes: [{target, name, args?}]}.
// Results are keyed "target/name"; unknown targets or probes are silently
// skipped (no error entry) rather than failing the whole snapshot.
function handle_snapshot(req) {
var body = json.decode(req.body)
var probes = body.probes
if (probes == null) {
return {ok: false, error: "missing probes array"}
}
var results = {}
var i = 0
var p = null
var target_probes = null
var probe_fn = null
var key = null
while (i < length(probes)) {
p = probes[i]
key = p.target + "/" + p.name
target_probes = registry[p.target]
if (target_probes != null) {
probe_fn = target_probes[p.name]
if (probe_fn != null) {
results[key] = probe_fn(p.args != null ? p.args : {})
}
}
i = i + 1
}
return {ok: true, results: results}
}
// Start the HTTP server on `port` and wire incoming requests to
// handle_request. Called lazily from register() on first use.
function start_server() {
server_fd = http.serve(port)
http.on_request(server_fd, handle_request)
}
// Public API: register a target's probe map ({name -> fn}).
// Replaces any existing map for that target wholesale. Starts the
// server the first time anything is registered.
function register(target, probes) {
registry[target] = probes
if (server_fd == null) start_server()
}
// Module interface. NOTE(review): `port` is exported by value here;
// mutating the exported copy presumably does not change the port the
// lazily-started server binds to — confirm intended usage.
return {
register: register,
port: port
}

File diff suppressed because it is too large Load Diff

View File

@@ -149,7 +149,7 @@ if (!is_array(args) || length(args) < 1) {
}
for (; i < length(args) - 1; i++) {
push(sources, args[i])
sources[] = args[i]
}
archive = args[length(args) - 1]

View File

@@ -78,7 +78,7 @@ var run = function() {
arrfor(all_packages, function(p) {
if (p == 'core') return
if (!needed[p] && find(packages_to_remove, p) == null) {
push(packages_to_remove, p)
packages_to_remove[] = p
}
})
}

View File

@@ -165,11 +165,11 @@ for (i = 0; i < length(sorted); i++) {
// Format output
status_parts = []
if (is_linked) push(status_parts, "linked")
if (is_local) push(status_parts, "local")
if (!is_in_lock) push(status_parts, "not in lock")
if (!is_fetched) push(status_parts, "not fetched")
if (has_c_files) push(status_parts, "has C modules")
if (is_linked) status_parts[] = "linked"
if (is_local) status_parts[] = "local"
if (!is_in_lock) status_parts[] = "not in lock"
if (!is_fetched) status_parts[] = "not fetched"
if (has_c_files) status_parts[] = "has C modules"
commit_str = ""
if (lock_entry && lock_entry.commit) {

View File

@@ -21,7 +21,7 @@ var packages = shop.list_packages()
arrfor(packages, function(package_name) {
// Check if package name matches
if (search(package_name, query) != null) {
push(found_packages, package_name)
found_packages[] = package_name
}
// Search modules and actors within the package
@@ -29,14 +29,14 @@ arrfor(packages, function(package_name) {
var modules = pkg.list_modules(package_name)
arrfor(modules, function(mod) {
if (search(mod, query) != null) {
push(found_modules, package_name + ':' + mod)
found_modules[] = package_name + ':' + mod
}
})
var actors = pkg.list_programs(package_name)
arrfor(actors, function(actor) {
if (search(actor, query) != null) {
push(found_actors, package_name + ':' + actor)
found_actors[] = package_name + ':' + actor
}
})
} disruption {

View File

@@ -42,7 +42,7 @@ for (i = 0; i < length(args); i++) {
var core_dir = shop.get_package_dir('core')
var boot_dir = core_dir + '/boot'
var pipeline_modules = ['tokenize', 'parse', 'fold', 'mcode', 'streamline']
var pipeline_modules = ['tokenize', 'parse', 'fold', 'mcode', 'streamline', 'qbe', 'qbe_emit']
var generated = 0
var name = null
var src_path = null

View File

@@ -179,7 +179,7 @@ var run = function() {
first_def[slot_num] = pc
first_def_op[slot_num] = op
}
push(events, {kind: "DEF", slot: operand_val, pc: pc, instr: instr})
events[] = {kind: "DEF", slot: operand_val, pc: pc, instr: instr}
}
di = di + 1
}
@@ -191,7 +191,7 @@ var run = function() {
slot_num = text(operand_val)
if (!uses[slot_num]) uses[slot_num] = 0
uses[slot_num] = uses[slot_num] + 1
push(events, {kind: "USE", slot: operand_val, pc: pc, instr: instr})
events[] = {kind: "USE", slot: operand_val, pc: pc, instr: instr}
}
ui = ui + 1
}
@@ -219,7 +219,7 @@ var run = function() {
parts = []
j = 1
while (j < n - 2) {
push(parts, fmt_val(evt.instr[j]))
parts[] = fmt_val(evt.instr[j])
j = j + 1
}
operands = text(parts, ", ")

Some files were not shown because too many files have changed in this diff Show More