Merge branch 'master' into fix_aot

This commit is contained in:
2026-02-17 15:48:54 -06:00
18 changed files with 729 additions and 334 deletions

View File

@@ -6,6 +6,7 @@
// cell build <locator> Build dynamic library for specific package
// cell build -t <target> Cross-compile dynamic libraries for target platform
// cell build -b <type> Build type: release (default), debug, or minsize
// cell build --verbose Print resolved flags, commands, and cache status
var build = use('build')
var shop = use('internal/shop')
@@ -15,6 +16,7 @@ var fd = use('fd')
var target = null
var target_package = null
var buildtype = 'release'
var verbose = false
var force_rebuild = false
var dry_run = false
var i = 0
@@ -55,6 +57,8 @@ for (i = 0; i < length(args); i++) {
}
} else if (args[i] == '--force') {
force_rebuild = true
} else if (args[i] == '--verbose' || args[i] == '-v') {
verbose = true
} else if (args[i] == '--dry-run') {
dry_run = true
} else if (args[i] == '--list-targets') {
@@ -104,7 +108,7 @@ if (target_package) {
// Build single package
log.console('Building ' + target_package + '...')
_build = function() {
lib = build.build_dynamic(target_package, target, buildtype)
lib = build.build_dynamic(target_package, target, buildtype, {verbose: verbose})
if (lib) {
log.console(`Built ${text(length(lib))} module(s)`)
}
@@ -116,7 +120,7 @@ if (target_package) {
} else {
// Build all packages
log.console('Building all packages...')
results = build.build_all_dynamic(target, buildtype)
results = build.build_all_dynamic(target, buildtype, {verbose: verbose})
success = 0
failed = 0

View File

@@ -172,6 +172,11 @@ Build.compile_file = function(pkg, file, target, opts) {
var cmd_str = text(cmd_parts, ' ')
if (_opts.verbose) {
print('[verbose] CFLAGS: ' + text(cflags, ' '))
print('[verbose] compile: ' + cmd_str)
}
// Content hash: command + file content
var file_content = fd.slurp(src_path)
var hash_input = cmd_str + '\n' + text(file_content)
@@ -183,8 +188,10 @@ Build.compile_file = function(pkg, file, target, opts) {
// Check if already compiled
if (fd.is_file(obj_path)) {
if (_opts.verbose) print('[verbose] cache hit: ' + file)
return obj_path
}
if (_opts.verbose) print('[verbose] cache miss: ' + file)
// Compile — capture stderr to detect missing-header vs real errors
var err_path = '/tmp/cell_build_err_' + hash + '.log'
@@ -308,6 +315,10 @@ Build.build_module_dylib = function(pkg, file, target, opts) {
var cmd_str = null
var ret = null
if (_opts.verbose) {
print('[verbose] LDFLAGS: ' + text(resolved_ldflags, ' '))
}
if (!fd.is_file(dylib_path)) {
cmd_parts = [cc, '-shared', '-fPIC']
@@ -340,6 +351,7 @@ Build.build_module_dylib = function(pkg, file, target, opts) {
push(cmd_parts, '"' + dylib_path + '"')
cmd_str = text(cmd_parts, ' ')
if (_opts.verbose) print('[verbose] link: ' + cmd_str)
log.console('Linking module ' + file + ' -> ' + fd.basename(dylib_path))
ret = os.system(cmd_str)
if (ret != 0) {
@@ -361,6 +373,7 @@ Build.build_module_dylib = function(pkg, file, target, opts) {
ensure_dir(install_dir)
var install_path = shop.get_lib_dir() + '/' + shop.lib_name_for_package(pkg) + '/' + file_stem + dylib_ext
fd.slurpwrite(install_path, fd.slurp(dylib_path))
if (_opts.verbose) print('[verbose] install: ' + install_path)
return dylib_path
}
@@ -368,9 +381,10 @@ Build.build_module_dylib = function(pkg, file, target, opts) {
// Build a dynamic library for a package (one dylib per C file)
// Returns array of {file, symbol, dylib} for each module
// Also writes a manifest mapping symbols to dylib paths
Build.build_dynamic = function(pkg, target, buildtype) {
Build.build_dynamic = function(pkg, target, buildtype, opts) {
var _target = target || Build.detect_host_target()
var _buildtype = buildtype || 'release'
var _opts = opts || {}
var c_files = pkg_tools.get_c_files(pkg, _target, true)
var results = []
@@ -382,13 +396,13 @@ Build.build_dynamic = function(pkg, target, buildtype) {
var sources = pkg_tools.get_sources(pkg)
var support_objects = []
arrfor(sources, function(src_file) {
var obj = Build.compile_file(pkg, src_file, _target, {buildtype: _buildtype, cflags: cached_cflags})
var obj = Build.compile_file(pkg, src_file, _target, {buildtype: _buildtype, cflags: cached_cflags, verbose: _opts.verbose})
push(support_objects, obj)
})
arrfor(c_files, function(file) {
var sym_name = shop.c_symbol_for_file(pkg, file)
var dylib = Build.build_module_dylib(pkg, file, _target, {buildtype: _buildtype, extra_objects: support_objects, cflags: cached_cflags})
var dylib = Build.build_module_dylib(pkg, file, _target, {buildtype: _buildtype, extra_objects: support_objects, cflags: cached_cflags, verbose: _opts.verbose})
if (dylib) {
push(results, {file: file, symbol: sym_name, dylib: dylib})
}
@@ -847,9 +861,10 @@ Build.generate_module_table = function(modules, output) {
// ============================================================================
// Build dynamic libraries for all installed packages
Build.build_all_dynamic = function(target, buildtype) {
Build.build_all_dynamic = function(target, buildtype, opts) {
var _target = target || Build.detect_host_target()
var _buildtype = buildtype || 'release'
var _opts = opts || {}
var packages = shop.list_packages()
var results = []
@@ -860,14 +875,14 @@ Build.build_all_dynamic = function(target, buildtype) {
// Build core first
if (find(packages, function(p) { return p == 'core' }) != null) {
core_mods = Build.build_dynamic('core', _target, _buildtype)
core_mods = Build.build_dynamic('core', _target, _buildtype, _opts)
push(results, {package: 'core', modules: core_mods})
}
// Build other packages
arrfor(packages, function(pkg) {
if (pkg == 'core') return
var pkg_mods = Build.build_dynamic(pkg, _target, _buildtype)
var pkg_mods = Build.build_dynamic(pkg, _target, _buildtype, _opts)
push(results, {package: pkg, modules: pkg_mods})
})

View File

@@ -1,98 +0,0 @@
// compile_seed.ce — compile a .cm module to native .dylib via QBE (seed mode)
// Usage: ./cell --dev --seed compile_seed <file.cm>
//
// Runs the full compilation pipeline (tokenize -> parse -> fold -> mcode ->
// streamline -> qbe_emit) to produce QBE IL, then shells out to `qbe` and
// `cc` to assemble and link a loadable dynamic library next to the source.
var fd = use("fd")
var os = use("os")
var tokenize = use("tokenize")
var parse = use("parse")
var fold = use("fold")
var mcode = use("mcode")
var streamline = use("streamline")
var qbe_macros = use("qbe")
var qbe_emit = use("qbe_emit")
if (length(args) < 1) {
print("usage: cell --dev --seed compile_seed <file.cm>")
disrupt
}
var file = args[0]
// Strip a trailing .cm/.ce extension to get the base name used for
// the symbol and temp-file naming.
var base = file
if (ends_with(base, ".cm")) {
base = text(base, 0, length(base) - 3)
} else if (ends_with(base, ".ce")) {
base = text(base, 0, length(base) - 3)
}
// Sanitize path separators and punctuation into underscores so the name
// is a valid C identifier. The "js_<safe>_use" form presumably must match
// the symbol the module loader looks up at dlopen time — confirm against
// shop.c_symbol_for_file.
var safe = replace(replace(replace(base, "/", "_"), "-", "_"), ".", "_")
var symbol = "js_" + safe + "_use"
var tmp = "/tmp/qbe_" + safe
var ssa_path = tmp + ".ssa"
var s_path = tmp + ".s"
var o_path = tmp + ".o"
var rt_o_path = "/tmp/qbe_rt.o"
var dylib_path = file + ".dylib"
var rc = 0
// Step 1: compile to QBE IL
print("compiling " + file + " to QBE IL...")
var src = text(fd.slurp(file))
var result = tokenize(src, file)
var ast = parse(result.tokens, src, file, tokenize)
var folded = fold(ast)
var compiled = mcode(folded)
var optimized = streamline(compiled)
var il = qbe_emit(optimized, qbe_macros)
// Step 2: append wrapper function
// Exported QBE function that the loader calls; it forwards the context
// pointer to the runtime entry point cell_rt_module_entry.
var wrapper = `
export function l $${symbol}(l %ctx) {
@entry
%result =l call $cell_rt_module_entry(l %ctx)
ret %result
}
`
il = il + wrapper
// Write IL to file — remove old file first to avoid leftover content
if (fd.is_file(ssa_path)) fd.unlink(ssa_path)
// 1537 == O_WRONLY|O_CREAT|O_TRUNC and 420 == mode 0644 on macOS/BSD —
// TODO confirm these numeric flag values on other platforms.
var out_fd = fd.open(ssa_path, 1537, 420)
fd.write(out_fd, il)
fd.close(out_fd)
print("wrote " + ssa_path + " (" + text(length(il)) + " bytes)")
// Step 3: compile QBE IL to assembly
print("qbe compile...")
rc = os.system("qbe -o " + s_path + " " + ssa_path)
if (rc != 0) {
print("qbe compilation failed")
disrupt
}
// Step 4: assemble
print("assemble...")
rc = os.system("cc -c " + s_path + " -o " + o_path)
if (rc != 0) {
print("assembly failed")
disrupt
}
// Step 5: compile runtime stubs
// Cached in /tmp and reused across runs; only rebuilt when missing.
if (!fd.is_file(rt_o_path)) {
print("compile runtime stubs...")
rc = os.system("cc -c source/qbe_helpers.c -o " + rt_o_path + " -fPIC -Isource")
if (rc != 0) {
print("runtime stubs compilation failed")
disrupt
}
}
// Step 6: link dylib
// -undefined dynamic_lookup is a macOS ld flag: unresolved symbols are
// left to be found in the host process at load time.
print("link...")
rc = os.system("cc -shared -fPIC -undefined dynamic_lookup " + o_path + " " + rt_o_path + " -o " + dylib_path)
if (rc != 0) {
print("linking failed")
disrupt
}
print("built: " + dylib_path)

View File

@@ -65,6 +65,7 @@ pit build -b debug # build type: release (default), debug, minsiz
pit build --list-targets # list available targets
pit build --force # force rebuild
pit build --dry-run # show what would be built
pit build --verbose # print resolved flags, commands, cache status
```
### pit test
@@ -330,9 +331,14 @@ pit mcode <file.cm>
Apply the full optimization pipeline to a source file and output optimized mcode as JSON.
```bash
pit streamline <file.cm>
pit streamline <file.cm> # full optimized IR as JSON (default)
pit streamline --stats <file.cm> # summary stats per function
pit streamline --ir <file.cm> # human-readable IR
pit streamline --check <file.cm> # warnings only (e.g. high slot count)
```
Flags can be combined. `--stats` output includes function name, args, slots, instruction counts by category, and nops eliminated. `--check` warns when `nr_slots > 200` (approaching the 255 limit).
### pit qbe
Compile a source file to QBE intermediate language (for native code generation).
@@ -367,6 +373,17 @@ Ahead-of-time compile and execute a program natively.
pit run_aot <program.ce>
```
### pit seed
Regenerate the boot seed files in `boot/`. Seeds are pre-compiled mcode IR (JSON) that bootstrap the compilation pipeline on cold start. They only need regenerating when the pipeline source changes in a way the existing seeds can't compile, or before distribution.
```bash
pit seed # regenerate all boot seeds
pit seed --clean # also clear the build cache after
```
The engine recompiles pipeline modules automatically when source changes (via content-addressed cache). Seeds are a fallback for cold start when no cache exists.
### Analysis
### pit explain

View File

@@ -81,9 +81,37 @@ Shows the optimized IR with type annotations. Each instruction is followed by th
Runs the full pipeline (tokenize, parse, fold, mcode, streamline) and outputs the optimized IR as JSON. Useful for piping to `jq` or saving for comparison.
```bash
./cell --core . streamline.ce <file.ce|file.cm>
./cell --core . streamline.ce <file.ce|file.cm> # full JSON (default)
./cell --core . streamline.ce --stats <file.ce|file.cm> # summary stats per function
./cell --core . streamline.ce --ir <file.ce|file.cm> # human-readable IR
./cell --core . streamline.ce --check <file.ce|file.cm> # warnings only
```
| Flag | Description |
|------|-------------|
| (none) | Full optimized IR as JSON (backward compatible) |
| `--stats` | Per-function summary: args, slots, instruction counts by category, nops eliminated |
| `--ir` | Human-readable canonical IR (same format as `ir_report.ce`) |
| `--check` | Warnings only (e.g. `nr_slots > 200` approaching 255 limit) |
Flags can be combined.
## seed.ce
Regenerates the boot seed files in `boot/`. These are pre-compiled mcode IR (JSON) files that bootstrap the compilation pipeline on cold start.
```bash
./cell --core . seed.ce # regenerate all boot seeds
./cell --core . seed.ce --clean # also clear the build cache after
```
The script compiles each pipeline module (tokenize, parse, fold, mcode, streamline) and `internal/bootstrap.cm` through the current pipeline, encodes the output as JSON, and writes it to `boot/<name>.cm.mcode`.
**When to regenerate seeds:**
- Before a release or distribution
- When the pipeline source changes in a way the existing seeds can't compile the new source (e.g. language-level changes)
- Seeds do NOT need regenerating for normal development — the engine recompiles pipeline modules from source automatically via the content-addressed cache
## ir_report.ce
The optimizer flight recorder. Runs the full pipeline with structured logging and outputs machine-readable, diff-friendly JSON. This is the most detailed tool for understanding what the optimizer did and why.

View File

@@ -9,11 +9,11 @@ Packages are the fundamental unit of code organization and sharing in ƿit.
## Package Structure
A package is a directory containing a `pit.toml` manifest:
A package is a directory containing a `cell.toml` manifest:
```
mypackage/
├── pit.toml # package manifest
├── cell.toml # package manifest
├── main.ce # entry point (optional)
├── utils.cm # module
├── helper/
@@ -23,7 +23,7 @@ mypackage/
└── helpers.cm # private module (internal/ only)
```
## pit.toml
## cell.toml
The package manifest declares metadata and dependencies:
@@ -47,7 +47,7 @@ mylib = "/Users/john/work/mylib"
When importing with `use()`, ƿit searches in order:
1. **Local package** — relative to package root
2. **Dependencies** — via aliases in `pit.toml`
2. **Dependencies** — via aliases in `cell.toml`
3. **Core** — built-in ƿit modules
```javascript
@@ -179,7 +179,7 @@ C files in a package are compiled into per-file dynamic libraries:
```
mypackage/
├── pit.toml
├── cell.toml
├── render.c # compiled to lib/mypackage/render.dylib
└── physics.c # compiled to lib/mypackage/physics.dylib
```

View File

@@ -48,7 +48,7 @@ When `use('path')` is called from a package context, the shop resolves the modul
For a call like `use('sprite')` from package `myapp`:
1. **Own package** — `~/.pit/packages/myapp/sprite.cm` and C symbol `js_myapp_sprite_use`
2. **Aliased dependencies** — if `myapp/pit.toml` has `renderer = "gitea.pockle.world/john/renderer"`, checks `renderer/sprite.cm` and its C symbols
2. **Aliased dependencies** — if `myapp/cell.toml` has `renderer = "gitea.pockle.world/john/renderer"`, checks `renderer/sprite.cm` and its C symbols
3. **Core** — built-in core modules and internal C symbols
For calls without a package context (from core modules), only core is searched.

View File

@@ -93,6 +93,25 @@ String constants are interned in a data section. Integer constants are encoded i
pit --emit-qbe script.ce > output.ssa
```
## Boot Seeds
The `boot/` directory contains pre-compiled mcode IR (JSON) seed files for the pipeline modules:
```
boot/tokenize.cm.mcode
boot/parse.cm.mcode
boot/fold.cm.mcode
boot/mcode.cm.mcode
boot/streamline.cm.mcode
boot/bootstrap.cm.mcode
```
Seeds are used during cold start (empty cache) to compile the pipeline modules from source. The engine's `load_pipeline_module()` hashes the **source file** content — if the source changes, the hash changes, the cache misses, and the module is recompiled from source using the boot seeds. This means:
- Editing a pipeline module (e.g. `tokenize.cm`) takes effect on the next run automatically
- Seeds only need regenerating if the pipeline changes in a way the existing seeds can't compile the new source, or before distribution
- Use `pit seed` to regenerate all seeds, and `pit seed --clean` to also clear the build cache
## Files
| File | Role |

View File

@@ -1,166 +0,0 @@
// dump_stream.cm — show mcode IR before and after streamlining
//
// Usage: ./cell --core . dump_stream.cm <file.ce|file.cm>
var fd = use("fd")
var json = use("json")
var tokenize = use("tokenize")
var parse = use("parse")
var fold = use("fold")
var mcode = use("mcode")
var streamline = use("streamline")
if (length(args) < 1) {
print("usage: cell --core . dump_stream.cm <file>")
return
}
// Run the front half of the pipeline to get unoptimized mcode IR.
var filename = args[0]
var src = text(fd.slurp(filename))
var tok = tokenize(src, filename)
var ast = parse(tok.tokens, src, filename, tokenize)
var folded = fold(ast)
var compiled = mcode(folded)
// Deep copy IR for before snapshot
// (JSON round-trip — presumably streamline mutates `compiled` in place,
// which is why a snapshot is needed; confirm against streamline.)
var before = json.decode(json.encode(compiled))
var optimized = streamline(compiled)
// Left-justify s in a field of width w by appending spaces.
// Strings already w or wider come back unchanged.
var pad_right = function(s, w) {
    var padded = s
    var missing = w - length(s)
    var k = 0
    while (k < missing) {
        padded = padded + " "
        k = k + 1
    }
    return padded
}
// Render one IR operand for display: null -> "null", numbers via text(),
// strings quoted, objects/arrays as JSON, logicals as true/false, and
// anything else falls back to text(). Predicates are checked in the same
// order as the original sequential-return version.
var fmt_val = function(v) {
    var rendered = null
    if (is_null(v)) {
        rendered = "null"
    } else if (is_number(v)) {
        rendered = text(v)
    } else if (is_text(v)) {
        rendered = `"${v}"`
    } else if (is_object(v)) {
        rendered = json.encode(v)
    } else if (is_logical(v)) {
        rendered = v ? "true" : "false"
    } else {
        rendered = text(v)
    }
    return rendered
}
// Count instructions in one compiled function's instruction stream.
// The stream mixes two element kinds (see dump_function):
//   - text entries: "_nop_..." markers (eliminated instructions) or labels
//   - array entries: real encoded instructions, [op, ...operands]
// Returns {total, nops, real, calls}; calls counts "invoke" ops.
// NOTE(review): `total` is only incremented for array entries while
// `nops` counts text "_nop_" markers, so `real = total - nops` subtracts
// a marker count from a total that never included it — verify whether
// `real` should simply equal `total`, or whether markers were meant to
// be included in `total` as well.
var count_stats = function(func) {
var instrs = func.instructions
var total = 0
var nops = 0
var calls = 0
var i = 0
var instr = null
// A function without a body counts as zero everywhere.
if (instrs == null) {
return {total: 0, nops: 0, real: 0, calls: 0}
}
while (i < length(instrs)) {
instr = instrs[i]
if (is_text(instr)) {
if (starts_with(instr, "_nop_")) {
nops = nops + 1
}
} else if (is_array(instr)) {
total = total + 1
if (instr[0] == "invoke") {
calls = calls + 1
}
}
i = i + 1
}
return {total: total, nops: nops, real: total - nops, calls: calls}
}
// Pretty-print one function's instruction stream.
//   func      - compiled function record with .instructions
//   show_nops - when true, "_nop_" markers print as "--- nop ---" rows
//               and consume a pc slot; when false they are skipped
// Text entries that are not "_nop_" markers are treated as labels and
// printed with a trailing colon.
// NOTE(review): pc only advances for printed instruction rows, so with
// show_nops=false the displayed pcs are post-elimination positions, not
// original offsets — confirm that is the intended numbering.
var dump_function = function(func, show_nops) {
var instrs = func.instructions
var i = 0
var pc = 0
var instr = null
var op = null
var n = 0
var parts = null
var j = 0
var operands = null
var pc_str = null
var op_str = null
// Nothing to print for an empty or missing body.
if (instrs == null || length(instrs) == 0) {
return null
}
while (i < length(instrs)) {
instr = instrs[i]
if (is_text(instr)) {
if (starts_with(instr, "_nop_")) {
if (show_nops) {
print(` ${pad_right(text(pc), 5)} --- nop ---`)
pc = pc + 1
}
} else {
print(`${instr}:`)
}
} else if (is_array(instr)) {
op = instr[0]
n = length(instr)
parts = []
j = 1
// Format operands, stopping two entries short of the array end —
// the trailing pair is presumably debug/location metadata; confirm
// against the mcode encoder.
while (j < n - 2) {
push(parts, fmt_val(instr[j]))
j = j + 1
}
operands = text(parts, ", ")
pc_str = pad_right(text(pc), 5)
op_str = pad_right(op, 14)
print(` ${pc_str} ${op_str} ${operands}`)
pc = pc + 1
}
i = i + 1
}
return null
}
// Print a before/after comparison for one function: a header with arg and
// slot counts, instruction totals from count_stats for both versions, and
// then the streamlined body via dump_function (nop markers hidden).
var dump_pair = function(before_func, after_func, name) {
    var nr_args = 0
    var nr_slots = 0
    if (after_func.nr_args != null) {
        nr_args = after_func.nr_args
    }
    if (after_func.nr_slots != null) {
        nr_slots = after_func.nr_slots
    }
    var b_stats = count_stats(before_func)
    var a_stats = count_stats(after_func)
    var eliminated = a_stats.nops
    print(`\n=== ${name} (args=${text(nr_args)}, slots=${text(nr_slots)}) ===`)
    print(` before: ${text(b_stats.total)} instructions, ${text(b_stats.calls)} invokes`)
    print(` after: ${text(a_stats.real)} instructions (${text(eliminated)} eliminated), ${text(a_stats.calls)} invokes`)
    print("\n -- streamlined --")
    dump_function(after_func, false)
    return null
}
var main_name = null
var fi = 0
var func = null
var bfunc = null
var fname = null
// Dump main
if (optimized.main != null && before.main != null) {
main_name = optimized.name != null ? optimized.name : "<main>"
dump_pair(before.main, optimized.main, main_name)
}
// Dump sub-functions
// NOTE(review): pairs before.functions[fi] with optimized.functions[fi],
// i.e. assumes streamline neither adds nor removes functions so the two
// arrays stay index-aligned — confirm against streamline.
if (optimized.functions != null && before.functions != null) {
fi = 0
while (fi < length(optimized.functions)) {
func = optimized.functions[fi]
bfunc = before.functions[fi]
fname = func.name != null ? func.name : `<func_${text(fi)}>`
dump_pair(bfunc, func, `[${text(fi)}] ${fname}`)
fi = fi + 1
}
}

View File

@@ -1,6 +1,7 @@
// Hidden vars (os, actorsym, init, core_path, shop_path, json, args) come from env
// Engine is self-sufficient: defines its own compilation pipeline
var ACTORDATA = actorsym
var native_mode = false
var SYSYM = '__SYSTEM__'
var _cell = {}
@@ -216,13 +217,24 @@ var _program = null
var _user_args = []
var _j = 1
var _init = init
if (args != null && _init == null) {
// Inherit native_mode from init (set by C for --native, or by parent actor)
if (_init != null && _init.native_mode)
native_mode = true
// CLI path: convert args to init record
if (args != null && (_init == null || !_init.program)) {
_program = args[0]
while (_j < length(args)) {
push(_user_args, args[_j])
_j = _j + 1
}
_init = {program: _program, arg: _user_args}
if (_init == null) {
_init = {program: _program, arg: _user_args}
} else {
_init.program = _program
_init.arg = _user_args
}
}
use_cache['core/os'] = os
@@ -413,9 +425,11 @@ core_extras.content_hash = content_hash
core_extras.cache_path = cache_path
core_extras.ensure_build_dir = ensure_build_dir
core_extras.compile_to_blob = compile_to_blob
core_extras.native_mode = native_mode
// NOW load shop -- it receives all of the above via env
var shop = use_core('internal/shop')
if (native_mode) use_core('build')
var time = use_core('time')
var pronto = use_core('pronto')
@@ -690,6 +704,7 @@ $_.start = function start(cb, program) {
overling_id: oid,
root_id: root ? root[ACTORDATA].id : null,
program,
native_mode: native_mode,
}
greeters[id] = cb
push(message_queue, { startup })
@@ -1082,6 +1097,28 @@ $_.clock(_ => {
env.log = log
env = stone(env)
var native_build = null
var native_dylib_path = null
var native_handle = null
var native_parts = null
var native_basename = null
var native_sym = null
// Native execution path: compile to dylib and run
if (native_mode) {
native_build = use_core('build')
native_dylib_path = native_build.compile_native(prog_path, null, null, pkg)
native_handle = os.dylib_open(native_dylib_path)
native_parts = array(prog_path, '/')
native_basename = native_parts[length(native_parts) - 1]
native_sym = pkg ? shop.c_symbol_for_file(pkg, native_basename) : null
if (native_sym)
os.native_module_load_named(native_handle, native_sym, env)
else
os.native_module_load(native_handle, env)
return
}
var source_blob = fd.slurp(prog_path)
var hash = content_hash(source_blob)
var cached_path = cache_path(hash)

View File

@@ -303,7 +303,8 @@ var _default_policy = {
allow_dylib: true,
allow_static: true,
allow_mach: true,
allow_compile: true
allow_compile: true,
native: false
}
Shop.load_config = function() {
@@ -336,6 +337,7 @@ Shop.load_config = function() {
function get_policy() {
var config = Shop.load_config()
if (native_mode) config.policy.native = true
return config.policy
}
@@ -433,14 +435,37 @@ function detect_host_target() {
var host_target = detect_host_target()
// Check for a native .cm dylib at the deterministic lib path
// Returns the loaded module value, or null if no native dylib exists
// Returns a native descriptor {_native, _handle, _sym}, or null if no native dylib exists
// Also checks staleness: if source has changed, the content-addressed build artifact
// won't exist for the new hash, so the installed dylib is treated as stale.
function try_native_mod_dylib(pkg, stem) {
var dylib_path = get_dylib_path(pkg, stem)
var src_path = null
var src = null
var host = null
var hash = null
var tc_ext = null
var build_path = null
var handle = null
var sym = null
if (!fd.is_file(dylib_path)) return null
var handle = os.dylib_open(dylib_path)
// Staleness check: verify the content-addressed build artifact exists
src_path = get_packages_dir() + '/' + safe_package_path(pkg) + '/' + stem
if (fd.is_file(src_path)) {
src = text(fd.slurp(src_path))
host = detect_host_target()
hash = content_hash(src + '\n' + host + '\nnative')
tc_ext = dylib_ext
build_path = global_shop_path + '/build/' + hash + '.' + host + tc_ext
if (!fd.is_file(build_path)) return null
}
handle = os.dylib_open(dylib_path)
if (!handle) return null
var sym = Shop.c_symbol_for_file(pkg, stem)
return os.native_module_load_named(handle, sym)
sym = Shop.c_symbol_for_file(pkg, stem)
return {_native: true, _handle: handle, _sym: sym}
}
// Default capabilities injected into scripts
@@ -511,6 +536,10 @@ function resolve_mod_fn(path, pkg) {
var _pkg_dir = null
var _stem = null
var policy = null
var build_mod = null
var dylib_path = null
var handle = null
var sym = null
policy = get_policy()
@@ -525,8 +554,21 @@ function resolve_mod_fn(path, pkg) {
// Check for native .cm dylib at deterministic path first
if (policy.allow_dylib && pkg && _stem) {
native_result = try_native_mod_dylib(pkg, _stem)
if (native_result != null) {
return {_native: true, value: native_result}
if (native_result != null) return native_result
}
// Native compilation path: compile to native dylib instead of mach
if (policy.native && policy.allow_compile) {
build_mod = use_cache['core/build']
if (build_mod) {
dylib_path = build_mod.compile_native(path, null, null, pkg)
if (dylib_path) {
handle = os.dylib_open(dylib_path)
if (handle) {
sym = pkg && _stem ? Shop.c_symbol_for_file(pkg, _stem) : null
return {_native: true, _handle: handle, _sym: sym}
}
}
}
}
@@ -950,9 +992,16 @@ function execute_module(info)
var pkg = null
if (mod_resolve.scope < 900) {
// Check if native dylib was resolved
// Check if native dylib was resolved (descriptor with _handle and _sym)
if (is_object(mod_resolve.symbol) && mod_resolve.symbol._native) {
used = mod_resolve.symbol.value
file_info = Shop.file_info(mod_resolve.path)
inject = Shop.script_inject_for(file_info)
env = inject_env(inject)
pkg = file_info.package
env.use = make_use_fn(pkg)
env = stone(env)
used = os.native_module_load_named(
mod_resolve.symbol._handle, mod_resolve.symbol._sym, env)
} else {
// Build env with runtime fns, capabilities, and use function
file_info = Shop.file_info(mod_resolve.path)
@@ -1607,6 +1656,8 @@ Shop.load_as_dylib = function(path, pkg) {
var stem = null
var result = null
var real_pkg = pkg
var inject = null
var env = null
if (!locator) { print('Module ' + path + ' not found'); disrupt }
@@ -1621,7 +1672,15 @@ Shop.load_as_dylib = function(path, pkg) {
if (!starts_with(file_path, pkg_dir + '/')) return null
stem = text(file_path, length(pkg_dir) + 1)
result = try_native_mod_dylib(real_pkg, stem)
return result
if (!result) return null
// Build env and load the native module
if (!file_info) file_info = Shop.file_info(file_path)
inject = Shop.script_inject_for(file_info)
env = inject_env(inject)
env.use = make_use_fn(real_pkg)
env = stone(env)
return os.native_module_load_named(result._handle, result._sym, env)
}
Shop.audit_packages = function() {

View File

@@ -1,16 +1,18 @@
// run_native.ce — load a module both interpreted and native, compare speed
//
// Usage:
// cell --dev run_native.ce <module>
// cell --dev run_native <module>
//
// Loads <module>.cm via use() (interpreted) and <module>.cm.dylib (native),
// Loads <module>.cm via use() (interpreted) and via shop.use_native() (native),
// runs both and compares results and timing.
var os = use('os')
var fd = use('fd')
var shop = use('internal/shop')
if (length(args) < 1) {
print('usage: cell --dev run_native.ce <module>')
print(' e.g. cell --dev run_native.ce num_torture')
print('usage: cell --dev run_native <module>')
print(' e.g. cell --dev run_native num_torture')
return
}
@@ -19,11 +21,6 @@ if (ends_with(name, '.cm')) {
name = text(name, 0, length(name) - 3)
}
var safe = replace(replace(name, '/', '_'), '-', '_')
var symbol = 'js_' + safe + '_use'
var dylib_path = './' + name + '.cm.dylib'
var fd = use('fd')
// --- Test argument for function-returning modules ---
var test_arg = 5000000
if (length(args) > 1) {
@@ -48,44 +45,35 @@ print('result: ' + text(result_interp))
print('time: ' + text(ms_interp) + ' ms')
// --- Native run ---
if (!fd.is_file(dylib_path)) {
print('\nno ' + dylib_path + ' found — run compile.ce first')
// Resolve to .cm path for shop.use_native()
var mod_path = name + '.cm'
if (!fd.is_file(mod_path)) {
print('\nno ' + mod_path + ' found')
return
}
print('\n--- native ---')
var t3 = os.now()
var lib = os.dylib_open(dylib_path)
var mod_native = shop.use_native(mod_path)
var t4 = os.now()
var mod_native = os.dylib_symbol(lib, symbol)
var t5 = os.now()
var result_native = null
if (is_function(mod_native)) {
print('module returns a function, calling with ' + text(test_arg))
t4 = os.now()
t3 = os.now()
result_native = mod_native(test_arg)
t5 = os.now()
t4 = os.now()
}
result_native = result_native != null ? result_native : mod_native
var ms_load = (t4 - t3) / 1000000
var ms_exec = (t5 - t4) / 1000000
var ms_native = (t5 - t3) / 1000000
var ms_native = (t4 - t3) / 1000000
print('result: ' + text(result_native))
print('load: ' + text(ms_load) + ' ms')
print('exec: ' + text(ms_exec) + ' ms')
print('total: ' + text(ms_native) + ' ms')
print('time: ' + text(ms_native) + ' ms')
// --- Comparison ---
print('\n--- comparison ---')
var match = result_interp == result_native
var speedup = 0
var speedup_exec = 0
print('match: ' + text(match))
if (ms_native > 0) {
speedup = ms_interp / ms_native
print('speedup: ' + text(speedup) + 'x (total)')
}
if (ms_exec > 0) {
speedup_exec = ms_interp / ms_exec
print('speedup: ' + text(speedup_exec) + 'x (exec only)')
print('speedup: ' + text(speedup) + 'x')
}

116
seed.ce Normal file
View File

@@ -0,0 +1,116 @@
// seed.ce — regenerate boot seed files in boot/
//
// Usage:
//   pit seed              Regenerate all boot seeds
//   pit seed --clean      Also clear the build cache after
//
// Seeds are the pre-compiled mcode IR (JSON) files that bootstrap the
// compilation pipeline on cold start. They only need regenerating when:
//   - The pipeline source (tokenize, parse, fold, mcode, streamline) changes
//     in a way that the boot seeds can't compile the new source
//   - You want the seed files to match the current pipeline output
//     (e.g. before a release or distribution)
//
// The engine already recompiles pipeline modules from source when their
// content changes (content-addressed cache). Seeds are a fallback for
// cold start when no cache exists.
var fd = use("fd")
var json = use("json")
var os = use("os")
var shop = use("internal/shop")
var tokenize = use("tokenize")
var parse = use("parse")
var fold = use("fold")
var mcode = use("mcode")
var streamline = use("streamline")
var clean = false
var i = 0
for (i = 0; i < length(args); i++) {
    if (args[i] == '--clean') {
        clean = true
    } else if (args[i] == '--help' || args[i] == '-h') {
        print("usage: pit seed [--clean]")
        print("")
        print("  Regenerate boot seed files in boot/")
        print("  --clean    Also clear the build cache after")
        $stop()
    }
}
var core_dir = shop.get_package_dir('core')
var boot_dir = core_dir + '/boot'
var pipeline_modules = ['tokenize', 'parse', 'fold', 'mcode', 'streamline']
var generated = 0
// Compile one source file through the full pipeline and write its
// optimized mcode (JSON-encoded) to out_path.
//   label    - display name for progress messages (e.g. "tokenize.cm")
//   src_path - path to the .cm source to compile
//   out_path - destination seed file (boot/<name>.cm.mcode)
// Returns true on success, false when the source file is missing.
// (Previously this pipeline was duplicated verbatim for the pipeline
// modules and for bootstrap.cm — factored into one helper.)
var seed_one = function(label, src_path, out_path) {
    if (!fd.is_file(src_path)) {
        print('WARNING: source not found: ' + src_path)
        return false
    }
    print('Seeding ' + label + ' ...')
    var src = text(fd.slurp(src_path))
    var tok = tokenize(src, src_path)
    var ast = parse(tok.tokens, src, src_path, tokenize)
    var folded = fold(ast)
    var compiled = mcode(folded)
    var optimized = streamline(compiled)
    var mcode_json = json.encode(optimized)
    fd.slurpwrite(out_path, stone(blob(mcode_json)))
    print('  -> ' + out_path + ' (' + text(length(mcode_json)) + ' bytes)')
    return true
}
// Regenerate pipeline module seeds
var name = null
for (i = 0; i < length(pipeline_modules); i++) {
    name = pipeline_modules[i]
    if (seed_one(name + '.cm', core_dir + '/' + name + '.cm', boot_dir + '/' + name + '.cm.mcode')) {
        generated = generated + 1
    }
}
// Regenerate bootstrap.cm seed (lives under internal/, seeds next to the others)
if (seed_one('bootstrap.cm', core_dir + '/internal/bootstrap.cm', boot_dir + '/bootstrap.cm.mcode')) {
    generated = generated + 1
}
print('\nRegenerated ' + text(generated) + ' seed(s)')
if (clean) {
    var build_dir = shop.get_build_dir()
    if (fd.is_dir(build_dir)) {
        print('Clearing build cache: ' + build_dir)
        // NOTE(review): build_dir comes from shop configuration; if it could
        // ever contain a double quote this shell command would break — confirm.
        os.system('rm -rf "' + build_dir + '"')
        print('Build cache cleared. Next run will recompile from new seeds.')
    } else {
        print('No build cache to clear.')
    }
}
$stop()

View File

@@ -29,6 +29,7 @@ static int run_test_suite(size_t heap_size);
cell_rt *root_cell = NULL;
static char *shop_path = NULL;
static char *core_path = NULL;
static int native_mode = 0;
static JSRuntime *g_runtime = NULL;
// Compute blake2b hash of data and return hex string (caller must free)
@@ -434,6 +435,7 @@ static void print_usage(const char *prog)
printf(" --core <path> Set core path directly (overrides CELL_CORE)\n");
printf(" --shop <path> Set shop path (overrides CELL_SHOP)\n");
printf(" --dev Dev mode (shop=.cell, core=.)\n");
printf(" --native Use AOT native code instead of bytecode\n");
printf(" --heap <size> Initial heap size (e.g. 256MB, 1GB)\n");
printf(" --test [heap_size] Run C test suite\n");
printf(" -h, --help Show this help message\n");
@@ -510,6 +512,9 @@ int cell_init(int argc, char **argv)
if (lstat(".cell/packages/core", &lst) != 0)
symlink("../..", ".cell/packages/core");
arg_start++;
} else if (strcmp(argv[arg_start], "--native") == 0) {
native_mode = 1;
arg_start++;
} else {
break;
}
@@ -648,7 +653,16 @@ int cell_init(int argc, char **argv)
JS_SetPropertyStr(ctx, env_ref.val, "actorsym", JS_DupValue(ctx, cli_rt->actor_sym_ref.val));
tmp = js_core_json_use(ctx);
JS_SetPropertyStr(ctx, env_ref.val, "json", tmp);
JS_SetPropertyStr(ctx, env_ref.val, "init", JS_NULL);
if (native_mode) {
JSGCRef init_ref;
JS_AddGCRef(ctx, &init_ref);
init_ref.val = JS_NewObject(ctx);
JS_SetPropertyStr(ctx, init_ref.val, "native_mode", JS_NewBool(ctx, 1));
JS_SetPropertyStr(ctx, env_ref.val, "init", init_ref.val);
JS_DeleteGCRef(ctx, &init_ref);
} else {
JS_SetPropertyStr(ctx, env_ref.val, "init", JS_NULL);
}
JSGCRef args_ref;
JS_AddGCRef(ctx, &args_ref);
args_ref.val = JS_NewArray(ctx);

View File

@@ -5749,6 +5749,18 @@ exception:
return JS_EXCEPTION;
}
/* Return TRUE when `val` is already present on the JSON visit stack.
   The scan uses strict (identity) equality, so this detects a genuine
   circular reference rather than a structurally-equal duplicate. */
static BOOL json_stack_has (JSContext *ctx, JSValue stack, JSValue val) {
  if (!JS_IsArray (stack)) return FALSE;
  JSArray *arr = JS_VALUE_GET_ARRAY (stack);
  word_t idx = 0;
  while (idx < arr->len) {
    if (JS_StrictEq (ctx, arr->values[idx], val)) return TRUE;
    idx++;
  }
  return FALSE;
}
static int js_json_to_str (JSContext *ctx, JSONStringifyContext *jsc, JSValue holder, JSValue val, JSValue indent) {
JSValue v;
int64_t i, len;
@@ -5784,9 +5796,7 @@ static int js_json_to_str (JSContext *ctx, JSONStringifyContext *jsc, JSValue ho
if (mist_is_gc_object (
val_ref.val)) { /* includes arrays (OBJ_ARRAY) since they have JS_TAG_PTR */
v = js_array_includes (ctx, jsc->stack, 1, &val_ref.val);
if (JS_IsException (v)) goto exception;
if (JS_ToBool (ctx, v)) {
if (json_stack_has (ctx, jsc->stack, val_ref.val)) {
JS_ThrowTypeError (ctx, "circular reference");
goto exception;
}
@@ -5801,8 +5811,7 @@ static int js_json_to_str (JSContext *ctx, JSONStringifyContext *jsc, JSValue ho
sep_ref.val = jsc->empty;
sep1_ref.val = jsc->empty;
}
v = js_cell_push (ctx, jsc->stack, 1, &val_ref.val);
if (check_exception_free (ctx, v)) goto exception;
if (JS_ArrayPush (ctx, &jsc->stack, val_ref.val) < 0) goto exception;
ret = JS_IsArray (val_ref.val);
if (ret < 0) goto exception;
if (ret) {
@@ -5890,8 +5899,8 @@ static int js_json_to_str (JSContext *ctx, JSONStringifyContext *jsc, JSValue ho
}
JSC_B_PUTC (jsc, '}');
}
if (check_exception_free (ctx, js_cell_pop (ctx, jsc->stack, 0, NULL)))
goto exception;
v = JS_ArrayPop (ctx, jsc->stack);
if (JS_IsException (v)) goto exception;
goto done;
}
switch (JS_VALUE_GET_NORM_TAG (val_ref.val)) {

View File

@@ -1847,7 +1847,25 @@ TEST(is_integer_vs_number) {
/* JSON Tests */
TEST(json_encode_object) {
  /* Build an object with several properties and stringify with pretty=true.
     Both the object and the produced text are kept GC-rooted while in use
     (matching json_encode_large_object): under FORCE_GC_AT_MALLOC the
     allocation inside JS_ToCString can trigger a GC, and an unrooted result
     string would be collectible at that point. */
  JSGCRef obj_ref, str_ref;
  JS_PushGCRef(ctx, &obj_ref);
  JS_PushGCRef(ctx, &str_ref);
  obj_ref.val = JS_NewObject(ctx);
  JS_SetPropertyStr(ctx, obj_ref.val, "name", JS_NewString(ctx, "test"));
  JS_SetPropertyStr(ctx, obj_ref.val, "value", JS_NewInt32(ctx, 42));
  JS_SetPropertyStr(ctx, obj_ref.val, "active", JS_NewBool(ctx, 1));
  JS_SetPropertyStr(ctx, obj_ref.val, "tag", JS_NewString(ctx, "hello world"));
  JSValue space = JS_NewInt32(ctx, 2); /* int-tagged value, needs no rooting */
  str_ref.val = JS_JSONStringify(ctx, obj_ref.val, JS_NULL, space, 1);
  ASSERT(!JS_IsException(str_ref.val));
  ASSERT(JS_IsText(str_ref.val));
  const char *s = JS_ToCString(ctx, str_ref.val);
  ASSERT(s != NULL);
  ASSERT(strstr(s, "\"name\"") != NULL);
  ASSERT(strstr(s, "\"test\"") != NULL);
  ASSERT(strstr(s, "42") != NULL);
  JS_FreeCString(ctx, s);
  JS_PopGCRef(ctx, &str_ref);
  JS_PopGCRef(ctx, &obj_ref);
  return 1;
}
@@ -1867,7 +1885,98 @@ TEST(json_decode_object) {
}
TEST(json_roundtrip_array) {
  /* Stringify a heterogeneous array. The result text is kept GC-rooted while
     JS_ToCString / strstr run (see json_encode_large_object): the original
     popped arr_ref and used the unrooted string, which a GC triggered inside
     JS_ToCString could reclaim under FORCE_GC_AT_MALLOC. */
  JSGCRef arr_ref, str_ref;
  JS_PushGCRef(ctx, &arr_ref);
  JS_PushGCRef(ctx, &str_ref);
  arr_ref.val = JS_NewArray(ctx);
  JS_ArrayPush(ctx, &arr_ref.val, JS_NewInt32(ctx, 10));
  JS_ArrayPush(ctx, &arr_ref.val, JS_NewString(ctx, "two"));
  JS_ArrayPush(ctx, &arr_ref.val, JS_NewBool(ctx, 0));
  str_ref.val = JS_JSONStringify(ctx, arr_ref.val, JS_NULL, JS_NULL, 0);
  ASSERT(!JS_IsException(str_ref.val));
  ASSERT(JS_IsText(str_ref.val));
  const char *s = JS_ToCString(ctx, str_ref.val);
  ASSERT(s != NULL);
  ASSERT(strstr(s, "10") != NULL);
  ASSERT(strstr(s, "\"two\"") != NULL);
  ASSERT(strstr(s, "false") != NULL);
  JS_FreeCString(ctx, s);
  JS_PopGCRef(ctx, &str_ref);
  JS_PopGCRef(ctx, &arr_ref);
  return 1;
}
TEST(json_encode_large_object) {
  /* Stress test: an object with 50 string properties, pretty-printed.
     Under FORCE_GC_AT_MALLOC every allocation runs a GC cycle, so any
     missing root in the JSON encoder surfaces here. */
  JSGCRef obj_ref, str_ref;
  JS_PushGCRef(ctx, &obj_ref);
  JS_PushGCRef(ctx, &str_ref);
  obj_ref.val = JS_NewObject(ctx);
  char key[32], val[64];
  int n;
  for (n = 0; n < 50; n++) {
    snprintf(key, sizeof(key), "key_%d", n);
    snprintf(val, sizeof(val), "value_%d_with_some_padding_text", n);
    JS_SetPropertyStr(ctx, obj_ref.val, key, JS_NewString(ctx, val));
  }
  JSValue space = JS_NewInt32(ctx, 2);
  str_ref.val = JS_JSONStringify(ctx, obj_ref.val, JS_NULL, space, 1);
  ASSERT(!JS_IsException(str_ref.val));
  ASSERT(JS_IsText(str_ref.val));
  const char *out = JS_ToCString(ctx, str_ref.val);
  ASSERT(out != NULL);
  /* Spot-check the first and last keys made it into the output. */
  ASSERT(strstr(out, "\"key_0\"") != NULL);
  ASSERT(strstr(out, "\"key_49\"") != NULL);
  JS_FreeCString(ctx, out);
  /* Pop in reverse push order. */
  JS_PopGCRef(ctx, &str_ref);
  JS_PopGCRef(ctx, &obj_ref);
  return 1;
}
TEST(json_encode_nested) {
  /* Nested objects stress test: 20 outer properties, each a fresh 10-field
     inner object, pretty-printed. Exercises encoder recursion under GC. */
  JSGCRef outer_ref, inner_ref, str_ref;
  JS_PushGCRef(ctx, &outer_ref);
  JS_PushGCRef(ctx, &inner_ref);
  JS_PushGCRef(ctx, &str_ref);
  outer_ref.val = JS_NewObject(ctx);
  char key[32], val[64];
  for (int i = 0; i < 20; i++) {
    /* inner_ref is reused each iteration; the previous inner object stays
       alive because JS_SetPropertyStr stored it on outer. */
    inner_ref.val = JS_NewObject(ctx);
    for (int j = 0; j < 10; j++) {
      snprintf(key, sizeof(key), "f%d", j);
      snprintf(val, sizeof(val), "v_%d_%d", i, j);
      JS_SetPropertyStr(ctx, inner_ref.val, key, JS_NewString(ctx, val));
    }
    snprintf(key, sizeof(key), "obj_%d", i);
    JS_SetPropertyStr(ctx, outer_ref.val, key, inner_ref.val);
  }
  JSValue space = JS_NewInt32(ctx, 2);
  /* Result text is rooted (str_ref) while we read it below. */
  str_ref.val = JS_JSONStringify(ctx, outer_ref.val, JS_NULL, space, 1);
  ASSERT(!JS_IsException(str_ref.val));
  ASSERT(JS_IsText(str_ref.val));
  const char *s = JS_ToCString(ctx, str_ref.val);
  ASSERT(s != NULL);
  ASSERT(strstr(s, "\"obj_0\"") != NULL);
  ASSERT(strstr(s, "\"f0\"") != NULL);
  JS_FreeCString(ctx, s);
  /* Pop in reverse push order. */
  JS_PopGCRef(ctx, &str_ref);
  JS_PopGCRef(ctx, &inner_ref);
  JS_PopGCRef(ctx, &outer_ref);
  return 1;
}
TEST(json_circular_reference) {
  /* A self-referencing object must make the encoder throw instead of
     recursing forever. */
  JSGCRef root_ref;
  JS_PushGCRef(ctx, &root_ref);
  root_ref.val = JS_NewObject(ctx);
  JS_SetPropertyStr(ctx, root_ref.val, "name", JS_NewString(ctx, "root"));
  /* Tie the knot: root.self = root */
  JS_SetPropertyStr(ctx, root_ref.val, "self", root_ref.val);
  JSValue result = JS_JSONStringify(ctx, root_ref.val, JS_NULL, JS_NULL, 0);
  JS_PopGCRef(ctx, &root_ref);
  /* Expect a clean exception (circular reference), never a crash or hang. */
  ASSERT(JS_IsException(result));
  return 1;
}
@@ -2196,6 +2305,9 @@ int run_c_test_suite(JSContext *ctx)
RUN_TEST(json_encode_object);
RUN_TEST(json_decode_object);
RUN_TEST(json_roundtrip_array);
RUN_TEST(json_encode_large_object);
RUN_TEST(json_encode_nested);
RUN_TEST(json_circular_reference);
printf("\nSerialization - NOTA:\n");
RUN_TEST(nota_encode_int);

View File

@@ -1,6 +1,10 @@
// streamline.ce — run the full compile + optimize pipeline, output JSON
// streamline.ce — run the full compile + optimize pipeline
//
// Usage: ./cell --core . streamline.ce <file.ce|file.cm>
// Usage:
// pit streamline <file> Full optimized IR as JSON (default)
// pit streamline --stats <file> Summary stats per function
// pit streamline --ir <file> Human-readable IR
// pit streamline --check <file> Warnings only (e.g. high slot count)
var fd = use("fd")
var json = use("json")
@@ -9,11 +13,159 @@ var parse = use("parse")
var fold = use("fold")
var mcode = use("mcode")
var streamline = use("streamline")
var filename = args[0]
var show_stats = false
var show_ir = false
var show_check = false
var filename = null
var i = 0
for (i = 0; i < length(args); i++) {
if (args[i] == '--stats') {
show_stats = true
} else if (args[i] == '--ir') {
show_ir = true
} else if (args[i] == '--check') {
show_check = true
} else if (!starts_with(args[i], '-')) {
filename = args[i]
}
}
if (!filename) {
print("usage: pit streamline [--stats] [--ir] [--check] <file>")
$stop()
}
var src = text(fd.slurp(filename))
var result = tokenize(src, filename)
var ast = parse(result.tokens, src, filename, tokenize)
var folded = fold(ast)
var compiled = mcode(folded)
// Deep copy for before snapshot (needed by --stats)
var before = null
if (show_stats) {
before = json.decode(json.encode(compiled))
}
var optimized = streamline(compiled)
print(json.encode(optimized, true))
// If no flags, default to full JSON output
if (!show_stats && !show_ir && !show_check) {
print(json.encode(optimized, true))
$stop()
}
// --- Helpers ---
var ir_stats = use("ir_stats")
// Right-pad `s` with spaces until it is at least `w` characters wide.
var pad_right = function(s, w) {
    var padded = s
    while (length(padded) < w) {
        padded = padded + " "
    }
    return padded
}
// Count eliminated instructions: entries the optimizer rewrote into
// "_nop_*" text markers.
var count_nops = function(func) {
    var instrs = func.instructions
    if (instrs == null) return 0
    var total = 0
    var idx = 0
    for (idx = 0; idx < length(instrs); idx++) {
        if (is_text(instrs[idx]) && starts_with(instrs[idx], "_nop_")) {
            total = total + 1
        }
    }
    return total
}
// Print a per-function optimization summary: arg/slot usage, instruction
// mix by category, and — when a pre-optimization snapshot is available —
// a before/after instruction count.
var print_func_stats = function(func, before_func, name) {
    var nr_args = func.nr_args != null ? func.nr_args : 0
    var nr_slots = func.nr_slots != null ? func.nr_slots : 0
    var nr_close = func.nr_close_slots != null ? func.nr_close_slots : 0
    var stats = ir_stats.detailed_stats(func)
    var nops = count_nops(func)
    // before_func is a deep copy taken before streamline() ran (--stats only).
    var before_stats = before_func ? ir_stats.detailed_stats(before_func) : null
    var before_total = before_stats ? before_stats.instr : stats.instr
    print(` ${name}`)
    print(` args=${text(nr_args)} slots=${text(nr_slots)} close_slots=${text(nr_close)}`)
    print(` instructions: ${text(stats.instr)} total, ${text(nops)} nops eliminated`)
    if (before_stats) {
        print(` before: ${text(before_total)} after: ${text(stats.instr - nops)}`)
    }
    print(` load=${text(stats.load)} store=${text(stats.store)} branch=${text(stats.branch)} call=${text(stats.call)}`)
    print(` guard=${text(stats.guard)} arith=${text(stats.arith)} move=${text(stats.move)} const=${text(stats.const)}`)
    // Slot indices fit in a byte; warn well before the 255 hard limit.
    if (nr_slots > 200) {
        print(` WARNING: nr_slots=${text(nr_slots)} approaching 255 limit`)
    }
}
// Emit the canonical human-readable IR for one function (nops included).
var print_func_ir = function(func, name) {
    print(ir_stats.canonical_ir(func, name, {show_nops: true}))
}
// --check mode: warn when a function's slot count nears the 255 hard limit.
var check_func = function(func, name) {
    var nr_slots = func.nr_slots != null ? func.nr_slots : 0
    if (nr_slots <= 200) {
        return
    }
    print(`WARNING: ${name} has ${text(nr_slots)} slots (approaching 255 limit)`)
}
// --- Process functions ---
// --- Process functions ---
// Walk the main function plus every sub-function, applying whichever of
// --stats / --ir / --check modes were requested.
var main_name = optimized.name != null ? optimized.name : "<main>"
var fi = 0
var func = null
var bfunc = null
var fname = null
if (show_stats) {
    // FIX: header previously contained a garbled `$(unknown)` interpolation;
    // label the report with the input file instead.
    print(`\n--- Stats for ${filename} ---`)
}
// Main function
if (optimized.main != null) {
    if (show_stats) {
        print_func_stats(optimized.main, before ? before.main : null, main_name)
    }
    if (show_ir) {
        print_func_ir(optimized.main, main_name)
    }
    if (show_check) {
        check_func(optimized.main, main_name)
    }
}
// Sub-functions (indexes line up with `before.functions` — `before` is a
// deep copy of the same compiled unit).
if (optimized.functions != null) {
    fi = 0
    while (fi < length(optimized.functions)) {
        func = optimized.functions[fi]
        bfunc = before ? before.functions[fi] : null
        fname = func.name != null ? func.name : `<func_${text(fi)}>`
        if (show_stats) {
            print_func_stats(func, bfunc, fname)
        }
        if (show_ir) {
            print_func_ir(func, fname)
        }
        if (show_check) {
            check_func(func, fname)
        }
        fi = fi + 1
    }
}
if (show_stats) {
    print('---')
}
$stop()

View File

@@ -5011,6 +5011,95 @@ run("nested function used after definition", function() {
assert_eq(result[1], 'b = "hi"', "nested fn encode text")
})
// ============================================================================
// JSON ENCODING
// ============================================================================
def json = use("json")
run("json encode flat object", function() {
    // 500 properties exercise the encoder's property loop; roundtrip via
    // decode to confirm nothing was dropped.
    var obj = {}
    var idx = 0
    while (idx < 500) {
        obj[text(idx)] = "value_" + text(idx)
        idx = idx + 1
    }
    var result = json.encode(obj)
    assert_eq(is_text(result), true, "encode returns text")
    var decoded = json.decode(result)
    assert_eq(decoded["0"], "value_0", "first property survives roundtrip")
    assert_eq(decoded["499"], "value_499", "last property survives roundtrip")
})
run("json encode nested objects", function() {
    // 50 outer keys, each holding a 20-field inner object; values are
    // i*20+j, so the last one is 49*20+19 = 999.
    var outer = {}
    var oi = 0
    var ii = 0
    var inner = null
    while (oi < 50) {
        inner = {}
        ii = 0
        while (ii < 20) {
            inner[text(ii)] = oi * 20 + ii
            ii = ii + 1
        }
        outer[text(oi)] = inner
        oi = oi + 1
    }
    var decoded = json.decode(json.encode(outer))
    assert_eq(decoded["0"]["0"], 0, "nested first value")
    assert_eq(decoded["49"]["19"], 999, "nested last value")
})
run("json encode array", function() {
    // A mixed-type array roundtrip covers every scalar JSON value type.
    var values = [1, "two", true, null, 3.14]
    var decoded = json.decode(json.encode(values))
    assert_eq(decoded[0], 1, "array number")
    assert_eq(decoded[1], "two", "array text")
    assert_eq(decoded[2], true, "array logical")
    assert_eq(decoded[3], null, "array null")
    assert_eq(decoded[4], 3.14, "array float")
})
run("json circular reference detected", function() {
    // An object that references itself must disrupt, not loop forever.
    var node = {}
    node.name = "root"
    node.self = node
    if (!should_disrupt(function() { json.encode(node) })) {
        fail("circular reference not detected")
    }
})
run("json deeply nested circular reference", function() {
    // Cycle of length three: first -> second -> third -> first.
    var first = {}
    var second = {}
    var third = {}
    first.child = second
    second.child = third
    third.child = first
    if (!should_disrupt(function() { json.encode(first) })) {
        fail("deep circular reference not detected")
    }
})
run("json roundtrip preserves types", function() {
    // One of every JSON value type, encoded then decoded back.
    var sample = {
        "num": 42,
        "txt": "hello",
        "yes": true,
        "no": false,
        "nil": null,
        "arr": [1, 2, 3],
        "sub": {"a": 1}
    }
    var decoded = json.decode(json.encode(sample))
    assert_eq(decoded.num, 42, "number preserved")
    assert_eq(decoded.txt, "hello", "text preserved")
    assert_eq(decoded.yes, true, "true preserved")
    assert_eq(decoded.no, false, "false preserved")
    assert_eq(decoded.nil, null, "null preserved")
    assert_eq(decoded.arr[2], 3, "array preserved")
    assert_eq(decoded.sub.a, 1, "sub-object preserved")
})
// ============================================================================
// SUMMARY
// ============================================================================