tooling improvements

This commit is contained in:
2026-02-17 13:37:17 -06:00
parent 2e78e7e0b8
commit c02fbbd9e0
10 changed files with 371 additions and 186 deletions

View File

@@ -6,6 +6,7 @@
// cell build <locator> Build dynamic library for specific package
// cell build -t <target> Cross-compile dynamic libraries for target platform
// cell build -b <type> Build type: release (default), debug, or minsize
// cell build --verbose Print resolved flags, commands, and cache status
var build = use('build')
var shop = use('internal/shop')
@@ -15,6 +16,7 @@ var fd = use('fd')
var target = null
var target_package = null
var buildtype = 'release'
var verbose = false
var force_rebuild = false
var dry_run = false
var i = 0
@@ -55,6 +57,8 @@ for (i = 0; i < length(args); i++) {
}
} else if (args[i] == '--force') {
force_rebuild = true
} else if (args[i] == '--verbose' || args[i] == '-v') {
verbose = true
} else if (args[i] == '--dry-run') {
dry_run = true
} else if (args[i] == '--list-targets') {
@@ -104,7 +108,7 @@ if (target_package) {
// Build single package
log.console('Building ' + target_package + '...')
_build = function() {
lib = build.build_dynamic(target_package, target, buildtype)
lib = build.build_dynamic(target_package, target, buildtype, {verbose: verbose})
if (lib) {
log.console(`Built ${text(length(lib))} module(s)`)
}
@@ -116,7 +120,7 @@ if (target_package) {
} else {
// Build all packages
log.console('Building all packages...')
results = build.build_all_dynamic(target, buildtype)
results = build.build_all_dynamic(target, buildtype, {verbose: verbose})
success = 0
failed = 0

View File

@@ -172,6 +172,11 @@ Build.compile_file = function(pkg, file, target, opts) {
var cmd_str = text(cmd_parts, ' ')
if (_opts.verbose) {
print('[verbose] CFLAGS: ' + text(cflags, ' '))
print('[verbose] compile: ' + cmd_str)
}
// Content hash: command + file content
var file_content = fd.slurp(src_path)
var hash_input = cmd_str + '\n' + text(file_content)
@@ -183,8 +188,10 @@ Build.compile_file = function(pkg, file, target, opts) {
// Check if already compiled
if (fd.is_file(obj_path)) {
if (_opts.verbose) print('[verbose] cache hit: ' + file)
return obj_path
}
if (_opts.verbose) print('[verbose] cache miss: ' + file)
// Compile — capture stderr to detect missing-header vs real errors
var err_path = '/tmp/cell_build_err_' + hash + '.log'
@@ -308,6 +315,10 @@ Build.build_module_dylib = function(pkg, file, target, opts) {
var cmd_str = null
var ret = null
if (_opts.verbose) {
print('[verbose] LDFLAGS: ' + text(resolved_ldflags, ' '))
}
if (!fd.is_file(dylib_path)) {
cmd_parts = [cc, '-shared', '-fPIC']
@@ -340,6 +351,7 @@ Build.build_module_dylib = function(pkg, file, target, opts) {
push(cmd_parts, '"' + dylib_path + '"')
cmd_str = text(cmd_parts, ' ')
if (_opts.verbose) print('[verbose] link: ' + cmd_str)
log.console('Linking module ' + file + ' -> ' + fd.basename(dylib_path))
ret = os.system(cmd_str)
if (ret != 0) {
@@ -361,6 +373,7 @@ Build.build_module_dylib = function(pkg, file, target, opts) {
ensure_dir(install_dir)
var install_path = shop.get_lib_dir() + '/' + shop.lib_name_for_package(pkg) + '/' + file_stem + dylib_ext
fd.slurpwrite(install_path, fd.slurp(dylib_path))
if (_opts.verbose) print('[verbose] install: ' + install_path)
return dylib_path
}
@@ -368,9 +381,10 @@ Build.build_module_dylib = function(pkg, file, target, opts) {
// Build a dynamic library for a package (one dylib per C file)
// Returns array of {file, symbol, dylib} for each module
// Also writes a manifest mapping symbols to dylib paths
Build.build_dynamic = function(pkg, target, buildtype) {
Build.build_dynamic = function(pkg, target, buildtype, opts) {
var _target = target || Build.detect_host_target()
var _buildtype = buildtype || 'release'
var _opts = opts || {}
var c_files = pkg_tools.get_c_files(pkg, _target, true)
var results = []
@@ -382,13 +396,13 @@ Build.build_dynamic = function(pkg, target, buildtype) {
var sources = pkg_tools.get_sources(pkg)
var support_objects = []
arrfor(sources, function(src_file) {
var obj = Build.compile_file(pkg, src_file, _target, {buildtype: _buildtype, cflags: cached_cflags})
var obj = Build.compile_file(pkg, src_file, _target, {buildtype: _buildtype, cflags: cached_cflags, verbose: _opts.verbose})
push(support_objects, obj)
})
arrfor(c_files, function(file) {
var sym_name = shop.c_symbol_for_file(pkg, file)
var dylib = Build.build_module_dylib(pkg, file, _target, {buildtype: _buildtype, extra_objects: support_objects, cflags: cached_cflags})
var dylib = Build.build_module_dylib(pkg, file, _target, {buildtype: _buildtype, extra_objects: support_objects, cflags: cached_cflags, verbose: _opts.verbose})
if (dylib) {
push(results, {file: file, symbol: sym_name, dylib: dylib})
}
@@ -847,9 +861,10 @@ Build.generate_module_table = function(modules, output) {
// ============================================================================
// Build dynamic libraries for all installed packages
Build.build_all_dynamic = function(target, buildtype) {
Build.build_all_dynamic = function(target, buildtype, opts) {
var _target = target || Build.detect_host_target()
var _buildtype = buildtype || 'release'
var _opts = opts || {}
var packages = shop.list_packages()
var results = []
@@ -860,14 +875,14 @@ Build.build_all_dynamic = function(target, buildtype) {
// Build core first
if (find(packages, function(p) { return p == 'core' }) != null) {
core_mods = Build.build_dynamic('core', _target, _buildtype)
core_mods = Build.build_dynamic('core', _target, _buildtype, _opts)
push(results, {package: 'core', modules: core_mods})
}
// Build other packages
arrfor(packages, function(pkg) {
if (pkg == 'core') return
var pkg_mods = Build.build_dynamic(pkg, _target, _buildtype)
var pkg_mods = Build.build_dynamic(pkg, _target, _buildtype, _opts)
push(results, {package: pkg, modules: pkg_mods})
})

View File

@@ -65,6 +65,7 @@ pit build -b debug # build type: release (default), debug, minsiz
pit build --list-targets # list available targets
pit build --force # force rebuild
pit build --dry-run # show what would be built
pit build --verbose # print resolved flags, commands, cache status
```
### pit test
@@ -330,9 +331,14 @@ pit mcode <file.cm>
Apply the full optimization pipeline to a source file and output optimized mcode as JSON.
```bash
pit streamline <file.cm>
pit streamline <file.cm> # full optimized IR as JSON (default)
pit streamline --stats <file.cm> # summary stats per function
pit streamline --ir <file.cm> # human-readable IR
pit streamline --check <file.cm> # warnings only (e.g. high slot count)
```
Flags can be combined. `--stats` output includes function name, args, slots, instruction counts by category, and nops eliminated. `--check` warns when `nr_slots > 200` (approaching the 255 limit).
### pit qbe
Compile a source file to QBE intermediate language (for native code generation).
@@ -367,6 +373,17 @@ Ahead-of-time compile and execute a program natively.
pit run_aot <program.ce>
```
### pit seed
Regenerate the boot seed files in `boot/`. Seeds are pre-compiled mcode IR (JSON) that bootstrap the compilation pipeline on cold start. They only need regenerating when the pipeline source changes in a way the existing seeds can't compile, or before distribution.
```bash
pit seed # regenerate all boot seeds
pit seed --clean # also clear the build cache after
```
The engine recompiles pipeline modules automatically when source changes (via content-addressed cache). Seeds are a fallback for cold start when no cache exists.
### Analysis
### pit explain

View File

@@ -81,9 +81,37 @@ Shows the optimized IR with type annotations. Each instruction is followed by th
Runs the full pipeline (tokenize, parse, fold, mcode, streamline) and outputs the optimized IR as JSON. Useful for piping to `jq` or saving for comparison.
```bash
./cell --core . streamline.ce <file.ce|file.cm>
./cell --core . streamline.ce <file.ce|file.cm> # full JSON (default)
./cell --core . streamline.ce --stats <file.ce|file.cm> # summary stats per function
./cell --core . streamline.ce --ir <file.ce|file.cm> # human-readable IR
./cell --core . streamline.ce --check <file.ce|file.cm> # warnings only
```
| Flag | Description |
|------|-------------|
| (none) | Full optimized IR as JSON (backward compatible) |
| `--stats` | Per-function summary: args, slots, instruction counts by category, nops eliminated |
| `--ir` | Human-readable canonical IR (same format as `ir_report.ce`) |
| `--check` | Warnings only (e.g. `nr_slots > 200` approaching 255 limit) |
Flags can be combined.
## seed.ce
Regenerates the boot seed files in `boot/`. These are pre-compiled mcode IR (JSON) files that bootstrap the compilation pipeline on cold start.
```bash
./cell --core . seed.ce # regenerate all boot seeds
./cell --core . seed.ce --clean # also clear the build cache after
```
The script compiles each pipeline module (tokenize, parse, fold, mcode, streamline) and `internal/bootstrap.cm` through the current pipeline, encodes the output as JSON, and writes it to `boot/<name>.cm.mcode`.
**When to regenerate seeds:**
- Before a release or distribution
- When the pipeline source changes in a way the existing seeds can't compile the new source (e.g. language-level changes)
- Seeds do NOT need regenerating for normal development — the engine recompiles pipeline modules from source automatically via the content-addressed cache
## ir_report.ce
The optimizer flight recorder. Runs the full pipeline with structured logging and outputs machine-readable, diff-friendly JSON. This is the most detailed tool for understanding what the optimizer did and why.

View File

@@ -9,11 +9,11 @@ Packages are the fundamental unit of code organization and sharing in ƿit.
## Package Structure
A package is a directory containing a `pit.toml` manifest:
A package is a directory containing a `cell.toml` manifest:
```
mypackage/
├── pit.toml # package manifest
├── cell.toml # package manifest
├── main.ce # entry point (optional)
├── utils.cm # module
├── helper/
@@ -23,7 +23,7 @@ mypackage/
└── helpers.cm # private module (internal/ only)
```
## pit.toml
## cell.toml
The package manifest declares metadata and dependencies:
@@ -47,7 +47,7 @@ mylib = "/Users/john/work/mylib"
When importing with `use()`, ƿit searches in order:
1. **Local package** — relative to package root
2. **Dependencies** — via aliases in `pit.toml`
2. **Dependencies** — via aliases in `cell.toml`
3. **Core** — built-in ƿit modules
```javascript
@@ -179,7 +179,7 @@ C files in a package are compiled into per-file dynamic libraries:
```
mypackage/
├── pit.toml
├── cell.toml
├── render.c # compiled to lib/mypackage/render.dylib
└── physics.c # compiled to lib/mypackage/physics.dylib
```

View File

@@ -48,7 +48,7 @@ When `use('path')` is called from a package context, the shop resolves the modul
For a call like `use('sprite')` from package `myapp`:
1. **Own package** — `~/.pit/packages/myapp/sprite.cm` and C symbol `js_myapp_sprite_use`
2. **Aliased dependencies** — if `myapp/pit.toml` has `renderer = "gitea.pockle.world/john/renderer"`, checks `renderer/sprite.cm` and its C symbols
2. **Aliased dependencies** — if `myapp/cell.toml` has `renderer = "gitea.pockle.world/john/renderer"`, checks `renderer/sprite.cm` and its C symbols
3. **Core** — built-in core modules and internal C symbols
For calls without a package context (from core modules), only core is searched.

View File

@@ -93,6 +93,25 @@ String constants are interned in a data section. Integer constants are encoded i
pit --emit-qbe script.ce > output.ssa
```
## Boot Seeds
The `boot/` directory contains pre-compiled mcode IR (JSON) seed files for the pipeline modules:
```
boot/tokenize.cm.mcode
boot/parse.cm.mcode
boot/fold.cm.mcode
boot/mcode.cm.mcode
boot/streamline.cm.mcode
boot/bootstrap.cm.mcode
```
Seeds are used during cold start (empty cache) to compile the pipeline modules from source. The engine's `load_pipeline_module()` hashes the **source file** content — if the source changes, the hash changes, the cache misses, and the module is recompiled from source using the boot seeds. This means:
- Editing a pipeline module (e.g. `tokenize.cm`) takes effect on the next run automatically
- Seeds only need regenerating if the pipeline changes in a way the existing seeds can't compile the new source, or before distribution
- Use `pit seed` to regenerate all seeds, and `pit seed --clean` to also clear the build cache
## Files
| File | Role |

View File

@@ -1,166 +0,0 @@
// dump_stream.cm — show mcode IR before and after streamlining
//
// Usage: ./cell --core . dump_stream.cm <file.ce|file.cm>
var fd = use("fd")
var json = use("json")
var tokenize = use("tokenize")
var parse = use("parse")
var fold = use("fold")
var mcode = use("mcode")
var streamline = use("streamline")
// Require exactly one argument: the source file to compile and dump.
if (length(args) < 1) {
print("usage: cell --core . dump_stream.cm <file>")
return
}
var filename = args[0]
var src = text(fd.slurp(filename))
// Run the compile pipeline: tokenize -> parse -> fold -> mcode.
var tok = tokenize(src, filename)
var ast = parse(tok.tokens, src, filename, tokenize)
var folded = fold(ast)
var compiled = mcode(folded)
// Deep copy IR for before snapshot
// (round-trip through JSON so streamline cannot mutate the snapshot)
var before = json.decode(json.encode(compiled))
var optimized = streamline(compiled)
// Right-pad string s with spaces until it is at least w characters wide.
// Strings already >= w characters are returned unchanged.
var pad_right = function(s, w) {
    var missing = w - length(s)
    var out = s
    while (missing > 0) {
        out = out + " "
        missing = missing - 1
    }
    return out
}
// Render one IR operand for display: null/number/logical as plain text,
// text values quoted, objects serialized as JSON. First matching
// predicate wins.
var fmt_val = function(v) {
if (is_null(v)) {
return "null"
}
if (is_number(v)) {
return text(v)
}
if (is_text(v)) {
return `"${v}"`
}
if (is_object(v)) {
return json.encode(v)
}
if (is_logical(v)) {
return v ? "true" : "false"
}
// Fallback for any other runtime type — TODO confirm what reaches here.
return text(v)
}
// Tally one function's mcode IR entries.
// Text entries are labels; labels starting with "_nop_" mark eliminated
// slots. Array entries are real instructions; "invoke" opcodes count as
// calls. Returns {total, nops, real, calls}.
var count_stats = function(func) {
    var instrs = func.instructions
    if (instrs == null) {
        return {total: 0, nops: 0, real: 0, calls: 0}
    }
    var total = 0
    var nops = 0
    var calls = 0
    var entry = null
    var k = 0
    for (k = 0; k < length(instrs); k++) {
        entry = instrs[k]
        if (is_text(entry)) {
            if (starts_with(entry, "_nop_")) {
                nops = nops + 1
            }
        } else if (is_array(entry)) {
            total = total + 1
            if (entry[0] == "invoke") {
                calls = calls + 1
            }
        }
    }
    return {total: total, nops: nops, real: total - nops, calls: calls}
}
// Print one function's instruction listing.
// Text entries are labels; labels starting with "_nop_" are eliminated
// slots and are only printed (and counted toward the pc) when show_nops
// is true — so the default listing numbers post-elimination instructions.
var dump_function = function(func, show_nops) {
var instrs = func.instructions
var i = 0
var pc = 0
var instr = null
var op = null
var n = 0
var parts = null
var j = 0
var operands = null
var pc_str = null
var op_str = null
// Nothing to print for an empty or absent instruction stream.
if (instrs == null || length(instrs) == 0) {
return null
}
while (i < length(instrs)) {
instr = instrs[i]
if (is_text(instr)) {
if (starts_with(instr, "_nop_")) {
if (show_nops) {
print(` ${pad_right(text(pc), 5)} --- nop ---`)
pc = pc + 1
}
} else {
print(`${instr}:`)
}
} else if (is_array(instr)) {
// Instruction shape: [op, operand...]; the last two elements are
// skipped (j < n - 2) — presumably metadata such as source
// location, TODO confirm against the mcode format.
op = instr[0]
n = length(instr)
parts = []
j = 1
while (j < n - 2) {
push(parts, fmt_val(instr[j]))
j = j + 1
}
operands = text(parts, ", ")
pc_str = pad_right(text(pc), 5)
op_str = pad_right(op, 14)
print(` ${pc_str} ${op_str} ${operands}`)
pc = pc + 1
}
i = i + 1
}
return null
}
// Print a before/after comparison header for one function, then the
// streamlined instruction listing (nops hidden).
var dump_pair = function(before_func, after_func, name) {
var nr_args = after_func.nr_args != null ? after_func.nr_args : 0
var nr_slots = after_func.nr_slots != null ? after_func.nr_slots : 0
var b_stats = count_stats(before_func)
var a_stats = count_stats(after_func)
// nop labels in the "after" IR mark instructions the optimizer removed
var eliminated = a_stats.nops
print(`\n=== ${name} (args=${text(nr_args)}, slots=${text(nr_slots)}) ===`)
print(` before: ${text(b_stats.total)} instructions, ${text(b_stats.calls)} invokes`)
print(` after: ${text(a_stats.real)} instructions (${text(eliminated)} eliminated), ${text(a_stats.calls)} invokes`)
print("\n -- streamlined --")
dump_function(after_func, false)
return null
}
// --- Driver: dump main, then each sub-function ---
var main_name = null
var fi = 0
var func = null
var bfunc = null
var fname = null
// Dump main
if (optimized.main != null && before.main != null) {
main_name = optimized.name != null ? optimized.name : "<main>"
dump_pair(before.main, optimized.main, main_name)
}
// Dump sub-functions
// NOTE(review): before/after function arrays are assumed index-aligned
// (i.e. streamline preserves function order) — confirm if that changes.
if (optimized.functions != null && before.functions != null) {
fi = 0
while (fi < length(optimized.functions)) {
func = optimized.functions[fi]
bfunc = before.functions[fi]
fname = func.name != null ? func.name : `<func_${text(fi)}>`
dump_pair(bfunc, func, `[${text(fi)}] ${fname}`)
fi = fi + 1
}
}

116
seed.ce Normal file
View File

@@ -0,0 +1,116 @@
// seed.ce — regenerate boot seed files in boot/
//
// Usage:
//   pit seed              Regenerate all boot seeds
//   pit seed --clean      Also clear the build cache after
//
// Seeds are the pre-compiled mcode IR (JSON) files that bootstrap the
// compilation pipeline on cold start. They only need regenerating when:
//   - The pipeline source (tokenize, parse, fold, mcode, streamline) changes
//     in a way that the boot seeds can't compile the new source
//   - You want the seed files to match the current pipeline output
//     (e.g. before a release or distribution)
//
// The engine already recompiles pipeline modules from source when their
// content changes (content-addressed cache). Seeds are a fallback for
// cold start when no cache exists.
var fd = use("fd")
var json = use("json")
var os = use("os")
var shop = use("internal/shop")
var tokenize = use("tokenize")
var parse = use("parse")
var fold = use("fold")
var mcode = use("mcode")
var streamline = use("streamline")
var clean = false
var i = 0
for (i = 0; i < length(args); i++) {
    if (args[i] == '--clean') {
        clean = true
    } else if (args[i] == '--help' || args[i] == '-h') {
        print("usage: pit seed [--clean]")
        print("")
        print("  Regenerate boot seed files in boot/")
        print("  --clean    Also clear the build cache after")
        $stop()
    }
}
var core_dir = shop.get_package_dir('core')
var boot_dir = core_dir + '/boot'
// Compile one source file through the full pipeline and write its
// optimized mcode IR as JSON to boot_dir/<out_name>. This replaces the
// compile-encode-write sequence that was previously duplicated for the
// pipeline modules and for bootstrap.cm. Caller checks file existence
// and prints the "Seeding ..." line.
var seed_source = function(src_path, out_name) {
    var src = text(fd.slurp(src_path))
    var tok = tokenize(src, src_path)
    var ast = parse(tok.tokens, src, src_path, tokenize)
    var folded = fold(ast)
    var compiled = mcode(folded)
    var optimized = streamline(compiled)
    var mcode_json = json.encode(optimized)
    var out_path = boot_dir + '/' + out_name
    fd.slurpwrite(out_path, stone(blob(mcode_json)))
    print(' -> ' + out_path + ' (' + text(length(mcode_json)) + ' bytes)')
    return null
}
var pipeline_modules = ['tokenize', 'parse', 'fold', 'mcode', 'streamline']
var generated = 0
var name = null
var src_path = null
// Regenerate pipeline module seeds
for (i = 0; i < length(pipeline_modules); i++) {
    name = pipeline_modules[i]
    src_path = core_dir + '/' + name + '.cm'
    if (!fd.is_file(src_path)) {
        print('WARNING: source not found: ' + src_path)
        continue
    }
    print('Seeding ' + name + '.cm ...')
    seed_source(src_path, name + '.cm.mcode')
    generated = generated + 1
}
// Regenerate bootstrap.cm seed
var bootstrap_path = core_dir + '/internal/bootstrap.cm'
if (fd.is_file(bootstrap_path)) {
    print('Seeding bootstrap.cm ...')
    seed_source(bootstrap_path, 'bootstrap.cm.mcode')
    generated = generated + 1
} else {
    print('WARNING: bootstrap source not found: ' + bootstrap_path)
}
print('\nRegenerated ' + text(generated) + ' seed(s)')
if (clean) {
    var build_dir = shop.get_build_dir()
    if (fd.is_dir(build_dir)) {
        print('Clearing build cache: ' + build_dir)
        os.system('rm -rf "' + build_dir + '"')
        print('Build cache cleared. Next run will recompile from new seeds.')
    } else {
        print('No build cache to clear.')
    }
}
$stop()

View File

@@ -1,6 +1,10 @@
// streamline.ce — run the full compile + optimize pipeline, output JSON
// streamline.ce — run the full compile + optimize pipeline
//
// Usage: ./cell --core . streamline.ce <file.ce|file.cm>
// Usage:
// pit streamline <file> Full optimized IR as JSON (default)
// pit streamline --stats <file> Summary stats per function
// pit streamline --ir <file> Human-readable IR
// pit streamline --check <file> Warnings only (e.g. high slot count)
var fd = use("fd")
var json = use("json")
@@ -9,11 +13,159 @@ var parse = use("parse")
var fold = use("fold")
var mcode = use("mcode")
var streamline = use("streamline")
var filename = args[0]
// Flag parsing: --stats/--ir/--check may be combined; any argument not
// starting with '-' is taken as the input file (a later bare argument
// overrides an earlier one).
var show_stats = false
var show_ir = false
var show_check = false
var filename = null
var i = 0
var arg = null
while (i < length(args)) {
    arg = args[i]
    if (arg == '--stats') {
        show_stats = true
    } else if (arg == '--ir') {
        show_ir = true
    } else if (arg == '--check') {
        show_check = true
    } else if (!starts_with(arg, '-')) {
        filename = arg
    }
    i = i + 1
}
// No input file given — print usage and stop.
if (!filename) {
    print("usage: pit streamline [--stats] [--ir] [--check] <file>")
    $stop()
}
var src = text(fd.slurp(filename))
var result = tokenize(src, filename)
var ast = parse(result.tokens, src, filename, tokenize)
var folded = fold(ast)
var compiled = mcode(folded)
// Deep copy for before snapshot (needed by --stats)
var before = null
if (show_stats) {
before = json.decode(json.encode(compiled))
}
var optimized = streamline(compiled)
print(json.encode(optimized, true))
// If no flags, default to full JSON output
if (!show_stats && !show_ir && !show_check) {
print(json.encode(optimized, true))
$stop()
}
// --- Helpers ---
var ir_stats = use("ir_stats")
// (A local pad_right helper was previously defined here but never
// referenced anywhere in this script — removed as dead code.)
// Count "_nop_" label markers in a function's instruction stream; these
// mark slots the optimizer eliminated.
var count_nops = function(func) {
    var instrs = func.instructions
    var nops = 0
    var i = 0
    if (instrs == null) return 0
    while (i < length(instrs)) {
        if (is_text(instrs[i]) && starts_with(instrs[i], "_nop_")) {
            nops = nops + 1
        }
        i = i + 1
    }
    return nops
}
// --stats mode: print one function's summary — arg/slot counts,
// instruction totals by category, and a before/after line when a
// pre-optimization snapshot is available.
var print_func_stats = function(func, before_func, name) {
    var nr_args = func.nr_args != null ? func.nr_args : 0
    var nr_slots = func.nr_slots != null ? func.nr_slots : 0
    var nr_close = func.nr_close_slots != null ? func.nr_close_slots : 0
    var stats = ir_stats.detailed_stats(func)
    var nops = count_nops(func)
    var before_stats = before_func ? ir_stats.detailed_stats(before_func) : null
    var before_total = before_stats ? before_stats.instr : stats.instr
    print(` ${name}`)
    print(` args=${text(nr_args)} slots=${text(nr_slots)} close_slots=${text(nr_close)}`)
    print(` instructions: ${text(stats.instr)} total, ${text(nops)} nops eliminated`)
    if (before_stats) {
        print(` before: ${text(before_total)} after: ${text(stats.instr - nops)}`)
    }
    print(` load=${text(stats.load)} store=${text(stats.store)} branch=${text(stats.branch)} call=${text(stats.call)}`)
    print(` guard=${text(stats.guard)} arith=${text(stats.arith)} move=${text(stats.move)} const=${text(stats.const)}`)
    // Slot count is limited to 255 per the docs — warn when close to it.
    if (nr_slots > 200) {
        print(` WARNING: nr_slots=${text(nr_slots)} approaching 255 limit`)
    }
}
// --ir mode: print the human-readable canonical IR for one function.
var print_func_ir = function(func, name) {
    var ir_text = ir_stats.canonical_ir(func, name, {show_nops: true})
    print(ir_text)
}
// --check mode: warn only when slot usage approaches the 255 limit.
var check_func = function(func, name) {
    var nr_slots = func.nr_slots != null ? func.nr_slots : 0
    if (nr_slots > 200) {
        print(`WARNING: ${name} has ${text(nr_slots)} slots (approaching 255 limit)`)
    }
}
// --- Process functions ---
var main_name = optimized.name != null ? optimized.name : "<main>"
var fi = 0
var func = null
var bfunc = null
var fname = null
if (show_stats) {
    // FIX: previously printed the literal text "$(unknown)" — this
    // dialect interpolates with ${...} (as used everywhere else in the
    // file), so the old header never substituted anything. Show the
    // input filename instead.
    print(`\n--- Stats for ${filename} ---`)
}
// Main function
if (optimized.main != null) {
    if (show_stats) {
        print_func_stats(optimized.main, before ? before.main : null, main_name)
    }
    if (show_ir) {
        print_func_ir(optimized.main, main_name)
    }
    if (show_check) {
        check_func(optimized.main, main_name)
    }
}
// Sub-functions (before/after arrays are index-aligned; `before` is
// only populated for --stats, so bfunc may be null otherwise)
if (optimized.functions != null) {
    fi = 0
    while (fi < length(optimized.functions)) {
        func = optimized.functions[fi]
        bfunc = before ? before.functions[fi] : null
        fname = func.name != null ? func.name : `<func_${text(fi)}>`
        if (show_stats) {
            print_func_stats(func, bfunc, fname)
        }
        if (show_ir) {
            print_func_ir(func, fname)
        }
        if (show_check) {
            check_func(func, fname)
        }
        fi = fi + 1
    }
}
if (show_stats) {
    print('---')
}
$stop()