109 lines
3.0 KiB
Plaintext
109 lines
3.0 KiB
Plaintext
// Minimal bootstrap — seeds the content-addressed cache
|
|
// Only runs on cold start (C runtime couldn't find engine in cache)
|
|
// Hidden vars: os, core_path, shop_path
|
|
// Capture the loader once; all embed lookups funnel through this one binding.
var load_internal = os.load_internal

// Resolve a module embedded in the runtime by short name.
// Embeds are registered under the "js_core_<name>_use" key.
function use_embed(name) {
  var embed_key = "js_core_" + name + "_use"
  return load_internal(embed_key)
}
|
|
|
|
// Filesystem primitives (is_file, is_dir, mkdir, slurp, slurpwrite)
var fd = use_embed('internal_fd')
// JSON encoder used to serialize compiled mcode before machine compilation
var json_mod = use_embed('json')
// Hashing (blake2) for content-addressing cached builds
var crypto = use_embed('crypto')
|
|
|
|
// Content-address some source: blake2 digest rendered as text ('h' —
// presumably hex; confirm against the crypto embed).
// Blob input is hashed directly; anything else is converted to text and
// wrapped in a frozen blob first.
function content_hash(content) {
  if (is_blob(content)) {
    return text(crypto.blake2(content), 'h')
  }
  var frozen = stone(blob(text(content)))
  return text(crypto.blake2(frozen), 'h')
}
|
|
|
|
// Map a content hash to its slot in the shop build cache.
// Yields null when shop_path is unset (caching disabled).
function cache_path(hash) {
  if (!shop_path) return null
  return `${shop_path}/build/${hash}`
}
|
|
|
|
// Create <shop_path>/build if it does not exist yet; returns the directory
// path, or null when shop_path is unset.
function ensure_build_dir() {
  if (!shop_path) return null
  var build_dir = shop_path + '/build'
  if (!fd.is_dir(build_dir)) {
    fd.mkdir(build_dir)
  }
  return build_dir
}
|
|
|
|
// Load seed pipeline from boot/
|
|
// Load one seed pipeline stage from core/boot/<name>.cm.mcode:
// slurp the mcode, machine-compile it, and instantiate it with the embed
// resolver. Halts the bootstrap (disrupt) when the seed file is missing.
function boot_load(name) {
  var seed_path = core_path + '/boot/' + name + '.cm.mcode'
  if (!fd.is_file(seed_path)) {
    print("error: missing seed: " + name + "\n")
    disrupt
  }
  var seed_blob = fd.slurp(seed_path)
  var machine = mach_compile_mcode_bin(name, text(seed_blob))
  return mach_load(machine, stone({use: use_embed}))
}
|
|
|
|
// Instantiate the seed pipeline stages, in pipeline order.
var tokenize_mod = boot_load("tokenize")      // source text -> {tokens, ...}
var parse_mod = boot_load("parse")            // tokens -> ast (with .errors)
var fold_mod = boot_load("fold")              // ast -> folded ast
var mcode_mod = boot_load("mcode")            // folded ast -> mcode
var streamline_mod = boot_load("streamline")  // mcode -> optimized mcode
|
|
|
|
// Front half of the pipeline: tokenize, parse, and fold one source file.
// On parse errors, prints each one (file:line:col when position is known)
// and halts the bootstrap via disrupt; otherwise returns the folded ast.
function analyze(src, filename) {
  var tok_result = tokenize_mod(src, filename)
  var ast = parse_mod(tok_result.tokens, src, filename, tokenize_mod)
  var _i = 0
  var e = null
  var msg = null
  var has_errors = ast.errors != null && length(ast.errors) > 0
  if (has_errors) {
    while (_i < length(ast.errors)) {
      e = ast.errors[_i]
      msg = e.message
      // Bug fix: previously printed the literal text "$(unknown)" — the
      // `$(...)` form is not interpolation (this file uses `${...}`) and the
      // filename parameter went unused. Also terminate each message with
      // "\n", matching every other print in this file.
      if (e.line != null && e.column != null)
        print(`${filename}:${text(e.line)}:${text(e.column)}: error: ${msg}\n`)
      else
        print(`${filename}: error: ${msg}\n`)
      _i = _i + 1
    }
    disrupt
  }
  return fold_mod(ast)
}
|
|
|
|
// Compile one source file through the full pipeline and store the machine
// blob in the build cache, keyed by the blake2 content hash of the source.
// No-op when the cache already holds that hash; compiles but skips the
// write when caching is disabled (no shop_path).
function compile_and_cache(name, source_path) {
  var src_blob = fd.slurp(source_path)
  var key = content_hash(src_blob)
  var target = cache_path(key)
  if (target && fd.is_file(target)) return

  var folded = analyze(text(src_blob), source_path)
  var optimized = streamline_mod(mcode_mod(folded))
  var encoded = json_mod.encode(optimized)
  var machine = mach_compile_mcode_bin(name, encoded)

  if (target) {
    ensure_build_dir()
    fd.slurpwrite(target, machine)
  }
}
|
|
|
|
// Seed the cache with everything engine needs
|
|
// Everything the engine needs at runtime, compiled from core sources.
var seed_files = [
  {name: "tokenize", path: "tokenize.cm"},
  {name: "parse", path: "parse.cm"},
  {name: "fold", path: "fold.cm"},
  {name: "mcode", path: "mcode.cm"},
  {name: "streamline", path: "streamline.cm"},
  {name: "engine", path: "internal/engine.cm"}
]

// Walk the seed list and populate the build cache.
var seed_index = 0
var seed = null
while (seed_index < length(seed_files)) {
  seed = seed_files[seed_index]
  compile_and_cache(seed.name, core_path + '/' + seed.path)
  seed_index = seed_index + 1
}

print("bootstrap: cache seeded\n")
|