// regen.ce — regenerate .mcode bytecode files and pre-warm .mach cache
//
// Pipeline per source file: tokenize -> parse -> fold -> mcode -> streamline,
// then the result is pretty-printed to boot/*.mcode and, when a shop path
// resolves below, compiled and stored in the machine-code cache.

var fd = use("fd")
var json = use("json")
var crypto = use("crypto")
var tokenize = use("tokenize")
var parse = use("parse")
var fold = use("fold")
var mcode = use("mcode")
var streamline = use("streamline")

// Compiler stages to regenerate:
//   src  — input .cm module path
//   name — module name handed to the machine-code compiler
//   out  — bytecode file written under boot/
var files = [
    {src: "tokenize.cm", name: "tokenize", out: "boot/tokenize.cm.mcode"},
    {src: "parse.cm", name: "parse", out: "boot/parse.cm.mcode"},
    {src: "fold.cm", name: "fold", out: "boot/fold.cm.mcode"},
    {src: "mcode.cm", name: "mcode", out: "boot/mcode.cm.mcode"},
    {src: "streamline.cm", name: "streamline", out: "boot/streamline.cm.mcode"},
    {src: "qbe.cm", name: "qbe", out: "boot/qbe.cm.mcode"},
    {src: "qbe_emit.cm", name: "qbe_emit", out: "boot/qbe_emit.cm.mcode"},
    {src: "verify_ir.cm", name: "verify_ir", out: "boot/verify_ir.cm.mcode"},
    {src: "internal/bootstrap.cm", name: "bootstrap", out: "boot/bootstrap.cm.mcode"},
    {src: "internal/engine.cm", name: "engine", out: "boot/engine.cm.mcode"},
    {src: "boot/seed_bootstrap.cm", name: "seed_bootstrap", out: "boot/seed_bootstrap.cm.mcode"}
]
// Resolve shop_path for cache writes.
// $CELL_SHOP takes precedence; otherwise fall back to $HOME/.cell.
var os = use('os')
var shop = os.getenv('CELL_SHOP')
var home = null
var cache_dir = null
if (!shop) {
    home = os.getenv('HOME')
    if (home) {
        shop = home + '/.cell'
    }
}
// The cache lives under <shop>/build; create it on first run. If neither
// CELL_SHOP nor HOME is set, cache_dir stays null and the pre-warm step
// in the main loop is skipped.
if (shop) {
    cache_dir = shop + '/build'
    if (!fd.is_dir(cache_dir)) {
        fd.mkdir(cache_dir)
    }
}
// State for the regeneration pass below, declared up front in the file's
// existing style.
var i = 0                 // index into files
var entry = null          // current files[] record
var src = null            // source text of entry.src
var tok_result = null     // tokenize() result (carries .tokens)
var ast = null            // parse() result (carries .errors)
var folded = null         // constant-folded AST
var mcode_blob = null     // frozen blob of the written .mcode text
var hash = null           // blake2 hex digest, used as the cache file name
var compact_mcode = null  // compact (non-pretty) JSON encoding of the mcode
var mach_blob = null      // compiled machine-code bytes
var compiled = null       // raw mcode output
var optimized = null      // streamlined mcode
var mcode_text = null     // pretty-printed mcode JSON written to disk
var f = null              // output file handle
var errs = null           // diagnostics from parse
var ei = 0                // index into errs
var e = null              // current diagnostic
var had_errors = false    // set when any file fails to parse
// Collapse leaf arrays (instruction arrays) onto single lines.
//
// Takes pretty-printed JSON text and rewrites every array whose opening
// "[" sits alone on its own line and whose elements are all scalars (no
// nested "[" or "{") onto one line — `[a, b, c]` — preserving the opening
// line's indentation and any trailing comma on the closing "]". Non-leaf
// and empty spans are emitted back verbatim. Arrays opened on the same
// line as their key (`"key": [`) are passed through untouched.
//
// NOTE(review): collection is single-level, not recursive — while inside a
// bare "[" span, a nested "[" line is swept into `collected`, so the first
// "]"/"]," seen closes the span; the whole span is then flushed verbatim
// and inner leaf arrays in it are never collapsed. Appears acceptable for
// json.encode output, where nested arrays usually open as `"key": [` —
// confirm if deeper bare nesting matters.
var compact_arrays = function(json_text) {
    var lines = array(json_text, "\n")
    var result = []
    var i = 0
    var line = null
    var trimmed = null
    var collecting = false   // true while gathering the body of a bare "[" span
    var collected = null     // lines gathered since the opening "["
    var indent = null        // leading spaces of the opening "[" line
    var is_leaf = null
    var j = 0
    var inner = null
    var parts = null         // trimmed, de-comma'd elements for the joined line
    var trailing = null      // "," when the closing "]" carried one
    var chars = null
    var k = 0

    while (i < length(lines)) {
        line = lines[i]
        trimmed = trim(line)
        // A line that is exactly "[" opens a candidate span.
        if (collecting == false && trimmed == "[") {
            collecting = true
            // Measure the opening line's leading-space indent.
            chars = array(line)
            k = 0
            while (k < length(chars) && chars[k] == " ") {
                k = k + 1
            }
            indent = text(line, 0, k)
            collected = []
            i = i + 1
            continue
        }
        if (collecting) {
            if (trimmed == "]" || trimmed == "],") {
                // Leaf iff no collected element opens a nested array/object.
                is_leaf = true
                j = 0
                while (j < length(collected)) {
                    inner = trim(collected[j])
                    if (starts_with(inner, "[") || starts_with(inner, "{")) {
                        is_leaf = false
                    }
                    j = j + 1
                }
                if (is_leaf && length(collected) > 0) {
                    // Strip per-element trailing commas, then re-join with ", ".
                    parts = []
                    j = 0
                    while (j < length(collected)) {
                        inner = trim(collected[j])
                        if (ends_with(inner, ",")) {
                            inner = text(inner, 0, length(inner) - 1)
                        }
                        parts[] = inner
                        j = j + 1
                    }
                    // Keep the trailing comma if the closing "]" had one.
                    trailing = ""
                    if (ends_with(trimmed, ",")) {
                        trailing = ","
                    }
                    result[] = `${indent}[${text(parts, ", ")}]${trailing}`
                } else {
                    // Not a leaf (or empty): emit the span back unchanged.
                    result[] = `${indent}[`
                    j = 0
                    while (j < length(collected)) {
                        result[] = collected[j]
                        j = j + 1
                    }
                    result[] = line
                }
                collecting = false
            } else {
                collected[] = line
            }
            i = i + 1
            continue
        }
        // Outside any span: pass the line through.
        result[] = line
        i = i + 1
    }
    return text(result, "\n")
}
// Regenerate each compiler stage: compile the .cm source to streamlined
// mcode, write a readable .mcode file under boot/, and (when cache_dir
// resolved above) pre-warm the machine-code cache.
while (i < length(files)) {
    entry = files[i]
    src = text(fd.slurp(entry.src))
    tok_result = tokenize(src, entry.src)
    ast = parse(tok_result.tokens, src, entry.src, tokenize)
    // Check for parse/semantic errors. On failure, report every diagnostic
    // (with position when available), remember the failure, and keep going
    // so one bad file doesn't hide errors in the others.
    errs = ast.errors
    if (errs != null && length(errs) > 0) {
        ei = 0
        while (ei < length(errs)) {
            e = errs[ei]
            if (e.line != null) {
                print(`${entry.src}:${text(e.line)}:${text(e.column)}: error: ${e.message}`)
            } else {
                print(`${entry.src}: error: ${e.message}`)
            }
            ei = ei + 1
        }
        had_errors = true
        i = i + 1
        continue
    }
    // Lowering pipeline: constant-fold the AST, emit mcode, then optimize.
    folded = fold(ast)
    compiled = mcode(folded)
    optimized = streamline(compiled)
    // Pretty-print with 2-space indent, then collapse leaf instruction
    // arrays onto single lines for readable diffs.
    mcode_text = compact_arrays(json.encode(optimized, null, 2))
    f = fd.open(entry.out, "w")
    fd.write(f, mcode_text)
    fd.close(f)
    print(`wrote ${entry.out}`)
    // Pre-warm .mach cache, keyed by the blake2 hex digest of the exact
    // bytes written above — presumably how the loader looks up cached
    // builds by hashing the .mcode file; confirm against the engine.
    if (cache_dir) {
        mcode_blob = stone(blob(mcode_text))
        hash = text(crypto.blake2(mcode_blob), 'h')
        // Compile from the compact (non-pretty) encoding. NOTE(review):
        // mach_compile_mcode_bin is a builtin not defined in this file —
        // presumably compiles mcode JSON to native code for entry.name.
        compact_mcode = json.encode(optimized)
        mach_blob = mach_compile_mcode_bin(entry.name, compact_mcode)
        fd.slurpwrite(cache_dir + '/' + hash, mach_blob)
        print(` cached ${hash}`)
    }
    i = i + 1
}
// Final summary. NOTE(review): despite the wording, nothing is rolled
// back — files that parsed cleanly were already written/cached above;
// only the failing ones were skipped.
if (had_errors) {
    print("regen aborted: fix errors above")
}