// regen.cm — regenerate .cm.mcode files
// Run with: ./cell --core . regen.cm

// Compiler pipeline modules, resolved via the --core search path.
var fd = use("fd")
var json = use("json")
var tokenize = use("tokenize")
var parse = use("parse")
var fold = use("fold")
var mcode = use("mcode")
var streamline = use("streamline")

// Each entry maps a source file to its module name and compiled output path.
var files = [
    {src: "tokenize.cm", name: "tokenize", out: "tokenize.cm.mcode"},
    {src: "parse.cm", name: "parse", out: "parse.cm.mcode"},
    {src: "fold.cm", name: "fold", out: "fold.cm.mcode"},
    {src: "mcode.cm", name: "mcode", out: "mcode.cm.mcode"},
    {src: "streamline.cm", name: "streamline", out: "streamline.cm.mcode"},
    {src: "qbe.cm", name: "qbe", out: "qbe.cm.mcode"},
    {src: "qbe_emit.cm", name: "qbe_emit", out: "qbe_emit.cm.mcode"},
    {src: "internal/bootstrap.cm", name: "bootstrap", out: "internal/bootstrap.cm.mcode"},
    {src: "internal/engine.cm", name: "engine", out: "internal/engine.cm.mcode"}
]

var i = 0
var entry = null
var src = null
var tok_result = null
var ast = null
var folded = null
var compiled = null
var optimized = null
var mcode_text = null
var f = null
var errs = null
var ei = 0
var e = null
var had_errors = false

// Compile each file: tokenize -> parse -> fold -> mcode -> streamline.
while (i < length(files)) {
    entry = files[i]
    src = text(fd.slurp(entry.src))
    tok_result = tokenize(src, entry.src)
    ast = parse(tok_result.tokens, src, entry.src, tokenize)

    // Check for parse/semantic errors; report them and skip this file.
    errs = ast.errors
    if (errs != null && length(errs) > 0) {
        ei = 0
        while (ei < length(errs)) {
            e = errs[ei]
            if (e.line != null) {
                print(`${entry.src}:${text(e.line)}:${text(e.column)}: error: ${e.message}`)
            } else {
                print(`${entry.src}: error: ${e.message}`)
            }
            ei = ei + 1
        }
        had_errors = true
        i = i + 1
        continue
    }

    folded = fold(ast)
    compiled = mcode(folded)
    optimized = streamline(compiled)

    // Serialize the optimized mcode as pretty-printed JSON and write it out.
    mcode_text = json.encode(optimized, null, 2)
    f = fd.open(entry.out, "w")
    fd.write(f, mcode_text)
    fd.close(f)
    print(`wrote ${entry.out}`)

    i = i + 1
}

if (had_errors) {
    print("regen aborted: fix errors above")
}