better sem analysis

2026-02-18 10:34:47 -06:00
parent b16fa75706
commit 8be5936c10
16 changed files with 664 additions and 572 deletions

@@ -510,6 +510,139 @@ function inject_env(inject) {
    return env
}
// --- Pipeline API ---
// Lazy-loaded pipeline modules from use_cache (no re-entrancy risk).
var _tokenize_mod = null
var _parse_mod = null
var _fold_mod = null
var _index_mod = null
var _token_cache = {}
var _ast_cache = {}
var _analyze_cache = {}
var _index_cache = {}
var get_tokenize = function() {
    if (!_tokenize_mod) _tokenize_mod = use_cache['core/tokenize'] || use_cache['tokenize']
    return _tokenize_mod
}
var get_parse = function() {
    if (!_parse_mod) _parse_mod = use_cache['core/parse'] || use_cache['parse']
    return _parse_mod
}
var get_fold = function() {
    if (!_fold_mod) _fold_mod = use_cache['core/fold'] || use_cache['fold']
    return _fold_mod
}
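// Unlike the getters above, get_index falls back to Shop.use when the module
// is not already present in use_cache, so first use can trigger a load.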
var get_index = function() {
    if (!_index_mod) {
        _index_mod = use_cache['core/index'] || use_cache['index']
        if (!_index_mod) _index_mod = Shop.use('index', 'core')
    }
    return _index_mod
}
Shop.tokenize_file = function(path) {
    var src = text(fd.slurp(path))
    var key = content_hash(stone(blob(src)))
    if (_token_cache[key]) return _token_cache[key]
    var result = get_tokenize()(src, path)
    _token_cache[key] = result
    return result
}
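// Usage sketch ('scripts/example.cm' is a hypothetical path): repeat calls on
// unchanged source hit the content-hash cache and return the same object.
//   var tok = Shop.tokenize_file('scripts/example.cm')
//   // tok.tokens holds the token stream consumed by parse below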
Shop.parse_file = function(path) {
    var src = text(fd.slurp(path))
    var key = content_hash(stone(blob(src)))
    if (_ast_cache[key]) return _ast_cache[key]
    var tok = Shop.tokenize_file(path)
    var ast = get_parse()(tok.tokens, src, path, get_tokenize())
    _ast_cache[key] = ast
    return ast
}
Shop.analyze_file = function(path) {
    var src = text(fd.slurp(path))
    var key = content_hash(stone(blob(src)))
    if (_analyze_cache[key]) return _analyze_cache[key]
    var ast = Shop.parse_file(path)
    var folded = get_fold()(ast)
    _analyze_cache[key] = folded
    return folded
}
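// The three entry points layer on each other: analyze_file parses via
// parse_file, which tokenizes via tokenize_file, and every stage keys its
// cache on the same content hash of the source text.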
// Resolve import paths on an index in-place.
Shop.resolve_imports = function(idx_obj, fname) {
    var fi = Shop.file_info(fd.realpath(fname))
    var ctx = fi.package
    var ri = 0
    var rp = null
    var lp = null
    while (ri < length(idx_obj.imports)) {
        rp = Shop.resolve_use_path(idx_obj.imports[ri].module_path, ctx)
        if (rp == null) {
            lp = fd.dirname(fd.realpath(fname)) + '/' + idx_obj.imports[ri].module_path + '.cm'
            if (fd.is_file(lp)) {
                rp = lp
            }
        }
        if (rp != null) {
            idx_obj.imports[ri].resolved_path = rp
        }
        ri = ri + 1
    }
}
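// Resolution order: the package-aware resolver runs first, then a fallback to
// a sibling '<module_path>.cm' file next to the importing script. Imports that
// resolve neither way are left without a resolved_path.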
Shop.index_file = function(path) {
    var src = text(fd.slurp(path))
    var key = content_hash(stone(blob(src)))
    if (_index_cache[key]) return _index_cache[key]
    var tok = Shop.tokenize_file(path)
    var pipeline = {tokenize: get_tokenize(), parse: get_parse(), fold: get_fold()}
    var idx = get_index().index_file(src, path, pipeline)
    Shop.resolve_imports(idx, path)
    _index_cache[key] = idx
    return idx
}
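// Usage sketch ('scripts/example.cm' is a hypothetical path):
//   var idx = Shop.index_file('scripts/example.cm')
//   // each import that resolved now carries idx.imports[n].resolved_path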
Shop.pipeline = function() {
    return {
        tokenize: get_tokenize(),
        parse: get_parse(),
        fold: get_fold(),
        mcode: use_cache['core/mcode'] || use_cache['mcode'],
        streamline: use_cache['core/streamline'] || use_cache['streamline']
    }
}
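// Note: mcode and streamline are read straight from use_cache here, with no
// lazy-load fallback, so they may be null until those modules are loaded.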
Shop.all_script_paths = function() {
    var packages = Shop.list_packages()
    var result = []
    var i = 0
    var j = 0
    var scripts = null
    var pkg_dir = null
    var has_core = false
    for (i = 0; i < length(packages); i++) {
        if (packages[i] == 'core') has_core = true
    }
    if (!has_core) {
        packages = array(packages, ['core'])
    }
    for (i = 0; i < length(packages); i++) {
        pkg_dir = starts_with(packages[i], '/') ? packages[i] : get_packages_dir() + '/' + safe_package_path(packages[i])
        scripts = get_package_scripts(packages[i])
        for (j = 0; j < length(scripts); j++) {
            result[] = {
                package: packages[i],
                rel_path: scripts[j],
                full_path: pkg_dir + '/' + scripts[j]
            }
        }
    }
    return result
}
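// Each entry is {package, rel_path, full_path}; 'core' is always included even
// when Shop.list_packages() omits it.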
// Lazy-loaded compiler modules for on-the-fly compilation
var _mcode_mod = null
var _streamline_mod = null