better update output

This commit is contained in:
2026-02-25 23:29:37 -06:00
parent 957b964d9d
commit 080e675d18
5 changed files with 150 additions and 54 deletions

View File

@@ -771,9 +771,6 @@ Build.build_dynamic = function(pkg, target, buildtype, opts) {
})
}
if (total > 0)
os.print(' Building C modules ')
arrfor(c_files, function(file) {
var sym_name = shop.c_symbol_for_file(pkg, file)
var dylib = Build.build_module_dylib(pkg, file, _target, {buildtype: _buildtype, extra_objects: support_objects, cflags: cached_cflags, verbose: _opts.verbose, force: _opts.force})
@@ -783,11 +780,10 @@ Build.build_dynamic = function(pkg, target, buildtype, opts) {
failed = failed + 1
}
done = done + 1
os.print('.')
})
if (total > 0)
os.print(` ${text(done)}/${text(total)}${failed > 0 ? `, ${text(failed)} failed` : ''}\n`)
log.build(` Building C modules (${text(done)} ok${failed > 0 ? `, ${text(failed)} failed` : ''})`)
// Write manifest so runtime can find dylibs without the build module
var mpath = manifest_path(pkg)

47
http.cm
View File

@@ -457,7 +457,7 @@ function parse_headers(raw) {
}
}
// decode chunked transfer encoding
// decode chunked transfer encoding (text version, for async responses)
function decode_chunked(body_text) {
var result = ""
var pos = 0
@@ -475,6 +475,37 @@ function decode_chunked(body_text) {
return result
}
// decode chunked transfer encoding (blob version, preserves binary data)
// buf: the raw response blob; body_start_bytes: byte offset where the chunked
// body begins (the first chunk-size line). Returns a stoned Blob containing
// the concatenated chunk payloads. Trailer fields after the terminating
// 0-size chunk are ignored.
function decode_chunked_blob(buf, body_start_bytes) {
var result = Blob()
var pos = body_start_bytes
// blob offsets/lengths here are bit-based (callers use (offset * 8) and pass
// length(buf) straight to read_blob), so divide by 8 for the byte count
var total_bytes = length(buf) / 8
var header_end = null
var header_blob = null
var header_text = null
var crlf_pos = null
var chunk_size = null
var chunk_data = null
while (pos < total_bytes) {
// peek a small window that should contain the chunk-size line + CRLF
// NOTE(review): assumes the hex size line (incl. any chunk extensions)
// fits in 20 bytes — TODO confirm against real servers
header_end = pos + 20
if (header_end > total_bytes) header_end = total_bytes
header_blob = buf.read_blob(pos * 8, header_end * 8)
stone(header_blob)
// presumably text() requires a stoned (frozen) blob — verify
header_text = text(header_blob)
crlf_pos = search(header_text, CRLF)
// no CRLF in the window: malformed or truncated body — stop decoding
if (crlf_pos == null) break
// chunk size is hexadecimal per the chunked coding grammar
chunk_size = number(text(header_text, 0, crlf_pos), 16)
// size 0 is the terminating chunk; unparseable size also ends the loop
if (chunk_size == null || chunk_size == 0) break
// advance past the size line and its CRLF
pos = pos + crlf_pos + 2
chunk_data = buf.read_blob(pos * 8, (pos + chunk_size) * 8)
stone(chunk_data)
result.write_blob(chunk_data)
// skip the chunk payload plus the CRLF that follows each chunk
pos = pos + chunk_size + 2
}
stone(result)
return result
}
// receive_response requestor — async incremental receive
var receive_response = function(callback, state) {
var cancelled = false
@@ -650,6 +681,8 @@ var fetch = function(url) {
var addrs = null
var address = null
var ok = true
var status_line = null
var status_code = null
if (scheme_end != null) {
scheme = lower(text(url, 0, scheme_end))
@@ -705,8 +738,16 @@ var fetch = function(url) {
hdr_end = search(raw_text, CRLF + CRLF)
if (hdr_end == null) return null
header_text = text(raw_text, 0, hdr_end)
if (search(lower(header_text), "transfer-encoding: chunked") != null)
return decode_chunked(text(raw_text, hdr_end + 4))
status_line = text(header_text, 0, search(header_text, CRLF) || length(header_text))
status_code = number(text(status_line, 9, 12))
if (status_code == null || status_code < 200 || status_code >= 300) {
log.error("fetch: " + status_line)
disrupt
}
if (search(lower(header_text), "transfer-encoding: chunked") != null) {
body = decode_chunked_blob(buf, hdr_end + 4)
return body
}
// Headers are ASCII so char offset = byte offset
body_start_bits = (hdr_end + 4) * 8
body = buf.read_blob(body_start_bits, length(buf))

View File

@@ -1021,19 +1021,41 @@ function ensure_package_dylibs(pkg) {
var build_mod = use_cache['core/build']
var target = null
var c_files = null
var _all_ok = true
var _ri = 0
if (build_mod) {
target = detect_host_target()
if (!target) return null
c_files = pkg_tools.get_c_files(_pkg, target, true)
if (!c_files || length(c_files) == 0) {
package_dylibs[_pkg] = []
return []
// Fast path: if manifest exists and all dylibs are present, skip build_dynamic
results = read_dylib_manifest(_pkg)
if (results != null) {
_all_ok = true
_ri = 0
while (_ri < length(results)) {
if (results[_ri].dylib && !fd.is_file(results[_ri].dylib)) {
_all_ok = false
break
}
_ri = _ri + 1
}
if (_all_ok) {
log.shop('manifest ok for ' + _pkg + ' (' + text(length(results)) + ' modules)')
} else {
results = null
}
}
if (results == null) {
target = detect_host_target()
if (!target) return null
log.shop('ensuring C modules for ' + _pkg)
results = build_mod.build_dynamic(_pkg, target, 'release', {})
c_files = pkg_tools.get_c_files(_pkg, target, true)
if (!c_files || length(c_files) == 0) {
package_dylibs[_pkg] = []
return []
}
log.shop('ensuring C modules for ' + _pkg)
results = build_mod.build_dynamic(_pkg, target, 'release', {})
}
} else {
// No build module at runtime — read manifest from cell build
results = read_dylib_manifest(_pkg)
@@ -1622,12 +1644,16 @@ function download_zip(pkg, commit_hash) {
return _download()
}
// Get zip from cache, returns null if not cached
// Get zip from cache, returns null if not cached or empty
function get_cached_zip(pkg, commit_hash) {
var cache_path = get_cache_path(pkg, commit_hash)
if (fd.is_file(cache_path))
return fd.slurp(cache_path)
var data = null
if (fd.is_file(cache_path)) {
data = fd.slurp(cache_path)
stone(data)
if (length(data) > 0) return data
fd.remove(cache_path)
}
return null
}
@@ -1883,7 +1909,7 @@ Shop.sync_with_deps = function(pkg, opts) {
if (visited[current]) continue
visited[current] = true
log.console(' Fetching ' + current + '...')
log.build(' Fetching ' + current + '...')
Shop.sync(current, opts)
_read_deps = function() {
@@ -2082,15 +2108,15 @@ Shop.build_package_scripts = function(package)
ok = ok + 1
} disruption {
push(errors, script)
log.console(" compile error: " + package + '/' + script)
log.build(" compile error: " + package + '/' + script)
}
_try()
})
if (length(errors) > 0) {
log.console(' Compiling scripts (' + text(ok) + ' ok, ' + text(length(errors)) + ' errors)')
log.build(' Compiling scripts (' + text(ok) + ' ok, ' + text(length(errors)) + ' errors)')
} else if (ok > 0) {
log.console(' Compiling scripts (' + text(ok) + ' ok)')
log.build(' Compiling scripts (' + text(ok) + ' ok)')
}
return {ok: ok, errors: errors, total: length(scripts)}

6
log.ce
View File

@@ -371,6 +371,7 @@ function do_enable() {
var sink = null
var i = 0
var already = false
var new_exclude = []
if (length(args) < 2) {
log.error("Usage: cell log enable <channel>")
return
@@ -388,7 +389,7 @@ function do_enable() {
}
if (is_array(sink.channels) && length(sink.channels) == 1 && sink.channels[0] == "*") {
if (is_array(sink.exclude)) {
var new_exclude = []
new_exclude = []
arrfor(sink.exclude, function(ex) {
if (ex != channel) push(new_exclude, ex)
})
@@ -411,6 +412,7 @@ function do_disable() {
var sink = null
var i = 0
var new_channels = []
var already_excluded = false
if (length(args) < 2) {
log.error("Usage: cell log disable <channel>")
return
@@ -428,7 +430,7 @@ function do_disable() {
}
if (is_array(sink.channels) && length(sink.channels) == 1 && sink.channels[0] == "*") {
if (!is_array(sink.exclude)) sink.exclude = []
var already_excluded = false
already_excluded = false
arrfor(sink.exclude, function(ex) {
if (ex == channel) already_excluded = true
})

View File

@@ -20,7 +20,6 @@ var target_triple = null
var follow_links = false
var git_pull = false
var i = 0
var updated = 0
var packages = null
var run = function() {
@@ -54,7 +53,14 @@ var run = function() {
if (target_pkg)
target_pkg = shop.resolve_locator(target_pkg)
function update_one(pkg) {
if (target_pkg) {
update_single(target_pkg)
} else {
update_all()
}
}
function check_one(pkg) {
var effective_pkg = pkg
var link_target = null
var lock = shop.load_lock()
@@ -62,59 +68,84 @@ function update_one(pkg) {
var old_commit = old_entry ? old_entry.commit : null
var info = shop.resolve_package_info(pkg)
var new_entry = null
var old_str = null
if (follow_links) {
link_target = link.get_target(pkg)
if (link_target) {
effective_pkg = link_target
log.console(" Following link: " + pkg + " -> " + effective_pkg)
}
}
// For local packages with --git, pull first
if (git_pull && info == 'local' && fd.is_dir(effective_pkg + '/.git')) {
log.console(" " + effective_pkg + " (git pull)")
log.build(" " + effective_pkg + " (git pull)")
os.system('git -C "' + effective_pkg + '" pull')
}
// Check for update (sets lock entry if changed)
new_entry = shop.update(effective_pkg)
if (new_entry && new_entry.commit) {
old_str = old_commit ? text(old_commit, 0, 8) : "(new)"
log.console(" " + effective_pkg + " " + old_str + " -> " + text(new_entry.commit, 0, 8))
return {
pkg: pkg,
effective: effective_pkg,
old_commit: old_commit,
new_entry: new_entry,
changed: new_entry != null && new_entry.commit != null
}
// Sync: fetch, extract, build
shop.sync(effective_pkg, {target: target_triple})
return new_entry
}
if (target_pkg) {
if (update_one(target_pkg)) {
log.console("Updated " + target_pkg + ".")
// Fetch, extract, and build a single package via shop.sync.
// Uses the module-level target_triple (presumably null selects the host
// target — TODO confirm against shop.sync).
function sync_one(effective_pkg) {
shop.sync(effective_pkg, {target: target_triple})
}
// Update one package: check for a newer commit, and if one exists, report
// the old -> new commit hashes and sync (fetch/extract/build) the package.
// pkg may be a link; check_one resolves it, so messages use result.effective.
function update_single(pkg) {
var result = check_one(pkg)
var old_str = null
if (result.changed) {
// "(new)" when there was no prior lock entry for this package
old_str = result.old_commit ? text(result.old_commit, 0, 8) : "(new)"
log.console("==> Upgrading " + result.effective)
log.console(" " + old_str + " -> " + text(result.new_entry.commit, 0, 8))
log.console("==> Syncing " + result.effective)
sync_one(result.effective)
log.console("Updated " + result.effective + ".")
} else {
log.console(pkg + " is up to date.")
}
}
} else {
}
function update_all() {
var results = []
var changed = []
var result = null
packages = shop.list_packages()
log.console("Checking for updates (" + text(length(packages)) + " packages)...")
log.console("Checking for updates...")
arrfor(packages, function(pkg) {
if (pkg == 'core') return
if (update_one(pkg))
updated = updated + 1
result = check_one(pkg)
results[] = result
if (result.changed)
changed[] = result
})
if (updated > 0) {
log.console("Updated " + text(updated) + " package(s).")
} else {
if (length(changed) == 0) {
log.console("All packages are up to date.")
return
}
log.console("==> Upgrading " + text(length(changed)) + " outdated package" + (length(changed) != 1 ? "s" : "") + ":")
arrfor(changed, function(r) {
var old_str = r.old_commit ? text(r.old_commit, 0, 8) : "(new)"
log.console(" " + r.effective + " " + old_str + " -> " + text(r.new_entry.commit, 0, 8))
})
arrfor(changed, function(r) {
log.console("==> Syncing " + r.effective)
sync_one(r.effective)
})
log.console("Updated " + text(length(changed)) + " package(s).")
}
}
run()
$stop()