// bench_native.ce — compare VM vs native execution speed
//
// Usage:
//   cell --dev bench_native.ce <module.cm> [iterations]
//
// Compiles (if needed) and benchmarks a module via both VM and native dylib.
// Reports median/mean timing per benchmark + speedup ratio.
// Module setup + CLI argument parsing.
var os = use('os')
var fd = use('fd')

if (length(args) < 1) {
  print('usage: cell --dev bench_native.ce <module.cm> [iterations]')
  return
}

// Module path; strip a trailing '.cm' to get the name passed to use().
var file = args[0]
var name = file
if (ends_with(name, '.cm')) {
  name = text(name, 0, length(name) - 3)
}

// Sample count (odd default so the median is a single sample, not an average).
var iterations = 11
if (length(args) > 1) {
  iterations = number(args[1])
  // Guard: a zero/negative count would collect no samples and report 0s.
  if (iterations < 1) {
    print('invalid iteration count, using default: 11')
    iterations = 11
  }
}

def WARMUP = 3

// Native entry symbol: '/' and '-' are not valid identifier characters.
var safe = replace(replace(name, '/', '_'), '-', '_')
var symbol = 'js_' + safe + '_use'
var dylib_path = './' + file + '.dylib'
|
// --- Statistics ---

// Sort samples via the builtin sort (kept as a seam so the strategy is swappable).
var stat_sort = function(samples) {
  return sort(samples)
}
|
|
|
// Median of a sample list; 0 for an empty list.
// Even-sized lists average the two middle elements.
var stat_median = function(samples) {
  var n = length(samples)
  if (n == 0) return 0
  var ordered = stat_sort(samples)
  var half = floor(n / 2)
  if (n % 2 == 1) {
    return ordered[half]
  }
  return (ordered[half - 1] + ordered[half]) / 2
}
|
|
|
|
// Arithmetic mean of a sample list; 0 for an empty list.
var stat_mean = function(samples) {
  var n = length(samples)
  if (n == 0) return 0
  var total = reduce(samples, function(acc, x) { return acc + x })
  return total / n
}
|
|
|
|
// Human-readable duration: picks ns/us/ms/s and keeps two decimal places.
var format_ns = function(ns) {
  // Scale then round to two decimals (round(v * 100) / 100).
  var two = function(v) { return round(v * 100) / 100 }
  if (ns < 1000) return text(round(ns)) + 'ns'
  if (ns < 1000000) return text(two(ns / 1000)) + 'us'
  if (ns < 1000000000) return text(two(ns / 1000000)) + 'ms'
  return text(two(ns / 1000000000)) + 's'
}
|
|
|
|
// --- Collect benchmarks from module ---

// Build a list of {name, fn} entries from a loaded module.
// A bare function becomes a single entry named 'main'; an object module
// contributes every function-valued member; anything else yields [].
var collect_benches = function(mod) {
  if (is_function(mod)) {
    return [{name: 'main', fn: mod}]
  }
  var found = []
  if (is_object(mod)) {
    var keys = array(mod)
    var idx = 0
    while (idx < length(keys)) {
      var key = keys[idx]
      var member = mod[key]
      if (is_function(member)) {
        push(found, {name: key, fn: member})
      }
      idx = idx + 1
    }
  }
  return found
}
|
|
|
|
// --- Run one benchmark function ---

// Time fn(1) for `iterations` samples after WARMUP untimed calls.
// Returns {label, median, mean} with timings in whatever unit os.now() yields.
var run_bench = function(fn, label) {
  // Untimed warmup passes.
  var w = 0
  while (w < WARMUP) {
    fn(1)
    w = w + 1
  }

  // Timed sample passes.
  var samples = []
  var s = 0
  while (s < iterations) {
    var start = os.now()
    fn(1)
    var stop = os.now()
    push(samples, stop - start)
    s = s + 1
  }

  return {
    label: label,
    median: stat_median(samples),
    mean: stat_mean(samples)
  }
}
|
|
|
|
// --- Load VM module ---

print('loading VM module: ' + file)
var vm_mod = use(name)
var vm_benches = collect_benches(vm_mod)

// Nothing to measure: stop before touching the native side at all.
if (length(vm_benches) == 0) {
  print('no benchmarkable functions found in ' + file)
  return
}
|
|
|
|
// --- Load native module ---

var native_mod = null
var native_benches = []
var has_native = fd.is_file(dylib_path)

if (!has_native) {
  // No compiled artifact next to the module: VM-only run.
  print('no ' + dylib_path + ' found -- VM-only benchmarking')
  print(' hint: cell --dev compile.ce ' + file)
} else {
  print('loading native module: ' + dylib_path)
  // NOTE(review): assumes dylib_open/dylib_symbol raise on failure -- confirm.
  var lib = os.dylib_open(dylib_path)
  native_mod = os.dylib_symbol(lib, symbol)
  native_benches = collect_benches(native_mod)
}
|
|
|
|
// --- Run benchmarks ---

print('')
print('samples: ' + text(iterations) + ' (warmup: ' + text(WARMUP) + ')')
print('')

// Right-pad a string with spaces to width n; longer strings pass through.
var pad = function(str, n) {
  var padded = str
  while (length(padded) < n) {
    padded = padded + ' '
  }
  return padded
}
|
|
|
|
// Benchmark every VM function; pair each with its native build when present.
var i = 0
while (i < length(vm_benches)) {
  var bench = vm_benches[i]
  var vm_result = run_bench(bench.fn, 'vm')

  print(pad(bench.name, 20) + ' VM: ' + pad(format_ns(vm_result.median), 12) + ' (median) ' + format_ns(vm_result.mean) + ' (mean)')

  // Scan the native list for a function with the same name.
  var found = false
  var j = 0
  while (j < length(native_benches)) {
    var candidate = native_benches[j]
    if (candidate.name == bench.name) {
      found = true
      var nat_result = run_bench(candidate.fn, 'native')
      print(pad('', 20) + ' NT: ' + pad(format_ns(nat_result.median), 12) + ' (median) ' + format_ns(nat_result.mean) + ' (mean)')

      // Skip the ratio when the native median rounds to zero (divide-by-zero).
      if (nat_result.median > 0) {
        var speedup = vm_result.median / nat_result.median
        print(pad('', 20) + ' speedup: ' + text(round(speedup * 100) / 100) + 'x')
      }
    }
    j = j + 1
  }

  if (has_native && !found) {
    print(pad('', 20) + ' NT: (no matching function)')
  }

  print('')
  i = i + 1
}
|