Compare commits
3 Commits
| Author | SHA1 | Date |
|---|---|---|
| | c014f29ce7 | |
| | f1b3e5eddc | |
| | 2594c03765 | |
.github/docker/Dockerfile.alpine (vendored, new file, 30 lines added)
@@ -0,0 +1,30 @@
# Dockerfile.alpine
FROM alpine:edge

# Enable the edge and edge/community repositories.
# If you already have those in your base image, you might not need these echo lines.
RUN echo "https://dl-cdn.alpinelinux.org/alpine/edge/main" >> /etc/apk/repositories && \
    echo "https://dl-cdn.alpinelinux.org/alpine/edge/community" >> /etc/apk/repositories

# Update indexes and install packages
RUN apk update && \
    apk add --no-cache \
    build-base \
    binutils \
    mold \
    meson \
    cmake \
    ninja \
    git \
    pkgconf \
    ccache \
    nodejs \
    npm \
    zip \
    alsa-lib-dev \
    pulseaudio-dev \
    libudev-zero-dev \
    wayland-dev \
    wayland-protocols \
    mesa-dev \
    sdl3
.github/docker/Dockerfile.linux (vendored, new file, 32 lines added)
@@ -0,0 +1,32 @@
FROM ubuntu:plucky

RUN apt-get update && apt-get install -y --no-install-recommends \
    python3 python3-pip \
    libasound2-dev \
    libpulse-dev \
    libudev-dev \
    libwayland-dev \
    wayland-protocols \
    libxkbcommon-dev \
    libx11-dev \
    libxext-dev \
    libxrandr-dev \
    libxcursor-dev \
    libxi-dev \
    libxinerama-dev \
    libxss-dev \
    libegl1-mesa-dev \
    libgl1-mesa-dev \
    cmake \
    ninja-build \
    git \
    build-essential \
    binutils \
    mold \
    pkg-config \
    meson \
    ccache \
    mingw-w64 \
    wine \
    npm nodejs zip && \
    rm -rf /var/lib/apt/lists/*
.github/docker/Dockerfile.mingw (vendored, new file, 15 lines added)
@@ -0,0 +1,15 @@
FROM ubuntu:plucky

RUN apt-get update && \
    apt-get install -y --no-install-recommends \
    mingw-w64 \
    cmake \
    ninja-build \
    git \
    build-essential \
    binutils \
    pkg-config \
    zip \
    ccache \
    npm nodejs && \
    rm -rf /var/lib/apt/lists/*
.github/workflows/build.yml (vendored, new file, 304 lines added)
@@ -0,0 +1,304 @@
name: Build and Deploy

on:
  push:
    branches: [ "*" ]
    tags: [ "v*" ]
  pull_request:

jobs:
  # ──────────────────────────────────────────────────────────────
  # LINUX BUILD
  # ──────────────────────────────────────────────────────────────
  build-linux:
    runs-on: ubuntu-latest
    container:
      image: gitea.pockle.world/john/prosperon/linux:latest

    steps:
      - name: Check Out Code
        uses: actions/checkout@v4
        with: { fetch-depth: 0 }

      - name: Build Prosperon (Linux)
        run: |
          meson setup build -Dbuildtype=release -Db_lto=true -Db_lto_mode=thin -Db_ndebug=true
          meson compile -C build

      - name: Test Prosperon (Linux)
        env: { TRACY_NO_INVARIANT_CHECK: 1 }
        run: |
          meson test --print-errorlogs -C build

      - name: Upload Test Log (Linux)
        if: ${{ always() }}
        uses: actions/upload-artifact@v3
        with:
          name: testlog-linux
          path: build/meson-logs/testlog.txt

      - name: Upload Artifact (Linux)
        if: startsWith(github.ref, 'refs/tags/v')
        uses: actions/upload-artifact@v3
        with:
          name: prosperon-artifacts-linux
          path: build/prosperon

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to Gitea Registry
        uses: docker/login-action@v3
        with:
          registry: gitea.pockle.world
          username: ${{ secrets.USER_GITEA }}
          password: ${{ secrets.TOKEN_GITEA }}

      - name: Determine Docker Tag
        id: docker_tag
        run: |
          if [[ "${{ github.ref }}" =~ ^refs/tags/v.* ]]; then
            TAG=$(echo "${{ github.ref }}" | sed 's#refs/tags/##')
            echo "tag=$TAG" >> $GITHUB_OUTPUT
          else
            echo "tag=latest" >> $GITHUB_OUTPUT
          fi

      - name: Build and Push Docker Image
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          push: true
          tags: gitea.pockle.world/john/prosperon:${{ steps.docker_tag.outputs.tag }}
          platforms: linux/amd64

  # ──────────────────────────────────────────────────────────────
  # WINDOWS BUILD (MSYS2 / CLANG64)
  # ──────────────────────────────────────────────────────────────
  build-windows:
    runs-on: win-native
    strategy:
      matrix: { msystem: [ CLANG64 ] }

    steps:
      - name: Check Out Code
        uses: actions/checkout@v4

      - name: Setup MSYS2
        uses: msys2/setup-msys2@v2
        with:
          msystem: ${{ matrix.msystem }}
          update: true
          cache: true
          install: |
            git zip gzip tar base-devel
          pacboy: |
            meson
            cmake
            toolchain

      - name: Build Prosperon (Windows)
        shell: msys2 {0}
        run: |
          meson setup build -Dbuildtype=release -Db_lto=true -Db_lto_mode=thin -Db_ndebug=true -Dtracy:only_localhost=true -Dtracy:no_broadcast=true
          meson compile -C build

      - name: Test Prosperon (Windows)
        shell: msys2 {0}
        env:
          TRACY_NO_INVARIANT_CHECK: 1
        run: |
          meson test --print-errorlogs -C build

      - name: Upload Test Log (Windows)
        if: ${{ always() }}
        uses: actions/upload-artifact@v3
        with:
          name: testlog-windows
          path: build/meson-logs/testlog.txt

      - name: Upload Artifact (Windows)
        if: startsWith(github.ref, 'refs/tags/v')
        uses: actions/upload-artifact@v3
        with:
          name: prosperon-artifacts-windows
          path: build/prosperon.exe

  # ──────────────────────────────────────────────────────────────
  # MACOS BUILD
  # ──────────────────────────────────────────────────────────────
  build-macos:
    runs-on: macos-latest

    steps:
      - name: Check Out Code
        uses: actions/checkout@v4
        with: { fetch-depth: 0 }

      - name: Build Prosperon (macOS)
        run: |
          meson setup build -Dbuildtype=release -Db_lto=true -Db_lto_mode=thin -Db_ndebug=true
          meson compile -C build

      - name: Test Prosperon (macOS)
        run: |
          meson test --print-errorlogs -C build

      - name: Upload Test Log (macOS)
        if: ${{ always() }}
        uses: actions/upload-artifact@v3
        with:
          name: testlog-macos
          path: build/meson-logs/testlog.txt

      - name: Upload Artifact (macOS)
        if: startsWith(github.ref, 'refs/tags/v')
        uses: actions/upload-artifact@v3
        with:
          name: prosperon-artifacts-macos
          path: build/prosperon

  # ──────────────────────────────────────────────────────────────
  # PACKAGE CROSS-PLATFORM DIST
  # ──────────────────────────────────────────────────────────────
  package-dist:
    needs: [ build-linux, build-windows, build-macos ]
    if: startsWith(github.ref, 'refs/tags/v')
    runs-on: ubuntu-latest

    steps:
      - name: Check Out Code
        uses: actions/checkout@v3
        with: { fetch-depth: 0 }

      - name: Get Latest Tag
        id: get_tag
        run: |
          TAG=$(git describe --tags --abbrev=0)
          echo "tag=$TAG" >> $GITHUB_OUTPUT

      - name: Download Linux Artifacts
        uses: actions/download-artifact@v3
        with:
          name: prosperon-artifacts-linux
          path: linux_artifacts

      - name: Download Windows Artifacts
        uses: actions/download-artifact@v3
        with:
          name: prosperon-artifacts-windows
          path: windows_artifacts

      - name: Download macOS Artifacts
        uses: actions/download-artifact@v3
        with:
          name: prosperon-artifacts-macos
          path: mac_artifacts

      - name: Create Dist Folder
        run: |
          mkdir -p dist/linux dist/win dist/mac
          cp README.md dist/
          cp license.txt dist/
          cp -r examples dist/
          cp linux_artifacts/* dist/linux/
          cp windows_artifacts/* dist/win/
          cp mac_artifacts/* dist/mac/

      - name: Package Final Dist
        run: |
          TAG=${{ steps.get_tag.outputs.tag }}
          zip -r "prosperon-${TAG}.zip" dist
          echo "Created prosperon-${TAG}.zip"

      - name: Upload Final Dist
        uses: actions/upload-artifact@v3
        with:
          name: "prosperon-${{ steps.get_tag.outputs.tag }}"
          path: "prosperon-${{ steps.get_tag.outputs.tag }}.zip"

  # ──────────────────────────────────────────────────────────────
  # DEPLOY TO ITCH.IO (single ZIP containing all OSes)
  # ──────────────────────────────────────────────────────────────
  deploy-itch:
    needs: [ package-dist ]
    runs-on: ubuntu-latest

    steps:
      - name: Check Out Code
        uses: actions/checkout@v3
        with: { fetch-depth: 0 }

      - name: Get Latest Tag
        id: get_tag
        run: |
          TAG=$(git describe --tags --abbrev=0)
          echo "tag=$TAG" >> $GITHUB_OUTPUT

      - name: Download Final Distribution
        uses: actions/download-artifact@v3
        with:
          name: "prosperon-${{ steps.get_tag.outputs.tag }}"
          path: dist

      - name: Set up Butler
        uses: jdno/setup-butler@v1

      - name: Push to itch.io
        run: |
          butler push "dist/prosperon-${{ steps.get_tag.outputs.tag }}.zip" \
            ${{ secrets.ITCHIO_USERNAME }}/prosperon:universal \
            --userversion ${{ steps.get_tag.outputs.tag }}
        env:
          BUTLER_API_KEY: ${{ secrets.ITCHIO_API_KEY }}

  # ──────────────────────────────────────────────────────────────
  # DEPLOY TO SELF-HOSTED GITEA
  # ──────────────────────────────────────────────────────────────
  deploy-gitea:
    needs: [ package-dist ]
    runs-on: ubuntu-latest

    steps:
      - name: Check Out Code
        uses: actions/checkout@v3
        with: { fetch-depth: 0 }

      - name: Get Latest Tag & Commit Message
        id: get_tag
        run: |
          TAG=$(git describe --tags --abbrev=0)
          COMMIT_MSG=$(git log -1 --pretty=%B "$TAG")
          echo "tag=$TAG" >> $GITHUB_OUTPUT
          echo "commit_msg=$COMMIT_MSG" >> $GITHUB_OUTPUT

      - name: Download Final Distribution
        uses: actions/download-artifact@v3
        with:
          name: "prosperon-${{ steps.get_tag.outputs.tag }}"
          path: dist

      - name: Create / Update Gitea Release
        run: |
          TAG=${{ steps.get_tag.outputs.tag }}
          ZIP=dist/prosperon-${TAG}.zip
          BODY=$(echo "${{ steps.get_tag.outputs.commit_msg }}" | jq -R -s '.')
          RELEASE=$(curl -s -H "Authorization: token ${{ secrets.TOKEN_GITEA }}" \
            "https://gitea.pockle.world/api/v1/repos/john/prosperon/releases/tags/$TAG" | jq -r '.id')

          if [ "$RELEASE" = "null" ] || [ -z "$RELEASE" ]; then
            RELEASE=$(curl -X POST \
              -H "Authorization: token ${{ secrets.TOKEN_GITEA }}" \
              -H "Content-Type: application/json" \
              -d "{\"tag_name\":\"$TAG\",\"target_commitish\":\"${{ github.sha }}\",\"name\":\"$TAG\",\"body\":$BODY,\"draft\":false,\"prerelease\":false}" \
              "https://gitea.pockle.world/api/v1/repos/john/prosperon/releases" | jq -r '.id')
          fi

          curl -X POST \
            -H "Authorization: token ${{ secrets.TOKEN_GITEA }}" \
            -H "Content-Type: application/octet-stream" \
            --data-binary @"$ZIP" \
            "https://gitea.pockle.world/api/v1/repos/john/prosperon/releases/$RELEASE/assets?name=prosperon-${TAG}.zip"
        env:
          TOKEN_GITEA: ${{ secrets.TOKEN_GITEA }}
.gitignore (vendored, 19 lines changed)
@@ -1,31 +1,32 @@
.git/
.obj/
website/
bin/
build/
*.zip
*.o
*.a
*.d
tags
Jenkinsfile
*~
*.log
*.gz
*.tar
.nova/
packer*
primum
sokol-shdc*
source/shaders/*.h
core.cdb
primum.exe
core.cdb.h
jsc
.DS_Store
*.html
.vscode
*.icns
game.zip
icon.ico
steam/
subprojects/*/
build_dbg/
modules/
sdk/
artifacts/
discord_social_sdk/
discord_partner_sdk/
steam_api64.dll
subprojects/.wraplock
.gemini
AGENTS.md (new file, 27 lines added)
@@ -0,0 +1,27 @@
# AGENTS.md

## Project Overview
This is a game engine developed using a QuickJS fork as its scripting language. It is an actor-based system, based on Douglas Crockford's Misty. It is a Meson-compiled project with a number of dependencies.

## File Structure
- `source/`: Contains the C source code
- `scripts/`: Contains script code that is loaded on executable start, and modules
- `shaders/`: Contains shaders that ship with the engine (for shader-based backends)
- `benchmarks/`: Benchmark programs for testing speed
- `tests/`: Unit tests
- `examples/`: Contains full game examples

## Coding Practices
- Use K&R style C
- Use as little whitespace as possible
- JavaScript style prefers objects and prototypical inheritance over ES6 classes, liberal use of closures, and var everywhere

## Instructions
- When generating code, adhere to the coding practices outlined above.
- When adding new features, ensure they align with the project's goals.
- When fixing bugs, review the code carefully before making changes.
- When writing unit tests, cover all important scenarios.

## Compiling, running, and testing
- To compile the code, run "make", which generates a prosperon executable in build_dbg/, then copy it into the root folder
- Run a test by passing it as the command: ./prosperon tests/overling.js runs the test overling.js, and ./prosperon tests/nota.js runs the nota benchmark
CLAUDE.md (411 lines changed)
@@ -1,25 +1,400 @@

Removed (previous CLAUDE.md content):

# Code style
All code is done with 2 spaces for indentation.

For cell script and its integration files, objects are preferred over classes, and preferably limited use of prototypes; make objects sendable between actors (.ce files).

## cell script format
Cell script files end in .ce or .cm. Cell script is similar to Javascript but with some differences.

Variables are declared with 'var'. Var behaves like let.
Constants are declared with 'def'.
!= and == are strict; there is no !== or ===.
There is no undefined, only null.
There are no classes, only objects and prototypes.
Prefer backticks for string interpolation. Otherwise, converting non-strings with the text() function is required.
Everything should be lowercase.

There are no arraybuffers, only blobs, which work with bits. They must be stoned, like stone(blob), before being read from.

## c format
For cell script integration files, everything should be declared static that can be. Most don't have headers at all. Files in a package are not shared between packages.

There is no undefined, so JS_IsNull and JS_NULL should be used only.

## how module loading is done in cell script
Within a package, a C file, if using the correct macros (CELL_USE_FUNCS etc.), will be loaded as a module with its name; so png.c inside a package is loaded as <package>/png, giving you access to its functions.

Added (new CLAUDE.md content):

# CLAUDE.md

This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.

## Build Commands

### Build variants
- `make debug` - Build debug version (uses meson debug configuration)
- `make fast` - Build optimized version
- `make release` - Build release version with LTO and optimizations
- `make small` - Build minimal size version
- `make web` - Build for web/emscripten platform
- `make crosswin` - Cross-compile for Windows using mingw32

### Testing
- `meson test -C build_dbg` - Run all tests in debug build
- `meson test -C build_<variant>` - Run tests in specific build variant
- `./build_dbg/prosperon tests/<testname>.js` - Run specific test
- Available tests: `spawn_actor`, `empty`, `nota`, `wota`, `portalspawner`, `overling`, `send`, `delay`

### Common development commands
- `meson setup build_<variant>` - Configure build directory
- `meson compile -C build_<variant>` - Compile in build directory
- `./build_dbg/prosperon examples/<example>` - Run example from build directory
- Copy prosperon to game directory and run: `cp build_dbg/prosperon <game-dir>/ && cd <game-dir> && ./prosperon`

## Architecture Overview

Prosperon is an actor-based game engine inspired by Douglas Crockford's Misty system. Key architectural principles:

### Actor Model
- Each actor runs on its own thread
- Communication only through message passing (no shared JavaScript objects)
- Hierarchical actor system with spawning/killing
- Actor lifecycle: awake, update, draw, garbage collection

### JavaScript Style Guide
- Use `use()` function for imports (Misty-style, not ES6 import/export)
- Prefer closures and JavaScript objects and prototypes over ES6-style classes
- Follow existing JavaScript patterns in the codebase
- Functions as first-class citizens
- Do not use const or let; only var

### Core Systems
1. **Actor System** (scripts/core/engine.js)
   - Message passing via `send()`, `$_.receive()`
   - Actor spawning/management
   - Register-based component system (update, draw, gui, etc.)

2. **Module System**
   - `use()` function for loading modules
   - Module paths: `scripts/modules/`, `scripts/modules/ext/`
   - Custom QuickJS build with embedded C modules

3. **Build System**
   - Meson build configuration (the Makefile is a convenience wrapper)
   - Multiple platform targets (Windows, macOS, Linux, Web)
   - Custom QuickJS build in `subprojects/`
   - Uses SDL3 for cross-platform support

### Engine Entry Points
- `source/prosperon.c` - Main C entry point
- `scripts/core/engine.js` - JavaScript engine initialization for the system
- `scripts/core/base.js` has modifications to this JavaScript runtime (for example, additions to the base Array, String, etc.)

### Subprojects
- The C code has many subprojects, whose source and sometimes documentation can be found in subprojects/. subprojects/quickjs/doc has documentation for QuickJS.

### Resource System
- Scripts are bundled into `core.zip` during build
- Runtime module loading via PhysFS
- Resource paths checked in order: `/`, `scripts/modules/`, `scripts/modules/ext/`

### Notable Dependencies
- QuickJS (custom build) - JavaScript runtime
- SDL3 - Platform abstraction
- Chipmunk2D - Physics
- ENet - Networking
- SoLoud - Audio
- Tracy - Profiling (when enabled)

## Development Tips

### Running Games
```bash
# Build first
make debug

# Run example from build directory
./build_dbg/prosperon examples/chess

# Or copy to game directory
cp build_dbg/prosperon examples/chess/
cd examples/chess
./prosperon
```

### Documentation
- Documentation is found in docs/
- Documentation for the JS modules loaded with 'use' is in docs/api/modules
- .md files directly in docs/ give a high-level overview
- docs/dull describes what this specific JavaScript system is (including alterations from quickjs/ES6)

### Shader Development
- Shaders are in the `shaders/` directory as HLSL
- Compile script: `shaders/compile.sh`
- Outputs to platform-specific formats: `dxil/`, `msl/`, `spv/`

### Example Games
Located in the `examples/` directory:
- `chess` - Chess implementation (has its own Makefile)
- `pong` - Classic pong game
- `snake` - Snake game
- `tetris` - Tetris clone
- `bunnymark` - Performance test

### Testing
```bash
# Run all tests
meson test -C build_dbg

# Run specific test
./build_dbg/prosperon tests/spawn_actor.js
```

### Debugging
- Use debug build: `make debug`
- Tracy profiler support when enabled
- Console logging available via `console.log()`, `console.error()`, etc.
- Log files written to `.prosperon/log.txt`

# Project Structure Notes

## Core JavaScript Modules

- JavaScript modules are defined using the MISTUSE macro in jsffi.c
- The `js_os_funcs`, `js_io_funcs`, etc. arrays define the available functions for each module
- New functions are added with MIST_FUNC_DEF(module, function, args_count)

## File I/O

- `io.slurp(path)` - Reads a file as text
- `io.slurpbytes(path)` - Reads a file as an ArrayBuffer
- `io.slurpwrite(path, data)` - Writes data (string or ArrayBuffer) to a file
- `io.exists(path)` - Checks if a file exists (a short usage sketch follows)
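A minimal usage sketch of the `io` calls listed above (illustrative only; the file names are placeholders):

```javascript
// read a text file if it exists, then write a copy back out
var io = use('io');
if (io.exists('notes.txt')) {
  var text = io.slurp('notes.txt');       // file contents as a string
  io.slurpwrite('notes-copy.txt', text);  // write string data to a new file
}
var raw = io.slurpbytes('icon.png');      // binary contents as an ArrayBuffer
io.slurpwrite('icon-copy.png', raw);      // ArrayBuffer data is written as-is
```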
## Script Loading

- The `use(path)` function in engine.js loads JavaScript modules
- Script loading happens in prosperon.c and the engine.js script
- jsffi.c contains the C hooks for the QuickJS JavaScript engine
- Added functionality for bytecode compilation and loading (see the sketch below):
  - `os.compile_bytecode(source, filename)` - Compiles JS to bytecode, returns ArrayBuffer
  - `os.eval_bytecode(bytecode)` - Evaluates bytecode from an ArrayBuffer
  - `compile(scriptPath)` - Compiles a JS file to a .jso bytecode file
- Modified `use()` to check for .jso files before loading .js files
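A minimal sketch of the bytecode helpers described above (illustrative only; `scripts/hello.js` is a placeholder path):

```javascript
// compile a script to bytecode, cache it as .jso, then evaluate it later
var src = io.slurp('scripts/hello.js');
var bc = os.compile_bytecode(src, 'scripts/hello.js');  // ArrayBuffer of bytecode
io.slurpwrite('scripts/hello.jso', bc);                  // .jso sits alongside the .js
os.eval_bytecode(bc);                                    // run the compiled bytecode

// compile() wraps the same steps for a file on disk:
compile('scripts/hello.js');                             // writes scripts/hello.jso
```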
## QuickJS Bytecode API

- `JS_Eval` with JS_EVAL_FLAG_COMPILE_ONLY - Compiles without executing
- `JS_WriteObject` with JS_WRITE_OBJ_BYTECODE - Serializes to bytecode
- `JS_ReadObject` with JS_READ_OBJ_BYTECODE - Deserializes and loads bytecode
- Bytecode files use the .jso extension alongside .js files

## Available JavaScript APIs

### Core APIs
- `actor` - Base prototype for all actor objects
- `$_` - Special global for actor messaging
- `prosperon` - Global engine interface
- `console` - Logging and debugging interface

### Framework APIs
- `moth` - Higher-level game framework that simplifies Prosperon usage
  - Handles window creation, game loop, and event dispatching
  - Provides simple configuration via config.js
  - Auto-initializes systems like rendering and input
  - Manages camera, resolution, and FPS automatically

### Rendering
- `draw2d` - 2D drawing primitives
- `render` - Low-level rendering operations
- `graphics` - Higher-level graphics utilities
- `camera` - Camera controls and transformations
- `sprite` - Sprite rendering and management

### Physics and Math
- `math` - Mathematical utilities
- `geometry` - Geometric calculations and shapes
- `transform` - Object transformations

### Input and Events
- `input` - Mouse, keyboard, and touch handling
- `event` - Event management system

### Networking
- `enet` - Networking through the ENet library
- `http` - HTTP client capabilities

### Audio
- `sound` - Audio playback using SoLoud

### Utility Modules
- `time` - Time management and delays
- `io` - File I/O operations
- `json` - JSON parsing and serialization
- `util` - General utilities
- `color` - Color manipulation
- `miniz` - Compression utilities
- `nota` - Structured data format
- `wota` - Serialization format
- `qr` - QR code generation/reading
- `tween` - Animation tweening
- `spline` - Spline calculations
- `imgui` - Immediate mode GUI

## Game Development Patterns

### Project Structure
- Game config is typically in `config.js`
- Main entry point is `main.js`
- Resource loading through `resources.js`

### Actor Pattern Usage
- Create actors with `actor.spawn(script, config)`
- Start actors with `$_.start(callback, script)` - the system automatically sends a greeting, and the callback receives {type: 'greet', actor: actor_ref} (see the sketch below)
- No need to manually send greetings - `$_.start` handles this automatically
- Manage actor hierarchy with overlings and underlings
- Schedule actor tasks with the `$_.delay()` method
- Clean up with `kill()` and `garbage()`
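A minimal sketch of the spawn-and-greet flow described above (illustrative only; `worker.js` is a placeholder script, and only the calls named in this file are used):

```javascript
// start an underling; $_.start sends the greeting automatically
$_.start(msg => {
  if (msg.type === 'greet') {
    var worker = msg.actor;                      // opaque reference to the new actor
    send(worker, {type: 'do_work', amount: 3});  // normal message passing from here on
  }
}, 'worker.js');
```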
### Actor Messaging with Callbacks
When sending a message with a callback, respond by sending to the message itself:
```javascript
// Sender side:
send(actor, {type: 'status'}, response => {
  console.log(response); // Handle the response
});

// Receiver side:
$_.receiver(msg => {
  if (msg.type === 'status') {
    send(msg, {status: 'ok'}); // Send response to the message itself
  }
});
```

**Critical Rules for Message Callbacks**:
- **A message can only be used ONCE as a send target** - after sending a response to a message, it cannot be used again
- If you need to send multiple updates (like progress), only the download request message should be used for the final response
- Status requests should each get their own individual response
- Actor objects and message headers are completely opaque - never try to access internal properties
- Never access `msg.__HEADER__` or similar - the actor system handles routing internally
- Use `$_.delay()` to schedule work and avoid blocking the message receiver

### Game Loop Registration
- Register functions like `update`, `draw`, `gui`, etc.
- Set the function.layer property to control execution order
- Use the `Register` system to manage callbacks

### Program vs Module Pattern
- Programs are actor scripts that don't return values; they execute top-to-bottom (see the sketch below)
- Modules are files that return single values (usually objects) that get frozen
- Programs can spawn other programs as underlings
- Programs have lifecycle hooks: awake, update, draw, garbage, etc.
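A minimal sketch contrasting the two patterns above (illustrative only; file names are placeholders):

```javascript
// module file (loaded with use()): returns a single value, which gets frozen
var counter = {count: 0};
counter.bump = function() { return ++counter.count; };
return counter;
```

```javascript
// program (actor script): runs top to bottom, returns nothing, reacts to messages
var counter = use('counter');
console.log(counter.bump());
$_.receiver(msg => {
  if (msg.type === 'bump') console.log(counter.bump());
});
```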
## Technical Capabilities

### Graphics Pipeline
- Supports multiple render backends (Direct3D, Metal, Vulkan via SDL3)
- Custom shader system with cross-platform compilation
- Sprite batching for efficient 2D rendering
- Camera systems for both 2D and 3D

### Asset Support
- Images: PNG, JPG, QOI, etc.
- Audio: Various formats through SoLoud
- Models: Basic 3D model support
- Custom formats: Aseprite animations, etc.

### Developer Tools
- Built-in documentation system with `prosperon.DOC`
- Tracy profiler integration for performance monitoring
- Imgui debugging tools
- Console logging with various severity levels

## Misty Networking Patterns

Prosperon implements the Misty actor networking model. Understanding these patterns is critical for building distributed applications.

### Portal Reply Pattern
Portals must reply with an actor object, not application data:
```javascript
// CORRECT: Portal replies with actor
$_.portal(e => {
  send(e, $_); // Reply with server actor
}, 5678);

// WRONG: Portal sends application data
$_.portal(e => {
  send(e, {type: 'game_start'}); // This breaks the pattern
}, 5678);
```

### Two-Phase Connection Protocol
Proper Misty networking follows a two-phase pattern:

**Phase 1: Actor Connection**
- Client contacts the portal using `$_.contact()`
- Portal replies with an actor object
- This establishes the communication channel

**Phase 2: Application Communication**
- Client sends application messages to the received actor
- Normal bidirectional messaging begins
- Application logic handles game/service initialization

### Message Handling Best Practices
Messages should be treated as opaque objects carrying your application data:

```javascript
// CORRECT: Store actor references separately
var players = {};
$_.receiver(msg => {
  if (msg.type === 'join_game' && msg.player_id) {
    // Store the message for later response
    players[msg.player_id] = msg;
    // Later, respond to the stored message
    send(players[msg.player_id], {type: 'game_start'});
  }
});

// WRONG: Trying to access internal message properties
$_.receiver(msg => {
  var sender = msg.__HEADER__.replycc; // Never do this!
});
```

### Return ID Lifecycle
- Each reply callback gets a unique return ID
- Return IDs are consumed once and then deleted
- Reusing message objects with return headers causes "Could not find return function" errors
- Always create clean actor references for ongoing communication

### Actor Object Transparency
Actor objects must be completely opaque black boxes that work identically regardless of transport:

```javascript
// Actor objects work transparently for:
// - Same-process communication (fastest - uses mailbox)
// - Inter-process communication (uses mailbox)
// - Network communication (uses ENet)

// The actor shouldn't know or care about the transport mechanism
send(opponent, {type: 'move', from: [0,0], to: [1,1]});
```

**Key Implementation Details:**
- `actor_send()` in `scripts/core/engine.js` handles routing based on available actor data
- Actor objects sent in message data automatically get address/port populated when received over the network
- Three communication pathways: `os.mailbox_exist()` check, then mailbox send, else network send (sketched below)
- Actor objects must contain all necessary routing information for transparent messaging
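An illustrative sketch of the three-pathway routing decision described above. This is not the engine's actual code; `deliver_mailbox` and `deliver_network` are placeholder names for the mailbox and ENet paths:

```javascript
// conceptual shape of actor_send() routing: mailbox if one exists, else network
function route(target, msg) {
  if (os.mailbox_exist(target)) {
    deliver_mailbox(target, msg);   // same-process or inter-process delivery
  } else {
    deliver_network(target, msg);   // remote delivery over ENet via the actor's address/port
  }
}
```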
### Common Networking Bugs
1. **Portal sending application data**: Portal should only establish actor connections
2. **Return ID collision**: Reusing messages with return headers for multiple sends
3. **Mixed phases**: Trying to do application logic during connection establishment
4. **Header pollution**: Using received message objects as actor references
5. **Missing actor address info**: Actor objects in message data need network address population (fixed in engine.js:746-766)

### Example: Correct Chess Networking
```javascript
// Server: Portal setup
$_.portal(e => {
  send(e, $_); // Just reply with actor
}, 5678);

// Client: Two-phase connection
$_.contact((actor, reason) => {
  if (actor) {
    opponent = actor;
    send(opponent, {type: 'join_game'}); // Phase 2: app messaging
  }
}, {address: "localhost", port: 5678});

// Server: Handle application messages
$_.receiver(e => {
  if (e.type === 'join_game') {
    opponent = e.__HEADER__.replycc;
    send(opponent, {type: 'game_start', your_color: 'black'});
  }
});
```

## Memory Management

- When working with a conversational AI system like Claude, it's important to maintain a clean and focused memory
- Regularly review and update memories to ensure they remain relevant and helpful
- Delete or modify memories that are no longer accurate or useful
- Prioritize information that can genuinely assist in future interactions
Dockerfile (new file, 54 lines added)
@@ -0,0 +1,54 @@
# Builder stage
FROM ubuntu:plucky AS builder

RUN apt-get update && apt-get install -y --no-install-recommends \
    python3 python3-pip \
    libasound2-dev \
    libpulse-dev \
    libudev-dev \
    libwayland-dev \
    wayland-protocols \
    libxkbcommon-dev \
    libx11-dev \
    libxext-dev \
    libxrandr-dev \
    libxcursor-dev \
    libxi-dev \
    libxinerama-dev \
    libxss-dev \
    libegl1-mesa-dev \
    libgl1-mesa-dev \
    cmake \
    ninja-build \
    git \
    build-essential \
    binutils \
    pkg-config \
    meson \
    zip && \
    rm -rf /var/lib/apt/lists/*

WORKDIR /app
RUN git clone https://gitea.pockle.world/john/prosperon.git
WORKDIR /app/prosperon
RUN git checkout jsffi_refactor
RUN meson setup build -Dbuildtype=release -Db_lto=true -Db_lto_mode=thin -Db_ndebug=true
RUN meson compile -C build

# Runtime stage
FROM ubuntu:latest

# Install minimal runtime dependencies (e.g., for dynamically linked libraries)
RUN apt-get update && apt-get install -y libstdc++6 && rm -rf /var/lib/apt/lists/*

# Copy the compiled prosperon binary from the build stage
COPY --from=builder /app/prosperon/build/prosperon /usr/local/bin/prosperon

# Create an entrypoint script
RUN echo '#!/bin/bash' > /entrypoint.sh && \
    echo '/usr/local/bin/prosperon "$@" &' >> /entrypoint.sh && \
    echo 'tail -f /dev/null' >> /entrypoint.sh && \
    chmod +x /entrypoint.sh

WORKDIR /workdir
ENTRYPOINT ["/entrypoint.sh"]
Info.plist (new file, 16 lines added)
@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
  <key>CFBundleExecutable</key>
  <string>Prosperon</string>
  <key>CFBundleIdentifier</key>
  <string>pockle.world.prosperon</string>
  <key>CFBundleName</key>
  <string>Prosperon</string>
  <key>CFBundleVersion</key>
  <string>0.5</string>
  <key>NSHumanReadableCopyright</key>
  <string>Copyright © 2024 Pockle World. All rights reserved.</string>
</dict>
</plist>
LICENSE (new file, 26 lines added)
@@ -0,0 +1,26 @@
Prosperon Game Engine

Copyright (c) 2019-2024 John Alanbrook

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

(1) The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

(2) Any games or other derivative software must display the "Prosperon" logo
at near the beginning of the software's startup, before the chief purpose
of the software is underway.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Makefile (95 lines changed)
@@ -1,80 +1,29 @@

Removed (previous cell Makefile):

# Development build: creates libcell_runtime.dylib + thin main wrapper
# This is the default target for working on cell itself
#
# If cell doesn't exist yet, use 'make bootstrap' first (requires meson)
# or manually build with meson once.
#
# The cell shop is at ~/.cell and core scripts are installed to ~/.cell/core

CELL_SHOP = $(HOME)/.cell
CELL_CORE_PACKAGE = $(CELL_SHOP)/packages/core

makecell:
    cell pack core -o cell
    cp cell /opt/homebrew/bin/

# Install core: symlink this directory to ~/.cell/core
install: bootstrap $(CELL_SHOP)
    @echo "Linking cell core to $(CELL_CORE_PACKAGE)"
    rm -rf $(CELL_CORE_PACKAGE)
    ln -s $(PWD) $(CELL_CORE_PACKAGE)
    cp cell /opt/homebrew/bin/
    cp libcell_runtime.dylib /opt/homebrew/lib/
    @echo "Core installed."

cell: libcell_runtime.dylib cell_main
    cp cell_main cell
    chmod +x cell
    cp cell /opt/homebrew/bin/cell
    cp libcell_runtime.dylib /opt/homebrew/lib/

# Build the shared runtime library (everything except main.c)
# Uses existing cell to run build -d
libcell_runtime.dylib: $(CELL_SHOP)/build/dynamic
    cell build -d
    cp $(CELL_SHOP)/build/dynamic/libcell_runtime.dylib .

# Build the thin main wrapper that links to libcell_runtime
cell_main: source/main.c libcell_runtime.dylib
    cc -o cell_main source/main.c -L. -lcell_runtime -Wl,-rpath,@loader_path -Wl,-rpath,/opt/homebrew/lib

# Create the cell shop directories
$(CELL_SHOP):
    mkdir -p $(CELL_SHOP)
    mkdir -p $(CELL_SHOP)/packages
    mkdir -p $(CELL_SHOP)/cache
    mkdir -p $(CELL_SHOP)/build

$(CELL_CORE):
    ln -s $(PWD) $(CELL_CORE)

# Static build: creates a fully static cell binary (for distribution)
static:
    cell build
    cp $(CELL_SHOP)/build/static/cell .

# Bootstrap: build cell from scratch using meson (only needed once)
# Also installs core scripts to ~/.cell/core
bootstrap:
    meson setup build_bootstrap -Dbuildtype=debugoptimized
    meson compile -C build_bootstrap
    cp build_bootstrap/cell .
    cp build_bootstrap/libcell_runtime.dylib .
    @echo "Bootstrap complete. Cell shop initialized at $(CELL_SHOP)"
    @echo "Now run 'make' to rebuild with cell itself."

# Clean build artifacts
clean:
    rm -rf $(CELL_SHOP)/build build_bootstrap
    rm -f cell cell_main libcell_runtime.dylib

# Ensure dynamic build directory exists
$(CELL_SHOP)/build/dynamic: $(CELL_SHOP)
    mkdir -p $(CELL_SHOP)/build/dynamic

# Legacy meson target
meson:
    meson setup build_dbg -Dbuildtype=debugoptimized
    meson install -C build_dbg

.PHONY: cell static bootstrap clean meson install

Added (new Makefile):

debug: FORCE
    meson setup build_dbg -Dbuildtype=debug
    meson compile -C build_dbg

fast: FORCE
    meson setup build_fast
    meson compile -C build_fast

release: FORCE
    meson setup -Dbuildtype=release -Db_lto=true -Db_lto_mode=thin -Db_ndebug=true build_release
    meson compile -C build_release

sanitize: FORCE
    meson setup -Db_sanitize=address -Db_sanitize=memory -Db_sanitize=leak -Db_sanitize=undefined build_sani
    meson compile -C build_sani

small: FORCE
    meson setup -Dbuildtype=minsize -Db_lto=true -Db_ndebug=true build_small
    meson compile -C build_small

web: FORCE
    meson setup -Deditor=false -Dbuildtype=minsize -Db_lto=true -Db_ndebug=true --cross-file emscripten.cross build_web
    meson compile -C build_web

crosswin: FORCE
    meson setup -Dbuildtype=debugoptimized --cross-file mingw32.cross build_win
    meson compile -C build_win

FORCE:
@@ -1 +1,7 @@

Removed:

Read the docs to get started.

Added:

Thank you for using Prosperon!

Provided are prosperon builds for all available platforms. Simply run prosperon for your platform in a game folder to play!

To get started, take a dive into the provided example games in the examples folder. You can either copy the prosperon executable into an example directory and run it there, or run `prosperon path/to/example` from the project root.

You can take a look through the docs folder for the prosperon manual to learn all about it. The manual is available on the web at [docs.prosperon.dev](https://docs.prosperon.dev).
add.ce (deleted, 103 lines removed)
@@ -1,103 +0,0 @@
// cell add <locator> [alias] - Add a dependency to the current package
//
// Usage:
//   cell add <locator>          Add a dependency using default alias
//   cell add <locator> <alias>  Add a dependency with custom alias
//
// This adds the dependency to cell.toml and installs it to the shop.

var shop = use('internal/shop')
var pkg = use('package')
var build = use('build')
var fd = use('fd')

var locator = null
var alias = null

array(args, function(arg) {
  if (arg == '--help' || arg == '-h') {
    log.console("Usage: cell add <locator> [alias]")
    log.console("")
    log.console("Add a dependency to the current package.")
    log.console("")
    log.console("Examples:")
    log.console("  cell add gitea.pockle.world/john/prosperon")
    log.console("  cell add gitea.pockle.world/john/cell-image image")
    log.console("  cell add ../local-package")
    $stop()
  } else if (!starts_with(arg, '-')) {
    if (!locator) {
      locator = arg
    } else if (!alias) {
      alias = arg
    }
  }
})

if (!locator) {
  log.console("Usage: cell add <locator> [alias]")
  $stop()
}

// Resolve relative paths to absolute paths
if (locator == '.' || starts_with(locator, './') || starts_with(locator, '../') || fd.is_dir(locator)) {
  var resolved = fd.realpath(locator)
  if (resolved) {
    locator = resolved
  }
}

// Generate default alias from locator
if (!alias) {
  // Use the last component of the locator as alias
  var parts = array(locator, '/')
  alias = parts[length(parts) - 1]
  // Remove any version suffix
  if (search(alias, '@') != null) {
    alias = array(alias, '@')[0]
  }
}

// Check we're in a package directory
var cwd = fd.realpath('.')
if (!fd.is_file(cwd + '/cell.toml')) {
  log.error("Not in a package directory (no cell.toml found)")
  $stop()
}

log.console("Adding " + locator + " as '" + alias + "'...")

// Add to local project's cell.toml
try {
  pkg.add_dependency(null, locator, alias)
  log.console("  Added to cell.toml")
} catch (e) {
  log.error("Failed to update cell.toml: " + e)
  $stop()
}

// Install to shop
try {
  shop.get(locator)
  shop.extract(locator)

  // Build scripts
  shop.build_package_scripts(locator)

  // Build C code if any
  try {
    var target = build.detect_host_target()
    build.build_dynamic(locator, target, 'release')
  } catch (e) {
    // Not all packages have C code
  }

  log.console("  Installed to shop")
} catch (e) {
  log.error("Failed to install: " + e)
  $stop()
}

log.console("Added " + alias + " (" + locator + ")")

$stop()
archive/miniz.c (deleted, 399 lines removed)
@@ -1,399 +0,0 @@
#include "quickjs.h"
#include "miniz.h"
#include "cell.h"

static JSClassID js_reader_class_id;
static JSClassID js_writer_class_id;

static void js_reader_finalizer(JSRuntime *rt, JSValue val) {
  mz_zip_archive *zip = JS_GetOpaque(val, js_reader_class_id);
  mz_zip_reader_end(zip);
  js_free_rt(rt,zip);
}

static void js_writer_finalizer(JSRuntime *rt, JSValue val) {
  mz_zip_archive *zip = JS_GetOpaque(val, js_writer_class_id);
  mz_zip_writer_finalize_archive(zip);
  mz_zip_writer_end(zip);
  js_free_rt(rt,zip);
}

static JSClassDef js_reader_class = {
  "zip reader",
  .finalizer = js_reader_finalizer,
};

static JSClassDef js_writer_class = {
  "zip writer",
  .finalizer = js_writer_finalizer,
};

static mz_zip_archive *js2reader(JSContext *js, JSValue v)
{
  return JS_GetOpaque(v, js_reader_class_id);
}

static mz_zip_archive *js2writer(JSContext *js, JSValue v)
{
  return JS_GetOpaque(v, js_writer_class_id);
}

static JSValue js_miniz_read(JSContext *js, JSValue self, int argc, JSValue *argv)
{
  size_t len;
  void *data = js_get_blob_data(js, &len, argv[0]);
  if (data == -1)
    return JS_EXCEPTION;

  mz_zip_archive *zip = calloc(sizeof(*zip), 1);
  if (!zip)
    return JS_ThrowOutOfMemory(js);

  mz_bool success = mz_zip_reader_init_mem(zip, data, len, 0);

  if (!success) {
    int err = mz_zip_get_last_error(zip);
    free(zip);
    return JS_ThrowInternalError(js, "Failed to initialize zip reader: %s", mz_zip_get_error_string(err));
  }

  JSValue jszip = JS_NewObjectClass(js, js_reader_class_id);
  JS_SetOpaque(jszip, zip);
  return jszip;
}

static JSValue js_miniz_write(JSContext *js, JSValue self, int argc, JSValue *argv)
{
  const char *file = JS_ToCString(js, argv[0]);
  if (!file)
    return JS_EXCEPTION;

  mz_zip_archive *zip = calloc(sizeof(*zip), 1);
  if (!zip) {
    JS_FreeCString(js, file);
    return JS_ThrowOutOfMemory(js);
  }

  mz_bool success = mz_zip_writer_init_file(zip, file, 0);
  JS_FreeCString(js, file);

  if (!success) {
    int err = mz_zip_get_last_error(zip);
    mz_zip_writer_end(zip);
    free(zip);
    return JS_ThrowInternalError(js, "Failed to initialize zip writer: %s", mz_zip_get_error_string(err));
  }

  JSValue jszip = JS_NewObjectClass(js, js_writer_class_id);
  JS_SetOpaque(jszip, zip);
  return jszip;
}

static JSValue js_miniz_compress(JSContext *js, JSValue this_val,
                                 int argc, JSValueConst *argv)
{
  if (argc < 1)
    return JS_ThrowTypeError(js,
      "compress needs a string or ArrayBuffer");

  /* ─── 1. Grab the input data ──────────────────────────────── */
  const char *cstring = NULL;
  size_t in_len = 0;
  const void *in_ptr = NULL;

  if (JS_IsString(argv[0])) {
    /* String → UTF-8 bytes without the terminating NUL */
    cstring = JS_ToCStringLen(js, &in_len, argv[0]);
    if (!cstring)
      return JS_EXCEPTION;
    in_ptr = cstring;
  } else {
    in_ptr = js_get_blob_data(js, &in_len, argv[0]);
    if (in_ptr == -1)
      return JS_EXCEPTION;
  }

  /* ─── 2. Allocate an output buffer big enough ────────────── */
  mz_ulong out_len_est = mz_compressBound(in_len);
  void *out_buf = js_malloc(js, out_len_est);
  if (!out_buf) {
    if (cstring) JS_FreeCString(js, cstring);
    return JS_EXCEPTION;
  }

  /* ─── 3. Do the compression (MZ_DEFAULT_COMPRESSION = level 6) */
  mz_ulong out_len = out_len_est;
  int st = mz_compress2(out_buf, &out_len,
                        in_ptr, in_len, MZ_DEFAULT_COMPRESSION);

  /* clean-up for string input */
  if (cstring) JS_FreeCString(js, cstring);

  if (st != MZ_OK) {
    js_free(js, out_buf);
    return JS_ThrowInternalError(js,
      "miniz: compression failed (%d)", st);
  }

  /* ─── 4. Hand JavaScript a copy of the compressed data ────── */
  JSValue abuf = js_new_blob_stoned_copy(js, out_buf, out_len);
  js_free(js, out_buf);
  return abuf;
}

static JSValue js_miniz_decompress(JSContext *js,
                                   JSValueConst this_val,
                                   int argc,
                                   JSValueConst *argv)
{
  if (argc < 1)
    return JS_ThrowTypeError(js,
      "decompress: need compressed ArrayBuffer");

  /* grab compressed data */
  size_t in_len;
  void *in_ptr = js_get_blob_data(js, &in_len, argv[0]);
  if (in_ptr == -1)
    return JS_EXCEPTION;

  /* zlib header present → tell tinfl to parse it */
  size_t out_len = 0;
  void *out_ptr = tinfl_decompress_mem_to_heap(
    in_ptr, in_len, &out_len,
    TINFL_FLAG_PARSE_ZLIB_HEADER);

  if (!out_ptr)
    return JS_ThrowInternalError(js,
      "miniz: decompression failed");

  JSValue ret;
  ret = JS_NewStringLen(js, (const char *)out_ptr, out_len);
#ifdef MZ_FREE
  MZ_FREE(out_ptr);
#else
  free(out_ptr);
#endif
  return ret;
}


static const JSCFunctionListEntry js_miniz_funcs[] = {
  JS_CFUNC_DEF("read", 1, js_miniz_read),
  JS_CFUNC_DEF("write", 1, js_miniz_write),
  JS_CFUNC_DEF("compress", 1, js_miniz_compress),
  JS_CFUNC_DEF("decompress", 1, js_miniz_decompress),
};

JSValue js_writer_add_file(JSContext *js, JSValue self, int argc, JSValue *argv)
{
  if (argc < 2)
    return JS_ThrowTypeError(js, "add_file requires (path, arrayBuffer)");

  mz_zip_archive *zip = js2writer(js, self);
  const char *pathInZip = JS_ToCString(js, argv[0]);
  if (!pathInZip)
    return JS_ThrowTypeError(js, "Could not parse path argument");

  size_t dataLen;
  void *data = js_get_blob_data(js, &dataLen, argv[1]);
  if (data == -1) {
    JS_FreeCString(js, pathInZip);
    return JS_EXCEPTION;
  }

  int success = mz_zip_writer_add_mem(zip, pathInZip, data, dataLen, MZ_DEFAULT_COMPRESSION);
  JS_FreeCString(js, pathInZip);

  if (!success)
    return JS_ThrowInternalError(js, "Failed to add memory to zip");

  return JS_NULL;
}


static const JSCFunctionListEntry js_writer_funcs[] = {
  JS_CFUNC_DEF("add_file", 1, js_writer_add_file),
};

JSValue js_reader_mod(JSContext *js, JSValue self, int argc, JSValue *argv)
{
#ifndef MINIZ_NO_TIME
  const char *file = JS_ToCString(js,argv[0]);
  if (!file)
    return JS_EXCEPTION;

  mz_zip_archive *zip = js2reader(js, self);
  if (!zip) {
    JS_FreeCString(js, file);
    return JS_ThrowInternalError(js, "Invalid zip reader");
  }

  mz_zip_archive_file_stat pstat;
  mz_uint index = mz_zip_reader_locate_file(zip, file, NULL, 0);

  if (index == (mz_uint)-1) {
    JS_FreeCString(js, file);
    return JS_ThrowReferenceError(js, "File '%s' not found in archive", file);
  }

  JS_FreeCString(js, file);

  if (!mz_zip_reader_file_stat(zip, index, &pstat)) {
    int err = mz_zip_get_last_error(zip);
    return JS_ThrowInternalError(js, "Failed to get file stats: %s", mz_zip_get_error_string(err));
  }

  return JS_NewFloat64(js, pstat.m_time);
#else
  return JS_ThrowInternalError(js, "MINIZ_NO_TIME is defined");
#endif
}

JSValue js_reader_exists(JSContext *js, JSValue self, int argc, JSValue *argv)
{
  const char *file = JS_ToCString(js,argv[0]);
  if (!file)
    return JS_EXCEPTION;

  mz_zip_archive *zip = js2reader(js, self);
  if (!zip) {
    JS_FreeCString(js, file);
    return JS_ThrowInternalError(js, "Invalid zip reader");
  }

  mz_uint index = mz_zip_reader_locate_file(zip, file, NULL, 0);
  JS_FreeCString(js,file);
  if (index == (mz_uint)-1) return JS_NewBool(js, 0);
  return JS_NewBool(js, 1);
}

JSValue js_reader_slurp(JSContext *js, JSValue self, int argc, JSValue *argv)
{
  const char *file = JS_ToCString(js,argv[0]);
  if (!file)
    return JS_EXCEPTION;

  mz_zip_archive *zip = js2reader(js, self);
  if (!zip) {
    JS_FreeCString(js, file);
    return JS_ThrowInternalError(js, "Invalid zip reader");
  }

  size_t len;
  void *data = mz_zip_reader_extract_file_to_heap(zip, file, &len, 0);

  if (!data) {
    int err = mz_zip_get_last_error(zip);
    const char *filename = file;
    JS_FreeCString(js, file);
    return JS_ThrowInternalError(js, "Failed to extract file '%s': %s", filename, mz_zip_get_error_string(err));
  }

  JS_FreeCString(js, file);

  JSValue ret = js_new_blob_stoned_copy(js, data, len);
  free(data);
  return ret;
}

JSValue js_reader_list(JSContext *js, JSValue self, int argc, JSValue *argv)
{
  mz_zip_archive *zip = js2reader(js, self);
  if (!zip)
    return JS_ThrowInternalError(js, "Invalid zip reader");

  mz_uint num_files = mz_zip_reader_get_num_files(zip);

  JSValue arr = JS_NewArray(js);
  if (JS_IsException(arr))
    return arr;

  mz_uint arr_index = 0;
  for (mz_uint i = 0; i < num_files; i++) {
    mz_zip_archive_file_stat file_stat;
    if (!mz_zip_reader_file_stat(zip, i, &file_stat))
      continue;

    JSValue filename = JS_NewString(js, file_stat.m_filename);
    if (JS_IsException(filename)) {
      JS_FreeValue(js, arr);
      return filename;
    }
    JS_SetPropertyUint32(js, arr, arr_index++, filename);
  }

  return arr;
}

JSValue js_reader_is_directory(JSContext *js, JSValue self, int argc, JSValue *argv)
{
  if (argc < 1)
    return JS_ThrowTypeError(js, "is_directory requires a file index");

  int32_t index;
  if (JS_ToInt32(js, &index, argv[0]))
    return JS_EXCEPTION;

  mz_zip_archive *zip = js2reader(js, self);
  if (!zip)
    return JS_ThrowInternalError(js, "Invalid zip reader");

  return JS_NewBool(js, mz_zip_reader_is_file_a_directory(zip, index));
}

JSValue js_reader_get_filename(JSContext *js, JSValue self, int argc, JSValue *argv)
{
  if (argc < 1)
    return JS_ThrowTypeError(js, "get_filename requires a file index");

  int32_t index;
  if (JS_ToInt32(js, &index, argv[0]))
    return JS_EXCEPTION;

  mz_zip_archive *zip = js2reader(js, self);
  if (!zip)
    return JS_ThrowInternalError(js, "Invalid zip reader");

  mz_zip_archive_file_stat file_stat;
  if (!mz_zip_reader_file_stat(zip, index, &file_stat))
    return JS_ThrowInternalError(js, "Failed to get file stats");

  return JS_NewString(js, file_stat.m_filename);
}

JSValue js_reader_count(JSContext *js, JSValue self, int argc, JSValue *argv)
{
  mz_zip_archive *zip = js2reader(js, self);
  if (!zip)
    return JS_ThrowInternalError(js, "Invalid zip reader");
  return JS_NewUint32(js, mz_zip_reader_get_num_files(zip));
}

static const JSCFunctionListEntry js_reader_funcs[] = {
  JS_CFUNC_DEF("mod", 1, js_reader_mod),
  JS_CFUNC_DEF("exists", 1, js_reader_exists),
  JS_CFUNC_DEF("slurp", 1, js_reader_slurp),
  JS_CFUNC_DEF("list", 0, js_reader_list),
  JS_CFUNC_DEF("is_directory", 1, js_reader_is_directory),
  JS_CFUNC_DEF("get_filename", 1, js_reader_get_filename),
  JS_CFUNC_DEF("count", 0, js_reader_count),
};

JSValue js_miniz_use(JSContext *js)
{
  JS_NewClassID(&js_reader_class_id);
  JS_NewClass(JS_GetRuntime(js), js_reader_class_id, &js_reader_class);
  JSValue reader_proto = JS_NewObject(js);
  JS_SetPropertyFunctionList(js, reader_proto, js_reader_funcs, sizeof(js_reader_funcs) / sizeof(JSCFunctionListEntry));
  JS_SetClassProto(js, js_reader_class_id, reader_proto);

  JS_NewClassID(&js_writer_class_id);
  JS_NewClass(JS_GetRuntime(js), js_writer_class_id, &js_writer_class);
  JSValue writer_proto = JS_NewObject(js);
  JS_SetPropertyFunctionList(js, writer_proto, js_writer_funcs, sizeof(js_writer_funcs) / sizeof(JSCFunctionListEntry));
  JS_SetClassProto(js, js_writer_class_id, writer_proto);

  JSValue export = JS_NewObject(js);
  JS_SetPropertyFunctionList(js, export, js_miniz_funcs, sizeof(js_miniz_funcs)/sizeof(JSCFunctionListEntry));
  return export;
}
574
bench.ce
@@ -1,574 +0,0 @@
|
||||
var shop = use('internal/shop')
|
||||
var pkg = use('package')
|
||||
var fd = use('fd')
|
||||
var time = use('time')
|
||||
var json = use('json')
|
||||
var blob = use('blob')
|
||||
var os = use('os')
|
||||
var testlib = use('internal/testlib')
|
||||
var math = use('math/radians')
|
||||
|
||||
if (!args) args = []
|
||||
|
||||
var target_pkg = null // null = current package
|
||||
var target_bench = null // null = all benchmarks, otherwise specific bench file
|
||||
var all_pkgs = false
|
||||
|
||||
// Benchmark configuration
|
||||
def WARMUP_BATCHES = 3
|
||||
def SAMPLES = 11 // Number of timing samples to collect
|
||||
def TARGET_SAMPLE_NS = 20000000 // 20ms per sample (fast mode)
|
||||
def MIN_SAMPLE_NS = 2000000 // 2ms minimum sample duration
|
||||
def MIN_BATCH_SIZE = 1
|
||||
def MAX_BATCH_SIZE = 100000000 // 100M iterations max per batch
|
||||
|
||||
// Statistical functions
|
||||
function median(arr) {
|
||||
if (length(arr) == 0) return 0
|
||||
var sorted = sort(arr)
|
||||
var mid = floor(length(arr) / 2)
|
||||
if (length(arr) % 2 == 0) {
|
||||
return (sorted[mid - 1] + sorted[mid]) / 2
|
||||
}
|
||||
return sorted[mid]
|
||||
}
|
||||
|
||||
function mean(arr) {
|
||||
if (length(arr) == 0) return 0
|
||||
var sum = 0
|
||||
arrfor(arr, function(val) {
|
||||
sum += val
|
||||
})
|
||||
return sum / length(arr)
|
||||
}
|
||||
|
||||
function stddev(arr, mean_val) {
|
||||
if (length(arr) < 2) return 0
|
||||
var sum_sq_diff = 0
|
||||
arrfor(arr, function(val) {
|
||||
var diff = val - mean_val
|
||||
sum_sq_diff += diff * diff
|
||||
})
|
||||
return math.sqrt(sum_sq_diff / (length(arr) - 1))
|
||||
}
|
||||
|
||||
function percentile(arr, p) {
  if (length(arr) == 0) return 0
  var sorted = sort(arr)
  // e.g. p95 of 11 samples -> index floor(11 * 95 / 100) = 10 (the largest sample)
  var idx = floor(length(arr) * p / 100)
  if (idx >= length(arr)) idx = length(arr) - 1
  return sorted[idx]
}
|
||||
|
||||
// Parse arguments similar to test.ce
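// Accepted invocations (as handled by parse_args below):
//   cell bench                          - run all benchmarks in the current package
//   cell bench all                      - same as above
//   cell bench package <name> [bench]   - run benchmarks for a named or locked package
//   cell bench package all              - run benchmarks for every package
//   cell bench <path>                   - run a single bench file (a benches/ prefix is added if missing)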
|
||||
function parse_args() {
|
||||
if (length(args) == 0) {
|
||||
if (!testlib.is_valid_package('.')) {
|
||||
log.console('No cell.toml found in current directory')
|
||||
return false
|
||||
}
|
||||
target_pkg = null
|
||||
return true
|
||||
}
|
||||
|
||||
if (args[0] == 'all') {
|
||||
if (!testlib.is_valid_package('.')) {
|
||||
log.console('No cell.toml found in current directory')
|
||||
return false
|
||||
}
|
||||
target_pkg = null
|
||||
return true
|
||||
}
|
||||
|
||||
if (args[0] == 'package') {
|
||||
if (length(args) < 2) {
|
||||
log.console('Usage: cell bench package <name> [bench]')
|
||||
log.console(' cell bench package all')
|
||||
return false
|
||||
}
|
||||
|
||||
if (args[1] == 'all') {
|
||||
all_pkgs = true
|
||||
log.console('Benchmarking all packages...')
|
||||
return true
|
||||
}
|
||||
|
||||
var name = args[1]
|
||||
var lock = shop.load_lock()
|
||||
if (lock[name]) {
|
||||
target_pkg = name
|
||||
} else if (starts_with(name, '/') && testlib.is_valid_package(name)) {
|
||||
target_pkg = name
|
||||
} else {
|
||||
if (testlib.is_valid_package('.')) {
|
||||
var resolved = pkg.alias_to_package(null, name)
|
||||
if (resolved) {
|
||||
target_pkg = resolved
|
||||
} else {
|
||||
log.console(`Package not found: ${name}`)
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
log.console(`Package not found: ${name}`)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
if (length(args) >= 3) {
|
||||
target_bench = args[2]
|
||||
}
|
||||
|
||||
log.console(`Benchmarking package: ${target_pkg}`)
|
||||
return true
|
||||
}
|
||||
|
||||
// cell bench benches/suite or cell bench <path>
|
||||
var bench_path = args[0]
|
||||
|
||||
// Normalize path - add benches/ prefix if not present
|
||||
if (!starts_with(bench_path, 'benches/') && !starts_with(bench_path, '/')) {
|
||||
if (!fd.is_file(bench_path + '.cm') && !fd.is_file(bench_path)) {
|
||||
if (fd.is_file('benches/' + bench_path + '.cm') || fd.is_file('benches/' + bench_path)) {
|
||||
bench_path = 'benches/' + bench_path
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
target_bench = bench_path
|
||||
target_pkg = null
|
||||
|
||||
if (!testlib.is_valid_package('.')) {
|
||||
log.console('No cell.toml found in current directory')
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
if (!parse_args()) {
|
||||
$stop()
|
||||
return
|
||||
}
|
||||
|
||||
// Collect benchmark files from a package
|
||||
function collect_benches(package_name, specific_bench) {
|
||||
var prefix = testlib.get_pkg_dir(package_name)
|
||||
var benches_dir = prefix + '/benches'
|
||||
|
||||
if (!fd.is_dir(benches_dir)) return []
|
||||
|
||||
var files = pkg.list_files(package_name)
|
||||
var bench_files = []
|
||||
arrfor(files, function(f) {
|
||||
if (starts_with(f, "benches/") && ends_with(f, ".cm")) {
|
||||
if (specific_bench) {
|
||||
var bench_name = text(f, 0, -3)
|
||||
var match_name = specific_bench
|
||||
if (!starts_with(match_name, 'benches/')) match_name = 'benches/' + match_name
|
||||
var match_base = ends_with(match_name, '.cm') ? text(match_name, 0, -3) : match_name
|
||||
if (bench_name != match_base) return
|
||||
}
|
||||
push(bench_files, f)
|
||||
}
|
||||
})
|
||||
return bench_files
|
||||
}
|
||||
|
||||
// Calibrate batch size for a benchmark
|
||||
function calibrate_batch_size(bench_fn, is_batch) {
|
||||
if (!is_batch) return 1
|
||||
|
||||
var n = MIN_BATCH_SIZE
|
||||
var dt = 0
|
||||
|
||||
// Find a batch size that takes at least MIN_SAMPLE_NS
|
||||
while (n < MAX_BATCH_SIZE) {
|
||||
// Ensure n is a valid number before calling
|
||||
if (!is_number(n) || n < 1) {
|
||||
n = 1
|
||||
break
|
||||
}
|
||||
|
||||
var start = os.now()
|
||||
bench_fn(n)
|
||||
dt = os.now() - start
|
||||
|
||||
if (dt >= MIN_SAMPLE_NS) break
|
||||
|
||||
// Double the batch size
|
||||
var new_n = n * 2
|
||||
// Check if multiplication produced a valid number
|
||||
if (!is_number(new_n) || new_n > MAX_BATCH_SIZE) {
|
||||
n = MAX_BATCH_SIZE
|
||||
break
|
||||
}
|
||||
n = new_n
|
||||
}
|
||||
|
||||
// Adjust to target sample duration
|
||||
if (dt > 0 && dt < TARGET_SAMPLE_NS && is_number(n) && is_number(dt)) {
|
||||
var calc = n * TARGET_SAMPLE_NS / dt
|
||||
if (is_number(calc) && calc > 0) {
|
||||
var target_n = floor(calc)
|
||||
// Check if floor returned a valid number
|
||||
if (is_number(target_n) && target_n > 0) {
|
||||
if (target_n > MAX_BATCH_SIZE) target_n = MAX_BATCH_SIZE
|
||||
if (target_n < MIN_BATCH_SIZE) target_n = MIN_BATCH_SIZE
|
||||
n = target_n
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Safety check - ensure we always return a valid batch size
|
||||
if (!is_number(n) || n < 1) {
|
||||
n = 1
|
||||
}
|
||||
|
||||
return n
|
||||
}
|
||||
|
||||
// Run a single benchmark function
|
||||
function run_single_bench(bench_fn, bench_name) {
|
||||
var timings_per_op = []
|
||||
|
||||
// Detect benchmark format:
|
||||
// 1. Object with { setup, run, teardown } - structured format
|
||||
// 2. Function that accepts (n) - batch format
|
||||
// 3. Function that accepts () - legacy format
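// For illustration only (a hypothetical bench module export), the three shapes look roughly like:
//   return { setup: function() { return make_state() }, run: function(n, state) { /* n iterations */ }, teardown: function(state) {} }
//   return function(n) { for (var i = 0; i < n; i++) { /* one op */ } }
//   return function() { /* single run, timed as-is */ }
// (make_state is a placeholder name, not part of this file.)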
|
||||
var is_structured = is_object(bench_fn) && bench_fn.run
|
||||
var is_batch = false
|
||||
var batch_size = 1
|
||||
var setup_fn = null
|
||||
var run_fn = null
|
||||
var teardown_fn = null
|
||||
|
||||
if (is_structured) {
|
||||
setup_fn = bench_fn.setup || function() { return null }
|
||||
run_fn = bench_fn.run
|
||||
teardown_fn = bench_fn.teardown || function(state) {}
|
||||
|
||||
// Check if run function accepts batch size
|
||||
try {
|
||||
var test_state = setup_fn()
|
||||
run_fn(1, test_state)
|
||||
is_batch = true
|
||||
if (teardown_fn) teardown_fn(test_state)
|
||||
} catch (e) {
|
||||
is_batch = false
|
||||
}
|
||||
|
||||
// Create wrapper for calibration
|
||||
var calibrate_fn = function(n) {
|
||||
var state = setup_fn()
|
||||
run_fn(n, state)
|
||||
if (teardown_fn) teardown_fn(state)
|
||||
}
|
||||
batch_size = calibrate_batch_size(calibrate_fn, is_batch)
|
||||
|
||||
// Safety check for structured benchmarks
|
||||
if (!is_number(batch_size) || batch_size < 1) {
|
||||
batch_size = 1
|
||||
}
|
||||
} else {
|
||||
// Simple function format
|
||||
try {
|
||||
bench_fn(1)
|
||||
is_batch = true
|
||||
} catch (e) {
|
||||
is_batch = false
|
||||
}
|
||||
batch_size = calibrate_batch_size(bench_fn, is_batch)
|
||||
}
|
||||
|
||||
// Safety check - ensure batch_size is valid
|
||||
if (!batch_size || batch_size < 1) {
|
||||
batch_size = 1
|
||||
}
|
||||
|
||||
// Warmup phase
|
||||
for (var i = 0; i < WARMUP_BATCHES; i++) {
|
||||
// Ensure batch_size is valid before warmup
|
||||
if (!is_number(batch_size) || batch_size < 1) {
|
||||
var type_str = is_null(batch_size) ? 'null' : is_number(batch_size) ? 'number' : is_text(batch_size) ? 'text' : is_object(batch_size) ? 'object' : is_array(batch_size) ? 'array' : is_function(batch_size) ? 'function' : is_logical(batch_size) ? 'logical' : 'unknown'
|
||||
log.console(`WARNING: batch_size became ${type_str} = ${batch_size}, resetting to 1`)
|
||||
batch_size = 1
|
||||
}
|
||||
|
||||
if (is_structured) {
|
||||
var state = setup_fn()
|
||||
if (is_batch) {
|
||||
run_fn(batch_size, state)
|
||||
} else {
|
||||
run_fn(state)
|
||||
}
|
||||
if (teardown_fn) teardown_fn(state)
|
||||
} else {
|
||||
if (is_batch) {
|
||||
bench_fn(batch_size)
|
||||
} else {
|
||||
bench_fn()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Measurement phase - collect SAMPLES timing samples
|
||||
for (var i = 0; i < SAMPLES; i++) {
|
||||
// Double-check batch_size is valid (should never happen, but defensive)
|
||||
if (!is_number(batch_size) || batch_size < 1) {
|
||||
batch_size = 1
|
||||
}
|
||||
|
||||
if (is_structured) {
|
||||
var state = setup_fn()
|
||||
var start = os.now()
|
||||
if (is_batch) {
|
||||
run_fn(batch_size, state)
|
||||
} else {
|
||||
run_fn(state)
|
||||
}
|
||||
var duration = os.now() - start
|
||||
if (teardown_fn) teardown_fn(state)
|
||||
|
||||
var ns_per_op = is_batch ? duration / batch_size : duration
|
||||
push(timings_per_op, ns_per_op)
|
||||
} else {
|
||||
var start = os.now()
|
||||
if (is_batch) {
|
||||
bench_fn(batch_size)
|
||||
} else {
|
||||
bench_fn()
|
||||
}
|
||||
var duration = os.now() - start
|
||||
|
||||
var ns_per_op = is_batch ? duration / batch_size : duration
|
||||
push(timings_per_op, ns_per_op)
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate statistics
|
||||
var mean_ns = mean(timings_per_op)
|
||||
var median_ns = median(timings_per_op)
|
||||
var min_ns = reduce(timings_per_op, min)
|
||||
var max_ns = reduce(timings_per_op, max)
|
||||
var stddev_ns = stddev(timings_per_op, mean_ns)
|
||||
var p95_ns = percentile(timings_per_op, 95)
|
||||
var p99_ns = percentile(timings_per_op, 99)
|
||||
|
||||
// Calculate ops/s from median
|
||||
var ops_per_sec = 0
|
||||
if (median_ns > 0) {
|
||||
ops_per_sec = floor(1000000000 / median_ns)
|
||||
}
|
||||
|
||||
return {
|
||||
name: bench_name,
|
||||
batch_size: batch_size,
|
||||
samples: SAMPLES,
|
||||
mean_ns: round(mean_ns),
|
||||
median_ns: round(median_ns),
|
||||
min_ns: round(min_ns),
|
||||
max_ns: round(max_ns),
|
||||
stddev_ns: round(stddev_ns),
|
||||
p95_ns: round(p95_ns),
|
||||
p99_ns: round(p99_ns),
|
||||
ops_per_sec: ops_per_sec
|
||||
}
|
||||
}
|
||||
|
||||
// Format nanoseconds for display
|
||||
function format_ns(ns) {
|
||||
if (ns < 1000) return `${ns}ns`
|
||||
if (ns < 1000000) return `${round(ns / 1000 * 100) / 100}µs`
|
||||
if (ns < 1000000000) return `${round(ns / 1000000 * 100) / 100}ms`
|
||||
return `${round(ns / 1000000000 * 100) / 100}s`
|
||||
}
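// e.g. format_ns(250) -> "250ns", format_ns(1500) -> "1.5µs"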
|
||||
|
||||
// Format ops/sec for display
|
||||
function format_ops(ops) {
|
||||
if (ops < 1000) return `${ops} ops/s`
|
||||
if (ops < 1000000) return `${round(ops / 1000 * 100) / 100}K ops/s`
|
||||
if (ops < 1000000000) return `${round(ops / 1000000 * 100) / 100}M ops/s`
|
||||
return `${round(ops / 1000000000 * 100) / 100}G ops/s`
|
||||
}
|
||||
|
||||
// Run benchmarks for a package
|
||||
function run_benchmarks(package_name, specific_bench) {
|
||||
var bench_files = collect_benches(package_name, specific_bench)
|
||||
|
||||
var pkg_result = {
|
||||
package: package_name || "local",
|
||||
files: [],
|
||||
total: 0
|
||||
}
|
||||
|
||||
if (length(bench_files) == 0) return pkg_result
|
||||
|
||||
if (package_name) log.console(`Running benchmarks for ${package_name}`)
|
||||
else log.console(`Running benchmarks for local package`)
|
||||
|
||||
arrfor(bench_files, function(f) {
|
||||
var mod_path = text(f, 0, -3)
|
||||
|
||||
var file_result = {
|
||||
name: f,
|
||||
benchmarks: []
|
||||
}
|
||||
|
||||
try {
|
||||
var bench_mod
|
||||
var use_pkg = package_name ? package_name : fd.realpath('.')
|
||||
bench_mod = shop.use(mod_path, use_pkg)
|
||||
|
||||
var benches = []
|
||||
if (is_function(bench_mod)) {
|
||||
push(benches, {name: 'main', fn: bench_mod})
|
||||
} else if (is_object(bench_mod)) {
|
||||
arrfor(array(bench_mod), function(k) {
|
||||
if (is_function(bench_mod[k]))
|
||||
push(benches, {name: k, fn: bench_mod[k]})
|
||||
})
|
||||
}
|
||||
|
||||
if (length(benches) > 0) {
|
||||
log.console(` ${f}`)
|
||||
arrfor(benches, function(b) {
|
||||
try {
|
||||
var result = run_single_bench(b.fn, b.name)
|
||||
result.package = pkg_result.package
|
||||
push(file_result.benchmarks, result)
|
||||
pkg_result.total++
|
||||
|
||||
log.console(` ${result.name}`)
|
||||
log.console(` ${format_ns(result.median_ns)}/op ${format_ops(result.ops_per_sec)}`)
|
||||
log.console(` min: ${format_ns(result.min_ns)} max: ${format_ns(result.max_ns)} stddev: ${format_ns(result.stddev_ns)}`)
|
||||
if (result.batch_size > 1) {
|
||||
log.console(` batch: ${result.batch_size} samples: ${result.samples}`)
|
||||
}
|
||||
} catch (e) {
|
||||
log.console(` ERROR ${b.name}: ${e}`)
|
||||
log.error(e)
|
||||
var error_result = {
|
||||
package: pkg_result.package,
|
||||
name: b.name,
|
||||
error: e.toString()
|
||||
}
|
||||
push(file_result.benchmarks, error_result)
|
||||
pkg_result.total++
|
||||
}
|
||||
})
|
||||
}
|
||||
} catch (e) {
|
||||
log.console(` Error loading ${f}: ${e}`)
|
||||
var error_result = {
|
||||
package: pkg_result.package,
|
||||
name: "load_module",
|
||||
error: `Error loading module: ${e}`
|
||||
}
|
||||
push(file_result.benchmarks, error_result)
|
||||
pkg_result.total++
|
||||
}
|
||||
|
||||
if (length(file_result.benchmarks) > 0) {
|
||||
push(pkg_result.files, file_result)
|
||||
}
|
||||
})
|
||||
|
||||
return pkg_result
|
||||
}
|
||||
|
||||
// Run all benchmarks
|
||||
var all_results = []
|
||||
|
||||
if (all_pkgs) {
|
||||
if (testlib.is_valid_package('.')) {
|
||||
push(all_results, run_benchmarks(null, null))
|
||||
}
|
||||
|
||||
var packages = shop.list_packages()
|
||||
arrfor(packages, function(pkg) {
|
||||
push(all_results, run_benchmarks(pkg, null))
|
||||
})
|
||||
} else {
|
||||
push(all_results, run_benchmarks(target_pkg, target_bench))
|
||||
}
|
||||
|
||||
// Calculate totals
|
||||
var total_benches = 0
|
||||
arrfor(all_results, function(result) {
|
||||
total_benches += result.total
|
||||
})
|
||||
|
||||
log.console(`----------------------------------------`)
|
||||
log.console(`Benchmarks: ${total_benches} total`)
|
||||
|
||||
// Generate reports
|
||||
function generate_reports() {
|
||||
var timestamp = text(floor(time.number()))
|
||||
var report_dir = shop.get_reports_dir() + '/bench_' + timestamp
|
||||
testlib.ensure_dir(report_dir)
|
||||
|
||||
var txt_report = `BENCHMARK REPORT
|
||||
Date: ${time.text(time.number())}
|
||||
Total benchmarks: ${total_benches}
|
||||
|
||||
=== SUMMARY ===
|
||||
`
|
||||
arrfor(all_results, function(pkg_res) {
|
||||
if (pkg_res.total == 0) return
|
||||
txt_report += `Package: ${pkg_res.package}\n`
|
||||
arrfor(pkg_res.files, function(f) {
|
||||
txt_report += ` ${f.name}\n`
|
||||
arrfor(f.benchmarks, function(b) {
|
||||
if (b.error) {
|
||||
txt_report += ` ERROR ${b.name}: ${b.error}\n`
|
||||
} else {
|
||||
txt_report += ` ${b.name}: ${format_ns(b.median_ns)}/op (${format_ops(b.ops_per_sec)})\n`
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
txt_report += `\n=== DETAILED RESULTS ===\n`
|
||||
arrfor(all_results, function(pkg_res) {
|
||||
if (pkg_res.total == 0) return
|
||||
|
||||
arrfor(pkg_res.files, function(f) {
|
||||
arrfor(f.benchmarks, function(b) {
|
||||
if (b.error) return
|
||||
|
||||
txt_report += `\n${pkg_res.package}::${b.name}\n`
|
||||
txt_report += ` batch_size: ${b.batch_size} samples: ${b.samples}\n`
|
||||
txt_report += ` median: ${format_ns(b.median_ns)}/op\n`
|
||||
txt_report += ` mean: ${format_ns(b.mean_ns)}/op\n`
|
||||
txt_report += ` min: ${format_ns(b.min_ns)}\n`
|
||||
txt_report += ` max: ${format_ns(b.max_ns)}\n`
|
||||
txt_report += ` stddev: ${format_ns(b.stddev_ns)}\n`
|
||||
txt_report += ` p95: ${format_ns(b.p95_ns)}\n`
|
||||
txt_report += ` p99: ${format_ns(b.p99_ns)}\n`
|
||||
txt_report += ` ops/s: ${format_ops(b.ops_per_sec)}\n`
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
testlib.ensure_dir(report_dir)
|
||||
fd.slurpwrite(`${report_dir}/bench.txt`, stone(blob(txt_report)))
|
||||
log.console(`Report written to ${report_dir}/bench.txt`)
|
||||
|
||||
// Generate JSON per package
|
||||
arrfor(all_results, function(pkg_res) {
|
||||
if (pkg_res.total == 0) return
|
||||
|
||||
var pkg_benches = []
|
||||
arrfor(pkg_res.files, function(f) {
|
||||
arrfor(f.benchmarks, function(benchmark) {
|
||||
push(pkg_benches, benchmark)
|
||||
})
|
||||
})
|
||||
|
||||
var json_path = `${report_dir}/${replace(pkg_res.package, /\//, '_')}.json`
|
||||
fd.slurpwrite(json_path, stone(blob(json.encode(pkg_benches))))
|
||||
})
|
||||
}
|
||||
|
||||
generate_reports()
|
||||
$stop()
|
||||
@@ -1,262 +0,0 @@
|
||||
// micro_ops.bench.ce (or .cm depending on your convention)
|
||||
|
||||
// Note: We use a function-local sink in each benchmark to avoid cross-contamination
|
||||
function blackhole(sink, x) {
|
||||
// Prevent dead-code elimination
|
||||
return (sink + (x | 0)) | 0
|
||||
}
|
||||
|
||||
function make_obj_xy(x, y) {
|
||||
return { x, y }
|
||||
}
|
||||
|
||||
function make_obj_yx(x, y) {
|
||||
// Different insertion order to force a different shape in many engines
|
||||
return { y, x }
|
||||
}
|
||||
|
||||
function make_shapes(n) {
|
||||
var out = []
|
||||
for (var i = 0; i < n; i++) {
|
||||
var o = { a: i }
|
||||
o[`p${i}`] = i
|
||||
push(out, o)
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
function make_packed_array(n) {
|
||||
var a = []
|
||||
for (var i = 0; i < n; i++) push(a, i)
|
||||
return a
|
||||
}
|
||||
|
||||
function make_holey_array(n) {
|
||||
var a = []
|
||||
for (var i = 0; i < n; i += 2) a[i] = i
|
||||
return a
|
||||
}
|
||||
|
||||
return {
|
||||
// 0) Baseline loop cost
|
||||
loop_empty: function(n) {
|
||||
var sink = 0
|
||||
for (var i = 0; i < n; i++) {}
|
||||
return blackhole(sink, n)
|
||||
},
|
||||
|
||||
// 1) Numeric pipelines
|
||||
i32_add: function(n) {
|
||||
var sink = 0
|
||||
var x = 1
|
||||
for (var i = 0; i < n; i++) x = (x + 3) | 0
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
f64_add: function(n) {
|
||||
var sink = 0
|
||||
var x = 1.0
|
||||
for (var i = 0; i < n; i++) x = x + 3.14159
|
||||
return blackhole(sink, x | 0)
|
||||
},
|
||||
|
||||
mixed_add: function(n) {
|
||||
var sink = 0
|
||||
var x = 1
|
||||
for (var i = 0; i < n; i++) x = x + 0.25
|
||||
return blackhole(sink, x | 0)
|
||||
},
|
||||
|
||||
bit_ops: function(n) {
|
||||
var sink = 0
|
||||
var x = 0x12345678
|
||||
for (var i = 0; i < n; i++) x = ((x << 5) ^ (x >>> 3)) | 0
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
overflow_path: function(n) {
|
||||
var sink = 0
|
||||
var x = 0x70000000
|
||||
for (var i = 0; i < n; i++) x = (x + 0x10000000) | 0
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
// 2) Branching
|
||||
branch_predictable: function(n) {
|
||||
var sink = 0
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
if ((i & 7) != 0) x++
|
||||
else x += 2
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
branch_alternating: function(n) {
|
||||
var sink = 0
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
if ((i & 1) == 0) x++
|
||||
else x += 2
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
// 3) Calls
|
||||
call_direct: function(n) {
|
||||
var sink = 0
|
||||
function f(a) { return (a + 1) | 0 }
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) x = f(x)
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
call_indirect: function(n) {
|
||||
var sink = 0
|
||||
function f(a) { return (a + 1) | 0 }
|
||||
var g = f
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) x = g(x)
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
call_closure: function(n) {
|
||||
var sink = 0
|
||||
function make_adder(k) {
|
||||
return function(a) { return (a + k) | 0 }
|
||||
}
|
||||
var add3 = make_adder(3)
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) x = add3(x)
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
// 4) Object props (ICs / shapes)
|
||||
prop_read_mono: function(n) {
|
||||
var sink = 0
|
||||
var o = make_obj_xy(1, 2)
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) x = (x + o.x) | 0
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
prop_read_poly_2: function(n) {
|
||||
var sink = 0
|
||||
var a = make_obj_xy(1, 2)
|
||||
var b = make_obj_yx(1, 2)
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
var o = (i & 1) == 0 ? a : b
|
||||
x = (x + o.x) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
prop_read_mega: function(n) {
|
||||
var sink = 0
|
||||
var objs = make_shapes(32)
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
var o = objs[i & 31]
|
||||
x = (x + o.a) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
prop_write_mono: function(n) {
|
||||
var sink = 0
|
||||
var o = make_obj_xy(1, 2)
|
||||
for (var i = 0; i < n; i++) o.x = (o.x + 1) | 0
|
||||
return blackhole(sink, o.x)
|
||||
},
|
||||
|
||||
// 5) Arrays
|
||||
array_read_packed: function(n) {
|
||||
var sink = 0
|
||||
var a = make_packed_array(1024)
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) x = (x + a[i & 1023]) | 0
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
array_write_packed: function(n) {
|
||||
var sink = 0
|
||||
var a = make_packed_array(1024)
|
||||
for (var i = 0; i < n; i++) a[i & 1023] = i
|
||||
return blackhole(sink, a[17] | 0)
|
||||
},
|
||||
|
||||
array_read_holey: function(n) {
|
||||
var sink = 0
|
||||
var a = make_holey_array(2048)
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
var v = a[(i & 2047)]
|
||||
// If "missing" is a special value in your language, this stresses that path too
|
||||
if (v) x = (x + v) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
array_push_steady: function(n) {
|
||||
var sink = 0
|
||||
var x = 0
|
||||
for (var j = 0; j < n; j++) {
|
||||
var a = []
|
||||
for (var i = 0; i < 256; i++) push(a, i)
|
||||
x = (x + length(a)) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
// 6) Strings
|
||||
string_concat_small: function(n) {
|
||||
var sink = 0
|
||||
var x = 0
|
||||
for (var j = 0; j < n; j++) {
|
||||
var s = ""
|
||||
for (var i = 0; i < 16; i++) s = s + "x"
|
||||
x = (x + length(s)) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
// 7) Allocation / GC pressure
|
||||
alloc_tiny_objects: function(n) {
|
||||
var sink = 0
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
var o = { a: i, b: i + 1, c: i + 2 }
|
||||
x = (x + o.b) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
alloc_linked_list: function(n) {
|
||||
var sink = 0
|
||||
var head = null
|
||||
for (var i = 0; i < n; i++) head = { v: i, next: head }
|
||||
var x = 0
|
||||
var p = head
|
||||
while (p) {
|
||||
x = (x + p.v) | 0
|
||||
p = p.next
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
// 8) meme-specific (adapt these to your exact semantics)
|
||||
meme_clone_read: function(n) {
|
||||
// If meme(obj) clones like Object.create / prototypal clone, this hits it hard.
|
||||
// Replace with your exact meme call form.
|
||||
var sink = 0
|
||||
var base = { x: 1, y: 2 }
|
||||
var x = 0
|
||||
for (var i = 0; i < n; i++) {
|
||||
var o = meme(base)
|
||||
x = (x + o.x) | 0
|
||||
}
|
||||
return blackhole(sink, x)
|
||||
},
|
||||
|
||||
}
|
||||
@@ -1,43 +0,0 @@
|
||||
function mainThread() {
|
||||
var maxDepth = max(6, Number(arg[0] || 16));
|
||||
|
||||
var stretchDepth = maxDepth + 1;
|
||||
var check = itemCheck(bottomUpTree(stretchDepth));
|
||||
log.console(`stretch tree of depth ${stretchDepth}\t check: ${check}`);
|
||||
|
||||
var longLivedTree = bottomUpTree(maxDepth);
|
||||
|
||||
for (var depth = 4; depth <= maxDepth; depth += 2) {
|
||||
var iterations = 1 << maxDepth - depth + 4;
|
||||
work(iterations, depth);
|
||||
}
|
||||
|
||||
log.console(`long lived tree of depth ${maxDepth}\t check: ${itemCheck(longLivedTree)}`);
|
||||
}
|
||||
|
||||
function work(iterations, depth) {
|
||||
var check = 0;
|
||||
for (var i = 0; i < iterations; i++)
|
||||
check += itemCheck(bottomUpTree(depth));
|
||||
log.console(`${iterations}\t trees of depth ${depth}\t check: ${check}`);
|
||||
}
|
||||
|
||||
function TreeNode(left, right) {
|
||||
return {left, right};
|
||||
}
|
||||
|
||||
function itemCheck(node) {
|
||||
if (node.left == null)
|
||||
return 1;
|
||||
return 1 + itemCheck(node.left) + itemCheck(node.right);
|
||||
}
|
||||
|
||||
function bottomUpTree(depth) {
|
||||
return depth > 0
|
||||
? TreeNode(bottomUpTree(depth - 1), bottomUpTree(depth - 1))
|
||||
: TreeNode(null, null);
|
||||
}
|
||||
|
||||
mainThread()
|
||||
|
||||
$stop()
|
||||
@@ -1,25 +0,0 @@
|
||||
var blob = use('blob')
|
||||
var math = use('math/radians')
|
||||
|
||||
function eratosthenes (n) {
  var sieve = blob(n, true)
  var sqrtN = whole(math.sqrt(n));

  for (var i = 2; i <= sqrtN; i++)
    if (sieve.read_logical(i))
      for (var j = i * i; j <= n; j += i)
        sieve.write_bit(j, false);

  return sieve;
}
|
||||
|
||||
var sieve = eratosthenes(10000000);
|
||||
stone(sieve)
|
||||
|
||||
var c = 0
|
||||
for (var i = 0; i < length(sieve); i++)
|
||||
if (sieve.read_logical(i)) c++
|
||||
|
||||
log.console(c)
|
||||
|
||||
$stop()
|
||||
@@ -1,58 +0,0 @@
|
||||
function fannkuch(n) {
|
||||
var perm1 = [n]
|
||||
for (var i = 0; i < n; i++) perm1[i] = i
|
||||
var perm = [n]
|
||||
var count = [n]
|
||||
var f = 0, flips = 0, nperm = 0, checksum = 0
|
||||
var i, k, r
|
||||
|
||||
r = n
|
||||
while (r > 0) {
|
||||
i = 0
|
||||
while (r != 1) { count[r-1] = r; r -= 1 }
|
||||
while (i < n) { perm[i] = perm1[i]; i += 1 }
|
||||
|
||||
// Count flips and update max and checksum
|
||||
f = 0
|
||||
k = perm[0]
|
||||
while (k != 0) {
|
||||
i = 0
|
||||
while (2*i < k) {
|
||||
var t = perm[i]; perm[i] = perm[k-i]; perm[k-i] = t
|
||||
i += 1
|
||||
}
|
||||
k = perm[0]
|
||||
f += 1
|
||||
}
|
||||
if (f > flips) flips = f
|
||||
if ((nperm & 0x1) == 0) checksum += f; else checksum -= f
|
||||
|
||||
// Use incremental change to generate another permutation
|
||||
var more = true
|
||||
while (more) {
|
||||
if (r == n) {
|
||||
log.console( checksum )
|
||||
return flips
|
||||
}
|
||||
var p0 = perm1[0]
|
||||
i = 0
|
||||
while (i < r) {
|
||||
var j = i + 1
|
||||
perm1[i] = perm1[j]
|
||||
i = j
|
||||
}
|
||||
perm1[r] = p0
|
||||
|
||||
count[r] -= 1
|
||||
if (count[r] > 0) more = false; else r += 1
|
||||
}
|
||||
nperm += 1
|
||||
}
|
||||
return flips;
|
||||
}
|
||||
|
||||
var n = arg[0] || 10
|
||||
|
||||
log.console(`Pfannkuchen(${n}) = ${fannkuch(n)}`)
|
||||
|
||||
$stop()
|
||||
@@ -1,16 +0,0 @@
|
||||
var time = use('time')
|
||||
|
||||
function fib(n) {
|
||||
if (n<2) return n
|
||||
return fib(n-1) + fib(n-2)
|
||||
}
|
||||
|
||||
var now = time.number()
|
||||
var arr = [1,2,3,4,5]
|
||||
arrfor(arr, function(i) {
|
||||
log.console(fib(28))
|
||||
})
|
||||
|
||||
log.console(`elapsed: ${time.number()-now}`)
|
||||
|
||||
$stop()
|
||||
@@ -1,20 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Run hyperfine with parameter lists
|
||||
# This will create a cross-product of all libraries × all scenarios
|
||||
hyperfine \
|
||||
--warmup 3 \
|
||||
--runs 20 \
|
||||
-i \
|
||||
--export-csv wota_vs_nota_vs_json.csv \
|
||||
--export-json wota_vs_nota_vs_json.json \
|
||||
--export-markdown wota_vs_nota_vs_json.md \
|
||||
--parameter-list lib wota,nota,json \
|
||||
--parameter-list scen empty,integers,floats,strings,objects,nested,large_array \
|
||||
'cell benchmarks/wota_nota_json {lib} {scen}'
|
||||
|
||||
|
||||
echo "Benchmark complete! Results saved to:"
|
||||
echo " - wota_vs_nota_vs_json.csv"
|
||||
echo " - wota_vs_nota_vs_json.json"
|
||||
echo " - wota_vs_nota_vs_json.md"
|
||||
@@ -1,395 +0,0 @@
|
||||
var time = use('time')
|
||||
var math = use('math/radians')
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// JavaScript Performance Benchmark Suite
|
||||
// Tests core JS operations: property access, function calls, arithmetic, etc.
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// Test configurations
|
||||
def iterations = {
|
||||
simple: 10000000,
|
||||
medium: 1000000,
|
||||
complex: 100000
|
||||
};
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Utility: measureTime(fn) => how long fn() takes in seconds
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function measureTime(fn) {
|
||||
var start = time.number();
|
||||
fn();
|
||||
var end = time.number();
|
||||
return (end - start);
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Benchmark: Property Access
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function benchPropertyAccess() {
|
||||
var obj = {
|
||||
a: 1, b: 2, c: 3, d: 4, e: 5,
|
||||
nested: { x: 10, y: 20, z: 30 }
|
||||
};
|
||||
|
||||
var readTime = measureTime(function() {
|
||||
var sum = 0;
|
||||
for (var i = 0; i < iterations.simple; i++) {
|
||||
sum += obj.a + obj.b + obj.c + obj.d + obj.e;
|
||||
sum += obj.nested.x + obj.nested.y + obj.nested.z;
|
||||
}
|
||||
});
|
||||
|
||||
var writeTime = measureTime(function() {
|
||||
for (var i = 0; i < iterations.simple; i++) {
|
||||
obj.a = i;
|
||||
obj.b = i + 1;
|
||||
obj.c = i + 2;
|
||||
obj.nested.x = i * 2;
|
||||
obj.nested.y = i * 3;
|
||||
}
|
||||
});
|
||||
|
||||
return { readTime: readTime, writeTime: writeTime };
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Benchmark: Function Calls
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function benchFunctionCalls() {
|
||||
function add(a, b) { return a + b; }
|
||||
function multiply(a, b) { return a * b; }
|
||||
function complexCalc(a, b, c) { return (a + b) * c / 2; }
|
||||
|
||||
var obj = {
|
||||
method: function(x) { return x * 2; },
|
||||
nested: {
|
||||
deepMethod: function(x, y) { return x + y; }
|
||||
}
|
||||
};
|
||||
|
||||
var simpleCallTime = measureTime(function() {
|
||||
var result = 0;
|
||||
for (var i = 0; i < iterations.simple; i++) {
|
||||
result = add(i, 1);
|
||||
result = multiply(result, 2);
|
||||
}
|
||||
});
|
||||
|
||||
var methodCallTime = measureTime(function() {
|
||||
var result = 0;
|
||||
for (var i = 0; i < iterations.simple; i++) {
|
||||
result = obj.method(i);
|
||||
result = obj.nested.deepMethod(result, i);
|
||||
}
|
||||
});
|
||||
|
||||
var complexCallTime = measureTime(function() {
|
||||
var result = 0;
|
||||
for (var i = 0; i < iterations.medium; i++) {
|
||||
result = complexCalc(i, i + 1, i + 2);
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
simpleCallTime: simpleCallTime,
|
||||
methodCallTime: methodCallTime,
|
||||
complexCallTime: complexCallTime
|
||||
};
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Benchmark: Array Operations
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function benchArrayOps() {
|
||||
var pushTime = measureTime(function() {
|
||||
var arr = [];
|
||||
for (var i = 0; i < iterations.medium; i++) {
|
||||
push(arr, i);
|
||||
}
|
||||
});
|
||||
|
||||
var arr = [];
|
||||
for (var i = 0; i < 10000; i++) push(arr, i);
|
||||
|
||||
var accessTime = measureTime(function() {
|
||||
var sum = 0;
|
||||
for (var i = 0; i < iterations.medium; i++) {
|
||||
sum += arr[i % 10000];
|
||||
}
|
||||
});
|
||||
|
||||
var iterateTime = measureTime(function() {
|
||||
var sum = 0;
|
||||
for (var j = 0; j < 1000; j++) {
|
||||
for (var i = 0; i < length(arr); i++) {
|
||||
sum += arr[i];
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
pushTime: pushTime,
|
||||
accessTime: accessTime,
|
||||
iterateTime: iterateTime
|
||||
};
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Benchmark: Object Creation
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function benchObjectCreation() {
|
||||
var literalTime = measureTime(function() {
|
||||
for (var i = 0; i < iterations.medium; i++) {
|
||||
var obj = { x: i, y: i * 2, z: i * 3 };
|
||||
}
|
||||
});
|
||||
|
||||
function Point(x, y) {
|
||||
return {x,y}
|
||||
}
|
||||
|
||||
var defructorTime = measureTime(function() {
|
||||
for (var i = 0; i < iterations.medium; i++) {
|
||||
var p = Point(i, i * 2);
|
||||
}
|
||||
});
|
||||
|
||||
var protoObj = {
|
||||
x: 0,
|
||||
y: 0,
|
||||
move: function(dx, dy) {
|
||||
this.x += dx;
|
||||
this.y += dy;
|
||||
}
|
||||
};
|
||||
|
||||
var prototypeTime = measureTime(function() {
|
||||
for (var i = 0; i < iterations.medium; i++) {
|
||||
var obj = meme(protoObj);
|
||||
obj.x = i;
|
||||
obj.y = i * 2;
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
literalTime: literalTime,
|
||||
defructorTime: defructorTime,
|
||||
prototypeTime: prototypeTime
|
||||
};
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Benchmark: String Operations
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function benchStringOps() {
|
||||
var concatTime = measureTime(function() {
|
||||
var str = "";
|
||||
for (var i = 0; i < iterations.complex; i++) {
|
||||
str = "test" + i + "value";
|
||||
}
|
||||
});
|
||||
|
||||
var strings = [];
|
||||
for (var i = 0; i < 1000; i++) {
|
||||
push(strings, "string" + i);
|
||||
}
|
||||
|
||||
var joinTime = measureTime(function() {
|
||||
for (var i = 0; i < iterations.complex; i++) {
|
||||
var result = text(strings, ",");
|
||||
}
|
||||
});
|
||||
|
||||
var splitTime = measureTime(function() {
|
||||
var str = "a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p";
|
||||
for (var i = 0; i < iterations.medium; i++) {
|
||||
var parts = array(str, ",");
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
concatTime: concatTime,
|
||||
joinTime: joinTime,
|
||||
splitTime: splitTime
|
||||
};
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Benchmark: Arithmetic Operations
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function benchArithmetic() {
|
||||
var intMathTime = measureTime(function() {
|
||||
var result = 1;
|
||||
for (var i = 0; i < iterations.simple; i++) {
|
||||
result = ((result + i) * 2 - 1) / 3;
|
||||
result = result % 1000 + 1;
|
||||
}
|
||||
});
|
||||
|
||||
var floatMathTime = measureTime(function() {
|
||||
var result = 1.5;
|
||||
for (var i = 0; i < iterations.simple; i++) {
|
||||
result = math.sine(result) + math.cosine(i * 0.01);
|
||||
result = math.sqrt(abs(result)) + 0.1;
|
||||
}
|
||||
});
|
||||
|
||||
var bitwiseTime = measureTime(function() {
|
||||
var result = 0;
|
||||
for (var i = 0; i < iterations.simple; i++) {
|
||||
result = (result ^ i) & 0xFFFF;
|
||||
result = (result << 1) | (result >> 15);
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
intMathTime: intMathTime,
|
||||
floatMathTime: floatMathTime,
|
||||
bitwiseTime: bitwiseTime
|
||||
};
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Benchmark: Closure Operations
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function benchClosures() {
|
||||
function makeAdder(x) {
|
||||
return function(y) { return x + y; };
|
||||
}
|
||||
|
||||
var closureCreateTime = measureTime(function() {
|
||||
var funcs = [];
|
||||
for (var i = 0; i < iterations.medium; i++) {
|
||||
push(funcs, makeAdder(i));
|
||||
}
|
||||
});
|
||||
|
||||
var adders = [];
|
||||
for (var i = 0; i < 1000; i++) {
|
||||
push(adders, makeAdder(i));
|
||||
}
|
||||
|
||||
var closureCallTime = measureTime(function() {
|
||||
var sum = 0;
|
||||
for (var i = 0; i < iterations.medium; i++) {
|
||||
sum += adders[i % 1000](i);
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
closureCreateTime: closureCreateTime,
|
||||
closureCallTime: closureCallTime
|
||||
};
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// Main benchmark runner
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
log.console("JavaScript Performance Benchmark");
|
||||
log.console("======================\n");
|
||||
|
||||
// Property Access
|
||||
log.console("BENCHMARK: Property Access");
|
||||
var propResults = benchPropertyAccess();
|
||||
log.console(" Read time: " + propResults.readTime.toFixed(3) + "s => " +
|
||||
(iterations.simple / propResults.readTime).toFixed(1) + " reads/sec [" +
|
||||
(propResults.readTime / iterations.simple * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console(" Write time: " + propResults.writeTime.toFixed(3) + "s => " +
|
||||
(iterations.simple / propResults.writeTime).toFixed(1) + " writes/sec [" +
|
||||
(propResults.writeTime / iterations.simple * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console("");
|
||||
|
||||
// Function Calls
|
||||
log.console("BENCHMARK: Function Calls");
|
||||
var funcResults = benchFunctionCalls();
|
||||
log.console(" Simple calls: " + funcResults.simpleCallTime.toFixed(3) + "s => " +
|
||||
(iterations.simple / funcResults.simpleCallTime).toFixed(1) + " calls/sec [" +
|
||||
(funcResults.simpleCallTime / iterations.simple * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console(" Method calls: " + funcResults.methodCallTime.toFixed(3) + "s => " +
|
||||
(iterations.simple / funcResults.methodCallTime).toFixed(1) + " calls/sec [" +
|
||||
(funcResults.methodCallTime / iterations.simple * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console(" Complex calls: " + funcResults.complexCallTime.toFixed(3) + "s => " +
|
||||
(iterations.medium / funcResults.complexCallTime).toFixed(1) + " calls/sec [" +
|
||||
(funcResults.complexCallTime / iterations.medium * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console("");
|
||||
|
||||
// Array Operations
|
||||
log.console("BENCHMARK: Array Operations");
|
||||
var arrayResults = benchArrayOps();
|
||||
log.console(" Push: " + arrayResults.pushTime.toFixed(3) + "s => " +
|
||||
(iterations.medium / arrayResults.pushTime).toFixed(1) + " pushes/sec [" +
|
||||
(arrayResults.pushTime / iterations.medium * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console(" Access: " + arrayResults.accessTime.toFixed(3) + "s => " +
|
||||
(iterations.medium / arrayResults.accessTime).toFixed(1) + " accesses/sec [" +
|
||||
(arrayResults.accessTime / iterations.medium * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console(" Iterate: " + arrayResults.iterateTime.toFixed(3) + "s => " +
|
||||
(1000 / arrayResults.iterateTime).toFixed(1) + " full iterations/sec");
|
||||
log.console("");
|
||||
|
||||
// Object Creation
|
||||
log.console("BENCHMARK: Object Creation");
|
||||
var objResults = benchObjectCreation();
|
||||
log.console(" Literal: " + objResults.literalTime.toFixed(3) + "s => " +
|
||||
(iterations.medium / objResults.literalTime).toFixed(1) + " creates/sec [" +
|
||||
(objResults.literalTime / iterations.medium * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console(" Constructor: " + objResults.defructorTime.toFixed(3) + "s => " +
|
||||
(iterations.medium / objResults.defructorTime).toFixed(1) + " creates/sec [" +
|
||||
(objResults.defructorTime / iterations.medium * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console(" Prototype: " + objResults.prototypeTime.toFixed(3) + "s => " +
|
||||
(iterations.medium / objResults.prototypeTime).toFixed(1) + " creates/sec [" +
|
||||
(objResults.prototypeTime / iterations.medium * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console("");
|
||||
|
||||
// String Operations
|
||||
log.console("BENCHMARK: String Operations");
|
||||
var strResults = benchStringOps();
|
||||
log.console(" Concat: " + strResults.concatTime.toFixed(3) + "s => " +
|
||||
(iterations.complex / strResults.concatTime).toFixed(1) + " concats/sec [" +
|
||||
(strResults.concatTime / iterations.complex * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console(" Join: " + strResults.joinTime.toFixed(3) + "s => " +
|
||||
(iterations.complex / strResults.joinTime).toFixed(1) + " joins/sec [" +
|
||||
(strResults.joinTime / iterations.complex * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console(" Split: " + strResults.splitTime.toFixed(3) + "s => " +
|
||||
(iterations.medium / strResults.splitTime).toFixed(1) + " splits/sec [" +
|
||||
(strResults.splitTime / iterations.medium * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console("");
|
||||
|
||||
// Arithmetic Operations
|
||||
log.console("BENCHMARK: Arithmetic Operations");
|
||||
var mathResults = benchArithmetic();
|
||||
log.console(" Integer math: " + mathResults.intMathTime.toFixed(3) + "s => " +
|
||||
(iterations.simple / mathResults.intMathTime).toFixed(1) + " ops/sec [" +
|
||||
(mathResults.intMathTime / iterations.simple * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console(" Float math: " + mathResults.floatMathTime.toFixed(3) + "s => " +
|
||||
(iterations.simple / mathResults.floatMathTime).toFixed(1) + " ops/sec [" +
|
||||
(mathResults.floatMathTime / iterations.simple * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console(" Bitwise: " + mathResults.bitwiseTime.toFixed(3) + "s => " +
|
||||
(iterations.simple / mathResults.bitwiseTime).toFixed(1) + " ops/sec [" +
|
||||
(mathResults.bitwiseTime / iterations.simple * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console("");
|
||||
|
||||
// Closures
|
||||
log.console("BENCHMARK: Closures");
|
||||
var closureResults = benchClosures();
|
||||
log.console(" Create: " + closureResults.closureCreateTime.toFixed(3) + "s => " +
|
||||
(iterations.medium / closureResults.closureCreateTime).toFixed(1) + " creates/sec [" +
|
||||
(closureResults.closureCreateTime / iterations.medium * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console(" Call: " + closureResults.closureCallTime.toFixed(3) + "s => " +
|
||||
(iterations.medium / closureResults.closureCallTime).toFixed(1) + " calls/sec [" +
|
||||
(closureResults.closureCallTime / iterations.medium * 1e9).toFixed(1) + " ns/op]");
|
||||
log.console("");
|
||||
|
||||
log.console("---------------------------------------------------------");
|
||||
log.console("Benchmark complete.\n");
|
||||
|
||||
$stop()
|
||||
@@ -1,40 +0,0 @@
|
||||
var blob = use('blob')
|
||||
|
||||
var iter = 50, limit = 2.0;
|
||||
var zr, zi, cr, ci, tr, ti;
|
||||
|
||||
var h = Number(arg[0]) || 500
|
||||
var w = h
|
||||
|
||||
log.console(`P4\n${w} ${h}`);
|
||||
|
||||
for (var y = 0; y < h; ++y) {
|
||||
// Create a blob for the row - we need w bits
|
||||
var row = blob(w);
|
||||
|
||||
for (var x = 0; x < w; ++x) {
|
||||
zr = zi = tr = ti = 0;
|
||||
cr = 2 * x / w - 1.5;
|
||||
ci = 2 * y / h - 1;
|
||||
for (var i = 0; i < iter && (tr + ti <= limit * limit); ++i) {
|
||||
zi = 2 * zr * zi + ci;
|
||||
zr = tr - ti + cr;
|
||||
tr = zr * zr;
|
||||
ti = zi * zi;
|
||||
}
|
||||
|
||||
// Write a 1 bit if inside the set, 0 if outside
|
||||
if (tr + ti <= limit * limit)
|
||||
row.write_bit(1);
|
||||
else
|
||||
row.write_bit(0);
|
||||
}
|
||||
|
||||
// Convert the blob to stone (immutable) to prepare for output
|
||||
stone(row)
|
||||
|
||||
// Output the blob data as raw bytes
|
||||
log.console(text(row, 'b'));
|
||||
}
|
||||
|
||||
$stop()
|
||||
@@ -1,12 +0,0 @@
|
||||
var math = use('math/radians')
|
||||
var N = 1000000;
|
||||
var num = 0;
|
||||
for (var i = 0; i < N; i ++) {
|
||||
var x = 2 * $random();
|
||||
var y = $random();
|
||||
if (y < math.sine(x * x))
|
||||
num++;
|
||||
}
|
||||
log.console(2 * num / N);
|
||||
|
||||
$stop()
|
||||
@@ -1,155 +0,0 @@
|
||||
var math = use('math/radians')
|
||||
var SOLAR_MASS = 4 * pi * pi;
|
||||
var DAYS_PER_YEAR = 365.24;
|
||||
|
||||
function Body(x, y, z, vx, vy, vz, mass) {
|
||||
return {x, y, z, vx, vy, vz, mass};
|
||||
}
|
||||
|
||||
function Jupiter() {
|
||||
return Body(
|
||||
4.84143144246472090e+00,
|
||||
-1.16032004402742839e+00,
|
||||
-1.03622044471123109e-01,
|
||||
1.66007664274403694e-03 * DAYS_PER_YEAR,
|
||||
7.69901118419740425e-03 * DAYS_PER_YEAR,
|
||||
-6.90460016972063023e-05 * DAYS_PER_YEAR,
|
||||
9.54791938424326609e-04 * SOLAR_MASS
|
||||
);
|
||||
}
|
||||
|
||||
function Saturn() {
|
||||
return Body(
|
||||
8.34336671824457987e+00,
|
||||
4.12479856412430479e+00,
|
||||
-4.03523417114321381e-01,
|
||||
-2.76742510726862411e-03 * DAYS_PER_YEAR,
|
||||
4.99852801234917238e-03 * DAYS_PER_YEAR,
|
||||
2.30417297573763929e-05 * DAYS_PER_YEAR,
|
||||
2.85885980666130812e-04 * SOLAR_MASS
|
||||
);
|
||||
}
|
||||
|
||||
function Uranus() {
|
||||
return Body(
|
||||
1.28943695621391310e+01,
|
||||
-1.51111514016986312e+01,
|
||||
-2.23307578892655734e-01,
|
||||
2.96460137564761618e-03 * DAYS_PER_YEAR,
|
||||
2.37847173959480950e-03 * DAYS_PER_YEAR,
|
||||
-2.96589568540237556e-05 * DAYS_PER_YEAR,
|
||||
4.36624404335156298e-05 * SOLAR_MASS
|
||||
);
|
||||
}
|
||||
|
||||
function Neptune() {
|
||||
return Body(
|
||||
1.53796971148509165e+01,
|
||||
-2.59193146099879641e+01,
|
||||
1.79258772950371181e-01,
|
||||
2.68067772490389322e-03 * DAYS_PER_YEAR,
|
||||
1.62824170038242295e-03 * DAYS_PER_YEAR,
|
||||
-9.51592254519715870e-05 * DAYS_PER_YEAR,
|
||||
5.15138902046611451e-05 * SOLAR_MASS
|
||||
);
|
||||
}
|
||||
|
||||
function Sun() {
|
||||
return Body(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, SOLAR_MASS);
|
||||
}
|
||||
|
||||
var bodies = Array(Sun(), Jupiter(), Saturn(), Uranus(), Neptune());
|
||||
|
||||
function offsetMomentum() {
|
||||
var px = 0;
|
||||
var py = 0;
|
||||
var pz = 0;
|
||||
var size = length(bodies);
|
||||
for (var i = 0; i < size; i++) {
|
||||
var body = bodies[i];
|
||||
var mass = body.mass;
|
||||
px += body.vx * mass;
|
||||
py += body.vy * mass;
|
||||
pz += body.vz * mass;
|
||||
}
|
||||
|
||||
var body = bodies[0];
|
||||
body.vx = -px / SOLAR_MASS;
|
||||
body.vy = -py / SOLAR_MASS;
|
||||
body.vz = -pz / SOLAR_MASS;
|
||||
}
|
||||
|
||||
function advance(dt) {
|
||||
var size = length(bodies);
|
||||
|
||||
for (var i = 0; i < size; i++) {
|
||||
var bodyi = bodies[i];
|
||||
var vxi = bodyi.vx;
|
||||
var vyi = bodyi.vy;
|
||||
var vzi = bodyi.vz;
|
||||
for (var j = i + 1; j < size; j++) {
|
||||
var bodyj = bodies[j];
|
||||
var dx = bodyi.x - bodyj.x;
|
||||
var dy = bodyi.y - bodyj.y;
|
||||
var dz = bodyi.z - bodyj.z;
|
||||
|
||||
var d2 = dx * dx + dy * dy + dz * dz;
|
||||
var mag = dt / (d2 * math.sqrt(d2));
|
||||
|
||||
var massj = bodyj.mass;
|
||||
vxi -= dx * massj * mag;
|
||||
vyi -= dy * massj * mag;
|
||||
vzi -= dz * massj * mag;
|
||||
|
||||
var massi = bodyi.mass;
|
||||
bodyj.vx += dx * massi * mag;
|
||||
bodyj.vy += dy * massi * mag;
|
||||
bodyj.vz += dz * massi * mag;
|
||||
}
|
||||
bodyi.vx = vxi;
|
||||
bodyi.vy = vyi;
|
||||
bodyi.vz = vzi;
|
||||
}
|
||||
|
||||
for (var i = 0; i < size; i++) {
|
||||
var body = bodies[i];
|
||||
body.x += dt * body.vx;
|
||||
body.y += dt * body.vy;
|
||||
body.z += dt * body.vz;
|
||||
}
|
||||
}
|
||||
|
||||
function energy() {
|
||||
var e = 0;
|
||||
var size = length(bodies);
|
||||
|
||||
for (var i = 0; i < size; i++) {
|
||||
var bodyi = bodies[i];
|
||||
|
||||
e += 0.5 * bodyi.mass * ( bodyi.vx * bodyi.vx +
|
||||
bodyi.vy * bodyi.vy + bodyi.vz * bodyi.vz );
|
||||
|
||||
for (var j = i + 1; j < size; j++) {
|
||||
var bodyj = bodies[j];
|
||||
var dx = bodyi.x - bodyj.x;
|
||||
var dy = bodyi.y - bodyj.y;
|
||||
var dz = bodyi.z - bodyj.z;
|
||||
|
||||
var distance = math.sqrt(dx * dx + dy * dy + dz * dz);
|
||||
e -= (bodyi.mass * bodyj.mass) / distance;
|
||||
}
|
||||
}
|
||||
return e;
|
||||
}
|
||||
|
||||
var n = arg[0] || 100000
|
||||
|
||||
offsetMomentum();
|
||||
|
||||
log.console(`n = ${n}`)
|
||||
log.console(energy().toFixed(9))
|
||||
for (var i = 0; i < n; i++)
|
||||
advance(0.01);
|
||||
log.console(energy().toFixed(9))
|
||||
|
||||
$stop()
|
||||
@@ -1,78 +0,0 @@
|
||||
var nota = use('nota')
|
||||
var os = use('os')
|
||||
var io = use('fd')
|
||||
var json = use('json')
|
||||
|
||||
var ll = io.slurp('benchmarks/nota.json')
|
||||
|
||||
var newarr = []
var accstr = ""
for (var i = 0; i < 10000; i++) {
  accstr += i;
  push(newarr, i.toString())
}

// Arrays to store timing results
var jsonDecodeTimes = [];
var jsonEncodeTimes = [];
var notaEncodeTimes = [];
var notaDecodeTimes = [];
var notaSizes = [];

// Run 100 tests
for (var i = 0; i < 100; i++) {
  // JSON Decode test
  var start = os.now();
  var jll = json.decode(ll);
  push(jsonDecodeTimes, (os.now() - start) * 1000);

  // JSON Encode test
  start = os.now();
  var jsonStr = json.encode(jll);
  push(jsonEncodeTimes, (os.now() - start) * 1000);

  // NOTA Encode test
  start = os.now();
  var nll = nota.encode(jll);
  push(notaEncodeTimes, (os.now() - start) * 1000);

  // NOTA Decode test
  start = os.now();
  var oll = nota.decode(nll);
  push(notaDecodeTimes, (os.now() - start) * 1000);
}

|
||||
// Calculate statistics
|
||||
function getStats(arr) {
|
||||
return {
|
||||
avg: reduce(arr, (a,b) => a+b, 0) / length(arr),
|
||||
min: reduce(arr, min),
|
||||
max: reduce(arr, max)
|
||||
};
|
||||
}
|
||||
|
||||
// Pretty print results
|
||||
log.console("\n== Performance Test Results (100 iterations) ==");
|
||||
log.console("\nJSON Decoding (ms):");
|
||||
def jsonDecStats = getStats(jsonDecodeTimes);
|
||||
log.console(`Average: ${jsonDecStats.avg.toFixed(2)} ms`);
|
||||
log.console(`Min: ${jsonDecStats.min.toFixed(2)} ms`);
|
||||
log.console(`Max: ${jsonDecStats.max.toFixed(2)} ms`);
|
||||
|
||||
log.console("\nJSON Encoding (ms):");
|
||||
def jsonEncStats = getStats(jsonEncodeTimes);
|
||||
log.console(`Average: ${jsonEncStats.avg.toFixed(2)} ms`);
|
||||
log.console(`Min: ${jsonEncStats.min.toFixed(2)} ms`);
|
||||
log.console(`Max: ${jsonEncStats.max.toFixed(2)} ms`);
|
||||
|
||||
log.console("\nNOTA Encoding (ms):");
|
||||
def notaEncStats = getStats(notaEncodeTimes);
|
||||
log.console(`Average: ${notaEncStats.avg.toFixed(2)} ms`);
|
||||
log.console(`Min: ${notaEncStats.min.toFixed(2)} ms`);
|
||||
log.console(`Max: ${notaEncStats.max.toFixed(2)} ms`);
|
||||
|
||||
log.console("\nNOTA Decoding (ms):");
|
||||
def notaDecStats = getStats(notaDecodeTimes);
|
||||
log.console(`Average: ${notaDecStats.avg.toFixed(2)} ms`);
|
||||
log.console(`Min: ${notaDecStats.min.toFixed(2)} ms`);
|
||||
log.console(`Max: ${notaDecStats.max.toFixed(2)} ms`);
|
||||
|
||||
76
benchmarks/nota.js
Normal file
@@ -0,0 +1,76 @@
|
||||
var nota = use('nota')
|
||||
var os = use('os')
|
||||
var io = use('io')
|
||||
|
||||
var ll = io.slurp('benchmarks/nota.json')
|
||||
|
||||
var newarr = []
|
||||
var accstr = ""
|
||||
for (var i = 0; i < 10000; i++) {
|
||||
accstr += i;
|
||||
newarr.push(i.toString())
|
||||
}
|
||||
// Arrays to store timing results
|
||||
var jsonDecodeTimes = [];
|
||||
var jsonEncodeTimes = [];
|
||||
var notaEncodeTimes = [];
|
||||
var notaDecodeTimes = [];
|
||||
var notaSizes = [];
|
||||
|
||||
// Run 100 tests
|
||||
for (let i = 0; i < 100; i++) {
|
||||
// JSON Decode test
|
||||
let start = os.now();
|
||||
var jll = json.decode(ll);
|
||||
jsonDecodeTimes.push((os.now() - start) * 1000);
|
||||
|
||||
// JSON Encode test
|
||||
start = os.now();
|
||||
let jsonStr = JSON.stringify(jll);
|
||||
jsonEncodeTimes.push((os.now() - start) * 1000);
|
||||
|
||||
// NOTA Encode test
|
||||
start = os.now();
|
||||
var nll = nota.encode(jll);
|
||||
notaEncodeTimes.push((os.now() - start) * 1000);
|
||||
|
||||
// NOTA Decode test
|
||||
start = os.now();
|
||||
var oll = nota.decode(nll);
|
||||
notaDecodeTimes.push((os.now() - start) * 1000);
|
||||
}
|
||||
|
||||
// Calculate statistics
|
||||
function getStats(arr) {
|
||||
const avg = arr.reduce((a, b) => a + b) / arr.length;
|
||||
const min = Math.min(...arr);
|
||||
const max = Math.max(...arr);
|
||||
return { avg, min, max };
|
||||
}
|
||||
|
||||
// Pretty print results
|
||||
console.log("\n=== Performance Test Results (100 iterations) ===");
|
||||
console.log("\nJSON Decoding (ms):");
|
||||
const jsonDecStats = getStats(jsonDecodeTimes);
|
||||
console.log(`Average: ${jsonDecStats.avg.toFixed(2)} ms`);
|
||||
console.log(`Min: ${jsonDecStats.min.toFixed(2)} ms`);
|
||||
console.log(`Max: ${jsonDecStats.max.toFixed(2)} ms`);
|
||||
|
||||
console.log("\nJSON Encoding (ms):");
|
||||
const jsonEncStats = getStats(jsonEncodeTimes);
|
||||
console.log(`Average: ${jsonEncStats.avg.toFixed(2)} ms`);
|
||||
console.log(`Min: ${jsonEncStats.min.toFixed(2)} ms`);
|
||||
console.log(`Max: ${jsonEncStats.max.toFixed(2)} ms`);
|
||||
|
||||
console.log("\nNOTA Encoding (ms):");
|
||||
const notaEncStats = getStats(notaEncodeTimes);
|
||||
console.log(`Average: ${notaEncStats.avg.toFixed(2)} ms`);
|
||||
console.log(`Min: ${notaEncStats.min.toFixed(2)} ms`);
|
||||
console.log(`Max: ${notaEncStats.max.toFixed(2)} ms`);
|
||||
|
||||
console.log("\nNOTA Decoding (ms):");
|
||||
const notaDecStats = getStats(notaDecodeTimes);
|
||||
console.log(`Average: ${notaDecStats.avg.toFixed(2)} ms`);
|
||||
console.log(`Min: ${notaDecStats.min.toFixed(2)} ms`);
|
||||
console.log(`Max: ${notaDecStats.max.toFixed(2)} ms`);
|
||||
|
||||
@@ -1,52 +0,0 @@
def math = use('math/radians');

function A(i,j) {
  return 1/((i+j)*(i+j+1)/2+i+1);
}

function Au(u,v) {
  for (var i=0; i<length(u); ++i) {
    var t = 0;
    for (var j=0; j<length(u); ++j)
      t += A(i,j) * u[j];

    v[i] = t;
  }
}

function Atu(u,v) {
  for (var i=0; i<length(u); ++i) {
    var t = 0;
    for (var j=0; j<length(u); ++j)
      t += A(j,i) * u[j];

    v[i] = t;
  }
}

function AtAu(u,v,w) {
  Au(u,w);
  Atu(w,v);
}

function spectralnorm(n) {
  var i, u=[], v=[], w=[], vv=0, vBv=0;
  for (i=0; i<n; ++i)
    u[i] = 1; v[i] = w[i] = 0;

  for (i=0; i<10; ++i) {
    AtAu(u,v,w);
    AtAu(v,u,w);
  }

  for (i=0; i<n; ++i) {
    vBv += u[i]*v[i];
    vv += v[i]*v[i];
  }

  return math.sqrt(vBv/vv);
}

log.console(spectralnorm(arg[0]).toFixed(9));

$stop()
@@ -14,18 +14,18 @@
// Helper to run a function repeatedly and measure total time in seconds.
// Returns elapsed time in seconds.
function measureTime(fn, iterations) {
var t1 = os.now();
for (var i = 0; i < iterations; i++) {
let t1 = os.now();
for (let i = 0; i < iterations; i++) {
fn();
}
var t2 = os.now();
let t2 = os.now();
return t2 - t1;
}

// We'll define a function that does `encode -> decode` for a given value:
function roundTripWota(value) {
var encoded = wota.encode(value);
var decoded = wota.decode(encoded);
let encoded = wota.encode(value);
let decoded = wota.decode(encoded);
// Not doing a deep compare here, just measuring performance.
// (We trust the test suite to verify correctness.)
}
@@ -36,7 +36,7 @@ function roundTripWota(value) {
// iterations: how many times to loop
//
// You can tweak these as you like for heavier or lighter tests.
def benchmarks = [
const benchmarks = [
{
name: "Small Integers",
data: [0, 42, -1, 2023],
@@ -63,33 +63,44 @@ def benchmarks = [
{
name: "Large Array (1k numbers)",
// A thousand random numbers
data: [ array(1000, i => i *0.5) ],
data: [ Array.from({length:1000}, (_, i) => i * 0.5) ],
iterations: 1000
},
{
name: "Large Binary Blob (256KB)",
// A 256KB ArrayBuffer
data: [ new Uint8Array(256 * 1024).buffer ],
iterations: 200
}
];

// Print a header
log.console("Wota Encode/Decode Benchmark");
log.console("===================\n");
console.log("Wota Encode/Decode Benchmark");
console.log("============================\n");

// We'll run each benchmark scenario in turn.
arrfor(benchmarks, function(bench) {
var totalIterations = bench.iterations * length(bench.data);
for (let bench of benchmarks) {
// We'll measure how long it takes to do 'iterations' *for each test value*
// in bench.data. The total loop count is `bench.iterations * bench.data.length`.
// Then we compute an overall encode+decode throughput (ops/s).
let totalIterations = bench.iterations * bench.data.length;

// We'll define a function that does a roundTrip for *each* data item in bench.data
// to measure in one loop iteration. Then we multiply by bench.iterations.
function runAllData() {
arrfor(bench.data, roundTripWota)
for (let val of bench.data) {
roundTripWota(val);
}
}

var elapsedSec = measureTime(runAllData, bench.iterations);
var opsPerSec = (totalIterations / elapsedSec).toFixed(1);
let elapsedSec = measureTime(runAllData, bench.iterations);
let opsPerSec = (totalIterations / elapsedSec).toFixed(1);

log.console(`${bench.name}:`);
log.console(` Iterations: ${bench.iterations} × ${length(bench.data)} data items = ${totalIterations}`);
log.console(` Elapsed: ${elapsedSec.toFixed(3)} s`);
log.console(` Throughput: ${opsPerSec} encode+decode ops/sec\n`);
})
console.log(`${bench.name}:`);
console.log(` Iterations: ${bench.iterations} × ${bench.data.length} data items = ${totalIterations}`);
console.log(` Elapsed: ${elapsedSec.toFixed(3)} s`);
console.log(` Throughput: ${opsPerSec} encode+decode ops/sec\n`);
}

// All done
log.console("Benchmark completed.\n");
console.log("Benchmark completed.\n");
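A quick sketch of how the throughput figure above is derived from measureTime; the elapsed value is assumed for illustration:

// Suppose one scenario: 4 data items, 1000 iterations
let totalIterations = 1000 * 4;                  // 4000 round trips in total
let elapsedSec = measureTime(runAllData, 1000);  // assume this returns 0.5 s
let opsPerSec = totalIterations / elapsedSec;    // 4000 / 0.5 = 8000 ops/sec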
@@ -2,56 +2,47 @@
|
||||
// benchmark_wota_nota_json.js
|
||||
//
|
||||
// Usage in QuickJS:
|
||||
// qjs benchmark_wota_nota_json.js <LibraryName> <ScenarioName>
|
||||
// qjs benchmark_wota_nota_json.js
|
||||
//
|
||||
// Ensure wota, nota, json, and os are all available, e.g.:
|
||||
var wota = use('wota');
|
||||
var nota = use('nota');
|
||||
var json = use('json');
|
||||
var jswota = use('jswota')
|
||||
var os = use('os');
|
||||
//
|
||||
|
||||
// Parse command line arguments
|
||||
if (length(arg) != 2) {
|
||||
log.console('Usage: cell benchmark_wota_nota_json.ce <LibraryName> <ScenarioName>');
|
||||
$stop()
|
||||
}
|
||||
|
||||
var lib_name = arg[0];
|
||||
var scenario_name = arg[1];
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// 1. Setup "libraries" array to easily switch among wota, nota, and json
|
||||
// 1. Setup "libraries" array to easily switch among Wota, Nota, and JSON
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
def libraries = [
|
||||
const libraries = [
|
||||
{
|
||||
name: "wota",
|
||||
name: "Wota",
|
||||
encode: wota.encode,
|
||||
decode: wota.decode,
|
||||
// wota produces an ArrayBuffer. We'll count `buffer.byteLength` as size.
|
||||
// Wota produces an ArrayBuffer. We'll count `buffer.byteLength` as size.
|
||||
getSize(encoded) {
|
||||
return length(encoded);
|
||||
return encoded.byteLength;
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "nota",
|
||||
name: "Nota",
|
||||
encode: nota.encode,
|
||||
decode: nota.decode,
|
||||
// nota also produces an ArrayBuffer:
|
||||
// Nota also produces an ArrayBuffer:
|
||||
getSize(encoded) {
|
||||
return length(encoded);
|
||||
return encoded.byteLength;
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "json",
|
||||
name: "JSON",
|
||||
encode: json.encode,
|
||||
decode: json.decode,
|
||||
// json produces a JS string. We'll measure its UTF-16 code unit length
|
||||
// JSON produces a JS string. We'll measure its UTF-16 code unit length
|
||||
// as a rough "size". Alternatively, you could convert to UTF-8 for
|
||||
// a more accurate byte size. Here we just use `string.length`.
|
||||
getSize(encodedStr) {
|
||||
return length(encodedStr);
|
||||
return encodedStr.length;
|
||||
}
|
||||
}
|
||||
];
|
||||
@@ -61,29 +52,24 @@ def libraries = [
|
||||
// Each scenario has { name, data, iterations }
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
def benchmarks = [
|
||||
const benchmarks = [
|
||||
{
|
||||
name: "empty",
|
||||
data: [{}, {}, {}, {}],
|
||||
iterations: 10000
|
||||
},
|
||||
{
|
||||
name: "integers",
|
||||
name: "Small Integers",
|
||||
data: [0, 42, -1, 2023],
|
||||
iterations: 100000
|
||||
},
|
||||
{
|
||||
name: "floats",
|
||||
name: "Floating point",
|
||||
data: [0.1, 1e-50, 3.14159265359],
|
||||
iterations: 100000
|
||||
},
|
||||
{
|
||||
name: "strings",
|
||||
data: ["Hello, wota!", "short", "Emoji: \u{1f600}\u{1f64f}"],
|
||||
name: "Strings (short, emoji)",
|
||||
data: ["Hello, Wota!", "short", "Emoji: \u{1f600}\u{1f64f}"],
|
||||
iterations: 100000
|
||||
},
|
||||
{
|
||||
name: "objects",
|
||||
name: "Small Objects",
|
||||
data: [
|
||||
{ a:1, b:2.2, c:"3", d:false },
|
||||
{ x:42, y:null, z:"test" }
|
||||
@@ -91,15 +77,20 @@ def benchmarks = [
|
||||
iterations: 50000
|
||||
},
|
||||
{
|
||||
name: "nested",
|
||||
name: "Nested Arrays",
|
||||
data: [ [ [ [1,2], [3,4] ] ], [[[]]], [1, [2, [3, [4]]]] ],
|
||||
iterations: 50000
|
||||
},
|
||||
{
|
||||
name: "large_array",
|
||||
data: [ array(1000, i => i) ],
|
||||
name: "Large Array (1k integers)",
|
||||
data: [ Array.from({length:1000}, (_, i) => i) ],
|
||||
iterations: 1000
|
||||
},
|
||||
{
|
||||
name: "Large Binary Blob (256KB)",
|
||||
data: [ new Uint8Array(256 * 1024).buffer ],
|
||||
iterations: 200
|
||||
}
|
||||
];
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
@@ -107,9 +98,9 @@ def benchmarks = [
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function measureTime(fn) {
|
||||
var start = os.now();
|
||||
let start = os.now();
|
||||
fn();
|
||||
var end = os.now();
|
||||
let end = os.now();
|
||||
return (end - start); // in seconds
|
||||
}
|
||||
|
||||
@@ -117,7 +108,7 @@ function measureTime(fn) {
|
||||
// 4. For each library, we run each benchmark scenario and measure:
|
||||
// - Encoding time (seconds)
|
||||
// - Decoding time (seconds)
|
||||
// - Total encoded size (bytes or code units for json)
|
||||
// - Total encoded size (bytes or code units for JSON)
|
||||
//
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
@@ -127,19 +118,19 @@ function runBenchmarkForLibrary(lib, bench) {
|
||||
|
||||
// Pre-store the encoded results for all items so we can measure decode time
|
||||
// in a separate pass. Also measure total size once.
|
||||
var encodedList = [];
|
||||
var totalSize = 0;
|
||||
let encodedList = [];
|
||||
let totalSize = 0;
|
||||
|
||||
// 1) Measure ENCODING
|
||||
var encodeTime = measureTime(() => {
|
||||
for (var i = 0; i < bench.iterations; i++) {
|
||||
let encodeTime = measureTime(() => {
|
||||
for (let i = 0; i < bench.iterations; i++) {
|
||||
// For each data item, encode it
|
||||
for (var j = 0; j < length(bench.data); j++) {
|
||||
var e = lib.encode(bench.data[j]);
|
||||
for (let d of bench.data) {
|
||||
let e = lib.encode(d);
|
||||
// store only in the very first iteration, so we can decode them later
|
||||
// but do not store them every iteration or we blow up memory.
|
||||
if (i == 0) {
|
||||
push(encodedList, e);
|
||||
if (i === 0) {
|
||||
encodedList.push(e);
|
||||
totalSize += lib.getSize(e);
|
||||
}
|
||||
}
|
||||
@@ -147,9 +138,13 @@ function runBenchmarkForLibrary(lib, bench) {
|
||||
});
|
||||
|
||||
// 2) Measure DECODING
|
||||
var decodeTime = measureTime(() => {
|
||||
for (var i = 0; i < bench.iterations; i++) {
|
||||
arrfor(encodedList, lib.decode)
|
||||
let decodeTime = measureTime(() => {
|
||||
for (let i = 0; i < bench.iterations; i++) {
|
||||
// decode everything we stored during the first iteration
|
||||
for (let e of encodedList) {
|
||||
let decoded = lib.decode(e);
|
||||
// not verifying correctness here, just measuring speed
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -157,43 +152,31 @@ function runBenchmarkForLibrary(lib, bench) {
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
// 5. Main driver: run only the specified library and scenario
|
||||
// 5. Main driver: run across all benchmarks, for each library.
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
// Find the requested library and scenario
|
||||
var lib = libraries[find(libraries, l => l.name == lib_name)];
|
||||
var bench = benchmarks[find(benchmarks, b => b.name == scenario_name)];
|
||||
console.log("Benchmark: Wota vs Nota vs JSON");
|
||||
console.log("================================\n");
|
||||
|
||||
if (!lib) {
|
||||
log.console('Unknown library:', lib_name);
|
||||
log.console('Available libraries:', text(array(libraries, l => l.name), ', '));
|
||||
$stop()
|
||||
for (let bench of benchmarks) {
|
||||
console.log(`SCENARIO: ${bench.name}`);
|
||||
console.log(` Data length: ${bench.data.length} | Iterations: ${bench.iterations}\n`);
|
||||
|
||||
for (let lib of libraries) {
|
||||
let { encodeTime, decodeTime, totalSize } = runBenchmarkForLibrary(lib, bench);
|
||||
|
||||
// We'll compute total operations = bench.iterations * bench.data.length
|
||||
let totalOps = bench.iterations * bench.data.length;
|
||||
let encOpsPerSec = (totalOps / encodeTime).toFixed(1);
|
||||
let decOpsPerSec = (totalOps / decodeTime).toFixed(1);
|
||||
|
||||
console.log(` ${lib.name}:`);
|
||||
console.log(` Encode time: ${encodeTime.toFixed(3)}s => ${encOpsPerSec} encodes/sec`);
|
||||
console.log(` Decode time: ${decodeTime.toFixed(3)}s => ${decOpsPerSec} decodes/sec`);
|
||||
console.log(` Total size: ${totalSize} bytes (or code units for JSON)`);
|
||||
console.log("");
|
||||
}
|
||||
console.log("---------------------------------------------------------\n");
|
||||
}
|
||||
|
||||
if (!bench) {
|
||||
log.console('Unknown scenario:', scenario_name);
|
||||
log.console('Available scenarios:', text(array(benchmarks, b => b.name), ', '));
|
||||
$stop()
|
||||
}
|
||||
|
||||
// Run the benchmark for this library/scenario combination
|
||||
var { encodeTime, decodeTime, totalSize } = runBenchmarkForLibrary(lib, bench);
|
||||
|
||||
// Output json for easy parsing by hyperfine or other tools
|
||||
var totalOps = bench.iterations * length(bench.data);
|
||||
var result = {
|
||||
lib: lib_name,
|
||||
scenario: scenario_name,
|
||||
encodeTime: encodeTime,
|
||||
decodeTime: decodeTime,
|
||||
totalSize: totalSize,
|
||||
totalOps: totalOps,
|
||||
encodeOpsPerSec: totalOps / encodeTime,
|
||||
decodeOpsPerSec: totalOps / decodeTime,
|
||||
encodeNsPerOp: (encodeTime / totalOps) * 1e9,
|
||||
decodeNsPerOp: (decodeTime / totalOps) * 1e9
|
||||
};
|
||||
|
||||
log.console(result);
|
||||
|
||||
$stop()
|
||||
console.log("Benchmark complete.\n");
|
||||
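The removed single-scenario driver above reported both ops/sec and ns/op; the two are reciprocals, as in this small worked example (all values assumed, not measured):

// Assumed measurement: 400000 encodes completed in 0.8 seconds
var totalOps = 400000;
var encodeTime = 0.8;
var encodeOpsPerSec = totalOps / encodeTime;        // 500000 encodes/sec
var encodeNsPerOp = (encodeTime / totalOps) * 1e9;  // 2000 ns per encode
// Sanity check: 1e9 / encodeOpsPerSec === encodeNsPerOp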
124
build.ce
124
build.ce
@@ -1,124 +0,0 @@
|
||||
// cell build [<locator>] - Build dynamic libraries locally for the current machine
|
||||
//
|
||||
// Usage:
|
||||
// cell build Build dynamic libraries for all packages in shop
|
||||
// cell build . Build dynamic library for current directory package
|
||||
// cell build <locator> Build dynamic library for specific package
|
||||
// cell build -t <target> Cross-compile dynamic libraries for target platform
|
||||
// cell build -b <type> Build type: release (default), debug, or minsize
|
||||
|
||||
var build = use('build')
|
||||
var shop = use('internal/shop')
|
||||
var pkg_tools = use('package')
|
||||
var fd = use('fd')
|
||||
|
||||
var target = null
|
||||
var target_package = null
|
||||
var buildtype = 'release'
|
||||
var force_rebuild = false
|
||||
var dry_run = false
|
||||
|
||||
for (var i = 0; i < length(args); i++) {
|
||||
if (args[i] == '-t' || args[i] == '--target') {
|
||||
if (i + 1 < length(args)) {
|
||||
target = args[++i]
|
||||
} else {
|
||||
log.error('-t requires a target')
|
||||
$stop()
|
||||
}
|
||||
} else if (args[i] == '-p' || args[i] == '--package') {
|
||||
// Legacy support for -p flag
|
||||
if (i + 1 < length(args)) {
|
||||
target_package = args[++i]
|
||||
} else {
|
||||
log.error('-p requires a package name')
|
||||
$stop()
|
||||
}
|
||||
} else if (args[i] == '-b' || args[i] == '--buildtype') {
|
||||
if (i + 1 < length(args)) {
|
||||
buildtype = args[++i]
|
||||
if (buildtype != 'release' && buildtype != 'debug' && buildtype != 'minsize') {
|
||||
log.error('Invalid buildtype: ' + buildtype + '. Must be release, debug, or minsize')
|
||||
$stop()
|
||||
}
|
||||
} else {
|
||||
log.error('-b requires a buildtype (release, debug, minsize)')
|
||||
$stop()
|
||||
}
|
||||
} else if (args[i] == '--force') {
|
||||
force_rebuild = true
|
||||
} else if (args[i] == '--dry-run') {
|
||||
dry_run = true
|
||||
} else if (args[i] == '--list-targets') {
|
||||
log.console('Available targets:')
|
||||
var targets = build.list_targets()
|
||||
for (var t = 0; t < length(targets); t++) {
|
||||
log.console(' ' + targets[t])
|
||||
}
|
||||
$stop()
|
||||
} else if (!starts_with(args[i], '-') && !target_package) {
|
||||
// Positional argument - treat as package locator
|
||||
target_package = args[i]
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve local paths to absolute paths
|
||||
if (target_package) {
|
||||
if (target_package == '.' || starts_with(target_package, './') || starts_with(target_package, '../') || fd.is_dir(target_package)) {
|
||||
var resolved = fd.realpath(target_package)
|
||||
if (resolved) {
|
||||
target_package = resolved
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Detect target if not specified
|
||||
if (!target) {
|
||||
target = build.detect_host_target()
|
||||
if (target) log.console('Target: ' + target)
|
||||
}
|
||||
|
||||
if (target && !build.has_target(target)) {
|
||||
log.error('Invalid target: ' + target)
|
||||
log.console('Available targets: ' + text(build.list_targets(), ', '))
|
||||
$stop()
|
||||
}
|
||||
|
||||
var packages = shop.list_packages()
|
||||
log.console('Preparing packages...')
|
||||
arrfor(packages, function(package) {
|
||||
if (package == 'core') return
|
||||
shop.extract(package)
|
||||
})
|
||||
|
||||
if (target_package) {
|
||||
// Build single package
|
||||
log.console('Building ' + target_package + '...')
|
||||
try {
|
||||
var lib = build.build_dynamic(target_package, target, buildtype)
|
||||
if (lib) {
|
||||
log.console('Built: ' + lib)
|
||||
}
|
||||
} catch (e) {
|
||||
log.error('Build failed: ' + e)
|
||||
$stop()
|
||||
}
|
||||
} else {
|
||||
// Build all packages
|
||||
log.console('Building all packages...')
|
||||
var results = build.build_all_dynamic(target, buildtype)
|
||||
|
||||
var success = 0
|
||||
var failed = 0
|
||||
for (var i = 0; i < length(results); i++) {
|
||||
if (results[i].library) {
|
||||
success++
|
||||
} else if (results[i].error) {
|
||||
failed++
|
||||
}
|
||||
}
|
||||
|
||||
log.console(`Build complete: ${success} libraries built${failed > 0 ? `, ${failed} failed` : ''}`)
|
||||
}
|
||||
|
||||
$stop()
|
||||
455
build.cm
455
build.cm
@@ -1,455 +0,0 @@
|
||||
// build.cm - Simplified build utilities for Cell
|
||||
//
|
||||
// Key functions:
|
||||
// Build.compile_file(pkg, file, target) - Compile a C file, returns object path
|
||||
// Build.build_package(pkg, target) - Build all C files for a package
|
||||
// Build.build_dynamic(pkg, target) - Build dynamic library for a package
|
||||
// Build.build_static(packages, target, output) - Build static binary
|
||||
|
||||
var fd = use('fd')
|
||||
var crypto = use('crypto')
|
||||
var blob = use('blob')
|
||||
var os = use('os')
|
||||
var toolchains = use('toolchains')
|
||||
var shop = use('internal/shop')
|
||||
var pkg_tools = use('package')
|
||||
|
||||
var Build = {}
|
||||
|
||||
// ============================================================================
|
||||
// Sigil replacement
|
||||
// ============================================================================
|
||||
|
||||
// Get the local directory for prebuilt libraries
|
||||
function get_local_dir() {
|
||||
return shop.get_local_dir()
|
||||
}
|
||||
|
||||
// Replace sigils in a string
|
||||
// Currently supports: $LOCAL -> .cell/local full path
|
||||
function replace_sigils(str) {
|
||||
return replace(str, '$LOCAL', get_local_dir())
|
||||
}
|
||||
|
||||
// Replace sigils in an array of flags
|
||||
function replace_sigils_array(flags) {
|
||||
var result = []
|
||||
arrfor(flags, function(flag) {
|
||||
push(result, replace_sigils(flag))
|
||||
})
|
||||
return result
|
||||
}
|
||||
|
||||
Build.get_local_dir = get_local_dir
|
||||
|
||||
// ============================================================================
|
||||
// Toolchain helpers
|
||||
// ============================================================================
|
||||
|
||||
Build.list_targets = function() {
|
||||
return array(toolchains)
|
||||
}
|
||||
|
||||
Build.has_target = function(target) {
|
||||
return toolchains[target] != null
|
||||
}
|
||||
|
||||
Build.detect_host_target = function() {
|
||||
var platform = os.platform()
|
||||
var arch = os.arch ? os.arch() : 'arm64'
|
||||
|
||||
if (platform == 'macOS' || platform == 'darwin') {
|
||||
return arch == 'x86_64' ? 'macos_x86_64' : 'macos_arm64'
|
||||
} else if (platform == 'Linux' || platform == 'linux') {
|
||||
return arch == 'x86_64' ? 'linux' : 'linux_arm64'
|
||||
} else if (platform == 'Windows' || platform == 'windows') {
|
||||
return 'windows'
|
||||
}
|
||||
return null
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Content-addressed build cache
|
||||
// ============================================================================
|
||||
|
||||
function content_hash(str) {
|
||||
var bb = stone(blob(str))
|
||||
return text(crypto.blake2(bb, 32), 'h')
|
||||
}
|
||||
|
||||
function get_build_dir() {
|
||||
return shop.get_build_dir()
|
||||
}
|
||||
|
||||
function ensure_dir(path) {
|
||||
if (fd.stat(path).isDirectory) return
|
||||
var parts = array(path, '/')
|
||||
var current = starts_with(path, '/') ? '/' : ''
|
||||
for (var i = 0; i < length(parts); i++) {
|
||||
if (parts[i] == '') continue
|
||||
current += parts[i] + '/'
|
||||
if (!fd.stat(current).isDirectory) fd.mkdir(current)
|
||||
}
|
||||
}
|
||||
|
||||
Build.ensure_dir = ensure_dir
|
||||
|
||||
// ============================================================================
|
||||
// Compilation
|
||||
// ============================================================================
|
||||
|
||||
// Compile a single C file for a package
|
||||
// Returns the object file path (content-addressed in .cell/build)
|
||||
Build.compile_file = function(pkg, file, target, buildtype = 'release') {
|
||||
var pkg_dir = shop.get_package_dir(pkg)
|
||||
var src_path = pkg_dir + '/' + file
|
||||
|
||||
if (!fd.is_file(src_path)) {
|
||||
throw Error('Source file not found: ' + src_path)
|
||||
}
|
||||
|
||||
// Get flags (with sigil replacement)
|
||||
var cflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'CFLAGS', target))
|
||||
var target_cflags = toolchains[target].c_args || []
|
||||
var cc = toolchains[target].c
|
||||
|
||||
// Symbol name for this file
|
||||
var sym_name = shop.c_symbol_for_file(pkg, file)
|
||||
|
||||
// Build command
|
||||
var cmd_parts = [cc, '-c', '-fPIC']
|
||||
|
||||
// Add buildtype-specific flags
|
||||
if (buildtype == 'release') {
|
||||
cmd_parts = array(cmd_parts, ['-O3', '-DNDEBUG'])
|
||||
} else if (buildtype == 'debug') {
|
||||
cmd_parts = array(cmd_parts, ['-O2', '-g'])
|
||||
} else if (buildtype == 'minsize') {
|
||||
cmd_parts = array(cmd_parts, ['-Os', '-DNDEBUG'])
|
||||
}
|
||||
|
||||
push(cmd_parts, '-DCELL_USE_NAME=' + sym_name)
|
||||
push(cmd_parts, '-I"' + pkg_dir + '"')
|
||||
|
||||
// Add package CFLAGS (resolve relative -I paths)
|
||||
arrfor(cflags, function(flag) {
|
||||
if (starts_with(flag, '-I') && !starts_with(flag, '-I/')) {
|
||||
flag = '-I"' + pkg_dir + '/' + text(flag, 2) + '"'
|
||||
}
|
||||
push(cmd_parts, flag)
|
||||
})
|
||||
|
||||
// Add target CFLAGS
|
||||
arrfor(target_cflags, function(flag) {
|
||||
push(cmd_parts, flag)
|
||||
})
|
||||
|
||||
push(cmd_parts, '"' + src_path + '"')
|
||||
|
||||
var cmd_str = text(cmd_parts, ' ')
|
||||
|
||||
// Content hash: command + file content
|
||||
var file_content = fd.slurp(src_path)
|
||||
var hash_input = cmd_str + '\n' + text(file_content)
|
||||
var hash = content_hash(hash_input)
|
||||
|
||||
var build_dir = get_build_dir()
|
||||
ensure_dir(build_dir)
|
||||
var obj_path = build_dir + '/' + hash
|
||||
|
||||
// Check if already compiled
|
||||
if (fd.is_file(obj_path)) {
|
||||
return obj_path
|
||||
}
|
||||
|
||||
// Compile
|
||||
var full_cmd = cmd_str + ' -o "' + obj_path + '"'
|
||||
log.console('Compiling ' + file)
|
||||
var ret = os.system(full_cmd)
|
||||
if (ret != 0) {
|
||||
throw Error('Compilation failed: ' + file)
|
||||
}
|
||||
|
||||
return obj_path
|
||||
}
|
||||
|
||||
// Build all C files for a package
|
||||
// Returns array of object file paths
|
||||
Build.build_package = function(pkg, target = Build.detect_host_target(), exclude_main, buildtype = 'release') {
|
||||
var c_files = pkg_tools.get_c_files(pkg, target, exclude_main)
|
||||
var objects = []
|
||||
|
||||
arrfor(c_files, function(file) {
|
||||
var obj = Build.compile_file(pkg, file, target, buildtype)
|
||||
push(objects, obj)
|
||||
})
|
||||
|
||||
return objects
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Dynamic library building
|
||||
// ============================================================================
|
||||
|
||||
// Compute link key from all inputs that affect the dylib output
|
||||
function compute_link_key(objects, ldflags, target_ldflags, target, cc) {
|
||||
// Sort objects for deterministic hash
|
||||
var sorted_objects = sort(objects)
|
||||
|
||||
// Build a string representing all link inputs
|
||||
var parts = []
|
||||
push(parts, 'target:' + target)
|
||||
push(parts, 'cc:' + cc)
|
||||
arrfor(sorted_objects, function(obj) {
|
||||
// Object paths are content-addressed, so the path itself is the hash
|
||||
push(parts, 'obj:' + obj)
|
||||
})
|
||||
arrfor(ldflags, function(flag) {
|
||||
push(parts, 'ldflag:' + flag)
|
||||
})
|
||||
arrfor(target_ldflags, function(flag) {
|
||||
push(parts, 'target_ldflag:' + flag)
|
||||
})
|
||||
|
||||
return content_hash(text(parts, '\n'))
|
||||
}
|
||||
|
||||
// Build a dynamic library for a package
|
||||
// Output goes to .cell/lib/<package_name>.<ext>
|
||||
// Dynamic libraries do NOT link against core; undefined symbols are resolved at dlopen time
|
||||
// Uses content-addressed store + symlink for caching
|
||||
Build.build_dynamic = function(pkg, target = Build.detect_host_target(), buildtype = 'release') {
|
||||
var objects = Build.build_package(pkg, target, true, buildtype) // exclude main.c
|
||||
|
||||
if (length(objects) == 0) {
|
||||
log.console('No C files in ' + pkg)
|
||||
return null
|
||||
}
|
||||
|
||||
var lib_dir = shop.get_lib_dir()
|
||||
var store_dir = lib_dir + '/store'
|
||||
ensure_dir(lib_dir)
|
||||
ensure_dir(store_dir)
|
||||
|
||||
var lib_name = shop.lib_name_for_package(pkg)
|
||||
var dylib_ext = toolchains[target].system == 'windows' ? '.dll' : (toolchains[target].system == 'darwin' ? '.dylib' : '.so')
|
||||
var stable_path = lib_dir + '/' + lib_name + dylib_ext
|
||||
|
||||
// Get link flags (with sigil replacement)
|
||||
var ldflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'LDFLAGS', target))
|
||||
var target_ldflags = toolchains[target].c_link_args || []
|
||||
var cc = toolchains[target].cpp || toolchains[target].c
|
||||
var pkg_dir = shop.get_package_dir(pkg)
|
||||
var local_dir = get_local_dir()
|
||||
var tc = toolchains[target]
|
||||
|
||||
// Resolve relative -L paths in ldflags for hash computation
|
||||
var resolved_ldflags = []
|
||||
arrfor(ldflags, function(flag) {
|
||||
if (starts_with(flag, '-L') && !starts_with(flag, '-L/')) {
|
||||
flag = '-L"' + pkg_dir + '/' + text(flag, 2) + '"'
|
||||
}
|
||||
push(resolved_ldflags, flag)
|
||||
})
|
||||
|
||||
// Compute link key
|
||||
var link_key = compute_link_key(objects, resolved_ldflags, target_ldflags, target, cc)
|
||||
var store_path = store_dir + '/' + lib_name + '-' + link_key + dylib_ext
|
||||
|
||||
// Check if already linked in store
|
||||
if (fd.is_file(store_path)) {
|
||||
// Ensure symlink points to the store file
|
||||
if (fd.is_link(stable_path)) {
|
||||
var current_target = fd.readlink(stable_path)
|
||||
if (current_target == store_path) {
|
||||
// Already up to date
|
||||
return stable_path
|
||||
}
|
||||
fd.unlink(stable_path)
|
||||
} else if (fd.is_file(stable_path)) {
|
||||
fd.unlink(stable_path)
|
||||
}
|
||||
fd.symlink(store_path, stable_path)
|
||||
return stable_path
|
||||
}
|
||||
|
||||
// Build link command
|
||||
var cmd_parts = [cc, '-shared', '-fPIC']
|
||||
|
||||
// Platform-specific flags for undefined symbols (resolved at dlopen) and size optimization
|
||||
if (tc.system == 'darwin') {
|
||||
cmd_parts = array(cmd_parts, [
|
||||
'-undefined', 'dynamic_lookup',
|
||||
'-Wl,-dead_strip',
|
||||
'-Wl,-install_name,' + stable_path,
|
||||
'-Wl,-rpath,@loader_path/../local',
|
||||
'-Wl,-rpath,' + local_dir
|
||||
])
|
||||
} else if (tc.system == 'linux') {
|
||||
cmd_parts = array(cmd_parts, [
|
||||
'-Wl,--allow-shlib-undefined',
|
||||
'-Wl,--gc-sections',
|
||||
'-Wl,-rpath,$ORIGIN/../local',
|
||||
'-Wl,-rpath,' + local_dir
|
||||
])
|
||||
} else if (tc.system == 'windows') {
|
||||
// Windows DLLs: use --allow-shlib-undefined for mingw
|
||||
push(cmd_parts, '-Wl,--allow-shlib-undefined')
|
||||
}
|
||||
|
||||
// Add .cell/local to library search path
|
||||
push(cmd_parts, '-L"' + local_dir + '"')
|
||||
|
||||
arrfor(objects, function(obj) {
|
||||
push(cmd_parts, '"' + obj + '"')
|
||||
})
|
||||
|
||||
// Do NOT link against core library - symbols resolved at dlopen time
|
||||
cmd_parts = array(cmd_parts, resolved_ldflags)
|
||||
cmd_parts = array(cmd_parts, target_ldflags)
|
||||
|
||||
push(cmd_parts, '-o')
|
||||
push(cmd_parts, '"' + store_path + '"')
|
||||
|
||||
var cmd_str = text(cmd_parts, ' ')
|
||||
|
||||
log.console('Linking ' + lib_name + dylib_ext)
|
||||
var ret = os.system(cmd_str)
|
||||
if (ret != 0) {
|
||||
throw Error('Linking failed: ' + pkg)
|
||||
}
|
||||
|
||||
// Update symlink to point to the new store file
|
||||
if (fd.is_link(stable_path)) {
|
||||
fd.unlink(stable_path)
|
||||
} else if (fd.is_file(stable_path)) {
|
||||
fd.unlink(stable_path)
|
||||
}
|
||||
fd.symlink(store_path, stable_path)
|
||||
|
||||
return stable_path
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Static binary building
|
||||
// ============================================================================
|
||||
|
||||
// Build a static binary from multiple packages
|
||||
// packages: array of package names
|
||||
// output: output binary path
|
||||
Build.build_static = function(packages, target = Build.detect_host_target(), output, buildtype = 'release') {
|
||||
var all_objects = []
|
||||
var all_ldflags = []
|
||||
var seen_flags = {}
|
||||
|
||||
// Compile all packages
|
||||
arrfor(packages, function(pkg) {
|
||||
var is_core = (pkg == 'core')
|
||||
|
||||
// For core, include main.c; for others, exclude it
|
||||
var objects = Build.build_package(pkg, target, !is_core, buildtype)
|
||||
|
||||
arrfor(objects, function(obj) {
|
||||
push(all_objects, obj)
|
||||
})
|
||||
|
||||
// Collect LDFLAGS (with sigil replacement)
|
||||
var ldflags = replace_sigils_array(pkg_tools.get_flags(pkg, 'LDFLAGS', target))
|
||||
var pkg_dir = shop.get_package_dir(pkg)
|
||||
|
||||
// Deduplicate based on the entire LDFLAGS string for this package
|
||||
var ldflags_key = pkg + ':' + text(ldflags, ' ')
|
||||
if (!seen_flags[ldflags_key]) {
|
||||
seen_flags[ldflags_key] = true
|
||||
arrfor(ldflags, function(flag) {
|
||||
// Resolve relative -L paths
|
||||
if (starts_with(flag, '-L') && !starts_with(flag, '-L/')) {
|
||||
flag = '-L"' + pkg_dir + '/' + text(flag, 2) + '"'
|
||||
}
|
||||
push(all_ldflags, flag)
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
if (length(all_objects) == 0) {
|
||||
throw Error('No object files to link')
|
||||
}
|
||||
|
||||
// Link
|
||||
var cc = toolchains[target].c
|
||||
var target_ldflags = toolchains[target].c_link_args || []
|
||||
var exe_ext = toolchains[target].system == 'windows' ? '.exe' : ''
|
||||
|
||||
if (!ends_with(output, exe_ext) && exe_ext) {
|
||||
output = output + exe_ext
|
||||
}
|
||||
|
||||
var cmd_parts = [cc]
|
||||
|
||||
arrfor(all_objects, function(obj) {
|
||||
push(cmd_parts, '"' + obj + '"')
|
||||
})
|
||||
|
||||
arrfor(all_ldflags, function(flag) {
|
||||
push(cmd_parts, flag)
|
||||
})
|
||||
|
||||
arrfor(target_ldflags, function(flag) {
|
||||
push(cmd_parts, flag)
|
||||
})
|
||||
|
||||
push(cmd_parts, '-o', '"' + output + '"')
|
||||
|
||||
var cmd_str = text(cmd_parts, ' ')
|
||||
|
||||
log.console('Linking ' + output)
|
||||
var ret = os.system(cmd_str)
|
||||
if (ret != 0) {
|
||||
throw Error('Linking failed with command: ' + cmd_str)
|
||||
}
|
||||
|
||||
log.console('Built ' + output)
|
||||
return output
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Convenience functions
|
||||
// ============================================================================
|
||||
|
||||
// Build dynamic libraries for all installed packages
|
||||
Build.build_all_dynamic = function(target, buildtype = 'release') {
|
||||
target = target || Build.detect_host_target()
|
||||
|
||||
var packages = shop.list_packages()
|
||||
var results = []
|
||||
|
||||
// Build core first
|
||||
if (find(packages, 'core') != null) {
|
||||
try {
|
||||
var lib = Build.build_dynamic('core', target, buildtype)
|
||||
push(results, { package: 'core', library: lib })
|
||||
} catch (e) {
|
||||
log.error('Failed to build core: ' + text(e))
|
||||
push(results, { package: 'core', error: e })
|
||||
}
|
||||
}
|
||||
|
||||
// Build other packages
|
||||
arrfor(packages, function(pkg) {
|
||||
if (pkg == 'core') return
|
||||
|
||||
try {
|
||||
var lib = Build.build_dynamic(pkg, target, buildtype)
|
||||
push(results, { package: pkg, library: lib })
|
||||
} catch (e) {
|
||||
log.error('Failed to build ' + pkg + ': ')
|
||||
log.console(e.message)
|
||||
log.console(e.stack)
|
||||
push(results, { package: pkg, error: e })
|
||||
}
|
||||
})
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
return Build
|
||||
@@ -1 +0,0 @@
|
||||
// cake file for making a playdate package
|
||||
13
cell.toml
13
cell.toml
@@ -1,13 +0,0 @@
|
||||
[compilation]
|
||||
CFLAGS = "-Isource -Wno-incompatible-pointer-types -Wno-missing-braces -Wno-strict-prototypes -Wno-unused-function -Wno-int-conversion"
|
||||
LDFLAGS = "-lstdc++ -lm"
|
||||
|
||||
[compilation.macos_arm64]
|
||||
CFLAGS = "-x objective-c"
|
||||
LDFLAGS = "-framework CoreFoundation -framework CFNetwork"
|
||||
|
||||
[compilation.playdate]
|
||||
CFLAGS = "-DMINIZ_NO_TIME -DTARGET_EXTENSION -DTARGET_PLAYDATE -I$LOCAL/PlaydateSDK/C_API"
|
||||
|
||||
[compilation.windows]
|
||||
LDFLAGS = "-lws2_32 -lwinmm -liphlpapi -lbcrypt -lwinhttp -static-libgcc -static-libstdc++"
|
||||
468
cellfs.cm
468
cellfs.cm
@@ -1,468 +0,0 @@
|
||||
var cellfs = {}
|
||||
|
||||
// CellFS: A filesystem implementation using miniz and raw OS filesystem
|
||||
// Supports mounting multiple sources (fs, zip) and named mounts (@name)
|
||||
|
||||
var fd = use('fd')
|
||||
var miniz = use('miniz')
|
||||
var qop = use('qop')
|
||||
var wildstar = use('wildstar')
|
||||
|
||||
// Internal state
|
||||
var mounts = [] // Array of {source, type, handle, name}
|
||||
|
||||
var writepath = "."
|
||||
|
||||
// Helper to normalize paths
|
||||
function normalize_path(path) {
|
||||
if (!path) return ""
|
||||
// Remove leading/trailing slashes and normalize
|
||||
return replace(path, /^\/+|\/+$/, "")
|
||||
}
|
||||
|
||||
// Check if a file exists in a specific mount
|
||||
function mount_exists(mount, path) {
|
||||
if (mount.type == 'zip') {
|
||||
try {
|
||||
mount.handle.mod(path)
|
||||
return true
|
||||
} catch (e) {
|
||||
return false
|
||||
}
|
||||
} else if (mount.type == 'qop') {
|
||||
try {
|
||||
return mount.handle.stat(path) != null
|
||||
} catch (e) {
|
||||
return false
|
||||
}
|
||||
} else { // fs
|
||||
var full_path = fd.join_paths(mount.source, path)
|
||||
try {
|
||||
var st = fd.stat(full_path)
|
||||
return st.isFile || st.isDirectory
|
||||
} catch (e) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if a path refers to a directory in a specific mount
|
||||
function is_directory(path) {
|
||||
var res = resolve(path)
|
||||
var mount = res.mount
|
||||
if (mount.type == 'zip') {
|
||||
try {
|
||||
return mount.handle.is_directory(path);
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
} else if (mount.type == 'qop') {
|
||||
try {
|
||||
return mount.handle.is_directory(path);
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
} else { // fs
|
||||
var full_path = fd.join_paths(mount.source, path)
|
||||
try {
|
||||
var st = fd.stat(full_path)
|
||||
return st.isDirectory
|
||||
} catch (e) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve a path to a specific mount and relative path
|
||||
// Returns { mount, path } or throws/returns null
|
||||
function resolve(path, must_exist) {
|
||||
path = normalize_path(path)
|
||||
|
||||
// Check for named mount
|
||||
if (starts_with(path, "@")) {
|
||||
var idx = search(path, "/")
|
||||
var mount_name = ""
|
||||
var rel_path = ""
|
||||
|
||||
if (idx == null) {
|
||||
mount_name = text(path, 1)
|
||||
rel_path = ""
|
||||
} else {
|
||||
mount_name = text(path, 1, idx)
|
||||
rel_path = text(path, idx + 1)
|
||||
}
|
||||
|
||||
// Find named mount
|
||||
var mount = null
|
||||
arrfor(mounts, function(m) {
|
||||
if (m.name == mount_name) {
|
||||
mount = m
|
||||
return true
|
||||
}
|
||||
}, false, true)
|
||||
|
||||
if (!mount) {
|
||||
throw Error("Unknown mount point: @" + mount_name)
|
||||
}
|
||||
|
||||
return { mount: mount, path: rel_path }
|
||||
}
|
||||
|
||||
// Search path
|
||||
var found_mount = null
|
||||
arrfor(mounts, function(mount) {
|
||||
if (mount_exists(mount, path)) {
|
||||
found_mount = { mount: mount, path: path }
|
||||
return true
|
||||
}
|
||||
}, false, true)
|
||||
|
||||
if (found_mount) {
|
||||
return found_mount
|
||||
}
|
||||
|
||||
if (must_exist) {
|
||||
throw Error("File not found in any mount: " + path)
|
||||
}
|
||||
}
|
||||
|
||||
// Mount a source
|
||||
function mount(source, name) {
|
||||
// Check if source exists
|
||||
var st = fd.stat(source)
|
||||
|
||||
var mount_info = {
|
||||
source: source,
|
||||
name: name || null,
|
||||
type: 'fs',
|
||||
handle: null,
|
||||
zip_blob: null
|
||||
}
|
||||
|
||||
if (st.isDirectory) {
|
||||
mount_info.type = 'fs'
|
||||
} else if (st.isFile) {
|
||||
var blob = fd.slurp(source)
|
||||
|
||||
// Try QOP first (it's likely faster to fail?) or Zip?
|
||||
// QOP open checks magic.
|
||||
var qop_archive = null
|
||||
try {
|
||||
qop_archive = qop.open(blob)
|
||||
} catch(e) {}
|
||||
|
||||
if (qop_archive) {
|
||||
mount_info.type = 'qop'
|
||||
mount_info.handle = qop_archive
|
||||
mount_info.zip_blob = blob // keep blob alive
|
||||
} else {
|
||||
var zip = miniz.read(blob)
|
||||
if (!is_object(zip) || !is_function(zip.count)) {
|
||||
throw Error("Invalid archive file (not zip or qop): " + source)
|
||||
}
|
||||
|
||||
mount_info.type = 'zip'
|
||||
mount_info.handle = zip
|
||||
mount_info.zip_blob = blob // keep blob alive
|
||||
}
|
||||
} else {
|
||||
throw Error("Unsupported mount source type: " + source)
|
||||
}
|
||||
|
||||
push(mounts, mount_info)
|
||||
}
|
||||
|
||||
// Unmount
|
||||
function unmount(name_or_source) {
|
||||
mounts = filter(mounts, function(mount) {
|
||||
return mount.name != name_or_source && mount.source != name_or_source
|
||||
})
|
||||
}
|
||||
|
||||
// Read file
|
||||
function slurp(path) {
|
||||
var res = resolve(path, true)
|
||||
if (!res) throw Error("File not found: " + path)
|
||||
|
||||
if (res.mount.type == 'zip') {
|
||||
return res.mount.handle.slurp(res.path)
|
||||
} else if (res.mount.type == 'qop') {
|
||||
var data = res.mount.handle.read(res.path)
|
||||
if (!data) throw Error("File not found in qop: " + path)
|
||||
return data
|
||||
} else {
|
||||
var full_path = fd.join_paths(res.mount.source, res.path)
|
||||
return fd.slurp(full_path)
|
||||
}
|
||||
}
|
||||
|
||||
// Write file
|
||||
function slurpwrite(path, data) {
|
||||
var full_path = writepath + "/" + path
|
||||
|
||||
var f = fd.open(full_path, 'w')
|
||||
fd.write(f, data)
|
||||
fd.close(f)
|
||||
}
|
||||
|
||||
// Check existence
|
||||
function exists(path) {
|
||||
var res = resolve(path, false)
|
||||
if (starts_with(path, "@")) {
|
||||
return mount_exists(res.mount, res.path)
|
||||
}
|
||||
return res != null
|
||||
}
|
||||
|
||||
// Stat
|
||||
function stat(path) {
|
||||
var res = resolve(path, true)
|
||||
if (!res) throw Error("File not found: " + path)
|
||||
|
||||
if (res.mount.type == 'zip') {
|
||||
var mod = res.mount.handle.mod(res.path)
|
||||
return {
|
||||
filesize: 0,
|
||||
modtime: mod * 1000,
|
||||
isDirectory: false
|
||||
}
|
||||
} else if (res.mount.type == 'qop') {
|
||||
var s = res.mount.handle.stat(res.path)
|
||||
if (!s) throw Error("File not found in qop: " + path)
|
||||
return {
|
||||
filesize: s.size,
|
||||
modtime: s.modtime,
|
||||
isDirectory: s.isDirectory
|
||||
}
|
||||
} else {
|
||||
var full_path = fd.join_paths(res.mount.source, res.path)
|
||||
var s = fd.stat(full_path)
|
||||
return {
|
||||
filesize: s.size,
|
||||
modtime: s.mtime,
|
||||
isDirectory: s.isDirectory
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Get search paths
|
||||
function searchpath() {
|
||||
return array(mounts)
|
||||
}
|
||||
|
||||
// Mount a package using the shop system
|
||||
function mount_package(name) {
|
||||
if (name == null) {
|
||||
mount('.', null)
|
||||
return
|
||||
}
|
||||
|
||||
var shop = use('internal/shop')
|
||||
var dir = shop.get_package_dir(name)
|
||||
|
||||
if (!dir) {
|
||||
throw Error("Package not found: " + name)
|
||||
}
|
||||
|
||||
mount(dir, name)
|
||||
}
|
||||
|
||||
// New functions for qjs_io compatibility
|
||||
|
||||
function match(str, pattern) {
|
||||
return wildstar.match(pattern, str, wildstar.WM_PATHNAME | wildstar.WM_PERIOD | wildstar.WM_WILDSTAR)
|
||||
}
|
||||
|
||||
function rm(path) {
|
||||
var res = resolve(path, true)
|
||||
if (res.mount.type != 'fs') throw Error("Cannot delete from non-fs mount")
|
||||
|
||||
var full_path = fd.join_paths(res.mount.source, res.path)
|
||||
var st = fd.stat(full_path)
|
||||
if (st.isDirectory) fd.rmdir(full_path)
|
||||
else fd.unlink(full_path)
|
||||
}
|
||||
|
||||
function mkdir(path) {
|
||||
var full = fd.join_paths(writepath, path)
|
||||
fd.mkdir(full)
|
||||
}
|
||||
|
||||
function set_writepath(path) {
|
||||
writepath = path
|
||||
}
|
||||
|
||||
function basedir() {
|
||||
return fd.getcwd()
|
||||
}
|
||||
|
||||
function prefdir(org, app) {
|
||||
return "./"
|
||||
}
|
||||
|
||||
function realdir(path) {
|
||||
var res = resolve(path, false)
|
||||
if (!res) return null
|
||||
return fd.join_paths(res.mount.source, res.path)
|
||||
}
|
||||
|
||||
function enumerate(path, recurse) {
|
||||
if (path == null) path = ""
|
||||
|
||||
var res = resolve(path, true)
|
||||
var results = []
|
||||
|
||||
function visit(curr_full, rel_prefix) {
|
||||
var list = fd.readdir(curr_full)
|
||||
if (!list) return
|
||||
|
||||
arrfor(list, function(item) {
|
||||
var item_rel = rel_prefix ? rel_prefix + "/" + item : item
|
||||
push(results, item_rel)
|
||||
|
||||
if (recurse) {
|
||||
var st = fd.stat(fd.join_paths(curr_full, item))
|
||||
if (st.isDirectory) {
|
||||
visit(fd.join_paths(curr_full, item), item_rel)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
if (res.mount.type == 'fs') {
|
||||
var full = fd.join_paths(res.mount.source, res.path)
|
||||
var st = fd.stat(full)
|
||||
if (st && st.isDirectory) {
|
||||
visit(full, "")
|
||||
}
|
||||
} else if (res.mount.type == 'qop') {
|
||||
var all = res.mount.handle.list()
|
||||
var prefix = res.path ? res.path + "/" : ""
|
||||
var prefix_len = length(prefix)
|
||||
|
||||
// Use a set to avoid duplicates if we are simulating directories
|
||||
var seen = {}
|
||||
|
||||
arrfor(all, function(p) {
|
||||
if (starts_with(p, prefix)) {
|
||||
var rel = text(p, prefix_len)
|
||||
if (length(rel) == 0) return
|
||||
|
||||
if (!recurse) {
|
||||
var slash = search(rel, '/')
|
||||
if (slash != null) {
|
||||
rel = text(rel, 0, slash)
|
||||
}
|
||||
}
|
||||
|
||||
if (!seen[rel]) {
|
||||
seen[rel] = true
|
||||
push(results, rel)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
function globfs(globs, dir) {
|
||||
if (dir == null) dir = ""
|
||||
var res = resolve(dir, true)
|
||||
var results = []
|
||||
|
||||
function check_neg(path) {
|
||||
var result = false
|
||||
arrfor(globs, function(g) {
|
||||
if (starts_with(g, "!") && wildstar.match(text(g, 1), path, wildstar.WM_WILDSTAR)) {
|
||||
result = true
|
||||
return true
|
||||
}
|
||||
}, false, true)
|
||||
return result
|
||||
}
|
||||
|
||||
function check_pos(path) {
|
||||
var result = false
|
||||
arrfor(globs, function(g) {
|
||||
if (!starts_with(g, "!") && wildstar.match(g, path, wildstar.WM_WILDSTAR)) {
|
||||
result = true
|
||||
return true
|
||||
}
|
||||
}, false, true)
|
||||
return result
|
||||
}
|
||||
|
||||
function visit(curr_full, rel_prefix) {
|
||||
if (rel_prefix && check_neg(rel_prefix)) return
|
||||
|
||||
var list = fd.readdir(curr_full)
|
||||
if (!list) return
|
||||
|
||||
arrfor(list, function(item) {
|
||||
var item_rel = rel_prefix ? rel_prefix + "/" + item : item
|
||||
|
||||
var child_full = fd.join_paths(curr_full, item)
|
||||
var st = fd.stat(child_full)
|
||||
|
||||
if (st.isDirectory) {
|
||||
if (!check_neg(item_rel)) {
|
||||
visit(child_full, item_rel)
|
||||
}
|
||||
} else {
|
||||
if (!check_neg(item_rel) && check_pos(item_rel)) {
|
||||
push(results, item_rel)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
if (res.mount.type == 'fs') {
|
||||
var full = fd.join_paths(res.mount.source, res.path)
|
||||
var st = fd.stat(full)
|
||||
if (st && st.isDirectory) {
|
||||
visit(full, "")
|
||||
}
|
||||
} else if (res.mount.type == 'qop') {
|
||||
var all = res.mount.handle.list()
|
||||
var prefix = res.path ? res.path + "/" : ""
|
||||
var prefix_len = length(prefix)
|
||||
|
||||
arrfor(all, function(p) {
|
||||
if (starts_with(p, prefix)) {
|
||||
var rel = text(p, prefix_len)
|
||||
if (length(rel) == 0) return
|
||||
|
||||
if (!check_neg(rel) && check_pos(rel)) {
|
||||
push(results, rel)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// Exports
|
||||
cellfs.mount = mount
|
||||
cellfs.mount_package = mount_package
|
||||
cellfs.unmount = unmount
|
||||
cellfs.slurp = slurp
|
||||
cellfs.slurpwrite = slurpwrite
|
||||
cellfs.exists = exists
|
||||
cellfs.is_directory = is_directory
|
||||
cellfs.stat = stat
|
||||
cellfs.searchpath = searchpath
|
||||
cellfs.match = match
|
||||
cellfs.enumerate = enumerate
|
||||
cellfs.globfs = globfs
|
||||
cellfs.rm = rm
|
||||
cellfs.mkdir = mkdir
|
||||
cellfs.writepath = set_writepath
|
||||
cellfs.basedir = basedir
|
||||
cellfs.prefdir = prefdir
|
||||
cellfs.realdir = realdir
|
||||
|
||||
cellfs.mount('.')
|
||||
|
||||
return cellfs
|
||||
218
clean.ce
218
clean.ce
@@ -1,218 +0,0 @@
|
||||
// cell clean [<scope>] - Remove cached material to force refetch/rebuild
|
||||
//
|
||||
// Usage:
|
||||
// cell clean Clean build outputs for current directory package
|
||||
// cell clean . Clean build outputs for current directory package
|
||||
// cell clean <locator> Clean build outputs for specific package
|
||||
// cell clean shop Clean entire shop
|
||||
// cell clean world Clean all world packages
|
||||
//
|
||||
// Options:
|
||||
// --build Remove build outputs only (default)
|
||||
// --fetch Remove fetched sources only
|
||||
// --all Remove both build outputs and fetched sources
|
||||
// --deep Apply to full dependency closure
|
||||
// --dry-run Show what would be deleted
|
||||
|
||||
var shop = use('internal/shop')
|
||||
var pkg = use('package')
|
||||
var fd = use('fd')
|
||||
|
||||
var scope = null
|
||||
var clean_build = false
|
||||
var clean_fetch = false
|
||||
var deep = false
|
||||
var dry_run = false
|
||||
|
||||
for (var i = 0; i < length(args); i++) {
|
||||
if (args[i] == '--build') {
|
||||
clean_build = true
|
||||
} else if (args[i] == '--fetch') {
|
||||
clean_fetch = true
|
||||
} else if (args[i] == '--all') {
|
||||
clean_build = true
|
||||
clean_fetch = true
|
||||
} else if (args[i] == '--deep') {
|
||||
deep = true
|
||||
} else if (args[i] == '--dry-run') {
|
||||
dry_run = true
|
||||
} else if (args[i] == '--help' || args[i] == '-h') {
|
||||
log.console("Usage: cell clean [<scope>] [options]")
|
||||
log.console("")
|
||||
log.console("Remove cached material to force refetch/rebuild.")
|
||||
log.console("")
|
||||
log.console("Scopes:")
|
||||
log.console(" <locator> Clean specific package")
|
||||
log.console(" shop Clean entire shop")
|
||||
log.console(" world Clean all world packages")
|
||||
log.console("")
|
||||
log.console("Options:")
|
||||
log.console(" --build Remove build outputs only (default)")
|
||||
log.console(" --fetch Remove fetched sources only")
|
||||
log.console(" --all Remove both build outputs and fetched sources")
|
||||
log.console(" --deep Apply to full dependency closure")
|
||||
log.console(" --dry-run Show what would be deleted")
|
||||
$stop()
|
||||
} else if (!starts_with(args[i], '-')) {
|
||||
scope = args[i]
|
||||
}
|
||||
}
|
||||
|
||||
// Default to --build if nothing specified
|
||||
if (!clean_build && !clean_fetch) {
|
||||
clean_build = true
|
||||
}
|
||||
|
||||
// Default scope to current directory
|
||||
if (!scope) {
|
||||
scope = '.'
|
||||
}
|
||||
|
||||
// Resolve local paths for single package scope
|
||||
var is_shop_scope = (scope == 'shop')
|
||||
var is_world_scope = (scope == 'world')
|
||||
|
||||
if (!is_shop_scope && !is_world_scope) {
|
||||
if (scope == '.' || starts_with(scope, './') || starts_with(scope, '../') || fd.is_dir(scope)) {
|
||||
var resolved = fd.realpath(scope)
|
||||
if (resolved) {
|
||||
scope = resolved
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var files_to_delete = []
|
||||
var dirs_to_delete = []
|
||||
|
||||
// Gather packages to clean
|
||||
var packages_to_clean = []
|
||||
|
||||
if (is_shop_scope) {
|
||||
packages_to_clean = shop.list_packages()
|
||||
} else if (is_world_scope) {
|
||||
// For now, world is the same as shop
|
||||
packages_to_clean = shop.list_packages()
|
||||
} else {
|
||||
// Single package
|
||||
push(packages_to_clean, scope)
|
||||
|
||||
if (deep) {
|
||||
try {
|
||||
var deps = pkg.gather_dependencies(scope)
|
||||
arrfor(deps, function(dep) {
|
||||
push(packages_to_clean, dep)
|
||||
})
|
||||
} catch (e) {
|
||||
// Skip if can't read dependencies
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Gather files to clean
|
||||
var lib_dir = shop.get_lib_dir()
|
||||
var build_dir = shop.get_build_dir()
|
||||
var packages_dir = replace(shop.get_package_dir(''), /\/$/, '') // Get base packages dir
|
||||
|
||||
if (clean_build) {
|
||||
if (is_shop_scope) {
|
||||
// Clean entire build and lib directories
|
||||
if (fd.is_dir(build_dir)) {
|
||||
push(dirs_to_delete, build_dir)
|
||||
}
|
||||
if (fd.is_dir(lib_dir)) {
|
||||
push(dirs_to_delete, lib_dir)
|
||||
}
|
||||
} else {
|
||||
// Clean specific package libraries
|
||||
arrfor(packages_to_clean, function(p) {
|
||||
if (p == 'core') return
|
||||
|
||||
var lib_name = shop.lib_name_for_package(p)
|
||||
var dylib_ext = '.dylib'
|
||||
var lib_path = lib_dir + '/' + lib_name + dylib_ext
|
||||
|
||||
if (fd.is_file(lib_path)) {
|
||||
push(files_to_delete, lib_path)
|
||||
}
|
||||
|
||||
// Also check for .so and .dll
|
||||
var so_path = lib_dir + '/' + lib_name + '.so'
|
||||
var dll_path = lib_dir + '/' + lib_name + '.dll'
|
||||
if (fd.is_file(so_path)) {
|
||||
push(files_to_delete, so_path)
|
||||
}
|
||||
if (fd.is_file(dll_path)) {
|
||||
push(files_to_delete, dll_path)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if (clean_fetch) {
|
||||
if (is_shop_scope) {
|
||||
// Clean entire packages directory (dangerous!)
|
||||
if (fd.is_dir(packages_dir)) {
|
||||
push(dirs_to_delete, packages_dir)
|
||||
}
|
||||
} else {
|
||||
// Clean specific package directories
|
||||
arrfor(packages_to_clean, function(p) {
|
||||
if (p == 'core') return
|
||||
|
||||
var pkg_dir = shop.get_package_dir(p)
|
||||
if (fd.is_dir(pkg_dir) || fd.is_link(pkg_dir)) {
|
||||
push(dirs_to_delete, pkg_dir)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Execute or report
|
||||
if (dry_run) {
|
||||
log.console("Would delete:")
|
||||
if (length(files_to_delete) == 0 && length(dirs_to_delete) == 0) {
|
||||
log.console(" (nothing to clean)")
|
||||
} else {
|
||||
arrfor(files_to_delete, function(f) {
|
||||
log.console(" [file] " + f)
|
||||
})
|
||||
arrfor(dirs_to_delete, function(d) {
|
||||
log.console(" [dir] " + d)
|
||||
})
|
||||
}
|
||||
} else {
|
||||
var deleted_count = 0
|
||||
|
||||
arrfor(files_to_delete, function(f) {
|
||||
try {
|
||||
fd.unlink(f)
|
||||
log.console("Deleted: " + f)
|
||||
deleted_count++
|
||||
} catch (e) {
|
||||
log.error("Failed to delete " + f + ": " + e)
|
||||
}
|
||||
})
|
||||
|
||||
arrfor(dirs_to_delete, function(d) {
|
||||
try {
|
||||
if (fd.is_link(d)) {
|
||||
fd.unlink(d)
|
||||
} else {
|
||||
fd.rmdir(d, 1) // recursive
|
||||
}
|
||||
log.console("Deleted: " + d)
|
||||
deleted_count++
|
||||
} catch (e) {
|
||||
log.error("Failed to delete " + d + ": " + e)
|
||||
}
|
||||
})
|
||||
|
||||
if (deleted_count == 0) {
|
||||
log.console("Nothing to clean.")
|
||||
} else {
|
||||
log.console("")
|
||||
log.console("Clean complete: " + text(deleted_count) + " item(s) deleted.")
|
||||
}
|
||||
}
|
||||
|
||||
$stop()
|
||||
121
clone.ce
121
clone.ce
@@ -1,121 +0,0 @@
// cell clone <origin> <path>
// Clones a cell package <origin> to the local <path>, and links it.

var shop = use('internal/shop')
var link = use('link')
var fd = use('fd')
var http = use('http')
var miniz = use('miniz')

if (length(args) < 2) {
  log.console("Usage: cell clone <origin> <path>")
  log.console("Clones a cell package to a local path and links it.")
  $stop()
  return
}

var origin = args[0]
var target_path = args[1]

// Resolve target path to absolute
if (target_path == '.' || starts_with(target_path, './') || starts_with(target_path, '../')) {
  var resolved = fd.realpath(target_path)
  if (resolved) {
    target_path = resolved
  } else {
    // Path doesn't exist yet, resolve relative to cwd
    var cwd = fd.realpath('.')
    if (target_path == '.') {
      target_path = cwd
    } else if (starts_with(target_path, './')) {
      target_path = cwd + text(target_path, 1)
    } else if (starts_with(target_path, '../')) {
      // Go up one directory from cwd
      var parent = fd.dirname(cwd)
      target_path = parent + text(target_path, 2)
    }
  }
}

// Check if target already exists
if (fd.is_dir(target_path)) {
  log.console("Error: " + target_path + " already exists")
  $stop()
  return
}

log.console("Cloning " + origin + " to " + target_path + "...")

// Get the latest commit
var info = shop.resolve_package_info(origin)
if (!info || info == 'local') {
  log.console("Error: " + origin + " is not a remote package")
  $stop()
  return
}

// Update to get the commit hash
var update_result = shop.update(origin)
if (!update_result) {
  log.console("Error: Could not fetch " + origin)
  $stop()
  return
}

// Fetch and extract to the target path
var lock = shop.load_lock()
var entry = lock[origin]
if (!entry || !entry.commit) {
  log.console("Error: No commit found for " + origin)
  $stop()
  return
}

var download_url = shop.get_download_url(origin, entry.commit)
log.console("Downloading from " + download_url)

try {
  var zip_blob = http.fetch(download_url)

  // Extract zip to target path
  var zip = miniz.read(zip_blob)
  if (!zip) {
    log.console("Error: Failed to read zip archive")
    $stop()
    return
  }

  // Create target directory
  fd.mkdir(target_path)

  var count = zip.count()
  for (var i = 0; i < count; i++) {
    if (zip.is_directory(i)) continue
    var filename = zip.get_filename(i)
    var first_slash = search(filename, '/')
    if (first_slash == null) continue
    if (first_slash + 1 >= length(filename)) continue

    var rel_path = text(filename, first_slash + 1)
    var full_path = target_path + '/' + rel_path
    var dir_path = fd.dirname(full_path)

    // Ensure directory exists
    if (!fd.is_dir(dir_path)) {
      fd.mkdir(dir_path)
    }
    fd.slurpwrite(full_path, zip.slurp(filename))
  }

  log.console("Extracted to " + target_path)

  // Link the origin to the cloned path
  link.add(origin, target_path, shop)
  log.console("Linked " + origin + " -> " + target_path)

} catch (e) {
  log.console("Error: " + e.message)
  if (e.stack) log.console(e.stack)
}

$stop()
249 config.ce
@@ -1,249 +0,0 @@
|
||||
// cell config - Manage system and actor configurations
|
||||
|
||||
var toml = use('toml')
|
||||
var pkg = use('package')
|
||||
|
||||
function print_help() {
|
||||
log.console("Usage: cell config <command> [options]")
|
||||
log.console("")
|
||||
log.console("Commands:")
|
||||
log.console(" get <key> Get a configuration value")
|
||||
log.console(" set <key> <value> Set a configuration value")
|
||||
log.console(" list List all configurations")
|
||||
log.console(" actor <name> get <key> Get actor-specific config")
|
||||
log.console(" actor <name> set <key> <val> Set actor-specific config")
|
||||
log.console(" actor <name> list List actor configurations")
|
||||
log.console("")
|
||||
log.console("Examples:")
|
||||
log.console(" cell config get system.ar_timer")
|
||||
log.console(" cell config set system.net_service 0.2")
|
||||
log.console(" cell config actor prosperon/_sdl_video set resolution 1920x1080")
|
||||
log.console(" cell config actor extramath/spline set precision high")
|
||||
log.console("")
|
||||
log.console("System keys:")
|
||||
log.console(" system.ar_timer - Seconds before idle actor reclamation")
|
||||
log.console(" system.actor_memory - MB of memory an actor can use (0=unbounded)")
|
||||
log.console(" system.net_service - Seconds per network service pull")
|
||||
log.console(" system.reply_timeout - Seconds to hold callback for replies (0=unbounded)")
|
||||
log.console(" system.actor_max - Max number of simultaneous actors")
|
||||
log.console(" system.stack_max - MB of memory each actor's stack can grow to")
|
||||
}
|
||||
|
||||
// Parse a dot-notation key into path segments
|
||||
function parse_key(key) {
|
||||
return array(key, '.')
|
||||
}
|
||||
|
||||
// Get a value from nested object using path
|
||||
function get_nested(obj, path) {
|
||||
var current = obj
|
||||
arrfor(path, function(segment) {
|
||||
if (is_null(current) || !is_object(current)) return null
|
||||
current = current[segment]
|
||||
})
|
||||
return current
|
||||
}
|
||||
|
||||
// Set a value in nested object using path
|
||||
function set_nested(obj, path, value) {
|
||||
var current = obj
|
||||
for (var i = 0; i < length(path) - 1; i++) {
|
||||
var segment = path[i]
|
||||
if (is_null(current[segment]) || !is_object(current[segment])) {
|
||||
current[segment] = {}
|
||||
}
|
||||
current = current[segment]
|
||||
}
|
||||
current[path[length(path) - 1]] = value
|
||||
}
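
// For reference, a minimal sketch (not part of the original script) of how the helpers
// above combine; the key "system.ar_timer" is taken from the help text in this file.
// var path = parse_key("system.ar_timer")   // ["system", "ar_timer"]
// set_nested(config, path, 30)              // creates config.system if it is missing
// get_nested(config, path)                  // -> 30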
|
||||
|
||||
// Parse value string into appropriate type
|
||||
function parse_value(str) {
|
||||
// Boolean
|
||||
if (str == 'true') return true
|
||||
if (str == 'false') return false
|
||||
|
||||
// Number (including underscores)
|
||||
var num_str = replace(str, /_/g, '')
|
||||
if (/^-?\d+$/.test(num_str)) return parseInt(num_str)
|
||||
if (/^-?\d*\.\d+$/.test(num_str)) return parseFloat(num_str)
|
||||
|
||||
// String
|
||||
return str
|
||||
}
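
// Illustrative inputs and outputs for parse_value (a sketch added for clarity, following
// the regexes above; not part of the original script):
// parse_value("true")    // -> true (boolean)
// parse_value("1_000")   // -> 1000 (underscores stripped before parsing)
// parse_value("-0.25")   // -> -0.25
// parse_value("1080p")   // -> "1080p" (falls through as a string)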
|
||||
|
||||
// Format value for display
|
||||
function format_value(val) {
|
||||
if (is_text(val)) return '"' + val + '"'
|
||||
if (is_number(val) && val >= 1000) {
|
||||
// Add underscores to large numbers
|
||||
return replace(val.toString(), /\B(?=(\d{3})+(?!\d))/g, '_')
|
||||
}
|
||||
return text(val)
|
||||
}
|
||||
|
||||
// Print configuration tree recursively
|
||||
function print_config(obj, prefix = '') {
|
||||
arrfor(array(obj), function(key) {
|
||||
var val = obj[key]
|
||||
var full_key = prefix ? prefix + '.' + key : key
|
||||
|
||||
if (is_object(val))
|
||||
print_config(val, full_key)
|
||||
else
|
||||
log.console(full_key + ' = ' + format_value(val))
|
||||
})
|
||||
}
|
||||
|
||||
// Main command handling
|
||||
if (length(args) == 0) {
|
||||
print_help()
|
||||
$stop()
|
||||
return
|
||||
}
|
||||
|
||||
var config = pkg.load_config()
|
||||
if (!config) {
|
||||
log.error("Failed to load cell.toml")
|
||||
$stop()
|
||||
return
|
||||
}
|
||||
|
||||
var command = args[0]
|
||||
var key
|
||||
var path
|
||||
var value
|
||||
|
||||
switch (command) {
|
||||
case 'help':
|
||||
case '-h':
|
||||
case '--help':
|
||||
print_help()
|
||||
break
|
||||
|
||||
case 'list':
|
||||
log.console("# Cell Configuration")
|
||||
log.console("")
|
||||
print_config(config)
|
||||
break
|
||||
|
||||
case 'get':
|
||||
if (length(args) < 2) {
|
||||
log.error("Usage: cell config get <key>")
|
||||
$stop()
|
||||
return
|
||||
}
|
||||
key = args[1]
|
||||
path = parse_key(key)
|
||||
value = get_nested(config, path)
|
||||
|
||||
if (value == null) {
|
||||
log.error("Key not found: " + key)
|
||||
} else if (isa(value, object)) {
|
||||
// Print all nested values
|
||||
print_config(value, key)
|
||||
} else {
|
||||
log.console(key + ' = ' + format_value(value))
|
||||
}
|
||||
break
|
||||
|
||||
case 'set':
|
||||
if (length(args) < 3) {
|
||||
log.error("Usage: cell config set <key> <value>")
|
||||
$stop()
|
||||
return
|
||||
}
|
||||
var key = args[1]
|
||||
var value_str = args[2]
|
||||
var path = parse_key(key)
|
||||
var value = parse_value(value_str)
|
||||
|
||||
// Validate system keys
|
||||
if (path[0] == 'system') {
|
||||
var valid_system_keys = [
|
||||
'ar_timer', 'actor_memory', 'net_service',
|
||||
'reply_timeout', 'actor_max', 'stack_max'
|
||||
]
|
||||
if (find(valid_system_keys, path[1]) == null) {
|
||||
log.error("Invalid system key. Valid keys: " + text(valid_system_keys, ', '))
|
||||
$stop()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
set_nested(config, path, value)
|
||||
pkg.save_config(config)
|
||||
log.console("Set " + key + " = " + format_value(value))
|
||||
break
|
||||
|
||||
case 'actor':
|
||||
// Handle actor-specific configuration
|
||||
if (length(args) < 3) {
|
||||
log.error("Usage: cell config actor <name> <command> [options]")
|
||||
$stop()
|
||||
return
|
||||
}
|
||||
|
||||
var actor_name = args[1]
|
||||
var actor_cmd = args[2]
|
||||
|
||||
// Initialize actors section if needed
|
||||
config.actors = config.actors || {}
|
||||
config.actors[actor_name] = config.actors[actor_name] || {}
|
||||
|
||||
switch (actor_cmd) {
|
||||
case 'list':
|
||||
if (length(array(config.actors[actor_name])) == 0) {
|
||||
log.console("No configuration for actor: " + actor_name)
|
||||
} else {
|
||||
log.console("# Configuration for actor: " + actor_name)
|
||||
log.console("")
|
||||
print_config(config.actors[actor_name], 'actors.' + actor_name)
|
||||
}
|
||||
break
|
||||
|
||||
case 'get':
|
||||
if (length(args) < 4) {
|
||||
log.error("Usage: cell config actor <name> get <key>")
|
||||
$stop()
|
||||
return
|
||||
}
|
||||
key = args[3]
|
||||
path = parse_key(key)
|
||||
value = get_nested(config.actors[actor_name], path)
|
||||
|
||||
if (value == null) {
|
||||
log.error("Key not found for actor " + actor_name + ": " + key)
|
||||
} else {
|
||||
log.console('actors.' + actor_name + '.' + key + ' = ' + format_value(value))
|
||||
}
|
||||
break
|
||||
|
||||
case 'set':
|
||||
if (length(args) < 5) {
|
||||
log.error("Usage: cell config actor <name> set <key> <value>")
|
||||
$stop()
|
||||
return
|
||||
}
|
||||
key = args[3]
|
||||
var value_str = args[4]
|
||||
path = parse_key(key)
|
||||
value = parse_value(value_str)
|
||||
|
||||
set_nested(config.actors[actor_name], path, value)
|
||||
pkg.save_config(config)
|
||||
log.console("Set actors." + actor_name + "." + key + " = " + format_value(value))
|
||||
break
|
||||
|
||||
default:
|
||||
log.error("Unknown actor command: " + actor_cmd)
|
||||
log.console("Valid commands: list, get, set")
|
||||
}
|
||||
break
|
||||
|
||||
default:
|
||||
log.error("Unknown command: " + command)
|
||||
print_help()
|
||||
}
|
||||
|
||||
$stop()
|
||||
246 crypto.c
@@ -1,246 +0,0 @@
|
||||
#include "cell.h"
|
||||
#include <stdint.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
#include "monocypher.h"
|
||||
|
||||
/*
|
||||
Crypto Module Documentation
|
||||
|
||||
This module provides cryptographic functions using the Monocypher library.
|
||||
All inputs and outputs are Blobs.
|
||||
|
||||
Functions:
|
||||
|
||||
- keypair() -> { public: Blob(256 bits), private: Blob(256 bits) }
|
||||
Generates a new random X25519 keypair.
|
||||
|
||||
- shared(public_key, private_key) -> Blob(256 bits)
|
||||
Computes a shared secret from your private key and another's public key (X25519).
|
||||
Input keys must be 256 bits (32 bytes).
|
||||
|
||||
- blake2(data, [hash_size_bytes=32]) -> Blob
|
||||
Computes the BLAKE2b hash of the data.
|
||||
Default hash size is 32 bytes (256 bits). Supports 1-64 bytes.
|
||||
|
||||
- sign(secret_key, message) -> Blob(512 bits)
|
||||
Signs a message using EdDSA.
|
||||
secret_key must be 512 bits (64 bytes).
|
||||
(Note: If you have a 32-byte seed, extend it first or use appropriate key generation).
|
||||
Returns a 64-byte signature.
|
||||
|
||||
- verify(signature, public_key, message) -> bool
|
||||
Verifies an EdDSA signature.
|
||||
signature: 512 bits (64 bytes).
|
||||
public_key: 256 bits (32 bytes).
|
||||
Returns true if valid, false otherwise.
|
||||
|
||||
- lock(key, nonce, message, [ad]) -> Blob
|
||||
Encrypts and authenticates a message using XChaCha20-Poly1305.
|
||||
key: 256 bits (32 bytes).
|
||||
nonce: 192 bits (24 bytes).
|
||||
ad: Optional associated data (Blob).
|
||||
Returns a blob containing the ciphertext followed by the 16-byte MAC.
|
||||
|
||||
- unlock(key, nonce, ciphertext_with_mac, [ad]) -> Blob or null
|
||||
Decrypts and verifies a message.
|
||||
key: 256 bits (32 bytes).
|
||||
nonce: 192 bits (24 bytes).
|
||||
ciphertext_with_mac: Must include the 16-byte MAC at the end.
|
||||
ad: Optional associated data (Blob).
|
||||
Returns the plaintext Blob if successful, or null if verification fails.
|
||||
*/
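
As a hedged illustration of the API documented above (not part of the original source): a script-side round trip through `lock`/`unlock` might look like the sketch below. The import name `crypto`, and the existence of a 256-bit `key` blob and 192-bit `nonce` blob obtained elsewhere, are assumptions.

```js
var crypto = use('crypto')
var boxed = crypto.lock(key, nonce, message)   // ciphertext followed by the 16-byte MAC
var plain = crypto.unlock(key, nonce, boxed)   // original message blob, or null if verification fails
if (plain == null) log.error("MAC check failed")
```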
|
||||
|
||||
// Helper to get blob data and check exact bit length
|
||||
static void *get_blob_check_bits(JSContext *js, JSValue val, size_t expected_bits, const char *name) {
|
||||
size_t bits;
|
||||
void* result = js_get_blob_data_bits(js, &bits, val);
|
||||
if (result == -1) {
|
||||
return NULL; // Exception already thrown by js_get_blob_data_bits
|
||||
}
|
||||
|
||||
if (bits != expected_bits) {
|
||||
JS_ThrowTypeError(js, "%s: expected %zu bits, got %zu", name, expected_bits, bits);
|
||||
return NULL;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// Helper to get any blob data (checking it is a stoned blob)
|
||||
static void *get_blob_any(JSContext *js, JSValue val, size_t *out_bits, const char *name) {
|
||||
void *result = js_get_blob_data_bits(js, out_bits, val);
|
||||
if (result == -1)
|
||||
return NULL;
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
JSValue js_crypto_shared(JSContext *js, JSValue self, int argc, JSValue *argv)
|
||||
{
|
||||
if (argc < 2) {
|
||||
return JS_ThrowTypeError(js, "crypto.shared: expected public_key, private_key");
|
||||
}
|
||||
|
||||
uint8_t *pub = get_blob_check_bits(js, argv[0], 256, "crypto.shared public_key");
|
||||
if (!pub) return JS_EXCEPTION;
|
||||
|
||||
uint8_t *priv = get_blob_check_bits(js, argv[1], 256, "crypto.shared private_key");
|
||||
if (!priv) return JS_EXCEPTION;
|
||||
|
||||
uint8_t shared[32];
|
||||
crypto_x25519(shared, priv, pub);
|
||||
|
||||
return js_new_blob_stoned_copy(js, shared, 32);
|
||||
}
|
||||
|
||||
JSValue js_crypto_blake2(JSContext *js, JSValue self, int argc, JSValue *argv)
|
||||
{
|
||||
if (argc < 1)
|
||||
return JS_ThrowTypeError(js, "crypto.blake2: expected data blob");
|
||||
|
||||
size_t data_bits;
|
||||
uint8_t *data = get_blob_any(js, argv[0], &data_bits, "crypto.blake2 data");
|
||||
if (!data) return JS_EXCEPTION;
|
||||
|
||||
int32_t hash_len = 32;
|
||||
if (argc > 1) {
|
||||
if (JS_ToInt32(js, &hash_len, argv[1]))
|
||||
return JS_EXCEPTION;
|
||||
if (hash_len < 1 || hash_len > 64)
|
||||
return JS_ThrowRangeError(js, "crypto.blake2: hash length must be between 1 and 64 bytes");
|
||||
}
|
||||
|
||||
uint8_t hash[64];
|
||||
// Use (bits + 7) / 8 to get byte length covering all bits
|
||||
crypto_blake2b(hash, hash_len, data, (data_bits + 7) / 8);
|
||||
|
||||
return js_new_blob_stoned_copy(js, hash, hash_len);
|
||||
}
|
||||
|
||||
JSValue js_crypto_sign(JSContext *js, JSValue self, int argc, JSValue *argv) {
|
||||
if (argc < 2) return JS_ThrowTypeError(js, "crypto.sign: expected secret_key, message");
|
||||
|
||||
uint8_t *sk = get_blob_check_bits(js, argv[0], 512, "crypto.sign secret_key");
|
||||
if (!sk) return JS_EXCEPTION;
|
||||
|
||||
size_t msg_bits;
|
||||
uint8_t *msg = get_blob_any(js, argv[1], &msg_bits, "crypto.sign message");
|
||||
if (!msg) return JS_EXCEPTION;
|
||||
|
||||
uint8_t sig[64];
|
||||
crypto_eddsa_sign(sig, sk, msg, (msg_bits + 7) / 8);
|
||||
|
||||
return js_new_blob_stoned_copy(js, sig, 64);
|
||||
}
|
||||
|
||||
JSValue js_crypto_verify(JSContext *js, JSValue self, int argc, JSValue *argv) {
|
||||
if (argc < 3) return JS_ThrowTypeError(js, "crypto.verify: expected signature, public_key, message");
|
||||
|
||||
uint8_t *sig = get_blob_check_bits(js, argv[0], 512, "crypto.verify signature");
|
||||
if (!sig) return JS_EXCEPTION;
|
||||
|
||||
uint8_t *pk = get_blob_check_bits(js, argv[1], 256, "crypto.verify public_key");
|
||||
if (!pk) return JS_EXCEPTION;
|
||||
|
||||
size_t msg_bits;
|
||||
uint8_t *msg = get_blob_any(js, argv[2], &msg_bits, "crypto.verify message");
|
||||
if (!msg) return JS_EXCEPTION;
|
||||
|
||||
int ret = crypto_eddsa_check(sig, pk, msg, (msg_bits + 7) / 8);
|
||||
return JS_NewBool(js, ret == 0);
|
||||
}
|
||||
|
||||
JSValue js_crypto_lock(JSContext *js, JSValue self, int argc, JSValue *argv) {
|
||||
if (argc < 3) return JS_ThrowTypeError(js, "crypto.lock: expected key, nonce, message, [ad]");
|
||||
|
||||
uint8_t *key = get_blob_check_bits(js, argv[0], 256, "crypto.lock key");
|
||||
if (!key) return JS_EXCEPTION;
|
||||
|
||||
uint8_t *nonce = get_blob_check_bits(js, argv[1], 192, "crypto.lock nonce");
|
||||
if (!nonce) return JS_EXCEPTION;
|
||||
|
||||
size_t msg_bits;
|
||||
uint8_t *msg = get_blob_any(js, argv[2], &msg_bits, "crypto.lock message");
|
||||
if (!msg) return JS_EXCEPTION;
|
||||
size_t msg_len = (msg_bits + 7) / 8;
|
||||
|
||||
size_t ad_len = 0;
|
||||
uint8_t *ad = NULL;
|
||||
if (argc > 3 && !JS_IsNull(argv[3])) {
|
||||
size_t ad_bits;
|
||||
ad = get_blob_any(js, argv[3], &ad_bits, "crypto.lock ad");
|
||||
if (!ad) return JS_EXCEPTION;
|
||||
ad_len = (ad_bits + 7) / 8;
|
||||
}
|
||||
|
||||
size_t out_len = msg_len + 16;
|
||||
uint8_t *out = malloc(out_len);
|
||||
if (!out) return JS_ThrowOutOfMemory(js);
|
||||
|
||||
// Output: [Ciphertext (msg_len)] [MAC (16)]
|
||||
crypto_aead_lock(out, out + msg_len, key, nonce, ad, ad_len, msg, msg_len);
|
||||
|
||||
JSValue ret = js_new_blob_stoned_copy(js, out, out_len);
|
||||
free(out);
|
||||
return ret;
|
||||
}
|
||||
|
||||
JSValue js_crypto_unlock(JSContext *js, JSValue self, int argc, JSValue *argv) {
|
||||
if (argc < 3) return JS_ThrowTypeError(js, "crypto.unlock: expected key, nonce, ciphertext, [ad]");
|
||||
|
||||
uint8_t *key = get_blob_check_bits(js, argv[0], 256, "crypto.unlock key");
|
||||
if (!key) return JS_EXCEPTION;
|
||||
|
||||
uint8_t *nonce = get_blob_check_bits(js, argv[1], 192, "crypto.unlock nonce");
|
||||
if (!nonce) return JS_EXCEPTION;
|
||||
|
||||
size_t cipher_bits;
|
||||
uint8_t *cipher = get_blob_any(js, argv[2], &cipher_bits, "crypto.unlock ciphertext");
|
||||
if (!cipher) return JS_EXCEPTION;
|
||||
|
||||
size_t cipher_len = (cipher_bits + 7) / 8;
|
||||
if (cipher_len < 16) return JS_ThrowTypeError(js, "crypto.unlock: ciphertext too short (min 16 bytes)");
|
||||
|
||||
size_t msg_len = cipher_len - 16;
|
||||
|
||||
size_t ad_len = 0;
|
||||
uint8_t *ad = NULL;
|
||||
if (argc > 3 && !JS_IsNull(argv[3])) {
|
||||
size_t ad_bits;
|
||||
ad = get_blob_any(js, argv[3], &ad_bits, "crypto.unlock ad");
|
||||
if (!ad) return JS_EXCEPTION;
|
||||
ad_len = (ad_bits + 7) / 8;
|
||||
}
|
||||
|
||||
uint8_t *out = malloc(msg_len > 0 ? msg_len : 1);
|
||||
if (!out) return JS_ThrowOutOfMemory(js);
|
||||
|
||||
// MAC is at cipher + msg_len
|
||||
const uint8_t *mac = cipher + msg_len;
|
||||
|
||||
if (crypto_aead_unlock(out, mac, key, nonce, ad, ad_len, cipher, msg_len) != 0) {
|
||||
free(out);
|
||||
return JS_NULL;
|
||||
}
|
||||
|
||||
JSValue ret = js_new_blob_stoned_copy(js, out, msg_len);
|
||||
free(out);
|
||||
return ret;
|
||||
}
|
||||
|
||||
static const JSCFunctionListEntry js_crypto_funcs[] = {
|
||||
JS_CFUNC_DEF("shared", 2, js_crypto_shared),
|
||||
JS_CFUNC_DEF("blake2", 2, js_crypto_blake2),
|
||||
JS_CFUNC_DEF("sign", 2, js_crypto_sign),
|
||||
JS_CFUNC_DEF("verify", 3, js_crypto_verify),
|
||||
JS_CFUNC_DEF("lock", 3, js_crypto_lock),
|
||||
JS_CFUNC_DEF("unlock", 3, js_crypto_unlock),
|
||||
};
|
||||
|
||||
JSValue js_crypto_use(JSContext *js)
|
||||
{
|
||||
JSValue obj = JS_NewObject(js);
|
||||
JS_SetPropertyFunctionList(js, obj, js_crypto_funcs, sizeof(js_crypto_funcs)/sizeof(js_crypto_funcs[0]));
|
||||
return obj;
|
||||
}
|
||||
111 debug/js.c
@@ -1,111 +0,0 @@
|
||||
#include "cell.h"
|
||||
|
||||
JSC_CCALL(os_gc, JS_RunGC(JS_GetRuntime(js)) )
|
||||
JSC_CCALL(os_mem_limit, JS_SetMemoryLimit(JS_GetRuntime(js), js2number(js,argv[0])))
|
||||
JSC_CCALL(os_gc_threshold, JS_SetGCThreshold(JS_GetRuntime(js), js2number(js,argv[0])))
|
||||
JSC_CCALL(os_max_stacksize, JS_SetMaxStackSize(JS_GetRuntime(js), js2number(js,argv[0])))
|
||||
|
||||
// Compute the approximate size of a single JS value in memory.
|
||||
JSC_CCALL(os_calc_mem,
|
||||
JSMemoryUsage mu;
|
||||
JS_ComputeMemoryUsage(JS_GetRuntime(js),&mu);
|
||||
ret = JS_NewObject(js);
|
||||
JS_SetPropertyStr(js,ret,"malloc_size",number2js(js,mu.malloc_size));
|
||||
JS_SetPropertyStr(js,ret,"malloc_limit",number2js(js,mu.malloc_limit));
|
||||
JS_SetPropertyStr(js,ret,"memory_used_size",number2js(js,mu.memory_used_size));
|
||||
JS_SetPropertyStr(js,ret,"malloc_count",number2js(js,mu.malloc_count));
|
||||
JS_SetPropertyStr(js,ret,"memory_used_count",number2js(js,mu.memory_used_count));
|
||||
JS_SetPropertyStr(js,ret,"atom_count",number2js(js,mu.atom_count));
|
||||
JS_SetPropertyStr(js,ret,"atom_size",number2js(js,mu.atom_size));
|
||||
JS_SetPropertyStr(js,ret,"str_count",number2js(js,mu.str_count));
|
||||
JS_SetPropertyStr(js,ret,"str_size",number2js(js,mu.str_size));
|
||||
JS_SetPropertyStr(js,ret,"obj_count",number2js(js,mu.obj_count));
|
||||
JS_SetPropertyStr(js,ret,"obj_size",number2js(js,mu.obj_size));
|
||||
JS_SetPropertyStr(js,ret,"prop_count",number2js(js,mu.prop_count));
|
||||
JS_SetPropertyStr(js,ret,"prop_size",number2js(js,mu.prop_size));
|
||||
JS_SetPropertyStr(js,ret,"shape_count",number2js(js,mu.shape_count));
|
||||
JS_SetPropertyStr(js,ret,"shape_size",number2js(js,mu.shape_size));
|
||||
JS_SetPropertyStr(js,ret,"js_func_count",number2js(js,mu.js_func_count));
|
||||
JS_SetPropertyStr(js,ret,"js_func_size",number2js(js,mu.js_func_size));
|
||||
JS_SetPropertyStr(js,ret,"js_func_code_size",number2js(js,mu.js_func_code_size));
|
||||
JS_SetPropertyStr(js,ret,"js_func_pc2line_count",number2js(js,mu.js_func_pc2line_count));
|
||||
JS_SetPropertyStr(js,ret,"js_func_pc2line_size",number2js(js,mu.js_func_pc2line_size));
|
||||
JS_SetPropertyStr(js,ret,"c_func_count",number2js(js,mu.c_func_count));
|
||||
JS_SetPropertyStr(js,ret,"array_count",number2js(js,mu.array_count));
|
||||
JS_SetPropertyStr(js,ret,"fast_array_count",number2js(js,mu.fast_array_count));
|
||||
JS_SetPropertyStr(js,ret,"fast_array_elements",number2js(js,mu.fast_array_elements));
|
||||
JS_SetPropertyStr(js,ret,"binary_object_count",number2js(js,mu.binary_object_count));
|
||||
JS_SetPropertyStr(js,ret,"binary_object_size",number2js(js,mu.binary_object_size));
|
||||
)
|
||||
|
||||
// Evaluate a string of JavaScript code in the current QuickJS context.
|
||||
JSC_SSCALL(os_eval,
|
||||
if (!str2) return JS_ThrowReferenceError(js, "Second argument should be the script.");
|
||||
if (!str) return JS_ThrowReferenceError(js, "First argument should be the name of the script.");
|
||||
ret = JS_Eval(js,str2,strlen(str2),str, 0);
|
||||
)
|
||||
|
||||
// Compile a string of JavaScript code into a function object.
|
||||
JSC_SSCALL(js_compile,
|
||||
if (!str2) return JS_ThrowReferenceError(js, "Second argument should be the script.");
|
||||
if (!str) return JS_ThrowReferenceError(js, "First argument should be the name of the script.");
|
||||
ret = JS_Eval(js, str2, strlen(str2), str, JS_EVAL_FLAG_COMPILE_ONLY | JS_EVAL_FLAG_BACKTRACE_BARRIER);
|
||||
)
|
||||
|
||||
// Evaluate a function object in the current QuickJS context.
|
||||
JSC_CCALL(js_eval_compile,
|
||||
JS_DupValue(js,argv[0]);
|
||||
ret = JS_EvalFunction(js, argv[0]);
|
||||
)
|
||||
|
||||
// Compile a function object into a bytecode blob.
|
||||
JSC_CCALL(js_compile_blob,
|
||||
size_t size;
|
||||
uint8_t *data = JS_WriteObject(js, &size, argv[0], JS_WRITE_OBJ_BYTECODE);
|
||||
if (!data) {
|
||||
return JS_ThrowInternalError(js, "Failed to serialize bytecode");
|
||||
}
|
||||
ret = js_new_blob_stoned_copy(js, data, size);
|
||||
js_free(js, data);
|
||||
)
|
||||
|
||||
// Compile a bytecode blob into a function object.
|
||||
JSC_CCALL(js_compile_unblob,
|
||||
size_t size;
|
||||
void *data = js_get_blob_data(js, &size, argv[0]);
|
||||
if (data == -1) return JS_EXCEPTION;
|
||||
if (!data) return JS_ThrowReferenceError(js, "No data present in blob.");
|
||||
|
||||
return JS_ReadObject(js, data, size, JS_READ_OBJ_BYTECODE);
|
||||
)
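
A hedged script-side sketch of the compile/serialize round trip these bindings expose; the module import name `js` is an assumption, the argument order follows the checks above (name first, source second).

```js
var js = use('js')
var fn = js.compile("snippet", "1 + 2")   // compile-only; returns a function object
var blob = js.compile_blob(fn)            // serialize to a stoned bytecode blob
var back = js.compile_unblob(blob)        // rebuild the function object from bytecode
js.eval_compile(back)                     // evaluate the compiled function -> 3
```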
|
||||
|
||||
// Disassemble a function object into a string.
|
||||
JSC_CCALL(js_disassemble,
|
||||
return js_debugger_fn_bytecode(js, argv[0]);
|
||||
)
|
||||
|
||||
// Return metadata about a given function.
|
||||
JSC_CCALL(js_fn_info,
|
||||
return js_debugger_fn_info(js, argv[0]);
|
||||
)
|
||||
|
||||
static const JSCFunctionListEntry js_js_funcs[] = {
|
||||
MIST_FUNC_DEF(os, calc_mem, 0),
|
||||
MIST_FUNC_DEF(os, mem_limit, 1),
|
||||
MIST_FUNC_DEF(os, gc_threshold, 1),
|
||||
MIST_FUNC_DEF(os, max_stacksize, 1),
|
||||
MIST_FUNC_DEF(os, gc, 0),
|
||||
MIST_FUNC_DEF(os, eval, 2),
|
||||
MIST_FUNC_DEF(js, compile, 2),
|
||||
MIST_FUNC_DEF(js, eval_compile, 1),
|
||||
MIST_FUNC_DEF(js, compile_blob, 1),
|
||||
MIST_FUNC_DEF(js, compile_unblob, 1),
|
||||
MIST_FUNC_DEF(js, disassemble, 1),
|
||||
MIST_FUNC_DEF(js, fn_info, 1),
|
||||
};
|
||||
|
||||
JSValue js_js_use(JSContext *js) {
|
||||
JSValue mod = JS_NewObject(js);
|
||||
JS_SetPropertyFunctionList(js,mod,js_js_funcs,countof(js_js_funcs));
|
||||
return mod;
|
||||
}
|
||||
13 docs/.pages
@@ -1,9 +1,12 @@
nav:
  - index.md
  - cellscript.md
  - tutorial.md
  - actors.md
  - packages.md
  - cli.md
  - c-modules.md
  - Standard Library: library
  - rendering.md
  - resources.md
  - input.md
  - exporting.md
  - ...
  - Appendix A - dull: dull
  - Appendix B - api: api

251 docs/actors.md
@@ -1,230 +1,77 @@
|
||||
# Actors and Modules
|
||||
# Programs and Modules
|
||||
|
||||
Cell organizes code into two types of scripts: **modules** (`.cm`) and **actors** (`.ce`).
|
||||
Prosperon organizes your code into two broad categories: **modules** and **programs**. Modules are used to extend programs with new functionality, while programs are used to spawn actors.
|
||||
|
||||
## The Actor Model
|
||||
## Modules
|
||||
|
||||
Cell is built on the actor model of computation. Each actor:
|
||||
A **module** is any file that returns a single value. This return value is commonly an object, but it can be any data type (string, number, function, etc.). Once a module returns its value, Prosperon **freezes** that value, preventing accidental modification. The module is then cached so that subsequent imports of the same module don’t re-run the file—they reuse the cached result.
|
||||
|
||||
- Has its own **isolated memory** — actors never share state
|
||||
- Runs to completion each **turn** — no preemption
|
||||
- Performs its own **garbage collection**
|
||||
- Communicates only through **message passing**
|
||||
### Importing a Module
|
||||
|
||||
This isolation makes concurrent programming safer and more predictable.
|
||||
Use the built-in `use` function to import a module by file path (or by name if resolvable via Prosperon’s path settings). For example:
|
||||
|
||||
## Modules (.cm)
|
||||
|
||||
A module is a script that **returns a value**. The returned value is cached and frozen (made stone).
|
||||
|
||||
```javascript
|
||||
// math_utils.cm
|
||||
var math = use('math/radians')
|
||||
|
||||
function distance(x1, y1, x2, y2) {
|
||||
var dx = x2 - x1
|
||||
var dy = y2 - y1
|
||||
return math.sqrt(dx * dx + dy * dy)
|
||||
}
|
||||
|
||||
function midpoint(x1, y1, x2, y2) {
|
||||
return {
|
||||
x: (x1 + x2) / 2,
|
||||
y: (y1 + y2) / 2
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
distance: distance,
|
||||
midpoint: midpoint
|
||||
}
|
||||
```
|
||||
var myModule = use('scripts/modules/myModule')
|
||||
```
|
||||
|
||||
**Key properties:**
|
||||
`use('module')` returns the **exact** same object if called multiple times, since modules are cached and not re-run.
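
A quick sketch of what that caching implies in practice:

```js
var a = use('scripts/modules/myModule')
var b = use('scripts/modules/myModule')
// The module body ran only once; a and b refer to the same frozen value.
```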
|
||||
|
||||
- **Must return a value** — it's an error not to
|
||||
- **Executed once per actor** — subsequent `use()` calls return the cached value
|
||||
- **Return value is stone** — immutable, safe to share
|
||||
- Modules can import other modules with `use()`
|
||||
Dull-based modules are resolved by searching the `prosperon.PATH` array. Engine modules are stored under `scripts/modules`, which is already added to the PATH for you.
|
||||
|
||||
### Using Modules
|
||||
Prosperon can also load C-based modules. If two modules resolve to the same path, the C-based library is imported.
|
||||
|
||||
```javascript
|
||||
var utils = use('math_utils')
|
||||
var d = utils.distance(0, 0, 3, 4) // 5
|
||||
```
|
||||
## Programs
|
||||
|
||||
## Actors (.ce)
|
||||
A **program** is a file that **does not** return a value. Instead, the file’s contents run top to bottom as soon as the program is spawned. Programs are your game’s “live” scripts: each program can hold its own state and logic, spawn sub-programs, schedule timed tasks, and eventually **kill** itself (or be killed) when it’s done.
|
||||
|
||||
An actor is a script that **does not return a value**. It runs as an independent unit of execution.
|
||||
### Program Intrinsic Functions
|
||||
|
||||
```javascript
|
||||
// worker.ce
|
||||
log.console("Worker started")
|
||||
Certain functions are intrinsic to the program and cannot be overridden. They’re assigned to each new program instance at spawn time:
|
||||
|
||||
$on_message = function(msg) {
|
||||
log.console("Received:", msg)
|
||||
// Process message...
|
||||
}
|
||||
```
|
||||
1. **`spawn(script, config, callback)`**
|
||||
Creates (spawns) a new program from another script file.
|
||||
- **`script`**: Path to the program script (a file containing statements, not returning anything).
|
||||
- **`config`**: Optional object of extra properties to assign to the new program.
|
||||
- **`callback(underling, info)`**: Optional function invoked right after the program is instantiated but before it fully initializes.
|
||||
|
||||
**Key properties:**
|
||||
The newly spawned program:
|
||||
- Receives a reference to its parent (the `overling`) and can store child programs (the `underlings`).
|
||||
- Automatically calls `awake()` if that function is defined, after basic setup completes.
|
||||
- Registers any recognized event handlers (like `update`, `draw`, etc.) if they exist.
|
||||
|
||||
- **Must not return a value** — it's an error to return
|
||||
- Has access to **actor intrinsics** (functions starting with `$`)
|
||||
- Runs until explicitly stopped or crashes
|
||||
2. **`kill()`**
|
||||
Destroys the program, all of its timers, and recursively kills any underling (child) programs. If the program has a parent, it is removed from the parent’s `underlings` set.
|
||||
|
||||
## Actor Intrinsics
|
||||
3. **`delay(fn, seconds)`**
|
||||
Runs the given function `fn` after `seconds`. This is implemented under the hood with a timer that automatically clears itself once it fires.
|
||||
- **Example**:
|
||||
```js
|
||||
this.delay(_ => {
|
||||
console.log("3 seconds later!")
|
||||
}, 3)
|
||||
```
|
||||
|
||||
Actors have access to special functions prefixed with `$`:
|
||||
4. **`clear()`**
|
||||
Recursively kills all child programs, clearing your immediate `underlings` set. This is not called automatically. You can use it to manually clean up all children without necessarily killing the program itself.
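
Taken together, a minimal sketch of these intrinsics (the script path `"enemy.ce"` and the `speed` config field are placeholders; calling them via `this` follows the `delay` example above):

```js
// Spawn a child program, let it live for five seconds, then kill it.
this.spawn("enemy.ce", { speed: 2 }, function (underling, info) {
  underling.delay(_ => underling.kill(), 5)
})
```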
|
||||
|
||||
### $me
|
||||
### The Program Lifecycle
|
||||
|
||||
Reference to the current actor.
|
||||
Specific hooks can be set on a program when it is initialized.
|
||||
|
||||
```javascript
|
||||
log.console($me) // actor reference
|
||||
```
|
||||
- **Awake**: If the new program defines `awake()`, Prosperon calls it after the script finishes its top-level execution. This is a common place to do initialization.
|
||||
- **Garbage**: When the program is killed, if it has a `garbage()` function, Prosperon calls it before final removal.
|
||||
- **Then**: If the program has a `then()` function, Prosperon calls it at the very end of the kill process, allowing any final statements after your `garbage()` logic completes.
|
||||
- **Registration**: In addition, if the object has **any** function named the same thing as a hook created with **prosperon.on**, that function will be registered with it after initialization.
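
A sketch of a program script that uses these hooks; the file name is hypothetical, and attaching the hooks via `this` is an assumption based on the intrinsic examples above.

```js
// lifecycle.ce — runs top to bottom when spawned
this.awake = function () { console.log("ready") }          // runs after top-level code finishes
this.update = function (dt) { /* per-frame work */ }        // registered if an 'update' hook exists via prosperon.on
this.garbage = function () { console.log("cleaning up") }   // runs when the program is killed
this.then = function () { console.log("fully gone") }       // runs at the very end of the kill process
```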
|
||||
|
||||
### $stop()
|
||||
### Overlings and Underlings
|
||||
|
||||
Stop the current actor.
|
||||
A program has access to its creator and to the programs created underneath it, termed its overling and underlings.
|
||||
|
||||
```javascript
|
||||
$stop()
|
||||
```
|
||||
- **`this.overling`** is the parent program that spawned the current one.
|
||||
- **`this.underlings`** is a set of child programs that the current program has spawned.
|
||||
|
||||
### $send(actor, message, callback)
|
||||
Killing a parent automatically kills all of its underlings, which in turn can kill their own underlings, and so on.
|
||||
|
||||
Send a message to another actor.
|
||||
## Program Documentation
|
||||
|
||||
```javascript
|
||||
$send(other_actor, {type: "ping", data: 42}, function(reply) {
|
||||
log.console("Got reply:", reply)
|
||||
})
|
||||
```
|
||||
|
||||
Messages are automatically **splatted** — flattened to plain data without prototypes.
|
||||
|
||||
### $start(callback, program)
|
||||
|
||||
Start a new actor from a script.
|
||||
|
||||
```javascript
|
||||
$start(function(new_actor) {
|
||||
log.console("Started:", new_actor)
|
||||
}, "worker")
|
||||
```
|
||||
|
||||
### $delay(callback, seconds)
|
||||
|
||||
Schedule a callback after a delay.
|
||||
|
||||
```javascript
|
||||
$delay(function() {
|
||||
log.console("5 seconds later")
|
||||
}, 5)
|
||||
```
|
||||
|
||||
### $clock(callback)
|
||||
|
||||
Get called every frame/tick.
|
||||
|
||||
```javascript
|
||||
$clock(function(dt) {
|
||||
// Called each tick with delta time
|
||||
})
|
||||
```
|
||||
|
||||
### $receiver(callback)
|
||||
|
||||
Set up a message receiver.
|
||||
|
||||
```javascript
|
||||
$receiver(function(message, reply) {
|
||||
// Handle incoming message
|
||||
reply({status: "ok"})
|
||||
})
|
||||
```
|
||||
|
||||
### $portal(callback, port)
|
||||
|
||||
Open a network port.
|
||||
|
||||
```javascript
|
||||
$portal(function(connection) {
|
||||
// Handle new connection
|
||||
}, 8080)
|
||||
```
|
||||
|
||||
### $contact(callback, record)
|
||||
|
||||
Connect to a remote address.
|
||||
|
||||
```javascript
|
||||
$contact(function(connection) {
|
||||
// Connected
|
||||
}, {host: "example.com", port: 80})
|
||||
```
|
||||
|
||||
### $time_limit(requestor, seconds)
|
||||
|
||||
Wrap a requestor with a timeout.
|
||||
|
||||
```javascript
|
||||
$time_limit(my_requestor, 10) // 10 second timeout
|
||||
```
|
||||
|
||||
## Module Resolution
|
||||
|
||||
When you call `use('name')`, Cell searches:
|
||||
|
||||
1. **Current package** — files relative to package root
|
||||
2. **Dependencies** — packages declared in `cell.toml`
|
||||
3. **Core** — built-in Cell modules
|
||||
|
||||
```javascript
|
||||
// From within package 'myapp':
|
||||
use('utils') // myapp/utils.cm
|
||||
use('helper/math') // myapp/helper/math.cm
|
||||
use('json') // core json module
|
||||
use('otherlib/foo') // dependency 'otherlib', file foo.cm
|
||||
```
|
||||
|
||||
Files starting with underscore (`_helper.cm`) are private to the package.
|
||||
|
||||
## Example: Simple Actor System
|
||||
|
||||
```javascript
|
||||
// main.ce - Entry point
|
||||
var config = use('config')
|
||||
|
||||
log.console("Starting application...")
|
||||
|
||||
$start(function(worker) {
|
||||
$send(worker, {task: "process", data: [1, 2, 3]})
|
||||
}, "worker")
|
||||
|
||||
$delay(function() {
|
||||
log.console("Shutting down")
|
||||
$stop()
|
||||
}, 10)
|
||||
```
|
||||
|
||||
```javascript
|
||||
// worker.ce - Worker actor
|
||||
$receiver(function(msg, reply) {
|
||||
if (msg.task == "process") {
|
||||
var result = array(msg.data, x => x * 2)
|
||||
reply({result: result})
|
||||
}
|
||||
})
|
||||
```
|
||||
|
||||
```javascript
|
||||
// config.cm - Shared configuration
|
||||
return {
|
||||
debug: true,
|
||||
timeout: 30
|
||||
}
|
||||
```
|
||||
Prosperon includes a module called `doc.js` which helps generate documentation for your modules and programs. Any function and value can be assigned a docstring, and prosperon will then be able to generate documentation for it via doc.js. Look under the module API for more info.
|
||||
|
||||
11 docs/api/actor.md Normal file
@@ -0,0 +1,11 @@
# actor

### toString() <sub>function</sub>

### spawn(script, config, callback) <sub>function</sub>

### clear() <sub>function</sub>

### kill() <sub>function</sub>

### delay(fn, seconds) <sub>function</sub>
37 docs/api/console.md Normal file
@@ -0,0 +1,37 @@
# console

The console object provides various logging, debugging, and output methods.

### print() <sub>function</sub>

### spam(msg) <sub>function</sub>

Output a spam-level message for very verbose logging.

### debug(msg) <sub>function</sub>

Output a debug-level message.

### info(msg) <sub>function</sub>

Output an info-level message.

### warn(msg) <sub>function</sub>

Output a warn-level message.

### log(msg) <sub>function</sub>

Output directly to the in-game console.

### error(e) <sub>function</sub>

Output an error-level message and print a stack trace.

### panic(e) <sub>function</sub>

Output a panic-level message and exit the program.

### assert(op, str = `assertion failed [value '${op}']`) <sub>function</sub>

If the condition is false, print an error and panic.
5 docs/api/index.md Normal file
@@ -0,0 +1,5 @@
# Appendix B - api

This is a complete list of accessible functions and parameters that are built into Prosperon. For the most part, developers will concern themselves with the modules, all of which can be imported with `use`.

Types document particular JavaScript objects with a specific object in their prototype chain, which can allow access to an underlying C data structure. Many of these are used only internally by Prosperon, but brave developers can pick around in the module internals to see how they're used and do their own thing if they want!
87 docs/api/modules/actor.md Normal file
@@ -0,0 +1,87 @@
|
||||
# actor
|
||||
|
||||
|
||||
A set of utilities for iterating over a hierarchy of actor-like objects, as well
|
||||
as managing tag-based lookups. Objects are assumed to have a "objects" property,
|
||||
pointing to children or sub-objects, forming a tree.
|
||||
|
||||
|
||||
### all_objects(fn, startobj) <sub>function</sub>
|
||||
|
||||
|
||||
Iterate over each object (and its sub-objects) in the hierarchy, calling fn for each one.
|
||||
|
||||
|
||||
**fn**: A callback function that receives each object. If it returns a truthy value, iteration stops and that value is returned.
|
||||
|
||||
**startobj**: The root object at which iteration begins, default is the global "world".
|
||||
|
||||
|
||||
**Returns**: The first truthy value returned by fn, or undefined if none.
|
||||
|
||||
|
||||
### find_object(fn, startobj) <sub>function</sub>
|
||||
|
||||
|
||||
Intended to find a matching object within the hierarchy.
|
||||
|
||||
|
||||
**fn**: A callback or criteria to locate a particular object.
|
||||
|
||||
**startobj**: The root object at which search begins, default "world".
|
||||
|
||||
|
||||
**Returns**: Not yet implemented.
|
||||
|
||||
|
||||
### tag_add(tag, obj) <sub>function</sub>
|
||||
|
||||
|
||||
Associate the given object with the specified tag. Creates a new tag set if it does not exist.
|
||||
|
||||
|
||||
**tag**: A string tag to associate with the object.
|
||||
|
||||
**obj**: The object to add under this tag.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### tag_rm(tag, obj) <sub>function</sub>
|
||||
|
||||
|
||||
Remove the given object from the specified tag’s set, if it exists.
|
||||
|
||||
|
||||
**tag**: The tag to remove the object from.
|
||||
|
||||
**obj**: The object to remove from the tag set.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### tag_clear_guid(obj) <sub>function</sub>
|
||||
|
||||
|
||||
Remove the object from all tag sets.
|
||||
|
||||
|
||||
**obj**: The object whose tags should be cleared.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### objects_with_tag(tag) <sub>function</sub>
|
||||
|
||||
|
||||
Retrieve all objects currently tagged with the specified tag.
|
||||
|
||||
|
||||
**tag**: A string tag to look up.
|
||||
|
||||
|
||||
**Returns**: An array of objects associated with the given tag.
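
A brief usage sketch of the tag helpers above; the import name `actor` and the object name `goblin` are placeholders.

```js
var actor = use('actor')
actor.tag_add("enemy", goblin)     // goblin is any object in the hierarchy
actor.objects_with_tag("enemy")    // -> [goblin]
actor.tag_rm("enemy", goblin)
actor.tag_clear_guid(goblin)       // remove goblin from every tag set
```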
|
||||
|
||||
46 docs/api/modules/camera.md Normal file
@@ -0,0 +1,46 @@
|
||||
# camera
|
||||
|
||||
### list() <sub>function</sub>
|
||||
|
||||
Return an array of available camera device IDs.
|
||||
|
||||
|
||||
|
||||
**Returns**: An array of camera IDs, or undefined if no cameras are available.
|
||||
|
||||
|
||||
### open(id) <sub>function</sub>
|
||||
|
||||
Open a camera device with the given ID.
|
||||
|
||||
|
||||
|
||||
**id**: The camera ID to open.
|
||||
|
||||
|
||||
**Returns**: A camera object on success, or throws an error if the camera cannot be opened.
|
||||
|
||||
|
||||
### name(id) <sub>function</sub>
|
||||
|
||||
Return the name of the camera with the given ID.
|
||||
|
||||
|
||||
|
||||
**id**: The camera ID to query.
|
||||
|
||||
|
||||
**Returns**: A string with the camera's name, or throws an error if the name cannot be retrieved.
|
||||
|
||||
|
||||
### position(id) <sub>function</sub>
|
||||
|
||||
Return the physical position of the camera with the given ID.
|
||||
|
||||
|
||||
|
||||
**id**: The camera ID to query.
|
||||
|
||||
|
||||
**Returns**: A string indicating the camera position ("unknown", "front", or "back").
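
A short sketch tying these calls together (the import name `camera` is an assumption):

```js
var camera = use('camera')
var ids = camera.list()
if (ids) {
  var id = ids[0]
  console.log(camera.name(id) + " (" + camera.position(id) + ")")
  var cam = camera.open(id)   // throws if the device cannot be opened
}
```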
|
||||
|
||||
7 docs/api/modules/cmd.md Normal file
@@ -0,0 +1,7 @@
# cmd

### length <sub>number</sub>

### name <sub>string</sub>

### prototype <sub>object</sub>
7 docs/api/modules/color.md Normal file
@@ -0,0 +1,7 @@
# color

### Color <sub>object</sub>

### esc <sub>object</sub>

### ColorMap <sub>object</sub>
76 docs/api/modules/debug.md Normal file
@@ -0,0 +1,76 @@
|
||||
# debug
|
||||
|
||||
### stack_depth() <sub>function</sub>
|
||||
|
||||
Return the current stack depth.
|
||||
|
||||
|
||||
|
||||
**Returns**: A number representing the stack depth.
|
||||
|
||||
|
||||
### build_backtrace() <sub>function</sub>
|
||||
|
||||
Build and return a backtrace of the current call stack.
|
||||
|
||||
|
||||
|
||||
**Returns**: An object representing the call stack backtrace.
|
||||
|
||||
|
||||
### closure_vars(fn) <sub>function</sub>
|
||||
|
||||
Return the closure variables for a given function.
|
||||
|
||||
|
||||
|
||||
**fn**: The function object to inspect.
|
||||
|
||||
|
||||
**Returns**: An object containing the closure variables.
|
||||
|
||||
|
||||
### local_vars(depth) <sub>function</sub>
|
||||
|
||||
Return the local variables for a specific stack frame.
|
||||
|
||||
|
||||
|
||||
**depth**: The stack frame depth to inspect.
|
||||
|
||||
|
||||
**Returns**: An object containing the local variables at the specified depth.
|
||||
|
||||
|
||||
### fn_info(fn) <sub>function</sub>
|
||||
|
||||
Return metadata about a given function.
|
||||
|
||||
|
||||
|
||||
**fn**: The function object to inspect.
|
||||
|
||||
|
||||
**Returns**: An object with metadata about the function.
|
||||
|
||||
|
||||
### backtrace_fns() <sub>function</sub>
|
||||
|
||||
Return an array of functions in the current backtrace.
|
||||
|
||||
|
||||
|
||||
**Returns**: An array of function objects from the call stack.
|
||||
|
||||
|
||||
### dump_obj(obj) <sub>function</sub>
|
||||
|
||||
Return a string representation of a given object.
|
||||
|
||||
|
||||
|
||||
**obj**: The object to dump.
|
||||
|
||||
|
||||
**Returns**: A string describing the object's contents.
|
||||
|
||||
39 docs/api/modules/dmon.md Normal file
@@ -0,0 +1,39 @@
|
||||
# dmon
|
||||
|
||||
### watch() <sub>function</sub>
|
||||
|
||||
Start watching the root directory, recursively.
|
||||
|
||||
This function begins monitoring the specified directory and its subdirectories recursively for events such as file creation, deletion, modification, or movement. Events are queued and can be retrieved by calling poll.
|
||||
|
||||
:throws: An error if dmon is already watching.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### unwatch() <sub>function</sub>
|
||||
|
||||
Stop watching the currently monitored directory.
|
||||
|
||||
This function halts filesystem monitoring for the directory previously set by watch. It clears the watch state, allowing a new watch to be started.
|
||||
|
||||
:throws: An error if no directory is currently being watched.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### poll(callback) <sub>function</sub>
|
||||
|
||||
Retrieve and process queued filesystem events.
|
||||
|
||||
This function dequeues all pending filesystem events and invokes the provided callback for each one. The callback receives an event object with properties: 'action' (string: "create", "delete", "modify", or "move"), 'root' (string: watched directory), 'file' (string: affected file path), and 'old' (string: previous file path for move events, empty if not applicable).
|
||||
|
||||
|
||||
|
||||
**callback**: A function to call for each event, receiving an event object as its argument.
|
||||
|
||||
|
||||
**Returns**: None
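
A hedged polling-loop sketch based on the event fields described above (the import name `dmon` is an assumption):

```js
var dmon = use('dmon')
dmon.watch()                       // begin recursive watching; throws if already watching
dmon.poll(function (ev) {
  // ev.action is "create", "delete", "modify", or "move"
  console.log(ev.action + " " + ev.file + (ev.old ? " (was " + ev.old + ")" : ""))
})
```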
|
||||
|
||||
38 docs/api/modules/doc.md Normal file
@@ -0,0 +1,38 @@
|
||||
# doc
|
||||
|
||||
|
||||
Provides a consistent way to create documentation for prosperon elements. Objects are documented by adding docstrings directly to object-like things (functions, objects, ...), or to an object's own "doc object".
|
||||
|
||||
Docstrings are set to the symbol `prosperon.DOC`
|
||||
|
||||
```js
|
||||
// Suppose we have a module that returns a function
|
||||
function greet(name) { console.log("Hello, " + name) }
|
||||
|
||||
// We can attach a docstring
|
||||
greet.doc = `
|
||||
Greets the user by name.
|
||||
`
|
||||
|
||||
// A single function is a valid return!
|
||||
return greet
|
||||
```
|
||||
|
||||
```js
|
||||
// Another way is to add a docstring object to an object
|
||||
var greet = {
|
||||
hello() { console.log('hello!') }
|
||||
}
|
||||
|
||||
greet[prosperon.DOC] = {}
|
||||
greet[prosperon.DOC][prosperon.DOC] = 'An object full of different greeter functions'
|
||||
greet[prosperon.DOC].hello = 'A greeter that says, "hello!"'
|
||||
```
|
||||
|
||||
|
||||
**name**: The name of the person to greet.
|
||||
|
||||
|
||||
### writeDocFile(obj, title) <sub>function</sub>
|
||||
|
||||
Return a markdown string for a given obj, with an optional title.
|
||||
228 docs/api/modules/draw2d.md Normal file
@@ -0,0 +1,228 @@
|
||||
# draw2d
|
||||
|
||||
|
||||
A collection of 2D drawing functions that operate in screen space. Provides primitives
|
||||
for lines, rectangles, text, sprite drawing, etc.
|
||||
|
||||
|
||||
### point(pos, size, color) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**pos**: A 2D position ([x, y]) where the point should be drawn.
|
||||
|
||||
**size**: The size of the point (not currently affecting rendering).
|
||||
|
||||
**color**: The color of the point, defaults to Color.blue.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### line(points, color, thickness, pipeline) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**points**: An array of 2D positions representing the line vertices.
|
||||
|
||||
**color**: The color of the line, default Color.white.
|
||||
|
||||
**thickness**: The line thickness, default 1.
|
||||
|
||||
**pipeline**: (Optional) A pipeline or rendering state object.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### cross(pos, size, color, thickness, pipe) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**pos**: The center of the cross as a 2D position ([x, y]).
|
||||
|
||||
**size**: Half the size of each cross arm.
|
||||
|
||||
**color**: The color of the cross, default Color.red.
|
||||
|
||||
**thickness**: The thickness of each line, default 1.
|
||||
|
||||
**pipe**: (Optional) A pipeline or rendering state object.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### arrow(start, end, color, wingspan, wingangle, pipe) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**start**: The start position of the arrow ([x, y]).
|
||||
|
||||
**end**: The end (tip) position of the arrow ([x, y]).
|
||||
|
||||
**color**: The color, default Color.red.
|
||||
|
||||
**wingspan**: The length of each arrowhead 'wing', default 4.
|
||||
|
||||
**wingangle**: Wing rotation in degrees, default 10.
|
||||
|
||||
**pipe**: (Optional) A pipeline or rendering state object.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### rectangle(rect, color, pipeline) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**rect**: A rectangle object with {x, y, width, height}.
|
||||
|
||||
**color**: The fill color, default Color.white.
|
||||
|
||||
**pipeline**: (Optional) A pipeline or rendering state object.
|
||||
|
||||
|
||||
**Returns**: None
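
A hedged sketch of a couple of these screen-space calls, using the parameter shapes documented above; `Color` is assumed to come from the color module.

```js
var draw2d = use('draw2d')
draw2d.line([[0, 0], [100, 50], [200, 0]], Color.white, 2)
draw2d.rectangle({ x: 10, y: 10, width: 80, height: 40 }, Color.blue)
```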
|
||||
|
||||
|
||||
### tile(image, rect, color, tile, pipeline) <sub>function</sub>
|
||||
|
||||
|
||||
:raises Error: If no image is provided.
|
||||
|
||||
|
||||
**image**: An image object or string path to a texture.
|
||||
|
||||
**rect**: A rectangle specifying draw location/size ({x, y, width, height}).
|
||||
|
||||
**color**: The color tint, default Color.white.
|
||||
|
||||
**tile**: A tiling definition ({repeat_x, repeat_y}), default tile_def.
|
||||
|
||||
**pipeline**: (Optional) A pipeline or rendering state object.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### slice9(image, rect, slice, color, info, pipeline) <sub>function</sub>
|
||||
|
||||
|
||||
:raises Error: If no image is provided.
|
||||
|
||||
|
||||
**image**: An image object or string path to a texture.
|
||||
|
||||
**rect**: A rectangle specifying draw location/size, default [0, 0].
|
||||
|
||||
**slice**: The pixel inset or spacing for the 9-slice (number or object).
|
||||
|
||||
**color**: The color tint, default Color.white.
|
||||
|
||||
**info**: A slice9 info object controlling tiling of edges/corners.
|
||||
|
||||
**pipeline**: (Optional) A pipeline or rendering state object.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### image(image, rect, rotation, color, pipeline) <sub>function</sub>
|
||||
|
||||
|
||||
:raises Error: If no image is provided.
|
||||
|
||||
|
||||
**image**: An image object or string path to a texture.
|
||||
|
||||
**rect**: A rectangle specifying draw location/size, default [0,0]; width/height default to image size.
|
||||
|
||||
**rotation**: Rotation in degrees (not currently used).
|
||||
|
||||
**color**: The color tint, default none.
|
||||
|
||||
**pipeline**: (Optional) A pipeline or rendering state object.
|
||||
|
||||
|
||||
**Returns**: A sprite object that was created for this draw call.
|
||||
|
||||
|
||||
### images(image, rects, config) <sub>function</sub>
|
||||
|
||||
|
||||
:raises Error: If no image is provided.
|
||||
|
||||
|
||||
**image**: An image object or string path to a texture.
|
||||
|
||||
**rects**: An array of rectangle objects ({x, y, width, height}) to draw.
|
||||
|
||||
**config**: (Unused) Additional config data if needed.
|
||||
|
||||
|
||||
**Returns**: An array of sprite objects created and queued for rendering.
|
||||
|
||||
|
||||
### sprites(sprites, sort, pipeline) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**sprites**: An array of sprite objects to draw.
|
||||
|
||||
**sort**: Sorting mode or order, default 0.
|
||||
|
||||
**pipeline**: (Optional) A pipeline or rendering state object.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### circle(pos, radius, color, inner_radius, pipeline) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**pos**: Center of the circle ([x, y]).
|
||||
|
||||
**radius**: The circle radius.
|
||||
|
||||
**color**: The fill color of the circle, default none.
|
||||
|
||||
**inner_radius**: (Unused) Possibly ring thickness, default 1.
|
||||
|
||||
**pipeline**: (Optional) A pipeline or rendering state object.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### text(text, rect, font, size, color, wrap, pipeline) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**text**: The string to draw.
|
||||
|
||||
**rect**: A rectangle specifying draw position (and possibly wrapping area).
|
||||
|
||||
**font**: A font object or string path, default sysfont.
|
||||
|
||||
**size**: (Unused) Possibly intended for scaling the font size.
|
||||
|
||||
**color**: The text color, default Color.white.
|
||||
|
||||
**wrap**: Pixel width for text wrapping, default 0 (no wrap).
|
||||
|
||||
**pipeline**: (Optional) A pipeline or rendering state object.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
45 docs/api/modules/enet.md Normal file
@@ -0,0 +1,45 @@
|
||||
# enet
|
||||
|
||||
### initialize() <sub>function</sub>
|
||||
|
||||
|
||||
Initialize the ENet library. Must be called before using any ENet functionality.
|
||||
Throws an error if initialization fails.
|
||||
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### deinitialize() <sub>function</sub>
|
||||
|
||||
|
||||
Deinitialize the ENet library, cleaning up all resources. Call this when you no longer
|
||||
need any ENet functionality.
|
||||
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### create_host(address) <sub>function</sub>
|
||||
|
||||
|
||||
Create an ENet host for either a client-like unbound host or a server bound to a specific
|
||||
address and port:
|
||||
|
||||
- If no argument is provided, creates an unbound "client-like" host with default settings
|
||||
(maximum 32 peers, 2 channels, unlimited bandwidth).
|
||||
- If you pass an "ip:port" string (e.g. "127.0.0.1:7777"), it creates a server bound to
|
||||
that address. The server supports up to 32 peers, 2 channels, and unlimited bandwidth.
|
||||
|
||||
Throws an error if host creation fails for any reason.
|
||||
|
||||
omit to create an unbound client-like host.
|
||||
|
||||
|
||||
**address**: (optional) A string in 'ip:port' format to bind the host (server), or
|
||||
|
||||
|
||||
**Returns**: An ENetHost object.
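
A minimal sketch of host creation as described above (the import name `enet` is an assumption):

```js
var enet = use('enet')
enet.initialize()                                 // must precede any other ENet call
var server = enet.create_host("127.0.0.1:7777")   // bound server host
var client = enet.create_host()                   // unbound, client-like host
// ... exchange packets ...
enet.deinitialize()
```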
|
||||
|
||||
25 docs/api/modules/event.md Normal file
@@ -0,0 +1,25 @@
# event

### push_event(event) <sub>function</sub>

Push a custom user event into SDL's queue, passing a callback function.

**event**: A function to call when this event is consumed.

**Returns**: None

### engine_input(callback) <sub>function</sub>

Poll all system events (keyboard, mouse, etc.) and call the given function with each event object.

**callback**: A function that executes on each event consumed from the poll.

**Returns**: None
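
A hedged sketch of draining the queue and posting a custom event (the import name `event` is an assumption):

```js
var event = use('event')
event.engine_input(function (ev) {
  // called once per pending system event (keyboard, mouse, ...)
})
event.push_event(function () {
  // runs when this custom event is consumed from the queue
})
```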
221 docs/api/modules/geometry.md Normal file
@@ -0,0 +1,221 @@
|
||||
# geometry
|
||||
|
||||
|
||||
A collection of geometry-related functions for circles, spheres, boxes, polygons,
|
||||
and rectangle utilities. Some functionality is implemented in C and exposed here.
|
||||
|
||||
|
||||
### rect_intersection(a, b) <sub>function</sub>
|
||||
|
||||
|
||||
Return the intersection of two rectangles. The result may be empty if no intersection.
|
||||
|
||||
|
||||
**a**: The first rectangle as {x, y, w, h}.
|
||||
|
||||
**b**: The second rectangle as {x, y, w, h}.
|
||||
|
||||
|
||||
**Returns**: A rectangle that is the intersection of the two. May have zero width/height if no overlap.
|
||||
|
||||
|
||||
### rect_intersects(a, b) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**a**: Rectangle {x,y,w,h}.
|
||||
|
||||
**b**: Rectangle {x,y,w,h}.
|
||||
|
||||
|
||||
**Returns**: A boolean indicating whether the two rectangles overlap.
|
||||
|
||||
|
||||
### rect_expand(a, b) <sub>function</sub>
|
||||
|
||||
|
||||
Merge or combine two rectangles, returning their bounding rectangle.
|
||||
|
||||
|
||||
**a**: Rectangle {x,y,w,h}.
|
||||
|
||||
**b**: Rectangle {x,y,w,h}.
|
||||
|
||||
|
||||
**Returns**: A new rectangle that covers the bounds of both input rectangles.
|
||||
|
||||
|
||||
### rect_inside(inner, outer) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**inner**: A rectangle to test.
|
||||
|
||||
**outer**: A rectangle that may contain 'inner'.
|
||||
|
||||
|
||||
**Returns**: True if 'inner' is completely inside 'outer', otherwise false.
|
||||
|
||||
|
||||
### rect_random(rect) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**rect**: A rectangle {x,y,w,h}.
|
||||
|
||||
|
||||
**Returns**: A random point within the rectangle (uniform distribution).
|
||||
|
||||
|
||||
### cwh2rect(center, wh) <sub>function</sub>
|
||||
|
||||
|
||||
Helper: convert a center point and width/height vector to a rect object.
|
||||
|
||||
|
||||
**center**: A 2D point [cx, cy].
|
||||
|
||||
**wh**: A 2D size [width, height].
|
||||
|
||||
|
||||
**Returns**: A rectangle {x, y, w, h} with x,y set to center and w,h set to the given size.
|
||||
|
||||
|
||||
### rect_point_inside(rect, point) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**rect**: A rectangle {x,y,w,h}.
|
||||
|
||||
**point**: A 2D point [px, py].
|
||||
|
||||
|
||||
**Returns**: True if the point lies inside the rectangle, otherwise false.
|
||||
|
||||
|
||||
### rect_pos(rect) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**rect**: A rectangle {x,y,w,h}.
|
||||
|
||||
|
||||
**Returns**: A 2D vector [x,y] giving the rectangle's position.
|
||||
|
||||
|
||||
### rect_move(rect, offset) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**rect**: A rectangle {x,y,w,h}.
|
||||
|
||||
**offset**: A 2D vector to add to the rectangle's position.
|
||||
|
||||
|
||||
**Returns**: A new rectangle with updated x,y offset.
|
||||
|
||||
|
||||
### box(w, h) <sub>function</sub>
|
||||
|
||||
|
||||
Construct a box centered at the origin with the given width and height. This overrides the box object above.
|
||||
|
||||
|
||||
**w**: The width of the box.
|
||||
|
||||
**h**: The height of the box.
|
||||
|
||||
|
||||
**Returns**: An array of four 2D points representing the corners of a rectangle centered at [0,0].
|
||||
|
||||
|
||||
### sphere <sub>object</sub>
|
||||
|
||||
|
||||
Sphere-related geometry functions:
|
||||
- volume(r): Return the volume of a sphere with radius r.
|
||||
- random(r, theta, phi): Return a random point on or inside a sphere.
|
||||
|
||||
|
||||
### circle <sub>object</sub>
|
||||
|
||||
|
||||
Circle-related geometry functions:
|
||||
- area(r): Return the area of a circle with radius r.
|
||||
- random(r, theta): Return a random 2D point on a circle; uses sphere.random internally and extracts x,z.
|
||||
|
||||
|
||||
### ngon(radius, n) <sub>function</sub>
|
||||
|
||||
|
||||
Generates a regular n-gon by calling geometry.arc with full 360 degrees.
|
||||
|
||||
|
||||
**radius**: The radius of the n-gon from center to each vertex.
|
||||
|
||||
**n**: Number of sides/vertices.
|
||||
|
||||
|
||||
**Returns**: An array of 2D points forming a regular n-gon.
|
||||
|
||||
|
||||
### arc(radius, angle, n, start) <sub>function</sub>
|
||||
|
||||
|
||||
Generate an arc (or partial circle) of n points, each angle spread equally over 'angle' degrees from 'start'.
|
||||
|
||||
|
||||
**radius**: The distance from center to the arc points.
|
||||
|
||||
**angle**: The total angle (in degrees) over which points are generated, capped at 360.
|
||||
|
||||
**n**: Number of segments (if <=1, empty array is returned).
|
||||
|
||||
**start**: Starting angle (in degrees), default 0.
|
||||
|
||||
|
||||
**Returns**: An array of 2D points along the arc.
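For instance, a minimal usage sketch (not taken from the engine source; it assumes the module is simply in scope as `geometry` and that points are plain `[x, y]` arrays, as used elsewhere in these docs):

```js
// A regular hexagon with radius 16, centered at the origin.
var hex = geometry.ngon(16, 6);        // 6 vertices spread over a full 360 degrees

// A quarter-circle arc of 8 points starting at 90 degrees.
var quarter = geometry.arc(16, 90, 8, 90);

// Each entry is a 2D point; offset them to place the shape in the world.
var placed = hex.map(function (p) { return [p[0] + 100, p[1] + 50]; });
```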
|
||||
|
||||
|
||||
### corners2points(ll, ur) <sub>function</sub>
|
||||
|
||||
|
||||
Similar to box.points, but the corners are computed from an explicit lower-left corner and upper-right offset rather than a width and height.
|
||||
|
||||
|
||||
**ll**: Lower-left 2D coordinate.
|
||||
|
||||
**ur**: Upper-right 2D coordinate (relative offset in x,y).
|
||||
|
||||
|
||||
**Returns**: A four-point array of corners [ll, lower-right, upper-right, upper-left].
|
||||
|
||||
|
||||
### sortpointsccw(points) <sub>function</sub>
|
||||
|
||||
|
||||
Sort an array of points in CCW order based on their angles from the centroid.
|
||||
|
||||
|
||||
**points**: An array of 2D points.
|
||||
|
||||
|
||||
**Returns**: A new array of the same points, sorted counterclockwise around their centroid.
|
||||
|
||||
|
||||
### points2cm(points) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**points**: An array of 2D points.
|
||||
|
||||
|
||||
**Returns**: The centroid (average x,y) of the given points.
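A minimal sketch of using these two helpers together (assuming the module is in scope as `geometry`; the point values are arbitrary examples):

```js
var pts = [[0, 0], [10, 10], [10, 0], [0, 10]];

// Centroid (average x,y) of the point cloud.
var center = geometry.points2cm(pts);

// Re-order the same points counterclockwise around that centroid,
// e.g. before treating them as a polygon outline.
var ccw = geometry.sortpointsccw(pts);
```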
|
||||
|
||||
278
docs/api/modules/graphics.md
Normal file
@@ -0,0 +1,278 @@
|
||||
# graphics
|
||||
|
||||
|
||||
Provides functionality for loading and managing images, fonts, textures, and sprite meshes.
|
||||
Includes both JavaScript and C-implemented routines for creating geometry buffers, performing
|
||||
rectangle packing, etc.
|
||||
|
||||
|
||||
### make_sprite_mesh(sprites) <sub>function</sub>


Given an array of sprites, build a single geometry mesh for rendering them.


**sprites**: An array of sprite objects, each containing .rect (or transform), .src (UV region), .color, etc.

**oldMesh**: (optional) An existing mesh object to reuse/resize if possible.


**Returns**: A GPU mesh object with pos, uv, color, and indices buffers for all sprites.
|
||||
|
||||
|
||||
### make_sprite_queue(sprites, camera, pipeline, sort) <sub>function</sub>
|
||||
|
||||
|
||||
Given an array of sprites, optionally sort them, then build a queue of pipeline commands.
|
||||
Each group with a shared image becomes one command.
|
||||
|
||||
|
||||
**sprites**: An array of sprite objects.
|
||||
|
||||
**camera**: Typically a camera or transform used for sorting (currently unused by the C implementation).
|
||||
|
||||
**pipeline**: A pipeline object for rendering.
|
||||
|
||||
**sort**: An integer or boolean for whether to sort sprites; if truthy, sorts by layer & texture.
|
||||
|
||||
|
||||
**Returns**: An array of pipeline commands: geometry with mesh references, grouped by image.
|
||||
|
||||
|
||||
### make_text_buffer(text, rect, angle, color, wrap, font) <sub>function</sub>
|
||||
|
||||
|
||||
Generate a GPU buffer mesh of text quads for rendering with a font, etc.
|
||||
|
||||
|
||||
**text**: The string to render.
|
||||
|
||||
**rect**: A rectangle specifying position and possibly wrapping.
|
||||
|
||||
**angle**: Rotation angle (unused or optional).
|
||||
|
||||
**color**: A color for the text (could be a vec4).
|
||||
|
||||
**wrap**: The width in pixels to wrap text, or 0 for no wrap.
|
||||
|
||||
**font**: A font object created by graphics.make_font or graphics.get_font.
|
||||
|
||||
|
||||
**Returns**: A geometry buffer mesh (pos, uv, color, indices) for rendering text.
|
||||
|
||||
|
||||
### rectpack(width, height, sizes) <sub>function</sub>
|
||||
|
||||
|
||||
Perform a rectangle packing using the stbrp library. Return positions for each rect.
|
||||
|
||||
|
||||
**width**: The width of the area to pack into.
|
||||
|
||||
**height**: The height of the area to pack into.
|
||||
|
||||
**sizes**: An array of [w,h] pairs for the rectangles to pack.
|
||||
|
||||
|
||||
**Returns**: An array of [x,y] coordinates placing each rect, or null if they don't fit.
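A minimal sketch (not from the engine source; it assumes the module is in scope as `graphics` and uses arbitrary example sizes):

```js
// Try to pack three rectangles into a 256x256 area.
var sizes = [[64, 64], [128, 32], [100, 100]];
var positions = graphics.rectpack(256, 256, sizes);

if (positions === null) {
  // The rectangles did not all fit in the requested area.
} else {
  // positions[i] is the [x, y] placement for sizes[i].
}
```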
|
||||
|
||||
|
||||
### make_rtree() <sub>function</sub>
|
||||
|
||||
|
||||
Create a new R-Tree for geometry queries.
|
||||
|
||||
|
||||
**Returns**: An R-Tree object for quickly querying many rectangles or sprite bounds.
|
||||
|
||||
|
||||
### make_texture(data) <sub>function</sub>
|
||||
|
||||
|
||||
Convert raw image bytes into an SDL_Surface object.
|
||||
|
||||
|
||||
**data**: Raw image bytes (PNG, JPG, etc.) as an ArrayBuffer.
|
||||
|
||||
|
||||
**Returns**: An SDL_Surface object representing the decoded image in RAM, for use with GPU or software rendering.
|
||||
|
||||
|
||||
### make_gif(data) <sub>function</sub>
|
||||
|
||||
|
||||
Load a GIF, returning its frames. If it's a single-frame GIF, the result may have .surface only.
|
||||
|
||||
|
||||
**data**: An ArrayBuffer containing GIF data.
|
||||
|
||||
|
||||
**Returns**: An object with frames[], each frame having its own .surface. Some also have a .texture for GPU use.
|
||||
|
||||
|
||||
### make_aseprite(data) <sub>function</sub>
|
||||
|
||||
|
||||
Load an Aseprite/ASE file from an array of bytes, returning frames or animations.
|
||||
|
||||
|
||||
**data**: An ArrayBuffer containing Aseprite (ASE) file data.
|
||||
|
||||
|
||||
**Returns**: An object containing frames or animations, each with .surface. May also have top-level .surface for a single-layer case.
|
||||
|
||||
|
||||
### cull_sprites(sprites, camera) <sub>function</sub>
|
||||
|
||||
|
||||
Filter an array of sprites to only those visible in the provided camera’s view.
|
||||
|
||||
|
||||
**sprites**: An array of sprite objects (each has rect or transform).
|
||||
|
||||
**camera**: A camera or bounding rectangle defining the view area.
|
||||
|
||||
|
||||
**Returns**: A new array of sprites that are visible in the camera's view.
|
||||
|
||||
|
||||
### rects_to_sprites(rects, image) <sub>function</sub>
|
||||
|
||||
|
||||
Convert an array of rect coords into sprite objects referencing a single image.
|
||||
|
||||
|
||||
**rects**: An array of rect coords or objects.
|
||||
|
||||
**image**: An image object (with .texture).
|
||||
|
||||
|
||||
**Returns**: An array of sprite objects referencing the 'image' and each rect for UV or position.
|
||||
|
||||
|
||||
### make_surface(dimensions) <sub>function</sub>
|
||||
|
||||
|
||||
Create a blank surface in RAM.
|
||||
|
||||
|
||||
**dimensions**: The size object {width, height}, or an array [w,h].
|
||||
|
||||
|
||||
**Returns**: A blank RGBA surface with the given dimensions, typically for software rendering or icons.
|
||||
|
||||
|
||||
### make_cursor(opts) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**opts**: An object with {surface, hotx, hoty} or similar.
|
||||
|
||||
|
||||
**Returns**: An SDL_Cursor object referencing the given surface for a custom mouse cursor.
|
||||
|
||||
|
||||
### make_font(data, size) <sub>function</sub>
|
||||
|
||||
|
||||
Load a font from TTF/OTF data at the given size.
|
||||
|
||||
|
||||
**data**: TTF/OTF file data as an ArrayBuffer.
|
||||
|
||||
**size**: Pixel size for rendering glyphs.
|
||||
|
||||
|
||||
**Returns**: A font object with surface, texture, and glyph data, for text rendering with make_text_buffer.
|
||||
|
||||
|
||||
### make_sprite() <sub>function</sub>
|
||||
|
||||
|
||||
Create a new sprite object, storing default properties.
|
||||
|
||||
|
||||
**Returns**: A new sprite object, which typically has .rect, .color, .layer, .image, etc.
|
||||
|
||||
|
||||
### make_line_prim(points, thickness, startCap, endCap, color) <sub>function</sub>
|
||||
|
||||
|
||||
Build a GPU mesh representing a thick polyline from an array of points, using parsl or a similar library under the hood.
|
||||
|
||||
|
||||
**points**: An array of [x,y] points forming the line.
|
||||
|
||||
**thickness**: The thickness (width) of the polyline.
|
||||
|
||||
**startCap**: (Unused) Possibly the type of cap for the start.
|
||||
|
||||
**endCap**: (Unused) Possibly the type of cap for the end.
|
||||
|
||||
**color**: A color to apply to the line.
|
||||
|
||||
|
||||
**Returns**: A geometry mesh object suitable for rendering the line via a pipeline command.
|
||||
|
||||
|
||||
### is_image(obj) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
|
||||
**obj**: An object to check.
|
||||
|
||||
|
||||
**Returns**: True if 'obj' has a .texture and a .rect property, indicating it's an image object.
|
||||
|
||||
|
||||
### texture(path) <sub>function</sub>
|
||||
|
||||
|
||||
Load or retrieve a cached image, converting it into a GPU texture. If 'path' is already an object, it’s returned directly.
|
||||
|
||||
|
||||
**path**: A string path to an image file or an already-loaded image object.
|
||||
|
||||
|
||||
**Returns**: An image object with {surface, texture, frames?, etc.} depending on the format.
|
||||
|
||||
|
||||
### tex_hotreload(file) <sub>function</sub>
|
||||
|
||||
|
||||
Reload the image for the given file, updating the cached copy in memory and GPU.
|
||||
|
||||
|
||||
**file**: The file path that was changed on disk.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### get_font(path, size) <sub>function</sub>
|
||||
|
||||
|
||||
Load a font from file if not cached, or retrieve from cache if already loaded.
|
||||
|
||||
|
||||
**path**: A string path to a font file, optionally with ".size" appended.
|
||||
|
||||
**size**: Pixel size of the font, if not included in 'path'.
|
||||
|
||||
|
||||
**Returns**: A font object with .surface and .texture for rendering text.
|
||||
|
||||
|
||||
### queue_sprite_mesh(queue) <sub>function</sub>
|
||||
|
||||
|
||||
Builds a single geometry mesh for all sprite-type commands in the queue, storing first_index/num_indices
|
||||
so they can be rendered in one draw call.
|
||||
|
||||
|
||||
**queue**: An array of draw commands, some of which are {type:'sprite'} objects.
|
||||
|
||||
|
||||
**Returns**: An array of references to GPU buffers [pos,uv,color,indices].
|
||||
|
||||
1244
docs/api/modules/imgui.md
Normal file
File diff suppressed because it is too large
68
docs/api/modules/input.md
Normal file
@@ -0,0 +1,68 @@
|
||||
# input
|
||||
|
||||
### mouse_show(show) <sub>function</sub>
|
||||
|
||||
Show or hide the mouse cursor. Pass true to show, false to hide.
|
||||
|
||||
|
||||
|
||||
**show**: Boolean. True to show, false to hide.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### mouse_lock(lock) <sub>function</sub>
|
||||
|
||||
Capture or release the mouse, confining it within the window if locked.
|
||||
|
||||
|
||||
|
||||
**lock**: Boolean. True to lock, false to unlock.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### cursor_set(cursor) <sub>function</sub>
|
||||
|
||||
Set the given cursor (created by os.make_cursor) as the active mouse cursor.
|
||||
|
||||
|
||||
|
||||
**cursor**: The cursor to set.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### keyname(keycode) <sub>function</sub>
|
||||
|
||||
Given a numeric keycode, return the corresponding key name (e.g., from SDL).
|
||||
|
||||
|
||||
|
||||
**keycode**: A numeric SDL keycode.
|
||||
|
||||
|
||||
**Returns**: A string with the key name.
|
||||
|
||||
|
||||
### keymod() <sub>function</sub>
|
||||
|
||||
Return an object describing the current modifier keys, e.g. {shift:true, ctrl:true}.
|
||||
|
||||
|
||||
|
||||
**Returns**: An object with boolean fields for each modifier key.
|
||||
|
||||
|
||||
### mousestate() <sub>function</sub>
|
||||
|
||||
Return an object describing the current mouse state, including x,y coordinates
|
||||
and booleans for pressed buttons (left, middle, right, x1, x2).
|
||||
|
||||
|
||||
|
||||
**Returns**: Object { x, y, left, middle, right, x1, x2 }
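For example, a small polling sketch (assuming the module is in scope as `input`; the box-select idea is just an illustration):

```js
var mouse = input.mousestate();
var mods = input.keymod();

if (mouse.left && mods.shift) {
  // e.g. begin a box-select at the current cursor position.
  var start = [mouse.x, mouse.y];
}
```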
|
||||
|
||||
243
docs/api/modules/io.md
Normal file
@@ -0,0 +1,243 @@
|
||||
# io
|
||||
|
||||
### rm(path) <sub>function</sub>
|
||||
|
||||
Remove the file or empty directory at the given path.
|
||||
|
||||
|
||||
|
||||
**path**: The file or empty directory to remove. Must be empty if a directory.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### mkdir(path) <sub>function</sub>
|
||||
|
||||
Create a directory at the given path.
|
||||
|
||||
|
||||
|
||||
**path**: The directory path to create.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### stat(path) <sub>function</sub>
|
||||
|
||||
Return an object describing file metadata for the given path. The object includes
|
||||
filesize, modtime, createtime, and accesstime. Throw an error if the path does not exist.
|
||||
|
||||
|
||||
|
||||
**path**: The file or directory to retrieve metadata for.
|
||||
|
||||
|
||||
**Returns**: An object with metadata (filesize, modtime, createtime, accesstime).
|
||||
|
||||
|
||||
### globfs(patterns) <sub>function</sub>
|
||||
|
||||
Return an array of files that do not match any of the provided glob patterns. It
|
||||
recursively enumerates the filesystem within PHYSFS. Each pattern is treated as an
|
||||
"ignore" rule, similar to .gitignore usage.
|
||||
|
||||
|
||||
|
||||
**patterns**: An array of glob patterns to ignore. Any file matching one of these is skipped.
|
||||
|
||||
|
||||
**Returns**: An array of the file paths that were not matched by any pattern (i.e. not ignored).
|
||||
|
||||
|
||||
### match(pattern, string) <sub>function</sub>
|
||||
|
||||
Return boolean indicating whether the given wildcard pattern matches the provided
|
||||
string. Dots must match dots. Case is not ignored.
|
||||
|
||||
Patterns can incorporate:
|
||||
'?' - Matches exactly one character (except leading dots or slashes).
|
||||
'*' - Matches zero or more characters (excluding path separators).
|
||||
'**' - Matches zero or more characters, including path separators.
|
||||
'[abc]' - A bracket expression; matches any single character from the set. Ranges like [a-z], [0-9] also work.
|
||||
'[[:alpha:]]' - POSIX character classes can be used inside brackets.
|
||||
'\' - Backslash escapes the next character.
|
||||
'!' - If placed immediately inside brackets (like [!abc]), it negates the set.
|
||||
|
||||
|
||||
|
||||
**pattern**: The wildcard pattern to compare.
|
||||
|
||||
**string**: The string to test against the wildcard pattern.
|
||||
|
||||
|
||||
**Returns**: True if matched, otherwise false.
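A few illustrative calls (a sketch assuming the module is in scope as `io`; file names are made up):

```js
io.match("*.png", "player.png");        // true  - '*' stays within one path segment
io.match("*.png", "art/player.png");    // false - '*' does not cross '/'
io.match("**.png", "art/player.png");   // true  - '**' crosses path separators
io.match("data/[0-9]*.json", "data/3_save.json"); // true - bracket range plus '*'
```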
|
||||
|
||||
|
||||
### exists(path) <sub>function</sub>
|
||||
|
||||
Return a boolean indicating whether the file or directory at the given path exists.
|
||||
|
||||
|
||||
|
||||
**path**: The file or directory path to check.
|
||||
|
||||
|
||||
**Returns**: True if the path exists, otherwise false.
|
||||
|
||||
|
||||
### mount(archiveOrDir, mountPoint) <sub>function</sub>
|
||||
|
||||
Mount a directory or archive at the specified mount point. An undefined mount
|
||||
point mounts to '/'. Throw on error.
|
||||
|
||||
|
||||
|
||||
**archiveOrDir**: The directory or archive to mount.
|
||||
|
||||
**mountPoint**: The path at which to mount. If omitted or undefined, '/' is used.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### unmount(path) <sub>function</sub>
|
||||
|
||||
Unmount a previously mounted directory or archive. Throw on error.
|
||||
|
||||
|
||||
|
||||
**path**: The directory or archive mount point to unmount.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### slurp(path) <sub>function</sub>
|
||||
|
||||
Read the entire file at the given path as a string. Throw on error.
|
||||
|
||||
|
||||
|
||||
**path**: The file path to read from.
|
||||
|
||||
|
||||
**Returns**: A string with the file’s contents.
|
||||
|
||||
|
||||
### slurpbytes(path) <sub>function</sub>
|
||||
|
||||
Read the entire file at the given path as a raw ArrayBuffer. Throw on error.
|
||||
|
||||
|
||||
|
||||
**path**: The file path to read from.
|
||||
|
||||
|
||||
**Returns**: An ArrayBuffer containing the file’s raw bytes.
|
||||
|
||||
|
||||
### slurpwrite(data, path) <sub>function</sub>
|
||||
|
||||
Write data (string or ArrayBuffer) to the given file path. Overwrite if it exists.
|
||||
Throw on error.
|
||||
|
||||
|
||||
|
||||
**data**: The data to write (string or ArrayBuffer).
|
||||
|
||||
**path**: The file path to write to.
|
||||
|
||||
|
||||
**Returns**: None
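A minimal read/modify/write sketch (assuming the io and json modules are in scope as `io` and `json`; 'settings.json' is a hypothetical file in the mounted filesystem):

```js
var text = io.slurp("settings.json");       // whole file as a string
var cfg = json.decode(text);                // json module is documented below
cfg.fullscreen = true;                      // hypothetical setting
io.slurpwrite(json.encode(cfg, true), "settings.json");
```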
|
||||
|
||||
|
||||
### writepath(path) <sub>function</sub>
|
||||
|
||||
Set the write directory. Subsequent writes will go here by default. Throw on error.
|
||||
|
||||
|
||||
|
||||
**path**: The directory path to set as writable.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### basedir() <sub>function</sub>
|
||||
|
||||
Return the application's base directory (where the executable is located).
|
||||
|
||||
|
||||
|
||||
**Returns**: A string with the base directory path.
|
||||
|
||||
|
||||
### prefdir(org, app) <sub>function</sub>
|
||||
|
||||
Get the user-and-app-specific path where files can be written.
|
||||
|
||||
|
||||
|
||||
**org**: The name of your organization.
|
||||
|
||||
**app**: The name of your application.
|
||||
|
||||
|
||||
**Returns**: A string with the user's directory path.
|
||||
|
||||
|
||||
### realdir(path) <sub>function</sub>
|
||||
|
||||
Return the actual, real directory (on the host filesystem) that contains the given
|
||||
file path. Return undefined if not found.
|
||||
|
||||
|
||||
|
||||
**path**: The file path whose real directory is requested.
|
||||
|
||||
|
||||
**Returns**: A string with the real directory path, or undefined.
|
||||
|
||||
|
||||
### open(path) <sub>function</sub>
|
||||
|
||||
Open a file for writing, returning a file object that can be used for further
|
||||
operations. Throw on error.
|
||||
|
||||
|
||||
|
||||
**path**: The file path to open for writing.
|
||||
|
||||
|
||||
**Returns**: A file object for subsequent write operations.
|
||||
|
||||
|
||||
### searchpath() <sub>function</sub>
|
||||
|
||||
Return an array of all directories in the current search path.
|
||||
|
||||
|
||||
|
||||
**Returns**: An array of directory paths in the search path.
|
||||
|
||||
|
||||
### enumerate(path, recurse) <sub>function</sub>
|
||||
|
||||
Return an array of files within the given directory, optionally recursing into
|
||||
subdirectories.
|
||||
|
||||
|
||||
|
||||
**path**: The directory to list.
|
||||
|
||||
**recurse**: Whether to recursively include subdirectories (true or false).
|
||||
|
||||
|
||||
**Returns**: An array of file (and directory) paths found.
|
||||
|
||||
|
||||
### mount_core() <sub>function</sub>
|
||||
|
||||
### is_directory() <sub>function</sub>
|
||||
175
docs/api/modules/js.md
Normal file
@@ -0,0 +1,175 @@
|
||||
# js
|
||||
|
||||
|
||||
Provides functions for introspecting and configuring the QuickJS runtime engine.
|
||||
Includes debug info, memory usage, GC controls, code evaluation, etc.
|
||||
|
||||
|
||||
### cycle_hook(callback) <sub>function</sub>


Register or remove a hook function that QuickJS calls once per execution cycle. If the callback
is set, it receives a single argument (an optional object/value describing the cycle). If callback
is undefined, the hook is removed.


**callback**: A function to call each time QuickJS completes a "cycle" (internal VM loop), or undefined to remove the callback.


**Returns**: None
|
||||
|
||||
|
||||
### dump_shapes() <sub>function</sub>
|
||||
|
||||
|
||||
Use this for internal debugging of object shapes.
|
||||
|
||||
|
||||
**Returns**: A debug string describing the internal shape hierarchy used by QuickJS.
|
||||
|
||||
|
||||
### dump_atoms() <sub>function</sub>


**Returns**: A debug string listing all currently registered atoms (internal property keys/symbols) known by QuickJS. Helpful for diagnosing memory usage or potential key collisions.
|
||||
|
||||
|
||||
### dump_class() <sub>function</sub>
|
||||
|
||||
|
||||
Shows how many objects of each class exist, useful for advanced memory or performance profiling.
|
||||
|
||||
|
||||
**Returns**: A debug string describing the distribution of JS object classes in the QuickJS runtime.
|
||||
|
||||
|
||||
### dump_objects() <sub>function</sub>


**Returns**: A debug string listing certain internal QuickJS objects and their references, useful for debugging memory leaks or object lifetimes.
|
||||
|
||||
|
||||
### dump_type_overheads() <sub>function</sub>
|
||||
|
||||
|
||||
Displays memory usage breakdown for different internal object types.
|
||||
|
||||
|
||||
**Returns**: A debug string describing the overheads for various JS object types in QuickJS.
|
||||
|
||||
|
||||
### stack_info() <sub>function</sub>
|
||||
|
||||
|
||||
Internal debugging utility to examine call stack details.
|
||||
|
||||
|
||||
**Returns**: An object or string describing the runtime's current stack usage and capacity.
|
||||
|
||||
|
||||
### calc_mem(value) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
Compute the approximate size of a single JS value in memory. This is a best-effort estimate.
|
||||
|
||||
|
||||
**value**: A JavaScript value to analyze.
|
||||
|
||||
|
||||
**Returns**: Approximate memory usage (in bytes) of that single value.
|
||||
|
||||
|
||||
### mem() <sub>function</sub>


Retrieve an overview of the runtime's memory usage.


**Returns**: An object containing a comprehensive snapshot of memory usage for the current QuickJS runtime, including total allocated bytes, object counts, and more.
|
||||
|
||||
|
||||
### mem_limit(bytes) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
Set the upper memory limit for the QuickJS runtime. Exceeding this limit may cause operations to
|
||||
fail or throw errors.
|
||||
|
||||
|
||||
**bytes**: The maximum memory (in bytes) QuickJS is allowed to use.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### gc_threshold(bytes) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
Set the threshold (in bytes) for QuickJS to perform an automatic GC pass when memory usage surpasses it.
|
||||
|
||||
|
||||
**bytes**: The threshold (in bytes) at which the engine triggers automatic garbage collection.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### max_stacksize(bytes) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
Set the maximum stack size for QuickJS. If exceeded, the runtime may throw a stack overflow error.
|
||||
|
||||
|
||||
**bytes**: The maximum allowed stack size (in bytes) for QuickJS.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### memstate() <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
Gives a quick overview of the memory usage, including malloc size and other allocations.
|
||||
|
||||
|
||||
**Returns**: A simpler memory usage object (malloc sizes, etc.) for the QuickJS runtime.
|
||||
|
||||
|
||||
### gc() <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
Force an immediate, full garbage collection pass, reclaiming unreachable memory.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### eval(src, filename) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
Execute a string of JavaScript code in the current QuickJS context.
|
||||
|
||||
|
||||
**src**: A string of JavaScript source code to evaluate.
|
||||
|
||||
**filename**: (Optional) A string for the filename or label, used in debugging or stack traces.
|
||||
|
||||
|
||||
**Returns**: The result of evaluating the given source code.
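A small sketch combining a few of these calls (assuming the module is in scope as `js`):

```js
var result = js.eval("1 + 2 * 3", "inline-snippet.js"); // 7; the filename is only a debug label
js.gc();                                                // force a full collection afterwards
var usage = js.mem();                                   // snapshot of current memory usage
```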
|
||||
|
||||
15
docs/api/modules/json.md
Normal file
@@ -0,0 +1,15 @@
|
||||
# json
|
||||
|
||||
### encode(val,space,replacer,whitelist) <sub>function</sub>
|
||||
|
||||
Produce a JSON text from a JavaScript object. If a record value, at any level, contains a json() method, it will be called, and the value it returns (usually a simpler record) will be JSONified.
|
||||
|
||||
If the record does not have a json() method, and if whitelist is a record, then only the keys that are associated with true in the whitelist are included.
|
||||
|
||||
If the space input is true, then line breaks and extra whitespace will be included in the text.
|
||||
|
||||
### decode(text,reviver) <sub>function</sub>
|
||||
|
||||
The given text is parsed, and the resulting value (usually a record or an array) is returned.
|
||||
|
||||
The optional reviver input is a method that will be called for every key and value at every level of the result. Each value will be replaced by the result of the reviver function. This can be used to reform data-only records into method-bearing records, or to transform date strings into seconds.
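For example, a round-trip sketch (assuming the module is in scope as `json`; the data is arbitrary):

```js
var save = { name: "slot1", score: 42, secret: "internal" };

// Pretty-print, keeping only whitelisted keys.
var text = json.encode(save, true, undefined, { name: true, score: true });

// Decode with a reviver that doubles every numeric value.
var back = json.decode(text, function (key, value) {
  return typeof value === "number" ? value * 2 : value;
});
```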
|
||||
3
docs/api/modules/loop.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# loop
|
||||
|
||||
### step() <sub>function</sub>
|
||||
115
docs/api/modules/math.md
Normal file
@@ -0,0 +1,115 @@
|
||||
# math
|
||||
|
||||
### dot() <sub>function</sub>
|
||||
|
||||
Compute the dot product between two numeric arrays, returning a scalar. Extra elements are ignored.
|
||||
|
||||
### project() <sub>function</sub>
|
||||
|
||||
Project one vector onto another, returning a new array of the same dimension.
|
||||
|
||||
### rotate() <sub>function</sub>
|
||||
|
||||
Rotate a 2D point (or array of length 2) by the given angle (in turns) around an optional pivot.
|
||||
|
||||
### midpoint() <sub>function</sub>
|
||||
|
||||
Compute the midpoint of two arrays of numbers. Only the first two entries are used if 2D is intended.
|
||||
|
||||
### reflect() <sub>function</sub>
|
||||
|
||||
Reflect a vector across a plane normal. Both arguments must be numeric arrays.
|
||||
|
||||
### distance() <sub>function</sub>
|
||||
|
||||
Compute the Euclidean distance between two numeric arrays of matching length.
|
||||
|
||||
### direction() <sub>function</sub>
|
||||
|
||||
Compute the normalized direction vector from the first array to the second.
|
||||
|
||||
### angle() <sub>function</sub>
|
||||
|
||||
Given a 2D vector, return its angle from the X-axis in radians or some chosen units.
|
||||
|
||||
### norm() <sub>function</sub>
|
||||
|
||||
Return a normalized copy of the given numeric array. For 2D/3D/4D or arbitrary length.
|
||||
|
||||
### angle_between() <sub>function</sub>
|
||||
|
||||
Compute the angle between two vectors (2D/3D/4D).
|
||||
|
||||
### lerp() <sub>function</sub>
|
||||
|
||||
Linear interpolation between two numbers: lerp(a, b, t).
|
||||
|
||||
### gcd() <sub>function</sub>
|
||||
|
||||
Compute the greatest common divisor of two integers.
|
||||
|
||||
### lcm() <sub>function</sub>
|
||||
|
||||
Compute the least common multiple of two integers.
|
||||
|
||||
### clamp() <sub>function</sub>
|
||||
|
||||
Clamp a number between low and high. clamp(value, low, high).
|
||||
|
||||
### angledist() <sub>function</sub>
|
||||
|
||||
Compute the signed distance between two angles in 'turn' units, e.g. 0..1 range.
|
||||
|
||||
### jitter() <sub>function</sub>
|
||||
|
||||
Apply a random +/- percentage noise to a number. Example: jitter(100, 0.05) -> ~95..105.
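A few of these scalar helpers in use (a sketch assuming the module is in scope as `math`):

```js
math.lerp(0, 10, 0.25);    // 2.5 - a quarter of the way from 0 to 10
math.clamp(17, 0, 10);     // 10  - clamped to the [0, 10] range
math.jitter(100, 0.05);    // roughly 95..105, +/- 5% random noise
math.angledist(0.9, 0.1);  // signed distance between two angles, in turns
```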
|
||||
|
||||
### mean() <sub>function</sub>
|
||||
|
||||
Compute the arithmetic mean of an array of numbers.
|
||||
|
||||
### sum() <sub>function</sub>
|
||||
|
||||
Sum all elements of an array of numbers.
|
||||
|
||||
### sigma() <sub>function</sub>
|
||||
|
||||
Compute standard deviation of an array of numbers.
|
||||
|
||||
### median() <sub>function</sub>
|
||||
|
||||
Compute the median of an array of numbers.
|
||||
|
||||
### length() <sub>function</sub>
|
||||
|
||||
Return the length of a vector (i.e. sqrt of sum of squares).
|
||||
|
||||
### from_to() <sub>function</sub>
|
||||
|
||||
Return an array of points from a start to an end, spaced out by a certain distance.
|
||||
|
||||
### rand() <sub>function</sub>
|
||||
|
||||
Return a random float in [0,1).
|
||||
|
||||
### randi() <sub>function</sub>
|
||||
|
||||
Return a random 32-bit integer.
|
||||
|
||||
### srand() <sub>function</sub>
|
||||
|
||||
Seed the random number generator with the given integer, or with current time if none.
|
||||
|
||||
### TAU <sub>number</sub>
|
||||
|
||||
### deg2rad(deg) <sub>function</sub>
|
||||
|
||||
### rad2deg(rad) <sub>function</sub>
|
||||
|
||||
### turn2rad(x) <sub>function</sub>
|
||||
|
||||
### rad2turn(x) <sub>function</sub>
|
||||
|
||||
### turn2deg(x) <sub>function</sub>
|
||||
|
||||
### deg2turn(x) <sub>function</sub>
|
||||
27
docs/api/modules/miniz.md
Normal file
@@ -0,0 +1,27 @@
|
||||
# miniz
|
||||
|
||||
### read(data) <sub>function</sub>
|
||||
|
||||
Create a zip reader from the given ArrayBuffer containing an entire ZIP archive.
|
||||
Return undefined if the data is invalid.
|
||||
|
||||
|
||||
|
||||
**data**: An ArrayBuffer with the entire ZIP file.
|
||||
|
||||
|
||||
**Returns**: A 'zip reader' object with methods for reading from the archive (mod, exists, slurp).
|
||||
|
||||
|
||||
### write(path) <sub>function</sub>
|
||||
|
||||
Create a zip writer that writes to the specified file path. Overwrites the file if
|
||||
it already exists. Return undefined on error.
|
||||
|
||||
|
||||
|
||||
**path**: The file path where the ZIP archive will be written.
|
||||
|
||||
|
||||
**Returns**: A 'zip writer' object with methods for adding files to the archive (add_file).
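A minimal write-then-read sketch (assuming the miniz and io modules are in scope; the file names and the exact add_file argument shape are assumptions, not confirmed by these docs):

```js
var writer = miniz.write("bundle.zip");
writer.add_file("readme.txt");           // exact add_file arguments are an assumption here

var reader = miniz.read(io.slurpbytes("bundle.zip"));
if (reader && reader.exists("readme.txt"))
  var contents = reader.slurp("readme.txt");
```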
|
||||
|
||||
30
docs/api/modules/nota.md
Normal file
@@ -0,0 +1,30 @@
|
||||
# nota
|
||||
|
||||
### encode(value) <sub>function</sub>
|
||||
|
||||
Convert a JavaScript value into a NOTA-encoded ArrayBuffer.
|
||||
|
||||
This function serializes JavaScript values (such as numbers, strings, booleans, arrays, objects, or ArrayBuffers) into the NOTA binary format. The resulting ArrayBuffer can be stored or transmitted and later decoded back into a JavaScript value.
|
||||
|
||||
:throws: An error if no argument is provided.
|
||||
|
||||
|
||||
**value**: The JavaScript value to encode (e.g., number, string, boolean, array, object, or ArrayBuffer).
|
||||
|
||||
|
||||
**Returns**: An ArrayBuffer containing the NOTA-encoded data.
|
||||
|
||||
|
||||
### decode(buffer) <sub>function</sub>
|
||||
|
||||
Decode a NOTA-encoded ArrayBuffer into a JavaScript value.
|
||||
|
||||
This function deserializes a NOTA-formatted ArrayBuffer into its corresponding JavaScript representation, such as a number, string, boolean, array, object, or ArrayBuffer. If the input is invalid or empty, it returns undefined.
|
||||
|
||||
|
||||
|
||||
**buffer**: An ArrayBuffer containing NOTA-encoded data to decode.
|
||||
|
||||
|
||||
**Returns**: The decoded JavaScript value (e.g., number, string, boolean, array, object, or ArrayBuffer), or undefined if no argument is provided.
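A round-trip sketch (assuming the module is in scope as `nota`; the packet contents are arbitrary):

```js
var packet = { cmd: "move", pos: [3, 4], urgent: true };

var buf = nota.encode(packet);   // ArrayBuffer in the NOTA binary format
var copy = nota.decode(buf);     // structurally equal to `packet`
```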
|
||||
|
||||
101
docs/api/modules/os.md
Normal file
@@ -0,0 +1,101 @@
|
||||
# os
|
||||
|
||||
### make_transform() <sub>function</sub>
|
||||
|
||||
Create a new transform object that can be used for 2D/3D positioning, scaling, and rotation.
|
||||
|
||||
### clean_transforms() <sub>function</sub>
|
||||
|
||||
Force an update on all transforms to remove dangling references or perform house-keeping.
|
||||
|
||||
### platform() <sub>function</sub>
|
||||
|
||||
Return a string with the underlying platform name, like 'Windows', 'Linux', or 'macOS'.
|
||||
|
||||
### arch() <sub>function</sub>
|
||||
|
||||
Return the CPU architecture string for this system (e.g. 'x64', 'arm64').
|
||||
|
||||
### totalmem() <sub>function</sub>
|
||||
|
||||
Return the total system RAM in bytes.
|
||||
|
||||
### freemem() <sub>function</sub>
|
||||
|
||||
Return the amount of free system RAM in bytes, if known.
|
||||
|
||||
### hostname() <sub>function</sub>
|
||||
|
||||
Return the system's hostname, or an empty string if not available.
|
||||
|
||||
### version() <sub>function</sub>
|
||||
|
||||
Return the OS or kernel version string, if the platform provides it.
|
||||
|
||||
### kill() <sub>function</sub>
|
||||
|
||||
Send a signal (e.g., 'SIGINT', 'SIGTERM', etc.) to the current process.
|
||||
|
||||
### exit() <sub>function</sub>
|
||||
|
||||
Exit the application with the specified exit code.
|
||||
|
||||
### now() <sub>function</sub>
|
||||
|
||||
Return current time (in seconds as a float) with high resolution.
|
||||
|
||||
### openurl() <sub>function</sub>
|
||||
|
||||
Open the provided URL in the default web browser, if possible.
|
||||
|
||||
### make_timer() <sub>function</sub>
|
||||
|
||||
Create a new timer object that will call a specified function after a certain delay.
|
||||
|
||||
### update_timers() <sub>function</sub>
|
||||
|
||||
Advance all timers by the provided time delta (in seconds).
|
||||
|
||||
### sleep() <sub>function</sub>
|
||||
|
||||
Block execution for the specified number of seconds.
|
||||
|
||||
### battery_pct() <sub>function</sub>
|
||||
|
||||
Return the battery level (percentage) or negative if unknown.
|
||||
|
||||
### battery_voltage() <sub>function</sub>
|
||||
|
||||
Return the current battery voltage in volts, if available.
|
||||
|
||||
### battery_seconds() <sub>function</sub>
|
||||
|
||||
Return the estimated remaining battery time in seconds, or negative if unknown.
|
||||
|
||||
### power_state() <sub>function</sub>
|
||||
|
||||
Return a string describing power status: 'on battery', 'charging', 'charged', etc.
|
||||
|
||||
### on() <sub>function</sub>
|
||||
|
||||
Register a global callback for certain engine-wide or system-level events.
|
||||
|
||||
### rt_info() <sub>function</sub>
|
||||
|
||||
Return internal QuickJS runtime info, such as object counts.
|
||||
|
||||
### rusage() <sub>function</sub>
|
||||
|
||||
Return resource usage stats for this process, if the platform supports it.
|
||||
|
||||
### mallinfo() <sub>function</sub>
|
||||
|
||||
Return detailed memory allocation info (arena size, free blocks, etc.) on some platforms.
|
||||
|
||||
### env() <sub>function</sub>
|
||||
|
||||
Fetch the value of a given environment variable, or undefined if it doesn't exist.
|
||||
|
||||
### system() <sub>function</sub>
|
||||
|
||||
Execute a shell command using the system() call. Returns the command's exit code.
|
||||
44
docs/api/modules/packer.md
Normal file
@@ -0,0 +1,44 @@
|
||||
# packer
|
||||
|
||||
### getAllFiles(dir) <sub>function</sub>
|
||||
|
||||
|
||||
Return a list of all files in the given directory that are not matched by .prosperonignore,
|
||||
skipping directories.
|
||||
|
||||
|
||||
|
||||
**dir**: The directory to search.
|
||||
|
||||
|
||||
**Returns**: An array of file paths found.
|
||||
|
||||
|
||||
### gatherStats(filePaths) <sub>function</sub>
|
||||
|
||||
|
||||
Analyze a list of files and categorize them as modules, programs, images, or other.
|
||||
|
||||
|
||||
|
||||
**filePaths**: An array of file paths to analyze.
|
||||
|
||||
|
||||
**Returns**: An object { modules, programs, images, other, total } with counts.
|
||||
|
||||
|
||||
### pack(dir, outPath) <sub>function</sub>
|
||||
|
||||
|
||||
Create a ZIP archive of all files (skipping those matched by .prosperonignore) in the
|
||||
specified directory and write it to outPath. This uses the miniz module.
|
||||
|
||||
|
||||
|
||||
**dir**: The directory to zip.
|
||||
|
||||
**outPath**: The path (including filename) for the resulting ZIP file.
|
||||
|
||||
|
||||
**Returns**: None (synchronous). Throws an Error if the directory does not exist.
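A small sketch of the whole flow (assuming the module is in scope as `packer`; the directory and output names are hypothetical):

```js
var files = packer.getAllFiles("game");
var stats = packer.gatherStats(files);   // { modules, programs, images, other, total }

packer.pack("game", "dist/game.zip");    // throws if the directory does not exist
```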
|
||||
|
||||
96
docs/api/modules/render.md
Normal file
@@ -0,0 +1,96 @@
|
||||
# render
|
||||
|
||||
### _main <sub>object</sub>
|
||||
|
||||
A handle for low-level GPU operations via SDL GPU. Freed on GC.
|
||||
|
||||
|
||||
### device <sub>object</sub>
|
||||
|
||||
### stencil_writer(...args) <sub>function</sub>
|
||||
|
||||
### fillmask(ref) <sub>function</sub>
|
||||
|
||||
Draw a fullscreen shape using a 'screenfill' shader to populate the stencil buffer with a given reference.
|
||||
|
||||
|
||||
|
||||
**ref**: The stencil reference value to write.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### mask(image, pos, scale, rotation, ref) <sub>function</sub>
|
||||
|
||||
Draw an image to the stencil buffer, marking its area with a specified reference value.
|
||||
|
||||
|
||||
|
||||
**image**: A texture or string path (which is converted to a texture).
|
||||
|
||||
**pos**: The translation (x, y) for the image placement.
|
||||
|
||||
**scale**: Optional scaling applied to the texture.
|
||||
|
||||
**rotation**: Optional rotation in radians (unused by default).
|
||||
|
||||
**ref**: The stencil reference value to write.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### viewport(rect) <sub>function</sub>
|
||||
|
||||
Set the GPU viewport to the specified rectangle.
|
||||
|
||||
|
||||
|
||||
**rect**: A rectangle [x, y, width, height].
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### scissor(rect) <sub>function</sub>
|
||||
|
||||
Set the GPU scissor region to the specified rectangle (alias of render.viewport).
|
||||
|
||||
|
||||
|
||||
**rect**: A rectangle [x, y, width, height].
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### queue(cmd) <sub>function</sub>
|
||||
|
||||
Enqueue one or more draw commands. These commands are batched until render_camera is called.
|
||||
|
||||
|
||||
|
||||
**cmd**: Either a single command object or an array of command objects.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### setup_draw() <sub>function</sub>
|
||||
|
||||
Switch the current queue to the primary scene render queue, then invoke 'prosperon.draw' if defined.
|
||||
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### setup_hud() <sub>function</sub>
|
||||
|
||||
Switch the current queue to the HUD render queue, then invoke 'prosperon.hud' if defined.
|
||||
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
68
docs/api/modules/resources.md
Normal file
@@ -0,0 +1,68 @@
|
||||
# resources
|
||||
|
||||
### scripts <sub>object</sub>
|
||||
|
||||
### images <sub>object</sub>
|
||||
|
||||
### sounds <sub>object</sub>
|
||||
|
||||
### fonts <sub>object</sub>
|
||||
|
||||
### lib <sub>object</sub>
|
||||
|
||||
### canonical(file) <sub>function</sub>
|
||||
|
||||
### find_image(...args) <sub>function</sub>
|
||||
|
||||
### find_sound(...args) <sub>function</sub>
|
||||
|
||||
### find_script(...args) <sub>function</sub>
|
||||
|
||||
### find_font(...args) <sub>function</sub>
|
||||
|
||||
### getAllFiles(dir) <sub>function</sub>
|
||||
|
||||
|
||||
Return a list of recognized files in the given directory that are not matched by
|
||||
.prosperonignore, skipping directories. Recognized extensions include scripts,
|
||||
images, sounds, fonts, and libs.
|
||||
|
||||
|
||||
|
||||
**dir**: The directory to search.
|
||||
|
||||
|
||||
**Returns**: An array of recognized file paths.
|
||||
|
||||
|
||||
### gatherStats(filePaths) <sub>function</sub>
|
||||
|
||||
|
||||
Analyze a list of recognized files and categorize them by scripts, images, sounds,
|
||||
fonts, libs, or other. Return a stats object with these counts and the total.
|
||||
|
||||
|
||||
|
||||
**filePaths**: An array of file paths to analyze.
|
||||
|
||||
|
||||
**Returns**: { scripts, images, sounds, fonts, lib, other, total }
|
||||
|
||||
|
||||
### pack(dir, outPath) <sub>function</sub>
|
||||
|
||||
|
||||
Create a ZIP archive of all recognized files (skipping those matched by .prosperonignore)
|
||||
in the specified directory and write it to outPath. Recognized extensions are scripts,
|
||||
images, sounds, fonts, or libs.
|
||||
|
||||
:raises Error: If the directory does not exist.
|
||||
|
||||
|
||||
**dir**: The directory to zip.
|
||||
|
||||
**outPath**: The path (including filename) for the resulting ZIP file.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
11
docs/api/modules/sound.md
Normal file
@@ -0,0 +1,11 @@
|
||||
# sound
|
||||
|
||||
### undefined <sub>string</sub>
|
||||
|
||||
### pcm(file) <sub>function</sub>
|
||||
|
||||
### play(file) <sub>function</sub>
|
||||
|
||||
### cry(file) <sub>function</sub>
|
||||
|
||||
### music(file, fade = 0.5) <sub>function</sub>
|
||||
9
docs/api/modules/spline.md
Normal file
@@ -0,0 +1,9 @@
|
||||
# spline
|
||||
|
||||
### catmull() <sub>function</sub>
|
||||
|
||||
Perform Catmull-Rom spline sampling on an array of 2D points, returning an array of samples.
|
||||
|
||||
### bezier() <sub>function</sub>
|
||||
|
||||
Perform a Bezier spline (or catmull) sampling on 2D points, returning an array of sampled points.
|
||||
103
docs/api/modules/time.md
Normal file
@@ -0,0 +1,103 @@
|
||||
# time
|
||||
|
||||
The main time object, handling date/time utilities in earth-seconds.
|
||||
|
||||
### now() <sub>function</sub>
|
||||
|
||||
Return the current system time in seconds (implemented in C extension).
|
||||
|
||||
### computer_dst() <sub>function</sub>
|
||||
|
||||
Return true if local system time is currently in DST (implemented in C extension).
|
||||
|
||||
### computer_zone() <sub>function</sub>
|
||||
|
||||
Return local time zone offset from UTC in hours (implemented in C extension).
|
||||
|
||||
### second <sub>number</sub>
|
||||
|
||||
Number of seconds in a (real) second (always 1).
|
||||
|
||||
### minute <sub>number</sub>
|
||||
|
||||
Number of seconds in a minute (60).
|
||||
|
||||
### hour <sub>number</sub>
|
||||
|
||||
Number of seconds in an hour (3600).
|
||||
|
||||
### day <sub>number</sub>
|
||||
|
||||
Number of seconds in a day (86400).
|
||||
|
||||
### week <sub>number</sub>
|
||||
|
||||
Number of seconds in a week (604800).
|
||||
|
||||
### weekdays <sub>object</sub>
|
||||
|
||||
Names of the days of the week, Sunday through Saturday.
|
||||
|
||||
### monthstr <sub>object</sub>
|
||||
|
||||
Full names of the months of the year, January through December.
|
||||
|
||||
### epoch <sub>number</sub>
|
||||
|
||||
Base epoch year, from which day 0 is calculated (default 1970).
|
||||
|
||||
### hour2minute() <sub>function</sub>
|
||||
|
||||
Return the ratio of hour to minute in seconds, e.g. 3600 / 60 => 60.
|
||||
|
||||
### day2hour() <sub>function</sub>
|
||||
|
||||
Return the ratio of day to hour in seconds, e.g. 86400 / 3600 => 24.
|
||||
|
||||
### minute2second() <sub>function</sub>
|
||||
|
||||
Return the ratio of minute to second in seconds, e.g. 60 / 1 => 60.
|
||||
|
||||
### week2day() <sub>function</sub>
|
||||
|
||||
Return the ratio of week to day in seconds, e.g. 604800 / 86400 => 7.
|
||||
|
||||
### strparse <sub>object</sub>
|
||||
|
||||
Mapping of format tokens (yyyy, mm, dd, etc.) to time fields (year, month, day...).
|
||||
|
||||
### isleap(year) <sub>function</sub>
|
||||
|
||||
Return true if a given year is leap, based on whether it has 366 days.
|
||||
|
||||
### yearsize(y) <sub>function</sub>
|
||||
|
||||
Given a year, return 365 or 366 depending on leap-year rules.
|
||||
|
||||
### timecode(t, fps = 24) <sub>function</sub>
|
||||
|
||||
Convert seconds into a "S:frames" timecode string, with optional FPS (default 24).
|
||||
|
||||
### monthdays <sub>object</sub>
|
||||
|
||||
An array of days in each month for a non-leap year.
|
||||
|
||||
### zones <sub>object</sub>
|
||||
|
||||
Table of recognized time zone abbreviations, with offsets (e.g., "-12" -> "IDLW").
|
||||
|
||||
### record(num, zone = this.computer_zone()) <sub>function</sub>
|
||||
|
||||
Convert a timestamp (in seconds) into a record with fields like day, month, year, etc.
|
||||
|
||||
### number(rec) <sub>function</sub>
|
||||
|
||||
Convert a record back into a numeric timestamp (seconds).
|
||||
|
||||
### fmt <sub>string</sub>
|
||||
|
||||
Default format string for time.text(), containing tokens like 'yyyy', 'dd', 'hh', etc.
|
||||
|
||||
### text(num, fmt = this.fmt, zone) <sub>function</sub>
|
||||
|
||||
Format a numeric or record time into a string using a format pattern, e.g. 'hh:nn:ss'.
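A short sketch tying these together (assuming the module is in scope as `time`; the format tokens shown are the ones referenced above):

```js
var now = time.now();                             // seconds since the epoch
var stamp = time.text(now, "yyyy-mm-dd hh:nn:ss");

var rec = time.record(now);                       // record with year, month, day, etc.
var roundtrip = time.number(rec);                 // back to a numeric timestamp
```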
|
||||
58
docs/api/modules/tween.md
Normal file
@@ -0,0 +1,58 @@
|
||||
# tween
|
||||
|
||||
### Tween <sub>object</sub>
|
||||
|
||||
|
||||
An object providing methods to create and control tweens with additional features
|
||||
like looping, custom easing, multiple stages, etc.
|
||||
|
||||
Properties:
|
||||
- default: A template object with loop/time/ease/whole/cb properties.
|
||||
Methods:
|
||||
- start(obj, target, tvals, options): Create a tween over multiple target values.
|
||||
- make: Alias of start.
|
||||
|
||||
|
||||
### Ease <sub>object</sub>
|
||||
|
||||
|
||||
This object provides multiple easing functions that remap a 0..1 input to produce
|
||||
a smoothed or non-linear output. They can be used standalone or inside tweens.
|
||||
|
||||
Available functions:
|
||||
- linear(t)
|
||||
- in(t), out(t), inout(t)
|
||||
- quad.in, quad.out, quad.inout
|
||||
- cubic.in, cubic.out, cubic.inout
|
||||
- quart.in, quart.out, quart.inout
|
||||
- quint.in, quint.out, quint.inout
|
||||
- expo.in, expo.out, expo.inout
|
||||
- bounce.in, bounce.out, bounce.inout
|
||||
- sine.in, sine.out, sine.inout
|
||||
- elastic.in, elastic.out, elastic.inout
|
||||
|
||||
All easing functions expect t in [0..1] and return a remapped value in [0..1].
|
||||
|
||||
|
||||
### tween(from, to, time, fn, cb) <sub>function</sub>
|
||||
|
||||
|
||||
|
||||
Creates a simple tween that linearly interpolates from "from" to "to" over "time"
|
||||
and calls "fn" with each interpolated value. Once finished, "fn" is called with "to",
|
||||
then "cb" is invoked if provided, and the tween is cleaned up.
|
||||
|
||||
|
||||
**from**: The starting object or value to interpolate from.
|
||||
|
||||
**to**: The ending object or value to interpolate to.
|
||||
|
||||
**time**: The total duration of the tween in milliseconds or some time unit.
|
||||
|
||||
**fn**: A callback function that receives the interpolated value at each update.
|
||||
|
||||
**cb**: (Optional) A callback invoked once the tween completes.
|
||||
|
||||
|
||||
**Returns**: A function that, when called, cleans up and stops the tween.
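A minimal sketch (the `sprite` object here is hypothetical, and the time unit is whatever the engine uses for tweens):

```js
var stop = tween(0, 1, 0.5, function (v) {
  sprite.color = [1, 1, 1, v];     // fade a hypothetical sprite's alpha in
}, function () {
  // called once when the tween finishes
});

// Calling the returned function cancels the tween early:
// stop();
```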
|
||||
|
||||
192
docs/api/modules/util.md
Normal file
@@ -0,0 +1,192 @@
|
||||
# util
|
||||
|
||||
|
||||
A collection of general-purpose utility functions for object manipulation, merging,
|
||||
deep copying, safe property access, etc.
|
||||
|
||||
|
||||
### guid() <sub>function</sub>
|
||||
|
||||
|
||||
Return a random 32-character hexadecimal UUID-like string (not guaranteed RFC4122-compliant).
|
||||
|
||||
|
||||
**Returns**: A random 32-character string (hex).
|
||||
|
||||
|
||||
### insertion_sort(arr, cmp) <sub>function</sub>
|
||||
|
||||
|
||||
In-place insertion sort of an array using cmp(a,b)->Number for ordering.
|
||||
|
||||
|
||||
**arr**: The array to be sorted in-place.
|
||||
|
||||
**cmp**: Comparison function cmp(a,b)->Number.
|
||||
|
||||
|
||||
**Returns**: The same array, sorted in-place.
|
||||
|
||||
|
||||
### deepfreeze(obj) <sub>function</sub>
|
||||
|
||||
|
||||
Recursively freeze an object and all of its nested objects so they cannot be modified.
|
||||
|
||||
|
||||
**obj**: The object to recursively freeze.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### dainty_assign(target, source) <sub>function</sub>
|
||||
|
||||
|
||||
Copy non-function properties from source into matching keys of target without overwriting
|
||||
keys that don't exist in target. Arrays are deep-copied, and objects are recursively assigned.
|
||||
|
||||
|
||||
**target**: The target object whose keys may be updated.
|
||||
|
||||
**source**: The source object containing new values.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### get(obj, path, defValue) <sub>function</sub>
|
||||
|
||||
|
||||
Safely retrieve a nested property from obj at path (array or dot-string).
|
||||
Returns defValue if the property is undefined.
|
||||
|
||||
|
||||
**obj**: The object to traverse.
|
||||
|
||||
**path**: A string like "a.b.c" or an array of path segments.
|
||||
|
||||
**defValue**: The default value if the property is undefined.
|
||||
|
||||
|
||||
**Returns**: The nested property or defValue.
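For example (a sketch assuming the module is in scope as `util`; the options object is arbitrary):

```js
var opts = { video: { mode: { width: 1280 } } };

util.get(opts, "video.mode.width", 640);     // 1280
util.get(opts, "video.mode.height", 720);    // 720 - falls back to the default
util.get(opts, ["video", "mode", "width"]);  // array paths work too
```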
|
||||
|
||||
|
||||
### isEmpty(o) <sub>function</sub>
|
||||
|
||||
|
||||
Return true if the object has no own properties, otherwise false.
|
||||
|
||||
|
||||
**o**: The object to check.
|
||||
|
||||
|
||||
**Returns**: Boolean indicating if the object is empty.
|
||||
|
||||
|
||||
### dig(obj, path, def) <sub>function</sub>
|
||||
|
||||
|
||||
Ensure a nested path of objects exists inside obj; create objects if missing, and set
|
||||
the final path component to def.
|
||||
|
||||
|
||||
**obj**: The root object to modify.
|
||||
|
||||
**path**: A dot-string specifying nested objects to create.
|
||||
|
||||
**def**: The value to store in the final path component, default {}.
|
||||
|
||||
|
||||
**Returns**: The assigned final value.
|
||||
|
||||
|
||||
### access(obj, name) <sub>function</sub>
|
||||
|
||||
|
||||
Traverse obj by dot-separated path name, returning the final value or undefined
|
||||
if any step is missing.
|
||||
|
||||
|
||||
**obj**: The object to traverse.
|
||||
|
||||
**name**: A dot-string path (e.g. "foo.bar.baz").
|
||||
|
||||
|
||||
**Returns**: The value at that path, or undefined if missing.
|
||||
|
||||
|
||||
### mergekey(o1, o2, k) <sub>function</sub>
|
||||
|
||||
|
||||
Helper for merge, updating key k from o2 into o1. Arrays are deep-copied and objects are
|
||||
recursively merged.
|
||||
|
||||
|
||||
**o1**: The target object.
|
||||
|
||||
**o2**: The source object.
|
||||
|
||||
**k**: The key to merge.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### merge(target, objs) <sub>function</sub>
|
||||
|
||||
|
||||
Merge all passed objects into target, copying or merging each key as needed.
|
||||
Arrays are deep-copied, objects are recursively merged, etc.
|
||||
|
||||
|
||||
**target**: The target object.
|
||||
|
||||
**objs**: One or more objects to merge into target.
|
||||
|
||||
|
||||
**Returns**: The updated target object.
|
||||
|
||||
|
||||
### copy(proto, objs) <sub>function</sub>
|
||||
|
||||
|
||||
Create a new object with proto as its prototype, then mix in additional objects’ properties.
|
||||
|
||||
|
||||
**proto**: The prototype object for the new object.
|
||||
|
||||
**objs**: One or more objects whose properties will be mixed in.
|
||||
|
||||
|
||||
**Returns**: The newly created object.
|
||||
|
||||
|
||||
### obj_lerp(a, b, t) <sub>function</sub>
|
||||
|
||||
|
||||
Linearly interpolate between two objects a and b by factor t, assuming each property
|
||||
supports .lerp().
|
||||
|
||||
|
||||
**a**: The start object (its properties must have .lerp()).
|
||||
|
||||
**b**: The end object (matching properties).
|
||||
|
||||
**t**: Interpolation factor (0..1).
|
||||
|
||||
|
||||
**Returns**: A new object with interpolated properties.
|
||||
|
||||
|
||||
### normalizeSpacing(spacing) <sub>function</sub>
|
||||
|
||||
|
||||
Normalize any spacing input into a {l, r, t, b} object.
|
||||
|
||||
|
||||
**spacing**: A number, an array of length 2 or 4, or an object with l/r/t/b.
|
||||
|
||||
|
||||
**Returns**: An object {l, r, t, b}.
|
||||
|
||||
5
docs/api/modules/video.md
Normal file
@@ -0,0 +1,5 @@
|
||||
# video
|
||||
|
||||
### make_video() <sub>function</sub>
|
||||
|
||||
Decode a video file (MPEG, etc.) from an ArrayBuffer, returning a datastream object.
|
||||
35
docs/api/prosperon.md
Normal file
@@ -0,0 +1,35 @@
|
||||
# prosperon
|
||||
|
||||
### c_types <sub>object</sub>
|
||||
|
||||
### argv <sub>object</sub>
|
||||
|
||||
### version <sub>string</sub>
|
||||
|
||||
### revision <sub>string</sub>
|
||||
|
||||
### engine_start() <sub>function</sub>
|
||||
|
||||
### DOC <sub>symbol</sub>
|
||||
|
||||
### on(type, callback) <sub>function</sub>
|
||||
|
||||
### dispatch(type, data) <sub>function</sub>
|
||||
|
||||
### PATH <sub>object</sub>
|
||||
|
||||
### appupdate(...args) <sub>function</sub>
|
||||
|
||||
### update(...args) <sub>function</sub>
|
||||
|
||||
### physupdate(...args) <sub>function</sub>
|
||||
|
||||
### gui(...args) <sub>function</sub>
|
||||
|
||||
### hud(...args) <sub>function</sub>
|
||||
|
||||
### draw(...args) <sub>function</sub>
|
||||
|
||||
### imgui(...args) <sub>function</sub>
|
||||
|
||||
### app(...args) <sub>function</sub>
|
||||
55
docs/api/types/PHYSFS_File.md
Normal file
@@ -0,0 +1,55 @@
|
||||
# PHYSFS_File
|
||||
|
||||
A file handle opened via PhysFS for writing or reading. Freed automatically when references go away.
|
||||
|
||||
|
||||
### close() <sub>function</sub>
|
||||
|
||||
Close this file handle. Throws on error.
|
||||
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### write(data) <sub>function</sub>
|
||||
|
||||
Write data (string or ArrayBuffer) to the file. Throws on error.
|
||||
|
||||
|
||||
|
||||
**data**: The data to write (string or ArrayBuffer).
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### buffer(size) <sub>function</sub>
|
||||
|
||||
Enable an internal write buffer of the given size on this file.
|
||||
|
||||
|
||||
|
||||
**size**: Size in bytes of the buffer.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
|
||||
### tell() <sub>function</sub>
|
||||
|
||||
Return the current position in the file.
|
||||
|
||||
|
||||
|
||||
**Returns**: A numeric offset.
|
||||
|
||||
|
||||
### eof() <sub>function</sub>
|
||||
|
||||
Return whether the file pointer is at end-of-file.
|
||||
|
||||
|
||||
|
||||
**Returns**: True if at EOF, false otherwise.
|
||||
|
||||
27
docs/api/types/SDL_Camera.md
Normal file
@@ -0,0 +1,27 @@
|
||||
# SDL_Camera
|
||||
|
||||
A handle to a physical camera device. Freed when references drop or camera is closed.
|
||||
|
||||
|
||||
### frame() <sub>function</sub>
|
||||
|
||||
Acquire the latest camera frame (as an SDL_Surface). Returns undefined if no
|
||||
new frame is available yet. Throws on error.
|
||||
|
||||
|
||||
|
||||
**Returns**: SDL_Surface or undefined.
|
||||
|
||||
|
||||
### release_frame(surface) <sub>function</sub>
|
||||
|
||||
Release a frame surface previously acquired via camera.frame(). Must be
|
||||
done for each acquired frame.
|
||||
|
||||
|
||||
|
||||
**surface**: The surface to release.
|
||||
|
||||
|
||||
**Returns**: None
|
||||
|
||||
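A per-frame polling sketch; obtaining the `camera` handle itself is not covered by this page, so assume it was opened elsewhere:

```js
// 'camera' is an already-opened SDL_Camera handle.
const surf = camera.frame();    // undefined until a new frame arrives
if (surf !== undefined) {
  // ... use 'surf' (an SDL_Surface), e.g. upload it to a texture ...
  camera.release_frame(surf);   // every acquired frame must be released
}
```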
4
docs/api/types/SDL_Cursor.md
Normal file
@@ -0,0 +1,4 @@

# SDL_Cursor

An SDL cursor handle. Freed automatically on GC. No direct methods.
3
docs/api/types/SDL_GPUBuffer.md
Normal file
@@ -0,0 +1,3 @@

# SDL_GPUBuffer

### name() <sub>function</sub>
231
docs/api/types/SDL_GPUCommandBuffer.md
Normal file
@@ -0,0 +1,231 @@

# SDL_GPUCommandBuffer

A command buffer that accumulates rendering, copy, and compute operations. Freed after submission or GC.

### render_pass(passDesc) <sub>function</sub>

Begin a render pass with color/depth attachments. Provide an object with 'color_targets' and optional 'depth_stencil'. Returns an SDL_GPURenderPass handle.

**passDesc**: {color_targets:[...], depth_stencil:...}

**Returns**: SDL_GPURenderPass

### compute_pass(storageTextures, storageBuffers) <sub>function</sub>

Begin a compute pass reading/writing given arrays of textures and buffers.

**storageTextures**: array of read/write textures

**storageBuffers**: array of read/write buffers

**Returns**: SDL_GPUComputePass

### swapchain_pass(clearColor) <sub>function</sub>

Begin a render pass that directly targets the swapchain (the window). Clears with the specified color.

**clearColor**: [r,g,b,a]

**Returns**: SDL_GPURenderPass

### acquire_swapchain() <sub>function</sub>

Acquire the current swapchain texture from the window. Internal usage.

**Returns**: SDL_GPUTexture handle

### bind_vertex_buffer(slot, buffer) <sub>function</sub>

Bind a GPU buffer as the vertex buffer at a given slot.

**slot**: Integer slot index.

**buffer**: The SDL_GPUBuffer.

**Returns**: None

### bind_index_buffer(buffer, offset) <sub>function</sub>

Bind a GPU buffer as the index buffer (16-bit or 32-bit).

**buffer**: The SDL_GPUBuffer.

**offset**: Optional offset in bytes.

**Returns**: None

### bind_fragment_sampler(slot, texture, sampler) <sub>function</sub>

Bind a texture+sampler pair to a particular fragment shader slot.

**slot**: Index of the sampler binding.

**texture**: The SDL_GPUTexture

**sampler**: The SDL_GPUSampler

**Returns**: None

### push_vertex_uniform_data(slot, data) <sub>function</sub>

Push raw data to a vertex shader uniform block.

**slot**: The uniform buffer slot.

**data**: An ArrayBuffer with the data to upload.

**Returns**: None

### push_fragment_uniform_data(slot, data) <sub>function</sub>

Push raw data to a fragment shader uniform block.

**slot**: The uniform buffer slot index.

**data**: An ArrayBuffer with uniform data.

**Returns**: None

### push_compute_uniform_data(slot, data) <sub>function</sub>

Push raw data to a compute shader uniform buffer.

**slot**: The uniform buffer slot.

**data**: An ArrayBuffer with the data.

**Returns**: None

### submit() <sub>function</sub>

Submit this command buffer to the GPU and return a fence for synchronization.

**Returns**: An SDL_GPUFence

### cancel() <sub>function</sub>

Cancel (discard) this command buffer without submitting.

**Returns**: None

### camera(cameraTransform, uniformSlot) <sub>function</sub>

Write a camera transform (projection/view) to a uniform slot for 3D or 2D usage.

**cameraTransform**: A camera object or transform with .pos, fov, etc.

**uniformSlot**: The integer uniform buffer slot to which data is pushed.

**Returns**: None

### hud(sizeVec2, uniformSlot) <sub>function</sub>

Write an orthographic full-screen "HUD" matrix to a uniform slot. Typically used for 2D overlays.

**sizeVec2**: [width, height] of the viewport area.

**uniformSlot**: The integer uniform buffer slot.

**Returns**: None

### push_debug_group(name) <sub>function</sub>

Push a named debug group marker onto the GPU command list (for debuggers/profilers).

**name**: The debug label string.

**Returns**: None

### pop_debug_group() <sub>function</sub>

Pop the most recent debug group marker.

**Returns**: None

### debug_label(label) <sub>function</sub>

Insert a one-off debug label at the current spot in the command list.

**label**: The debug label string.

**Returns**: None

### blit(blitDesc) <sub>function</sub>

Blit one GPU texture to another with optional flip mode, filter, and clear operations.

**blitDesc**: { src:{texture, mip_level, etc}, dst:{texture, ...}, load_op, flip, filter, clear_color:[r,g,b,a] }

**Returns**: None
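A rough frame sketch tying these calls together. The device/window setup, the pipeline object, and the `primitiveType`/`vertexCount` values are assumed to exist already (see SDL_GPUDevice and SDL_GPURenderPass); the uniform layout is illustrative:

```js
// 'device' is an SDL_GPUDevice that has already claimed the window.
const cmd = device.acquire_cmd_buffer();

cmd.push_debug_group("main scene");
const pass = cmd.swapchain_pass([0, 0, 0, 1]);   // clear the window to opaque black
pass.bind_pipeline(pipeline);                    // an SDL_GPUGraphicsPipeline made earlier
cmd.camera(camera, 0);                           // projection/view into vertex uniform slot 0
pass.draw(primitiveType, 0, 0, vertexCount);     // constants/counts depend on your geometry
pass.end();
cmd.pop_debug_group();

const fence = cmd.submit();                      // wait on it via device.wait_for_fences([fence], true)
```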
83
docs/api/types/SDL_GPUComputePass.md
Normal file
@@ -0,0 +1,83 @@

# SDL_GPUComputePass

A compute pass for dispatching compute pipelines. Freed after end() or GC.

### dispatch(x, y, z) <sub>function</sub>

Dispatch the compute pipeline with the specified threadgroup counts.

**x**: Number of groups in X dimension

**y**: Number of groups in Y dimension

**z**: Number of groups in Z dimension

**Returns**: None

### end() <sub>function</sub>

End this compute pass.

**Returns**: None

### pipeline(computePipeline) <sub>function</sub>

Bind a compute pipeline in this pass.

**computePipeline**: The SDL_GPUComputePipeline

**Returns**: None

### samplers(arrayOfSamplerBindings, firstSlot) <sub>function</sub>

Bind a set of texture/sampler pairs for compute usage.

**arrayOfSamplerBindings**: e.g. [ {texture, sampler}, ... ]

**firstSlot**: The starting sampler slot.

**Returns**: None

### storage_buffers(arrayOfBuffers, firstSlot) <sub>function</sub>

Bind an array of storage buffers for the compute shader.

**arrayOfBuffers**: The buffers

**firstSlot**: Starting binding slot.

**Returns**: None

### storage_textures(arrayOfTextures, firstSlot) <sub>function</sub>

Bind an array of storage textures for the compute shader.

**arrayOfTextures**: The textures

**firstSlot**: Starting binding slot

**Returns**: None
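A dispatch sketch under the assumption that a compute pipeline and a storage buffer were created beforehand via the SDL_GPUDevice methods; the group counts are arbitrary:

```js
const cmd = device.acquire_cmd_buffer();

// Begin a compute pass over one read/write buffer and no storage textures.
const cp = cmd.compute_pass([], [storageBuffer]);
cp.pipeline(computePipeline);   // an SDL_GPUComputePipeline from device.compute_pipeline(...)
cp.dispatch(64, 1, 1);          // 64 threadgroups along X
cp.end();

cmd.submit();
```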
5
docs/api/types/SDL_GPUComputePipeline.md
Normal file
@@ -0,0 +1,5 @@

# SDL_GPUComputePipeline

Encapsulates a compute shader program plus associated resource layouts.
Created via device.compute_pipeline(...).
4
docs/api/types/SDL_GPUCopyPass.md
Normal file
@@ -0,0 +1,4 @@

# SDL_GPUCopyPass

A pass for CPU<->GPU or GPU<->GPU copy operations. No direct JS API besides internal usage.
236
docs/api/types/SDL_GPUDevice.md
Normal file
@@ -0,0 +1,236 @@

# SDL_GPUDevice

A handle for low-level GPU operations via SDL GPU. Freed on GC.

### claim_window(window) <sub>function</sub>

Claim an existing SDL_Window so this GPU device can render to it.

**window**: The SDL_Window to attach.

**Returns**: None

### make_pipeline(pipelineDesc) <sub>function</sub>

Create a new graphics pipeline from a descriptor object specifying shaders, blend states, vertex format, etc.

**pipelineDesc**: An object containing pipeline fields (vertexShader, blend, etc.).

**Returns**: A SDL_GPUGraphicsPipeline handle.

### compute_pipeline(desc) <sub>function</sub>

Create a compute pipeline from a descriptor (shader code, threadgroup sizes, etc.).

**desc**: An object with shader code, thread counts, etc.

**Returns**: SDL_GPUComputePipeline handle.

### set_swapchain(composition, presentMode) <sub>function</sub>

Specify how the swapchain (the final rendered image) is composed, e.g. 'sdr' or 'hdr', and the present mode, e.g. 'vsync' or 'immediate'.

**composition**: E.g. 'sdr', 'linear', or 'hdr'.

**presentMode**: E.g. 'vsync', 'immediate', 'mailbox'.

**Returns**: None

### sort_sprite(a, b) <sub>function</sub>

A comparator function used for sorting sprite objects by layer, y, and texture. Usually used internally.

**a**: A sprite object.

**b**: Another sprite object.

**Returns**: <0, 0, or >0 for sort ordering.

### make_sampler(samplerDesc) <sub>function</sub>

Create a sampler object specifying filtering, wrapping, anisotropy, etc.

**samplerDesc**: An object with min_filter, mag_filter, etc.

**Returns**: SDL_GPUSampler handle.

### load_texture(surface, compressionLevel) <sub>function</sub>

Upload an SDL_Surface into a GPU texture, optionally compressing with DXT. Freed automatically.

**surface**: An SDL_Surface.

**compressionLevel**: 0=none, 1=DXT1 or DXT5, 2=high quality, etc.

**Returns**: SDL_GPUTexture

### texture(desc) <sub>function</sub>

Create a GPU texture with the specified format and usage.

**desc**: Object with {width, height, layers, type, format, usage, etc.}

**Returns**: SDL_GPUTexture

### make_quad() <sub>function</sub>

Return a simple 2-triangle quad geometry covering [0,1]x[0,1]. Useful for post-processing passes.

**Returns**: A mesh {pos, uv, color, indices}.

### driver() <sub>function</sub>

Return the name of the underlying GPU driver in use (e.g. 'OpenGL').

**Returns**: A string with the driver name.

### make_shader(desc) <sub>function</sub>

Compile raw shader code (vertex or fragment) in e.g. SPIR-V, MSL, or DXIL format.

**desc**: {code:ArrayBuffer, stage:'vertex'|'fragment', format:'spv'|..., entrypoint:'main', ...}

**Returns**: SDL_GPUShader object
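A resource-setup sketch. The descriptor fields shown are the ones named above; any spelling beyond that (and the shader byte code itself) is an assumption:

```js
// 'vertSpv' and 'fragSpv' are ArrayBuffers of precompiled SPIR-V, loaded elsewhere.
const vs = device.make_shader({ code: vertSpv, stage: 'vertex',   format: 'spv', entrypoint: 'main' });
const fs = device.make_shader({ code: fragSpv, stage: 'fragment', format: 'spv', entrypoint: 'main' });

// Pipeline descriptor field names beyond 'vertexShader' are illustrative.
const pipeline = device.make_pipeline({
  vertexShader: vs,
  fragmentShader: fs,   // assumed counterpart to 'vertexShader'
  blend: 'alpha',       // placeholder blend state
});

const sampler = device.make_sampler({ min_filter: 'linear', mag_filter: 'linear' });
```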
### acquire_cmd_buffer() <sub>function</sub>

Obtain a new command buffer for recording GPU commands. Must be submitted or canceled.

**Returns**: SDL_GPUCommandBuffer handle

### upload(cmdBuffer, buffers, transferBuffer) <sub>function</sub>

Upload CPU data into a list of GPU buffers, optionally reusing or returning a transfer buffer. Typically you provide (cmdBuf, arrayOfTypedArrays, [transferBuffer]).

**cmdBuffer**: The command buffer in which to record copy commands.

**buffers**: An array of typed-array data to upload; each entry must have a 'gpu' property.

**transferBuffer**: Optional existing GPU transfer buffer to reuse.

**Returns**: The transfer buffer used or newly created.

### wait_for_fences(fences, waitAll) <sub>function</sub>

Wait on an array of GPU fence objects, optionally requiring all or any.

**fences**: An array of SDL_GPUFence objects.

**waitAll**: Boolean, true to wait for all fences, false for any.

**Returns**: True if fences signaled, false on timeout or error.

### query_fence(fence) <sub>function</sub>

Check if the given fence has been signaled yet. Non-blocking.

**fence**: SDL_GPUFence handle

**Returns**: True if signaled, false if still pending

### shader_format() <sub>function</sub>

Return an array of supported GPU shader binary formats (like 'spv', 'dxbc', etc.).

**Returns**: Array of strings naming supported formats.

### slice9(texture, dstRect, edges) <sub>function</sub>

Generate a 9-slice tiling geometry in one shot. For advanced usage with the GPU pipeline.

**texture**: An SDL_GPUTexture

**dstRect**: The rectangle {x, y, w, h}

**edges**: {l, r, t, b} edge sizes

**Returns**: A mesh object

### tile(texture, srcRect, dstRect, tileInfo) <sub>function</sub>

Generate geometry to tile a texture portion inside a dest rect. Often used for repeating backgrounds.

**texture**: The SDL_GPUTexture

**srcRect**: The portion to tile, in pixels

**dstRect**: Where to fill

**tileInfo**: e.g. {repeat_x:true, repeat_y:true}

**Returns**: A mesh object
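A sketch of uploading vertex data and synchronizing on the resulting fence; the 'gpu' property convention follows the upload() description above, and the buffer creation step is assumed to have happened elsewhere:

```js
const cmd = device.acquire_cmd_buffer();

// Each typed array carries a 'gpu' property naming its destination SDL_GPUBuffer.
const positions = new Float32Array([0, 0,  1, 0,  1, 1]);
positions.gpu = vertexBuffer;            // 'vertexBuffer' was created elsewhere

// Record the copy commands; keep the transfer buffer for reuse next frame.
const transfer = device.upload(cmd, [positions]);

const fence = cmd.submit();
device.wait_for_fences([fence], true);   // block until the copy is done
```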
5
docs/api/types/SDL_GPUFence.md
Normal file
@@ -0,0 +1,5 @@

# SDL_GPUFence

A GPU fence for synchronization. Created upon commandBuffer.submit().
Wait or query it with device.wait_for_fences or device.query_fence.
5
docs/api/types/SDL_GPUGraphicsPipeline.md
Normal file
@@ -0,0 +1,5 @@

# SDL_GPUGraphicsPipeline

Encapsulates vertex+fragment shaders, blend/cull states, and vertex attribute layouts.
Created via device.make_pipeline(...).
159
docs/api/types/SDL_GPURenderPass.md
Normal file
@@ -0,0 +1,159 @@

# SDL_GPURenderPass

A single pass of drawing commands with color/depth attachments. Freed after end() or GC.

### bind_pipeline(pipeline) <sub>function</sub>

Bind a previously created graphics pipeline (shaders, states, vertex layouts, etc.).

**pipeline**: The SDL_GPUGraphicsPipeline

**Returns**: None

### viewport(rect) <sub>function</sub>

Set the viewport for clipping or scaling draws, in pass-local coordinates.

**rect**: {x,y,w,h}

**Returns**: None

### scissor(rect) <sub>function</sub>

Set a scissor rectangle for discarding pixels outside it.

**rect**: {x,y,w,h}

**Returns**: None

### draw(primitiveType, baseVertex, firstVertex, vertexCount) <sub>function</sub>

Issue a non-indexed draw call.

**primitiveType**: e.g. SDL_GPU_PRIMITIVETYPE_TRIANGLELIST

**baseVertex**: Starting vertex offset.

**firstVertex**: The first vertex to draw.

**vertexCount**: How many vertices to draw.

**Returns**: None

### draw_indexed(primitiveType, baseVertex, firstIndex, indexCount, instanceCount) <sub>function</sub>

Issue an indexed draw call from the bound index buffer.

**primitiveType**: The primitive type constant.

**baseVertex**: Offset in the vertex buffer.

**firstIndex**: Which index to start from.

**indexCount**: Number of indices to draw.

**instanceCount**: For instanced drawing, or 1 if normal.

**Returns**: None

### end() <sub>function</sub>

End this render pass, finalizing the draw operations.

**Returns**: None

### bind_index_buffer(buffer, elementSize16bit) <sub>function</sub>

Bind an index buffer inside this pass, possibly overriding the global one.

**buffer**: The SDL_GPUBuffer

**elementSize16bit**: If 2, uses 16-bit indices; if 4, uses 32-bit indices.

**Returns**: None

### bind_buffers(firstSlot, arrayOfBuffers) <sub>function</sub>

Bind multiple vertex buffers at consecutive slots.

**firstSlot**: The starting vertex buffer slot.

**arrayOfBuffers**: An array of GPUBuffer objects.

**Returns**: None

### bind_samplers(vertexOrFragment, firstSlot, samplerBindings) <sub>function</sub>

Bind multiple texture/sampler pairs to either vertex or fragment slots.

**vertexOrFragment**: Boolean, true for vertex stage, false for fragment.

**firstSlot**: The first sampler slot to bind.

**samplerBindings**: An array of {texture, sampler}.

**Returns**: None

### bind_storage_buffers(firstSlot, buffers) <sub>function</sub>

Bind one or more storage buffers for read/write in the pipeline.

**firstSlot**: Starting buffer slot index.

**buffers**: An array of SDL_GPUBuffer objects.

**Returns**: None

### bind_storage_textures(firstSlot, textures) <sub>function</sub>

Bind one or more storage textures for read/write in the pipeline.

**firstSlot**: Starting texture slot index.

**textures**: An array of SDL_GPUTexture objects.

**Returns**: None
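A sketch of an indexed, textured draw inside a pass obtained from a command buffer. The pipeline, buffers, texture, sampler, attachment descriptors, and the `primitiveType`/`indexCount` values are assumed to exist already:

```js
const pass = cmd.render_pass({ color_targets: [ /* attachment descriptors created elsewhere */ ] });

pass.bind_pipeline(pipeline);
pass.bind_buffers(0, [vertexBuffer]);                  // vertex buffer at slot 0
pass.bind_index_buffer(indexBuffer, 2);                // 16-bit indices
pass.bind_samplers(false, 0, [{ texture, sampler }]);  // fragment-stage sampler slot 0
pass.scissor({ x: 0, y: 0, w: 640, h: 360 });          // clip to the top half, for example

pass.draw_indexed(primitiveType, 0, 0, indexCount, 1); // one instance
pass.end();
```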
4
docs/api/types/SDL_GPUSampler.md
Normal file
@@ -0,0 +1,4 @@

# SDL_GPUSampler

Defines how a texture is sampled (filter mode, address mode, anisotropy, compare op, etc.).
5
docs/api/types/SDL_GPUShader.md
Normal file
@@ -0,0 +1,5 @@

# SDL_GPUShader

A single compiled shader (vertex or fragment) in a GPU-friendly format
(e.g., SPIR-V, MSL). Combined into a pipeline for drawing.
3
docs/api/types/SDL_GPUTexture.md
Normal file
@@ -0,0 +1,3 @@

# SDL_GPUTexture

### name() <sub>function</sub>
5
docs/api/types/SDL_GPUTransferBuffer.md
Normal file
@@ -0,0 +1,5 @@

# SDL_GPUTransferBuffer

A staging buffer used for copying data to or from GPU buffers/textures. Typically
allocated/used internally by device.upload(...).
333
docs/api/types/SDL_Renderer.md
Normal file
@@ -0,0 +1,333 @@

# SDL_Renderer

A 2D rendering context using the SDL renderer API. Freed automatically.

### draw_color(color) <sub>function</sub>

Set the render draw color for subsequent primitive calls (rect, line, etc.).

**color**: [r, g, b, a] in 0..1.

**Returns**: None

### present() <sub>function</sub>

Display whatever has been rendered (swap buffers). Must be called each frame.

**Returns**: None

### clear() <sub>function</sub>

Clear the current render target with the renderer's draw color.

**Returns**: None

### rect(rectOrArray, color) <sub>function</sub>

Draw one or more rectangle outlines.

**rectOrArray**: A single rect {x,y,w,h} or an array of rects.

**color**: Optional [r,g,b,a]. If provided, overrides the current draw color.

**Returns**: None

### fillrect(rectOrArray, color) <sub>function</sub>

Fill one or more rectangles with the renderer's current color or an optional override.

**rectOrArray**: A single rect {x,y,w,h} or an array of rects.

**color**: Optional [r,g,b,a].

**Returns**: None

### line(points, color) <sub>function</sub>

Draw a sequence of lines connecting the points in an array.

**points**: An array of [x,y] points. Lines connect consecutive points.

**color**: Optional [r,g,b,a].

**Returns**: None

### point(points, color) <sub>function</sub>

Draw a list of points (pixels).

**points**: An array of [x,y] positions.

**color**: Optional [r,g,b,a].

**Returns**: None

### load_texture(surface) <sub>function</sub>

Create an SDL_Texture from a given SDL_Surface for use with this renderer.

**surface**: An SDL_Surface.

**Returns**: An SDL_Texture object.

### texture(tex, dstRect, srcRect, color) <sub>function</sub>

Draw a texture onto the render target.

**tex**: The SDL_Texture to draw.

**dstRect**: The destination rect {x, y, w, h}.

**srcRect**: Optional portion of the texture to draw {x, y, w, h}.

**color**: Optional color mod [r,g,b,a].

**Returns**: None
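A minimal frame sketch using only the calls above; the renderer is assumed to come from window.make_renderer(), and the surface loading step is illustrative:

```js
// 'renderer' was created via window.make_renderer(); 'surf' is an SDL_Surface loaded elsewhere.
const tex = renderer.load_texture(surf);

renderer.draw_color([0.1, 0.1, 0.1, 1]);
renderer.clear();

renderer.texture(tex, { x: 32, y: 32, w: 64, h: 64 });         // draw the whole texture
renderer.fillrect({ x: 8, y: 8, w: 16, h: 16 }, [1, 0, 0, 1]); // red square, color override
renderer.line([[0, 0], [100, 100], [200, 50]]);                // polyline in the current color

renderer.present();   // must be called each frame
```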
### slice9(tex, dstRect, edges, srcRect) <sub>function</sub>

Draw a texture with 9-slice scaling. The edges argument {l, r, t, b} gives the corners/borders that remain unscaled. The rest is tiled or stretched.

**tex**: The SDL_Texture.

**dstRect**: Destination region {x, y, w, h}.

**edges**: {l, r, t, b} for corner sizes in pixels.

**srcRect**: Optional portion in the texture.

**Returns**: None

### tile(tex, dstRect, srcRect, scale) <sub>function</sub>

Tile a texture repeatedly within the specified region. Optionally use a srcRect.

**tex**: The SDL_Texture to tile.

**dstRect**: The region to fill {x, y, w, h}.

**srcRect**: Optional portion of texture.

**scale**: A float scale factor for each tile.

**Returns**: None
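A UI-panel style sketch of the two calls above, using made-up sizes and texture variables (`panelTex`, `bgTex`):

```js
// Stretch a panel texture to 200x120 while keeping an 8px border crisp.
renderer.slice9(panelTex, { x: 20, y: 20, w: 200, h: 120 }, { l: 8, r: 8, t: 8, b: 8 });

// Fill the background by repeating the full texture at half scale.
renderer.tile(bgTex, { x: 0, y: 0, w: 640, h: 360 }, undefined, 0.5);
```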
### get_image(rect) <sub>function</sub>

Read back the rendered pixels into a new SDL_Surface. If rect is undefined, capture the entire output.

**rect**: Optional {x,y,w,h}.

**Returns**: An SDL_Surface with the requested region's pixels.

### fasttext(text, pos, color) <sub>function</sub>

Draw debug text using an internal fast path. Typically used for quick debugging overlays.

**text**: The string to draw.

**pos**: The [x, y] position to draw text.

**color**: Optional [r,g,b,a].

**Returns**: None

### geometry(texture, meshObject) <sub>function</sub>

Render custom geometry from a mesh object {pos, uv, color, indices, count} with an optional texture.

**texture**: The SDL_Texture or undefined.

**meshObject**: The geometry data with typed arrays.

**Returns**: None

### scale(scaleVec2) <sub>function</sub>

Set a scaling factor for all subsequent rendering on this renderer.

**scaleVec2**: [sx, sy] scaling factors.

**Returns**: None

### logical_size(size) <sub>function</sub>

Set a "logical" size that the renderer will scale to.
For example, (320, 240) can auto-scale up to the window resolution.

**size**: [width, height].

**Returns**: None

### viewport(rect) <sub>function</sub>

Set the clipping viewport for rendering. Pass undefined to use the full render target.

**rect**: {x, y, w, h}, or undefined.

**Returns**: None

### clip(rect) <sub>function</sub>

Set or clear the clipping rectangle for drawing. Pass undefined to clear.

**rect**: {x, y, w, h} or undefined.

**Returns**: None

### vsync(flag) <sub>function</sub>

Enable or disable vertical sync. This may have no effect depending on the driver.

**flag**: True or false.

**Returns**: None

### coords(pos) <sub>function</sub>

Convert window coordinates to this renderer's coordinate space.

**pos**: [x, y] in window space.

**Returns**: [x, y] in renderer coordinate space.

### camera(cameraTransform, centered) <sub>function</sub>

Set up a basic 2D camera matrix from a given transform. If 'centered' is true, the origin is the center of the viewport; otherwise it is the top-left corner.

**cameraTransform**: The transform whose pos is used.

**centered**: Boolean true or false.

**Returns**: None

### get_viewport() <sub>function</sub>

Return the current viewport rect.

**Returns**: {x, y, w, h}

### screen2world(pos) <sub>function</sub>

Convert a screen coordinate to world space based on the current camera transform.

**pos**: [x, y] screen coords.

**Returns**: [wx, wy] in world space.

### target(texture) <sub>function</sub>

Set or clear the current render target texture. Pass undefined to reset to the default/window.

**texture**: An SDL_Texture or undefined.

**Returns**: None

### make_sprite_mesh(sprites) <sub>function</sub>

Generate a mesh from an array of sprite objects, combining their positions, UVs, and colors into a single geometry block.

**sprites**: An array of sprite-like objects.

**Returns**: A 'mesh' object with pos, uv, color, indices, etc.
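A small sketch of picking a world position under the mouse with the camera calls above. The camera transform object and the mouse position source are assumed, and whether coords() must be applied before screen2world() is itself an assumption:

```js
// 'cam' is a transform-like object with a .pos; 'mousePos' is [x, y] in window space.
renderer.camera(cam, true);                  // centered camera for this frame's drawing

const local = renderer.coords(mousePos);     // window space -> renderer space (assumed prerequisite)
const world = renderer.screen2world(local);  // renderer space -> world space
// 'world' can now be compared against game-object positions.
```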
68
docs/api/types/SDL_Surface.md
Normal file
@@ -0,0 +1,68 @@

# SDL_Surface

A software (CPU) image in memory. Freed when references vanish. Typically converted
to SDL_Texture for drawing, or used as raw pixel data.

### blit(dstRect, srcSurface, srcRect) <sub>function</sub>

Blit (copy) another surface onto this surface, scaling if needed.

**dstRect**: Destination {x, y, w, h}

**srcSurface**: The source SDL_Surface

**srcRect**: {x, y, w, h} portion from the source

**Returns**: None

### scale(newSize) <sub>function</sub>

Return a new SDL_Surface scaled to [width, height] using linear filtering.

**newSize**: [width, height]

**Returns**: A new SDL_Surface with the scaled result.

### fill(color) <sub>function</sub>

Fill the entire surface with a single color.

**color**: [r, g, b, a] in 0..1

**Returns**: None

### rect(rect, color) <sub>function</sub>

Fill a sub-rectangle of the surface with a color.

**rect**: {x, y, w, h}

**color**: [r, g, b, a]

**Returns**: None

### dup() <sub>function</sub>

Make a copy of this surface in RGBA format.

**Returns**: A new SDL_Surface copy.
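A sketch composing a small atlas on the CPU; the source surfaces (`icon`, `atlas`) are assumed to exist already:

```js
// 'icon' and 'atlas' are existing SDL_Surfaces.
atlas.fill([0, 0, 0, 0]);                                 // start from a transparent canvas
atlas.blit({ x: 0, y: 0, w: 32, h: 32 },                  // destination cell...
           icon, { x: 0, y: 0, w: 64, h: 64 });           // ...filled with a downscaled copy
atlas.rect({ x: 32, y: 0, w: 32, h: 32 }, [1, 0, 1, 1]);  // magenta placeholder cell

const preview = atlas.scale([128, 128]);                  // enlarged copy for inspection
const backup  = atlas.dup();                              // RGBA duplicate
```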
16
docs/api/types/SDL_Texture.md
Normal file
@@ -0,0 +1,16 @@

# SDL_Texture

A 2D GPU-accelerated texture for rendering with SDL_Renderer. Freed automatically.

### mode(mode) <sub>function</sub>

Set texture scale mode or filtering mode (nearest/linear).

**mode**: A string or numeric mode to set (e.g., 'linear').

**Returns**: None
15
docs/api/types/SDL_Thread.md
Normal file
@@ -0,0 +1,15 @@

# SDL_Thread

A handle to an SDL-created thread. Freed on GC after join.

Note: The engine generally doesn't expose custom usage for threads.

### wait() <sub>function</sub>

Block until this thread terminates.

**Returns**: None
126
docs/api/types/SDL_Window.md
Normal file
@@ -0,0 +1,126 @@

# SDL_Window

An application window, created via prosperon.engine_start or SDL calls. Freed on GC.

### fullscreen() <sub>function</sub>

Toggle fullscreen mode for this window (SDL_WINDOW_FULLSCREEN).

**Returns**: None

### make_renderer(name) <sub>function</sub>

Create an SDL_Renderer for 2D rendering tied to this window.

**name**: The renderer driver name, e.g. "opengl" (may be optional).

**Returns**: An SDL_Renderer object.

### make_gpu(debug, driverName) <sub>function</sub>

Create an SDL_GPUDevice for low-level GPU rendering on this window.

**debug**: If true, enable debugging in the GPU device.

**driverName**: The GPU back-end driver, e.g. "opengl".

**Returns**: An SDL_GPUDevice.
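A setup sketch choosing one of the two rendering paths; how the window object itself is obtained (here assumed to be the one the engine created at startup) is outside this page:

```js
// 'window' is an SDL_Window, e.g. the one prosperon.engine_start set up.
window.title = "My Game";        // title is an accessor (see below)
window.size  = [1280, 720];

// Either a classic 2D renderer...
const renderer = window.make_renderer("opengl");   // driver name may be optional

// ...or the low-level GPU device (debug mode enabled here):
// const device = window.make_gpu(true, "opengl");
```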
### keyboard_shown() <sub>function</sub>

Return whether the on-screen keyboard is visible (mobile/tablet).

**Returns**: True if shown, false otherwise.

### theme() <sub>function</sub>

Currently returns undefined. Placeholder for retrieving OS window theme info.

**Returns**: undefined

### safe_area() <sub>function</sub>

Return a rect describing any OS-specific "safe" region for UI, e.g. on an iPhone with a notch.

**Returns**: A rect object {x, y, w, h}.

### bordered(flag) <sub>function</sub>

Enable or disable window borders.

**flag**: True to show borders, false to hide.

**Returns**: None

### set_icon(surface) <sub>function</sub>

Set the window's icon from an SDL_Surface.

**surface**: An SDL_Surface holding the icon.

**Returns**: None

### title <sub>accessor</sub>

Get or set the window's title text in the title bar.

**newTitle**: (when setting) A string title.

**Returns**: The current title if getting, or None if setting.

### size <sub>accessor</sub>

Get or set the window's size as [width, height].

**newSize**: (when setting) e.g. [640, 480]

**Returns**: The current [width, height], or None if setting.

### mouse_grab(flag) <sub>function</sub>

Grab or ungrab the mouse for this window (so the pointer won't leave).

**flag**: True to grab mouse input, false to release.

**Returns**: None
67
docs/api/types/datastream.md
Normal file
@@ -0,0 +1,67 @@

# datastream

A streaming media handle, typically for MPEG video. Freed automatically.

### time() <sub>function</sub>

Return the current playback time in seconds.

**Returns**: Current time as a float in seconds.

### seek(seconds) <sub>function</sub>

Seek to the specified time (in seconds).

**seconds**: The time to jump to in the stream.

**Returns**: None

### advance(seconds) <sub>function</sub>

Advance by a certain number of seconds, decoding video as needed.

**seconds**: The amount of time to skip forward.

**Returns**: None

### duration() <sub>function</sub>

Return the total duration of the video stream, in seconds, if known.

**Returns**: Float seconds duration, or 0 if unknown.

### framerate() <sub>function</sub>

Return the framerate (FPS) of the stream if known.

**Returns**: Float frames per second, or 0 if unknown.

### callback <sub>accessor</sub>

A function to call whenever a new frame is decoded. If not set, no callback is invoked.

**fn**: (when setting) A function that receives (surface).

**Returns**: The existing function or undefined if none.
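A playback sketch, assuming `stream` came from video.make_video and that the per-frame `dt` comes from whatever update hook the game uses:

```js
// React to each decoded frame; the callback receives an SDL_Surface.
stream.callback = surface => {
  // e.g. upload 'surface' to a texture for display (not shown here)
};

stream.seek(0);   // start from the beginning

// Inside a per-frame update, push decoding forward by the elapsed time.
function onUpdate(dt) {
  stream.advance(dt);
  if (stream.time() >= stream.duration()) {
    // reached the end (duration() may be 0 if unknown)
  }
}
```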
66
docs/api/types/enet_host.md
Normal file
@@ -0,0 +1,66 @@

# enet_host

### service(callback, timeout) <sub>function</sub>

Poll for and process any available network events (connect, receive, disconnect, or none)
from this host, calling the provided callback for each event. This function loops until
no more events are available in the current timeframe.

Event object properties:
- type: String, one of "connect", "receive", "disconnect", or "none".
- peer: (present if type = "connect") The ENetPeer object for the new connection.
- channelID: (present if type = "receive") The channel on which the data was received.
- data: (present if type = "receive") The received data as a *plain JavaScript object*.
  If the JSON parse fails or the data isn't an object, a JavaScript error is thrown.

**callback**: A function called once for each available event, receiving an event object as its single argument.

**timeout**: (optional) Timeout in milliseconds. Defaults to 0 (non-blocking).

**Returns**: None

### connect(host, port) <sub>function</sub>

Initiate a connection from this host to a remote server. Throws an error if the
connection cannot be started.

**host**: The hostname or IP address of the remote server (e.g. "example.com" or "127.0.0.1").

**port**: The port number to connect to.

**Returns**: An ENetPeer object representing the connection.

### flush() <sub>function</sub>

Flush all pending outgoing packets for this host immediately.

**Returns**: None

### broadcast(data) <sub>function</sub>

Broadcast a JavaScript object to all connected peers on channel 0. The object is
serialized to JSON, and the packet is sent reliably. Throws an error if serialization fails.

**data**: A JavaScript object to broadcast to all peers.

**Returns**: None
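A sketch of a client-style tick, assuming `host` is an already-created enet_host and that events arrive as documented above:

```js
const peer = host.connect("127.0.0.1", 4000);   // returns an ENetPeer

// Call this regularly (e.g. every frame) to pump the network.
function pump() {
  host.service(ev => {
    if (ev.type === "connect")    { /* ev.peer is the new connection */ }
    if (ev.type === "receive")    { /* ev.data is a plain object, ev.channelID the channel */ }
    if (ev.type === "disconnect") { /* clean up */ }
  }, 0);                                        // 0 = non-blocking
}

host.broadcast({ hello: "world" });             // reliable JSON broadcast on channel 0
host.flush();                                   // push pending packets out now
```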