var prosperon = {}

// This file is hard coded for the SDL GPU case

var video = use('sdl3/video')
var surface = use('sdl3/surface')
var sdl_gpu = use('sdl3/gpu')
var io = use('cellfs')
var geometry = use('geometry')
var blob = use('blob')
var imgui = use('imgui')
var json = use('json')

var os = use('os')
var math = use('math/radians')

var win_size = {width:500,height:500}

function makeOrthoMetal(l,r,b,t,n,f){
  return [
    2/(r-l), 0, 0, 0,
    0, 2/(t-b), 0, 0,
    0, 0, 1/(f-n), 0,
    -(r+l)/(r-l), -(t+b)/(t-b), -n/(f-n), 1
  ]
}
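
// The matrix above is stored column-major with the translation in the last
// column, and maps the box [l,r]x[b,t]x[n,f] to Metal/Vulkan-style clip space:
// x,y in [-1,1] and z in [0,1] (z' = (z-n)/(f-n)). Minimal usage sketch
// (values are illustrative, not taken from this file):
//
//   var proj = makeOrthoMetal(0, 640, 0, 360, 0, 1)
//   // proj is a flat 16-element array, ready for geometry.array_blob(proj)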

var driver = "vulkan"
switch(os.platform()) {
case "Linux":
  driver = "vulkan"
  break
case "Windows":
  // driver = "direct3d12"
  driver = "vulkan"
  break
case "macOS":
  driver = "metal"
  break
}

var default_sampler = {
  min_filter: "linear",
  mag_filter: "linear",
  mipmap: "nearest",
  u: "repeat",
  v: "repeat",
  w: "repeat",
  mip_bias: 0,
  max_anisotropy: 0,
  compare_op: "none",
  min_lod: 0,
  max_lod: 10,
  anisotropy: false,
  compare: false
};

var main_color = {
  type: "2d",
  format: "rgba8",
  layers: 1,
  mip_levels: 1,
  samples: 0,
  sampler: true,
  color_target: true
};

var main_depth = {
  type: "2d",
  format: "d32 float s8",
  layers: 1,
  mip_levels: 1,
  samples: 0,
  sampler: true,
  depth_target: true
};

var default_window = {
  // Basic properties
  title: "Prosperon Window",
  width: 640,
  height: 360,

  // Position - can be numbers or "centered"
  x: null, // SDL picks the position (SDL_WINDOWPOS_UNDEFINED) by default
  y: null, // SDL picks the position (SDL_WINDOWPOS_UNDEFINED) by default

  // Window behavior flags
  resizable: true,
  fullscreen: false,
  hidden: false,
  borderless: false,
  alwaysOnTop: false,
  minimized: false,
  maximized: false,

  // Input grabbing
  mouseGrabbed: false,
  keyboardGrabbed: false,

  // Display properties
  highPixelDensity: false,
  transparent: false,
  opacity: 1.0, // 0.0 to 1.0

  // Focus behavior
  notFocusable: false,

  // Special window types (mutually exclusive)
  utility: false,   // Utility window (not in taskbar)
  tooltip: false,   // Tooltip window (requires parent)
  popupMenu: false, // Popup menu window (requires parent)

  // Graphics API flags (let SDL choose if not specified)
  opengl: false, // Force OpenGL context
  vulkan: false, // Force Vulkan context
  metal: false,  // Force Metal context (macOS)

  // Advanced properties
  parent: null, // Parent window for tooltips/popups/modal
  modal: false, // Modal to parent window (requires parent)
  externalGraphicsContext: false, // Use external graphics context

  // Input handling
  textInput: true, // Enable text input on creation
}

var win_config = meme(default_window)

win_config.metal = true

var window = new video.window(win_config)
prosperon.window = window
var win_proto = proto(window)
win_proto.toJSON = function()
{
  var flags = this.flags
  var ret = {
    title: this.title,
    size: this.size,
    pixel_size: this.sizeInPixels,
    display_scale: this.displayScale,
    pixel_density: this.pixelDensity,
    pos: this.position,
    opacity: this.opacity,
    fullscreen: this.fullscreen,
    safe_area: this.safe_area(),
  }

  for (var i in flags)
    ret[i] = flags[i]
  return ret
}

window.resizable = true

var device = new sdl_gpu.gpu({
  shaders_msl: true,
  shaders_metallib: true,
  name: "metal"
})
device.claim_window(window)
device.set_swapchain(window, 'sdr', 'vsync')
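
// Note: the GPU device above is hard-coded to Metal (MSL/metallib shaders),
// matching the comment at the top of this file; the `driver` value chosen
// from os.platform() earlier is not consulted here. A cross-platform variant
// would presumably pass it through, e.g. (sketch, untested):
//
//   var device = new sdl_gpu.gpu({ name: driver, shaders_msl: driver == "metal" })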

var white_pixel = {
  width: 1,
  height: 1,
  pixels: new blob(32, true), // 32 bits, all set to 1: a single opaque white rgba8 pixel
  pitch: 32
}

stone(white_pixel.pixels)

var shader_type = device.shader_format()[0]
shader_type = 'msl' // hard code MSL to match the Metal-only device above

var sampler_cache = {}

function canonicalize_sampler(desc) {
  return json.encode(desc)
}

function get_sampler(desc) {
  var key = canonicalize_sampler(desc)

  if (!sampler_cache[key]) {
    var sampler_config = json.decode(key)
    sampler_cache[key] = new sdl_gpu.sampler(device, sampler_config)
  }

  return sampler_cache[key]
}

var std_sampler = get_sampler(default_sampler)
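
// Samplers are deduplicated by their JSON-encoded description, so any two
// materials that request an identical sampler config share one GPU sampler
// object; every draw that falls back to default_sampler below ends up reusing
// the same cached sampler.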

// Shader and pipeline cache
var shader_cache = {}
var pipeline_cache = {}

function upload(copypass, buffer, toblob)
{
  stone(toblob)
  var trans = new sdl_gpu.transfer_buffer(device, {
    size: toblob.length/8, // blob length is in bits; /8 gives the byte size
    usage: "upload"
  })

  trans.copy_blob(device, toblob)

  copypass.upload_to_buffer({
    transfer_buffer: trans,
    offset: 0
  }, {
    buffer: buffer,
    offset: 0,
    size: toblob.length/8
  })
}
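
// upload() is the one path every vertex/index blob takes to the GPU: stage the
// blob in a transfer buffer, then record a copy into the destination buffer on
// the given copy pass. Typical call, as used in create_batch below:
//
//   upload(copy_pass, pos_buffer, pos_blob)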

function make_shader(sh_file)
{
  var file = `shaders/${shader_type}/${sh_file}.${shader_type}`
  if (shader_cache[file]) return shader_cache[file]
  var refl = json.decode(text(io.slurp(`shaders/reflection/${sh_file}.json`)))

  var shader = {
    code: io.slurp(file),
    format: shader_type,
    stage: sh_file.endsWith("vert") ? "vertex" : "fragment",
    num_samplers: refl.separate_samplers ? refl.separate_samplers.length : 0,
    num_textures: 0,
    num_storage_buffers: refl.separate_storage_buffers ? refl.separate_storage_buffers.length : 0,
    num_uniform_buffers: refl.ubos ? refl.ubos.length : 0,
    entrypoint: shader_type == "msl" ? "main0" : "main"
  }

  shader[GPU] = new sdl_gpu.shader(device, shader)
  shader.reflection = refl;
  shader_cache[file] = shader
  shader.file = sh_file
  return shader
}
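
// make_shader("sprite.vert") loads shaders/msl/sprite.vert.msl plus the
// reflection JSON next to it, and caches the pair. The binding code below only
// relies on a few reflection fields; a reflection file consistent with that
// usage might look like this (illustrative shape only, not an actual file from
// this project):
//
//   { "ubos": [ {"name": "type.model", "type": "_10", "binding": 0} ],
//     "separate_images": [ {"name": "diffuse", "binding": 0} ],
//     "separate_samplers": [ {"name": "smp", "binding": 0} ],
//     "types": { "_10": { "members": [ {"name": "color", "type": "vec4"} ] } } }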

def material_pipeline_cache = {};

function get_pipeline_for_material(mat) {
  mat = mat || {} // callers pass null to mean "just use the sprite defaults"
  def key = json.encode({
    vert: mat.vertex || sprite_pipeline.vertex,
    frag: mat.fragment || sprite_pipeline.fragment,
    blend: mat.blend || sprite_pipeline.blend,
    cull: mat.cull || sprite_pipeline.cull,
  });

  if (!material_pipeline_cache[key]) {
    def cfg = meme(sprite_pipeline, {
      vertex: mat.vertex || sprite_pipeline.vertex,
      fragment: mat.fragment || sprite_pipeline.fragment,
      blend: mat.blend || sprite_pipeline.blend,
      cull: mat.cull || sprite_pipeline.cull,
    })

    material_pipeline_cache[key] = load_pipeline(cfg)
  }

  return material_pipeline_cache[key];
}
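
// Pipelines are cached per (vertex shader, fragment shader, blend, cull)
// combination, so two sprites that only differ in color or texture share one
// pipeline, while a material with a custom fragment shader gets its own.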

function load_pipeline(config)
{
  // pull back the JS shader objects (they have `.reflection`)
  def vertShader = make_shader(config.vertex);
  def fragShader = make_shader(config.fragment);

  // build the GPU pipeline
  def gpuPipeline = new sdl_gpu.graphics_pipeline(device, {
    vertex: vertShader[GPU],
    fragment: fragShader[GPU],
    // ...all the other config fields...
    primitive: config.primitive,
    blend: config.blend,
    cull: config.cull,
    face: config.face,
    depth: config.depth,
    stencil: config.stencil,
    alpha_to_coverage: config.alpha_to_coverage,
    multisample: config.multisample,
    label: config.label,
    target: config.target,
    vertex_buffer_descriptions: config.vertex_buffer_descriptions,
    vertex_attributes: config.vertex_attributes
  });

  // stash the reflection in the JS wrapper for easy access later
  gpuPipeline._reflection = {
    vertex: vertShader.reflection,
    fragment: fragShader.reflection
  };

  return gpuPipeline;
}

// Helper function to pack JavaScript objects into binary blob for UBOs
function pack_ubo(obj, ubo_type, reflection) {
  var type_def = reflection.types[ubo_type];
  if (!type_def) {
    log.console(`Warning: No type definition found for ${ubo_type}`);
    return geometry.array_blob([]);
  }

  var result_blob = new blob();

  // Process each member in the UBO structure
  for (var member of type_def.members) {
    var value = obj[member.name];

    if (value == null) {
      if (member.type == "vec4") {
        result_blob.write_blob(geometry.array_blob([1, 1, 1, 1]));
      } else if (member.type == "vec3") {
        result_blob.write_blob(geometry.array_blob([1, 1, 1]));
      } else if (member.type == "vec2") {
        result_blob.write_blob(geometry.array_blob([1, 1]));
      } else if (member.type == "float") {
        result_blob.write_blob(geometry.array_blob([1]));
      }
      continue;
    }

    // Convert value to appropriate format based on type
    if (member.type == "vec4") {
      if (isa(value, array)) {
        result_blob.write_blob(geometry.array_blob(value));
      } else if (typeof value == "object" && value.r != null) {
        // Color object
        result_blob.write_blob(geometry.array_blob([value.r, value.g, value.b, value.a || 1]));
      } else {
        // Single value, expand to vec4
        result_blob.write_blob(geometry.array_blob([value, value, value, value]));
      }
    } else if (member.type == "vec3") {
      if (isa(value, array)) {
        result_blob.write_blob(geometry.array_blob(value));
      } else if (typeof value == 'object' && value.r != null)
        result_blob.write_blob(geometry.array_blob([value.r, value.g, value.b]));
      else
        result_blob.write_blob(geometry.array_blob([value, value, value]));
    } else if (member.type == "vec2") {
      if (isa(value, array)) {
        result_blob.write_blob(geometry.array_blob(value));
      } else {
        result_blob.write_blob(geometry.array_blob([value, value]));
      }
    } else if (member.type == "float") {
      result_blob.write_blob(geometry.array_blob([value]));
    }
  }

  return stone(result_blob)
}
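
// pack_ubo walks the reflected struct layout and writes one float run per
// member, defaulting any missing member to all-ones of the right width.
// Members are packed back-to-back; any std140-style padding is assumed to be
// absorbed by the reflection data. Illustrative call with hypothetical names:
//
//   // given refl.types["_12"].members = [{name: "tint", type: "vec4"}]
//   var ub = pack_ubo({tint: {r:1, g:0, b:0}}, "_12", refl)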

// Specialized function to pack TransformBuffer UBO with camera data
function pack_transform_buffer(camera) {
  if (!camera) return geometry.array_blob([]);

  // Calculate world_to_projection matrix (same as existing make_camera_pblob)
  var cw = camera.surface ? camera.surface.width : win_size.width;
  var ch = camera.surface ? camera.surface.height : win_size.height;

  var world_w, world_h;
  if (camera.width && camera.aspect_ratio) {
    world_w = camera.width;
    world_h = camera.width / camera.aspect_ratio;
  } else {
    var zoom = camera.zoom || ch;
    world_h = zoom;
    world_w = zoom * cw / ch;
  }

  var l = camera.pos[0] - camera.anchor[0] * world_w;
  var b = camera.pos[1] - camera.anchor[1] * world_h;
  var r = l + world_w;
  var t = b + world_h;

  var world_to_projection = makeOrthoMetal(l, r, b, t, 0, 1);

  // Calculate projection_to_world (inverse of world_to_projection)
  // For orthographic projection, this is more complex, but for now use identity
  var projection_to_world = [
    1, 0, 0, 0,
    0, 1, 0, 0,
    0, 0, 1, 0,
    0, 0, 0, 1
  ];

  // world_to_view - for now, assume identity (no separate view transform)
  var world_to_view = [
    1, 0, 0, 0,
    0, 1, 0, 0,
    0, 0, 1, 0,
    0, 0, 0, 1
  ];

  // view_to_projection - same as world_to_projection since no view transform
  var view_to_projection = world_to_projection.slice();

  // camera_pos_world - camera position in world space
  var camera_pos_world = camera.pos || [0, 0, 0];

  // viewport_min_z, viewport_max_z - typical near/far clip planes
  var viewport_min_z = 0.0;
  var viewport_max_z = 1.0;

  // camera_dir_world - camera direction, assume looking down -Z
  var camera_dir_world = [0, 0, -1];

  // viewport_size - size of viewport in pixels
  var viewport_size = [cw, ch];

  // viewport_offset - offset of viewport
  var viewport_offset = [0, 0];

  // render_size - same as viewport_size for now
  var render_size = viewport_size.slice();

  // time - current time
  var time = 0.0; // TODO: get actual time

  // Pack all data into blob in correct order
  var result_blob = new blob();
  result_blob.write_blob(geometry.array_blob(world_to_projection));
  result_blob.write_blob(geometry.array_blob(projection_to_world));
  result_blob.write_blob(geometry.array_blob(world_to_view));
  result_blob.write_blob(geometry.array_blob(view_to_projection));
  result_blob.write_blob(geometry.array_blob(camera_pos_world));
  result_blob.write_blob(geometry.array_blob([viewport_min_z]));
  result_blob.write_blob(geometry.array_blob(camera_dir_world));
  result_blob.write_blob(geometry.array_blob([viewport_max_z]));
  result_blob.write_blob(geometry.array_blob(viewport_size));
  result_blob.write_blob(geometry.array_blob(viewport_offset));
  result_blob.write_blob(geometry.array_blob(render_size));
  result_blob.write_blob(geometry.array_blob([time]));

  return stone(result_blob);
}
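
// Worked example of the camera rectangle above: with a 640x360 target,
// default zoom = 360 and anchor = [0.5, 0.5] at pos = [0, 0], world_h = 360
// and world_w = 360 * 640/360 = 640, so l = -320, b = -180, r = 320, t = 180:
// the camera is centered on its position and spans one screen of world units.
// The packed field order must match the shader's TransformBuffer layout
// exactly.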

// Specialized function to pack model UBO with model matrix and color
function pack_model_buffer(material) {
  // Default model matrix - identity (no scaling/rotation/translation)
  var model_matrix = [
    1, 0, 0, 0,
    0, 1, 0, 0,
    0, 0, 1, 0,
    0, 0, 0, 1
  ];

  // Default color - white
  var color = [1, 1, 1, 1];

  // Use material properties if available
  if (material && material.model) {
    if (isa(material.model, array) && material.model.length >= 16) {
      model_matrix = material.model.slice(0, 16);
    }
  }

  if (material && material.color) {
    if (isa(material.color, array) && material.color.length >= 4) {
      color = material.color.slice(0, 4);
    } else if (typeof material.color == "object" && material.color.r != null) {
      color = [material.color.r, material.color.g, material.color.b, material.color.a || 1];
    }
  }

  // Pack data into blob
  var result_blob = new blob();
  result_blob.write_blob(geometry.array_blob(model_matrix));
  result_blob.write_blob(geometry.array_blob(color));

  return stone(result_blob);
}

imgui.init(window, device)

var rasterize = use('rasterize');
var time = use('time')
var tilemap = use('tilemap')

var res = use('resources')
var input = use('sdl3/input')

var graphics = use('graphics')

var camera = {}

// Pipeline component definitions
var default_depth_state = {
  compare: "always", // never/less/equal/less_equal/greater/not_equal/greater_equal/always
  test: false,
  write: false,
  bias: 0,
  bias_slope_scale: 0,
  bias_clamp: 0
}

var default_stencil_state = {
  compare: "always", // never/less/equal/less_equal/greater/not_equal/greater_equal/always
  fail: "keep", // keep/zero/replace/incr_clamp/decr_clamp/invert/incr_wrap/decr_wrap
  depth_fail: "keep",
  pass: "keep"
}

var disabled_blend_state = {
  enabled: false,
  src_rgb: "zero",
  dst_rgb: "zero",
  op_rgb: "add",
  src_alpha: "one",
  dst_alpha: "zero",
  op_alpha: "add"
}

var alpha_blend_state = {
  enabled: true,
  src_rgb: "src_alpha",
  dst_rgb: "one_minus_src_alpha",
  op_rgb: "add",
  src_alpha: "one",
  dst_alpha: "one_minus_src_alpha",
  op_alpha: "add"
}

var default_multisample_state = {
  count: 1,
  mask: 0xFFFFFFFF,
  domask: false
}

// Helper function to create pipeline config
function create_pipeline_config(options) {
  var config = {
    vertex: options.vertex,
    fragment: options.fragment,
    primitive: options.primitive || "triangle",
    fill: options.fill ?? true,
    depth: options.depth || default_depth_state,
    stencil: {
      enabled: options.stencil_enabled ?? false,
      front: options.stencil_front || default_stencil_state,
      back: options.stencil_back || default_stencil_state,
      test: options.stencil_test ?? false,
      compare_mask: options.stencil_compare_mask ?? 0,
      write_mask: options.stencil_write_mask ?? 0
    },
    blend: options.blend || disabled_blend_state,
    cull: options.cull || "none",
    face: options.face || "cw",
    alpha_to_coverage: options.alpha_to_coverage ?? false,
    multisample: options.multisample || default_multisample_state,
    label: options.label || "pipeline",
    target: options.target || {}
  }

  // Ensure target has required properties
  if (!config.target.color_targets) {
    config.target.color_targets = [{
      format: "rgba8",
      blend: config.blend
    }]
  }

  return config
}
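
// create_pipeline_config fills in every optional pipeline field, so callers
// only specify what differs from the defaults. Hypothetical caller (not in
// this file); note that load_pipeline additionally expects
// vertex_buffer_descriptions and vertex_attributes, as sprite_pipeline
// supplies below:
//
//   var cfg = create_pipeline_config({
//     vertex: "sprite.vert",
//     fragment: "sprite.frag",
//     blend: alpha_blend_state,
//     label: "sprite-alpha"
//   })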

var gameactor

var images = {}

var renderer_commands = []

///// input /////
var input_cb
var input_rate = 1/60
function poll_input() {
  var evs = input.get_events()

  // Filter and transform events
  if (isa(evs, array)) {
    var filteredEvents = []
    // var wantMouse = imgui.wantmouse()
    // var wantKeys = imgui.wantkeys()
    var wantMouse = false
    var wantKeys = false

    for (var i = 0; i < evs.length; i++) {
      var event = evs[i]
      var shouldInclude = true

      // Filter mouse events if ImGui wants mouse input
      if (wantMouse && (event.type == 'mouse_motion' ||
          event.type == 'mouse_button_down' ||
          event.type == 'mouse_button_up' ||
          event.type == 'mouse_wheel')) {
        shouldInclude = false
      }

      // Filter keyboard events if ImGui wants keyboard input
      if (wantKeys && (event.type == 'key_down' ||
          event.type == 'key_up' ||
          event.type == 'text_input' ||
          event.type == 'text_editing')) {
        shouldInclude = false
      }

      if (shouldInclude) {
        if (event.type == 'window_pixel_size_changed') {
          win_size.width = event.width
          win_size.height = event.height
        }

        if (event.type == 'quit')
          $stop()

        if (event.type.includes('key')) {
          if (event.key)
            event.key = input.keyname(event.key)
        }

        if (event.type.startsWith('mouse_') && event.pos && event.pos.y) {
          // Flip to a bottom-left origin and normalize to [0,1]
          event.pos.y = -event.pos.y + win_size.height
          event.pos.y /= win_size.height
          event.pos.x /= win_size.width
        }

        filteredEvents.push(event)
      }
    }

    evs = filteredEvents
  }

  input_cb(evs)
  $delay(poll_input, input_rate)
}

prosperon.input = function(fn)
{
  input_cb = fn
  poll_input()
}
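
// Game code hands its event handler to prosperon.input once; poll_input then
// re-schedules itself every input_rate seconds. Illustrative callback (event
// shapes come from input.get_events() after the filtering above; key names
// are whatever input.keyname() returns):
//
//   prosperon.input(function(events) {
//     for (var ev of events)
//       if (ev.type == 'key_down')
//         log.console(ev.key)
//   })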

var sprite_pipeline = {
  vertex: "sprite.vert",
  fragment: "sprite.frag",
  cull: "none",
  target: {
    color_targets: [
      {format: device.swapchain_format(window), blend: alpha_blend_state}
    ],
  },
  vertex_buffer_descriptions: [
    {slot: 0, input_rate: "vertex", instance_step_rate: 0, pitch: 8},
    {slot: 1, input_rate: "vertex", instance_step_rate: 0, pitch: 8},
    {slot: 2, input_rate: "vertex", instance_step_rate: 0, pitch: 16}
  ],
  vertex_attributes: [
    {location: 0, buffer_slot: 0, format: "float2", offset: 0},
    {location: 1, buffer_slot: 1, format: "float2", offset: 0},
    {location: 2, buffer_slot: 2, format: "float4", offset: 0}
  ],
  primitive: "triangle",
  blend: alpha_blend_state
}
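
// The sprite pipeline consumes three separate vertex streams, matching the
// three blobs filled by render_geom below: slot 0 = position (float2, 8-byte
// pitch), slot 1 = uv (float2, 8 bytes), slot 2 = color (float4, 16 bytes).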

var GPU = "prosperon:gpu"

var cur_cam
var cmd_fns = {}
cmd_fns.scissor = function(cmd)
{
  draw_queue.push(cmd)
}

cmd_fns.camera = function(cmd)
{
  if (cmd.camera.surface && !cmd.camera.surface[GPU]) {
    cmd.camera.surface[GPU] = new sdl_gpu.texture(device, cmd.camera.surface)
    // Don't store sampler on texture - samplers belong to materials
  }
  draw_queue.push(cmd)
}

var new_tex = []

function get_img_gpu(surface)
{
  if (!surface) return

  var full_mip = number.floor(math.log2(number.max(surface.width, surface.height))) + 1
  var gpu = new sdl_gpu.texture(device, {
    width: surface.width,
    height: surface.height,
    layers: 1,
    mip_levels: full_mip,
    samples: 0,
    type: "2d",
    format: "rgba8",
    sampler: surface.sampler != null ? surface.sampler : default_sampler,
    color_target: true
  })

  var tbuf = new sdl_gpu.transfer_buffer(device, {
    size: surface.pixels.length/8,
    usage: "upload"
  })

  tbuf.copy_blob(device, surface.pixels)

  copy_pass.upload_to_texture({
    transfer_buffer: tbuf,
    offset: 0,
    pixels_per_row: surface.width,
    rows_per_layer: surface.height,
  }, {
    texture: gpu,
    mip_level: 0,
    layer: 0,
    x: 0, y: 0, z: 0,
    w: surface.width,
    h: surface.height,
    d: 1
  }, false);

  if (full_mip > 1)
    new_tex.push(gpu)

  return gpu
}
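
// get_img_gpu records its upload on the module-level copy_pass, so it must
// only be called while create_batch has a copy pass open. Textures with more
// than one mip level are queued in new_tex and get their mip chain generated
// later in the same command buffer via render_queue.generate_mipmaps().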

var pos_blob
var uv_blob
var color_blob
var index_blob

var draw_queue = []
var index_count = 0
var vertex_count = 0

function render_geom(geom, img, pipeline = get_pipeline_for_material(null), material = null)
{
  if (!img[GPU]) {
    if (img.surface)
      img[GPU] = get_img_gpu(img.surface)
    else
      img[GPU] = get_img_gpu(img.cpu)

    if (!img[GPU]) return
  }

  pos_blob.write_blob(geom.xy)
  uv_blob.write_blob(geom.uv)
  color_blob.write_blob(geom.color)
  index_blob.write_blob(geom.indices)

  draw_queue.push({
    pipeline,
    texture: img[GPU],
    material: material,
    num_indices: geom.num_indices,
    first_index: index_count,
    vertex_offset: vertex_count
  })

  // xy blob length is in bits; /8 gives bytes, /8 again gives float2 vertices
  vertex_count += (geom.xy.length/8) / 8
  index_count += geom.num_indices
}
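
// render_geom is the funnel every cmd_fns handler ends in: geometry is
// appended to the shared position/uv/color/index blobs, and the queued draw
// only records offsets (first_index, vertex_offset) into the single set of
// GPU buffers that create_batch uploads once per batch.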

cmd_fns.draw_image = function(cmd)
{
  var img
  if (typeof cmd.image == 'string')
    img = graphics.texture(cmd.image)
  else
    img = cmd.image

  if (cmd.rect.width && !cmd.rect.height)
    cmd.rect.height = cmd.rect.width * img.height / img.width
  else if (cmd.rect.height && !cmd.rect.width)
    cmd.rect.width = cmd.rect.height * img.width / img.height
  else if (!cmd.rect.height && !cmd.rect.width) {
    cmd.rect.width = img.width
    cmd.rect.height = img.height
  }

  var geom = geometry.make_rect_quad(cmd.rect)
  geom.indices = geometry.make_quad_indices(1)
  geom.num_indices = 6

  // Ensure material has diffuse property for dynamic binding
  if (!cmd.material) cmd.material = {}
  if (!cmd.material.diffuse) cmd.material.diffuse = img

  var pipeline = get_pipeline_for_material(cmd.material)
  render_geom(geom, img, pipeline, cmd.material)
}

cmd_fns.draw_text = function(cmd)
{
  if (!cmd.text || !cmd.pos) return

  var font = graphics.get_font(cmd.font)
  if (!font[GPU])
    font[GPU] = get_img_gpu(font.surface)

  var size = font.text_size(cmd.text, cmd.wrap, cmd.config.break, cmd.config.align)
  cmd.pos.width ??= size[0]
  cmd.pos.height ??= size[1]

  // Ensure material exists (with a diffuse property for dynamic binding)
  // before its color is read below
  if (!cmd.material) cmd.material = {}
  if (!cmd.material.diffuse) cmd.material.diffuse = font
  var color = cmd.material.color || {r: 1, g: 1, b: 1, a: 1}

  var mesh = font.make_text_buffer(
    cmd.text,
    cmd.pos,
    [color.r, color.g, color.b, color.a],
    cmd.wrap || 0,
    cmd.config.break,
    cmd.config.align
  )

  var pipeline = get_pipeline_for_material(cmd.material)
  render_geom(mesh, font, pipeline, cmd.material)
}

cmd_fns.tilemap = function(cmd)
{
  var geometryCommands = cmd.tilemap.draw()

  for (var geomCmd of geometryCommands) {
    var img = graphics.texture(geomCmd.image)
    if (!img) continue

    // Create a new material for each tile image with diffuse property
    var tileMaterial = object(cmd.material || {})
    tileMaterial.diffuse = img

    var pipeline = get_pipeline_for_material(tileMaterial)
    render_geom(geomCmd.geometry, img, pipeline, tileMaterial)
  }
}

cmd_fns.geometry = function(cmd)
{
  var img
  if (typeof cmd.image == 'object') {
    img = cmd.image
  } else {
    if (!cmd.image) return
    img = graphics.texture(cmd.image)
    if (!img) return
  }

  // Ensure material has diffuse property for dynamic binding
  if (!cmd.material) cmd.material = {}
  if (!cmd.material.diffuse) cmd.material.diffuse = img

  var pipeline = get_pipeline_for_material(cmd.material)
  render_geom(cmd.geometry, img, pipeline, cmd.material)
}

cmd_fns.draw_slice9 = function(cmd)
{
  var img = graphics.texture(cmd.image)
  if (!img) return

  var slice_info = {
    tile_top: true,
    tile_bottom: true,
    tile_left: true,
    tile_right: true,
    tile_center_x: true,
    tile_center_y: true
  }

  // Convert single slice value to LRTB object if needed
  var slice_lrtb = cmd.slice
  if (typeof cmd.slice == 'number') {
    var slice_val = cmd.slice
    if (slice_val > 0 && slice_val < 1) {
      // Fractional values are treated as a percentage of the image size
      slice_lrtb = {
        l: slice_val * img.width,
        r: slice_val * img.width,
        t: slice_val * img.height,
        b: slice_val * img.height
      }
    } else {
      // Values >= 1 are absolute pixel borders
      slice_lrtb = {
        l: slice_val,
        r: slice_val,
        t: slice_val,
        b: slice_val
      }
    }
  } else {
    // Handle percentage values for each side individually
    slice_lrtb = {
      l: (cmd.slice.l > 0 && cmd.slice.l < 1) ? cmd.slice.l * img.width : cmd.slice.l,
      r: (cmd.slice.r > 0 && cmd.slice.r < 1) ? cmd.slice.r * img.width : cmd.slice.r,
      t: (cmd.slice.t > 0 && cmd.slice.t < 1) ? cmd.slice.t * img.height : cmd.slice.t,
      b: (cmd.slice.b > 0 && cmd.slice.b < 1) ? cmd.slice.b * img.height : cmd.slice.b
    }
  }

  var mesh = geometry.slice9(img, cmd.rect, slice_lrtb, slice_info)

  // Ensure material has diffuse property for dynamic binding
  if (!cmd.material) cmd.material = {}
  if (!cmd.material.diffuse) cmd.material.diffuse = img

  var pipeline = get_pipeline_for_material(cmd.material)
  render_geom(mesh, img, pipeline, cmd.material)
}

cmd_fns.draw_rect = function(cmd)
{
  if (!cmd.material) cmd.material = {}

  // Create geometry for a rectangle quad
  var geom = geometry.make_rect_quad(cmd.rect, null, cmd.material.color)
  geom.indices = geometry.make_quad_indices(1)
  geom.num_indices = 6

  // Use white_pixel as the texture so the color modulation works
  if (!white_pixel[GPU])
    white_pixel[GPU] = get_img_gpu(white_pixel)

  var pipeline = get_pipeline_for_material(cmd.material)
  render_geom(geom, {[GPU]: white_pixel[GPU]}, pipeline, cmd.material)
}

var copy_pass

prosperon.create_batch = function create_batch(draw_cmds, done) {
  pos_blob = new blob
  uv_blob = new blob
  color_blob = new blob
  index_blob = new blob
  draw_queue = []
  index_count = 0
  vertex_count = 0
  new_tex = []

  var render_queue = device.acquire_cmd_buffer()
  copy_pass = render_queue.copy_pass()

  for (var cmd of draw_cmds)
    if (cmd_fns[cmd.cmd])
      cmd_fns[cmd.cmd](cmd)

  var pos_buffer = new sdl_gpu.buffer(device, {vertex: true, size: pos_blob.length/8});
  var uv_buffer = new sdl_gpu.buffer(device, {vertex: true, size: uv_blob.length/8});
  var color_buffer = new sdl_gpu.buffer(device, {vertex: true, size: color_blob.length/8});
  var index_buffer = new sdl_gpu.buffer(device, {index: true, size: index_blob.length/8});

  upload(copy_pass, pos_buffer, pos_blob)
  upload(copy_pass, uv_buffer, uv_blob)
  upload(copy_pass, color_buffer, color_blob)
  upload(copy_pass, index_buffer, index_blob)

  copy_pass.end();
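
  // At this point every cmd_fns handler has run: the shared vertex/index
  // blobs are staged and the copy pass is closed before any render pass
  // begins.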

  // imgui.prepare(render_queue)

  for (var g of new_tex)
    render_queue.generate_mipmaps(g)

  var render_pass
  var render_target

  // State tracking for optimization
  var current_pipeline = null
  var buffers_bound = false

  for (var cmd of draw_queue) {
    if (cmd.cmd == "scissor") {
      if (!cmd.rect)
        render_pass.scissor({x: 0, y: 0, width: win_size.width, height: win_size.height})
      else
        render_pass.scissor(cmd.rect)

      continue
    }
    if (cmd.camera) {
      if (!cmd.camera.surface && render_target != "swap") {
        if (render_pass)
          render_pass.end()
        render_target = "swap"
        render_pass = render_queue.swapchain_pass(window)
        current_pipeline = null
        buffers_bound = false
      } else if (cmd.camera.surface && render_target != cmd.camera.surface) {
        if (render_pass)
          render_pass.end()
        render_target = cmd.camera.surface
        render_pass = render_queue.render_pass({
          color_targets: [{
            texture: cmd.camera.surface[GPU],
            mip_level: 0,
            layer: 0,
            load: "clear",
            clear_color: cmd.camera.background,
            store: "store",
          }]
        })
        // Reset state tracking when render pass changes
        current_pipeline = null
        buffers_bound = false
      }

      var vpW, vpH

      if (render_target == "swap") {
        vpW = win_size.width
        vpH = win_size.height
      } else {
        vpW = render_target.width
        vpH = render_target.height
      }

      render_pass.viewport({
        x: cmd.camera.viewport.x * vpW,
        y: cmd.camera.viewport.y * vpH,
        width: cmd.camera.viewport.width * vpW,
        height: cmd.camera.viewport.height * vpH
      })

      // Store current camera for use by subsequent draw commands
      cur_cam = cmd.camera
      continue
    }

    // Only bind pipeline if it changed
    if (current_pipeline != cmd.pipeline) {
      render_pass.bind_pipeline(cmd.pipeline)
      current_pipeline = cmd.pipeline
      // When pipeline changes, we need to rebind buffers and uniforms
      buffers_bound = false
    }

    // Dynamic material binding - bind uniforms and textures from material
    if (cmd.material && cmd.pipeline._reflection) {
      def refl = cmd.pipeline._reflection;

      // Bind UBOs (uniform buffer objects) for both vertex and fragment stages
      var all_ubos = [];

      // Collect UBOs from both vertex and fragment shaders
      if (refl.vertex && refl.vertex.ubos) {
        for (def ubo of refl.vertex.ubos) {
          all_ubos.push({ubo: ubo, stage: 'vertex', reflection: refl.vertex});
        }
      }
      if (refl.fragment && refl.fragment.ubos) {
        for (def ubo of refl.fragment.ubos) {
          all_ubos.push({ubo: ubo, stage: 'fragment', reflection: refl.fragment});
        }
      }

      // Process all UBOs
      for (def ubo_info of all_ubos) {
        def ubo = ubo_info.ubo;
        def stage = ubo_info.stage;
        def stage_refl = ubo_info.reflection;
        def name = ubo.name;
        def ubo_type = ubo.type;
        def binding = ubo.binding;

        var packed_blob;

        // Handle special UBO types
        if (ubo_type == "_8" && name == "type.TransformBuffer") {
          // Pack camera/transform data
          packed_blob = pack_transform_buffer(cmd.camera || cur_cam);
        } else if (ubo_type == "_10" && name == "type.model") {
          // Pack model matrix and color
          packed_blob = pack_model_buffer(cmd.material);
        } else {
          // Use generic UBO packing for other types
          packed_blob = pack_ubo(cmd.material, ubo_type, stage_refl);
        }

        if (packed_blob && packed_blob.length > 0) {
          if (stage == 'vertex') {
            render_queue.push_vertex_uniform_data(binding, packed_blob);
          } else {
            render_queue.push_fragment_uniform_data(binding, packed_blob);
          }
        }
      }

      // Bind textures for any separate_images
      if (refl.fragment && refl.fragment.separate_images) {
        for (def imgDesc of refl.fragment.separate_images) {
          def name = imgDesc.name;
          def binding = imgDesc.binding;
          def img = cmd.material[name];
          if (img) {
            // Ensure texture is on GPU
            if (!img[GPU]) {
              if (img.surface) {
                img[GPU] = get_img_gpu(img.surface);
              } else if (img.cpu) {
                img[GPU] = get_img_gpu(img.cpu);
              }
            }

            if (img[GPU]) {
              // Use material's sampler or default_sampler
              def sampler_desc = cmd.material.sampler || default_sampler;
              render_pass.bind_samplers(false, binding, [{
                texture: img[GPU],
                sampler: get_sampler(sampler_desc)
              }]);
            }
          }
        }
      }
    }

    // Only bind buffers if not already bound or pipeline changed
    if (!buffers_bound) {
      render_pass.bind_buffers(0, [
        { buffer: pos_buffer, offset: 0 },
        { buffer: uv_buffer, offset: 0 },
        { buffer: color_buffer, offset: 0 }
      ])

      render_pass.bind_index_buffer(
        { buffer: index_buffer, offset: 0 }, // the binding itself is in bytes
        16 // index element size in bits (Uint16 indices)
      );
      buffers_bound = true
    }

    // Always bind the diffuse texture at slot 0 with the material's sampler
    if (cmd.texture) {
      // Use material's sampler if specified, otherwise use default_sampler
      var sampler_desc = (cmd.material && cmd.material.sampler)
        ? cmd.material.sampler
        : default_sampler

      var sampler_obj = get_sampler(sampler_desc)
      render_pass.bind_samplers(false, 0, [{texture: cmd.texture, sampler: sampler_obj}])
    }

    render_pass.draw_indexed(
      cmd.num_indices,
      1,
      cmd.first_index,
      cmd.vertex_offset,
      0
    )
  }

  // imgui.endframe(render_queue, render_pass)
  render_pass.end()

  render_queue.submit()

  if (done) done()
}
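
// create_batch is the per-batch entry point: it stages geometry in a copy
// pass, then replays draw_queue into one or more render passes, switching
// passes whenever the camera target changes. A frame driver would call it with
// a command list shaped like the cmd_fns keys above, e.g. (illustrative
// values, "logo.png" is just an example asset name):
//
//   prosperon.create_batch([
//     {cmd: "camera", camera: {pos: [0,0], anchor: [0.5,0.5], zoom: 360,
//                              viewport: {x: 0, y: 0, width: 1, height: 1}}},
//     {cmd: "draw_image", image: "logo.png", rect: {x: 0, y: 0, width: 128}},
//   ], function() { log.console("frame submitted") })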

var shop = use('util')
////////// dmon hot reload ////////
function poll_file_changes() {
  dmon.poll(e => {
    if (e.action == 'modify' || e.action == 'create') {
      // Check if it's an image file
      var ext = e.file.split('.').pop().toLowerCase()
      var imageExts = ['png', 'jpg', 'jpeg', 'gif', 'bmp', 'tga', 'webp', 'qoi', 'ase', 'aseprite']

      if (ext == 'cm') {
        log.console(e.file)
        shop.file_reload(e.file)
      }

      if (imageExts.includes(ext)) {
        // Try to find the full path for this image
        var possiblePaths = [
          e.file,
          e.root + e.file,
          res.find_image(e.file.split('/').pop().split('.')[0])
        ].filter(p => p)

        for (var path of possiblePaths) {
          graphics.tex_hotreload(path)
        }
      }
    }
  })

  $delay(poll_file_changes, 0.5)
}

var dmon = use('dmon/dmon')
prosperon.dmon = function()
{
  if (!dmon) return
  dmon.watch('.')
  poll_file_changes()
}
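
// Hot reload is opt-in: game code calls prosperon.dmon() once to start
// watching the working directory; modified '.cm' scripts go through
// shop.file_reload and changed images through graphics.tex_hotreload, polled
// every half second.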

var window_cmds = {
  size(size) {
    window.size = size
  },
}

prosperon.set_window = function(config)
{
  for (var c in config)
    if (window_cmds[c]) window_cmds[c](config[c])
}

return prosperon