rectangle shading

This commit is contained in:
2025-01-09 19:31:08 -06:00
parent b698b1862e
commit 4a7543fa68
18 changed files with 239 additions and 166 deletions

View File

@@ -20,6 +20,7 @@ function full_upload(buffers)
function queue_sprite_mesh(queue)
{
var sprites = queue.filter(x => x.type === 'sprite');
if (sprites.length === 0) return [];
var mesh = render._main.make_sprite_mesh(sprites);
for (var i = 0; i < sprites.length; i++) {
sprites[i].mesh = mesh;
@@ -111,6 +112,12 @@ sprite_pipeline.blend = {
op_alpha: "add"
};
var rect_pipeline = Object.create(sprite_pipeline)
rect_pipeline.fragment = "rectangle.frag"
var line_pipeline = Object.create(rect_pipeline);
line_pipeline.primitive = "line"
var dbgline_pipeline = Object.create(base_pipeline);
dbgline_pipeline.vertex = "dbgline.vert.hlsl"
dbgline_pipeline.fragment = "dbgline.frag.hlsl"
@@ -124,7 +131,7 @@ post_camera.size = [640,360];
post_camera.mode = 'keep';
post_camera.viewport = {x:0,y:0,width:1,height:1}
post_camera.fov = 45;
post_camera.type = 'ortho';
post_camera.ortho = true;
post_camera.aspect = 16/9;
function get_pipeline_ubo_slot(pipeline, name)
@@ -227,7 +234,7 @@ render.hotreload = function shader_hotreload(file) {
};
function make_pipeline(pipeline) {
if (pipeline.gpu) return; // this pipeline has already been made
if (pipeline.hasOwnProperty("gpu")) return; // this pipeline has already been made
if (typeof pipeline.vertex === 'string')
pipeline.vertex = make_shader(pipeline.vertex);
@@ -302,9 +309,8 @@ var shader_type;
function make_shader(sh_file) {
var file = `shaders/${shader_type}/${sh_file}.${shader_type}`
var refl = json.decode(io.slurp(`shaders/reflection/${sh_file}.json`))
if (shader_cache[file]) return shader_cache[file]
var refl = json.decode(io.slurp(`shaders/reflection/${sh_file}.json`))
var shader = {
code: io.slurpbytes(file),
@@ -358,6 +364,9 @@ render.device = {
};
var render_queue = [];
var hud_queue = [];
var current_queue = render_queue;
render.device.doc = `Device resolutions given as [x,y,inches diagonal].`;
var std_sampler = {
@@ -495,11 +504,15 @@ function render_camera(cmds, camera)
};
}
if (render_queue.length == 0) return;
var buffers = queue_sprite_mesh(render_queue);
for (var q of render_queue) {
var buffers = [];
buffers = buffers.concat(queue_sprite_mesh(render_queue));
for (var q of render_queue)
if (q.type === 'geometry') buffers = buffers.concat([q.mesh.pos, q.mesh.color,q.mesh.uv,q.mesh.indices]);
}
buffers = buffers.concat(queue_sprite_mesh(hud_queue));
for (var q of hud_queue)
if (q.type === 'geometry') buffers = buffers.concat([q.mesh.pos, q.mesh.color,q.mesh.uv,q.mesh.indices]);
full_upload(buffers)
var pass = cmds.render_pass(camera.target);
@@ -517,7 +530,7 @@ function render_camera(cmds, camera)
var camslot = get_pipeline_ubo_slot(pipeline, 'TransformBuffer');
if (typeof camslot !== 'undefined')
cmds.camera(camera, pass, undefined, camslot);
cmds.camera(camera, camslot);
modelslot = get_pipeline_ubo_slot(pipeline, "model");
if (typeof modelslot !== 'undefined') {
@@ -531,7 +544,37 @@ function render_camera(cmds, camera)
bind_model(pass,pipeline,mesh);
}
if (img != group.image) {
if (group.image && img != group.image) {
img = group.image;
img.sampler = std_sampler;
bind_mat(pass,pipeline,{diffuse:img});
}
pass.draw_indexed(group.num_indices, 1, group.first_index, 0, 0);
}
for (var group of hud_queue) {
if (pipeline != group.pipeline) {
pipeline = group.pipeline;
bind_pipeline(pass, pipeline);
var camslot = get_pipeline_ubo_slot(pipeline, 'TransformBuffer');
if (typeof camslot !== 'undefined')
cmds.hud(camera.size, camslot);
modelslot = get_pipeline_ubo_slot(pipeline, "model");
if (typeof modelslot !== 'undefined') {
var ubo = ubo_obj_to_array(pipeline, 'model', sprite_model_ubo);
cmds.push_vertex_uniform_data(modelslot, ubo);
}
}
if (mesh != group.mesh) {
mesh = group.mesh;
bind_model(pass,pipeline,mesh);
}
if (group.image && img != group.image) {
img = group.image;
img.sampler = std_sampler;
bind_mat(pass,pipeline,{diffuse:img});
@@ -597,7 +640,7 @@ function gpupresent()
cmds.cancel();
else {
var mode = prosperon.camera.presentation || "letterbox"
var src_rect = {x:0,y:0,width:640,height:360}
var src_rect = {x:0,y:0,width:prosperon.camera.size.x,height:prosperon.camera.size.y}
var dst_rect = {x:0,y:0,width:swapchain_tex.width,height:swapchain_tex.height};
var torect = mode_rect(src_rect,dst_rect,mode);
torect.texture = swapchain_tex;
@@ -713,7 +756,8 @@ render.poly = function render_poly(points, color, transform) {
};
// render.line has uv and can be texture mapped; dbg_line is hardware standard lines
render.line = function render_line(points, color = Color.white, thickness = 1, pipe = base_pipeline) {
render.line = function render_line(points, color = Color.white, thickness = 1, pipeline = line_pipeline) {
var mesh = os.make_line_prim(points);
// render._main.line(points, color);
};
@@ -732,14 +776,14 @@ render.point = function (pos, size, color = Color.blue) {
render._main.point(pos,color);
};
render.cross = function render_cross(pos, size, color = Color.red, thickness = 1, pipe = base_pipeline) {
render.cross = function render_cross(pos, size, color = Color.red, thickness = 1, pipe = sprite_pipeline) {
var a = [pos.add([0, size]), pos.add([0, -size])];
var b = [pos.add([size, 0]), pos.add([-size, 0])];
render.line(a, color, thickness);
render.line(b, color, thickness);
};
render.arrow = function render_arrow(start, end, color = Color.red, wingspan = 4, wingangle = 10, pipe = base_pipeline) {
render.arrow = function render_arrow(start, end, color = Color.red, wingspan = 4, wingangle = 10, pipe = sprite_pipeline) {
var dir = end.sub(start).normalized();
var wing1 = [Vector.rotate(dir, wingangle).scale(wingspan).add(end), end];
var wing2 = [Vector.rotate(dir, -wingangle).scale(wingspan).add(end), end];
@@ -755,8 +799,15 @@ render.coordinate = function render_coordinate(pos, size, color) {
var queued_shader;
var queued_pipe;
render.rectangle = function render_rectangle(rect, color = Color.white, pipe = base_pipeline) {
render._main.fillrect(rect,color);
render.rectangle = function render_rectangle(rect, color = Color.white, pipeline = rect_pipeline) {
var T = os.make_transform();
T.rect(rect);
current_queue.push({
type:'sprite',
transform:T,
color,
pipeline
});
};
render.text = function text(text, rect, font = prosperon.font, size = 0, color = Color.white, wrap = 0, pipeline = sprite_pipeline) {
@@ -765,7 +816,7 @@ render.text = function text(text, rect, font = prosperon.font, size = 0, color =
var mesh = os.make_text_buffer(text, rect, 0, color, wrap, font);
// full_upload(mesh)
render_queue.push({
current_queue.push({
type: 'geometry',
mesh: mesh,
image: font,
@@ -894,12 +945,12 @@ render.image = function image(image, rect = [0,0], rotation = 0, color = Color.w
rect.height ??= image.texture.height;
var T = os.make_transform();
T.rect(rect);
render_queue.push({
current_queue.push({
type: 'sprite',
transform: T,
color: color,
image:image,
pipeline: pipeline
image,
pipeline,
color
});
};
@@ -1019,17 +1070,6 @@ prosperon.make_camera = function make_camera() {
cam.screen2cam = screen2cam;
cam.screen2hud = screen2hud;
cam.zoom = 1; // the "scale factor" this camera demonstrates
// camera renders draw calls, and then hud
cam.render = function() {
prosperon.draw();
draw_sprites();
render._main.camera(this.transform,true);
render._main.scale([this.zoom, this.zoom]);
prosperon.hud();
render._main.scale([1,1]);
render._main.camera(unit_transform,false);
}
return cam;
};
@@ -1136,34 +1176,6 @@ var imgui_fn = function imgui_fn() {
*/
prosperon.imgui();
// imgui.endframe(render._main);
};
// figure out the highest resolution we can render at that's an integer
/* var basesize = prosperon.camera.size.slice();
var baseview = prosperon.camera.view();
var wh = [baseview[2]-baseview[0], baseview[3]-baseview[1]];
var mult = 1;
var trysize = basesize.scale(mult);
while (trysize.x <= wh.x && trysize.y <= wh.y) {
mult++;
trysize = basesize.scale(mult);
}
if (Math.abs(wh.x - basesize.scale(mult-1).x) < Math.abs(wh.x - trysize.x))
mult--;
prosperon.window_render(basesize.scale(mult));
*/
prosperon.render = function prosperon_render() {
try{
try {
imgui_fn();
} catch(e) { console.error(e) }
} catch(e) {
console.error(e)
} finally {
render._main.present();
}
};
//if (dmon) dmon.watch('.');
@@ -1222,8 +1234,14 @@ try {
*/
}
try { prosperon.draw(); } catch(e) {console.error(e)}
prosperon.render();
current_queue = render_queue;
try { prosperon.draw(); } catch(e) { console.error(e) }
current_queue = hud_queue;
try { prosperon.hud(); } catch(e) { console.error(e) }
try { imgui_fn(); } catch(e) { console.error(e) }
render._main.present();
} catch(e) {
console.error(e)
}

View File

@@ -1,15 +1,21 @@
#!/usr/bin/env bash
# Ensure directories exist
mkdir -p spirv
mkdir -p spv
mkdir -p msl
mkdir -p dxil
mkdir -p reflection
# Vertex shaders
for filename in *.vert.hlsl; do
if [ -f "$filename" ]; then echo "compiling $filename"
# Produce SPIR-V
dxc -spirv -T vs_6_0 -Fo "spv/${filename/.hlsl/.spv}" "$filename"
# Produce DXIL
dxc -T vs_6_0 -Fo "dxil/${filename/.hlsl/.dxil}" "$filename"
# Convert SPIR-V to Metal Shader Language
spirv-cross "spv/${filename/.hlsl/.spv}" --msl > "msl/${filename/.hlsl/.msl}"
# Generate reflection
spirv-cross "spv/${filename/.hlsl/.spv}" --reflect > "reflection/${filename/.hlsl/.json}"
fi
done
@@ -17,8 +23,13 @@ done
# Fragment shaders
for filename in *.frag.hlsl; do
if [ -f "$filename" ]; then echo "compiling $filename"
# Produce SPIR-V
dxc -spirv -T ps_6_0 -Fo "spv/${filename/.hlsl/.spv}" "$filename"
# Produce DXIL
dxc -T ps_6_0 -Fo "dxil/${filename/.hlsl/.dxil}" "$filename"
# Convert SPIR-V to Metal Shader Language
spirv-cross "spv/${filename/.hlsl/.spv}" --msl > "msl/${filename/.hlsl/.msl}"
# Generate reflection
spirv-cross "spv/${filename/.hlsl/.spv}" --reflect > "reflection/${filename/.hlsl/.json}"
fi
done
@@ -26,8 +37,13 @@ done
# Compute shaders
for filename in *.comp.hlsl; do
if [ -f "$filename" ]; then echo "compiling $filename"
# Produce SPIR-V
dxc -spirv -T cs_6_0 -Fo "spv/${filename/.hlsl/.spv}" "$filename"
# Produce DXIL
dxc -T cs_6_0 -Fo "dxil/${filename/.hlsl/.dxil}" "$filename"
# Convert SPIR-V to Metal Shader Language
spirv-cross "spv/${filename/.hlsl/.spv}" --msl > "msl/${filename/.hlsl/.msl}"
# Generate reflection
spirv-cross "spv/${filename/.hlsl/.spv}" --reflect > "reflection/${filename/.hlsl/.json}"
fi
done

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,22 @@
#include <metal_stdlib>
#include <simd/simd.h>
using namespace metal;
struct main0_out
{
float4 out_var_SV_TARGET [[color(0)]];
};
struct main0_in
{
float4 in_var_COLOR0 [[user(locn1)]];
};
fragment main0_out main0(main0_in in [[stage_in]])
{
main0_out out = {};
out.out_var_SV_TARGET = in.in_var_COLOR0;
return out;
}

View File

@@ -0,0 +1,7 @@
#include "common/pixel.hlsl"
// Pixel shader main function
float4 main(PSInput input) : SV_TARGET
{
return input.color;
}

View File

@@ -0,0 +1,22 @@
{
"entryPoints" : [
{
"name" : "main",
"mode" : "frag"
}
],
"inputs" : [
{
"type" : "vec4",
"name" : "in.var.COLOR0",
"location" : 1
}
],
"outputs" : [
{
"type" : "vec4",
"name" : "out.var.SV_TARGET",
"location" : 0
}
]
}

35
shaders/sdf.hlsl Normal file
View File

@@ -0,0 +1,35 @@
struct SDF {
float circle(vec2 p, float r)
{
return length(p) - r;
}
// p = uv point
// b = width,height
// r = roundedness of the 4 corners
float rounded_box(vec2 p, vec2 b, vec4 r)
{
r.xy = (p.x>0.0)?r.xy : r.zw;
r.x = (p.y>0.0)?r.x : r.y;
vec2 q = abs(p)-b+r.x;
return min(max(q.x,q.y),0.0) + length(max(q,0.0)) - r.x;
}
float box(vec2 p, vec2 b)
{
vec2 d = abs(p)-b;
return length(max(d,0)) + min(max(d.x,d.y),0);
}
float heart( in vec2 p )
{
p.x = abs(p.x);
if( p.y+p.x>1.0 )
return sqrt(dot2(p-vec2(0.25,0.75))) - sqrt(2.0)/4.0;
return sqrt(min(dot2(p-vec2(0.00,1.00)), dot2(p-0.5*max(p.x+p.y,0.0)))) * sign(p.x-p.y);
}
}
SDF sdf;

Binary file not shown.

View File

@@ -4545,129 +4545,81 @@ JSC_CCALL(cmd_submit,
return SDL_GPUFence2js(js,fence);
)
JSC_CCALL(cmd_hud,
SDL_GPUCommandBuffer *cmds = js2SDL_GPUCommandBuffer(js,self);
HMM_Vec2 size = js2vec2(js,argv[0]);
HMM_Mat4 proj = HMM_Orthographic_RH_NO(0,size.x,0,size.y,-1,1);
shader_globals data = {0};
data.world_to_projection = proj;
data.projection_to_world = HMM_InvGeneralM4(proj);
data.viewport_min_z = -1,
data.viewport_max_z = 1;
data.render_size = size;
data.view_to_projection = proj;
data.viewport_size = (HMM_Vec2){1,1};
data.viewport_offset = (HMM_Vec2){0,0};
data.time = SDL_GetTicksNS() / 1000000000.0f;
SDL_PushGPUVertexUniformData(cmds, js2number(js,argv[1]), &data, sizeof(data));
)
JSC_CCALL(cmd_camera,
SDL_GPUCommandBuffer *cmds = js2SDL_GPUCommandBuffer(js, self);
JSValue camera = argv[0];
SDL_GPURenderPass *pass = js2SDL_GPURenderPass(js, argv[1]);
HMM_Vec2 size;
HMM_Vec2 drawsize;
JS_PULLPROPSTR(js,argv[1],size,vec2);
drawsize = size;
transform *transform;
JS_PULLPROPSTR(js, camera, size, vec2)
JS_PULLPROPSTR(js, camera, transform, transform)
// Pull out fov and aspect if needed (e.g. for perspective)
double fov;
JS_PULLPROPSTR(js, camera, fov, number)
double aspect;
JS_PULLPROPSTR(js, camera, aspect, number)
int ortho;
double near;
double far;
JS_GETPROP(js, size, camera, size, vec2)
JS_GETPROP(js, transform, camera, transform, transform)
JS_GETPROP(js, fov, camera, fov, number)
JS_GETPROP(js, aspect, camera, aspect, number)
JS_GETPROP(js, ortho, camera,ortho,bool)
JS_GETPROP(js,near,camera,near,number)
JS_GETPROP(js,far,camera,far,number)
// Retrieve the user-specified viewport (could be normalized or exact)
JSValue jsViewport = JS_GetPropertyStr(js, camera, "viewport");
SDL_FRect cameraViewport = js2rect(js, jsViewport); // note: rect is typedef SDL_FRect
JS_FreeValue(js, jsViewport);
// Determine if we're in normalized or exact mode.
// If any dimension is > 1, treat them all as pixels (and clamp).
bool isNormalized = (cameraViewport.x <= 1.0f && cameraViewport.y <= 1.0f &&
cameraViewport.w <= 1.0f && cameraViewport.h <= 1.0f);
if (isNormalized) {
// Convert fraction -> pixels
cameraViewport.x *= drawsize.x;
cameraViewport.y *= drawsize.y;
cameraViewport.w *= drawsize.x;
cameraViewport.h *= drawsize.y;
} else {
// Clamp pixel coords to window bounds
if (cameraViewport.x < 0) cameraViewport.x = 0;
if (cameraViewport.y < 0) cameraViewport.y = 0;
if (cameraViewport.x + cameraViewport.w > drawsize.x) cameraViewport.w = drawsize.x - cameraViewport.x;
if (cameraViewport.y + cameraViewport.h > drawsize.y) cameraViewport.h = drawsize.y - cameraViewport.y;
}
// Get the camera mode
JSValue jsmode = JS_GetPropertyStr(js, camera, "mode");
char *mode = JS_ToCString(js, jsmode);
JS_FreeValue(js, jsmode);
// finalVP: the actual region in which we'll render this camera
SDL_FRect finalVP = cameraViewport;
// Apply the mode to figure out how to place/scale 'size' in finalVP
// “stretch” = fill entire finalVP
// “fit” = scale down so entire size is visible, preserving aspect
// “cover” = fill the entire region, preserving aspect (crop if needed)
“keep” (or unknown) = no scaling; just center the camera's nominal size
if (!strcmp(mode, "stretch")) {
finalVP.w = cameraViewport.w;
finalVP.h = cameraViewport.h;
finalVP.x = cameraViewport.x;
finalVP.y = cameraViewport.y;
} else if (!strcmp(mode, "fit")) {
float scale = fminf(cameraViewport.w / size.x, cameraViewport.h / size.y);
finalVP.w = size.x * scale;
finalVP.h = size.y * scale;
finalVP.x = cameraViewport.x + (cameraViewport.w - finalVP.w) * 0.5f;
finalVP.y = cameraViewport.y + (cameraViewport.h - finalVP.h) * 0.5f;
} else if (!strcmp(mode, "cover")) {
float scale = fmaxf(cameraViewport.w / size.x, cameraViewport.h / size.y);
finalVP.w = size.x * scale;
finalVP.h = size.y * scale;
finalVP.x = cameraViewport.x + (cameraViewport.w - finalVP.w) * 0.5f;
finalVP.y = cameraViewport.y + (cameraViewport.h - finalVP.h) * 0.5f;
} else {
// "keep": do not scale
finalVP.w = size.x;
finalVP.h = size.y;
finalVP.x = cameraViewport.x + (cameraViewport.w - finalVP.w) * 0.5f;
finalVP.y = cameraViewport.y + (cameraViewport.h - finalVP.h) * 0.5f;
}
JS_FreeCString(js, mode);
// Build an orthographic projection
// - If pass is defined, treat (0,0) as screen center.
// - Otherwise, treat (0,0) as bottom-left corner.
HMM_Mat4 proj;
if (!JS_IsUndefined(argv[2])) {
if (ortho)
proj = HMM_Orthographic_RH_NO(
-finalVP.w * 0.5f, finalVP.w * 0.5f,
-finalVP.h * 0.5f, finalVP.h * 0.5f,
-size.x*0.5, 0.5*size.x,
-size.y*0.5, 0.5*size.y,
-1.0f, 1.0f
);
} else {
proj = HMM_Orthographic_RH_NO(
0.0f, finalVP.w,
0.0f, finalVP.h,
-1.0f, 1.0f
);
}
else
proj = HMM_Perspective_RH_NO(fov, aspect,near,far);
// Build the view matrix (translate by -camera.pos)
HMM_Mat4 view = HMM_Translate((HMM_Vec3){ -transform->pos.x, -transform->pos.y, 0.0f });
HMM_Mat4 view;
if (ortho)
view = HMM_Translate((HMM_Vec3){ -transform->pos.x, -transform->pos.y, 0.0f });
else {
HMM_Mat4 camera_transform = HMM_Translate(transform->pos);
camera_transform = HMM_MulM4(camera_transform, HMM_QToM4(transform->rotation));
// camera_transform = HMM_MulM4(camera_transform, HMM_Scale(transform->scale)); // don't bother w/ scale
view = HMM_InvGeneralM4(camera_transform);
}
// Update your shader globals
shader_globals data = {0};
data.world_to_projection = HMM_MulM4(proj, view);
data.projection_to_world = HMM_InvGeneralM4(data.world_to_projection);
data.camera_pos_world = transform->pos;
data.viewport_min_z = near;
data.viewport_max_z = far;
data.render_size = size;
data.world_to_view = view;
data.view_to_projection = proj;
data.camera_dir_world = HMM_NormV3(HMM_QVRot((HMM_Vec3){0,0,-1},transform->rotation));
data.viewport_size = (HMM_Vec2){0.5,0.5};
data.viewport_offset = (HMM_Vec2){0,0};
data.time = SDL_GetTicksNS() / 1000000000.0f;
// SDL_SetGPUViewport expects an SDL_GPUViewport (with float x,y,w,h and min/max depth)
// We'll fill that struct from finalVP, with default depth range [0..1].
SDL_GPUViewport sdlvp;
sdlvp.x = finalVP.x;
sdlvp.y = finalVP.y;
sdlvp.w = finalVP.w;
sdlvp.h = finalVP.h;
sdlvp.min_depth = 0.0f;
sdlvp.max_depth = 1.0f;
// Set the final viewport and push uniform data
// SDL_SetGPUViewport(pass, &sdlvp);
SDL_PushGPUVertexUniformData(cmds, js2number(js,argv[3]), &data, sizeof(data));
SDL_PushGPUVertexUniformData(cmds, js2number(js,argv[1]), &data, sizeof(data));
)
JSC_SCALL(cmd_push_debug_group,
@@ -4774,7 +4726,8 @@ static const JSCFunctionListEntry js_SDL_GPUCommandBuffer_funcs[] = {
MIST_FUNC_DEF(cmd, push_compute_uniform_data, 2),
MIST_FUNC_DEF(cmd, submit, 0),
MIST_FUNC_DEF(cmd, cancel, 0),
MIST_FUNC_DEF(cmd, camera, 4),
MIST_FUNC_DEF(cmd, camera, 2),
MIST_FUNC_DEF(cmd, hud, 2),
MIST_FUNC_DEF(cmd, push_debug_group, 1),
MIST_FUNC_DEF(cmd, pop_debug_group, 0),
MIST_FUNC_DEF(cmd, debug_label, 1),