more sdl gpu work

2024-12-28 17:01:53 -06:00
parent 78fb926a5e
commit 4914af8a45
4 changed files with 232 additions and 218 deletions

View File

@@ -32,4 +32,117 @@ Now that layer will draw at the -5 layer.
# CAMERAS
Everything is drawn via cameras. Cameras can draw directly to the screen, or they can draw to an offscreen render target. By default, everything is drawn to all cameras. There will eventually be a tag that lets you filter what is drawn to specific cameras.
Cameras have a resolution they draw at, "size".
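A minimal sketch of setting one up, using the `prosperon.make_camera` constructor from render.js later in this commit:
```js
// sketch: a camera that renders at a fixed 320x240 and letterboxes into the window
var cam = prosperon.make_camera();
cam.size = [320, 240]; // the resolution this camera draws at
cam.mode = "fit";      // resolved to "height" or "width" from the aspect ratios
// cam.target stays the default; render_camera hands camera.target to cmds.render_pass
```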
## TEXTURES
Anatomy of rendering an image: render.image(path)
Path can be a file like "toad"
If this is a gif, this would display the entire range of the animation
It can be a frame of animation, like "frog.0"
If it's an aseprite, it can have multiple animations, like "frog.walk.0"
file^ frame^ idx
render.image("frog.walk.0",
game.image("frog.walk.0") ==> retrieve
image = {
texture: "spritesheet.png",
rect: [x,y,w,h],
time: 100
},
frames: {
toad: {
x: 4,
y: 5,
w: 10,
h: 10
},
frog: {
walk: [
{ texture: spritesheet.png, x: 10, y:10, w:6,h:6, time: 100 },
{ texture: spritesheet.png, x:16,y:10,w:6,h:6,time:100} <--- two frame walk animation
],
},
},
}
texture frog {
texture: {"frog.png"}, <--- this is the actual thing to send to the gpu
x:0,
y:0,
w:10,
h:10
},
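A hypothetical helper showing how a dotted path could walk the frames table above (`resolve_frame` is illustrative only, not engine API):
```js
// illustrative only: resolve "file", "file.idx", or "file.anim.idx" against the frames table
function resolve_frame(frames, path) {
  var parts = path.split("."); // "frog.walk.0" -> ["frog", "walk", "0"]
  var node = frames[parts[0]]; // file
  if (parts.length > 1) node = node[parts[1]]; // animation name, or frame index for gifs
  if (parts.length > 2) node = node[Number(parts[2])]; // frame index within the animation
  return node; // a rect record, or an array of frames if no index was given
}
```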
## RENDER MODES
/* rendering modes
ps1
gouraud
diffuse // 16 bit color, 5-5-5
7 dynamic lights, 1 ambient
textures are affine
no vertex skinning
256x256 texture max (generally 128x128)
320x240, variable up to 640x480
n64
gouraud
diffuse
combiner // a secondary texture sometimes used to combine
7 dynamic lights, 1 ambient
320x240, or 640x480
sega saturn
gouraud
diffuse
320x240 or 640x480
ps2
phong
diffuse
combiner // second texture for modulation of diffuse
combine_mode // int for how to combine
dreamcast
phong
diffuse
combiner // second texture; could be an environment map, or emboss bump mapping
fog
640x480
640x448, special mode to 1280x1024
gamecube
phong
diffuse
+7 textures // wow!
8 dynamic lights
640x480
*/
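The list above does not fix a schema; one hypothetical way to encode a profile as data, so a pipeline could later be built from it:
```js
// hypothetical encoding of the PS1 profile listed above
var ps1_mode = {
  shading: "gouraud",
  color_bits: [5, 5, 5],              // 16 bit color, 5-5-5
  lights: { dynamic: 7, ambient: 1 },
  affine_textures: true,              // no perspective correction
  vertex_skinning: false,
  max_texture: [256, 256],            // generally 128x128 in practice
  resolution: [320, 240]              // variable up to 640x480
};
```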
/* meshes
position (float3)
color (rgba)
uv
*/
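`upload_model` later in this commit treats a model as a plain object of named buffers, with `indices` becoming the index buffer; a minimal sketch of a mesh in that shape:
```js
// sketch: a one-triangle mesh; names must match the pipeline's vertex_buffer_descriptions
var tri = {
  pos:     new Float32Array([0,0,0,  1,0,0,  0,1,0]),     // position, float3
  color:   new Float32Array([1,1,1,1, 1,1,1,1, 1,1,1,1]), // rgba
  uv:      new Float32Array([0,0,  1,0,  0,1]),
  indices: new Uint16Array([0, 1, 2]) // flagged as the index buffer by upload_model
};
// draw calls read an index count from the mesh, e.g. pass.draw(mesh.count, 1, 0, 0, 0)
```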
/* materials, modern pbr
any object can act as a "material". The engine expects some standardized things:
diffuse - base color texture
bump - a normal map for dot3 bump mapping used in phong shading
height - a grayscale heightmap
occlusion - ambient occlusion texture
emission - texture for where the model emits light
bump2 - a second normal map for detail
metallic - a metal/smoothness map
specular - specular map, alternative for the metallic workflow
*/
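Since any object can act as a material, `bind_mat` just pulls whichever of these names the pipeline's fragment reflection declares; a sketch:
```js
// sketch: a material is plain named textures; unused slots can simply be absent
var mat = {
  diffuse: game.texture("pockle"),  // base color
  bump:    game.texture("pockle_n") // hypothetical normal-map asset name
};
pass.bind_mat(mat); // throws if the pipeline wants an image the material lacks
```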
// world coordinates, the "actual" positions of things relative to the game's universe
// camera coordinates, normalized from 0 to 1 inside of a camera's viewport, bottom left is 0,0, top right is 1,1
// screen coordinates, pixels, 0,0 at the bottom left of the window and [w,h] at the top right
// hud coordinates, same as screen coordinates but flipped so 0,0 is at the top left
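A sketch of the screen/hud relationship above (the engine keeps its own `screen2hud` on cameras; this only shows the flip, assuming `prosperon.size` is the window size in pixels):
```js
// convert screen coordinates (0,0 bottom left) to hud coordinates (0,0 top left)
function screen_to_hud(pos) {
  return [pos[0], prosperon.size.y - pos[1]]; // x unchanged, y flipped
}
```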

View File

@@ -1,48 +1,5 @@
var unit_transform = os.make_transform();
/*
Anatomy of rendering an image
render.image(path)
Path can be a file like "toad"
If this is a gif, this would display the entire range of the animation
It can be a frame of animation, like "frog.0"
If it's an aseprite, it can have multiple animations, like "frog.walk.0"
file^ frame^ idx
render.image("frog.walk.0",
game.image("frog.walk.0") ==> retrieve
image = {
texture: "spritesheet.png",
rect: [x,y,w,h],
time: 100
},
frames: {
toad: {
x: 4,
y: 5,
w: 10,
h: 10
},
frog: {
walk: [
{ texture: spritesheet.png, x: 10, y:10, w:6,h:6, time: 100 },
{ texture: spritesheet.png, x:16,y:10,w:6,h:6,time:100} <--- two frame walk animation
],
},
},
}
texture frog {
texture: {"frog.png"}, <--- this is the actual thing to send to the gpu
x:0,
y:0,
w:10,
h:10
},
*/
var sprite_mesh = {};
render.doc = {
@@ -109,75 +66,20 @@ var base_pipeline = {
}
var post_pipeline = Object.create(base_pipeline);
post_pipeline.stencil = {
enabled: false,
test: false
};
post_pipeline.depth = {
test: false,
write: false
};
var post_camera = {};
post_camera.transform = os.make_transform();
post_camera.zoom = 1;
// post_camera.
/* rendering modes
ps1
gouraud
diffuse // 16 bit color, 5-5-5
7 dynamic lights, 1 ambient
textures are affine
no vertex skinning
256x256 texture max (generally 128x128)
320x240, variable up to 640x480
n64
gouraud
diffuse
combiner // a secondary texture sometimes used to combine
7 dynamic lights, 1 ambient
320x240, or 640x480
sega saturn
gouraud
diffuse
320x240 or 640x480
ps2
phong
diffuse
combiner // second texture for modulation of diffuse
combine_mode // int for how to combine
dreamcast
phong
diffuse
combiner // second texture; could be an environment map, or emboss bump mapping
fog
640x480
640x448, special mode to 1280x1024
gamecube
phong
diffuse
+7 textures // wow!
8 dynamic lights
640x480
*/
/* meshes
position (float3)
color (rgba)
uv
*/
/* materials, modern pbr
any object can act as a "material". The engine expects some standardized things:
diffuse - base color texture
bump - a normal map for dot3 bump mapping used in phong shading
height - a grayscale heightmap
occlusion - ambient occlusion texture
emission - texture for where the model emits light
bump2 - a second normal map for detail
metallic - a metal/smoothness map
specular - specular map, alternative for the metallic workflow
*/
render.poly_prim = function poly_prim(verts) {
var index = [];
if (verts.length < 1) return undefined;
@@ -348,7 +250,7 @@ var std_sampler;
var tbuffer;
var spritemesh;
function upload_model(cmds, model)
function upload_model(model)
{
var bufs = [];
for (var i in model) {
@@ -356,11 +258,12 @@ function upload_model(cmds, model)
if (i === 'indices') model[i].index = true;
bufs.push(model[i]);
}
tbuffer = render._main.upload(cmds, bufs, tbuffer);
tbuffer = render._main.upload(this, bufs, tbuffer);
}
function bind_model(pass, model, pipeline)
function bind_model(model)
{
var pipeline = this.pipeline;
var buffers = pipeline.vertex_buffer_descriptions;
var bufs = [];
for (var b of buffers) {
@@ -369,12 +272,13 @@ function bind_model(pass, model, pipeline)
else
throw Error (`could not find buffer ${b.name} on model`);
}
pass.bind_buffers(0,bufs);
pass.bind_index_buffer(model.indices);
this.bind_buffers(0,bufs);
this.bind_index_buffer(model.indices);
}
function bind_mat(pass, mat, pipeline)
function bind_mat(mat)
{
var pipeline = this.pipeline;
var imgs = [];
var refl = pipeline.fragment.reflection;
if (refl.separate_images) {
@@ -385,36 +289,28 @@ function bind_mat(pass, mat, pipeline)
} else
throw Error (`could not find all necessary images: ${i.name}`)
}
pass.bind_samplers(false, 0,imgs);
this.bind_samplers(false, 0,imgs);
}
}
function render_camera(camera)
{
var cmds = render._main.acquire_cmd_buffer();
}
function gpupresent()
{
var cmds = render._main.acquire_cmd_buffer();
var myimg = game.texture("pockle");
myimg.sampler = std_sampler;
spritemesh = render._main.make_sprite_mesh(sprite_stack, spritemesh);
upload_model(cmds, spritemesh);
cmds.submit();
sprite_stack.length = 0;
cmds = render._main.acquire_cmd_buffer();
var pass = cmds.render_pass(prosperon.camera.target);
try{
pass.bind_pipeline(base_pipeline.gpu);
bind_model(pass,spritemesh,base_pipeline);
bind_mat(pass,{diffuse:myimg}, base_pipeline);
cmds.upload_model(spritemesh);
sprite_stack.length = 0;
var pass = cmds.render_pass(camera.target);
if (!pass.__proto__.bind_model) {
pass.__proto__.bind_model = bind_model;
pass.__proto__.bind_mat = bind_mat;
}
pass.bind_pipeline(base_pipeline);
pass.bind_model(spritemesh);
pass.bind_mat({diffuse:myimg});
cmds.camera(prosperon.camera, pass);
pass.draw(spritemesh.count,1,0,0,0);
prosperon.camera.fov = 60;
prosperon.camera.near = 0.1;
prosperon.camera.far = 100000;
/* cmds.camera(prosperon.camera);
pass.bind_pipeline(pipeline_model.gpu);
bind_model(pass,ducky.mesh,pipeline_model);
@@ -424,15 +320,35 @@ try{
// cmds.camera(prosperon.camera, true);
// pass.bind_pipeline(base_pipeline.gpu);
// pass.draw(spritemesh.count,1,0,0,0);
} catch(e) { console.error(e); } finally {
pass.end();
pass = cmds.swapchain_pass();
pass.bind_pipeline(post_pipeline.gpu);
pass.end();
cmds.submit();
cmds.submit();
}
function gpupresent()
{
try{
render_camera(prosperon.camera);
} catch(e) { console.error(e); } finally {
var cmds = render._main.acquire_cmd_buffer();
render.image(prosperon.camera.target.color_targets[0], {x:0,y:0,width:200,height:200});
var mmesh = render._main.make_sprite_mesh(sprite_stack);
sprite_stack.length = 0;
cmds.upload_model(mmesh);
var pass = cmds.swapchain_pass();
cmds.camera(prosperon.camera, pass);
pass.bind_pipeline(base_pipeline);
pass.bind_model(mmesh);
var mat = {};
mat.diffuse = {
texture:prosperon.camera.target.color_targets[0].texture,
sampler:std_sampler
};
pass.bind_mat(mat);
pass.draw(mmesh.count,1,0,0,0);
pass.end();
cmds.submit();
}
}
var display_res;
@@ -445,6 +361,8 @@ function logical_size(size)
var ducky;
var pipeline_model;
var quad_model;
render.init = function () {
std_sampler = render._main.make_sampler({
min_filter: "nearest",
@@ -454,7 +372,7 @@ render.init = function () {
address_mode_v: "clamp_edge",
address_mode_w: "clamp_edge"
});
quad_model = render._main.make_quad();
io.mount("core");
render._main.present = gpupresent;
render._main.logical_size = logical_size;
@@ -474,7 +392,9 @@ render.init = function () {
ducky = ducky[0];
var cmds = render._main.acquire_cmd_buffer();
upload_model(cmds,ducky.mesh);
cmds.__proto__.upload_model = upload_model;
cmds.upload_model(ducky.mesh);
cmds.upload_model(quad_model);
cmds.submit();
var sprite_vert = make_shader("sprite.vert");
var sprite_frag = make_shader("sprite.frag");
@@ -1014,68 +934,6 @@ render.scissor = function(rect)
render.viewport(rect)
}
// Camera viewport is a rectangle with the bottom left corner defined as x,y. Units are pixels on the window.
function camviewport() {
var aspect = (((this.viewport[2] - this.viewport[0]) / (this.viewport[3] - this.viewport[1])) * prosperon.size.x) / prosperon.size.y;
var raspect = this.size.x / this.size.y;
var left = this.viewport[0] * prosperon.size.x;
var bottom = this.viewport[1] * prosperon.size.y;
var usemode = this.mode;
if (this.break && this.size.x > prosperon.size.x && this.size.y > prosperon.size.y) usemode = this.break;
if (usemode === "fit")
if (raspect < aspect) usemode = "height";
else usemode = "width";
switch (usemode) {
case "stretch":
case "expand":
return {
x: 0,
y: 0,
width: prosperon.size.x,
height: prosperon.size.y
};
case "keep":
return {
x: left,
y: bottom,
width:left+this.size.x,
height:bottom+this.size.y
}
case "height":
var ret = {
x:left,
y:0,
width:this.size.x*(prosperon.size.y/this.size.y),
height:prosperon.size.y
};
ret.x = (prosperon.size.x - (ret.width-ret.x))/2;
return ret;
case "width":
var ret = {
x:0,
y:bottom,
width:prosperon.size.x,
height:this.size.y*(prosperon.size.x/this.size.x)
};
ret.y = (prosperon.size.y - (ret.height-ret.y))/2;
return ret;
}
return {
x:0,
y:0,
width:prosperon.size.x,
height:prosperon.size.y
};
}
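// Worked example of the "height" branch above, assuming a 1920x1080 window,
// a 320x240 camera, and the default viewport [0,0,1,1] (so left = 0):
//   width  = 320 * (1080 / 240) = 1440
//   height = 1080
//   x      = (1920 - 1440) / 2  = 240   (centered horizontally)
//   y      = 0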
// pos is screen coordinates
function camscreen2world(pos) {
var view = this.screen2cam(pos);
var viewport = render._main.get_viewport();
@@ -1129,14 +987,12 @@ prosperon.make_camera = function make_camera() {
cam.near = 1;
cam.far = -1000;
cam.ortho = true; // True if this is a 2d camera
cam.viewport = [0, 0, 1, 1]; // normalized screen coordinates of where to draw
cam.size = prosperon.size.slice() // The render size of this camera in pixels
// In ortho mode, this determines how many pixels it will see
cam.mode = "stretch";
cam.screen2world = camscreen2world;
cam.screen2cam = screen2cam;
cam.screen2hud = screen2hud;
cam.view = camviewport;
cam.zoom = 1; // the "scale factor" this camera demonstrates
// camera renders draw calls, and then hud
cam.render = function() {

View File

@@ -1,16 +1,16 @@
#include "common.hlsl"
#include "common/common.hlsl"
struct input
{
float 2 pos;
float 2 uv;
}
float2 pos : pos;
float2 uv : uv;
};
struct output
{
float4 pos : SV_Position;
float2 uv : TEXCOORD0;
}
};
output main(input i)
{

View File

@@ -509,7 +509,7 @@ void *gpu_buffer_unpack(JSContext *js, SDL_GPUDevice *device, JSValue buffer, si
JSValue gpu = JS_GetPropertyStr(js,buffer,"gpu");
*send_gpu = js2SDL_GPUBuffer(js,gpu);
if (!*send_gpu) {
*send_gpu = SDL_CreateGPUBuffer(device, &(SDL_GPUBufferCreateInfo) { .usage=usage,.size=*size});
*send_gpu = SDL_CreateGPUBuffer(device, &(SDL_GPUBufferCreateInfo) { .usage=usage,.size=msize});
if (!*send_gpu) printf("COULDN'T MAKE GPU BUFFER: %s\n", SDL_GetError());
JS_SetPropertyStr(js, buffer, "gpu", SDL_GPUBuffer2js(js,*send_gpu));
}
@@ -4144,6 +4144,54 @@ JSC_CCALL(gpu_make_sprite_mesh,
return ret;
)
JSC_CCALL(gpu_make_quad,
size_t quads = 1;
size_t verts = quads*4;
size_t count = quads*6;
// Prepare arrays on CPU
HMM_Vec2 *posdata = malloc(sizeof(*posdata)*verts);
HMM_Vec2 *uvdata = malloc(sizeof(*uvdata)*verts);
HMM_Vec4 *colordata = malloc(sizeof(*colordata)*verts);
HMM_Vec4 usecolor = (HMM_Vec4){1,1,1,1};
posdata[0] = (HMM_Vec2){0,1};
posdata[1] = (HMM_Vec2){1,1};
posdata[2] = (HMM_Vec2){0,0};
posdata[3] = (HMM_Vec2){1,0};
uvdata[0] = (HMM_Vec2){0,1};
uvdata[1] = (HMM_Vec2){1,1};
uvdata[2] = (HMM_Vec2){0,0};
uvdata[3] = (HMM_Vec2){1,0};
colordata[0] = usecolor;
colordata[1] = usecolor;
colordata[2] = usecolor;
colordata[3] = usecolor;
ret = JS_NewObject(js);
JSValue pos = make_gpu_buffer(js, posdata, sizeof(*posdata)*verts, JS_TYPED_ARRAY_FLOAT32, 2, 1,0);
JSValue uv = make_gpu_buffer(js, uvdata, sizeof(*uvdata)*verts, JS_TYPED_ARRAY_FLOAT32, 2, 1,0);
JSValue color = make_gpu_buffer(js, colordata, sizeof(*colordata)*verts, JS_TYPED_ARRAY_FLOAT32, 4, 1,0); // rgba: four components per vertex, matching pos/uv above
JS_SetProperty(js, ret, pos_atom, pos);
JS_SetProperty(js, ret, uv_atom, uv);
JS_SetProperty(js, ret, color_atom, color);
JSValue indices = make_quad_indices_buffer(js, quads);
JS_SetProperty(js, ret, indices_atom, indices);
JS_SetProperty(js, ret, vertices_atom, number2js(js, verts));
JS_SetProperty(js, ret, count_atom, number2js(js, count));
// Free temporary CPU arrays
free(posdata);
free(uvdata);
free(colordata);
return ret;
)
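/* Script-side usage, from render.init in this commit's render.js:
     quad_model = render._main.make_quad();
     cmds.upload_model(quad_model); // uploaded alongside the other startup meshes
     cmds.submit();
*/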
JSC_CCALL(gpu_driver,
SDL_GPUDevice *gpu = js2SDL_GPUDevice(js,self);
ret = JS_NewString(js, SDL_GetGPUDeviceDriver(gpu));
@@ -4381,6 +4429,7 @@ static const JSCFunctionListEntry js_SDL_GPUDevice_funcs[] = {
// MIST_FUNC_DEF(gpu, geometry, 2),
MIST_FUNC_DEF(gpu, viewport, 1),
MIST_FUNC_DEF(gpu, make_sprite_mesh, 2),
MIST_FUNC_DEF(gpu, make_quad, 0),
MIST_FUNC_DEF(gpu, driver, 0),
MIST_FUNC_DEF(gpu, make_shader, 1),
MIST_FUNC_DEF(gpu, load_gltf_model, 1),
@@ -4391,8 +4440,11 @@ static const JSCFunctionListEntry js_SDL_GPUDevice_funcs[] = {
JSC_CCALL(renderpass_bind_pipeline,
SDL_GPURenderPass *r = js2SDL_GPURenderPass(js,self);
SDL_GPUGraphicsPipeline *pipe = js2SDL_GPUGraphicsPipeline(js,argv[0]);
JSValue pipe_gpu = JS_GetPropertyStr(js,argv[0],"gpu");
SDL_GPUGraphicsPipeline *pipe = js2SDL_GPUGraphicsPipeline(js,pipe_gpu);
JS_FreeValue(js,pipe_gpu);
SDL_BindGPUGraphicsPipeline(r,pipe);
JS_SetPropertyStr(js,self, "pipeline", JS_DupValue(js,argv[0]));
)
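/* Storing the script-side pipeline object on the pass is what lets bind_model and
   bind_mat in render.js read this.pipeline for buffer descriptions and reflection. */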
JSC_CCALL(renderpass_draw,
@@ -4589,12 +4641,6 @@ JSC_CCALL(cmd_render_pass,
return SDL_GPURenderPass2js(js, pass);
)
JSC_CCALL(cmd_bind_pipeline,
SDL_GPUCommandBuffer *cmds = js2SDL_GPUCommandBuffer(js, self);
SDL_GPUGraphicsPipeline *pipeline = js2SDL_GPUGraphicsPipeline(js, argv[0]);
SDL_BindGPUGraphicsPipeline(cmds, pipeline);
)
JSC_CCALL(cmd_bind_vertex_buffer,
SDL_GPUCommandBuffer *cmds = js2SDL_GPUCommandBuffer(js, self);
int slot;
@@ -4775,7 +4821,6 @@ JSC_CCALL(cmd_camera,
static const JSCFunctionListEntry js_SDL_GPUCommandBuffer_funcs[] = {
MIST_FUNC_DEF(cmd, render_pass, 1),
MIST_FUNC_DEF(cmd, swapchain_pass, 1),
MIST_FUNC_DEF(cmd, bind_pipeline, 1),
MIST_FUNC_DEF(cmd, bind_vertex_buffer, 2),
MIST_FUNC_DEF(cmd, bind_index_buffer, 1),
MIST_FUNC_DEF(cmd, bind_fragment_sampler, 3),