add image conversion functions
@@ -4,6 +4,7 @@ var transform = use('transform');
var rasterize = use('rasterize');
var time = use('time')
var tilemap = use('tilemap')
var json = use('json')

// Frame timing variables
var framerate = 60

@@ -24,8 +24,9 @@ function makeOrthoMetal(l,r,b,t,n,f){

function make_camera_pblob(camera) {
  def zoom = camera.zoom;
  def cw = win_size.width;
  def ch = win_size.height;
  // Use surface dimensions if rendering to a surface, otherwise window dimensions
  def cw = camera.surface ? camera.surface.width : win_size.width;
  def ch = camera.surface ? camera.surface.height : win_size.height;
  // how big is the world window?
  def world_w = cw / zoom;
  def world_h = ch / zoom;
@@ -68,7 +69,7 @@ var default_sampler = {
  max_anisotropy: 0,
  compare_op: "none",
  min_lod: 0,
  max_lod: 2,
  max_lod: 10,
  anisotropy: false,
  compare: false
};
@@ -159,7 +160,44 @@ device.set_swapchain(window, 'sdr', 'vsync')

var shader_type = device.shader_format()[0]
shader_type = 'msl'
var std_sampler = new sdl_gpu.sampler(device, default_sampler)

var sampler_cache = {}

function canonicalize_sampler(desc) {
  if (desc == true)
    return json.encode(default_sampler)

  var sampler_obj = {}
  sampler_obj.__proto__ = default_sampler

  if (typeof desc == 'object') {
    for (var key in desc) {
      if (desc.hasOwnProperty(key)) {
        sampler_obj[key] = desc[key]
      }
    }
  }

  var keys = Object.keys(sampler_obj).sort()
  var canonical = {}
  for (var i = 0; i < keys.length; i++)
    canonical[keys[i]] = sampler_obj[keys[i]]

  return json.encode(canonical)
}

function get_sampler(desc) {
  var key = canonicalize_sampler(desc)

  if (!sampler_cache[key]) {
    var sampler_config = json.decode(key)
    sampler_cache[key] = new sdl_gpu.sampler(device, sampler_config)
  }

  return sampler_cache[key]
}

var std_sampler = get_sampler(true)

// Shader and pipeline cache
var shader_cache = {}
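A minimal usage sketch of the sampler cache above (all names come from this file; nothing new is assumed): descriptions that are structurally equal, whatever their key order, canonicalize to the same JSON string and therefore resolve to the same cached GPU sampler, while passing true resolves to the encoded default_sampler.

// Sketch: both descriptions canonicalize to the same key, so only one sampler is created.
var a = get_sampler({ max_lod: 4, min_lod: 0 })
var b = get_sampler({ min_lod: 0, max_lod: 4 })
// a === b
// Passing true hits the default_sampler entry, so repeated calls stay cached:
// get_sampler(true) === std_sampler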
@@ -443,8 +481,13 @@ var cur_cam
var cmd_fns = {}
cmd_fns.camera = function(cmd)
{
  if (cmd.camera.surface && !cmd.camera.surface[GPU])
  if (cmd.camera.surface && !cmd.camera.surface[GPU]) {
    cmd.camera.surface[GPU] = new sdl_gpu.texture(device, cmd.camera.surface)
    // Store the sampler description on the texture for later use
    if (cmd.camera.surface.sampler != null) {
      cmd.camera.surface[GPU].sampler_desc = cmd.camera.surface.sampler
    }
  }
  draw_queue.push(cmd)
}

@@ -452,6 +495,8 @@ var new_tex = []

function get_img_gpu(surface)
{
  if (!surface) return

  var full_mip = Math.floor(Math.log2(Math.max(surface.width,surface.height))) + 1
  var gpu = new sdl_gpu.texture(device, {
    width: surface.width,
@@ -461,9 +506,14 @@ function get_img_gpu(surface)
    samples: 0,
    type: "2d",
    format: "rgba8",
    sampler: true,
    sampler: surface.sampler != null ? surface.sampler : true,
    color_target: true
  })

  // Store the sampler description on the texture for later use
  if (surface.sampler != null) {
    gpu.sampler_desc = surface.sampler
  }

  var tbuf = new sdl_gpu.transfer_buffer(device, {
    size: surface.pixels.length/8,
@@ -508,6 +558,8 @@ function render_geom(geom, img)
    img[GPU] = get_img_gpu(img.surface)
  else
    img[GPU] = get_img_gpu(img.cpu)

  if (!img[GPU]) return
}

  pos_blob.write_blob(geom.xy)
@@ -535,7 +587,10 @@ cmd_fns.draw_image = function(cmd)
  else
    img = cmd.image

  var geom = geometry.make_rect_quad({x:cmd.rect.x, y:cmd.rect.y, width: img.width, height: img.height})
  cmd.rect.width ??= img.width
  cmd.rect.height ??= img.height

  var geom = geometry.make_rect_quad(cmd.rect)
  geom.indices = geometry.make_quad_indices(1)
  geom.num_indices = 6

@@ -662,13 +717,14 @@ prosperon.create_batch = function create_batch(draw_cmds, done) {
    } else if (cmd.camera.surface && render_target != cmd.camera.surface) {
      if (render_pass)
        render_pass.end()
      render_target = cmd.camera.surface
      render_target = cmd.camera.surface
      render_pass = render_queue.render_pass({
        color_targets: [{
          texture: cmd.camera.surface[GPU],
          mip_level: 0,
          layer: 0,
          load: "clear",
          clear_color: cmd.camera.background,
          store: "store",
        }]
      })
@@ -705,7 +761,12 @@ prosperon.create_batch = function create_batch(draw_cmds, done) {
      render_queue.push_vertex_uniform_data(0, cur_cam)
      continue
    }
    render_pass.bind_samplers(false, 0, [{texture:cmd.texture, sampler: std_sampler}])
    // Use texture's sampler if it has one, otherwise use standard sampler
    var sampler_to_use = std_sampler
    if (cmd.texture && cmd.texture.sampler_desc) {
      sampler_to_use = get_sampler(cmd.texture.sampler_desc)
    }
    render_pass.bind_samplers(false, 0, [{texture:cmd.texture, sampler: sampler_to_use}])

    render_pass.draw_indexed(
      cmd.num_indices,
@@ -9,8 +9,6 @@
#include "stb_ds.h"
#include "stb_image.h"
#include "stb_rect_pack.h"
#define STB_DXT_IMPLEMENTATION
#include "stb_dxt.h"
#include "stb_image_write.h"
#include "string.h"
#include <assert.h>
@@ -421,21 +419,10 @@ colorf js2color(JSContext *js,JSValue v) {

    for (int i = 0; i < 4; i++) JS_FreeValue(js,c[i]);
  } else if (JS_IsObject(v)) {
    // Handle object format: {r, g, b, a}
    JSValue r_val = JS_GetPropertyStr(js, v, "r");
    JSValue g_val = JS_GetPropertyStr(js, v, "g");
    JSValue b_val = JS_GetPropertyStr(js, v, "b");
    JSValue a_val = JS_GetPropertyStr(js, v, "a");

    color.r = JS_IsNull(r_val) ? 1.0 : js2number(js, r_val);
    color.g = JS_IsNull(g_val) ? 1.0 : js2number(js, g_val);
    color.b = JS_IsNull(b_val) ? 1.0 : js2number(js, b_val);
    color.a = JS_IsNull(a_val) ? 1.0 : js2number(js, a_val);

    JS_FreeValue(js, r_val);
    JS_FreeValue(js, g_val);
    JS_FreeValue(js, b_val);
    JS_FreeValue(js, a_val);
    JS_GETPROP(js, color.r, v, r, number)
    JS_GETPROP(js, color.g, v, g, number)
    JS_GETPROP(js, color.b, v, b, number)
    JS_GETPROP(js, color.a, v, a, number)
  }

  return color;
@@ -1056,6 +1043,29 @@ static const JSCFunctionListEntry js_font_funcs[] = {
  MIST_GET(font, descent),
};

JSC_CCALL(os_image_info,
  size_t len;
  void *raw = js_get_blob_data(js, &len, argv[0]);
  if (!raw)
    return JS_ThrowReferenceError(js, "could not load image with array buffer");

  int depth = stbi_is_16_bit_from_memory(raw, len) ? 16 : 8;
  int width, height, comp;
  if (!stbi_info_from_memory(raw, len, &width, &height, &comp))
    return JS_ThrowReferenceError(js, "could not parse image info: %s", stbi_failure_reason());

  int hdr = stbi_is_hdr_from_memory(raw, len);

  JSValue obj = JS_NewObject(js);
  JS_SetPropertyStr(js, obj, "width", JS_NewInt32(js, width));
  JS_SetPropertyStr(js, obj, "height", JS_NewInt32(js, height));
  JS_SetPropertyStr(js, obj, "components", JS_NewInt32(js, comp));
  JS_SetPropertyStr(js, obj, "depth", JS_NewInt32(js, depth));
  JS_SetPropertyStr(js, obj, "hdr", JS_NewBool(js, hdr));

  ret = obj;
)
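A minimal script-side sketch of the new call (hedged: it assumes the binding is reachable as os.image_info, matching the MIST_FUNC_DEF(os, image_info, 1) registration below, and io.slurp is only a placeholder for however the encoded bytes are loaded; the property names come from the C body above).

// Inspect an encoded png/jpg/bmp without decoding its pixels.
var raw = io.slurp('sprites/hero.png')      // placeholder loader returning the encoded bytes
var info = os.image_info(raw)
// info.width, info.height, info.components, info.depth (8 or 16), info.hdr (bool)
var bc_ready = (info.width % 4 == 0) && (info.height % 4 == 0)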
// input: (encoded image data of jpg, png, bmp, tiff)
JSC_CCALL(os_make_texture,
  size_t len;
@@ -1083,12 +1093,13 @@ JSC_CCALL(os_make_texture,
  JS_SetPropertyStr(js, obj, "format", JS_NewString(js, "rgba32"));
  JS_SetPropertyStr(js, obj, "pitch", JS_NewInt32(js, pitch));
  JS_SetPropertyStr(js, obj, "pixels", js_new_blob_stoned_copy(js, data, pixels_size));
  JS_SetPropertyStr(js, obj, "depth", JS_NewInt32(js, 8));
  JS_SetPropertyStr(js, obj, "hdr", JS_NewBool(js,0));

  free(data);
  ret = obj;
)

// input: (gif image data)
JSC_CCALL(os_make_gif,
  size_t rawlen;
@@ -1493,6 +1504,7 @@ JSC_CCALL(graphics_save_jpg,
static const JSCFunctionListEntry js_graphics_funcs[] = {
  MIST_FUNC_DEF(os, make_text_buffer, 5),
  MIST_FUNC_DEF(os, rectpack, 3),
  MIST_FUNC_DEF(os, image_info, 1),
  MIST_FUNC_DEF(os, make_texture, 1),
  MIST_FUNC_DEF(os, make_gif, 1),
  MIST_FUNC_DEF(os, make_aseprite, 1),
@@ -78,4 +78,28 @@ JSValue quads_to_mesh(JSContext *js, text_vert *buffer);
SDL_Window *js2SDL_Window(JSContext *js, JSValue v);
JSValue SDL_Window2js(JSContext *js, SDL_Window *w);

// X-macro enum definition system for string<->enum conversion
#define ENUM_MAPPING_TABLE(ENUM) \
  static const struct { int value; const char *name; } ENUM##_mapping[]

#define JS2ENUM(NAME) \
int js2##NAME(JSContext *js, JSValue v) { \
  if (JS_IsNull(v)) return 0; \
  const char *str = JS_ToCString(js, v); \
  if (!str) return 0; \
  for(int i = 0; i < sizeof(NAME##_mapping)/sizeof(NAME##_mapping[0]); i++) \
    if(!strcmp(NAME##_mapping[i].name, str)) { \
      JS_FreeCString(js, str); \
      return NAME##_mapping[i].value; \
    } \
  JS_FreeCString(js, str); \
  return 0; \
} \
JSValue NAME##2js(JSContext *js, int enumval) { \
  for(int i = 0; i < sizeof(NAME##_mapping)/sizeof(NAME##_mapping[0]); i++) \
    if(NAME##_mapping[i].value == enumval) \
      return JS_NewString(js, NAME##_mapping[i].name); \
  return JS_NULL; \
}

#endif
source/qjs_sdl.c
@@ -26,148 +26,79 @@ QJSCLASS(SDL_AudioStream,)

// CAMERA FUNCTIONS

// Helper functions for camera format conversion
const char *pixelformat2str(SDL_PixelFormat format) {
  switch(format) {
    case SDL_PIXELFORMAT_UNKNOWN: return "unknown";
    case SDL_PIXELFORMAT_INDEX1LSB: return "index1lsb";
    case SDL_PIXELFORMAT_INDEX1MSB: return "index1msb";
    case SDL_PIXELFORMAT_INDEX2LSB: return "index2lsb";
    case SDL_PIXELFORMAT_INDEX2MSB: return "index2msb";
    case SDL_PIXELFORMAT_INDEX4LSB: return "index4lsb";
    case SDL_PIXELFORMAT_INDEX4MSB: return "index4msb";
    case SDL_PIXELFORMAT_INDEX8: return "index8";
    case SDL_PIXELFORMAT_RGB332: return "rgb332";
    case SDL_PIXELFORMAT_XRGB4444: return "xrgb4444";
    case SDL_PIXELFORMAT_XBGR4444: return "xbgr4444";
    case SDL_PIXELFORMAT_XRGB1555: return "xrgb1555";
    case SDL_PIXELFORMAT_XBGR1555: return "xbgr1555";
    case SDL_PIXELFORMAT_ARGB4444: return "argb4444";
    case SDL_PIXELFORMAT_RGBA4444: return "rgba4444";
    case SDL_PIXELFORMAT_ABGR4444: return "abgr4444";
    case SDL_PIXELFORMAT_BGRA4444: return "bgra4444";
    case SDL_PIXELFORMAT_ARGB1555: return "argb1555";
    case SDL_PIXELFORMAT_RGBA5551: return "rgba5551";
    case SDL_PIXELFORMAT_ABGR1555: return "abgr1555";
    case SDL_PIXELFORMAT_BGRA5551: return "bgra5551";
    case SDL_PIXELFORMAT_RGB565: return "rgb565";
    case SDL_PIXELFORMAT_BGR565: return "bgr565";
    case SDL_PIXELFORMAT_RGB24: return "rgb24";
    case SDL_PIXELFORMAT_BGR24: return "bgr24";
    case SDL_PIXELFORMAT_XRGB8888: return "xrgb8888";
    case SDL_PIXELFORMAT_RGBX8888: return "rgbx8888";
    case SDL_PIXELFORMAT_XBGR8888: return "xbgr8888";
    case SDL_PIXELFORMAT_BGRX8888: return "bgrx8888";
    case SDL_PIXELFORMAT_ARGB8888: return "argb8888";
    case SDL_PIXELFORMAT_RGBA8888: return "rgba8888";
    case SDL_PIXELFORMAT_ABGR8888: return "abgr8888";
    case SDL_PIXELFORMAT_BGRA8888: return "bgra8888";
    case SDL_PIXELFORMAT_XRGB2101010: return "xrgb2101010";
    case SDL_PIXELFORMAT_XBGR2101010: return "xbgr2101010";
    case SDL_PIXELFORMAT_ARGB2101010: return "argb2101010";
    case SDL_PIXELFORMAT_ABGR2101010: return "abgr2101010";
    case SDL_PIXELFORMAT_RGB48: return "rgb48";
    case SDL_PIXELFORMAT_BGR48: return "bgr48";
    case SDL_PIXELFORMAT_RGBA64: return "rgba64";
    case SDL_PIXELFORMAT_ARGB64: return "argb64";
    case SDL_PIXELFORMAT_BGRA64: return "bgra64";
    case SDL_PIXELFORMAT_ABGR64: return "abgr64";
    case SDL_PIXELFORMAT_RGB48_FLOAT: return "rgb48_float";
    case SDL_PIXELFORMAT_BGR48_FLOAT: return "bgr48_float";
    case SDL_PIXELFORMAT_RGBA64_FLOAT: return "rgba64_float";
    case SDL_PIXELFORMAT_ARGB64_FLOAT: return "argb64_float";
    case SDL_PIXELFORMAT_BGRA64_FLOAT: return "bgra64_float";
    case SDL_PIXELFORMAT_ABGR64_FLOAT: return "abgr64_float";
    case SDL_PIXELFORMAT_RGB96_FLOAT: return "rgb96_float";
    case SDL_PIXELFORMAT_BGR96_FLOAT: return "bgr96_float";
    case SDL_PIXELFORMAT_RGBA128_FLOAT: return "rgba128_float";
    case SDL_PIXELFORMAT_ARGB128_FLOAT: return "argb128_float";
    case SDL_PIXELFORMAT_BGRA128_FLOAT: return "bgra128_float";
    case SDL_PIXELFORMAT_ABGR128_FLOAT: return "abgr128_float";
    case SDL_PIXELFORMAT_YV12: return "yv12";
    case SDL_PIXELFORMAT_IYUV: return "iyuv";
    case SDL_PIXELFORMAT_YUY2: return "yuy2";
    case SDL_PIXELFORMAT_UYVY: return "uyvy";
    case SDL_PIXELFORMAT_YVYU: return "yvyu";
    case SDL_PIXELFORMAT_NV12: return "nv12";
    case SDL_PIXELFORMAT_NV21: return "nv21";
    case SDL_PIXELFORMAT_P010: return "p010";
    default: return "unknown";
  }
}
SDL_PixelFormat str2pixelformat(const char *str) {
  if (!strcmp(str, "unknown")) return SDL_PIXELFORMAT_UNKNOWN;
  if (!strcmp(str, "index1lsb")) return SDL_PIXELFORMAT_INDEX1LSB;
  if (!strcmp(str, "index1msb")) return SDL_PIXELFORMAT_INDEX1MSB;
  if (!strcmp(str, "index2lsb")) return SDL_PIXELFORMAT_INDEX2LSB;
  if (!strcmp(str, "index2msb")) return SDL_PIXELFORMAT_INDEX2MSB;
  if (!strcmp(str, "index4lsb")) return SDL_PIXELFORMAT_INDEX4LSB;
  if (!strcmp(str, "index4msb")) return SDL_PIXELFORMAT_INDEX4MSB;
  if (!strcmp(str, "index8")) return SDL_PIXELFORMAT_INDEX8;
  if (!strcmp(str, "rgb332")) return SDL_PIXELFORMAT_RGB332;
  if (!strcmp(str, "xrgb4444")) return SDL_PIXELFORMAT_XRGB4444;
  if (!strcmp(str, "xbgr4444")) return SDL_PIXELFORMAT_XBGR4444;
  if (!strcmp(str, "xrgb1555")) return SDL_PIXELFORMAT_XRGB1555;
  if (!strcmp(str, "xbgr1555")) return SDL_PIXELFORMAT_XBGR1555;
  if (!strcmp(str, "argb4444")) return SDL_PIXELFORMAT_ARGB4444;
  if (!strcmp(str, "rgba4444")) return SDL_PIXELFORMAT_RGBA4444;
  if (!strcmp(str, "abgr4444")) return SDL_PIXELFORMAT_ABGR4444;
  if (!strcmp(str, "bgra4444")) return SDL_PIXELFORMAT_BGRA4444;
  if (!strcmp(str, "argb1555")) return SDL_PIXELFORMAT_ARGB1555;
  if (!strcmp(str, "rgba5551")) return SDL_PIXELFORMAT_RGBA5551;
  if (!strcmp(str, "abgr1555")) return SDL_PIXELFORMAT_ABGR1555;
  if (!strcmp(str, "bgra5551")) return SDL_PIXELFORMAT_BGRA5551;
  if (!strcmp(str, "rgb565")) return SDL_PIXELFORMAT_RGB565;
  if (!strcmp(str, "bgr565")) return SDL_PIXELFORMAT_BGR565;
  if (!strcmp(str, "rgb24")) return SDL_PIXELFORMAT_RGB24;
  if (!strcmp(str, "bgr24")) return SDL_PIXELFORMAT_BGR24;
  if (!strcmp(str, "xrgb8888")) return SDL_PIXELFORMAT_XRGB8888;
  if (!strcmp(str, "rgbx8888")) return SDL_PIXELFORMAT_RGBX8888;
  if (!strcmp(str, "xbgr8888")) return SDL_PIXELFORMAT_XBGR8888;
  if (!strcmp(str, "bgrx8888")) return SDL_PIXELFORMAT_BGRX8888;
  if (!strcmp(str, "argb8888")) return SDL_PIXELFORMAT_ARGB8888;
  if (!strcmp(str, "rgba8888")) return SDL_PIXELFORMAT_RGBA8888;
  if (!strcmp(str, "abgr8888")) return SDL_PIXELFORMAT_ABGR8888;
  if (!strcmp(str, "bgra8888")) return SDL_PIXELFORMAT_BGRA8888;
  if (!strcmp(str, "xrgb2101010")) return SDL_PIXELFORMAT_XRGB2101010;
  if (!strcmp(str, "xbgr2101010")) return SDL_PIXELFORMAT_XBGR2101010;
  if (!strcmp(str, "argb2101010")) return SDL_PIXELFORMAT_ARGB2101010;
  if (!strcmp(str, "abgr2101010")) return SDL_PIXELFORMAT_ABGR2101010;
  if (!strcmp(str, "rgb48")) return SDL_PIXELFORMAT_RGB48;
  if (!strcmp(str, "bgr48")) return SDL_PIXELFORMAT_BGR48;
  if (!strcmp(str, "rgba64")) return SDL_PIXELFORMAT_RGBA64;
  if (!strcmp(str, "argb64")) return SDL_PIXELFORMAT_ARGB64;
  if (!strcmp(str, "bgra64")) return SDL_PIXELFORMAT_BGRA64;
  if (!strcmp(str, "abgr64")) return SDL_PIXELFORMAT_ABGR64;
  if (!strcmp(str, "rgb48_float")) return SDL_PIXELFORMAT_RGB48_FLOAT;
  if (!strcmp(str, "bgr48_float")) return SDL_PIXELFORMAT_BGR48_FLOAT;
  if (!strcmp(str, "rgba64_float")) return SDL_PIXELFORMAT_RGBA64_FLOAT;
  if (!strcmp(str, "argb64_float")) return SDL_PIXELFORMAT_ARGB64_FLOAT;
  if (!strcmp(str, "bgra64_float")) return SDL_PIXELFORMAT_BGRA64_FLOAT;
  if (!strcmp(str, "abgr64_float")) return SDL_PIXELFORMAT_ABGR64_FLOAT;
  if (!strcmp(str, "rgb96_float")) return SDL_PIXELFORMAT_RGB96_FLOAT;
  if (!strcmp(str, "bgr96_float")) return SDL_PIXELFORMAT_BGR96_FLOAT;
  if (!strcmp(str, "rgba128_float")) return SDL_PIXELFORMAT_RGBA128_FLOAT;
  if (!strcmp(str, "argb128_float")) return SDL_PIXELFORMAT_ARGB128_FLOAT;
  if (!strcmp(str, "bgra128_float")) return SDL_PIXELFORMAT_BGRA128_FLOAT;
  if (!strcmp(str, "abgr128_float")) return SDL_PIXELFORMAT_ABGR128_FLOAT;
  if (!strcmp(str, "yv12")) return SDL_PIXELFORMAT_YV12;
  if (!strcmp(str, "iyuv")) return SDL_PIXELFORMAT_IYUV;
  if (!strcmp(str, "yuy2")) return SDL_PIXELFORMAT_YUY2;
  if (!strcmp(str, "uyvy")) return SDL_PIXELFORMAT_UYVY;
  if (!strcmp(str, "yvyu")) return SDL_PIXELFORMAT_YVYU;
  if (!strcmp(str, "nv12")) return SDL_PIXELFORMAT_NV12;
  if (!strcmp(str, "nv21")) return SDL_PIXELFORMAT_NV21;
  if (!strcmp(str, "p010")) return SDL_PIXELFORMAT_P010;
  if (!strcmp(str, "rgba32")) return SDL_PIXELFORMAT_RGBA32;
  return SDL_PIXELFORMAT_UNKNOWN;
}
// Pixel format enum conversion using the new system
ENUM_MAPPING_TABLE(SDL_PixelFormat) = {
  {SDL_PIXELFORMAT_UNKNOWN, "unknown"},
  {SDL_PIXELFORMAT_INDEX1LSB, "index1lsb"},
  {SDL_PIXELFORMAT_INDEX1MSB, "index1msb"},
  {SDL_PIXELFORMAT_INDEX2LSB, "index2lsb"},
  {SDL_PIXELFORMAT_INDEX2MSB, "index2msb"},
  {SDL_PIXELFORMAT_INDEX4LSB, "index4lsb"},
  {SDL_PIXELFORMAT_INDEX4MSB, "index4msb"},
  {SDL_PIXELFORMAT_INDEX8, "index8"},
  {SDL_PIXELFORMAT_RGB332, "rgb332"},
  {SDL_PIXELFORMAT_XRGB4444, "xrgb4444"},
  {SDL_PIXELFORMAT_XBGR4444, "xbgr4444"},
  {SDL_PIXELFORMAT_XRGB1555, "xrgb1555"},
  {SDL_PIXELFORMAT_XBGR1555, "xbgr1555"},
  {SDL_PIXELFORMAT_ARGB4444, "argb4444"},
  {SDL_PIXELFORMAT_RGBA4444, "rgba4444"},
  {SDL_PIXELFORMAT_ABGR4444, "abgr4444"},
  {SDL_PIXELFORMAT_BGRA4444, "bgra4444"},
  {SDL_PIXELFORMAT_ARGB1555, "argb1555"},
  {SDL_PIXELFORMAT_RGBA5551, "rgba5551"},
  {SDL_PIXELFORMAT_ABGR1555, "abgr1555"},
  {SDL_PIXELFORMAT_BGRA5551, "bgra5551"},
  {SDL_PIXELFORMAT_RGB565, "rgb565"},
  {SDL_PIXELFORMAT_BGR565, "bgr565"},
  {SDL_PIXELFORMAT_RGB24, "rgb24"},
  {SDL_PIXELFORMAT_BGR24, "bgr24"},
  {SDL_PIXELFORMAT_XRGB8888, "xrgb8888"},
  {SDL_PIXELFORMAT_RGBX8888, "rgbx8888"},
  {SDL_PIXELFORMAT_XBGR8888, "xbgr8888"},
  {SDL_PIXELFORMAT_BGRX8888, "bgrx8888"},
  {SDL_PIXELFORMAT_ARGB8888, "argb8888"},
  {SDL_PIXELFORMAT_RGBA8888, "rgba8888"},
  {SDL_PIXELFORMAT_ABGR8888, "abgr8888"},
  {SDL_PIXELFORMAT_BGRA8888, "bgra8888"},
  {SDL_PIXELFORMAT_XRGB2101010, "xrgb2101010"},
  {SDL_PIXELFORMAT_XBGR2101010, "xbgr2101010"},
  {SDL_PIXELFORMAT_ARGB2101010, "argb2101010"},
  {SDL_PIXELFORMAT_ABGR2101010, "abgr2101010"},
  {SDL_PIXELFORMAT_RGB48, "rgb48"},
  {SDL_PIXELFORMAT_BGR48, "bgr48"},
  {SDL_PIXELFORMAT_RGBA64, "rgba64"},
  {SDL_PIXELFORMAT_ARGB64, "argb64"},
  {SDL_PIXELFORMAT_BGRA64, "bgra64"},
  {SDL_PIXELFORMAT_ABGR64, "abgr64"},
  {SDL_PIXELFORMAT_RGB48_FLOAT, "rgb48_float"},
  {SDL_PIXELFORMAT_BGR48_FLOAT, "bgr48_float"},
  {SDL_PIXELFORMAT_RGBA64_FLOAT, "rgba64_float"},
  {SDL_PIXELFORMAT_ARGB64_FLOAT, "argb64_float"},
  {SDL_PIXELFORMAT_BGRA64_FLOAT, "bgra64_float"},
  {SDL_PIXELFORMAT_ABGR64_FLOAT, "abgr64_float"},
  {SDL_PIXELFORMAT_RGB96_FLOAT, "rgb96_float"},
  {SDL_PIXELFORMAT_BGR96_FLOAT, "bgr96_float"},
  {SDL_PIXELFORMAT_RGBA128_FLOAT, "rgba128_float"},
  {SDL_PIXELFORMAT_ARGB128_FLOAT, "argb128_float"},
  {SDL_PIXELFORMAT_BGRA128_FLOAT, "bgra128_float"},
  {SDL_PIXELFORMAT_ABGR128_FLOAT, "abgr128_float"},
  {SDL_PIXELFORMAT_YV12, "yv12"},
  {SDL_PIXELFORMAT_IYUV, "iyuv"},
  {SDL_PIXELFORMAT_YUY2, "yuy2"},
  {SDL_PIXELFORMAT_UYVY, "uyvy"},
  {SDL_PIXELFORMAT_YVYU, "yvyu"},
  {SDL_PIXELFORMAT_NV12, "nv12"},
  {SDL_PIXELFORMAT_NV21, "nv21"},
  {SDL_PIXELFORMAT_P010, "p010"},
  {SDL_PIXELFORMAT_RGBA32, "rgba32"}
};
JS2ENUM(SDL_PixelFormat)
static JSValue cameraspec2js(JSContext *js, const SDL_CameraSpec *spec) {
  JSValue obj = JS_NewObject(js);

  JS_SetPropertyStr(js, obj, "format", JS_NewString(js, pixelformat2str(spec->format)));
  JS_SetPropertyStr(js, obj, "format", SDL_PixelFormat2js(js, spec->format));
  JS_SetPropertyStr(js, obj, "colorspace", JS_NewInt32(js, spec->colorspace));
  JS_SetPropertyStr(js, obj, "width", JS_NewInt32(js, spec->width));
  JS_SetPropertyStr(js, obj, "height", JS_NewInt32(js, spec->height));
@@ -184,9 +115,7 @@ static SDL_CameraSpec js2cameraspec(JSContext *js, JSValue obj) {

  v = JS_GetPropertyStr(js, obj, "format");
  if (!JS_IsNull(v)) {
    const char *s = JS_ToCString(js, v);
    spec.format = str2pixelformat(s);
    JS_FreeCString(js, s);
    spec.format = js2SDL_PixelFormat(js, v);
  }
  JS_FreeValue(js, v);
@@ -13,6 +13,10 @@ SDL_PixelFormat str2pixelformat(const char *str);
SDL_PixelFormat js2pixelformat(JSContext *js, JSValue v);
JSValue pixelformat2js(JSContext *js, SDL_PixelFormat format);
const char *pixelformat2str(SDL_PixelFormat format);

// New enum system functions
int js2SDL_PixelFormat(JSContext *js, JSValue v);
JSValue SDL_PixelFormat2js(JSContext *js, int enumval);
SDL_Colorspace str2colorspace(const char *str);
SDL_Colorspace js2colorspace(JSContext *js, JSValue v);
JSValue colorspace2js(JSContext *js, SDL_Colorspace colorspace);
@@ -418,7 +418,8 @@ SDL_GPUColorTargetInfo js2SDL_GPUColorTargetInfo(JSContext *js, JSValue v)
  JS_GETPROP(js,info.store_op,v,store,SDL_GPUStoreOp)
  JS_GETPROP(js,info.resolve_mip_level,v,resolve_mip_level,number)
  JS_GETPROP(js,info.resolve_layer,v,resolve_layer,number)
  JS_GETPROP(js,info.clear_color,v,clear,SDL_FColor)
  JS_GETPROP(js,info.clear_color,v,clear_color,SDL_FColor)

  return info;
}
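On the script side this only widens the accepted key name: a color target description can now spell the clear value as clear_color, which is what the render path earlier in this commit already passes. A minimal sketch, mirroring the color_targets literal used there:

// Color target object as consumed by js2SDL_GPUColorTargetInfo; the clear value
// may now be given under "clear_color" (the older "clear" key still maps too).
var target = {
  texture: cmd.camera.surface[GPU],
  mip_level: 0,
  layer: 0,
  load: "clear",
  clear_color: cmd.camera.background,
  store: "store",
}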
@@ -5,24 +5,21 @@
#include <SDL3/SDL_gpu.h>
#include <SDL3/SDL_surface.h>
#include <string.h>
#include <stdint.h>
#include "qjs_sdl.h"
#include "qjs_common.h"

#define STB_DXT_IMPLEMENTATION
#include "thirdparty/stb/stb_dxt.h"

JSValue pixelformat2js(JSContext *js, SDL_PixelFormat fmt)
{
  const char *str = pixelformat2str(fmt);
  return JS_NewString(js, str);
  return SDL_PixelFormat2js(js, fmt);
}

SDL_PixelFormat js2pixelformat(JSContext *js, JSValue v)
{
  if (JS_IsNull(v)) return SDL_PIXELFORMAT_UNKNOWN;
  const char *s = JS_ToCString(js, v);
  if (!s) return SDL_PIXELFORMAT_UNKNOWN;

  SDL_PixelFormat fmt = str2pixelformat(s);
  JS_FreeCString(js,s);
  return fmt;
  return js2SDL_PixelFormat(js, v);
}

typedef struct { const char *name; SDL_ScaleMode mode; } scale_entry;
@@ -225,6 +222,355 @@ JSC_CCALL(surface_toJSON,
  return obj;
)
// Check for integer overflow in size calculations
static int check_size_overflow(size_t a, size_t b, size_t c, size_t *result)
{
  if (a > SIZE_MAX / b) return 1;
  size_t temp = a * b;
  if (temp > SIZE_MAX / c) return 1;
  *result = temp * c;
  return 0;
}

// Helper function for BC1/BC3 compression
static JSValue compress_bc_common(JSContext *js, JSValueConst *argv, int argc, int alpha_mode, const char *format_name)
{
  if (argc < 1)
    return JS_ThrowTypeError(js, "compress_%s requires an object argument", format_name);

  // Check if width/height properties exist
  JSValue width_val = JS_GetPropertyStr(js, argv[0], "width");
  JSValue height_val = JS_GetPropertyStr(js, argv[0], "height");

  if (JS_IsNull(width_val) || JS_IsNull(height_val)) {
    JS_FreeValue(js, width_val);
    JS_FreeValue(js, height_val);
    return JS_ThrowTypeError(js, "compress_%s requires width and height properties", format_name);
  }

  int width, height;
  if (JS_ToInt32(js, &width, width_val) < 0 || JS_ToInt32(js, &height, height_val) < 0) {
    JS_FreeValue(js, width_val);
    JS_FreeValue(js, height_val);
    return JS_ThrowTypeError(js, "width and height must be numbers");
  }
  JS_FreeValue(js, width_val);
  JS_FreeValue(js, height_val);

  if (width < 1 || height < 1)
    return JS_ThrowRangeError(js, "width and height must be at least 1");

  if (width % 4 != 0 || height % 4 != 0)
    return JS_ThrowRangeError(js, "Width and height must be multiples of 4 for BC compression");

  // Get pixel format
  JSValue format_val = JS_GetPropertyStr(js, argv[0], "format");
  SDL_PixelFormat format = js2pixelformat(js, format_val);
  JS_FreeValue(js, format_val);

  if (format == SDL_PIXELFORMAT_UNKNOWN)
    return JS_ThrowTypeError(js, "Invalid or missing pixel format");

  // Get pixels
  JSValue pixels_val = JS_GetPropertyStr(js, argv[0], "pixels");
  size_t pixel_len;
  void *pixel_data = js_get_blob_data(js, &pixel_len, pixels_val);

  if (!pixel_data) {
    JS_FreeValue(js, pixels_val);
    return JS_ThrowTypeError(js, "pixels property must be an ArrayBuffer");
  }

  // Validate buffer size
  int bytes_per_pixel = SDL_BYTESPERPIXEL(format);
  size_t required_size;
  if (check_size_overflow(width, height, bytes_per_pixel, &required_size)) {
    JS_FreeValue(js, pixels_val);
    return JS_ThrowRangeError(js, "Image dimensions too large");
  }

  if (pixel_len < required_size) {
    JS_FreeValue(js, pixels_val);
    return JS_ThrowRangeError(js, "pixels buffer too small for %dx%d format (need %zu bytes, got %zu)",
      width, height, required_size, pixel_len);
  }

  // Get high quality mode (default true)
  int high_quality = 1;
  if (argc > 1) {
    high_quality = JS_ToBool(js, argv[1]);
  }

  int mode = high_quality ? STB_DXT_HIGHQUAL : STB_DXT_NORMAL;

  // Calculate output size with overflow check
  int blocks_x = width / 4;
  int blocks_y = height / 4;
  int bytes_per_block = (alpha_mode == 0) ? 8 : 16; // BC1=8, BC3=16
  size_t output_size;
  if (check_size_overflow(blocks_x, blocks_y, bytes_per_block, &output_size)) {
    JS_FreeValue(js, pixels_val);
    return JS_ThrowRangeError(js, "Output dimensions too large");
  }

  // Allocate output buffer
  unsigned char *output = malloc(output_size);
  if (!output) {
    JS_FreeValue(js, pixels_val);
    return JS_ThrowOutOfMemory(js);
  }

  // Allocate RGBA conversion buffer
  size_t rgba_size;
  if (check_size_overflow(width, height, 4, &rgba_size)) {
    free(output);
    JS_FreeValue(js, pixels_val);
    return JS_ThrowRangeError(js, "RGBA buffer size too large");
  }

  unsigned char *rgba_data = malloc(rgba_size);
  if (!rgba_data) {
    free(output);
    JS_FreeValue(js, pixels_val);
    return JS_ThrowOutOfMemory(js);
  }

  // Convert to RGBA using SDL
  int convert_result = SDL_ConvertPixels(
    width, height,
    format, pixel_data, width * bytes_per_pixel,
    SDL_PIXELFORMAT_RGBA32, rgba_data, width * 4
  );

  JS_FreeValue(js, pixels_val);

  if (convert_result != 0) {
    free(output);
    free(rgba_data);
    return JS_ThrowInternalError(js, "Failed to convert pixels: %s", SDL_GetError());
  }

  // Compress blocks
  for (int by = 0; by < blocks_y; by++) {
    for (int bx = 0; bx < blocks_x; bx++) {
      unsigned char block[64]; // 4x4 RGBA = 64 bytes

      // Extract 4x4 block
      for (int y = 0; y < 4; y++) {
        for (int x = 0; x < 4; x++) {
          int src_x = bx * 4 + x;
          int src_y = by * 4 + y;
          int src_idx = (src_y * width + src_x) * 4;
          int dst_idx = (y * 4 + x) * 4;

          block[dst_idx + 0] = rgba_data[src_idx + 0];
          block[dst_idx + 1] = rgba_data[src_idx + 1];
          block[dst_idx + 2] = rgba_data[src_idx + 2];
          block[dst_idx + 3] = rgba_data[src_idx + 3];
        }
      }

      // Compress block
      int output_idx = (by * blocks_x + bx) * bytes_per_block;
      stb_compress_dxt_block(output + output_idx, block, alpha_mode, mode);
    }
  }

  free(rgba_data);

  // Create result object
  JSValue result = JS_NewObject(js);
  JS_SetPropertyStr(js, result, "width", JS_NewInt32(js, width));
  JS_SetPropertyStr(js, result, "height", JS_NewInt32(js, height));
  JS_SetPropertyStr(js, result, "format", JS_NewString(js, format_name));
  JS_SetPropertyStr(js, result, "pitch", JS_NewInt32(js, blocks_x * bytes_per_block));

  JSValue compressed_pixels = js_new_blob_stoned_copy(js, output, output_size);
  free(output); // Free the output buffer after copying to blob
  JS_SetPropertyStr(js, result, "pixels", compressed_pixels);

  return result;
}

// BC1/DXT1 compression
JSC_CCALL(surface_compress_bc1,
  return compress_bc_common(js, argv, argc, 0, "bc1");
)

// BC3/DXT5 compression
JSC_CCALL(surface_compress_bc3,
  return compress_bc_common(js, argv, argc, 1, "bc3");
)
// Generic helper for BC4/BC5 channel compression
static JSValue compress_bc_channels(JSContext *js, JSValueConst *argv, int argc,
    int num_channels, const char *format_name,
    void (*compress_func)(unsigned char *dest, const unsigned char *src))
{
  if (argc < 1)
    return JS_ThrowTypeError(js, "compress_%s requires an object argument", format_name);

  // Check if width/height properties exist
  JSValue width_val = JS_GetPropertyStr(js, argv[0], "width");
  JSValue height_val = JS_GetPropertyStr(js, argv[0], "height");

  if (JS_IsNull(width_val) || JS_IsNull(height_val)) {
    JS_FreeValue(js, width_val);
    JS_FreeValue(js, height_val);
    return JS_ThrowTypeError(js, "compress_%s requires width and height properties", format_name);
  }

  int width, height;
  if (JS_ToInt32(js, &width, width_val) < 0 || JS_ToInt32(js, &height, height_val) < 0) {
    JS_FreeValue(js, width_val);
    JS_FreeValue(js, height_val);
    return JS_ThrowTypeError(js, "width and height must be numbers");
  }
  JS_FreeValue(js, width_val);
  JS_FreeValue(js, height_val);

  if (width < 1 || height < 1)
    return JS_ThrowRangeError(js, "width and height must be at least 1");

  if (width % 4 != 0 || height % 4 != 0)
    return JS_ThrowRangeError(js, "Width and height must be multiples of 4 for BC compression");

  // Get pixel format
  JSValue format_val = JS_GetPropertyStr(js, argv[0], "format");
  SDL_PixelFormat format = js2pixelformat(js, format_val);
  JS_FreeValue(js, format_val);

  if (format == SDL_PIXELFORMAT_UNKNOWN)
    return JS_ThrowTypeError(js, "Invalid or missing pixel format");

  // Get pixels
  JSValue pixels_val = JS_GetPropertyStr(js, argv[0], "pixels");
  size_t pixel_len;
  void *pixel_data = js_get_blob_data(js, &pixel_len, pixels_val);

  if (!pixel_data) {
    JS_FreeValue(js, pixels_val);
    return JS_ThrowTypeError(js, "pixels property must be an ArrayBuffer");
  }

  // Validate buffer size
  int bytes_per_pixel = SDL_BYTESPERPIXEL(format);
  if (bytes_per_pixel < num_channels) {
    JS_FreeValue(js, pixels_val);
    return JS_ThrowTypeError(js, "%s compression requires a format with at least %d channel(s)",
      format_name, num_channels);
  }

  size_t required_size;
  if (check_size_overflow(width, height, bytes_per_pixel, &required_size)) {
    JS_FreeValue(js, pixels_val);
    return JS_ThrowRangeError(js, "Image dimensions too large");
  }

  if (pixel_len < required_size) {
    JS_FreeValue(js, pixels_val);
    return JS_ThrowRangeError(js, "pixels buffer too small for %dx%d format (need %zu bytes, got %zu)",
      width, height, required_size, pixel_len);
  }

  // Calculate output size with overflow check
  int blocks_x = width / 4;
  int blocks_y = height / 4;
  int bytes_per_block = (num_channels == 1) ? 8 : 16; // BC4=8, BC5=16
  size_t output_size;
  if (check_size_overflow(blocks_x, blocks_y, bytes_per_block, &output_size)) {
    JS_FreeValue(js, pixels_val);
    return JS_ThrowRangeError(js, "Output dimensions too large");
  }

  // Allocate output buffer
  unsigned char *output = malloc(output_size);
  if (!output) {
    JS_FreeValue(js, pixels_val);
    return JS_ThrowOutOfMemory(js);
  }

  // Allocate channel extraction buffer
  size_t channel_size;
  if (check_size_overflow(width, height, num_channels, &channel_size)) {
    free(output);
    JS_FreeValue(js, pixels_val);
    return JS_ThrowRangeError(js, "Channel buffer size too large");
  }

  unsigned char *channel_data = malloc(channel_size);
  if (!channel_data) {
    free(output);
    JS_FreeValue(js, pixels_val);
    return JS_ThrowOutOfMemory(js);
  }

  // Extract channels
  for (int y = 0; y < height; y++) {
    for (int x = 0; x < width; x++) {
      int src_idx = (y * width + x) * bytes_per_pixel;
      int dst_idx = (y * width + x) * num_channels;

      // Extract first 'num_channels' channels
      for (int c = 0; c < num_channels; c++) {
        channel_data[dst_idx + c] = ((unsigned char*)pixel_data)[src_idx + c];
      }
    }
  }

  JS_FreeValue(js, pixels_val);

  // Compress blocks
  for (int by = 0; by < blocks_y; by++) {
    for (int bx = 0; bx < blocks_x; bx++) {
      unsigned char block[32]; // Max 4x4 * 2 channels = 32 bytes

      // Extract 4x4 block
      for (int y = 0; y < 4; y++) {
        for (int x = 0; x < 4; x++) {
          int src_x = bx * 4 + x;
          int src_y = by * 4 + y;
          int src_idx = (src_y * width + src_x) * num_channels;
          int dst_idx = (y * 4 + x) * num_channels;

          for (int c = 0; c < num_channels; c++) {
            block[dst_idx + c] = channel_data[src_idx + c];
          }
        }
      }

      // Compress block
      int output_idx = (by * blocks_x + bx) * bytes_per_block;
      compress_func(output + output_idx, block);
    }
  }

  free(channel_data);

  // Create result object
  JSValue result = JS_NewObject(js);
  JS_SetPropertyStr(js, result, "width", JS_NewInt32(js, width));
  JS_SetPropertyStr(js, result, "height", JS_NewInt32(js, height));
  JS_SetPropertyStr(js, result, "format", JS_NewString(js, format_name));
  JS_SetPropertyStr(js, result, "pitch", JS_NewInt32(js, blocks_x * bytes_per_block));

  JSValue compressed_pixels = js_new_blob_stoned_copy(js, output, output_size);
  free(output); // Free the output buffer after copying to blob
  JS_SetPropertyStr(js, result, "pixels", compressed_pixels);

  return result;
}

// BC4 compression (single channel)
JSC_CCALL(surface_compress_bc4,
  return compress_bc_channels(js, argv, argc, 1, "bc4", stb_compress_bc4_block);
)

// BC5 compression (two channels)
JSC_CCALL(surface_compress_bc5,
  return compress_bc_channels(js, argv, argc, 2, "bc5", stb_compress_bc5_block);
)
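A minimal usage sketch from script, under stated assumptions: the compressors are registered later in this commit as static methods on the surface constructor (compress_bc1/bc3/bc4/bc5), os.make_texture above yields an rgba32 object with width, height, format, pitch and pixels, and the use('sdl_surface') module name plus io.slurp are placeholders rather than part of the commit.

// Decode an encoded image, then BC-compress it (width and height must be multiples of 4).
var surface = use('sdl_surface')                          // placeholder module name for js_sdl_surface_use
var img = os.make_texture(io.slurp('sprites/hero.png'))   // { width, height, format: "rgba32", pitch, pixels, ... }

var bc3 = surface.compress_bc3(img)          // 16 bytes per 4x4 block, keeps alpha
var bc1 = surface.compress_bc1(img, false)   // optional second argument toggles high-quality mode (default true)
var bc4 = surface.compress_bc4(img)          // single-channel; takes the first channel of the input
// Each result is { width, height, format: "bc1"/"bc3"/"bc4", pitch, pixels }.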
// Constructor for SDL_Surface
JSC_CCALL(surface_constructor,
  if (argc < 1)
@@ -312,16 +658,148 @@ static const JSCFunctionListEntry js_SDL_Surface_funcs[] = {
  JS_CGETSET_DEF("pitch", js_surface_get_pitch, NULL),
};

// Generic convert function for pixel format/colorspace conversion
JSC_CCALL(surface_convert_generic,
  if (argc < 2)
    return JS_ThrowTypeError(js, "convert requires source and conversion objects");

  // Parse source object
  int src_width, src_height;
  JS_GETATOM(js, src_width, argv[0], width, number)
  JS_GETATOM(js, src_height, argv[0], height, number)

  if (!src_width || !src_height)
    return JS_ThrowTypeError(js, "source object requires width and height");

  // Get source format
  JSValue src_format_val = JS_GetPropertyStr(js, argv[0], "format");
  SDL_PixelFormat src_format = js2pixelformat(js, src_format_val);
  JS_FreeValue(js, src_format_val);

  if (src_format == SDL_PIXELFORMAT_UNKNOWN)
    return JS_ThrowTypeError(js, "source object requires valid format");

  // Get source pixels
  JSValue src_pixels_val = JS_GetPropertyStr(js, argv[0], "pixels");
  size_t src_len;
  void *src_pixels = js_get_blob_data(js, &src_len, src_pixels_val);

  if (!src_pixels) {
    JS_FreeValue(js, src_pixels_val);
    return JS_ThrowTypeError(js, "source pixels must be an ArrayBuffer");
  }

  // Get source pitch (optional, calculate if not provided)
  int src_pitch;
  JSValue src_pitch_val = JS_GetPropertyStr(js, argv[0], "pitch");
  if (!JS_IsNull(src_pitch_val)) {
    src_pitch = js2number(js, src_pitch_val);
    JS_FreeValue(js, src_pitch_val);
  } else {
    src_pitch = src_width * SDL_BYTESPERPIXEL(src_format);
  }

  // Get source colorspace (optional)
  JSValue src_colorspace_val = JS_GetPropertyStr(js, argv[0], "colorspace");
  SDL_Colorspace src_colorspace = SDL_COLORSPACE_SRGB; // default
  if (!JS_IsNull(src_colorspace_val)) {
    // For now, we'll use a simple numeric value for colorspace
    int colorspace_num;
    if (JS_ToInt32(js, &colorspace_num, src_colorspace_val) == 0) {
      src_colorspace = (SDL_Colorspace)colorspace_num;
    }
  }
  JS_FreeValue(js, src_colorspace_val);

  // Parse conversion object
  JSValue dst_format_val = JS_GetPropertyStr(js, argv[1], "format");
  SDL_PixelFormat dst_format = js2pixelformat(js, dst_format_val);
  JS_FreeValue(js, dst_format_val);

  if (dst_format == SDL_PIXELFORMAT_UNKNOWN)
    return JS_ThrowTypeError(js, "conversion object requires valid format");

  // Get destination pitch (optional)
  int dst_pitch;
  JSValue dst_pitch_val = JS_GetPropertyStr(js, argv[1], "pitch");
  if (!JS_IsNull(dst_pitch_val)) {
    dst_pitch = js2number(js, dst_pitch_val);
    JS_FreeValue(js, dst_pitch_val);
  } else {
    dst_pitch = src_width * SDL_BYTESPERPIXEL(dst_format);
  }

  // Get destination colorspace (optional)
  JSValue dst_colorspace_val = JS_GetPropertyStr(js, argv[1], "colorspace");
  SDL_Colorspace dst_colorspace = SDL_COLORSPACE_SRGB; // default
  if (!JS_IsNull(dst_colorspace_val)) {
    int colorspace_num;
    if (JS_ToInt32(js, &colorspace_num, dst_colorspace_val) == 0) {
      dst_colorspace = (SDL_Colorspace)colorspace_num;
    }
  }
  JS_FreeValue(js, dst_colorspace_val);

  // Calculate destination buffer size
  size_t dst_size = dst_pitch * src_height;
  void *dst_pixels = malloc(dst_size);
  if (!dst_pixels) {
    JS_FreeValue(js, src_pixels_val);
    return JS_ThrowOutOfMemory(js);
  }

  // Check if we have colorspace info for both source and dest
  bool has_src_colorspace = !JS_IsNull(JS_GetPropertyStr(js, argv[0], "colorspace"));
  bool has_dst_colorspace = !JS_IsNull(JS_GetPropertyStr(js, argv[1], "colorspace"));

  bool success;
  if (has_src_colorspace || has_dst_colorspace) {
    // Use SDL_ConvertPixelsAndColorspace
    success = SDL_ConvertPixelsAndColorspace(
      src_width, src_height,
      src_format, src_colorspace, 0, src_pixels, src_pitch,
      dst_format, dst_colorspace, 0, dst_pixels, dst_pitch
    );
  } else {
    // Use SDL_ConvertPixels
    success = SDL_ConvertPixels(
      src_width, src_height,
      src_format, src_pixels, src_pitch,
      dst_format, dst_pixels, dst_pitch
    );
  }

  JS_FreeValue(js, src_pixels_val);

  if (!success) {
    free(dst_pixels);
    return JS_ThrowInternalError(js, "Pixel conversion failed: %s", SDL_GetError());
  }

  // Return a stoned blob with the converted pixels
  ret = js_new_blob_stoned_copy(js, dst_pixels, dst_size);
  free(dst_pixels);
)
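A minimal script-side sketch of the generic convert entry point registered below on the same constructor; the input object here is illustrative, and as the code above shows the call returns only the converted pixel blob, so the caller keeps width, height, format and pitch itself.

// Convert decoded rgba32 pixels into bgra8888 (both names appear in the format table above).
var converted = surface.convert(
  { width: img.width, height: img.height, format: "rgba32", pixels: img.pixels },
  { format: "bgra8888" }      // pitch defaults to width * bytes-per-pixel; colorspace keys are optional numbers
)
// converted is a blob of dst_pitch * height bytes; pair it with the original
// dimensions and the new format string when building a texture from it.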
JSValue js_sdl_surface_use(JSContext *js)
{
  QJSCLASSPREP_FUNCS(SDL_Surface)

  // Return a constructor function that creates SDL_Surface objects
  // Add the surface constructor
  JSValue ctor = JS_NewCFunction2(js, js_surface_constructor, "surface", 1, JS_CFUNC_constructor, 0);

  JSValue proto = JS_GetClassProto(js, js_SDL_Surface_id);
  JS_SetConstructor(js, ctor, proto);
  JS_FreeValue(js, proto);

  // Add the generic convert function as a property on the constructor
  JS_SetPropertyStr(js, ctor, "convert", JS_NewCFunction(js, js_surface_convert_generic, "convert", 2));

  // Add the compression functions as static methods on the constructor
  JS_SetPropertyStr(js, ctor, "compress_bc1", JS_NewCFunction(js, js_surface_compress_bc1, "compress_bc1", 2));
  JS_SetPropertyStr(js, ctor, "compress_bc3", JS_NewCFunction(js, js_surface_compress_bc3, "compress_bc3", 2));
  JS_SetPropertyStr(js, ctor, "compress_bc4", JS_NewCFunction(js, js_surface_compress_bc4, "compress_bc4", 1));
  JS_SetPropertyStr(js, ctor, "compress_bc5", JS_NewCFunction(js, js_surface_compress_bc5, "compress_bc5", 1));

  return ctor;
}