extend camera
Some checks failed
Build and Deploy / build-macos (push) Failing after 7s
Build and Deploy / build-windows (CLANG64) (push) Has been cancelled
Build and Deploy / package-dist (push) Has been cancelled
Build and Deploy / deploy-itch (push) Has been cancelled
Build and Deploy / deploy-gitea (push) Has been cancelled
Build and Deploy / build-linux (push) Has been cancelled
282 source/qjs_sdl.c
@@ -4,6 +4,7 @@
#include "prosperon.h"
#include "stb_ds.h"
#include "qjs_actor.h"
#include "qjs_sdl_surface.h"

#include <SDL3/SDL.h>

@@ -159,11 +160,197 @@ JSValue js_input_use(JSContext *js) {
}

// CAMERA FUNCTIONS

// Helper functions for camera format conversion
static const char *pixelformat2str(SDL_PixelFormat format) {
  switch(format) {
    case SDL_PIXELFORMAT_UNKNOWN: return "unknown";
    case SDL_PIXELFORMAT_INDEX1LSB: return "index1lsb";
    case SDL_PIXELFORMAT_INDEX1MSB: return "index1msb";
    case SDL_PIXELFORMAT_INDEX2LSB: return "index2lsb";
    case SDL_PIXELFORMAT_INDEX2MSB: return "index2msb";
    case SDL_PIXELFORMAT_INDEX4LSB: return "index4lsb";
    case SDL_PIXELFORMAT_INDEX4MSB: return "index4msb";
    case SDL_PIXELFORMAT_INDEX8: return "index8";
    case SDL_PIXELFORMAT_RGB332: return "rgb332";
    case SDL_PIXELFORMAT_XRGB4444: return "xrgb4444";
    case SDL_PIXELFORMAT_XBGR4444: return "xbgr4444";
    case SDL_PIXELFORMAT_XRGB1555: return "xrgb1555";
    case SDL_PIXELFORMAT_XBGR1555: return "xbgr1555";
    case SDL_PIXELFORMAT_ARGB4444: return "argb4444";
    case SDL_PIXELFORMAT_RGBA4444: return "rgba4444";
    case SDL_PIXELFORMAT_ABGR4444: return "abgr4444";
    case SDL_PIXELFORMAT_BGRA4444: return "bgra4444";
    case SDL_PIXELFORMAT_ARGB1555: return "argb1555";
    case SDL_PIXELFORMAT_RGBA5551: return "rgba5551";
    case SDL_PIXELFORMAT_ABGR1555: return "abgr1555";
    case SDL_PIXELFORMAT_BGRA5551: return "bgra5551";
    case SDL_PIXELFORMAT_RGB565: return "rgb565";
    case SDL_PIXELFORMAT_BGR565: return "bgr565";
    case SDL_PIXELFORMAT_RGB24: return "rgb24";
    case SDL_PIXELFORMAT_BGR24: return "bgr24";
    case SDL_PIXELFORMAT_XRGB8888: return "xrgb8888";
    case SDL_PIXELFORMAT_RGBX8888: return "rgbx8888";
    case SDL_PIXELFORMAT_XBGR8888: return "xbgr8888";
    case SDL_PIXELFORMAT_BGRX8888: return "bgrx8888";
    case SDL_PIXELFORMAT_ARGB8888: return "argb8888";
    case SDL_PIXELFORMAT_RGBA8888: return "rgba8888";
    case SDL_PIXELFORMAT_ABGR8888: return "abgr8888";
    case SDL_PIXELFORMAT_BGRA8888: return "bgra8888";
    case SDL_PIXELFORMAT_XRGB2101010: return "xrgb2101010";
    case SDL_PIXELFORMAT_XBGR2101010: return "xbgr2101010";
    case SDL_PIXELFORMAT_ARGB2101010: return "argb2101010";
    case SDL_PIXELFORMAT_ABGR2101010: return "abgr2101010";
    case SDL_PIXELFORMAT_RGB48: return "rgb48";
    case SDL_PIXELFORMAT_BGR48: return "bgr48";
    case SDL_PIXELFORMAT_RGBA64: return "rgba64";
    case SDL_PIXELFORMAT_ARGB64: return "argb64";
    case SDL_PIXELFORMAT_BGRA64: return "bgra64";
    case SDL_PIXELFORMAT_ABGR64: return "abgr64";
    case SDL_PIXELFORMAT_RGB48_FLOAT: return "rgb48_float";
    case SDL_PIXELFORMAT_BGR48_FLOAT: return "bgr48_float";
    case SDL_PIXELFORMAT_RGBA64_FLOAT: return "rgba64_float";
    case SDL_PIXELFORMAT_ARGB64_FLOAT: return "argb64_float";
    case SDL_PIXELFORMAT_BGRA64_FLOAT: return "bgra64_float";
    case SDL_PIXELFORMAT_ABGR64_FLOAT: return "abgr64_float";
    case SDL_PIXELFORMAT_RGB96_FLOAT: return "rgb96_float";
    case SDL_PIXELFORMAT_BGR96_FLOAT: return "bgr96_float";
    case SDL_PIXELFORMAT_RGBA128_FLOAT: return "rgba128_float";
    case SDL_PIXELFORMAT_ARGB128_FLOAT: return "argb128_float";
    case SDL_PIXELFORMAT_BGRA128_FLOAT: return "bgra128_float";
    case SDL_PIXELFORMAT_ABGR128_FLOAT: return "abgr128_float";
    case SDL_PIXELFORMAT_YV12: return "yv12";
    case SDL_PIXELFORMAT_IYUV: return "iyuv";
    case SDL_PIXELFORMAT_YUY2: return "yuy2";
    case SDL_PIXELFORMAT_UYVY: return "uyvy";
    case SDL_PIXELFORMAT_YVYU: return "yvyu";
    case SDL_PIXELFORMAT_NV12: return "nv12";
    case SDL_PIXELFORMAT_NV21: return "nv21";
    case SDL_PIXELFORMAT_P010: return "p010";
    default: return "unknown";
  }
}

static SDL_PixelFormat str2pixelformat(const char *str) {
  if (!strcmp(str, "unknown")) return SDL_PIXELFORMAT_UNKNOWN;
  if (!strcmp(str, "index1lsb")) return SDL_PIXELFORMAT_INDEX1LSB;
  if (!strcmp(str, "index1msb")) return SDL_PIXELFORMAT_INDEX1MSB;
  if (!strcmp(str, "index2lsb")) return SDL_PIXELFORMAT_INDEX2LSB;
  if (!strcmp(str, "index2msb")) return SDL_PIXELFORMAT_INDEX2MSB;
  if (!strcmp(str, "index4lsb")) return SDL_PIXELFORMAT_INDEX4LSB;
  if (!strcmp(str, "index4msb")) return SDL_PIXELFORMAT_INDEX4MSB;
  if (!strcmp(str, "index8")) return SDL_PIXELFORMAT_INDEX8;
  if (!strcmp(str, "rgb332")) return SDL_PIXELFORMAT_RGB332;
  if (!strcmp(str, "xrgb4444")) return SDL_PIXELFORMAT_XRGB4444;
  if (!strcmp(str, "xbgr4444")) return SDL_PIXELFORMAT_XBGR4444;
  if (!strcmp(str, "xrgb1555")) return SDL_PIXELFORMAT_XRGB1555;
  if (!strcmp(str, "xbgr1555")) return SDL_PIXELFORMAT_XBGR1555;
  if (!strcmp(str, "argb4444")) return SDL_PIXELFORMAT_ARGB4444;
  if (!strcmp(str, "rgba4444")) return SDL_PIXELFORMAT_RGBA4444;
  if (!strcmp(str, "abgr4444")) return SDL_PIXELFORMAT_ABGR4444;
  if (!strcmp(str, "bgra4444")) return SDL_PIXELFORMAT_BGRA4444;
  if (!strcmp(str, "argb1555")) return SDL_PIXELFORMAT_ARGB1555;
  if (!strcmp(str, "rgba5551")) return SDL_PIXELFORMAT_RGBA5551;
  if (!strcmp(str, "abgr1555")) return SDL_PIXELFORMAT_ABGR1555;
  if (!strcmp(str, "bgra5551")) return SDL_PIXELFORMAT_BGRA5551;
  if (!strcmp(str, "rgb565")) return SDL_PIXELFORMAT_RGB565;
  if (!strcmp(str, "bgr565")) return SDL_PIXELFORMAT_BGR565;
  if (!strcmp(str, "rgb24")) return SDL_PIXELFORMAT_RGB24;
  if (!strcmp(str, "bgr24")) return SDL_PIXELFORMAT_BGR24;
  if (!strcmp(str, "xrgb8888")) return SDL_PIXELFORMAT_XRGB8888;
  if (!strcmp(str, "rgbx8888")) return SDL_PIXELFORMAT_RGBX8888;
  if (!strcmp(str, "xbgr8888")) return SDL_PIXELFORMAT_XBGR8888;
  if (!strcmp(str, "bgrx8888")) return SDL_PIXELFORMAT_BGRX8888;
  if (!strcmp(str, "argb8888")) return SDL_PIXELFORMAT_ARGB8888;
  if (!strcmp(str, "rgba8888")) return SDL_PIXELFORMAT_RGBA8888;
  if (!strcmp(str, "abgr8888")) return SDL_PIXELFORMAT_ABGR8888;
  if (!strcmp(str, "bgra8888")) return SDL_PIXELFORMAT_BGRA8888;
  if (!strcmp(str, "xrgb2101010")) return SDL_PIXELFORMAT_XRGB2101010;
  if (!strcmp(str, "xbgr2101010")) return SDL_PIXELFORMAT_XBGR2101010;
  if (!strcmp(str, "argb2101010")) return SDL_PIXELFORMAT_ARGB2101010;
  if (!strcmp(str, "abgr2101010")) return SDL_PIXELFORMAT_ABGR2101010;
  if (!strcmp(str, "rgb48")) return SDL_PIXELFORMAT_RGB48;
  if (!strcmp(str, "bgr48")) return SDL_PIXELFORMAT_BGR48;
  if (!strcmp(str, "rgba64")) return SDL_PIXELFORMAT_RGBA64;
  if (!strcmp(str, "argb64")) return SDL_PIXELFORMAT_ARGB64;
  if (!strcmp(str, "bgra64")) return SDL_PIXELFORMAT_BGRA64;
  if (!strcmp(str, "abgr64")) return SDL_PIXELFORMAT_ABGR64;
  if (!strcmp(str, "rgb48_float")) return SDL_PIXELFORMAT_RGB48_FLOAT;
  if (!strcmp(str, "bgr48_float")) return SDL_PIXELFORMAT_BGR48_FLOAT;
  if (!strcmp(str, "rgba64_float")) return SDL_PIXELFORMAT_RGBA64_FLOAT;
  if (!strcmp(str, "argb64_float")) return SDL_PIXELFORMAT_ARGB64_FLOAT;
  if (!strcmp(str, "bgra64_float")) return SDL_PIXELFORMAT_BGRA64_FLOAT;
  if (!strcmp(str, "abgr64_float")) return SDL_PIXELFORMAT_ABGR64_FLOAT;
  if (!strcmp(str, "rgb96_float")) return SDL_PIXELFORMAT_RGB96_FLOAT;
  if (!strcmp(str, "bgr96_float")) return SDL_PIXELFORMAT_BGR96_FLOAT;
  if (!strcmp(str, "rgba128_float")) return SDL_PIXELFORMAT_RGBA128_FLOAT;
  if (!strcmp(str, "argb128_float")) return SDL_PIXELFORMAT_ARGB128_FLOAT;
  if (!strcmp(str, "bgra128_float")) return SDL_PIXELFORMAT_BGRA128_FLOAT;
  if (!strcmp(str, "abgr128_float")) return SDL_PIXELFORMAT_ABGR128_FLOAT;
  if (!strcmp(str, "yv12")) return SDL_PIXELFORMAT_YV12;
  if (!strcmp(str, "iyuv")) return SDL_PIXELFORMAT_IYUV;
  if (!strcmp(str, "yuy2")) return SDL_PIXELFORMAT_YUY2;
  if (!strcmp(str, "uyvy")) return SDL_PIXELFORMAT_UYVY;
  if (!strcmp(str, "yvyu")) return SDL_PIXELFORMAT_YVYU;
  if (!strcmp(str, "nv12")) return SDL_PIXELFORMAT_NV12;
  if (!strcmp(str, "nv21")) return SDL_PIXELFORMAT_NV21;
  if (!strcmp(str, "p010")) return SDL_PIXELFORMAT_P010;
  return SDL_PIXELFORMAT_UNKNOWN;
}

static JSValue cameraspec2js(JSContext *js, const SDL_CameraSpec *spec) {
  JSValue obj = JS_NewObject(js);

  JS_SetPropertyStr(js, obj, "format", JS_NewString(js, pixelformat2str(spec->format)));
  JS_SetPropertyStr(js, obj, "colorspace", JS_NewInt32(js, spec->colorspace));
  JS_SetPropertyStr(js, obj, "width", JS_NewInt32(js, spec->width));
  JS_SetPropertyStr(js, obj, "height", JS_NewInt32(js, spec->height));
  JS_SetPropertyStr(js, obj, "framerate_numerator", JS_NewInt32(js, spec->framerate_numerator));
  JS_SetPropertyStr(js, obj, "framerate_denominator", JS_NewInt32(js, spec->framerate_denominator));

  return obj;
}

static SDL_CameraSpec js2cameraspec(JSContext *js, JSValue obj) {
  SDL_CameraSpec spec = {0};

  JSValue v;

  v = JS_GetPropertyStr(js, obj, "format");
  if (!JS_IsUndefined(v)) {
    const char *s = JS_ToCString(js, v);
    spec.format = str2pixelformat(s);
    JS_FreeCString(js, s);
  }
  JS_FreeValue(js, v);

  v = JS_GetPropertyStr(js, obj, "colorspace");
  if (!JS_IsUndefined(v)) JS_ToInt32(js, &spec.colorspace, v);
  JS_FreeValue(js, v);

  v = JS_GetPropertyStr(js, obj, "width");
  if (!JS_IsUndefined(v)) JS_ToInt32(js, &spec.width, v);
  JS_FreeValue(js, v);

  v = JS_GetPropertyStr(js, obj, "height");
  if (!JS_IsUndefined(v)) JS_ToInt32(js, &spec.height, v);
  JS_FreeValue(js, v);

  v = JS_GetPropertyStr(js, obj, "framerate_numerator");
  if (!JS_IsUndefined(v)) JS_ToInt32(js, &spec.framerate_numerator, v);
  JS_FreeValue(js, v);

  v = JS_GetPropertyStr(js, obj, "framerate_denominator");
  if (!JS_IsUndefined(v)) JS_ToInt32(js, &spec.framerate_denominator, v);
  JS_FreeValue(js, v);

  return spec;
}

JSC_CCALL(camera_list,
  int num;
  SDL_CameraID *ids = SDL_GetCameras(&num);
  if (num == 0) return JS_UNDEFINED;
  JSValue jsids = JS_NewArray(js);
  SDL_CameraID *ids = SDL_GetCameras(&num);
  for (int i = 0; i < num; i++)
    JS_SetPropertyUint32(js,jsids, i, number2js(js,ids[i]));

@@ -172,7 +359,16 @@ JSC_CCALL(camera_list,

JSC_CCALL(camera_open,
  int id = js2number(js,argv[0]);
  SDL_Camera *cam = SDL_OpenCamera(id, NULL);
  SDL_CameraSpec *spec_ptr = NULL;
  SDL_CameraSpec spec;

  // Check if a format spec was provided
  if (argc > 1 && !JS_IsUndefined(argv[1])) {
    spec = js2cameraspec(js, argv[1]);
    spec_ptr = &spec;
  }

  SDL_Camera *cam = SDL_OpenCamera(id, spec_ptr);
  if (!cam) ret = JS_ThrowReferenceError(js, "Could not open camera %d: %s\n", id, SDL_GetError());
  else
    ret = SDL_Camera2js(js,cam);
@@ -194,18 +390,45 @@ JSC_CCALL(camera_position,
}
)

JSC_CCALL(camera_drivers,
  int num = SDL_GetNumCameraDrivers();
  JSValue arr = JS_NewArray(js);
  for (int i = 0; i < num; i++)
    JS_SetPropertyUint32(js, arr, i, JS_NewString(js, SDL_GetCameraDriver(i)));
  return arr;
)

JSC_CCALL(camera_supported_formats,
  SDL_CameraID id = js2number(js,argv[0]);
  int num;
  SDL_CameraSpec **specs = SDL_GetCameraSupportedFormats(id, &num);

  if (!specs)
    return JS_ThrowReferenceError(js, "Could not get supported formats for camera %d: %s", id, SDL_GetError());

  JSValue arr = JS_NewArray(js);
  for (int i = 0; i < num; i++) {
    JS_SetPropertyUint32(js, arr, i, cameraspec2js(js, specs[i]));
  }

  SDL_free(specs);
  return arr;
)

static const JSCFunctionListEntry js_camera_funcs[] = {
  MIST_FUNC_DEF(camera, list, 0),
  MIST_FUNC_DEF(camera, open, 1),
  MIST_FUNC_DEF(camera, open, 2),
  MIST_FUNC_DEF(camera, name, 1),
  MIST_FUNC_DEF(camera, position, 1),
  MIST_FUNC_DEF(camera, drivers, 0),
  MIST_FUNC_DEF(camera, supported_formats, 1),
};

JSC_CCALL(camera_capture,
  /*
  SDL_ClearError();
  SDL_Camera *cam = js2SDL_Camera(js,self);
  if (!cam) return JS_ThrowReferenceError(js,"Self was not a camera: %s", SDL_GetError());

  SDL_Surface *surf = SDL_AcquireCameraFrame(cam, NULL);
  if (!surf) {
    const char *msg = SDL_GetError();
@@ -213,31 +436,58 @@ JSC_CCALL(camera_capture,
    return JS_ThrowReferenceError(js,"Could not get camera frame: %s", SDL_GetError());
    else return JS_UNDEFINED;
  }
  return SDL_Surface2js(js,surf);
  SDL_Surface *newsurf = SDL_CreateSurface(surf->w, surf->h, surf->format);
  SDL_ReleaseCameraFrame(cam,surf);

  int didit = SDL_BlitSurface(surf, NULL, newsurf, NULL);
  if (!didit) {
  // Create a copy of the surface
  SDL_Surface *newsurf = SDL_CreateSurface(surf->w, surf->h, surf->format);
  if (!newsurf) {
    SDL_ReleaseCameraFrame(cam, surf);
    return JS_ThrowReferenceError(js, "Could not create surface: %s", SDL_GetError());
  }

  // Copy the surface data
  int result = SDL_BlitSurface(surf, NULL, newsurf, NULL);

  // Release the camera frame
  SDL_ReleaseCameraFrame(cam, surf);

  if (result != 0) {
    SDL_DestroySurface(newsurf);
    return JS_ThrowReferenceError(js, "Could not blit: %s", SDL_GetError());
    return JS_ThrowReferenceError(js, "Could not blit surface: %s", SDL_GetError());
  }

  return SDL_Surface2js(js,newsurf);
  */
)

/* SDL_Camera *cam = js2SDL_Camera(js,self);
   SDL_Surface *surf = js2SDL_Surface(js,argv[0]);
   SDL_ReleaseCameraFrame(cam,surf);
*/
JSC_CCALL(camera_get_driver,
  SDL_Camera *cam = js2SDL_Camera(js,self);
  if (!cam) return JS_ThrowReferenceError(js,"Self was not a camera: %s", SDL_GetError());

  const char *driver = SDL_GetCurrentCameraDriver();
  if (!driver) return JS_UNDEFINED;

  return JS_NewString(js, driver);
)

JSC_CCALL(camera_get_format,
  SDL_Camera *cam = js2SDL_Camera(js,self);
  if (!cam) return JS_ThrowReferenceError(js,"Self was not a camera: %s", SDL_GetError());

  SDL_CameraSpec spec;
  if (!SDL_GetCameraFormat(cam, &spec))
    return JS_ThrowReferenceError(js, "Could not get camera format: %s", SDL_GetError());

  return cameraspec2js(js, &spec);
)

static const JSCFunctionListEntry js_SDL_Camera_funcs[] =
{
  MIST_FUNC_DEF(camera, capture, 0),
  MIST_FUNC_DEF(camera, get_driver, 0),
  MIST_FUNC_DEF(camera, get_format, 0),
};

JSValue js_camera_use(JSContext *js) {
  SDL_Init(SDL_INIT_CAMERA);
  JSValue mod = JS_NewObject(js);
  JS_SetPropertyFunctionList(js,mod,js_camera_funcs,countof(js_camera_funcs));

75 tests/camera_info.js (new file)
@@ -0,0 +1,75 @@
// Test the new camera functionality
var camera = use('camera');

// Get camera drivers
var drivers = camera.drivers();
console.log("Available camera drivers:", drivers);

// Get list of cameras
var cameras = camera.list();
console.log("Found", cameras.length, "cameras");

// Get info about each camera
for (var i = 0; i < cameras.length; i++) {
  var cam_id = cameras[i];
  console.log("\nCamera", i + 1, "ID:", cam_id);
  console.log(" Name:", camera.name(cam_id));
  console.log(" Position:", camera.position(cam_id));

  // Get supported formats
  var formats = camera.supported_formats(cam_id);
  console.log(" Supported formats:", formats.length);

  // Show first few formats
  for (var j = 0; j < formats.length; j++) {
    var fmt = formats[j];
    console.log(" Format", j + 1 + ":");
    console.log(" Pixel format:", fmt.format);
    console.log(" Resolution:", fmt.width + "x" + fmt.height);
    console.log(" FPS:", fmt.framerate_numerator + "/" + fmt.framerate_denominator,
      "(" + (fmt.framerate_numerator / fmt.framerate_denominator) + ")");
    console.log(" Colorspace:", fmt.colorspace);
  }
}

// Open the first camera with a specific format if available
if (cameras.length > 0) {
  console.log("\nOpening first camera...");
  var cam_id = cameras[0];
  var formats = camera.supported_formats(cam_id);

  // Try to find a 640x480 format
  var preferred_format = null;
  for (var i = 0; i < formats.length; i++) {
    if (formats[i].width === 640 && formats[i].height === 480) {
      preferred_format = formats[i];
      break;
    }
  }

  var cam;
  if (preferred_format) {
    console.log("Opening with 640x480 format...");
    cam = camera.open(cam_id, preferred_format);
  } else {
    console.log("Opening with default format...");
    cam = camera.open(cam_id);
  }

  if (cam) {
    console.log("Camera opened successfully!");
    console.log("Driver being used:", cam.get_driver());

    // Get the actual format being used
    var actual_format = cam.get_format();
    console.log("Actual format being used:");
    console.log(" Pixel format:", actual_format.format);
    console.log(" Resolution:", actual_format.width + "x" + actual_format.height);
    console.log(" FPS:", actual_format.framerate_numerator + "/" + actual_format.framerate_denominator,
      "(" + (actual_format.framerate_numerator / actual_format.framerate_denominator) + ")");
    console.log(" Colorspace:", actual_format.colorspace);

    // Clean up - camera will be closed when object is freed
    cam = null;
  }
}
38 tests/test_event_watch.js (new file)
@@ -0,0 +1,38 @@
// Test event watching functionality
use('input');

// Start watching events
input.watch($_);

$_.receiver(msg => {
  if (msg.type) {
    console.log("Received event:", msg.type);

    // Log specific event details
    switch(msg.type) {
      case "key_down":
      case "key_up":
        console.log(" Key:", msg.key, "Scancode:", msg.scancode, "Down:", msg.down);
        break;
      case "mouse_motion":
        console.log(" Mouse position:", msg.pos, "Delta:", msg.d_pos);
        break;
      case "mouse_button_down":
      case "mouse_button_up":
        console.log(" Button:", msg.button, "Position:", msg.mouse, "Down:", msg.down);
        break;
    }

    // Stop watching after receiving 10 events
    if (!$_.event_count) $_.event_count = 0;
    $_.event_count++;

    if ($_.event_count >= 10) {
      console.log("Received 10 events, stopping watch");
      input.unwatch($_);
    }
  }
});

console.log("Event watcher started. Press keys or move mouse to generate events.");
console.log("Will stop after 10 events.");
247 tests/webcam.js (new file)
@@ -0,0 +1,247 @@
// Test webcam display
var draw2d
var graphics
var os = use('os');
var input = use('input')
var json = use('json')
var surface = use('surface')
input.watch($_)

// Create SDL video actor
var video_actor = use('sdl_video');
var camera = use('camera')

var window_id = null;
var renderer_id = null;
var cam_id = null;
var cam_obj = null;
var cam_approved = false;
var webcam_texture = null;

// Handle camera events
$_.receiver(e => {
  if (e.type === 'camera_device_approved' && e.which === cam_id) {
    console.log("Camera approved!");
    cam_approved = true;
  } else if (e.type === 'camera_device_denied' && e.which === cam_id) {
    console.error("Camera access denied!");
    $_.stop();
  }
})

// Create window
send(video_actor, {
  kind: "window",
  op: "create",
  data: {
    title: "Webcam Test",
    width: 800,
    height: 600
  }
}, function(response) {
  if (response.error) {
    console.error("Failed to create window:", response.error);
    return;
  }

  window_id = response.id;
  console.log("Created window with id:", window_id);

  // Create renderer
  send(video_actor, {
    kind: "window",
    op: "makeRenderer",
    id: window_id
  }, function(response) {
    if (response.error) {
      console.error("Failed to create renderer:", response.error);
      return;
    }

    renderer_id = response.id;
    console.log("Created renderer with id:", renderer_id);

    // Configure draw2d and graphics
    draw2d = use('draw2d', video_actor, renderer_id)
    graphics = use('graphics', video_actor, renderer_id)

    // List available cameras
    var cameras = camera.list();
    if (cameras.length === 0) {
      console.error("No cameras found!");
      console.log(json.encode(cameras))
      $_.stop();
      return;
    }

    console.log("Found", cameras.length, "camera(s)");

    // Open the first camera
    cam_id = cameras[0];
    var cam_name = camera.name(cam_id);
    var cam_position = camera.position(cam_id);
    console.log("Opening camera:", cam_name, "Position:", cam_position);

    // Get supported formats and try to find a good one
    var formats = camera.supported_formats(cam_id);
    console.log("Camera supports", formats.length, "formats");

    // Look for a 640x480 format, or fall back to first format
    var preferred_format = null;
    for (var i = 0; i < formats.length; i++) {
      if (formats[i].width === 640 && formats[i].height === 480) {
        preferred_format = formats[i];
        break;
      }
    }

    if (!preferred_format && formats.length > 0) {
      // Use first available format
      preferred_format = formats[0];
    }

    if (preferred_format) {
      console.log("Using format:", preferred_format.width + "x" + preferred_format.height,
        "FPS:", preferred_format.framerate_numerator + "/" + preferred_format.framerate_denominator,
        "Format:", preferred_format.format);
      cam_obj = camera.open(cam_id, preferred_format);
    } else {
      console.log("Using default format");
      cam_obj = camera.open(cam_id);
    }

    if (!cam_obj) {
      console.error("Failed to open camera!");
      $_.stop();
      return;
    }

    console.log("Camera driver:", cam_obj.get_driver());

    // Get and display the actual format being used
    var actual_format = cam_obj.get_format();
    console.log("Actual camera format:");
    console.log(" Resolution:", actual_format.width + "x" + actual_format.height);
    console.log(" Format:", actual_format.format);
    console.log(" FPS:", actual_format.framerate_numerator + "/" + actual_format.framerate_denominator);

    // Start capturing after a short delay to wait for approval
    $_.delay(start_capturing, 0.5);
  });
});

function start_capturing() {
  if (!cam_approved) {
    console.log("Waiting for camera approval...");
    $_.delay(start_capturing, 0.1);
    return;
  }

  var frame = 0;
  var start_time = os.now();

  function capture_and_draw() {
    frame++;
    var t = os.now() - start_time;

    // Clear the screen with a dark background
    send(video_actor, {
      kind: "renderer",
      id: renderer_id,
      op: "set",
      prop: "drawColor",
      value: [0.1, 0.1, 0.15, 1]
    });

    send(video_actor, {
      kind: "renderer",
      id: renderer_id,
      op: "clear"
    });

    // Clear draw2d commands
    draw2d.clear();

    // Capture frame from camera
    var surface = cam_obj.capture();
    console.log(`rendering cam! ${json.encode(surface)}`)
    if (surface) {
      // Create texture from surface directly
      send(video_actor, {
        kind: "renderer",
        id: renderer_id,
        op: "loadTexture",
        data: surface
      }, function(tex_response) {
        if (tex_response.id) {
          // Destroy old texture if exists to avoid memory leak
          if (webcam_texture) {
            send(video_actor, {
              kind: "texture",
              id: webcam_texture,
              op: "destroy"
            });
          }
          webcam_texture = tex_response.id;
        }
      });
    }

    // Draw the webcam texture if we have one
    if (webcam_texture) {
      send(video_actor, {
        kind: "renderer",
        id: renderer_id,
        op: "copyTexture",
        data: {
          texture_id: webcam_texture,
          dest: {x: 50, y: 50, width: 700, height: 500}
        }
      });
    } else {
      // Draw placeholder text while waiting for first frame
      draw2d.text("Waiting for webcam...", {x: 200, y: 250, size: 20});
    }

    // Draw info
    draw2d.text("Camera: " + camera.name(cam_id), {x: 20, y: 20, size: 16});
    draw2d.text("Position: " + camera.position(cam_id), {x: 20, y: 40, size: 16});
    draw2d.text("Frame: " + frame, {x: 20, y: 60, size: 16});

    // Flush all commands to renderer
    draw2d.flush();

    // Present the frame
    send(video_actor, {
      kind: "renderer",
      id: renderer_id,
      op: "present"
    });

    // Schedule next frame (30 FPS for webcam)
    if (frame < 300) { // Run for 10 seconds
      $_.delay(capture_and_draw, 1/30);
    } else {
      console.log("Test completed - captured", frame, "frames");

      // Clean up resources
      if (webcam_texture) {
        send(video_actor, {
          kind: "texture",
          id: webcam_texture,
          op: "destroy"
        });
      }

      // Note: Camera is automatically closed when the object is garbage collected
      cam_obj = null;

      $_.delay($_.stop, 0.5);
    }
  }

  capture_and_draw();
}

// Stop after 12 seconds if not already stopped
$_.delay($_.stop, 12);