From 5a079a2d114f96d4847d1ee305d5b7c16eeec50e Mon Sep 17 00:00:00 2001 From: 3gg <3gg@shellblade.net> Date: Sat, 27 Dec 2025 12:03:39 -0800 Subject: Initial commit --- contrib/SDL-3.2.8/src/camera/SDL_camera.c | 1583 ++++++++++++++++++++ contrib/SDL-3.2.8/src/camera/SDL_camera_c.h | 35 + contrib/SDL-3.2.8/src/camera/SDL_syscamera.h | 224 +++ .../src/camera/android/SDL_camera_android.c | 905 +++++++++++ .../src/camera/coremedia/SDL_camera_coremedia.m | 508 +++++++ .../SDL-3.2.8/src/camera/dummy/SDL_camera_dummy.c | 81 + .../src/camera/emscripten/SDL_camera_emscripten.c | 275 ++++ .../mediafoundation/SDL_camera_mediafoundation.c | 1143 ++++++++++++++ .../src/camera/pipewire/SDL_camera_pipewire.c | 1144 ++++++++++++++ .../SDL-3.2.8/src/camera/v4l2/SDL_camera_v4l2.c | 929 ++++++++++++ .../SDL-3.2.8/src/camera/vita/SDL_camera_vita.c | 258 ++++ 11 files changed, 7085 insertions(+) create mode 100644 contrib/SDL-3.2.8/src/camera/SDL_camera.c create mode 100644 contrib/SDL-3.2.8/src/camera/SDL_camera_c.h create mode 100644 contrib/SDL-3.2.8/src/camera/SDL_syscamera.h create mode 100644 contrib/SDL-3.2.8/src/camera/android/SDL_camera_android.c create mode 100644 contrib/SDL-3.2.8/src/camera/coremedia/SDL_camera_coremedia.m create mode 100644 contrib/SDL-3.2.8/src/camera/dummy/SDL_camera_dummy.c create mode 100644 contrib/SDL-3.2.8/src/camera/emscripten/SDL_camera_emscripten.c create mode 100644 contrib/SDL-3.2.8/src/camera/mediafoundation/SDL_camera_mediafoundation.c create mode 100644 contrib/SDL-3.2.8/src/camera/pipewire/SDL_camera_pipewire.c create mode 100644 contrib/SDL-3.2.8/src/camera/v4l2/SDL_camera_v4l2.c create mode 100644 contrib/SDL-3.2.8/src/camera/vita/SDL_camera_vita.c (limited to 'contrib/SDL-3.2.8/src/camera') diff --git a/contrib/SDL-3.2.8/src/camera/SDL_camera.c b/contrib/SDL-3.2.8/src/camera/SDL_camera.c new file mode 100644 index 0000000..9f71cea --- /dev/null +++ b/contrib/SDL-3.2.8/src/camera/SDL_camera.c @@ -0,0 +1,1583 @@ +/* + Simple DirectMedia Layer + Copyright (C) 1997-2025 Sam Lantinga + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. +*/ +#include "SDL_internal.h" + +#include "SDL_syscamera.h" +#include "SDL_camera_c.h" +#include "../video/SDL_pixels_c.h" +#include "../video/SDL_surface_c.h" +#include "../thread/SDL_systhread.h" + + +// A lot of this is a simplified version of SDL_audio.c; if fixing stuff here, +// maybe check that file, too. 
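+//
+// Illustrative sketch of how an application typically drives this API (not part
+// of the implementation; error handling and the permission wait are omitted):
+//
+//     int count = 0;
+//     SDL_CameraID *ids = SDL_GetCameras(&count);
+//     SDL_Camera *cam = SDL_OpenCamera(ids[0], NULL);  // NULL spec == best available format
+//     SDL_free(ids);
+//     // ...wait until SDL_GetCameraPermissionState(cam) > 0...
+//     Uint64 timestampNS;
+//     SDL_Surface *frame = SDL_AcquireCameraFrame(cam, &timestampNS);
+//     if (frame) {
+//         // read frame->pixels, then hand the frame back so it can be refilled.
+//         SDL_ReleaseCameraFrame(cam, frame);
+//     }
+//     SDL_CloseCamera(cam);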
+ +// Available camera drivers +static const CameraBootStrap *const bootstrap[] = { +#ifdef SDL_CAMERA_DRIVER_V4L2 + &V4L2_bootstrap, +#endif +#ifdef SDL_CAMERA_DRIVER_PIPEWIRE + &PIPEWIRECAMERA_bootstrap, +#endif +#ifdef SDL_CAMERA_DRIVER_COREMEDIA + &COREMEDIA_bootstrap, +#endif +#ifdef SDL_CAMERA_DRIVER_ANDROID + &ANDROIDCAMERA_bootstrap, +#endif +#ifdef SDL_CAMERA_DRIVER_EMSCRIPTEN + &EMSCRIPTENCAMERA_bootstrap, +#endif +#ifdef SDL_CAMERA_DRIVER_MEDIAFOUNDATION + &MEDIAFOUNDATION_bootstrap, +#endif +#ifdef SDL_CAMERA_DRIVER_VITA + &VITACAMERA_bootstrap, +#endif +#ifdef SDL_CAMERA_DRIVER_DUMMY + &DUMMYCAMERA_bootstrap, +#endif + NULL +}; + +static SDL_CameraDriver camera_driver; + + +int SDL_GetNumCameraDrivers(void) +{ + return SDL_arraysize(bootstrap) - 1; +} + +const char *SDL_GetCameraDriver(int index) +{ + if (index >= 0 && index < SDL_GetNumCameraDrivers()) { + return bootstrap[index]->name; + } + SDL_InvalidParamError("index"); + return NULL; +} + +const char *SDL_GetCurrentCameraDriver(void) +{ + return camera_driver.name; +} + +char *SDL_GetCameraThreadName(SDL_Camera *device, char *buf, size_t buflen) +{ + (void)SDL_snprintf(buf, buflen, "SDLCamera%d", (int) device->instance_id); + return buf; +} + +bool SDL_AddCameraFormat(CameraFormatAddData *data, SDL_PixelFormat format, SDL_Colorspace colorspace, int w, int h, int framerate_numerator, int framerate_denominator) +{ + SDL_assert(data != NULL); + if (data->allocated_specs <= data->num_specs) { + const int newalloc = data->allocated_specs ? (data->allocated_specs * 2) : 16; + void *ptr = SDL_realloc(data->specs, sizeof (SDL_CameraSpec) * newalloc); + if (!ptr) { + return false; + } + data->specs = (SDL_CameraSpec *) ptr; + data->allocated_specs = newalloc; + } + + SDL_CameraSpec *spec = &data->specs[data->num_specs]; + spec->format = format; + spec->colorspace = colorspace; + spec->width = w; + spec->height = h; + spec->framerate_numerator = framerate_numerator; + spec->framerate_denominator = framerate_denominator; + + data->num_specs++; + + return true; +} + + +// Zombie device implementation... + +// These get used when a device is disconnected or fails. Apps that ignore the +// loss notifications will get black frames but otherwise keep functioning. +static bool ZombieWaitDevice(SDL_Camera *device) +{ + if (!SDL_GetAtomicInt(&device->shutdown)) { + // !!! FIXME: this is bad for several reasons (uses double, could be precalculated, doesn't track elapsed time). + const double duration = ((double) device->actual_spec.framerate_denominator / ((double) device->actual_spec.framerate_numerator)); + SDL_Delay((Uint32) (duration * 1000.0)); + } + return true; +} + +static size_t GetFrameBufLen(const SDL_CameraSpec *spec) +{ + const size_t w = (const size_t) spec->width; + const size_t h = (const size_t) spec->height; + const size_t wxh = w * h; + const SDL_PixelFormat fmt = spec->format; + + switch (fmt) { + // Some YUV formats have a larger Y plane than their U or V planes. + case SDL_PIXELFORMAT_YV12: + case SDL_PIXELFORMAT_IYUV: + case SDL_PIXELFORMAT_NV12: + case SDL_PIXELFORMAT_NV21: + return wxh + (wxh / 2); + + default: break; + } + + // this is correct for most things. + return wxh * SDL_BYTESPERPIXEL(fmt); +} + +static SDL_CameraFrameResult ZombieAcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS) +{ + const SDL_CameraSpec *spec = &device->actual_spec; + + if (!device->zombie_pixels) { + // attempt to allocate and initialize a fake frame of pixels. 
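+        // GetFrameBufLen() sizes the buffer for the actual spec: the 4:2:0 YUV
+        // formats it special-cases need w*h + (w*h)/2 bytes (e.g. 640x480 NV12 is
+        // 307200 + 153600 = 460800 bytes), everything else w*h*bytes-per-pixel.
+        // The allocation is SIMD-aligned and reused for every zombie frame until
+        // ClosePhysicalCamera() frees it.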
+ const size_t buflen = GetFrameBufLen(&device->actual_spec); + device->zombie_pixels = (Uint8 *)SDL_aligned_alloc(SDL_GetSIMDAlignment(), buflen); + if (!device->zombie_pixels) { + *timestampNS = 0; + return SDL_CAMERA_FRAME_SKIP; // oh well, say there isn't a frame yet, so we'll go back to waiting. Maybe allocation will succeed later...? + } + + Uint8 *dst = device->zombie_pixels; + switch (spec->format) { + // in YUV formats, the U and V values must be 128 to get a black frame. If set to zero, it'll be bright green. + case SDL_PIXELFORMAT_YV12: + case SDL_PIXELFORMAT_IYUV: + case SDL_PIXELFORMAT_NV12: + case SDL_PIXELFORMAT_NV21: + SDL_memset(dst, 0, spec->width * spec->height); // set Y to zero. + SDL_memset(dst + (spec->width * spec->height), 128, (spec->width * spec->height) / 2); // set U and V to 128. + break; + + case SDL_PIXELFORMAT_YUY2: + case SDL_PIXELFORMAT_YVYU: + // Interleaved Y1[U1|V1]Y2[U2|V2]. + for (size_t i = 0; i < buflen; i += 4) { + dst[i] = 0; + dst[i+1] = 128; + dst[i+2] = 0; + dst[i+3] = 128; + } + break; + + + case SDL_PIXELFORMAT_UYVY: + // Interleaved [U1|V1]Y1[U2|V2]Y2. + for (size_t i = 0; i < buflen; i += 4) { + dst[i] = 128; + dst[i+1] = 0; + dst[i+2] = 128; + dst[i+3] = 0; + } + break; + + default: + // just zero everything else, it'll _probably_ be okay. + SDL_memset(dst, 0, buflen); + break; + } + } + + + *timestampNS = SDL_GetTicksNS(); + frame->pixels = device->zombie_pixels; + + // SDL (currently) wants the pitch of YUV formats to be the pitch of the (1-byte-per-pixel) Y plane. + frame->pitch = spec->width; + if (!SDL_ISPIXELFORMAT_FOURCC(spec->format)) { // checking if it's not FOURCC to only do this for non-YUV data is good enough for now. + frame->pitch *= SDL_BYTESPERPIXEL(spec->format); + } + + #if DEBUG_CAMERA + SDL_Log("CAMERA: dev[%p] Acquired Zombie frame, timestamp %llu", device, (unsigned long long) *timestampNS); + #endif + + return SDL_CAMERA_FRAME_READY; // frame is available. +} + +static void ZombieReleaseFrame(SDL_Camera *device, SDL_Surface *frame) // Reclaim frame->pixels and frame->pitch! +{ + if (frame->pixels != device->zombie_pixels) { + // this was a frame from before the disconnect event; let the backend make an attempt to free it. + camera_driver.impl.ReleaseFrame(device, frame); + } + // we just leave zombie_pixels alone, as we'll reuse it for every new frame until the camera is closed. +} + +static void ClosePhysicalCamera(SDL_Camera *device) +{ + if (!device) { + return; + } + + SDL_SetAtomicInt(&device->shutdown, 1); + +// !!! FIXME: the close_cond stuff from audio might help the race condition here. + + if (device->thread != NULL) { + SDL_WaitThread(device->thread, NULL); + device->thread = NULL; + } + + // release frames that are queued up somewhere... 
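+    // (Only in the no-conversion/no-scaling case do the output surfaces still
+    // alias backend-owned buffers that must be handed back via ReleaseFrame;
+    // converted/scaled frames are plain copies and are simply destroyed below.)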
+ if (!device->needs_conversion && !device->needs_scaling) { + for (SurfaceList *i = device->filled_output_surfaces.next; i != NULL; i = i->next) { + device->ReleaseFrame(device, i->surface); + } + for (SurfaceList *i = device->app_held_output_surfaces.next; i != NULL; i = i->next) { + device->ReleaseFrame(device, i->surface); + } + } + + camera_driver.impl.CloseDevice(device); + + SDL_DestroyProperties(device->props); + + SDL_DestroySurface(device->acquire_surface); + device->acquire_surface = NULL; + SDL_DestroySurface(device->conversion_surface); + device->conversion_surface = NULL; + + for (int i = 0; i < SDL_arraysize(device->output_surfaces); i++) { + SDL_DestroySurface(device->output_surfaces[i].surface); + } + SDL_zeroa(device->output_surfaces); + + SDL_aligned_free(device->zombie_pixels); + + device->permission = 0; + device->zombie_pixels = NULL; + device->filled_output_surfaces.next = NULL; + device->empty_output_surfaces.next = NULL; + device->app_held_output_surfaces.next = NULL; + + device->base_timestamp = 0; + device->adjust_timestamp = 0; + + SDL_zero(device->spec); +} + +// Don't hold the device lock when calling this, as we may destroy the device! +void UnrefPhysicalCamera(SDL_Camera *device) +{ + if (SDL_AtomicDecRef(&device->refcount)) { + // take it out of the device list. + SDL_LockRWLockForWriting(camera_driver.device_hash_lock); + if (SDL_RemoveFromHashTable(camera_driver.device_hash, (const void *) (uintptr_t) device->instance_id)) { + SDL_AddAtomicInt(&camera_driver.device_count, -1); + } + SDL_UnlockRWLock(camera_driver.device_hash_lock); + } +} + +void RefPhysicalCamera(SDL_Camera *device) +{ + SDL_AtomicIncRef(&device->refcount); +} + +static void ObtainPhysicalCameraObj(SDL_Camera *device) SDL_NO_THREAD_SAFETY_ANALYSIS // !!! FIXME: SDL_ACQUIRE +{ + if (device) { + RefPhysicalCamera(device); + SDL_LockMutex(device->lock); + } +} + +static SDL_Camera *ObtainPhysicalCamera(SDL_CameraID devid) // !!! FIXME: SDL_ACQUIRE +{ + if (!SDL_GetCurrentCameraDriver()) { + SDL_SetError("Camera subsystem is not initialized"); + return NULL; + } + + SDL_Camera *device = NULL; + SDL_LockRWLockForReading(camera_driver.device_hash_lock); + SDL_FindInHashTable(camera_driver.device_hash, (const void *) (uintptr_t) devid, (const void **) &device); + SDL_UnlockRWLock(camera_driver.device_hash_lock); + if (!device) { + SDL_SetError("Invalid camera device instance ID"); + } else { + ObtainPhysicalCameraObj(device); + } + return device; +} + +static void ReleaseCamera(SDL_Camera *device) SDL_NO_THREAD_SAFETY_ANALYSIS // !!! FIXME: SDL_RELEASE +{ + if (device) { + SDL_UnlockMutex(device->lock); + UnrefPhysicalCamera(device); + } +} + +// we want these sorted by format first, so you can find a block of all +// resolutions that are supported for a format. The formats are sorted in +// "best" order, but that's subjective: right now, we prefer planar +// formats, since they're likely what the cameras prefer to produce +// anyhow, and they basically send the same information in less space +// than an RGB-style format. After that, sort by bits-per-pixel. + +// we want specs sorted largest to smallest dimensions, larger width taking precedence over larger height. +static int SDLCALL CameraSpecCmp(const void *vpa, const void *vpb) +{ + const SDL_CameraSpec *a = (const SDL_CameraSpec *) vpa; + const SDL_CameraSpec *b = (const SDL_CameraSpec *) vpb; + + // driver shouldn't send specs like this, check here since we're eventually going to sniff the whole array anyhow. 
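+    // (Resulting order: FOURCC/YUV formats before RGB-style ones, then higher
+    // bits-per-pixel, larger width, larger height, higher framerate, and
+    // full-range colorspace first; e.g. NV12 1920x1080 sorts ahead of
+    // XRGB8888 1920x1080.)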
+ SDL_assert(a->format != SDL_PIXELFORMAT_UNKNOWN); + SDL_assert(a->width > 0); + SDL_assert(a->height > 0); + SDL_assert(b->format != SDL_PIXELFORMAT_UNKNOWN); + SDL_assert(b->width > 0); + SDL_assert(b->height > 0); + + const SDL_PixelFormat afmt = a->format; + const SDL_PixelFormat bfmt = b->format; + if (SDL_ISPIXELFORMAT_FOURCC(afmt) && !SDL_ISPIXELFORMAT_FOURCC(bfmt)) { + return -1; + } else if (!SDL_ISPIXELFORMAT_FOURCC(afmt) && SDL_ISPIXELFORMAT_FOURCC(bfmt)) { + return 1; + } else if (SDL_BITSPERPIXEL(afmt) > SDL_BITSPERPIXEL(bfmt)) { + return -1; + } else if (SDL_BITSPERPIXEL(bfmt) > SDL_BITSPERPIXEL(afmt)) { + return 1; + } else if (a->width > b->width) { + return -1; + } else if (b->width > a->width) { + return 1; + } else if (a->height > b->height) { + return -1; + } else if (b->height > a->height) { + return 1; + } + + // still here? We care about framerate less than format or size, but faster is better than slow. + if (a->framerate_numerator && !b->framerate_numerator) { + return -1; + } else if (!a->framerate_numerator && b->framerate_numerator) { + return 1; + } + + const float fpsa = ((float)a->framerate_numerator / a->framerate_denominator); + const float fpsb = ((float)b->framerate_numerator / b->framerate_denominator); + if (fpsa > fpsb) { + return -1; + } else if (fpsb > fpsa) { + return 1; + } + + if (SDL_COLORSPACERANGE(a->colorspace) == SDL_COLOR_RANGE_FULL && + SDL_COLORSPACERANGE(b->colorspace) != SDL_COLOR_RANGE_FULL) { + return -1; + } + if (SDL_COLORSPACERANGE(a->colorspace) != SDL_COLOR_RANGE_FULL && + SDL_COLORSPACERANGE(b->colorspace) == SDL_COLOR_RANGE_FULL) { + return 1; + } + + return 0; // apparently, they're equal. +} + +// The camera backends call this when a new device is plugged in. +SDL_Camera *SDL_AddCamera(const char *name, SDL_CameraPosition position, int num_specs, const SDL_CameraSpec *specs, void *handle) +{ + SDL_assert(name != NULL); + SDL_assert(num_specs >= 0); + SDL_assert((specs != NULL) == (num_specs > 0)); + SDL_assert(handle != NULL); + + SDL_LockRWLockForReading(camera_driver.device_hash_lock); + const int shutting_down = SDL_GetAtomicInt(&camera_driver.shutting_down); + SDL_UnlockRWLock(camera_driver.device_hash_lock); + if (shutting_down) { + return NULL; // we're shutting down, don't add any devices that are hotplugged at the last possible moment. + } + + SDL_Camera *device = (SDL_Camera *)SDL_calloc(1, sizeof(SDL_Camera)); + if (!device) { + return NULL; + } + + device->name = SDL_strdup(name); + if (!device->name) { + SDL_free(device); + return NULL; + } + + device->position = position; + + device->lock = SDL_CreateMutex(); + if (!device->lock) { + SDL_free(device->name); + SDL_free(device); + return NULL; + } + + device->all_specs = (SDL_CameraSpec *)SDL_calloc(num_specs + 1, sizeof (*specs)); + if (!device->all_specs) { + SDL_DestroyMutex(device->lock); + SDL_free(device->name); + SDL_free(device); + return NULL; + } + + if (num_specs > 0) { + SDL_memcpy(device->all_specs, specs, sizeof (*specs) * num_specs); + SDL_qsort(device->all_specs, num_specs, sizeof (*specs), CameraSpecCmp); + + // weed out duplicates, just in case. 
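+        // (After sorting, duplicates are adjacent. all_specs was allocated with
+        // num_specs + 1 zeroed entries, so comparing the last real entry against
+        // the zeroed terminator is safe, and the terminator shifts down with the
+        // memmove so the list stays zero-terminated.)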
+ for (int i = 0; i < num_specs; i++) { + SDL_CameraSpec *a = &device->all_specs[i]; + SDL_CameraSpec *b = &device->all_specs[i + 1]; + if (SDL_memcmp(a, b, sizeof (*a)) == 0) { + SDL_memmove(a, b, sizeof (*specs) * (num_specs - i)); + i--; + num_specs--; + } + } + } + + #if DEBUG_CAMERA + const char *posstr = "unknown position"; + if (position == SDL_CAMERA_POSITION_FRONT_FACING) { + posstr = "front-facing"; + } else if (position == SDL_CAMERA_POSITION_BACK_FACING) { + posstr = "back-facing"; + } + SDL_Log("CAMERA: Adding device '%s' (%s) with %d spec%s%s", name, posstr, num_specs, (num_specs == 1) ? "" : "s", (num_specs == 0) ? "" : ":"); + for (int i = 0; i < num_specs; i++) { + const SDL_CameraSpec *spec = &device->all_specs[i]; + SDL_Log("CAMERA: - fmt=%s, w=%d, h=%d, numerator=%d, denominator=%d", SDL_GetPixelFormatName(spec->format), spec->width, spec->height, spec->framerate_numerator, spec->framerate_denominator); + } + #endif + + device->num_specs = num_specs; + device->handle = handle; + device->instance_id = SDL_GetNextObjectID(); + SDL_SetAtomicInt(&device->shutdown, 0); + SDL_SetAtomicInt(&device->zombie, 0); + RefPhysicalCamera(device); + + SDL_LockRWLockForWriting(camera_driver.device_hash_lock); + if (SDL_InsertIntoHashTable(camera_driver.device_hash, (const void *) (uintptr_t) device->instance_id, device, false)) { + SDL_AddAtomicInt(&camera_driver.device_count, 1); + } else { + SDL_DestroyMutex(device->lock); + SDL_free(device->all_specs); + SDL_free(device->name); + SDL_free(device); + device = NULL; + } + + // Add a device add event to the pending list, to be pushed when the event queue is pumped (away from any of our internal threads). + if (device) { + SDL_PendingCameraEvent *p = (SDL_PendingCameraEvent *) SDL_malloc(sizeof (SDL_PendingCameraEvent)); + if (p) { // if allocation fails, you won't get an event, but we can't help that. + p->type = SDL_EVENT_CAMERA_DEVICE_ADDED; + p->devid = device->instance_id; + p->next = NULL; + SDL_assert(camera_driver.pending_events_tail != NULL); + SDL_assert(camera_driver.pending_events_tail->next == NULL); + camera_driver.pending_events_tail->next = p; + camera_driver.pending_events_tail = p; + } + } + SDL_UnlockRWLock(camera_driver.device_hash_lock); + + return device; +} + +// Called when a device is removed from the system, or it fails unexpectedly, from any thread, possibly even the camera device's thread. +void SDL_CameraDisconnected(SDL_Camera *device) +{ + if (!device) { + return; + } + + #if DEBUG_CAMERA + SDL_Log("CAMERA: DISCONNECTED! dev[%p]", device); + #endif + + // Save off removal info in a list so we can send events for each, next + // time the event queue pumps, in case something tries to close a device + // from an event filter, as this would risk deadlocks and other disasters + // if done from the device thread. + SDL_PendingCameraEvent pending; + pending.next = NULL; + SDL_PendingCameraEvent *pending_tail = &pending; + + ObtainPhysicalCameraObj(device); + + const bool first_disconnect = SDL_CompareAndSwapAtomicInt(&device->zombie, 0, 1); + if (first_disconnect) { // if already disconnected this device, don't do it twice. + // Swap in "Zombie" versions of the usual platform interfaces, so the device will keep + // making progress until the app closes it. Otherwise, streams might continue to + // accumulate waste data that never drains, apps that depend on audio callbacks to + // progress will freeze, etc. 
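+        // (The Zombie* replacements are defined near the top of this file; they
+        // keep delivering black frames at the device's last-known framerate.)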
+ device->WaitDevice = ZombieWaitDevice; + device->AcquireFrame = ZombieAcquireFrame; + device->ReleaseFrame = ZombieReleaseFrame; + + // Zombie functions will just report the timestamp as SDL_GetTicksNS(), so we don't need to adjust anymore to get it to match. + device->adjust_timestamp = 0; + device->base_timestamp = 0; + + SDL_PendingCameraEvent *p = (SDL_PendingCameraEvent *) SDL_malloc(sizeof (SDL_PendingCameraEvent)); + if (p) { // if this failed, no event for you, but you have deeper problems anyhow. + p->type = SDL_EVENT_CAMERA_DEVICE_REMOVED; + p->devid = device->instance_id; + p->next = NULL; + pending_tail->next = p; + pending_tail = p; + } + } + + ReleaseCamera(device); + + if (first_disconnect) { + if (pending.next) { // NULL if event is disabled or disaster struck. + SDL_LockRWLockForWriting(camera_driver.device_hash_lock); + SDL_assert(camera_driver.pending_events_tail != NULL); + SDL_assert(camera_driver.pending_events_tail->next == NULL); + camera_driver.pending_events_tail->next = pending.next; + camera_driver.pending_events_tail = pending_tail; + SDL_UnlockRWLock(camera_driver.device_hash_lock); + } + } +} + +void SDL_CameraPermissionOutcome(SDL_Camera *device, bool approved) +{ + if (!device) { + return; + } + + SDL_PendingCameraEvent pending; + pending.next = NULL; + SDL_PendingCameraEvent *pending_tail = &pending; + + const int permission = approved ? 1 : -1; + + ObtainPhysicalCameraObj(device); + if (device->permission != permission) { + device->permission = permission; + SDL_PendingCameraEvent *p = (SDL_PendingCameraEvent *) SDL_malloc(sizeof (SDL_PendingCameraEvent)); + if (p) { // if this failed, no event for you, but you have deeper problems anyhow. + p->type = approved ? SDL_EVENT_CAMERA_DEVICE_APPROVED : SDL_EVENT_CAMERA_DEVICE_DENIED; + p->devid = device->instance_id; + p->next = NULL; + pending_tail->next = p; + pending_tail = p; + } + } + + ReleaseCamera(device); + + if (pending.next) { // NULL if event is disabled or disaster struck. + SDL_LockRWLockForWriting(camera_driver.device_hash_lock); + SDL_assert(camera_driver.pending_events_tail != NULL); + SDL_assert(camera_driver.pending_events_tail->next == NULL); + camera_driver.pending_events_tail->next = pending.next; + camera_driver.pending_events_tail = pending_tail; + SDL_UnlockRWLock(camera_driver.device_hash_lock); + } +} + +typedef struct FindOnePhysicalCameraByCallbackData +{ + bool (*callback)(SDL_Camera *device, void *userdata); + void *userdata; + SDL_Camera *device; +} FindOnePhysicalCameraByCallbackData; + +static bool SDLCALL FindOnePhysicalCameraByCallback(void *userdata, const SDL_HashTable *table, const void *key, const void *value) +{ + FindOnePhysicalCameraByCallbackData *data = (FindOnePhysicalCameraByCallbackData *) userdata; + SDL_Camera *device = (SDL_Camera *) value; + if (data->callback(device, data->userdata)) { + data->device = device; + return false; // stop iterating. + } + return true; // keep iterating. +} + +// !!! FIXME: this doesn't follow SDL convention of `userdata` being the first param of the callback. 
+SDL_Camera *SDL_FindPhysicalCameraByCallback(bool (*callback)(SDL_Camera *device, void *userdata), void *userdata) +{ + if (!SDL_GetCurrentCameraDriver()) { + SDL_SetError("Camera subsystem is not initialized"); + return NULL; + } + + + FindOnePhysicalCameraByCallbackData data = { callback, userdata, NULL }; + SDL_LockRWLockForReading(camera_driver.device_hash_lock); + SDL_IterateHashTable(camera_driver.device_hash, FindOnePhysicalCameraByCallback, &data); + SDL_UnlockRWLock(camera_driver.device_hash_lock); + + if (!data.device) { + SDL_SetError("Device not found"); + } + + return data.device; +} + +void SDL_CloseCamera(SDL_Camera *camera) +{ + SDL_Camera *device = camera; // currently there's no separation between physical and logical device. + ClosePhysicalCamera(device); +} + +bool SDL_GetCameraFormat(SDL_Camera *camera, SDL_CameraSpec *spec) +{ + bool result; + + if (!camera) { + return SDL_InvalidParamError("camera"); + } else if (!spec) { + return SDL_InvalidParamError("spec"); + } + + SDL_Camera *device = camera; // currently there's no separation between physical and logical device. + ObtainPhysicalCameraObj(device); + if (device->permission > 0) { + SDL_copyp(spec, &device->spec); + result = true; + } else { + SDL_zerop(spec); + result = SDL_SetError("Camera permission has not been granted"); + } + ReleaseCamera(device); + + return result; +} + +const char *SDL_GetCameraName(SDL_CameraID instance_id) +{ + const char *result = NULL; + SDL_Camera *device = ObtainPhysicalCamera(instance_id); + if (device) { + result = SDL_GetPersistentString(device->name); + ReleaseCamera(device); + } + return result; +} + +SDL_CameraPosition SDL_GetCameraPosition(SDL_CameraID instance_id) +{ + SDL_CameraPosition result = SDL_CAMERA_POSITION_UNKNOWN; + SDL_Camera *device = ObtainPhysicalCamera(instance_id); + if (device) { + result = device->position; + ReleaseCamera(device); + } + return result; +} + + +typedef struct GetOneCameraData +{ + SDL_CameraID *result; + int devs_seen; +} GetOneCameraData; + +static bool SDLCALL GetOneCamera(void *userdata, const SDL_HashTable *table, const void *key, const void *value) +{ + GetOneCameraData *data = (GetOneCameraData *) userdata; + data->result[data->devs_seen++] = (SDL_CameraID) (uintptr_t) key; + return true; // keep iterating. +} + +SDL_CameraID *SDL_GetCameras(int *count) +{ + int dummy_count; + if (!count) { + count = &dummy_count; + } + + if (!SDL_GetCurrentCameraDriver()) { + *count = 0; + SDL_SetError("Camera subsystem is not initialized"); + return NULL; + } + + SDL_CameraID *result = NULL; + + SDL_LockRWLockForReading(camera_driver.device_hash_lock); + int num_devices = SDL_GetAtomicInt(&camera_driver.device_count); + result = (SDL_CameraID *) SDL_malloc((num_devices + 1) * sizeof (SDL_CameraID)); + if (!result) { + num_devices = 0; + } else { + GetOneCameraData data = { result, 0 }; + SDL_IterateHashTable(camera_driver.device_hash, GetOneCamera, &data); + SDL_assert(data.devs_seen == num_devices); + result[num_devices] = 0; // null-terminated. 
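+        // (The array is SDL_malloc'd, so callers free it with SDL_free(); the
+        // extra 0 entry lets them walk it without consulting *count.)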
+ } + SDL_UnlockRWLock(camera_driver.device_hash_lock); + + *count = num_devices; + + return result; +} + +SDL_CameraSpec **SDL_GetCameraSupportedFormats(SDL_CameraID instance_id, int *count) +{ + if (count) { + *count = 0; + } + + SDL_Camera *device = ObtainPhysicalCamera(instance_id); + if (!device) { + return NULL; + } + + int i; + int num_specs = device->num_specs; + SDL_CameraSpec **result = (SDL_CameraSpec **) SDL_malloc(((num_specs + 1) * sizeof(*result)) + (num_specs * sizeof (**result))); + if (result) { + SDL_CameraSpec *specs = (SDL_CameraSpec *)(result + (num_specs + 1)); + SDL_memcpy(specs, device->all_specs, num_specs * sizeof(*specs)); + for (i = 0; i < num_specs; ++i) { + result[i] = specs++; + } + result[i] = NULL; + + if (count) { + *count = num_specs; + } + } + + ReleaseCamera(device); + + return result; +} + + +// Camera device thread. This is split into chunks, so drivers that need to control this directly can use the pieces they need without duplicating effort. + +void SDL_CameraThreadSetup(SDL_Camera *device) +{ + //camera_driver.impl.ThreadInit(device); +#ifdef SDL_VIDEO_DRIVER_ANDROID + // TODO + /* + { + // Set thread priority to THREAD_PRIORITY_VIDEO + extern void Android_JNI_CameraSetThreadPriority(int, int); + Android_JNI_CameraSetThreadPriority(device->recording, device); + }*/ +#else + // The camera capture is always a high priority thread + SDL_SetCurrentThreadPriority(SDL_THREAD_PRIORITY_HIGH); +#endif +} + +bool SDL_CameraThreadIterate(SDL_Camera *device) +{ + SDL_LockMutex(device->lock); + + if (SDL_GetAtomicInt(&device->shutdown)) { + SDL_UnlockMutex(device->lock); + return false; // we're done, shut it down. + } + + const int permission = device->permission; + if (permission <= 0) { + SDL_UnlockMutex(device->lock); + return (permission < 0) ? false : true; // if permission was denied, shut it down. if undecided, we're done for now. + } + + bool failed = false; // set to true if disaster worthy of treating the device as lost has happened. + SDL_Surface *acquired = NULL; + SDL_Surface *output_surface = NULL; + SurfaceList *slist = NULL; + Uint64 timestampNS = 0; + + // AcquireFrame SHOULD NOT BLOCK, as we are holding a lock right now. Block in WaitDevice instead! + const SDL_CameraFrameResult rc = device->AcquireFrame(device, device->acquire_surface, ×tampNS); + + if (rc == SDL_CAMERA_FRAME_READY) { // new frame acquired! + #if DEBUG_CAMERA + SDL_Log("CAMERA: New frame available! pixels=%p pitch=%d", device->acquire_surface->pixels, device->acquire_surface->pitch); + #endif + + if (device->drop_frames > 0) { + #if DEBUG_CAMERA + SDL_Log("CAMERA: Dropping an initial frame"); + #endif + device->drop_frames--; + device->ReleaseFrame(device, device->acquire_surface); + device->acquire_surface->pixels = NULL; + device->acquire_surface->pitch = 0; + } else if (device->empty_output_surfaces.next == NULL) { + // uhoh, no output frames available! Either the app is slow, or it forgot to release frames when done with them. Drop this new frame. + #if DEBUG_CAMERA + SDL_Log("CAMERA: No empty output surfaces! 
Dropping frame!"); + #endif + device->ReleaseFrame(device, device->acquire_surface); + device->acquire_surface->pixels = NULL; + device->acquire_surface->pitch = 0; + } else { + if (!device->adjust_timestamp) { + device->adjust_timestamp = SDL_GetTicksNS(); + device->base_timestamp = timestampNS; + } + timestampNS = (timestampNS - device->base_timestamp) + device->adjust_timestamp; + + slist = device->empty_output_surfaces.next; + output_surface = slist->surface; + device->empty_output_surfaces.next = slist->next; + acquired = device->acquire_surface; + slist->timestampNS = timestampNS; + } + } else if (rc == SDL_CAMERA_FRAME_SKIP) { // no frame available yet; not an error. + #if 0 //DEBUG_CAMERA + SDL_Log("CAMERA: No frame available yet."); + #endif + } else { // fatal error! + #if DEBUG_CAMERA + SDL_Log("CAMERA: dev[%p] error AcquireFrame: %s", device, SDL_GetError()); + #endif + failed = true; + } + + // we can let go of the lock once we've tried to grab a frame of video and maybe moved the output frame off the empty list. + // this lets us chew up the CPU for conversion and scaling without blocking other threads. + SDL_UnlockMutex(device->lock); + + if (failed) { + SDL_assert(slist == NULL); + SDL_assert(acquired == NULL); + SDL_CameraDisconnected(device); // doh. + } else if (acquired) { // we have a new frame, scale/convert if necessary and queue it for the app! + SDL_assert(slist != NULL); + if (!device->needs_scaling && !device->needs_conversion) { // no conversion needed? Just move the pointer/pitch into the output surface. + #if DEBUG_CAMERA + SDL_Log("CAMERA: Frame is going through without conversion!"); + #endif + output_surface->w = acquired->w; + output_surface->h = acquired->h; + output_surface->pixels = acquired->pixels; + output_surface->pitch = acquired->pitch; + } else { // convert/scale into a different surface. + #if DEBUG_CAMERA + SDL_Log("CAMERA: Frame is getting converted!"); + #endif + SDL_Surface *srcsurf = acquired; + if (device->needs_scaling == -1) { // downscaling? Do it first. -1: downscale, 0: no scaling, 1: upscale + SDL_Surface *dstsurf = device->needs_conversion ? device->conversion_surface : output_surface; + SDL_StretchSurface(srcsurf, NULL, dstsurf, NULL, SDL_SCALEMODE_NEAREST); // !!! FIXME: linear scale? letterboxing? + srcsurf = dstsurf; + } + if (device->needs_conversion) { + SDL_Surface *dstsurf = (device->needs_scaling == 1) ? device->conversion_surface : output_surface; + SDL_ConvertPixels(srcsurf->w, srcsurf->h, + srcsurf->format, srcsurf->pixels, srcsurf->pitch, + dstsurf->format, dstsurf->pixels, dstsurf->pitch); + srcsurf = dstsurf; + } + if (device->needs_scaling == 1) { // upscaling? Do it last. -1: downscale, 0: no scaling, 1: upscale + SDL_StretchSurface(srcsurf, NULL, output_surface, NULL, SDL_SCALEMODE_NEAREST); // !!! FIXME: linear scale? letterboxing? + } + + // we made a copy, so we can give the driver back its resources. + device->ReleaseFrame(device, acquired); + } + + // we either released these already after we copied the data, or the pointer was migrated to output_surface. + acquired->pixels = NULL; + acquired->pitch = 0; + + // make the filled output surface available to the app. + SDL_LockMutex(device->lock); + slist->next = device->filled_output_surfaces.next; + device->filled_output_surfaces.next = slist; + SDL_UnlockMutex(device->lock); + } + + return true; // always go on if not shutting down, even if device failed. 
+} + +void SDL_CameraThreadShutdown(SDL_Camera *device) +{ + //device->FlushRecording(device); + //camera_driver.impl.ThreadDeinit(device); + //SDL_CameraThreadFinalize(device); +} + +// Actual thread entry point, if driver didn't handle this itself. +static int SDLCALL CameraThread(void *devicep) +{ + SDL_Camera *device = (SDL_Camera *) devicep; + + #if DEBUG_CAMERA + SDL_Log("CAMERA: dev[%p] Start thread 'CameraThread'", devicep); + #endif + + SDL_assert(device != NULL); + SDL_CameraThreadSetup(device); + + do { + if (!device->WaitDevice(device)) { + SDL_CameraDisconnected(device); // doh. (but don't break out of the loop, just be a zombie for now!) + } + } while (SDL_CameraThreadIterate(device)); + + SDL_CameraThreadShutdown(device); + + #if DEBUG_CAMERA + SDL_Log("CAMERA: dev[%p] End thread 'CameraThread'", devicep); + #endif + + return 0; +} + +bool SDL_PrepareCameraSurfaces(SDL_Camera *device) +{ + SDL_CameraSpec *appspec = &device->spec; // the app wants this format. + const SDL_CameraSpec *devspec = &device->actual_spec; // the hardware is set to this format. + + SDL_assert(device->acquire_surface == NULL); // shouldn't call this function twice on an opened camera! + SDL_assert(devspec->format != SDL_PIXELFORMAT_UNKNOWN); // fix the backend, it should have an actual format by now. + SDL_assert(devspec->width >= 0); // fix the backend, it should have an actual format by now. + SDL_assert(devspec->height >= 0); // fix the backend, it should have an actual format by now. + + if (appspec->width <= 0 || appspec->height <= 0) { + appspec->width = devspec->width; + appspec->height = devspec->height; + } + + if (appspec->format == SDL_PIXELFORMAT_UNKNOWN) { + appspec->format = devspec->format; + } + + if (appspec->framerate_denominator == 0) { + appspec->framerate_numerator = devspec->framerate_numerator; + appspec->framerate_denominator = devspec->framerate_denominator; + } + + if ((devspec->width == appspec->width) && (devspec->height == appspec->height)) { + device->needs_scaling = 0; + } else { + const Uint64 srcarea = ((Uint64) devspec->width) * ((Uint64) devspec->height); + const Uint64 dstarea = ((Uint64) appspec->width) * ((Uint64) appspec->height); + if (dstarea <= srcarea) { + device->needs_scaling = -1; // downscaling (or changing to new aspect ratio with same area) + } else { + device->needs_scaling = 1; // upscaling + } + } + + device->needs_conversion = (devspec->format != appspec->format); + + device->acquire_surface = SDL_CreateSurfaceFrom(devspec->width, devspec->height, devspec->format, NULL, 0); + if (!device->acquire_surface) { + goto failed; + } + SDL_SetSurfaceColorspace(device->acquire_surface, devspec->colorspace); + + // if we have to scale _and_ convert, we need a middleman surface, since we can't do both changes at once. + if (device->needs_scaling && device->needs_conversion) { + const bool downscaling_first = (device->needs_scaling < 0); + const SDL_CameraSpec *s = downscaling_first ? appspec : devspec; + const SDL_PixelFormat fmt = downscaling_first ? devspec->format : appspec->format; + device->conversion_surface = SDL_CreateSurface(s->width, s->height, fmt); + if (!device->conversion_surface) { + goto failed; + } + SDL_SetSurfaceColorspace(device->conversion_surface, devspec->colorspace); + } + + // output surfaces are in the app-requested format. If no conversion is necessary, we'll just use the pointers + // the backend fills into acquired_surface, and you can get all the way from DMA access in the camera hardware + // to the app without a single copy. 
Otherwise, these will be full surfaces that hold converted/scaled copies. + + for (int i = 0; i < (SDL_arraysize(device->output_surfaces) - 1); i++) { + device->output_surfaces[i].next = &device->output_surfaces[i + 1]; + } + device->empty_output_surfaces.next = device->output_surfaces; + + for (int i = 0; i < SDL_arraysize(device->output_surfaces); i++) { + SDL_Surface *surf; + if (device->needs_scaling || device->needs_conversion) { + surf = SDL_CreateSurface(appspec->width, appspec->height, appspec->format); + } else { + surf = SDL_CreateSurfaceFrom(appspec->width, appspec->height, appspec->format, NULL, 0); + } + if (!surf) { + goto failed; + } + SDL_SetSurfaceColorspace(surf, devspec->colorspace); + + device->output_surfaces[i].surface = surf; + } + + return true; + +failed: + if (device->acquire_surface) { + SDL_DestroySurface(device->acquire_surface); + device->acquire_surface = NULL; + } + + if (device->conversion_surface) { + SDL_DestroySurface(device->conversion_surface); + device->conversion_surface = NULL; + } + + for (int i = 0; i < SDL_arraysize(device->output_surfaces); i++) { + SDL_Surface *surf = device->output_surfaces[i].surface; + if (surf) { + SDL_DestroySurface(surf); + } + } + SDL_zeroa(device->output_surfaces); + + return false; +} + +static void ChooseBestCameraSpec(SDL_Camera *device, const SDL_CameraSpec *spec, SDL_CameraSpec *closest) +{ + // Find the closest available native format/size... + // + // We want the exact size if possible, even if we have + // to convert formats, because we can _probably_ do that + // conversion losslessly at less expense verses scaling. + // + // Failing that, we want the size that's closest to the + // requested aspect ratio, then the closest size within + // that. + + SDL_zerop(closest); + SDL_assert(((Uint32) SDL_PIXELFORMAT_UNKNOWN) == 0); // since we SDL_zerop'd to this value. + + if (device->num_specs == 0) { // device listed no specs! You get whatever you want! + if (spec) { + SDL_copyp(closest, spec); + } + return; + } else if (!spec) { // nothing specifically requested, get the best format we can... + // we sorted this into the "best" format order when adding the camera. + SDL_copyp(closest, &device->all_specs[0]); + } else { // specific thing requested, try to get as close to that as possible... + const int num_specs = device->num_specs; + int wantw = spec->width; + int wanth = spec->height; + + if (wantw > 0 && wanth > 0) { + // Find the sizes with the closest aspect ratio and then find the best fit of those. + const float wantaspect = ((float)wantw) / ((float)wanth); + const float epsilon = 1e-6f; + float closestaspect = -9999999.0f; + float closestdiff = 999999.0f; + int closestdiffw = 9999999; + + for (int i = 0; i < num_specs; i++) { + const SDL_CameraSpec *thisspec = &device->all_specs[i]; + const int thisw = thisspec->width; + const int thish = thisspec->height; + const float thisaspect = ((float)thisw) / ((float)thish); + const float aspectdiff = SDL_fabsf(wantaspect - thisaspect); + const float diff = SDL_fabsf(closestaspect - thisaspect); + const int diffw = SDL_abs(thisw - wantw); + if (diff < epsilon) { // matches current closestaspect? See if resolution is closer in size. + if (diffw < closestdiffw) { + closestdiffw = diffw; + closest->width = thisw; + closest->height = thish; + } + } else if (aspectdiff < closestdiff) { // this is a closer aspect ratio? Take it, reset resolution checks. 
+ closestdiff = aspectdiff; + closestaspect = thisaspect; + closestdiffw = diffw; + closest->width = thisw; + closest->height = thish; + } + } + } else { + SDL_copyp(closest, &device->all_specs[0]); + } + + SDL_assert(closest->width > 0); + SDL_assert(closest->height > 0); + + // okay, we have what we think is the best resolution, now we just need the best format that supports it... + const SDL_PixelFormat wantfmt = spec->format; + SDL_PixelFormat best_format = SDL_PIXELFORMAT_UNKNOWN; + SDL_Colorspace best_colorspace = SDL_COLORSPACE_UNKNOWN; + for (int i = 0; i < num_specs; i++) { + const SDL_CameraSpec *thisspec = &device->all_specs[i]; + if ((thisspec->width == closest->width) && (thisspec->height == closest->height)) { + if (best_format == SDL_PIXELFORMAT_UNKNOWN) { + best_format = thisspec->format; // spec list is sorted by what we consider "best" format, so unless we find an exact match later, first size match is the one! + best_colorspace = thisspec->colorspace; + } + if (thisspec->format == wantfmt) { + best_format = thisspec->format; + best_colorspace = thisspec->colorspace; + break; // exact match, stop looking. + } + } + } + + SDL_assert(best_format != SDL_PIXELFORMAT_UNKNOWN); + SDL_assert(best_colorspace != SDL_COLORSPACE_UNKNOWN); + closest->format = best_format; + closest->colorspace = best_colorspace; + + // We have a resolution and a format, find the closest framerate... + const float wantfps = spec->framerate_denominator ? ((float)spec->framerate_numerator / spec->framerate_denominator) : 0.0f; + float closestfps = 9999999.0f; + for (int i = 0; i < num_specs; i++) { + const SDL_CameraSpec *thisspec = &device->all_specs[i]; + if ((thisspec->format == closest->format) && (thisspec->width == closest->width) && (thisspec->height == closest->height)) { + if ((thisspec->framerate_numerator == spec->framerate_numerator) && (thisspec->framerate_denominator == spec->framerate_denominator)) { + closest->framerate_numerator = thisspec->framerate_numerator; + closest->framerate_denominator = thisspec->framerate_denominator; + break; // exact match, stop looking. + } + + const float thisfps = thisspec->framerate_denominator ? ((float)thisspec->framerate_numerator / thisspec->framerate_denominator) : 0.0f; + const float fpsdiff = SDL_fabsf(wantfps - thisfps); + if (fpsdiff < closestfps) { // this is a closest FPS? Take it until something closer arrives. + closestfps = fpsdiff; + closest->framerate_numerator = thisspec->framerate_numerator; + closest->framerate_denominator = thisspec->framerate_denominator; + } + } + } + } + + SDL_assert(closest->width > 0); + SDL_assert(closest->height > 0); + SDL_assert(closest->format != SDL_PIXELFORMAT_UNKNOWN); +} + +SDL_Camera *SDL_OpenCamera(SDL_CameraID instance_id, const SDL_CameraSpec *spec) +{ + SDL_Camera *device = ObtainPhysicalCamera(instance_id); + if (!device) { + return NULL; + } + + if (device->hidden != NULL) { + ReleaseCamera(device); + SDL_SetError("Camera already opened"); // we may remove this limitation at some point. + return NULL; + } + + SDL_SetAtomicInt(&device->shutdown, 0); + + // These start with the backend's implementation, but we might swap them out with zombie versions later. 
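+    // (SDL_CameraDisconnected() is what performs that swap if the device is lost
+    // while open.)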
+ device->WaitDevice = camera_driver.impl.WaitDevice; + device->AcquireFrame = camera_driver.impl.AcquireFrame; + device->ReleaseFrame = camera_driver.impl.ReleaseFrame; + + SDL_CameraSpec closest; + ChooseBestCameraSpec(device, spec, &closest); + + #if DEBUG_CAMERA + SDL_Log("CAMERA: App wanted [(%dx%d) fmt=%s framerate=%d/%d], chose [(%dx%d) fmt=%s framerate=%d/%d]", + spec ? spec->width : -1, spec ? spec->height : -1, spec ? SDL_GetPixelFormatName(spec->format) : "(null)", spec ? spec->framerate_numerator : -1, spec ? spec->framerate_denominator : -1, + closest.width, closest.height, SDL_GetPixelFormatName(closest.format), closest.framerate_numerator, closest.framerate_denominator); + #endif + + if (!camera_driver.impl.OpenDevice(device, &closest)) { + ClosePhysicalCamera(device); // in case anything is half-initialized. + ReleaseCamera(device); + return NULL; + } + + SDL_copyp(&device->spec, spec ? spec : &closest); + SDL_copyp(&device->actual_spec, &closest); + + // SDL_PIXELFORMAT_UNKNOWN here is taken as a signal that the backend + // doesn't know its format yet (Emscripten waiting for user permission, + // in this case), and the backend will call SDL_PrepareCameraSurfaces() + // itself, later but before the app is allowed to acquire images. + if (closest.format != SDL_PIXELFORMAT_UNKNOWN) { + if (!SDL_PrepareCameraSurfaces(device)) { + ClosePhysicalCamera(device); + ReleaseCamera(device); + return NULL; + } + } + + device->drop_frames = 1; + + // Start the camera thread if necessary + if (!camera_driver.impl.ProvidesOwnCallbackThread) { + char threadname[64]; + SDL_GetCameraThreadName(device, threadname, sizeof (threadname)); + device->thread = SDL_CreateThread(CameraThread, threadname, device); + if (!device->thread) { + ClosePhysicalCamera(device); + ReleaseCamera(device); + SDL_SetError("Couldn't create camera thread"); + return NULL; + } + } + + ReleaseCamera(device); // unlock, we're good to go! + + return device; // currently there's no separation between physical and logical device. +} + +SDL_Surface *SDL_AcquireCameraFrame(SDL_Camera *camera, Uint64 *timestampNS) +{ + if (timestampNS) { + *timestampNS = 0; + } + + if (!camera) { + SDL_InvalidParamError("camera"); + return NULL; + } + + SDL_Camera *device = camera; // currently there's no separation between physical and logical device. + + ObtainPhysicalCameraObj(device); + + if (device->permission <= 0) { + ReleaseCamera(device); + SDL_SetError("Camera permission has not been granted"); + return NULL; + } + + SDL_Surface *result = NULL; + + // frames are in this list from newest to oldest, so find the end of the list... + SurfaceList *slistprev = &device->filled_output_surfaces; + SurfaceList *slist = slistprev; + while (slist->next) { + slistprev = slist; + slist = slist->next; + } + + const bool list_is_empty = (slist == slistprev); + if (!list_is_empty) { // report the oldest frame. + if (timestampNS) { + *timestampNS = slist->timestampNS; + } + result = slist->surface; + slistprev->next = slist->next; // remove from filled list. + slist->next = device->app_held_output_surfaces.next; // add to app_held list. + device->app_held_output_surfaces.next = slist; + } + + ReleaseCamera(device); + + return result; +} + +void SDL_ReleaseCameraFrame(SDL_Camera *camera, SDL_Surface *frame) +{ + if (!camera || !frame) { + return; + } + + SDL_Camera *device = camera; // currently there's no separation between physical and logical device. 
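+    // Find the surface on the app_held list, hand backend-owned pixels back to
+    // the driver if they were passed through unconverted, and move the surface
+    // to the front of the empty list so it can be refilled.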
+ ObtainPhysicalCameraObj(device); + + SurfaceList *slistprev = &device->app_held_output_surfaces; + SurfaceList *slist; + for (slist = slistprev->next; slist != NULL; slist = slist->next) { + if (slist->surface == frame) { + break; + } + slistprev = slist; + } + + if (!slist) { + ReleaseCamera(device); + return; + } + + // this pointer was owned by the backend (DMA memory or whatever), clear it out. + if (!device->needs_conversion && !device->needs_scaling) { + device->ReleaseFrame(device, frame); + frame->pixels = NULL; + frame->pitch = 0; + } + + slist->timestampNS = 0; + + // remove from app_held list... + slistprev->next = slist->next; + + // insert at front of empty list (and we'll use it first when we need to fill a new frame). + slist->next = device->empty_output_surfaces.next; + device->empty_output_surfaces.next = slist; + + ReleaseCamera(device); +} + +SDL_CameraID SDL_GetCameraID(SDL_Camera *camera) +{ + SDL_CameraID result = 0; + if (!camera) { + SDL_InvalidParamError("camera"); + } else { + SDL_Camera *device = camera; // currently there's no separation between physical and logical device. + ObtainPhysicalCameraObj(device); + result = device->instance_id; + ReleaseCamera(device); + } + + return result; +} + +SDL_PropertiesID SDL_GetCameraProperties(SDL_Camera *camera) +{ + SDL_PropertiesID result = 0; + if (!camera) { + SDL_InvalidParamError("camera"); + } else { + SDL_Camera *device = camera; // currently there's no separation between physical and logical device. + ObtainPhysicalCameraObj(device); + if (device->props == 0) { + device->props = SDL_CreateProperties(); + } + result = device->props; + ReleaseCamera(device); + } + + return result; +} + +int SDL_GetCameraPermissionState(SDL_Camera *camera) +{ + int result; + if (!camera) { + SDL_InvalidParamError("camera"); + result = -1; + } else { + SDL_Camera *device = camera; // currently there's no separation between physical and logical device. + ObtainPhysicalCameraObj(device); + result = device->permission; + ReleaseCamera(device); + } + return result; +} + + +static void CompleteCameraEntryPoints(void) +{ + // this doesn't currently fill in stub implementations, it just asserts the backend filled them all in. + #define FILL_STUB(x) SDL_assert(camera_driver.impl.x != NULL) + FILL_STUB(DetectDevices); + FILL_STUB(OpenDevice); + FILL_STUB(CloseDevice); + FILL_STUB(AcquireFrame); + FILL_STUB(ReleaseFrame); + FILL_STUB(FreeDeviceHandle); + FILL_STUB(Deinitialize); + #undef FILL_STUB +} + +void SDL_QuitCamera(void) +{ + if (!camera_driver.name) { // not initialized?! + return; + } + + SDL_LockRWLockForWriting(camera_driver.device_hash_lock); + SDL_SetAtomicInt(&camera_driver.shutting_down, 1); + SDL_HashTable *device_hash = camera_driver.device_hash; + camera_driver.device_hash = NULL; + SDL_PendingCameraEvent *pending_events = camera_driver.pending_events.next; + camera_driver.pending_events.next = NULL; + SDL_SetAtomicInt(&camera_driver.device_count, 0); + SDL_UnlockRWLock(camera_driver.device_hash_lock); + + SDL_PendingCameraEvent *pending_next = NULL; + for (SDL_PendingCameraEvent *i = pending_events; i; i = pending_next) { + pending_next = i->next; + SDL_free(i); + } + + SDL_DestroyHashTable(device_hash); + + // Free the driver data + camera_driver.impl.Deinitialize(); + + SDL_DestroyRWLock(camera_driver.device_hash_lock); + + SDL_zero(camera_driver); +} + +// Physical camera objects are only destroyed when removed from the device hash. 
+static void SDLCALL DestroyCameraHashItem(void *userdata, const void *key, const void *value) +{ + SDL_Camera *device = (SDL_Camera *) value; + ClosePhysicalCamera(device); + camera_driver.impl.FreeDeviceHandle(device); + SDL_DestroyMutex(device->lock); + SDL_free(device->all_specs); + SDL_free(device->name); + SDL_free(device); +} + +bool SDL_CameraInit(const char *driver_name) +{ + if (SDL_GetCurrentCameraDriver()) { + SDL_QuitCamera(); // shutdown driver if already running. + } + + SDL_RWLock *device_hash_lock = SDL_CreateRWLock(); // create this early, so if it fails we don't have to tear down the whole camera subsystem. + if (!device_hash_lock) { + return false; + } + + SDL_HashTable *device_hash = SDL_CreateHashTable(0, false, SDL_HashID, SDL_KeyMatchID, DestroyCameraHashItem, NULL); + if (!device_hash) { + SDL_DestroyRWLock(device_hash_lock); + return false; + } + + // Select the proper camera driver + if (!driver_name) { + driver_name = SDL_GetHint(SDL_HINT_CAMERA_DRIVER); + } + + bool initialized = false; + bool tried_to_init = false; + + if (driver_name && (*driver_name != 0)) { + char *driver_name_copy = SDL_strdup(driver_name); + const char *driver_attempt = driver_name_copy; + + if (!driver_name_copy) { + SDL_DestroyRWLock(device_hash_lock); + SDL_DestroyHashTable(device_hash); + return false; + } + + while (driver_attempt && (*driver_attempt != 0) && !initialized) { + char *driver_attempt_end = SDL_strchr(driver_attempt, ','); + if (driver_attempt_end) { + *driver_attempt_end = '\0'; + } + + for (int i = 0; bootstrap[i]; i++) { + if (SDL_strcasecmp(bootstrap[i]->name, driver_attempt) == 0) { + tried_to_init = true; + SDL_zero(camera_driver); + camera_driver.pending_events_tail = &camera_driver.pending_events; + camera_driver.device_hash_lock = device_hash_lock; + camera_driver.device_hash = device_hash; + if (bootstrap[i]->init(&camera_driver.impl)) { + camera_driver.name = bootstrap[i]->name; + camera_driver.desc = bootstrap[i]->desc; + initialized = true; + } + break; + } + } + + driver_attempt = (driver_attempt_end) ? (driver_attempt_end + 1) : NULL; + } + + SDL_free(driver_name_copy); + } else { + for (int i = 0; !initialized && bootstrap[i]; i++) { + if (bootstrap[i]->demand_only) { + continue; + } + + tried_to_init = true; + SDL_zero(camera_driver); + camera_driver.pending_events_tail = &camera_driver.pending_events; + camera_driver.device_hash_lock = device_hash_lock; + camera_driver.device_hash = device_hash; + if (bootstrap[i]->init(&camera_driver.impl)) { + camera_driver.name = bootstrap[i]->name; + camera_driver.desc = bootstrap[i]->desc; + initialized = true; + } + } + } + + if (!initialized) { + // specific drivers will set the error message if they fail, but otherwise we do it here. + if (!tried_to_init) { + if (driver_name) { + SDL_SetError("Camera driver '%s' not available", driver_name); + } else { + SDL_SetError("No available camera driver"); + } + } + + SDL_zero(camera_driver); + SDL_DestroyRWLock(device_hash_lock); + SDL_DestroyHashTable(device_hash); + return false; // No driver was available, so fail. + } + + CompleteCameraEntryPoints(); + + // Make sure we have a list of devices available at startup... + camera_driver.impl.DetectDevices(); + + return true; +} + +// This is an internal function, so SDL_PumpEvents() can check for pending camera device events. +// ("UpdateSubsystem" is the same naming that the other things that hook into PumpEvents use.) 
+void SDL_UpdateCamera(void) +{ + SDL_LockRWLockForReading(camera_driver.device_hash_lock); + SDL_PendingCameraEvent *pending_events = camera_driver.pending_events.next; + SDL_UnlockRWLock(camera_driver.device_hash_lock); + + if (!pending_events) { + return; // nothing to do, check next time. + } + + // okay, let's take this whole list of events so we can dump the lock, and new ones can queue up for a later update. + SDL_LockRWLockForWriting(camera_driver.device_hash_lock); + pending_events = camera_driver.pending_events.next; // in case this changed... + camera_driver.pending_events.next = NULL; + camera_driver.pending_events_tail = &camera_driver.pending_events; + SDL_UnlockRWLock(camera_driver.device_hash_lock); + + SDL_PendingCameraEvent *pending_next = NULL; + for (SDL_PendingCameraEvent *i = pending_events; i; i = pending_next) { + pending_next = i->next; + if (SDL_EventEnabled(i->type)) { + SDL_Event event; + SDL_zero(event); + event.type = i->type; + event.cdevice.which = (Uint32) i->devid; + SDL_PushEvent(&event); + } + SDL_free(i); + } +} + diff --git a/contrib/SDL-3.2.8/src/camera/SDL_camera_c.h b/contrib/SDL-3.2.8/src/camera/SDL_camera_c.h new file mode 100644 index 0000000..316ae7d --- /dev/null +++ b/contrib/SDL-3.2.8/src/camera/SDL_camera_c.h @@ -0,0 +1,35 @@ +/* + Simple DirectMedia Layer + Copyright (C) 1997-2025 Sam Lantinga + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. +*/ +#include "../SDL_internal.h" + +#ifndef SDL_camera_c_h_ +#define SDL_camera_c_h_ + +// Initialize the camera subsystem +extern bool SDL_CameraInit(const char *driver_name); + +// Shutdown the camera subsystem +extern void SDL_QuitCamera(void); + +// "Pump" the event queue. +extern void SDL_UpdateCamera(void); + +#endif // SDL_camera_c_h_ diff --git a/contrib/SDL-3.2.8/src/camera/SDL_syscamera.h b/contrib/SDL-3.2.8/src/camera/SDL_syscamera.h new file mode 100644 index 0000000..30a02f3 --- /dev/null +++ b/contrib/SDL-3.2.8/src/camera/SDL_syscamera.h @@ -0,0 +1,224 @@ +/* + Simple DirectMedia Layer + Copyright (C) 1997-2025 Sam Lantinga + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. 
Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. +*/ +#include "../SDL_internal.h" + +#ifndef SDL_syscamera_h_ +#define SDL_syscamera_h_ + +#include "../video/SDL_surface_c.h" + +#define DEBUG_CAMERA 0 + +/* Backends should call this as devices are added to the system (such as + a USB camera being plugged in), and should also be called for + for every device found during DetectDevices(). */ +extern SDL_Camera *SDL_AddCamera(const char *name, SDL_CameraPosition position, int num_specs, const SDL_CameraSpec *specs, void *handle); + +/* Backends should call this if an opened camera device is lost. + This can happen due to i/o errors, or a device being unplugged, etc. */ +extern void SDL_CameraDisconnected(SDL_Camera *device); + +// Find an SDL_Camera, selected by a callback. NULL if not found. DOES NOT LOCK THE DEVICE. +extern SDL_Camera *SDL_FindPhysicalCameraByCallback(bool (*callback)(SDL_Camera *device, void *userdata), void *userdata); + +// Backends should call this when the user has approved/denied access to a camera. +extern void SDL_CameraPermissionOutcome(SDL_Camera *device, bool approved); + +// Backends can call this to get a standardized name for a thread to power a specific camera device. +extern char *SDL_GetCameraThreadName(SDL_Camera *device, char *buf, size_t buflen); + +// Backends can call these to change a device's refcount. +extern void RefPhysicalCamera(SDL_Camera *device); +extern void UnrefPhysicalCamera(SDL_Camera *device); + +// These functions are the heart of the camera threads. Backends can call them directly if they aren't using the SDL-provided thread. +extern void SDL_CameraThreadSetup(SDL_Camera *device); +extern bool SDL_CameraThreadIterate(SDL_Camera *device); +extern void SDL_CameraThreadShutdown(SDL_Camera *device); + +// Backends can call this if they have to finish initializing later, like Emscripten. Most backends should _not_ call this directly! +extern bool SDL_PrepareCameraSurfaces(SDL_Camera *device); + + +// common utility functionality to gather up camera specs. Not required! +typedef struct CameraFormatAddData +{ + SDL_CameraSpec *specs; + int num_specs; + int allocated_specs; +} CameraFormatAddData; + +bool SDL_AddCameraFormat(CameraFormatAddData *data, SDL_PixelFormat format, SDL_Colorspace colorspace, int w, int h, int framerate_numerator, int framerate_denominator); + +typedef enum SDL_CameraFrameResult +{ + SDL_CAMERA_FRAME_ERROR, + SDL_CAMERA_FRAME_SKIP, + SDL_CAMERA_FRAME_READY +} SDL_CameraFrameResult; + +typedef struct SurfaceList +{ + SDL_Surface *surface; + Uint64 timestampNS; + struct SurfaceList *next; +} SurfaceList; + +// Define the SDL camera driver structure +struct SDL_Camera +{ + // A mutex for locking + SDL_Mutex *lock; + + // Human-readable device name. + char *name; + + // Position of camera (front-facing, back-facing, etc). + SDL_CameraPosition position; + + // When refcount hits zero, we destroy the device object. + SDL_AtomicInt refcount; + + // These are, initially, set from camera_driver, but we might swap them out with Zombie versions on disconnect/failure. + bool (*WaitDevice)(SDL_Camera *device); + SDL_CameraFrameResult (*AcquireFrame)(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS); + void (*ReleaseFrame)(SDL_Camera *device, SDL_Surface *frame); + + // All supported formats/dimensions for this device. 
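+    // Sorted "best"-first by CameraSpecCmp() in SDL_camera.c, terminated by a zeroed entry.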
+ SDL_CameraSpec *all_specs; + + // Elements in all_specs. + int num_specs; + + // The device's actual specification that the camera is outputting, before conversion. + SDL_CameraSpec actual_spec; + + // The device's current camera specification, after conversions. + SDL_CameraSpec spec; + + // Unique value assigned at creation time. + SDL_CameraID instance_id; + + // Driver-specific hardware data on how to open device (`hidden` is driver-specific data _when opened_). + void *handle; + + // Dropping the first frame(s) after open seems to help timing on some platforms. + int drop_frames; + + // Backend timestamp of first acquired frame, so we can keep these meaningful regardless of epoch. + Uint64 base_timestamp; + + // SDL timestamp of first acquired frame, so we can roughly convert to SDL ticks. + Uint64 adjust_timestamp; + + // Pixel data flows from the driver into these, then gets converted for the app if necessary. + SDL_Surface *acquire_surface; + + // acquire_surface converts or scales to this surface before landing in output_surfaces, if necessary. + SDL_Surface *conversion_surface; + + // A queue of surfaces that buffer converted/scaled frames of video until the app claims them. + SurfaceList output_surfaces[8]; + SurfaceList filled_output_surfaces; // this is FIFO + SurfaceList empty_output_surfaces; // this is LIFO + SurfaceList app_held_output_surfaces; + + // A fake video frame we allocate if the camera fails/disconnects. + Uint8 *zombie_pixels; + + // non-zero if acquire_surface needs to be scaled for final output. + int needs_scaling; // -1: downscale, 0: no scaling, 1: upscale + + // true if acquire_surface needs to be converted for final output. + bool needs_conversion; + + // Current state flags + SDL_AtomicInt shutdown; + SDL_AtomicInt zombie; + + // A thread to feed the camera device + SDL_Thread *thread; + + // Optional properties. + SDL_PropertiesID props; + + // -1: user denied permission, 0: waiting for user response, 1: user approved permission. + int permission; + + // Data private to this driver, used when device is opened and running. + struct SDL_PrivateCameraData *hidden; +}; + +typedef struct SDL_CameraDriverImpl +{ + void (*DetectDevices)(void); + bool (*OpenDevice)(SDL_Camera *device, const SDL_CameraSpec *spec); + void (*CloseDevice)(SDL_Camera *device); + bool (*WaitDevice)(SDL_Camera *device); + SDL_CameraFrameResult (*AcquireFrame)(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS); // set frame->pixels, frame->pitch, and *timestampNS! + void (*ReleaseFrame)(SDL_Camera *device, SDL_Surface *frame); // Reclaim frame->pixels and frame->pitch! + void (*FreeDeviceHandle)(SDL_Camera *device); // SDL is done with this device; free the handle from SDL_AddCamera() + void (*Deinitialize)(void); + + bool ProvidesOwnCallbackThread; +} SDL_CameraDriverImpl; + +typedef struct SDL_PendingCameraEvent +{ + Uint32 type; + SDL_CameraID devid; + struct SDL_PendingCameraEvent *next; +} SDL_PendingCameraEvent; + +typedef struct SDL_CameraDriver +{ + const char *name; // The name of this camera driver + const char *desc; // The description of this camera driver + SDL_CameraDriverImpl impl; // the backend's interface + + SDL_RWLock *device_hash_lock; // A rwlock that protects `device_hash` // !!! FIXME: device_hash _also_ has a rwlock, see if we still need this one. 
+ SDL_HashTable *device_hash; // the collection of currently-available camera devices + SDL_PendingCameraEvent pending_events; + SDL_PendingCameraEvent *pending_events_tail; + + SDL_AtomicInt device_count; + SDL_AtomicInt shutting_down; // non-zero during SDL_Quit, so we known not to accept any last-minute device hotplugs. +} SDL_CameraDriver; + +typedef struct CameraBootStrap +{ + const char *name; + const char *desc; + bool (*init)(SDL_CameraDriverImpl *impl); + bool demand_only; // if true: request explicitly, or it won't be available. +} CameraBootStrap; + +// Not all of these are available in a given build. Use #ifdefs, etc. +extern CameraBootStrap DUMMYCAMERA_bootstrap; +extern CameraBootStrap PIPEWIRECAMERA_bootstrap; +extern CameraBootStrap V4L2_bootstrap; +extern CameraBootStrap COREMEDIA_bootstrap; +extern CameraBootStrap ANDROIDCAMERA_bootstrap; +extern CameraBootStrap EMSCRIPTENCAMERA_bootstrap; +extern CameraBootStrap MEDIAFOUNDATION_bootstrap; +extern CameraBootStrap VITACAMERA_bootstrap; + +#endif // SDL_syscamera_h_ diff --git a/contrib/SDL-3.2.8/src/camera/android/SDL_camera_android.c b/contrib/SDL-3.2.8/src/camera/android/SDL_camera_android.c new file mode 100644 index 0000000..54b539a --- /dev/null +++ b/contrib/SDL-3.2.8/src/camera/android/SDL_camera_android.c @@ -0,0 +1,905 @@ +/* + Simple DirectMedia Layer + Copyright (C) 1997-2025 Sam Lantinga + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. +*/ +#include "SDL_internal.h" + +#include "../SDL_syscamera.h" +#include "../SDL_camera_c.h" +#include "../../video/SDL_pixels_c.h" +#include "../../video/SDL_surface_c.h" +#include "../../thread/SDL_systhread.h" + +#ifdef SDL_CAMERA_DRIVER_ANDROID + +/* + * AndroidManifest.xml: + * + * + * + * Very likely SDL must be build with YUV support (done by default) + * + * https://developer.android.com/reference/android/hardware/camera2/CameraManager + * "All camera devices intended to be operated concurrently, must be opened using openCamera(String, CameraDevice.StateCallback, Handler), + * before configuring sessions on any of the camera devices." + */ + +// this is kinda gross, but on older NDK headers all the camera stuff is +// gated behind __ANDROID_API__. We'll dlopen() it at runtime, so we'll do +// the right thing on pre-Android 7.0 devices, but we still +// need the struct declarations and such in those headers. +// The other option is to make a massive jump in minimum Android version we +// support--going from ancient to merely really old--but this seems less +// distasteful and using dlopen matches practices on other SDL platforms. +// We'll see if it works out. 
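// To make the loading pattern described above concrete, here is a minimal, hypothetical
// sketch (not part of this backend; the "example_" names are invented for illustration):
// resolve a single libcamera2ndk entry point at runtime and fail gracefully when the
// library or symbol is missing, as it will be on devices older than Android 7.0 (API 24).

#include <stdbool.h>
#include <dlfcn.h>
#include <camera/NdkCameraManager.h>

typedef ACameraManager *(*example_ACameraManager_create_fn)(void);

static void *example_lib = NULL;
static example_ACameraManager_create_fn example_ACameraManager_create = NULL;

static bool example_LoadCameraNDK(void)
{
    example_lib = dlopen("libcamera2ndk.so", RTLD_NOW | RTLD_LOCAL);
    if (!example_lib) {
        return false;  // library not present: behave as if the system simply has no cameras.
    }

    example_ACameraManager_create = (example_ACameraManager_create_fn) dlsym(example_lib, "ACameraManager_create");
    if (!example_ACameraManager_create) {
        dlclose(example_lib);
        example_lib = NULL;
        return false;  // symbol missing: same graceful fallback.
    }

    return true;  // example_ACameraManager_create() can now be called safely.
}

// The real backend resolves a few dozen symbols this way through a LOADSYM macro in its
// init function and dlclose()s both libraries again in its Deinitialize hook.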
+#if __ANDROID_API__ < 24 +#undef __ANDROID_API__ +#define __ANDROID_API__ 24 +#endif + +#include +#include +#include +#include +#include + +#include "../../core/android/SDL_android.h" + +static void *libcamera2ndk = NULL; +typedef ACameraManager* (*pfnACameraManager_create)(void); +typedef camera_status_t (*pfnACameraManager_registerAvailabilityCallback)(ACameraManager*, const ACameraManager_AvailabilityCallbacks*); +typedef camera_status_t (*pfnACameraManager_unregisterAvailabilityCallback)(ACameraManager*, const ACameraManager_AvailabilityCallbacks*); +typedef camera_status_t (*pfnACameraManager_getCameraIdList)(ACameraManager*, ACameraIdList**); +typedef void (*pfnACameraManager_deleteCameraIdList)(ACameraIdList*); +typedef void (*pfnACameraCaptureSession_close)(ACameraCaptureSession*); +typedef void (*pfnACaptureRequest_free)(ACaptureRequest*); +typedef void (*pfnACameraOutputTarget_free)(ACameraOutputTarget*); +typedef camera_status_t (*pfnACameraDevice_close)(ACameraDevice*); +typedef void (*pfnACameraManager_delete)(ACameraManager*); +typedef void (*pfnACaptureSessionOutputContainer_free)(ACaptureSessionOutputContainer*); +typedef void (*pfnACaptureSessionOutput_free)(ACaptureSessionOutput*); +typedef camera_status_t (*pfnACameraManager_openCamera)(ACameraManager*, const char*, ACameraDevice_StateCallbacks*, ACameraDevice**); +typedef camera_status_t (*pfnACameraDevice_createCaptureRequest)(const ACameraDevice*, ACameraDevice_request_template, ACaptureRequest**); +typedef camera_status_t (*pfnACameraDevice_createCaptureSession)(ACameraDevice*, const ACaptureSessionOutputContainer*, const ACameraCaptureSession_stateCallbacks*,ACameraCaptureSession**); +typedef camera_status_t (*pfnACameraManager_getCameraCharacteristics)(ACameraManager*, const char*, ACameraMetadata**); +typedef void (*pfnACameraMetadata_free)(ACameraMetadata*); +typedef camera_status_t (*pfnACameraMetadata_getConstEntry)(const ACameraMetadata*, uint32_t tag, ACameraMetadata_const_entry*); +typedef camera_status_t (*pfnACameraCaptureSession_setRepeatingRequest)(ACameraCaptureSession*, ACameraCaptureSession_captureCallbacks*, int numRequests, ACaptureRequest**, int*); +typedef camera_status_t (*pfnACameraOutputTarget_create)(ACameraWindowType*,ACameraOutputTarget**); +typedef camera_status_t (*pfnACaptureRequest_addTarget)(ACaptureRequest*, const ACameraOutputTarget*); +typedef camera_status_t (*pfnACaptureSessionOutputContainer_add)(ACaptureSessionOutputContainer*, const ACaptureSessionOutput*); +typedef camera_status_t (*pfnACaptureSessionOutputContainer_create)(ACaptureSessionOutputContainer**); +typedef camera_status_t (*pfnACaptureSessionOutput_create)(ACameraWindowType*, ACaptureSessionOutput**); +static pfnACameraManager_create pACameraManager_create = NULL; +static pfnACameraManager_registerAvailabilityCallback pACameraManager_registerAvailabilityCallback = NULL; +static pfnACameraManager_unregisterAvailabilityCallback pACameraManager_unregisterAvailabilityCallback = NULL; +static pfnACameraManager_getCameraIdList pACameraManager_getCameraIdList = NULL; +static pfnACameraManager_deleteCameraIdList pACameraManager_deleteCameraIdList = NULL; +static pfnACameraCaptureSession_close pACameraCaptureSession_close = NULL; +static pfnACaptureRequest_free pACaptureRequest_free = NULL; +static pfnACameraOutputTarget_free pACameraOutputTarget_free = NULL; +static pfnACameraDevice_close pACameraDevice_close = NULL; +static pfnACameraManager_delete pACameraManager_delete = NULL; +static 
pfnACaptureSessionOutputContainer_free pACaptureSessionOutputContainer_free = NULL; +static pfnACaptureSessionOutput_free pACaptureSessionOutput_free = NULL; +static pfnACameraManager_openCamera pACameraManager_openCamera = NULL; +static pfnACameraDevice_createCaptureRequest pACameraDevice_createCaptureRequest = NULL; +static pfnACameraDevice_createCaptureSession pACameraDevice_createCaptureSession = NULL; +static pfnACameraManager_getCameraCharacteristics pACameraManager_getCameraCharacteristics = NULL; +static pfnACameraMetadata_free pACameraMetadata_free = NULL; +static pfnACameraMetadata_getConstEntry pACameraMetadata_getConstEntry = NULL; +static pfnACameraCaptureSession_setRepeatingRequest pACameraCaptureSession_setRepeatingRequest = NULL; +static pfnACameraOutputTarget_create pACameraOutputTarget_create = NULL; +static pfnACaptureRequest_addTarget pACaptureRequest_addTarget = NULL; +static pfnACaptureSessionOutputContainer_add pACaptureSessionOutputContainer_add = NULL; +static pfnACaptureSessionOutputContainer_create pACaptureSessionOutputContainer_create = NULL; +static pfnACaptureSessionOutput_create pACaptureSessionOutput_create = NULL; + +static void *libmediandk = NULL; +typedef void (*pfnAImage_delete)(AImage*); +typedef media_status_t (*pfnAImage_getTimestamp)(const AImage*, int64_t*); +typedef media_status_t (*pfnAImage_getNumberOfPlanes)(const AImage*, int32_t*); +typedef media_status_t (*pfnAImage_getPlaneRowStride)(const AImage*, int, int32_t*); +typedef media_status_t (*pfnAImage_getPlaneData)(const AImage*, int, uint8_t**, int*); +typedef media_status_t (*pfnAImageReader_acquireNextImage)(AImageReader*, AImage**); +typedef void (*pfnAImageReader_delete)(AImageReader*); +typedef media_status_t (*pfnAImageReader_setImageListener)(AImageReader*, AImageReader_ImageListener*); +typedef media_status_t (*pfnAImageReader_getWindow)(AImageReader*, ANativeWindow**); +typedef media_status_t (*pfnAImageReader_new)(int32_t, int32_t, int32_t, int32_t, AImageReader**); +static pfnAImage_delete pAImage_delete = NULL; +static pfnAImage_getTimestamp pAImage_getTimestamp = NULL; +static pfnAImage_getNumberOfPlanes pAImage_getNumberOfPlanes = NULL; +static pfnAImage_getPlaneRowStride pAImage_getPlaneRowStride = NULL; +static pfnAImage_getPlaneData pAImage_getPlaneData = NULL; +static pfnAImageReader_acquireNextImage pAImageReader_acquireNextImage = NULL; +static pfnAImageReader_delete pAImageReader_delete = NULL; +static pfnAImageReader_setImageListener pAImageReader_setImageListener = NULL; +static pfnAImageReader_getWindow pAImageReader_getWindow = NULL; +static pfnAImageReader_new pAImageReader_new = NULL; + +typedef media_status_t (*pfnAImage_getWidth)(const AImage*, int32_t*); +typedef media_status_t (*pfnAImage_getHeight)(const AImage*, int32_t*); +static pfnAImage_getWidth pAImage_getWidth = NULL; +static pfnAImage_getHeight pAImage_getHeight = NULL; + +struct SDL_PrivateCameraData +{ + ACameraDevice *device; + AImageReader *reader; + ANativeWindow *window; + ACaptureSessionOutput *sessionOutput; + ACaptureSessionOutputContainer *sessionOutputContainer; + ACameraOutputTarget *outputTarget; + ACaptureRequest *request; + ACameraCaptureSession *session; + SDL_CameraSpec requested_spec; +}; + +static bool SetErrorStr(const char *what, const char *errstr, const int rc) +{ + char errbuf[128]; + if (!errstr) { + SDL_snprintf(errbuf, sizeof (errbuf), "Unknown error #%d", rc); + errstr = errbuf; + } + return SDL_SetError("%s: %s", what, errstr); +} + +static const char 
*CameraStatusStr(const camera_status_t rc) +{ + switch (rc) { + case ACAMERA_OK: return "no error"; + case ACAMERA_ERROR_UNKNOWN: return "unknown error"; + case ACAMERA_ERROR_INVALID_PARAMETER: return "invalid parameter"; + case ACAMERA_ERROR_CAMERA_DISCONNECTED: return "camera disconnected"; + case ACAMERA_ERROR_NOT_ENOUGH_MEMORY: return "not enough memory"; + case ACAMERA_ERROR_METADATA_NOT_FOUND: return "metadata not found"; + case ACAMERA_ERROR_CAMERA_DEVICE: return "camera device error"; + case ACAMERA_ERROR_CAMERA_SERVICE: return "camera service error"; + case ACAMERA_ERROR_SESSION_CLOSED: return "session closed"; + case ACAMERA_ERROR_INVALID_OPERATION: return "invalid operation"; + case ACAMERA_ERROR_STREAM_CONFIGURE_FAIL: return "configure failure"; + case ACAMERA_ERROR_CAMERA_IN_USE: return "camera in use"; + case ACAMERA_ERROR_MAX_CAMERA_IN_USE: return "max cameras in use"; + case ACAMERA_ERROR_CAMERA_DISABLED: return "camera disabled"; + case ACAMERA_ERROR_PERMISSION_DENIED: return "permission denied"; + case ACAMERA_ERROR_UNSUPPORTED_OPERATION: return "unsupported operation"; + default: break; + } + + return NULL; // unknown error +} + +static bool SetCameraError(const char *what, const camera_status_t rc) +{ + return SetErrorStr(what, CameraStatusStr(rc), (int) rc); +} + +static const char *MediaStatusStr(const media_status_t rc) +{ + switch (rc) { + case AMEDIA_OK: return "no error"; + case AMEDIACODEC_ERROR_INSUFFICIENT_RESOURCE: return "insufficient resources"; + case AMEDIACODEC_ERROR_RECLAIMED: return "reclaimed"; + case AMEDIA_ERROR_UNKNOWN: return "unknown error"; + case AMEDIA_ERROR_MALFORMED: return "malformed"; + case AMEDIA_ERROR_UNSUPPORTED: return "unsupported"; + case AMEDIA_ERROR_INVALID_OBJECT: return "invalid object"; + case AMEDIA_ERROR_INVALID_PARAMETER: return "invalid parameter"; + case AMEDIA_ERROR_INVALID_OPERATION: return "invalid operation"; + case AMEDIA_ERROR_END_OF_STREAM: return "end of stream"; + case AMEDIA_ERROR_IO: return "i/o error"; + case AMEDIA_ERROR_WOULD_BLOCK: return "operation would block"; + case AMEDIA_DRM_NOT_PROVISIONED: return "DRM not provisioned"; + case AMEDIA_DRM_RESOURCE_BUSY: return "DRM resource busy"; + case AMEDIA_DRM_DEVICE_REVOKED: return "DRM device revoked"; + case AMEDIA_DRM_SHORT_BUFFER: return "DRM short buffer"; + case AMEDIA_DRM_SESSION_NOT_OPENED: return "DRM session not opened"; + case AMEDIA_DRM_TAMPER_DETECTED: return "DRM tampering detected"; + case AMEDIA_DRM_VERIFY_FAILED: return "DRM verify failed"; + case AMEDIA_DRM_NEED_KEY: return "DRM need key"; + case AMEDIA_DRM_LICENSE_EXPIRED: return "DRM license expired"; + case AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE: return "no buffer available"; + case AMEDIA_IMGREADER_MAX_IMAGES_ACQUIRED: return "maximum images acquired"; + case AMEDIA_IMGREADER_CANNOT_LOCK_IMAGE: return "cannot lock image"; + case AMEDIA_IMGREADER_CANNOT_UNLOCK_IMAGE: return "cannot unlock image"; + case AMEDIA_IMGREADER_IMAGE_NOT_LOCKED: return "image not locked"; + default: break; + } + + return NULL; // unknown error +} + +static bool SetMediaError(const char *what, const media_status_t rc) +{ + return SetErrorStr(what, MediaStatusStr(rc), (int) rc); +} + + +static ACameraManager *cameraMgr = NULL; + +static bool CreateCameraManager(void) +{ + SDL_assert(cameraMgr == NULL); + + cameraMgr = pACameraManager_create(); + if (!cameraMgr) { + return SDL_SetError("Error creating ACameraManager"); + } + return true; +} + +static void DestroyCameraManager(void) +{ + if (cameraMgr) { + 
pACameraManager_delete(cameraMgr); + cameraMgr = NULL; + } +} + +static void format_android_to_sdl(Uint32 fmt, SDL_PixelFormat *format, SDL_Colorspace *colorspace) +{ + switch (fmt) { + #define CASE(x, y, z) case x: *format = y; *colorspace = z; return + CASE(AIMAGE_FORMAT_YUV_420_888, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_LIMITED); + CASE(AIMAGE_FORMAT_RGB_565, SDL_PIXELFORMAT_RGB565, SDL_COLORSPACE_SRGB); + CASE(AIMAGE_FORMAT_RGB_888, SDL_PIXELFORMAT_XRGB8888, SDL_COLORSPACE_SRGB); + CASE(AIMAGE_FORMAT_RGBA_8888, SDL_PIXELFORMAT_RGBA8888, SDL_COLORSPACE_SRGB); + CASE(AIMAGE_FORMAT_RGBX_8888, SDL_PIXELFORMAT_RGBX8888, SDL_COLORSPACE_SRGB); + CASE(AIMAGE_FORMAT_RGBA_FP16, SDL_PIXELFORMAT_RGBA64_FLOAT, SDL_COLORSPACE_SRGB); + #undef CASE + default: break; + } + + #if DEBUG_CAMERA + //SDL_Log("Unknown format AIMAGE_FORMAT '%d'", fmt); + #endif + + *format = SDL_PIXELFORMAT_UNKNOWN; + *colorspace = SDL_COLORSPACE_UNKNOWN; +} + +static Uint32 format_sdl_to_android(SDL_PixelFormat fmt) +{ + switch (fmt) { + #define CASE(x, y) case y: return x + CASE(AIMAGE_FORMAT_YUV_420_888, SDL_PIXELFORMAT_NV12); + CASE(AIMAGE_FORMAT_RGB_565, SDL_PIXELFORMAT_RGB565); + CASE(AIMAGE_FORMAT_RGB_888, SDL_PIXELFORMAT_XRGB8888); + CASE(AIMAGE_FORMAT_RGBA_8888, SDL_PIXELFORMAT_RGBA8888); + CASE(AIMAGE_FORMAT_RGBX_8888, SDL_PIXELFORMAT_RGBX8888); + #undef CASE + default: + return 0; + } +} + +static bool ANDROIDCAMERA_WaitDevice(SDL_Camera *device) +{ + return true; // this isn't used atm, since we run our own thread via onImageAvailable callbacks. +} + +static SDL_CameraFrameResult ANDROIDCAMERA_AcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS) +{ + SDL_CameraFrameResult result = SDL_CAMERA_FRAME_READY; + media_status_t res; + AImage *image = NULL; + + res = pAImageReader_acquireNextImage(device->hidden->reader, &image); + // We could also use this one: + //res = AImageReader_acquireLatestImage(device->hidden->reader, &image); + + SDL_assert(res != AMEDIA_IMGREADER_NO_BUFFER_AVAILABLE); // we should only be here if onImageAvailable was called. + + if (res != AMEDIA_OK) { + SetMediaError("Error AImageReader_acquireNextImage", res); + return SDL_CAMERA_FRAME_ERROR; + } + + int64_t atimestamp = 0; + if (pAImage_getTimestamp(image, &atimestamp) == AMEDIA_OK) { + *timestampNS = (Uint64) atimestamp; + } else { + *timestampNS = 0; + } + + // !!! FIXME: this currently copies the data to the surface (see FIXME about non-contiguous planar surfaces, but in theory we could just keep this locked until ReleaseFrame... + int32_t num_planes = 0; + pAImage_getNumberOfPlanes(image, &num_planes); + + if ((num_planes == 3) && (device->spec.format == SDL_PIXELFORMAT_NV12)) { + num_planes--; // treat the interleaved planes as one. 
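        // Note on the assumption baked in here: YUV_420_888 images from AImageReader
        // generally expose Y, U and V planes whose chroma samples are interleaved in
        // memory (pixel stride 2), i.e. an NV12/NV21-style layout. Folding the plane
        // count to two makes the copy below size the destination as pitch*h for Y
        // plus pitch*((h + 1) / 2) for the combined CbCr plane.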
+ } + + size_t buflen = 0; + pAImage_getPlaneRowStride(image, 0, &frame->pitch); + for (int i = 0; (i < num_planes) && (i < 3); i++) { + int32_t expected; + if (i == 0) { + expected = frame->pitch * frame->h; + } else { + expected = frame->pitch * (frame->h + 1) / 2; + } + buflen += expected; + } + + frame->pixels = SDL_aligned_alloc(SDL_GetSIMDAlignment(), buflen); + if (frame->pixels == NULL) { + result = SDL_CAMERA_FRAME_ERROR; + } else { + Uint8 *dst = frame->pixels; + + for (int i = 0; (i < num_planes) && (i < 3); i++) { + uint8_t *data = NULL; + int32_t datalen = 0; + int32_t expected; + if (i == 0) { + expected = frame->pitch * frame->h; + } else { + expected = frame->pitch * (frame->h + 1) / 2; + } + pAImage_getPlaneData(image, i, &data, &datalen); + + int32_t row_stride = 0; + pAImage_getPlaneRowStride(image, i, &row_stride); + SDL_assert(row_stride == frame->pitch); + SDL_memcpy(dst, data, SDL_min(expected, datalen)); + dst += expected; + } + } + + pAImage_delete(image); + + return result; +} + +static void ANDROIDCAMERA_ReleaseFrame(SDL_Camera *device, SDL_Surface *frame) +{ + // !!! FIXME: this currently copies the data to the surface, but in theory we could just keep the AImage until ReleaseFrame... + SDL_aligned_free(frame->pixels); +} + +static void onImageAvailable(void *context, AImageReader *reader) +{ + #if DEBUG_CAMERA + SDL_Log("CAMERA: CB onImageAvailable"); + #endif + SDL_Camera *device = (SDL_Camera *) context; + SDL_CameraThreadIterate(device); +} + +static void onDisconnected(void *context, ACameraDevice *device) +{ + #if DEBUG_CAMERA + SDL_Log("CAMERA: CB onDisconnected"); + #endif + SDL_CameraDisconnected((SDL_Camera *) context); +} + +static void onError(void *context, ACameraDevice *device, int error) +{ + #if DEBUG_CAMERA + SDL_Log("CAMERA: CB onError"); + #endif + SDL_CameraDisconnected((SDL_Camera *) context); +} + +static void onClosed(void* context, ACameraCaptureSession *session) +{ + // SDL_Camera *_this = (SDL_Camera *) context; + #if DEBUG_CAMERA + SDL_Log("CAMERA: CB onClosed"); + #endif +} + +static void onReady(void* context, ACameraCaptureSession *session) +{ + // SDL_Camera *_this = (SDL_Camera *) context; + #if DEBUG_CAMERA + SDL_Log("CAMERA: CB onReady"); + #endif +} + +static void onActive(void* context, ACameraCaptureSession *session) +{ + // SDL_Camera *_this = (SDL_Camera *) context; + #if DEBUG_CAMERA + SDL_Log("CAMERA: CB onActive"); + #endif +} + +static void ANDROIDCAMERA_CloseDevice(SDL_Camera *device) +{ + if (device && device->hidden) { + struct SDL_PrivateCameraData *hidden = device->hidden; + device->hidden = NULL; + + if (hidden->reader) { + pAImageReader_setImageListener(hidden->reader, NULL); + } + + if (hidden->session) { + pACameraCaptureSession_close(hidden->session); + } + + if (hidden->request) { + pACaptureRequest_free(hidden->request); + } + + if (hidden->outputTarget) { + pACameraOutputTarget_free(hidden->outputTarget); + } + + if (hidden->sessionOutputContainer) { + pACaptureSessionOutputContainer_free(hidden->sessionOutputContainer); + } + + if (hidden->sessionOutput) { + pACaptureSessionOutput_free(hidden->sessionOutput); + } + + // we don't free hidden->window here, it'll be cleaned up by AImageReader_delete. + + if (hidden->reader) { + pAImageReader_delete(hidden->reader); + } + + if (hidden->device) { + pACameraDevice_close(hidden->device); + } + + SDL_free(hidden); + } +} + +// this is where the "opening" of the camera happens, after permission is granted. 
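// To summarize the else-if chain below (a description, not added behavior): open the
// ACameraDevice, create an AImageReader sized to the requested spec, use the reader's
// ANativeWindow as both the capture-session output and the capture-request target,
// start a repeating TEMPLATE_RECORD request, and finally install the onImageAvailable
// listener that calls SDL_CameraThreadIterate for every frame the reader receives.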
+static bool PrepareCamera(SDL_Camera *device) +{ + SDL_assert(device->hidden != NULL); + + camera_status_t res; + media_status_t res2; + + ACameraDevice_StateCallbacks dev_callbacks; + SDL_zero(dev_callbacks); + dev_callbacks.context = device; + dev_callbacks.onDisconnected = onDisconnected; + dev_callbacks.onError = onError; + + ACameraCaptureSession_stateCallbacks capture_callbacks; + SDL_zero(capture_callbacks); + capture_callbacks.context = device; + capture_callbacks.onClosed = onClosed; + capture_callbacks.onReady = onReady; + capture_callbacks.onActive = onActive; + + AImageReader_ImageListener imglistener; + SDL_zero(imglistener); + imglistener.context = device; + imglistener.onImageAvailable = onImageAvailable; + + // just in case SDL_OpenCamera is overwriting device->spec as CameraPermissionCallback runs, we work from a different copy. + const SDL_CameraSpec *spec = &device->hidden->requested_spec; + + if ((res = pACameraManager_openCamera(cameraMgr, (const char *) device->handle, &dev_callbacks, &device->hidden->device)) != ACAMERA_OK) { + return SetCameraError("Failed to open camera", res); + } else if ((res2 = pAImageReader_new(spec->width, spec->height, format_sdl_to_android(spec->format), 10 /* nb buffers */, &device->hidden->reader)) != AMEDIA_OK) { + return SetMediaError("Error AImageReader_new", res2); + } else if ((res2 = pAImageReader_getWindow(device->hidden->reader, &device->hidden->window)) != AMEDIA_OK) { + return SetMediaError("Error AImageReader_getWindow", res2); + } else if ((res = pACaptureSessionOutput_create(device->hidden->window, &device->hidden->sessionOutput)) != ACAMERA_OK) { + return SetCameraError("Error ACaptureSessionOutput_create", res); + } else if ((res = pACaptureSessionOutputContainer_create(&device->hidden->sessionOutputContainer)) != ACAMERA_OK) { + return SetCameraError("Error ACaptureSessionOutputContainer_create", res); + } else if ((res = pACaptureSessionOutputContainer_add(device->hidden->sessionOutputContainer, device->hidden->sessionOutput)) != ACAMERA_OK) { + return SetCameraError("Error ACaptureSessionOutputContainer_add", res); + } else if ((res = pACameraOutputTarget_create(device->hidden->window, &device->hidden->outputTarget)) != ACAMERA_OK) { + return SetCameraError("Error ACameraOutputTarget_create", res); + } else if ((res = pACameraDevice_createCaptureRequest(device->hidden->device, TEMPLATE_RECORD, &device->hidden->request)) != ACAMERA_OK) { + return SetCameraError("Error ACameraDevice_createCaptureRequest", res); + } else if ((res = pACaptureRequest_addTarget(device->hidden->request, device->hidden->outputTarget)) != ACAMERA_OK) { + return SetCameraError("Error ACaptureRequest_addTarget", res); + } else if ((res = pACameraDevice_createCaptureSession(device->hidden->device, device->hidden->sessionOutputContainer, &capture_callbacks, &device->hidden->session)) != ACAMERA_OK) { + return SetCameraError("Error ACameraDevice_createCaptureSession", res); + } else if ((res = pACameraCaptureSession_setRepeatingRequest(device->hidden->session, NULL, 1, &device->hidden->request, NULL)) != ACAMERA_OK) { + return SetCameraError("Error ACameraCaptureSession_setRepeatingRequest", res); + } else if ((res2 = pAImageReader_setImageListener(device->hidden->reader, &imglistener)) != AMEDIA_OK) { + return SetMediaError("Error AImageReader_setImageListener", res2); + } + + return true; +} + +static void SDLCALL CameraPermissionCallback(void *userdata, const char *permission, bool granted) +{ + SDL_Camera *device = (SDL_Camera *) userdata; + if 
(device->hidden != NULL) { // if device was already closed, don't send an event. + if (!granted) { + SDL_CameraPermissionOutcome(device, false); // sorry, permission denied. + } else if (!PrepareCamera(device)) { // permission given? Actually open the camera now. + // uhoh, setup failed; since the app thinks we already "opened" the device, mark it as disconnected and don't report the permission. + SDL_CameraDisconnected(device); + } else { + // okay! We have permission to use the camera _and_ opening the hardware worked out, report that the camera is usable! + SDL_CameraPermissionOutcome(device, true); // go go go! + } + } + + UnrefPhysicalCamera(device); // we ref'd this in OpenDevice, release the extra reference. +} + + +static bool ANDROIDCAMERA_OpenDevice(SDL_Camera *device, const SDL_CameraSpec *spec) +{ +#if 0 // !!! FIXME: for now, we'll just let this fail if it is going to fail, without checking for this + /* Cannot open a second camera, while the first one is opened. + * If you want to play several camera, they must all be opened first, then played. + * + * https://developer.android.com/reference/android/hardware/camera2/CameraManager + * "All camera devices intended to be operated concurrently, must be opened using openCamera(String, CameraDevice.StateCallback, Handler), + * before configuring sessions on any of the camera devices. * " + * + */ + if (CheckDevicePlaying()) { + return SDL_SetError("A camera is already playing"); + } +#endif + + device->hidden = (struct SDL_PrivateCameraData *) SDL_calloc(1, sizeof (struct SDL_PrivateCameraData)); + if (device->hidden == NULL) { + return false; + } + + RefPhysicalCamera(device); // ref'd until permission callback fires. + + // just in case SDL_OpenCamera is overwriting device->spec as CameraPermissionCallback runs, we work from a different copy. + SDL_copyp(&device->hidden->requested_spec, spec); + if (!SDL_RequestAndroidPermission("android.permission.CAMERA", CameraPermissionCallback, device)) { + UnrefPhysicalCamera(device); + return false; + } + + return true; // we don't open the camera until permission is granted, so always succeed for now. +} + +static void ANDROIDCAMERA_FreeDeviceHandle(SDL_Camera *device) +{ + if (device) { + SDL_free(device->handle); + } +} + +static void GatherCameraSpecs(const char *devid, CameraFormatAddData *add_data, char **fullname, SDL_CameraPosition *position) +{ + SDL_zerop(add_data); + + ACameraMetadata *metadata = NULL; + ACameraMetadata_const_entry cfgentry; + ACameraMetadata_const_entry durentry; + ACameraMetadata_const_entry infoentry; + + // This can fail with an "unknown error" (with `adb logcat` reporting "no such file or directory") + // for "LEGACY" level cameras. I saw this happen on a 30-dollar budget phone I have for testing + // (but a different brand budget phone worked, so it's not strictly the low-end of Android devices). + // LEGACY devices are seen by onCameraAvailable, but are not otherwise accessible through + // libcamera2ndk. The Java camera2 API apparently _can_ access these cameras, but we're going on + // without them here for now, in hopes that such hardware is a dying breed. + if (pACameraManager_getCameraCharacteristics(cameraMgr, devid, &metadata) != ACAMERA_OK) { + return; // oh well. + } else if (pACameraMetadata_getConstEntry(metadata, ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &cfgentry) != ACAMERA_OK) { + pACameraMetadata_free(metadata); + return; // oh well. 
+ } else if (pACameraMetadata_getConstEntry(metadata, ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, &durentry) != ACAMERA_OK) { + pACameraMetadata_free(metadata); + return; // oh well. + } + + *fullname = NULL; + if (pACameraMetadata_getConstEntry(metadata, ACAMERA_INFO_VERSION, &infoentry) == ACAMERA_OK) { + *fullname = (char *) SDL_malloc(infoentry.count + 1); + if (*fullname) { + SDL_strlcpy(*fullname, (const char *) infoentry.data.u8, infoentry.count + 1); + } + } + + ACameraMetadata_const_entry posentry; + if (pACameraMetadata_getConstEntry(metadata, ACAMERA_LENS_FACING, &posentry) == ACAMERA_OK) { // ignore this if it fails. + if (*posentry.data.u8 == ACAMERA_LENS_FACING_FRONT) { + *position = SDL_CAMERA_POSITION_FRONT_FACING; + if (!*fullname) { + *fullname = SDL_strdup("Front-facing camera"); + } + } else if (*posentry.data.u8 == ACAMERA_LENS_FACING_BACK) { + *position = SDL_CAMERA_POSITION_BACK_FACING; + if (!*fullname) { + *fullname = SDL_strdup("Back-facing camera"); + } + } + } + + if (!*fullname) { + *fullname = SDL_strdup("Generic camera"); // we tried. + } + + const int32_t *i32ptr = cfgentry.data.i32; + for (int i = 0; i < cfgentry.count; i++, i32ptr += 4) { + const int32_t fmt = i32ptr[0]; + const int w = i32ptr[1]; + const int h = i32ptr[2]; + const int32_t type = i32ptr[3]; + SDL_PixelFormat sdlfmt = SDL_PIXELFORMAT_UNKNOWN; + SDL_Colorspace colorspace = SDL_COLORSPACE_UNKNOWN; + + if (type == ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT) { + continue; + } else if ((w <= 0) || (h <= 0)) { + continue; + } else { + format_android_to_sdl(fmt, &sdlfmt, &colorspace); + if (sdlfmt == SDL_PIXELFORMAT_UNKNOWN) { + continue; + } + } + +#if 0 // !!! FIXME: these all come out with 0 durations on my test phone. :( + const int64_t *i64ptr = durentry.data.i64; + for (int j = 0; j < durentry.count; j++, i64ptr += 4) { + const int32_t fpsfmt = (int32_t) i64ptr[0]; + const int fpsw = (int) i64ptr[1]; + const int fpsh = (int) i64ptr[2]; + const long long duration = (long long) i64ptr[3]; + SDL_Log("CAMERA: possible fps %s %dx%d duration=%lld", SDL_GetPixelFormatName(sdlfmt), fpsw, fpsh, duration); + if ((duration > 0) && (fpsfmt == fmt) && (fpsw == w) && (fpsh == h)) { + SDL_AddCameraFormat(add_data, sdlfmt, colorspace, w, h, 1000000000, duration); + } + } +#else + SDL_AddCameraFormat(add_data, sdlfmt, colorspace, w, h, 30, 1); +#endif + } + + pACameraMetadata_free(metadata); +} + +static bool FindAndroidCameraByID(SDL_Camera *device, void *userdata) +{ + const char *devid = (const char *) userdata; + return (SDL_strcmp(devid, (const char *) device->handle) == 0); +} + +static void MaybeAddDevice(const char *devid) +{ + #if DEBUG_CAMERA + SDL_Log("CAMERA: MaybeAddDevice('%s')", devid); + #endif + + if (SDL_FindPhysicalCameraByCallback(FindAndroidCameraByID, (void *) devid)) { + return; // already have this one. 
+ } + + SDL_CameraPosition position = SDL_CAMERA_POSITION_UNKNOWN; + char *fullname = NULL; + CameraFormatAddData add_data; + GatherCameraSpecs(devid, &add_data, &fullname, &position); + if (add_data.num_specs > 0) { + char *namecpy = SDL_strdup(devid); + if (namecpy) { + SDL_Camera *device = SDL_AddCamera(fullname, position, add_data.num_specs, add_data.specs, namecpy); + if (!device) { + SDL_free(namecpy); + } + } + } + + SDL_free(fullname); + SDL_free(add_data.specs); +} + +// note that camera "availability" covers both hotplugging and whether another +// has the device opened, but for something like Android, it's probably fine +// to treat both unplugging and loss of access as disconnection events. When +// the other app closes the camera, we get an available event as if it was +// just plugged back in. + +static void onCameraAvailable(void *context, const char *cameraId) +{ + #if DEBUG_CAMERA + SDL_Log("CAMERA: CB onCameraAvailable('%s')", cameraId); + #endif + SDL_assert(cameraId != NULL); + MaybeAddDevice(cameraId); +} + +static void onCameraUnavailable(void *context, const char *cameraId) +{ + #if DEBUG_CAMERA + SDL_Log("CAMERA: CB onCameraUnvailable('%s')", cameraId); + #endif + + SDL_assert(cameraId != NULL); + + // THIS CALLBACK FIRES WHEN YOU OPEN THE DEVICE YOURSELF. :( + // Make sure we don't have the device opened, in which case onDisconnected will fire instead if actually lost. + SDL_Camera *device = SDL_FindPhysicalCameraByCallback(FindAndroidCameraByID, (void *) cameraId); + if (device && !device->hidden) { + SDL_CameraDisconnected(device); + } +} + +static const ACameraManager_AvailabilityCallbacks camera_availability_listener = { + NULL, + onCameraAvailable, + onCameraUnavailable +}; + +static void ANDROIDCAMERA_DetectDevices(void) +{ + ACameraIdList *list = NULL; + camera_status_t res = pACameraManager_getCameraIdList(cameraMgr, &list); + + if ((res == ACAMERA_OK) && list) { + const int total = list->numCameras; + for (int i = 0; i < total; i++) { + MaybeAddDevice(list->cameraIds[i]); + } + + pACameraManager_deleteCameraIdList(list); + } + + pACameraManager_registerAvailabilityCallback(cameraMgr, &camera_availability_listener); +} + +static void ANDROIDCAMERA_Deinitialize(void) +{ + pACameraManager_unregisterAvailabilityCallback(cameraMgr, &camera_availability_listener); + DestroyCameraManager(); + + dlclose(libcamera2ndk); + libcamera2ndk = NULL; + pACameraManager_create = NULL; + pACameraManager_registerAvailabilityCallback = NULL; + pACameraManager_unregisterAvailabilityCallback = NULL; + pACameraManager_getCameraIdList = NULL; + pACameraManager_deleteCameraIdList = NULL; + pACameraCaptureSession_close = NULL; + pACaptureRequest_free = NULL; + pACameraOutputTarget_free = NULL; + pACameraDevice_close = NULL; + pACameraManager_delete = NULL; + pACaptureSessionOutputContainer_free = NULL; + pACaptureSessionOutput_free = NULL; + pACameraManager_openCamera = NULL; + pACameraDevice_createCaptureRequest = NULL; + pACameraDevice_createCaptureSession = NULL; + pACameraManager_getCameraCharacteristics = NULL; + pACameraMetadata_free = NULL; + pACameraMetadata_getConstEntry = NULL; + pACameraCaptureSession_setRepeatingRequest = NULL; + pACameraOutputTarget_create = NULL; + pACaptureRequest_addTarget = NULL; + pACaptureSessionOutputContainer_add = NULL; + pACaptureSessionOutputContainer_create = NULL; + pACaptureSessionOutput_create = NULL; + + dlclose(libmediandk); + libmediandk = NULL; + pAImage_delete = NULL; + pAImage_getTimestamp = NULL; + pAImage_getNumberOfPlanes = NULL; 
+ pAImage_getPlaneRowStride = NULL; + pAImage_getPlaneData = NULL; + pAImageReader_acquireNextImage = NULL; + pAImageReader_delete = NULL; + pAImageReader_setImageListener = NULL; + pAImageReader_getWindow = NULL; + pAImageReader_new = NULL; +} + +static bool ANDROIDCAMERA_Init(SDL_CameraDriverImpl *impl) +{ + // !!! FIXME: slide this off into a subroutine + // system libraries are in android-24 and later; we currently target android-16 and later, so check if they exist at runtime. + void *libcamera2 = dlopen("libcamera2ndk.so", RTLD_NOW | RTLD_LOCAL); + if (!libcamera2) { + SDL_Log("CAMERA: libcamera2ndk.so can't be loaded: %s", dlerror()); + return false; + } + + void *libmedia = dlopen("libmediandk.so", RTLD_NOW | RTLD_LOCAL); + if (!libmedia) { + SDL_Log("CAMERA: libmediandk.so can't be loaded: %s", dlerror()); + dlclose(libcamera2); + return false; + } + + bool okay = true; + #define LOADSYM(lib, fn) if (okay) { p##fn = (pfn##fn) dlsym(lib, #fn); if (!p##fn) { SDL_Log("CAMERA: symbol '%s' can't be found in %s: %s", #fn, #lib "ndk.so", dlerror()); okay = false; } } + //#define LOADSYM(lib, fn) p##fn = (pfn##fn) fn + LOADSYM(libcamera2, ACameraManager_create); + LOADSYM(libcamera2, ACameraManager_registerAvailabilityCallback); + LOADSYM(libcamera2, ACameraManager_unregisterAvailabilityCallback); + LOADSYM(libcamera2, ACameraManager_getCameraIdList); + LOADSYM(libcamera2, ACameraManager_deleteCameraIdList); + LOADSYM(libcamera2, ACameraCaptureSession_close); + LOADSYM(libcamera2, ACaptureRequest_free); + LOADSYM(libcamera2, ACameraOutputTarget_free); + LOADSYM(libcamera2, ACameraDevice_close); + LOADSYM(libcamera2, ACameraManager_delete); + LOADSYM(libcamera2, ACaptureSessionOutputContainer_free); + LOADSYM(libcamera2, ACaptureSessionOutput_free); + LOADSYM(libcamera2, ACameraManager_openCamera); + LOADSYM(libcamera2, ACameraDevice_createCaptureRequest); + LOADSYM(libcamera2, ACameraDevice_createCaptureSession); + LOADSYM(libcamera2, ACameraManager_getCameraCharacteristics); + LOADSYM(libcamera2, ACameraMetadata_free); + LOADSYM(libcamera2, ACameraMetadata_getConstEntry); + LOADSYM(libcamera2, ACameraCaptureSession_setRepeatingRequest); + LOADSYM(libcamera2, ACameraOutputTarget_create); + LOADSYM(libcamera2, ACaptureRequest_addTarget); + LOADSYM(libcamera2, ACaptureSessionOutputContainer_add); + LOADSYM(libcamera2, ACaptureSessionOutputContainer_create); + LOADSYM(libcamera2, ACaptureSessionOutput_create); + LOADSYM(libmedia, AImage_delete); + LOADSYM(libmedia, AImage_getTimestamp); + LOADSYM(libmedia, AImage_getNumberOfPlanes); + LOADSYM(libmedia, AImage_getPlaneRowStride); + LOADSYM(libmedia, AImage_getPlaneData); + LOADSYM(libmedia, AImageReader_acquireNextImage); + LOADSYM(libmedia, AImageReader_delete); + LOADSYM(libmedia, AImageReader_setImageListener); + LOADSYM(libmedia, AImageReader_getWindow); + LOADSYM(libmedia, AImageReader_new); + LOADSYM(libmedia, AImage_getWidth); + LOADSYM(libmedia, AImage_getHeight); + + #undef LOADSYM + + if (!okay) { + dlclose(libmedia); + dlclose(libcamera2); + } + + if (!CreateCameraManager()) { + dlclose(libmedia); + dlclose(libcamera2); + return false; + } + + libcamera2ndk = libcamera2; + libmediandk = libmedia; + + impl->DetectDevices = ANDROIDCAMERA_DetectDevices; + impl->OpenDevice = ANDROIDCAMERA_OpenDevice; + impl->CloseDevice = ANDROIDCAMERA_CloseDevice; + impl->WaitDevice = ANDROIDCAMERA_WaitDevice; + impl->AcquireFrame = ANDROIDCAMERA_AcquireFrame; + impl->ReleaseFrame = ANDROIDCAMERA_ReleaseFrame; + impl->FreeDeviceHandle = 
ANDROIDCAMERA_FreeDeviceHandle; + impl->Deinitialize = ANDROIDCAMERA_Deinitialize; + + impl->ProvidesOwnCallbackThread = true; + + return true; +} + +CameraBootStrap ANDROIDCAMERA_bootstrap = { + "android", "SDL Android camera driver", ANDROIDCAMERA_Init, false +}; + +#endif diff --git a/contrib/SDL-3.2.8/src/camera/coremedia/SDL_camera_coremedia.m b/contrib/SDL-3.2.8/src/camera/coremedia/SDL_camera_coremedia.m new file mode 100644 index 0000000..2ecfd13 --- /dev/null +++ b/contrib/SDL-3.2.8/src/camera/coremedia/SDL_camera_coremedia.m @@ -0,0 +1,508 @@ +/* + Simple DirectMedia Layer + Copyright (C) 1997-2025 Sam Lantinga + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. +*/ +#include "SDL_internal.h" + +#ifdef SDL_CAMERA_DRIVER_COREMEDIA + +#include "../SDL_syscamera.h" +#include "../SDL_camera_c.h" +#include "../../thread/SDL_systhread.h" + +#import +#import + +/* + * Need to link with:: CoreMedia CoreVideo + * + * Add in pInfo.list: + * NSCameraUsageDescription Access camera + * + * + * MACOSX: + * Add to the Code Sign Entitlement file: + * com.apple.security.device.camera + */ + +static void CoreMediaFormatToSDL(FourCharCode fmt, SDL_PixelFormat *pixel_format, SDL_Colorspace *colorspace) +{ + switch (fmt) { + #define CASE(x, y, z) case x: *pixel_format = y; *colorspace = z; return + // the 16LE ones should use 16BE if we're on a Bigendian system like PowerPC, + // but at current time there is no bigendian Apple platform that has CoreMedia. 
+ CASE(kCMPixelFormat_16LE555, SDL_PIXELFORMAT_XRGB1555, SDL_COLORSPACE_SRGB); + CASE(kCMPixelFormat_16LE5551, SDL_PIXELFORMAT_RGBA5551, SDL_COLORSPACE_SRGB); + CASE(kCMPixelFormat_16LE565, SDL_PIXELFORMAT_RGB565, SDL_COLORSPACE_SRGB); + CASE(kCMPixelFormat_24RGB, SDL_PIXELFORMAT_RGB24, SDL_COLORSPACE_SRGB); + CASE(kCMPixelFormat_32ARGB, SDL_PIXELFORMAT_ARGB32, SDL_COLORSPACE_SRGB); + CASE(kCMPixelFormat_32BGRA, SDL_PIXELFORMAT_BGRA32, SDL_COLORSPACE_SRGB); + CASE(kCMPixelFormat_422YpCbCr8, SDL_PIXELFORMAT_UYVY, SDL_COLORSPACE_BT709_LIMITED); + CASE(kCMPixelFormat_422YpCbCr8_yuvs, SDL_PIXELFORMAT_YUY2, SDL_COLORSPACE_BT709_LIMITED); + CASE(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_LIMITED); + CASE(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_FULL); + CASE(kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange, SDL_PIXELFORMAT_P010, SDL_COLORSPACE_BT2020_LIMITED); + CASE(kCVPixelFormatType_420YpCbCr10BiPlanarFullRange, SDL_PIXELFORMAT_P010, SDL_COLORSPACE_BT2020_FULL); + #undef CASE + default: + #if DEBUG_CAMERA + SDL_Log("CAMERA: Unknown format FourCharCode '%d'", (int) fmt); + #endif + break; + } + *pixel_format = SDL_PIXELFORMAT_UNKNOWN; + *colorspace = SDL_COLORSPACE_UNKNOWN; +} + +@class SDLCaptureVideoDataOutputSampleBufferDelegate; + +// just a simple wrapper to help ARC manage memory... +@interface SDLPrivateCameraData : NSObject +@property(nonatomic, retain) AVCaptureSession *session; +@property(nonatomic, retain) SDLCaptureVideoDataOutputSampleBufferDelegate *delegate; +@property(nonatomic, assign) CMSampleBufferRef current_sample; +@end + +@implementation SDLPrivateCameraData +@end + + +static bool CheckCameraPermissions(SDL_Camera *device) +{ + if (device->permission == 0) { // still expecting a permission result. + if (@available(macOS 14, *)) { + const AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]; + if (status != AVAuthorizationStatusNotDetermined) { // NotDetermined == still waiting for an answer from the user. + SDL_CameraPermissionOutcome(device, (status == AVAuthorizationStatusAuthorized) ? true : false); + } + } else { + SDL_CameraPermissionOutcome(device, true); // always allowed (or just unqueryable...?) on older macOS. + } + } + + return (device->permission > 0); +} + +// this delegate just receives new video frames on a Grand Central Dispatch queue, and fires off the +// main device thread iterate function directly to consume it. +@interface SDLCaptureVideoDataOutputSampleBufferDelegate : NSObject + @property SDL_Camera *device; + -(id) init:(SDL_Camera *) dev; + -(void) captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection; +@end + +@implementation SDLCaptureVideoDataOutputSampleBufferDelegate + + -(id) init:(SDL_Camera *) dev { + if ( self = [super init] ) { + _device = dev; + } + return self; + } + + - (void) captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection + { + SDL_Camera *device = self.device; + if (!device || !device->hidden) { + return; // oh well. + } + + if (!CheckCameraPermissions(device)) { + return; // nothing to do right now, dump what is probably a completely black frame. 
+ } + + SDLPrivateCameraData *hidden = (__bridge SDLPrivateCameraData *) device->hidden; + hidden.current_sample = sampleBuffer; + SDL_CameraThreadIterate(device); + hidden.current_sample = NULL; + } + + - (void)captureOutput:(AVCaptureOutput *)output didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection + { + #if DEBUG_CAMERA + SDL_Log("CAMERA: Drop frame."); + #endif + } +@end + +static bool COREMEDIA_WaitDevice(SDL_Camera *device) +{ + return true; // this isn't used atm, since we run our own thread out of Grand Central Dispatch. +} + +static SDL_CameraFrameResult COREMEDIA_AcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS) +{ + SDL_CameraFrameResult result = SDL_CAMERA_FRAME_READY; + SDLPrivateCameraData *hidden = (__bridge SDLPrivateCameraData *) device->hidden; + CMSampleBufferRef sample_buffer = hidden.current_sample; + hidden.current_sample = NULL; + SDL_assert(sample_buffer != NULL); // should only have been called from our delegate with a new frame. + + CMSampleTimingInfo timinginfo; + if (CMSampleBufferGetSampleTimingInfo(sample_buffer, 0, &timinginfo) == noErr) { + *timestampNS = (Uint64) (CMTimeGetSeconds(timinginfo.presentationTimeStamp) * ((Float64) SDL_NS_PER_SECOND)); + } else { + SDL_assert(!"this shouldn't happen, I think."); + *timestampNS = 0; + } + + CVImageBufferRef image = CMSampleBufferGetImageBuffer(sample_buffer); // does not retain `image` (and we don't want it to). + const int numPlanes = (int) CVPixelBufferGetPlaneCount(image); + const int planar = (int) CVPixelBufferIsPlanar(image); + + #if DEBUG_CAMERA + const int w = (int) CVPixelBufferGetWidth(image); + const int h = (int) CVPixelBufferGetHeight(image); + const int sz = (int) CVPixelBufferGetDataSize(image); + const int pitch = (int) CVPixelBufferGetBytesPerRow(image); + SDL_Log("CAMERA: buffer planar=%d numPlanes=%d %d x %d sz=%d pitch=%d", planar, numPlanes, w, h, sz, pitch); + #endif + + // !!! FIXME: this currently copies the data to the surface (see FIXME about non-contiguous planar surfaces, but in theory we could just keep this locked until ReleaseFrame... + CVPixelBufferLockBaseAddress(image, 0); + + frame->w = (int)CVPixelBufferGetWidth(image); + frame->h = (int)CVPixelBufferGetHeight(image); + + if ((planar == 0) && (numPlanes == 0)) { + const int pitch = (int) CVPixelBufferGetBytesPerRow(image); + const size_t buflen = pitch * frame->h; + frame->pixels = SDL_aligned_alloc(SDL_GetSIMDAlignment(), buflen); + if (frame->pixels == NULL) { + result = SDL_CAMERA_FRAME_ERROR; + } else { + frame->pitch = pitch; + SDL_memcpy(frame->pixels, CVPixelBufferGetBaseAddress(image), buflen); + } + } else { + // !!! FIXME: we have an open issue in SDL3 to allow SDL_Surface to support non-contiguous planar data, but we don't have it yet. 
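        // In the meantime, the planes are packed back to back into one contiguous
        // allocation below and the surface pitch is taken from plane 0; for the
        // biplanar NV12/P010 formats this typically places the CbCr plane at
        // frame->pixels + frame->pitch * frame->h.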
+ size_t buflen = 0; + for (int i = 0; i < numPlanes; i++) { + size_t plane_height = CVPixelBufferGetHeightOfPlane(image, i); + size_t plane_pitch = CVPixelBufferGetBytesPerRowOfPlane(image, i); + size_t plane_size = (plane_pitch * plane_height); + buflen += plane_size; + } + + frame->pitch = (int)CVPixelBufferGetBytesPerRowOfPlane(image, 0); // this is what SDL3 currently expects + frame->pixels = SDL_aligned_alloc(SDL_GetSIMDAlignment(), buflen); + if (frame->pixels == NULL) { + result = SDL_CAMERA_FRAME_ERROR; + } else { + Uint8 *dst = frame->pixels; + for (int i = 0; i < numPlanes; i++) { + const void *src = CVPixelBufferGetBaseAddressOfPlane(image, i); + size_t plane_height = CVPixelBufferGetHeightOfPlane(image, i); + size_t plane_pitch = CVPixelBufferGetBytesPerRowOfPlane(image, i); + size_t plane_size = (plane_pitch * plane_height); + SDL_memcpy(dst, src, plane_size); + dst += plane_size; + } + } + } + + CVPixelBufferUnlockBaseAddress(image, 0); + + return result; +} + +static void COREMEDIA_ReleaseFrame(SDL_Camera *device, SDL_Surface *frame) +{ + // !!! FIXME: this currently copies the data to the surface, but in theory we could just keep this locked until ReleaseFrame... + SDL_aligned_free(frame->pixels); +} + +static void COREMEDIA_CloseDevice(SDL_Camera *device) +{ + if (device && device->hidden) { + SDLPrivateCameraData *hidden = (SDLPrivateCameraData *) CFBridgingRelease(device->hidden); + device->hidden = NULL; + + AVCaptureSession *session = hidden.session; + if (session) { + hidden.session = nil; + [session stopRunning]; + [session removeInput:[session.inputs objectAtIndex:0]]; + [session removeOutput:(AVCaptureVideoDataOutput*)[session.outputs objectAtIndex:0]]; + session = nil; + } + + hidden.delegate = NULL; + hidden.current_sample = NULL; + } +} + +static bool COREMEDIA_OpenDevice(SDL_Camera *device, const SDL_CameraSpec *spec) +{ + AVCaptureDevice *avdevice = (__bridge AVCaptureDevice *) device->handle; + + // Pick format that matches the spec + const int w = spec->width; + const int h = spec->height; + const float rate = (float)spec->framerate_numerator / spec->framerate_denominator; + AVCaptureDeviceFormat *spec_format = nil; + NSArray *formats = [avdevice formats]; + for (AVCaptureDeviceFormat *format in formats) { + CMFormatDescriptionRef formatDescription = [format formatDescription]; + SDL_PixelFormat device_format = SDL_PIXELFORMAT_UNKNOWN; + SDL_Colorspace device_colorspace = SDL_COLORSPACE_UNKNOWN; + CoreMediaFormatToSDL(CMFormatDescriptionGetMediaSubType(formatDescription), &device_format, &device_colorspace); + if (device_format != spec->format || device_colorspace != spec->colorspace) { + continue; + } + + const CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDescription); + if ((int)dim.width != w || (int)dim.height != h) { + continue; + } + + const float FRAMERATE_EPSILON = 0.01f; + for (AVFrameRateRange *framerate in format.videoSupportedFrameRateRanges) { + if (rate > (framerate.minFrameRate - FRAMERATE_EPSILON) && + rate < (framerate.maxFrameRate + FRAMERATE_EPSILON)) { + spec_format = format; + break; + } + } + + if (spec_format != nil) { + break; + } + } + + if (spec_format == nil) { + return SDL_SetError("camera spec format not available"); + } else if (![avdevice lockForConfiguration:NULL]) { + return SDL_SetError("Cannot lockForConfiguration"); + } + + avdevice.activeFormat = spec_format; + [avdevice unlockForConfiguration]; + + AVCaptureSession *session = [[AVCaptureSession alloc] init]; + if (session == nil) { + return 
SDL_SetError("Failed to allocate/init AVCaptureSession"); + } + + session.sessionPreset = AVCaptureSessionPresetHigh; +#if defined(SDL_PLATFORM_IOS) + if (@available(iOS 10.0, tvOS 17.0, *)) { + session.automaticallyConfiguresCaptureDeviceForWideColor = NO; + } +#endif + + NSError *error = nil; + AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:avdevice error:&error]; + if (!input) { + return SDL_SetError("Cannot create AVCaptureDeviceInput"); + } + + AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init]; + if (!output) { + return SDL_SetError("Cannot create AVCaptureVideoDataOutput"); + } + + output.videoSettings = @{ + (id)kCVPixelBufferWidthKey : @(spec->width), + (id)kCVPixelBufferHeightKey : @(spec->height), + (id)kCVPixelBufferPixelFormatTypeKey : @(CMFormatDescriptionGetMediaSubType([spec_format formatDescription])) + }; + + char threadname[64]; + SDL_GetCameraThreadName(device, threadname, sizeof (threadname)); + dispatch_queue_t queue = dispatch_queue_create(threadname, NULL); + //dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0); + if (!queue) { + return SDL_SetError("dispatch_queue_create() failed"); + } + + SDLCaptureVideoDataOutputSampleBufferDelegate *delegate = [[SDLCaptureVideoDataOutputSampleBufferDelegate alloc] init:device]; + if (delegate == nil) { + return SDL_SetError("Cannot create SDLCaptureVideoDataOutputSampleBufferDelegate"); + } + [output setSampleBufferDelegate:delegate queue:queue]; + + if (![session canAddInput:input]) { + return SDL_SetError("Cannot add AVCaptureDeviceInput"); + } + [session addInput:input]; + + if (![session canAddOutput:output]) { + return SDL_SetError("Cannot add AVCaptureVideoDataOutput"); + } + [session addOutput:output]; + + [session commitConfiguration]; + + SDLPrivateCameraData *hidden = [[SDLPrivateCameraData alloc] init]; + if (hidden == nil) { + return SDL_SetError("Cannot create SDLPrivateCameraData"); + } + + hidden.session = session; + hidden.delegate = delegate; + hidden.current_sample = NULL; + device->hidden = (struct SDL_PrivateCameraData *)CFBridgingRetain(hidden); + + [session startRunning]; // !!! FIXME: docs say this can block while camera warms up and shouldn't be done on main thread. Maybe push through `queue`? + + CheckCameraPermissions(device); // check right away, in case the process is already granted permission. 
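    // If authorization is still pending at this point, the same check runs again from
    // the sample-buffer delegate for each incoming frame until AVFoundation reports the
    // user's decision, and only then is the permission outcome delivered to the app.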
+ + return true; +} + +static void COREMEDIA_FreeDeviceHandle(SDL_Camera *device) +{ + if (device && device->handle) { + CFBridgingRelease(device->handle); + } +} + +static void GatherCameraSpecs(AVCaptureDevice *device, CameraFormatAddData *add_data) +{ + SDL_zerop(add_data); + + for (AVCaptureDeviceFormat *fmt in device.formats) { + if (CMFormatDescriptionGetMediaType(fmt.formatDescription) != kCMMediaType_Video) { + continue; + } + +//NSLog(@"Available camera format: %@\n", fmt); + SDL_PixelFormat device_format = SDL_PIXELFORMAT_UNKNOWN; + SDL_Colorspace device_colorspace = SDL_COLORSPACE_UNKNOWN; + CoreMediaFormatToSDL(CMFormatDescriptionGetMediaSubType(fmt.formatDescription), &device_format, &device_colorspace); + if (device_format == SDL_PIXELFORMAT_UNKNOWN) { + continue; + } + + const CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(fmt.formatDescription); + const int w = (int) dims.width; + const int h = (int) dims.height; + for (AVFrameRateRange *framerate in fmt.videoSupportedFrameRateRanges) { + int min_numerator = 0, min_denominator = 1; + int max_numerator = 0, max_denominator = 1; + + SDL_CalculateFraction(framerate.minFrameRate, &min_numerator, &min_denominator); + SDL_AddCameraFormat(add_data, device_format, device_colorspace, w, h, min_numerator, min_denominator); + SDL_CalculateFraction(framerate.maxFrameRate, &max_numerator, &max_denominator); + if (max_numerator != min_numerator || max_denominator != min_denominator) { + SDL_AddCameraFormat(add_data, device_format, device_colorspace, w, h, max_numerator, max_denominator); + } + } + } +} + +static bool FindCoreMediaCameraByUniqueID(SDL_Camera *device, void *userdata) +{ + NSString *uniqueid = (__bridge NSString *) userdata; + AVCaptureDevice *avdev = (__bridge AVCaptureDevice *) device->handle; + return ([uniqueid isEqualToString:avdev.uniqueID]) ? true : false; +} + +static void MaybeAddDevice(AVCaptureDevice *avdevice) +{ + if (!avdevice.connected) { + return; // not connected. + } else if (![avdevice hasMediaType:AVMediaTypeVideo]) { + return; // not a camera. + } else if (SDL_FindPhysicalCameraByCallback(FindCoreMediaCameraByUniqueID, (__bridge void *) avdevice.uniqueID)) { + return; // already have this one. + } + + CameraFormatAddData add_data; + GatherCameraSpecs(avdevice, &add_data); + if (add_data.num_specs > 0) { + SDL_CameraPosition position = SDL_CAMERA_POSITION_UNKNOWN; + if (avdevice.position == AVCaptureDevicePositionFront) { + position = SDL_CAMERA_POSITION_FRONT_FACING; + } else if (avdevice.position == AVCaptureDevicePositionBack) { + position = SDL_CAMERA_POSITION_BACK_FACING; + } + SDL_AddCamera(avdevice.localizedName.UTF8String, position, add_data.num_specs, add_data.specs, (void *) CFBridgingRetain(avdevice)); + } + + SDL_free(add_data.specs); +} + +static void COREMEDIA_DetectDevices(void) +{ + NSArray *devices = nil; + + if (@available(macOS 10.15, iOS 13, *)) { + // kind of annoying that there isn't a "give me anything that looks like a camera" option, + // so this list will need to be updated when Apple decides to add + // AVCaptureDeviceTypeBuiltInQuadrupleCamera some day. 
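        // (On macOS, AVCaptureDeviceTypeExternalUnknown is how external USB webcams
        // show up; the iOS-only entries cover the various built-in camera clusters.)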
+ NSArray *device_types = @[ + #ifdef SDL_PLATFORM_IOS + AVCaptureDeviceTypeBuiltInTelephotoCamera, + AVCaptureDeviceTypeBuiltInDualCamera, + AVCaptureDeviceTypeBuiltInDualWideCamera, + AVCaptureDeviceTypeBuiltInTripleCamera, + AVCaptureDeviceTypeBuiltInUltraWideCamera, + #else + AVCaptureDeviceTypeExternalUnknown, + #endif + AVCaptureDeviceTypeBuiltInWideAngleCamera + ]; + + AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession + discoverySessionWithDeviceTypes:device_types + mediaType:AVMediaTypeVideo + position:AVCaptureDevicePositionUnspecified]; + + devices = discoverySession.devices; + // !!! FIXME: this can use Key Value Observation to get hotplug events. + } else { + // this is deprecated but works back to macOS 10.7; 10.15 added AVCaptureDeviceDiscoverySession as a replacement. + devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + // !!! FIXME: this can use AVCaptureDeviceWasConnectedNotification and AVCaptureDeviceWasDisconnectedNotification with NSNotificationCenter to get hotplug events. + } + + for (AVCaptureDevice *device in devices) { + MaybeAddDevice(device); + } +} + +static void COREMEDIA_Deinitialize(void) +{ + // !!! FIXME: disable hotplug. +} + +static bool COREMEDIA_Init(SDL_CameraDriverImpl *impl) +{ + impl->DetectDevices = COREMEDIA_DetectDevices; + impl->OpenDevice = COREMEDIA_OpenDevice; + impl->CloseDevice = COREMEDIA_CloseDevice; + impl->WaitDevice = COREMEDIA_WaitDevice; + impl->AcquireFrame = COREMEDIA_AcquireFrame; + impl->ReleaseFrame = COREMEDIA_ReleaseFrame; + impl->FreeDeviceHandle = COREMEDIA_FreeDeviceHandle; + impl->Deinitialize = COREMEDIA_Deinitialize; + + impl->ProvidesOwnCallbackThread = true; + + return true; +} + +CameraBootStrap COREMEDIA_bootstrap = { + "coremedia", "SDL Apple CoreMedia camera driver", COREMEDIA_Init, false +}; + +#endif // SDL_CAMERA_DRIVER_COREMEDIA + diff --git a/contrib/SDL-3.2.8/src/camera/dummy/SDL_camera_dummy.c b/contrib/SDL-3.2.8/src/camera/dummy/SDL_camera_dummy.c new file mode 100644 index 0000000..b2a4dc1 --- /dev/null +++ b/contrib/SDL-3.2.8/src/camera/dummy/SDL_camera_dummy.c @@ -0,0 +1,81 @@ +/* + Simple DirectMedia Layer + Copyright (C) 1997-2025 Sam Lantinga + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. 
+*/ +#include "SDL_internal.h" + +#ifdef SDL_CAMERA_DRIVER_DUMMY + +#include "../SDL_syscamera.h" + +static bool DUMMYCAMERA_OpenDevice(SDL_Camera *device, const SDL_CameraSpec *spec) +{ + return SDL_Unsupported(); +} + +static void DUMMYCAMERA_CloseDevice(SDL_Camera *device) +{ +} + +static bool DUMMYCAMERA_WaitDevice(SDL_Camera *device) +{ + return SDL_Unsupported(); +} + +static SDL_CameraFrameResult DUMMYCAMERA_AcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS) +{ + SDL_Unsupported(); + return SDL_CAMERA_FRAME_ERROR; +} + +static void DUMMYCAMERA_ReleaseFrame(SDL_Camera *device, SDL_Surface *frame) +{ +} + +static void DUMMYCAMERA_DetectDevices(void) +{ +} + +static void DUMMYCAMERA_FreeDeviceHandle(SDL_Camera *device) +{ +} + +static void DUMMYCAMERA_Deinitialize(void) +{ +} + +static bool DUMMYCAMERA_Init(SDL_CameraDriverImpl *impl) +{ + impl->DetectDevices = DUMMYCAMERA_DetectDevices; + impl->OpenDevice = DUMMYCAMERA_OpenDevice; + impl->CloseDevice = DUMMYCAMERA_CloseDevice; + impl->WaitDevice = DUMMYCAMERA_WaitDevice; + impl->AcquireFrame = DUMMYCAMERA_AcquireFrame; + impl->ReleaseFrame = DUMMYCAMERA_ReleaseFrame; + impl->FreeDeviceHandle = DUMMYCAMERA_FreeDeviceHandle; + impl->Deinitialize = DUMMYCAMERA_Deinitialize; + + return true; +} + +CameraBootStrap DUMMYCAMERA_bootstrap = { + "dummy", "SDL dummy camera driver", DUMMYCAMERA_Init, true +}; + +#endif // SDL_CAMERA_DRIVER_DUMMY diff --git a/contrib/SDL-3.2.8/src/camera/emscripten/SDL_camera_emscripten.c b/contrib/SDL-3.2.8/src/camera/emscripten/SDL_camera_emscripten.c new file mode 100644 index 0000000..fa2a511 --- /dev/null +++ b/contrib/SDL-3.2.8/src/camera/emscripten/SDL_camera_emscripten.c @@ -0,0 +1,275 @@ +/* + Simple DirectMedia Layer + Copyright (C) 1997-2025 Sam Lantinga + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. +*/ +#include "SDL_internal.h" + +#ifdef SDL_CAMERA_DRIVER_EMSCRIPTEN + +#include "../SDL_syscamera.h" +#include "../SDL_camera_c.h" +#include "../../video/SDL_pixels_c.h" +#include "../../video/SDL_surface_c.h" + +#include + +// just turn off clang-format for this whole file, this INDENT_OFF stuff on +// each EM_ASM section is ugly. +/* *INDENT-OFF* */ // clang-format off + +EM_JS_DEPS(sdlcamera, "$dynCall"); + +static bool EMSCRIPTENCAMERA_WaitDevice(SDL_Camera *device) +{ + SDL_assert(!"This shouldn't be called"); // we aren't using SDL's internal thread. + return false; +} + +static SDL_CameraFrameResult EMSCRIPTENCAMERA_AcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS) +{ + void *rgba = SDL_malloc(device->actual_spec.width * device->actual_spec.height * 4); + if (!rgba) { + return SDL_CAMERA_FRAME_ERROR; + } + + *timestampNS = SDL_GetTicksNS(); // best we can do here. 
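    // Editor's note (illustrative, not part of the upstream patch): the EM_ASM
    // block below draws the hidden <video> element into a 2D canvas and copies
    // the resulting ImageData straight into the wasm heap at `rgba`.
    // getImageData() always yields tightly-packed RGBA8888, so the buffer
    // allocated above is exactly w * h * 4 bytes (e.g. 640x480 -> 1,228,800
    // bytes), and the permission-outcome callback further down pins
    // actual_spec.format to SDL_PIXELFORMAT_RGBA32 to match.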
+ + const int rc = MAIN_THREAD_EM_ASM_INT({ + const w = $0; + const h = $1; + const rgba = $2; + const SDL3 = Module['SDL3']; + if ((typeof(SDL3) === 'undefined') || (typeof(SDL3.camera) === 'undefined') || (typeof(SDL3.camera.ctx2d) === 'undefined')) { + return 0; // don't have something we need, oh well. + } + + SDL3.camera.ctx2d.drawImage(SDL3.camera.video, 0, 0, w, h); + const imgrgba = SDL3.camera.ctx2d.getImageData(0, 0, w, h).data; + Module.HEAPU8.set(imgrgba, rgba); + + return 1; + }, device->actual_spec.width, device->actual_spec.height, rgba); + + if (!rc) { + SDL_free(rgba); + return SDL_CAMERA_FRAME_ERROR; // something went wrong, maybe shutting down; just don't return a frame. + } + + frame->pixels = rgba; + frame->pitch = device->actual_spec.width * 4; + + return SDL_CAMERA_FRAME_READY; +} + +static void EMSCRIPTENCAMERA_ReleaseFrame(SDL_Camera *device, SDL_Surface *frame) +{ + SDL_free(frame->pixels); +} + +static void EMSCRIPTENCAMERA_CloseDevice(SDL_Camera *device) +{ + if (device) { + MAIN_THREAD_EM_ASM({ + const SDL3 = Module['SDL3']; + if ((typeof(SDL3) === 'undefined') || (typeof(SDL3.camera) === 'undefined') || (typeof(SDL3.camera.stream) === 'undefined')) { + return; // camera was closed and/or subsystem was shut down, we're already done. + } + SDL3.camera.stream.getTracks().forEach(track => track.stop()); // stop all recording. + SDL3.camera = {}; // dump our references to everything. + }); + SDL_free(device->hidden); + device->hidden = NULL; + } +} + +static int SDLEmscriptenCameraPermissionOutcome(SDL_Camera *device, int approved, int w, int h, int fps) +{ + if (approved) { + device->actual_spec.format = SDL_PIXELFORMAT_RGBA32; + device->actual_spec.width = w; + device->actual_spec.height = h; + device->actual_spec.framerate_numerator = fps; + device->actual_spec.framerate_denominator = 1; + + if (!SDL_PrepareCameraSurfaces(device)) { + // uhoh, we're in trouble. Probably ran out of memory. + SDL_LogError(SDL_LOG_CATEGORY_ERROR, "Camera could not prepare surfaces: %s ... revoking approval!", SDL_GetError()); + approved = 0; // disconnecting the SDL camera might not be safe here, just mark it as denied by user. + } + } + + SDL_CameraPermissionOutcome(device, approved ? true : false); + return approved; +} + +static bool EMSCRIPTENCAMERA_OpenDevice(SDL_Camera *device, const SDL_CameraSpec *spec) +{ + MAIN_THREAD_EM_ASM({ + // Since we can't get actual specs until we make a move that prompts the user for + // permission, we don't list any specs for the device and wrangle it during device open. + const device = $0; + const w = $1; + const h = $2; + const framerate_numerator = $3; + const framerate_denominator = $4; + const outcome = $5; + const iterate = $6; + + const constraints = {}; + if ((w <= 0) || (h <= 0)) { + constraints.video = true; // didn't ask for anything, let the system choose. + } else { + constraints.video = {}; // asked for a specific thing: request it as "ideal" but take closest hardware will offer. + constraints.video.width = w; + constraints.video.height = h; + } + + if ((framerate_numerator > 0) && (framerate_denominator > 0)) { + var fps = framerate_numerator / framerate_denominator; + constraints.video.frameRate = { ideal: fps }; + } + + function grabNextCameraFrame() { // !!! FIXME: this (currently) runs as a requestAnimationFrame callback, for lack of a better option. 
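            // Editor's note (illustrative, not part of the upstream patch):
            // fpsincrms is 1000 / fps, i.e. roughly 33.33 ms at 30 fps. The
            // pacing below only grabs a frame once performance.now() passes
            // next_frame_time, then advances next_frame_time in fpsincrms
            // steps until it is ahead of "now" again -- so after a long stall
            // frames are simply dropped instead of being delivered in a burst.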
+ const SDL3 = Module['SDL3']; + if ((typeof(SDL3) === 'undefined') || (typeof(SDL3.camera) === 'undefined') || (typeof(SDL3.camera.stream) === 'undefined')) { + return; // camera was closed and/or subsystem was shut down, stop iterating here. + } + + // time for a new frame from the camera? + const nextframems = SDL3.camera.next_frame_time; + const now = performance.now(); + if (now >= nextframems) { + dynCall('vi', iterate, [device]); // calls SDL_CameraThreadIterate, which will call our AcquireFrame implementation. + + // bump ahead but try to stay consistent on timing, in case we dropped frames. + while (SDL3.camera.next_frame_time < now) { + SDL3.camera.next_frame_time += SDL3.camera.fpsincrms; + } + } + + requestAnimationFrame(grabNextCameraFrame); // run this function again at the display framerate. (!!! FIXME: would this be better as requestIdleCallback?) + } + + navigator.mediaDevices.getUserMedia(constraints) + .then((stream) => { + const settings = stream.getVideoTracks()[0].getSettings(); + const actualw = settings.width; + const actualh = settings.height; + const actualfps = settings.frameRate; + console.log("Camera is opened! Actual spec: (" + actualw + "x" + actualh + "), fps=" + actualfps); + + if (dynCall('iiiiii', outcome, [device, 1, actualw, actualh, actualfps])) { + const video = document.createElement("video"); + video.width = actualw; + video.height = actualh; + video.style.display = 'none'; // we need to attach this to a hidden video node so we can read it as pixels. + video.srcObject = stream; + + const canvas = document.createElement("canvas"); + canvas.width = actualw; + canvas.height = actualh; + canvas.style.display = 'none'; // we need to attach this to a hidden video node so we can read it as pixels. + + const ctx2d = canvas.getContext('2d'); + + const SDL3 = Module['SDL3']; + SDL3.camera.width = actualw; + SDL3.camera.height = actualh; + SDL3.camera.fps = actualfps; + SDL3.camera.fpsincrms = 1000.0 / actualfps; + SDL3.camera.stream = stream; + SDL3.camera.video = video; + SDL3.camera.canvas = canvas; + SDL3.camera.ctx2d = ctx2d; + SDL3.camera.next_frame_time = performance.now(); + + video.play(); + video.addEventListener('loadedmetadata', () => { + grabNextCameraFrame(); // start this loop going. + }); + } + }) + .catch((err) => { + console.error("Tried to open camera but it threw an error! " + err.name + ": " + err.message); + dynCall('iiiiii', outcome, [device, 0, 0, 0, 0]); // we call this a permission error, because it probably is. + }); + }, device, spec->width, spec->height, spec->framerate_numerator, spec->framerate_denominator, SDLEmscriptenCameraPermissionOutcome, SDL_CameraThreadIterate); + + return true; // the real work waits until the user approves a camera. +} + +static void EMSCRIPTENCAMERA_FreeDeviceHandle(SDL_Camera *device) +{ + // no-op. +} + +static void EMSCRIPTENCAMERA_Deinitialize(void) +{ + MAIN_THREAD_EM_ASM({ + if (typeof(Module['SDL3']) !== 'undefined') { + Module['SDL3'].camera = undefined; + } + }); +} + +static void EMSCRIPTENCAMERA_DetectDevices(void) +{ + // `navigator.mediaDevices` is not defined if unsupported or not in a secure context! + const int supported = MAIN_THREAD_EM_ASM_INT({ return (navigator.mediaDevices === undefined) ? 0 : 1; }); + + // if we have support at all, report a single generic camera with no specs. 
+ // We'll find out if there really _is_ a camera when we try to open it, but querying it for real here + // will pop up a user permission dialog warning them we're trying to access the camera, and we generally + // don't want that during SDL_Init(). + if (supported) { + SDL_AddCamera("Web browser's camera", SDL_CAMERA_POSITION_UNKNOWN, 0, NULL, (void *) (size_t) 0x1); + } +} + +static bool EMSCRIPTENCAMERA_Init(SDL_CameraDriverImpl *impl) +{ + MAIN_THREAD_EM_ASM({ + if (typeof(Module['SDL3']) === 'undefined') { + Module['SDL3'] = {}; + } + Module['SDL3'].camera = {}; + }); + + impl->DetectDevices = EMSCRIPTENCAMERA_DetectDevices; + impl->OpenDevice = EMSCRIPTENCAMERA_OpenDevice; + impl->CloseDevice = EMSCRIPTENCAMERA_CloseDevice; + impl->WaitDevice = EMSCRIPTENCAMERA_WaitDevice; + impl->AcquireFrame = EMSCRIPTENCAMERA_AcquireFrame; + impl->ReleaseFrame = EMSCRIPTENCAMERA_ReleaseFrame; + impl->FreeDeviceHandle = EMSCRIPTENCAMERA_FreeDeviceHandle; + impl->Deinitialize = EMSCRIPTENCAMERA_Deinitialize; + + impl->ProvidesOwnCallbackThread = true; + + return true; +} + +CameraBootStrap EMSCRIPTENCAMERA_bootstrap = { + "emscripten", "SDL Emscripten MediaStream camera driver", EMSCRIPTENCAMERA_Init, false +}; + +/* *INDENT-ON* */ // clang-format on + +#endif // SDL_CAMERA_DRIVER_EMSCRIPTEN + diff --git a/contrib/SDL-3.2.8/src/camera/mediafoundation/SDL_camera_mediafoundation.c b/contrib/SDL-3.2.8/src/camera/mediafoundation/SDL_camera_mediafoundation.c new file mode 100644 index 0000000..d9d627d --- /dev/null +++ b/contrib/SDL-3.2.8/src/camera/mediafoundation/SDL_camera_mediafoundation.c @@ -0,0 +1,1143 @@ +/* + Simple DirectMedia Layer + Copyright (C) 1997-2025 Sam Lantinga + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. +*/ +#include "SDL_internal.h" + +// the Windows Media Foundation API + +#ifdef SDL_CAMERA_DRIVER_MEDIAFOUNDATION + +#define COBJMACROS + +// this seems to be a bug in mfidl.h, just define this to avoid the problem section. 
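// Editor's note (assumption, not part of the upstream patch): MIDL-generated
// headers wrap each interface in an include guard of the form
// __<InterfaceName>_INTERFACE_DEFINED__, so pre-defining the guard below makes
// mfidl.h skip the IMFVideoProcessorControl3 section entirely instead of
// emitting the problematic declarations.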
+#define __IMFVideoProcessorControl3_INTERFACE_DEFINED__ + +#include "../../core/windows/SDL_windows.h" + +#include +#include +#include + +#include "../SDL_syscamera.h" +#include "../SDL_camera_c.h" + +static const IID SDL_IID_IMFMediaSource = { 0x279a808d, 0xaec7, 0x40c8, { 0x9c, 0x6b, 0xa6, 0xb4, 0x92, 0xc7, 0x8a, 0x66 } }; +static const IID SDL_IID_IMF2DBuffer = { 0x7dc9d5f9, 0x9ed9, 0x44ec, { 0x9b, 0xbf, 0x06, 0x00, 0xbb, 0x58, 0x9f, 0xbb } }; +static const IID SDL_IID_IMF2DBuffer2 = { 0x33ae5ea6, 0x4316, 0x436f, { 0x8d, 0xdd, 0xd7, 0x3d, 0x22, 0xf8, 0x29, 0xec } }; +static const GUID SDL_MF_MT_DEFAULT_STRIDE = { 0x644b4e48, 0x1e02, 0x4516, { 0xb0, 0xeb, 0xc0, 0x1c, 0xa9, 0xd4, 0x9a, 0xc6 } }; +static const GUID SDL_MF_MT_MAJOR_TYPE = { 0x48eba18e, 0xf8c9, 0x4687, { 0xbf, 0x11, 0x0a, 0x74, 0xc9, 0xf9, 0x6a, 0x8f } }; +static const GUID SDL_MF_MT_SUBTYPE = { 0xf7e34c9a, 0x42e8, 0x4714, { 0xb7, 0x4b, 0xcb, 0x29, 0xd7, 0x2c, 0x35, 0xe5 } }; +static const GUID SDL_MF_MT_VIDEO_NOMINAL_RANGE = { 0xc21b8ee5, 0xb956, 0x4071, { 0x8d, 0xaf, 0x32, 0x5e, 0xdf, 0x5c, 0xab, 0x11 } }; +static const GUID SDL_MF_MT_VIDEO_PRIMARIES = { 0xdbfbe4d7, 0x0740, 0x4ee0, { 0x81, 0x92, 0x85, 0x0a, 0xb0, 0xe2, 0x19, 0x35 } }; +static const GUID SDL_MF_MT_TRANSFER_FUNCTION = { 0x5fb0fce9, 0xbe5c, 0x4935, { 0xa8, 0x11, 0xec, 0x83, 0x8f, 0x8e, 0xed, 0x93 } }; +static const GUID SDL_MF_MT_YUV_MATRIX = { 0x3e23d450, 0x2c75, 0x4d25, { 0xa0, 0x0e, 0xb9, 0x16, 0x70, 0xd1, 0x23, 0x27 } }; +static const GUID SDL_MF_MT_VIDEO_CHROMA_SITING = { 0x65df2370, 0xc773, 0x4c33, { 0xaa, 0x64, 0x84, 0x3e, 0x06, 0x8e, 0xfb, 0x0c } }; +static const GUID SDL_MF_MT_FRAME_SIZE = { 0x1652c33d, 0xd6b2, 0x4012, { 0xb8, 0x34, 0x72, 0x03, 0x08, 0x49, 0xa3, 0x7d } }; +static const GUID SDL_MF_MT_FRAME_RATE = { 0xc459a2e8, 0x3d2c, 0x4e44, { 0xb1, 0x32, 0xfe, 0xe5, 0x15, 0x6c, 0x7b, 0xb0 } }; +static const GUID SDL_MFMediaType_Video = { 0x73646976, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 } }; +static const IID SDL_MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME = { 0x60d0e559, 0x52f8, 0x4fa2, { 0xbb, 0xce, 0xac, 0xdb, 0x34, 0xa8, 0xec, 0x1 } }; +static const IID SDL_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE = { 0xc60ac5fe, 0x252a, 0x478f, { 0xa0, 0xef, 0xbc, 0x8f, 0xa5, 0xf7, 0xca, 0xd3 } }; +static const IID SDL_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK = { 0x58f0aad8, 0x22bf, 0x4f8a, { 0xbb, 0x3d, 0xd2, 0xc4, 0x97, 0x8c, 0x6e, 0x2f } }; +static const IID SDL_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID = { 0x8ac3587a, 0x4ae7, 0x42d8, { 0x99, 0xe0, 0x0a, 0x60, 0x13, 0xee, 0xf9, 0x0f } }; + +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wmultichar" +#endif + +#define SDL_DEFINE_MEDIATYPE_GUID(name, fmt) static const GUID SDL_##name = { fmt, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 } } +SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_RGB555, 24); +SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_RGB565, 23); +SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_RGB24, 20); +SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_RGB32, 22); +SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_ARGB32, 21); +SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_A2R10G10B10, 31); +SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_YV12, FCC('YV12')); +SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_IYUV, FCC('IYUV')); +SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_YUY2, FCC('YUY2')); +SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_UYVY, FCC('UYVY')); +SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_YVYU, FCC('YVYU')); +SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_NV12, FCC('NV12')); 
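// Editor's note (illustrative, not part of the upstream patch): all of these
// media-subtype GUIDs share the base {XXXXXXXX-0000-0010-8000-00AA00389B71}
// and differ only in Data1 -- a small D3DFMT-style number for the RGB formats
// (e.g. 22 for RGB32) and the little-endian FOURCC for the YUV/compressed ones
// (e.g. FCC('NV12') == 0x3231564E), which is exactly what the macro fills in.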
+SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_NV21, FCC('NV21')); +SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_MJPG, FCC('MJPG')); +#undef SDL_DEFINE_MEDIATYPE_GUID + +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + +static const struct +{ + const GUID *guid; + SDL_PixelFormat format; + SDL_Colorspace colorspace; +} fmtmappings[] = { + // This is not every possible format, just popular ones that SDL can reasonably handle. + // (and we should probably trim this list more.) + { &SDL_MFVideoFormat_RGB555, SDL_PIXELFORMAT_XRGB1555, SDL_COLORSPACE_SRGB }, + { &SDL_MFVideoFormat_RGB565, SDL_PIXELFORMAT_RGB565, SDL_COLORSPACE_SRGB }, + { &SDL_MFVideoFormat_RGB24, SDL_PIXELFORMAT_RGB24, SDL_COLORSPACE_SRGB }, + { &SDL_MFVideoFormat_RGB32, SDL_PIXELFORMAT_XRGB8888, SDL_COLORSPACE_SRGB }, + { &SDL_MFVideoFormat_ARGB32, SDL_PIXELFORMAT_ARGB8888, SDL_COLORSPACE_SRGB }, + { &SDL_MFVideoFormat_A2R10G10B10, SDL_PIXELFORMAT_ARGB2101010, SDL_COLORSPACE_SRGB }, + { &SDL_MFVideoFormat_YV12, SDL_PIXELFORMAT_YV12, SDL_COLORSPACE_BT709_LIMITED }, + { &SDL_MFVideoFormat_IYUV, SDL_PIXELFORMAT_IYUV, SDL_COLORSPACE_BT709_LIMITED }, + { &SDL_MFVideoFormat_YUY2, SDL_PIXELFORMAT_YUY2, SDL_COLORSPACE_BT709_LIMITED }, + { &SDL_MFVideoFormat_UYVY, SDL_PIXELFORMAT_UYVY, SDL_COLORSPACE_BT709_LIMITED }, + { &SDL_MFVideoFormat_YVYU, SDL_PIXELFORMAT_YVYU, SDL_COLORSPACE_BT709_LIMITED }, + { &SDL_MFVideoFormat_NV12, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_LIMITED }, + { &SDL_MFVideoFormat_NV21, SDL_PIXELFORMAT_NV21, SDL_COLORSPACE_BT709_LIMITED }, + { &SDL_MFVideoFormat_MJPG, SDL_PIXELFORMAT_MJPG, SDL_COLORSPACE_SRGB } +}; + +static SDL_Colorspace GetMediaTypeColorspace(IMFMediaType *mediatype, SDL_Colorspace default_colorspace) +{ + SDL_Colorspace colorspace = default_colorspace; + + if (SDL_COLORSPACETYPE(colorspace) == SDL_COLOR_TYPE_YCBCR) { + HRESULT ret; + UINT32 range = 0, primaries = 0, transfer = 0, matrix = 0, chroma = 0; + + ret = IMFMediaType_GetUINT32(mediatype, &SDL_MF_MT_VIDEO_NOMINAL_RANGE, &range); + if (SUCCEEDED(ret)) { + switch (range) { + case MFNominalRange_0_255: + range = SDL_COLOR_RANGE_FULL; + break; + case MFNominalRange_16_235: + range = SDL_COLOR_RANGE_LIMITED; + break; + default: + range = (UINT32)SDL_COLORSPACERANGE(default_colorspace); + break; + } + } else { + range = (UINT32)SDL_COLORSPACERANGE(default_colorspace); + } + + ret = IMFMediaType_GetUINT32(mediatype, &SDL_MF_MT_VIDEO_PRIMARIES, &primaries); + if (SUCCEEDED(ret)) { + switch (primaries) { + case MFVideoPrimaries_BT709: + primaries = SDL_COLOR_PRIMARIES_BT709; + break; + case MFVideoPrimaries_BT470_2_SysM: + primaries = SDL_COLOR_PRIMARIES_BT470M; + break; + case MFVideoPrimaries_BT470_2_SysBG: + primaries = SDL_COLOR_PRIMARIES_BT470BG; + break; + case MFVideoPrimaries_SMPTE170M: + primaries = SDL_COLOR_PRIMARIES_BT601; + break; + case MFVideoPrimaries_SMPTE240M: + primaries = SDL_COLOR_PRIMARIES_SMPTE240; + break; + case MFVideoPrimaries_EBU3213: + primaries = SDL_COLOR_PRIMARIES_EBU3213; + break; + case MFVideoPrimaries_BT2020: + primaries = SDL_COLOR_PRIMARIES_BT2020; + break; + case MFVideoPrimaries_XYZ: + primaries = SDL_COLOR_PRIMARIES_XYZ; + break; + case MFVideoPrimaries_DCI_P3: + primaries = SDL_COLOR_PRIMARIES_SMPTE432; + break; + default: + primaries = (UINT32)SDL_COLORSPACEPRIMARIES(default_colorspace); + break; + } + } else { + primaries = (UINT32)SDL_COLORSPACEPRIMARIES(default_colorspace); + } + + ret = IMFMediaType_GetUINT32(mediatype, &SDL_MF_MT_TRANSFER_FUNCTION, &transfer); + if (SUCCEEDED(ret)) { + switch 
(transfer) { + case MFVideoTransFunc_10: + transfer = SDL_TRANSFER_CHARACTERISTICS_LINEAR; + break; + case MFVideoTransFunc_22: + transfer = SDL_TRANSFER_CHARACTERISTICS_GAMMA22; + break; + case MFVideoTransFunc_709: + transfer = SDL_TRANSFER_CHARACTERISTICS_BT709; + break; + case MFVideoTransFunc_240M: + transfer = SDL_TRANSFER_CHARACTERISTICS_SMPTE240; + break; + case MFVideoTransFunc_sRGB: + transfer = SDL_TRANSFER_CHARACTERISTICS_SRGB; + break; + case MFVideoTransFunc_28: + transfer = SDL_TRANSFER_CHARACTERISTICS_GAMMA28; + break; + case MFVideoTransFunc_Log_100: + transfer = SDL_TRANSFER_CHARACTERISTICS_LOG100; + break; + case MFVideoTransFunc_2084: + transfer = SDL_TRANSFER_CHARACTERISTICS_PQ; + break; + case MFVideoTransFunc_HLG: + transfer = SDL_TRANSFER_CHARACTERISTICS_HLG; + break; + case 18 /* MFVideoTransFunc_BT1361_ECG */: + transfer = SDL_TRANSFER_CHARACTERISTICS_BT1361; + break; + case 19 /* MFVideoTransFunc_SMPTE428 */: + transfer = SDL_TRANSFER_CHARACTERISTICS_SMPTE428; + break; + default: + transfer = (UINT32)SDL_COLORSPACETRANSFER(default_colorspace); + break; + } + } else { + transfer = (UINT32)SDL_COLORSPACETRANSFER(default_colorspace); + } + + ret = IMFMediaType_GetUINT32(mediatype, &SDL_MF_MT_YUV_MATRIX, &matrix); + if (SUCCEEDED(ret)) { + switch (matrix) { + case MFVideoTransferMatrix_BT709: + matrix = SDL_MATRIX_COEFFICIENTS_BT709; + break; + case MFVideoTransferMatrix_BT601: + matrix = SDL_MATRIX_COEFFICIENTS_BT601; + break; + case MFVideoTransferMatrix_SMPTE240M: + matrix = SDL_MATRIX_COEFFICIENTS_SMPTE240; + break; + case MFVideoTransferMatrix_BT2020_10: + matrix = SDL_MATRIX_COEFFICIENTS_BT2020_NCL; + break; + case 6 /* MFVideoTransferMatrix_Identity */: + matrix = SDL_MATRIX_COEFFICIENTS_IDENTITY; + break; + case 7 /* MFVideoTransferMatrix_FCC47 */: + matrix = SDL_MATRIX_COEFFICIENTS_FCC; + break; + case 8 /* MFVideoTransferMatrix_YCgCo */: + matrix = SDL_MATRIX_COEFFICIENTS_YCGCO; + break; + case 9 /* MFVideoTransferMatrix_SMPTE2085 */: + matrix = SDL_MATRIX_COEFFICIENTS_SMPTE2085; + break; + case 10 /* MFVideoTransferMatrix_Chroma */: + matrix = SDL_MATRIX_COEFFICIENTS_CHROMA_DERIVED_NCL; + break; + case 11 /* MFVideoTransferMatrix_Chroma_const */: + matrix = SDL_MATRIX_COEFFICIENTS_CHROMA_DERIVED_CL; + break; + case 12 /* MFVideoTransferMatrix_ICtCp */: + matrix = SDL_MATRIX_COEFFICIENTS_ICTCP; + break; + default: + matrix = (UINT32)SDL_COLORSPACEMATRIX(default_colorspace); + break; + } + } else { + matrix = (UINT32)SDL_COLORSPACEMATRIX(default_colorspace); + } + + ret = IMFMediaType_GetUINT32(mediatype, &SDL_MF_MT_VIDEO_CHROMA_SITING, &chroma); + if (SUCCEEDED(ret)) { + switch (chroma) { + case MFVideoChromaSubsampling_MPEG2: + chroma = SDL_CHROMA_LOCATION_LEFT; + break; + case MFVideoChromaSubsampling_MPEG1: + chroma = SDL_CHROMA_LOCATION_CENTER; + break; + case MFVideoChromaSubsampling_DV_PAL: + chroma = SDL_CHROMA_LOCATION_TOPLEFT; + break; + default: + chroma = (UINT32)SDL_COLORSPACECHROMA(default_colorspace); + break; + } + } else { + chroma = (UINT32)SDL_COLORSPACECHROMA(default_colorspace); + } + + colorspace = SDL_DEFINE_COLORSPACE(SDL_COLOR_TYPE_YCBCR, range, primaries, transfer, matrix, chroma); + } + return colorspace; +} + +static void MediaTypeToSDLFmt(IMFMediaType *mediatype, SDL_PixelFormat *format, SDL_Colorspace *colorspace) +{ + HRESULT ret; + GUID type; + + ret = IMFMediaType_GetGUID(mediatype, &SDL_MF_MT_SUBTYPE, &type); + if (SUCCEEDED(ret)) { + for (size_t i = 0; i < SDL_arraysize(fmtmappings); i++) { + if (WIN_IsEqualGUID(&type, 
fmtmappings[i].guid)) { + *format = fmtmappings[i].format; + *colorspace = GetMediaTypeColorspace(mediatype, fmtmappings[i].colorspace); + return; + } + } + } +#if DEBUG_CAMERA + SDL_Log("Unknown media type: 0x%x (%c%c%c%c)", type.Data1, + (char)(Uint8)(type.Data1 >> 0), + (char)(Uint8)(type.Data1 >> 8), + (char)(Uint8)(type.Data1 >> 16), + (char)(Uint8)(type.Data1 >> 24)); +#endif + *format = SDL_PIXELFORMAT_UNKNOWN; + *colorspace = SDL_COLORSPACE_UNKNOWN; +} + +static const GUID *SDLFmtToMFVidFmtGuid(SDL_PixelFormat format) +{ + for (size_t i = 0; i < SDL_arraysize(fmtmappings); i++) { + if (fmtmappings[i].format == format) { + return fmtmappings[i].guid; + } + } + return NULL; +} + + +// handle to Media Foundation libs--Vista and later!--for access to the Media Foundation API. + +// mf.dll ... +static HMODULE libmf = NULL; +typedef HRESULT(WINAPI *pfnMFEnumDeviceSources)(IMFAttributes *,IMFActivate ***,UINT32 *); +typedef HRESULT(WINAPI *pfnMFCreateDeviceSource)(IMFAttributes *, IMFMediaSource **); +static pfnMFEnumDeviceSources pMFEnumDeviceSources = NULL; +static pfnMFCreateDeviceSource pMFCreateDeviceSource = NULL; + +// mfplat.dll ... +static HMODULE libmfplat = NULL; +typedef HRESULT(WINAPI *pfnMFStartup)(ULONG, DWORD); +typedef HRESULT(WINAPI *pfnMFShutdown)(void); +typedef HRESULT(WINAPI *pfnMFCreateAttributes)(IMFAttributes **, UINT32); +typedef HRESULT(WINAPI *pfnMFCreateMediaType)(IMFMediaType **); +typedef HRESULT(WINAPI *pfnMFGetStrideForBitmapInfoHeader)(DWORD, DWORD, LONG *); + +static pfnMFStartup pMFStartup = NULL; +static pfnMFShutdown pMFShutdown = NULL; +static pfnMFCreateAttributes pMFCreateAttributes = NULL; +static pfnMFCreateMediaType pMFCreateMediaType = NULL; +static pfnMFGetStrideForBitmapInfoHeader pMFGetStrideForBitmapInfoHeader = NULL; + +// mfreadwrite.dll ... +static HMODULE libmfreadwrite = NULL; +typedef HRESULT(WINAPI *pfnMFCreateSourceReaderFromMediaSource)(IMFMediaSource *, IMFAttributes *, IMFSourceReader **); +static pfnMFCreateSourceReaderFromMediaSource pMFCreateSourceReaderFromMediaSource = NULL; + + +typedef struct SDL_PrivateCameraData +{ + IMFSourceReader *srcreader; + IMFSample *current_sample; + int pitch; +} SDL_PrivateCameraData; + +static bool MEDIAFOUNDATION_WaitDevice(SDL_Camera *device) +{ + SDL_assert(device->hidden->current_sample == NULL); + + IMFSourceReader *srcreader = device->hidden->srcreader; + IMFSample *sample = NULL; + + while (!SDL_GetAtomicInt(&device->shutdown)) { + DWORD stream_flags = 0; + const HRESULT ret = IMFSourceReader_ReadSample(srcreader, (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, &stream_flags, NULL, &sample); + if (FAILED(ret)) { + return false; // ruh roh. + } + + // we currently ignore stream_flags format changes, but my _hope_ is that IMFSourceReader is handling this and + // will continue to give us the explicitly-specified format we requested when opening the device, though, and + // we don't have to manually deal with it. + + if (sample != NULL) { + break; + } else if (stream_flags & (MF_SOURCE_READERF_ERROR | MF_SOURCE_READERF_ENDOFSTREAM)) { + return false; // apparently this camera has gone down. :/ + } + + // otherwise, there was some minor burp, probably; just try again. 
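        // Editor's note (not part of the upstream patch): the source reader is
        // created later with a NULL attribute store (no
        // MF_SOURCE_READER_ASYNC_CALLBACK), so IMFSourceReader_ReadSample()
        // runs in synchronous mode and blocks until a sample or stream event
        // arrives -- that blocking behaviour is what lets this loop serve as
        // WaitDevice.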
+ } + + device->hidden->current_sample = sample; + + return true; +} + + +#ifdef KEEP_ACQUIRED_BUFFERS_LOCKED + +#define PROP_SURFACE_IMFOBJS_POINTER "SDL.camera.mediafoundation.imfobjs" + +typedef struct SDL_IMFObjects +{ + IMF2DBuffer2 *buffer2d2; + IMF2DBuffer *buffer2d; + IMFMediaBuffer *buffer; + IMFSample *sample; +} SDL_IMFObjects; + +static void SDLCALL CleanupIMF2DBuffer2(void *userdata, void *value) +{ + SDL_IMFObjects *objs = (SDL_IMFObjects *)value; + IMF2DBuffer2_Unlock2D(objs->buffer2d2); + IMF2DBuffer2_Release(objs->buffer2d2); + IMFMediaBuffer_Release(objs->buffer); + IMFSample_Release(objs->sample); + SDL_free(objs); +} + +static void SDLCALL CleanupIMF2DBuffer(void *userdata, void *value) +{ + SDL_IMFObjects *objs = (SDL_IMFObjects *)value; + IMF2DBuffer_Unlock2D(objs->buffer2d); + IMF2DBuffer_Release(objs->buffer2d); + IMFMediaBuffer_Release(objs->buffer); + IMFSample_Release(objs->sample); + SDL_free(objs); +} + +static void SDLCALL CleanupIMFMediaBuffer(void *userdata, void *value) +{ + SDL_IMFObjects *objs = (SDL_IMFObjects *)value; + IMFMediaBuffer_Unlock(objs->buffer); + IMFMediaBuffer_Release(objs->buffer); + IMFSample_Release(objs->sample); + SDL_free(objs); +} + +static SDL_CameraFrameResult MEDIAFOUNDATION_AcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS) +{ + SDL_assert(device->hidden->current_sample != NULL); + + SDL_CameraFrameResult result = SDL_CAMERA_FRAME_READY; + HRESULT ret; + LONGLONG timestamp100NS = 0; + SDL_IMFObjects *objs = (SDL_IMFObjects *) SDL_calloc(1, sizeof (SDL_IMFObjects)); + + if (objs == NULL) { + return SDL_CAMERA_FRAME_ERROR; + } + + objs->sample = device->hidden->current_sample; + device->hidden->current_sample = NULL; + + const SDL_PropertiesID surfprops = SDL_GetSurfaceProperties(frame); + if (!surfprops) { + result = SDL_CAMERA_FRAME_ERROR; + } else { + ret = IMFSample_GetSampleTime(objs->sample, ×tamp100NS); + if (FAILED(ret)) { + result = SDL_CAMERA_FRAME_ERROR; + } + + *timestampNS = timestamp100NS * 100; // the timestamps are in 100-nanosecond increments; move to full nanoseconds. + } + + ret = (result == SDL_CAMERA_FRAME_ERROR) ? E_FAIL : IMFSample_ConvertToContiguousBuffer(objs->sample, &objs->buffer); // IMFSample_GetBufferByIndex(objs->sample, 0, &objs->buffer); + + if (FAILED(ret)) { + SDL_free(objs); + result = SDL_CAMERA_FRAME_ERROR; + } else { + BYTE *pixels = NULL; + LONG pitch = 0; + DWORD buflen = 0; + + if (SUCCEEDED(IMFMediaBuffer_QueryInterface(objs->buffer, &SDL_IID_IMF2DBuffer2, (void **)&objs->buffer2d2))) { + BYTE *bufstart = NULL; + ret = IMF2DBuffer2_Lock2DSize(objs->buffer2d2, MF2DBuffer_LockFlags_Read, &pixels, &pitch, &bufstart, &buflen); + if (FAILED(ret)) { + result = SDL_CAMERA_FRAME_ERROR; + CleanupIMF2DBuffer2(NULL, objs); + } else { + if (frame->format == SDL_PIXELFORMAT_MJPG) { + pitch = (LONG)buflen; + } + if (pitch < 0) { // image rows are reversed. + pixels += -pitch * (frame->h - 1); + } + frame->pixels = pixels; + frame->pitch = (int)pitch; + if (!SDL_SetPointerPropertyWithCleanup(surfprops, PROP_SURFACE_IMFOBJS_POINTER, objs, CleanupIMF2DBuffer2, NULL)) { + result = SDL_CAMERA_FRAME_ERROR; + } + } + } else if (frame->format != SDL_PIXELFORMAT_MJPG && + SUCCEEDED(IMFMediaBuffer_QueryInterface(objs->buffer, &SDL_IID_IMF2DBuffer, (void **)&objs->buffer2d))) { + ret = IMF2DBuffer_Lock2D(objs->buffer2d, &pixels, &pitch); + if (FAILED(ret)) { + CleanupIMF2DBuffer(NULL, objs); + result = SDL_CAMERA_FRAME_ERROR; + } else { + if (pitch < 0) { // image rows are reversed. 
+ pixels += -pitch * (frame->h - 1); + } + frame->pixels = pixels; + frame->pitch = (int)pitch; + if (!SDL_SetPointerPropertyWithCleanup(surfprops, PROP_SURFACE_IMFOBJS_POINTER, objs, CleanupIMF2DBuffer, NULL)) { + result = SDL_CAMERA_FRAME_ERROR; + } + } + } else { + DWORD maxlen = 0; + ret = IMFMediaBuffer_Lock(objs->buffer, &pixels, &maxlen, &buflen); + if (FAILED(ret)) { + CleanupIMFMediaBuffer(NULL, objs); + result = SDL_CAMERA_FRAME_ERROR; + } else { + if (frame->format == SDL_PIXELFORMAT_MJPG) { + pitch = (LONG)buflen; + } else { + pitch = (LONG)device->hidden->pitch; + } + if (pitch < 0) { // image rows are reversed. + pixels += -pitch * (frame->h - 1); + } + frame->pixels = pixels; + frame->pitch = (int)pitch; + if (!SDL_SetPointerPropertyWithCleanup(surfprops, PROP_SURFACE_IMFOBJS_POINTER, objs, CleanupIMFMediaBuffer, NULL)) { + result = SDL_CAMERA_FRAME_ERROR; + } + } + } + } + + if (result != SDL_CAMERA_FRAME_READY) { + *timestampNS = 0; + } + + return result; +} + +static void MEDIAFOUNDATION_ReleaseFrame(SDL_Camera *device, SDL_Surface *frame) +{ + const SDL_PropertiesID surfprops = SDL_GetSurfaceProperties(frame); + if (surfprops) { + // this will release the IMFBuffer and IMFSample objects for this frame. + SDL_ClearProperty(surfprops, PROP_SURFACE_IMFOBJS_POINTER); + } +} + +#else + +static SDL_CameraFrameResult MEDIAFOUNDATION_CopyFrame(SDL_Surface *frame, const BYTE *pixels, LONG pitch, DWORD buflen) +{ + frame->pixels = SDL_aligned_alloc(SDL_GetSIMDAlignment(), buflen); + if (!frame->pixels) { + return SDL_CAMERA_FRAME_ERROR; + } + + const BYTE *start = pixels; + if (pitch < 0) { // image rows are reversed. + start += -pitch * (frame->h - 1); + } + SDL_memcpy(frame->pixels, start, buflen); + frame->pitch = (int)pitch; + + return SDL_CAMERA_FRAME_READY; +} + +static SDL_CameraFrameResult MEDIAFOUNDATION_AcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS) +{ + SDL_assert(device->hidden->current_sample != NULL); + + SDL_CameraFrameResult result = SDL_CAMERA_FRAME_READY; + HRESULT ret; + LONGLONG timestamp100NS = 0; + + IMFSample *sample = device->hidden->current_sample; + device->hidden->current_sample = NULL; + + const SDL_PropertiesID surfprops = SDL_GetSurfaceProperties(frame); + if (!surfprops) { + result = SDL_CAMERA_FRAME_ERROR; + } else { + ret = IMFSample_GetSampleTime(sample, ×tamp100NS); + if (FAILED(ret)) { + result = SDL_CAMERA_FRAME_ERROR; + } + + *timestampNS = timestamp100NS * 100; // the timestamps are in 100-nanosecond increments; move to full nanoseconds. + } + + IMFMediaBuffer *buffer = NULL; + ret = (result < 0) ? 
E_FAIL : IMFSample_ConvertToContiguousBuffer(sample, &buffer); // IMFSample_GetBufferByIndex(sample, 0, &buffer); + + if (FAILED(ret)) { + result = SDL_CAMERA_FRAME_ERROR; + } else { + IMF2DBuffer *buffer2d = NULL; + IMF2DBuffer2 *buffer2d2 = NULL; + BYTE *pixels = NULL; + LONG pitch = 0; + DWORD buflen = 0; + + if (SUCCEEDED(IMFMediaBuffer_QueryInterface(buffer, &SDL_IID_IMF2DBuffer2, (void **)&buffer2d2))) { + BYTE *bufstart = NULL; + ret = IMF2DBuffer2_Lock2DSize(buffer2d2, MF2DBuffer_LockFlags_Read, &pixels, &pitch, &bufstart, &buflen); + if (FAILED(ret)) { + result = SDL_CAMERA_FRAME_ERROR; + } else { + if (frame->format == SDL_PIXELFORMAT_MJPG) { + pitch = (LONG)buflen; + } + result = MEDIAFOUNDATION_CopyFrame(frame, pixels, pitch, buflen); + IMF2DBuffer2_Unlock2D(buffer2d2); + } + IMF2DBuffer2_Release(buffer2d2); + } else if (frame->format != SDL_PIXELFORMAT_MJPG && + SUCCEEDED(IMFMediaBuffer_QueryInterface(buffer, &SDL_IID_IMF2DBuffer, (void **)&buffer2d))) { + ret = IMF2DBuffer_Lock2D(buffer2d, &pixels, &pitch); + if (FAILED(ret)) { + result = SDL_CAMERA_FRAME_ERROR; + } else { + buflen = SDL_abs((int)pitch) * frame->h; + result = MEDIAFOUNDATION_CopyFrame(frame, pixels, pitch, buflen); + IMF2DBuffer_Unlock2D(buffer2d); + } + IMF2DBuffer_Release(buffer2d); + } else { + DWORD maxlen = 0; + ret = IMFMediaBuffer_Lock(buffer, &pixels, &maxlen, &buflen); + if (FAILED(ret)) { + result = SDL_CAMERA_FRAME_ERROR; + } else { + if (frame->format == SDL_PIXELFORMAT_MJPG) { + pitch = (LONG)buflen; + } else { + pitch = (LONG)device->hidden->pitch; + } + result = MEDIAFOUNDATION_CopyFrame(frame, pixels, pitch, buflen); + IMFMediaBuffer_Unlock(buffer); + } + } + IMFMediaBuffer_Release(buffer); + } + + IMFSample_Release(sample); + + if (result != SDL_CAMERA_FRAME_READY) { + *timestampNS = 0; + } + + return result; +} + +static void MEDIAFOUNDATION_ReleaseFrame(SDL_Camera *device, SDL_Surface *frame) +{ + SDL_aligned_free(frame->pixels); +} + +#endif + +static void MEDIAFOUNDATION_CloseDevice(SDL_Camera *device) +{ + if (device && device->hidden) { + if (device->hidden->srcreader) { + IMFSourceReader_Release(device->hidden->srcreader); + } + if (device->hidden->current_sample) { + IMFSample_Release(device->hidden->current_sample); + } + SDL_free(device->hidden); + device->hidden = NULL; + } +} + +// this function is from https://learn.microsoft.com/en-us/windows/win32/medfound/uncompressed-video-buffers +static HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride) +{ + LONG lStride = 0; + + // Try to get the default stride from the media type. + HRESULT ret = IMFMediaType_GetUINT32(pType, &SDL_MF_MT_DEFAULT_STRIDE, (UINT32*)&lStride); + if (FAILED(ret)) { + // Attribute not set. Try to calculate the default stride. + + GUID subtype = GUID_NULL; + UINT32 width = 0; + // UINT32 height = 0; + UINT64 val = 0; + + // Get the subtype and the image size. + ret = IMFMediaType_GetGUID(pType, &SDL_MF_MT_SUBTYPE, &subtype); + if (FAILED(ret)) { + goto done; + } + + ret = IMFMediaType_GetUINT64(pType, &SDL_MF_MT_FRAME_SIZE, &val); + if (FAILED(ret)) { + goto done; + } + + width = (UINT32) (val >> 32); + // height = (UINT32) val; + + ret = pMFGetStrideForBitmapInfoHeader(subtype.Data1, width, &lStride); + if (FAILED(ret)) { + goto done; + } + + // Set the attribute for later reference. 
+ IMFMediaType_SetUINT32(pType, &SDL_MF_MT_DEFAULT_STRIDE, (UINT32) lStride); + } + + if (SUCCEEDED(ret)) { + *plStride = lStride; + } + +done: + return ret; +} + + +static bool MEDIAFOUNDATION_OpenDevice(SDL_Camera *device, const SDL_CameraSpec *spec) +{ + const char *utf8symlink = (const char *) device->handle; + IMFAttributes *attrs = NULL; + LPWSTR wstrsymlink = NULL; + IMFMediaSource *source = NULL; + IMFMediaType *mediatype = NULL; + IMFSourceReader *srcreader = NULL; +#if 0 + DWORD num_streams = 0; +#endif + LONG lstride = 0; + //PROPVARIANT var; + HRESULT ret; + + #if 0 + IMFStreamDescriptor *streamdesc = NULL; + IMFPresentationDescriptor *presentdesc = NULL; + IMFMediaTypeHandler *handler = NULL; + #endif + + #if DEBUG_CAMERA + SDL_Log("CAMERA: opening device with symlink of '%s'", utf8symlink); + #endif + + wstrsymlink = WIN_UTF8ToString(utf8symlink); + if (!wstrsymlink) { + goto failed; + } + + #define CHECK_HRESULT(what, r) if (FAILED(r)) { WIN_SetErrorFromHRESULT(what " failed", r); goto failed; } + + ret = pMFCreateAttributes(&attrs, 1); + CHECK_HRESULT("MFCreateAttributes", ret); + + ret = IMFAttributes_SetGUID(attrs, &SDL_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, &SDL_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID); + CHECK_HRESULT("IMFAttributes_SetGUID(srctype)", ret); + + ret = IMFAttributes_SetString(attrs, &SDL_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, wstrsymlink); + CHECK_HRESULT("IMFAttributes_SetString(symlink)", ret); + + ret = pMFCreateDeviceSource(attrs, &source); + CHECK_HRESULT("MFCreateDeviceSource", ret); + + IMFAttributes_Release(attrs); + SDL_free(wstrsymlink); + attrs = NULL; + wstrsymlink = NULL; + + // !!! FIXME: I think it'd be nice to do this without an IMFSourceReader, + // since it's just utility code that has to handle more complex media streams + // than we're dealing with, but this will do for now. The docs are slightly + // insistent that you should use one, though...Maybe it's extremely hard + // to handle directly at the IMFMediaSource layer...? + ret = pMFCreateSourceReaderFromMediaSource(source, NULL, &srcreader); + CHECK_HRESULT("MFCreateSourceReaderFromMediaSource", ret); + + // !!! FIXME: do we actually have to find the media type object in the source reader or can we just roll our own like this? + ret = pMFCreateMediaType(&mediatype); + CHECK_HRESULT("MFCreateMediaType", ret); + + ret = IMFMediaType_SetGUID(mediatype, &SDL_MF_MT_MAJOR_TYPE, &SDL_MFMediaType_Video); + CHECK_HRESULT("IMFMediaType_SetGUID(major_type)", ret); + + ret = IMFMediaType_SetGUID(mediatype, &SDL_MF_MT_SUBTYPE, SDLFmtToMFVidFmtGuid(spec->format)); + CHECK_HRESULT("IMFMediaType_SetGUID(subtype)", ret); + + ret = IMFMediaType_SetUINT64(mediatype, &SDL_MF_MT_FRAME_SIZE, (((UINT64)spec->width) << 32) | ((UINT64)spec->height)); + CHECK_HRESULT("MFSetAttributeSize(frame_size)", ret); + + ret = IMFMediaType_SetUINT64(mediatype, &SDL_MF_MT_FRAME_RATE, (((UINT64)spec->framerate_numerator) << 32) | ((UINT64)spec->framerate_denominator)); + CHECK_HRESULT("MFSetAttributeRatio(frame_rate)", ret); + + ret = IMFSourceReader_SetCurrentMediaType(srcreader, (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, mediatype); + CHECK_HRESULT("IMFSourceReader_SetCurrentMediaType", ret); + + #if 0 // this (untested thing) is what we would do to get started with a IMFMediaSource that _doesn't_ use IMFSourceReader... 
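    // Editor's note (illustrative, not part of the upstream patch): Media
    // Foundation packs pair-valued attributes into a single UINT64, so the
    // IMFMediaType_SetUINT64() calls above are the expanded form of the
    // MFSetAttributeSize()/MFSetAttributeRatio() helper macros. For a
    // hypothetical 1280x720 request at 30/1 fps:
    //
    //     MF_MT_FRAME_SIZE: ((UINT64)1280 << 32) | 720
    //     MF_MT_FRAME_RATE: ((UINT64)30   << 32) | 1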
+ ret = IMFMediaSource_CreatePresentationDescriptor(source, &presentdesc); + CHECK_HRESULT("IMFMediaSource_CreatePresentationDescriptor", ret); + + ret = IMFPresentationDescriptor_GetStreamDescriptorCount(presentdesc, &num_streams); + CHECK_HRESULT("IMFPresentationDescriptor_GetStreamDescriptorCount", ret); + + for (DWORD i = 0; i < num_streams; i++) { + BOOL selected = FALSE; + ret = IMFPresentationDescriptor_GetStreamDescriptorByIndex(presentdesc, i, &selected, &streamdesc); + CHECK_HRESULT("IMFPresentationDescriptor_GetStreamDescriptorByIndex", ret); + + if (selected) { + ret = IMFStreamDescriptor_GetMediaTypeHandler(streamdesc, &handler); + CHECK_HRESULT("IMFStreamDescriptor_GetMediaTypeHandler", ret); + IMFMediaTypeHandler_SetCurrentMediaType(handler, mediatype); + IMFMediaTypeHandler_Release(handler); + handler = NULL; + } + + IMFStreamDescriptor_Release(streamdesc); + streamdesc = NULL; + } + + PropVariantInit(&var); + var.vt = VT_EMPTY; + ret = IMFMediaSource_Start(source, presentdesc, NULL, &var); + PropVariantClear(&var); + CHECK_HRESULT("IMFMediaSource_Start", ret); + + IMFPresentationDescriptor_Release(presentdesc); + presentdesc = NULL; + #endif + + ret = GetDefaultStride(mediatype, &lstride); + CHECK_HRESULT("GetDefaultStride", ret); + + IMFMediaType_Release(mediatype); + mediatype = NULL; + + device->hidden = (SDL_PrivateCameraData *) SDL_calloc(1, sizeof (SDL_PrivateCameraData)); + if (!device->hidden) { + goto failed; + } + + device->hidden->pitch = (int) lstride; + device->hidden->srcreader = srcreader; + IMFMediaSource_Release(source); // srcreader is holding a reference to this. + + // There is no user permission prompt for camera access (I think?) + SDL_CameraPermissionOutcome(device, true); + + #undef CHECK_HRESULT + + return true; + +failed: + + if (srcreader) { + IMFSourceReader_Release(srcreader); + } + + #if 0 + if (handler) { + IMFMediaTypeHandler_Release(handler); + } + + if (streamdesc) { + IMFStreamDescriptor_Release(streamdesc); + } + + if (presentdesc) { + IMFPresentationDescriptor_Release(presentdesc); + } + #endif + + if (source) { + IMFMediaSource_Shutdown(source); + IMFMediaSource_Release(source); + } + + if (mediatype) { + IMFMediaType_Release(mediatype); + } + + if (attrs) { + IMFAttributes_Release(attrs); + } + SDL_free(wstrsymlink); + + return false; +} + +static void MEDIAFOUNDATION_FreeDeviceHandle(SDL_Camera *device) +{ + if (device) { + SDL_free(device->handle); // the device's symlink string. + } +} + +static char *QueryActivationObjectString(IMFActivate *activation, const GUID *pguid) +{ + LPWSTR wstr = NULL; + UINT32 wlen = 0; + HRESULT ret = IMFActivate_GetAllocatedString(activation, pguid, &wstr, &wlen); + if (FAILED(ret)) { + return NULL; + } + + char *utf8str = WIN_StringToUTF8(wstr); + CoTaskMemFree(wstr); + return utf8str; +} + +static void GatherCameraSpecs(IMFMediaSource *source, CameraFormatAddData *add_data) +{ + HRESULT ret; + + // this has like a thousand steps. 
:/ + + SDL_zerop(add_data); + + IMFPresentationDescriptor *presentdesc = NULL; + ret = IMFMediaSource_CreatePresentationDescriptor(source, &presentdesc); + if (FAILED(ret) || !presentdesc) { + return; + } + + DWORD num_streams = 0; + ret = IMFPresentationDescriptor_GetStreamDescriptorCount(presentdesc, &num_streams); + if (FAILED(ret)) { + num_streams = 0; + } + + for (DWORD i = 0; i < num_streams; i++) { + IMFStreamDescriptor *streamdesc = NULL; + BOOL selected = FALSE; + ret = IMFPresentationDescriptor_GetStreamDescriptorByIndex(presentdesc, i, &selected, &streamdesc); + if (FAILED(ret) || !streamdesc) { + continue; + } + + if (selected) { + IMFMediaTypeHandler *handler = NULL; + ret = IMFStreamDescriptor_GetMediaTypeHandler(streamdesc, &handler); + if (SUCCEEDED(ret) && handler) { + DWORD num_mediatype = 0; + ret = IMFMediaTypeHandler_GetMediaTypeCount(handler, &num_mediatype); + if (FAILED(ret)) { + num_mediatype = 0; + } + + for (DWORD j = 0; j < num_mediatype; j++) { + IMFMediaType *mediatype = NULL; + ret = IMFMediaTypeHandler_GetMediaTypeByIndex(handler, j, &mediatype); + if (SUCCEEDED(ret) && mediatype) { + GUID type; + ret = IMFMediaType_GetGUID(mediatype, &SDL_MF_MT_MAJOR_TYPE, &type); + if (SUCCEEDED(ret) && WIN_IsEqualGUID(&type, &SDL_MFMediaType_Video)) { + SDL_PixelFormat sdlfmt = SDL_PIXELFORMAT_UNKNOWN; + SDL_Colorspace colorspace = SDL_COLORSPACE_UNKNOWN; + MediaTypeToSDLFmt(mediatype, &sdlfmt, &colorspace); + if (sdlfmt != SDL_PIXELFORMAT_UNKNOWN) { + UINT64 val = 0; + UINT32 w = 0, h = 0; + ret = IMFMediaType_GetUINT64(mediatype, &SDL_MF_MT_FRAME_SIZE, &val); + w = (UINT32)(val >> 32); + h = (UINT32)val; + if (SUCCEEDED(ret) && w && h) { + UINT32 framerate_numerator = 0, framerate_denominator = 0; + ret = IMFMediaType_GetUINT64(mediatype, &SDL_MF_MT_FRAME_RATE, &val); + framerate_numerator = (UINT32)(val >> 32); + framerate_denominator = (UINT32)val; + if (SUCCEEDED(ret) && framerate_numerator && framerate_denominator) { + SDL_AddCameraFormat(add_data, sdlfmt, colorspace, (int) w, (int) h, (int)framerate_numerator, (int)framerate_denominator); + } + } + } + } + IMFMediaType_Release(mediatype); + } + } + IMFMediaTypeHandler_Release(handler); + } + } + IMFStreamDescriptor_Release(streamdesc); + } + + IMFPresentationDescriptor_Release(presentdesc); +} + +static bool FindMediaFoundationCameraBySymlink(SDL_Camera *device, void *userdata) +{ + return (SDL_strcmp((const char *) device->handle, (const char *) userdata) == 0); +} + +static void MaybeAddDevice(IMFActivate *activation) +{ + char *symlink = QueryActivationObjectString(activation, &SDL_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK); + + if (SDL_FindPhysicalCameraByCallback(FindMediaFoundationCameraBySymlink, symlink)) { + SDL_free(symlink); + return; // already have this one. + } + + char *name = QueryActivationObjectString(activation, &SDL_MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME); + if (name && symlink) { + IMFMediaSource *source = NULL; + // "activating" here only creates an object, it doesn't open the actual camera hardware or start recording. 
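    // Editor's note (not part of the upstream patch): GatherCameraSpecs() only
    // walks the presentation descriptor of the activated source; the object is
    // shut down and released again right below, so enumerating cameras never
    // starts capture. The only thing SDL keeps as the device handle is the
    // symbolic-link string, which OpenDevice later feeds back into
    // MFCreateDeviceSource().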
+ HRESULT ret = IMFActivate_ActivateObject(activation, &SDL_IID_IMFMediaSource, (void**)&source); + if (SUCCEEDED(ret) && source) { + CameraFormatAddData add_data; + GatherCameraSpecs(source, &add_data); + if (add_data.num_specs > 0) { + SDL_AddCamera(name, SDL_CAMERA_POSITION_UNKNOWN, add_data.num_specs, add_data.specs, symlink); + } + SDL_free(add_data.specs); + IMFActivate_ShutdownObject(activation); + IMFMediaSource_Release(source); + } + } + + SDL_free(name); +} + +static void MEDIAFOUNDATION_DetectDevices(void) +{ + // !!! FIXME: use CM_Register_Notification (Win8+) to get device notifications. + // !!! FIXME: Earlier versions can use RegisterDeviceNotification, but I'm not bothering: no hotplug for you! + HRESULT ret; + + IMFAttributes *attrs = NULL; + ret = pMFCreateAttributes(&attrs, 1); + if (FAILED(ret)) { + return; // oh well, no cameras for you. + } + + ret = IMFAttributes_SetGUID(attrs, &SDL_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, &SDL_MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID); + if (FAILED(ret)) { + IMFAttributes_Release(attrs); + return; // oh well, no cameras for you. + } + + IMFActivate **activations = NULL; + UINT32 total = 0; + ret = pMFEnumDeviceSources(attrs, &activations, &total); + IMFAttributes_Release(attrs); + if (FAILED(ret)) { + return; // oh well, no cameras for you. + } + + for (UINT32 i = 0; i < total; i++) { + MaybeAddDevice(activations[i]); + IMFActivate_Release(activations[i]); + } + + CoTaskMemFree(activations); +} + +static void MEDIAFOUNDATION_Deinitialize(void) +{ + pMFShutdown(); + + FreeLibrary(libmfreadwrite); + libmfreadwrite = NULL; + FreeLibrary(libmfplat); + libmfplat = NULL; + FreeLibrary(libmf); + libmf = NULL; + + pMFEnumDeviceSources = NULL; + pMFCreateDeviceSource = NULL; + pMFStartup = NULL; + pMFShutdown = NULL; + pMFCreateAttributes = NULL; + pMFCreateMediaType = NULL; + pMFCreateSourceReaderFromMediaSource = NULL; + pMFGetStrideForBitmapInfoHeader = NULL; +} + +static bool MEDIAFOUNDATION_Init(SDL_CameraDriverImpl *impl) +{ + // !!! FIXME: slide this off into a subroutine + HMODULE mf = LoadLibrary(TEXT("Mf.dll")); // this library is available in Vista and later, but also can be on XP with service packs and Windows + if (!mf) { + return false; + } + + HMODULE mfplat = LoadLibrary(TEXT("Mfplat.dll")); // this library is available in Vista and later. No WinXP, so have to LoadLibrary to use it for now! + if (!mfplat) { + FreeLibrary(mf); + return false; + } + + HMODULE mfreadwrite = LoadLibrary(TEXT("Mfreadwrite.dll")); // this library is available in Vista and later. No WinXP, so have to LoadLibrary to use it for now! 
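    // Editor's note (assumption, not part of the upstream patch): loading
    // Mf.dll / Mfplat.dll / Mfreadwrite.dll at runtime instead of linking them
    // keeps SDL usable on systems where Media Foundation is absent (for
    // example Windows "N" editions without the Media Feature Pack) -- in that
    // case Init simply returns false and the next driver in the bootstrap list
    // gets a chance instead.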
+ if (!mfreadwrite) { + FreeLibrary(mfplat); + FreeLibrary(mf); + return false; + } + + bool okay = true; + #define LOADSYM(lib, fn) if (okay) { p##fn = (pfn##fn) GetProcAddress(lib, #fn); if (!p##fn) { okay = false; } } + LOADSYM(mf, MFEnumDeviceSources); + LOADSYM(mf, MFCreateDeviceSource); + LOADSYM(mfplat, MFStartup); + LOADSYM(mfplat, MFShutdown); + LOADSYM(mfplat, MFCreateAttributes); + LOADSYM(mfplat, MFCreateMediaType); + LOADSYM(mfplat, MFGetStrideForBitmapInfoHeader); + LOADSYM(mfreadwrite, MFCreateSourceReaderFromMediaSource); + #undef LOADSYM + + if (okay) { + const HRESULT ret = pMFStartup(MF_VERSION, MFSTARTUP_LITE); + if (FAILED(ret)) { + okay = false; + } + } + + if (!okay) { + FreeLibrary(mfreadwrite); + FreeLibrary(mfplat); + FreeLibrary(mf); + return false; + } + + libmf = mf; + libmfplat = mfplat; + libmfreadwrite = mfreadwrite; + + impl->DetectDevices = MEDIAFOUNDATION_DetectDevices; + impl->OpenDevice = MEDIAFOUNDATION_OpenDevice; + impl->CloseDevice = MEDIAFOUNDATION_CloseDevice; + impl->WaitDevice = MEDIAFOUNDATION_WaitDevice; + impl->AcquireFrame = MEDIAFOUNDATION_AcquireFrame; + impl->ReleaseFrame = MEDIAFOUNDATION_ReleaseFrame; + impl->FreeDeviceHandle = MEDIAFOUNDATION_FreeDeviceHandle; + impl->Deinitialize = MEDIAFOUNDATION_Deinitialize; + + return true; +} + +CameraBootStrap MEDIAFOUNDATION_bootstrap = { + "mediafoundation", "SDL Windows Media Foundation camera driver", MEDIAFOUNDATION_Init, false +}; + +#endif // SDL_CAMERA_DRIVER_MEDIAFOUNDATION + diff --git a/contrib/SDL-3.2.8/src/camera/pipewire/SDL_camera_pipewire.c b/contrib/SDL-3.2.8/src/camera/pipewire/SDL_camera_pipewire.c new file mode 100644 index 0000000..5d868bf --- /dev/null +++ b/contrib/SDL-3.2.8/src/camera/pipewire/SDL_camera_pipewire.c @@ -0,0 +1,1144 @@ +/* + Simple DirectMedia Layer + Copyright (C) 1997-2025 Sam Lantinga + Copyright (C) 2024 Wim Taymans + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. 
+*/ +#include "SDL_internal.h" + +#ifdef SDL_CAMERA_DRIVER_PIPEWIRE + +#include "../SDL_syscamera.h" + +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#define PW_POD_BUFFER_LENGTH 1024 +#define PW_THREAD_NAME_BUFFER_LENGTH 128 +#define PW_MAX_IDENTIFIER_LENGTH 256 + +#define PW_REQUIRED_MAJOR 1 +#define PW_REQUIRED_MINOR 0 +#define PW_REQUIRED_PATCH 0 + +enum PW_READY_FLAGS +{ + PW_READY_FLAG_BUFFER_ADDED = 0x1, + PW_READY_FLAG_STREAM_READY = 0x2, + PW_READY_FLAG_ALL_BITS = 0x3 +}; + +#define PW_ID_TO_HANDLE(x) (void *)((uintptr_t)x) +#define PW_HANDLE_TO_ID(x) (uint32_t)((uintptr_t)x) + +static bool pipewire_initialized = false; + +// Pipewire entry points +static const char *(*PIPEWIRE_pw_get_library_version)(void); +#if PW_CHECK_VERSION(0, 3, 75) +static bool (*PIPEWIRE_pw_check_library_version)(int major, int minor, int micro); +#endif +static void (*PIPEWIRE_pw_init)(int *, char ***); +static void (*PIPEWIRE_pw_deinit)(void); +static struct pw_main_loop *(*PIPEWIRE_pw_main_loop_new)(const struct spa_dict *loop); +static struct pw_loop *(*PIPEWIRE_pw_main_loop_get_loop)(struct pw_main_loop *loop); +static int (*PIPEWIRE_pw_main_loop_run)(struct pw_main_loop *loop); +static int (*PIPEWIRE_pw_main_loop_quit)(struct pw_main_loop *loop); +static void(*PIPEWIRE_pw_main_loop_destroy)(struct pw_main_loop *loop); +static struct pw_thread_loop *(*PIPEWIRE_pw_thread_loop_new)(const char *, const struct spa_dict *); +static void (*PIPEWIRE_pw_thread_loop_destroy)(struct pw_thread_loop *); +static void (*PIPEWIRE_pw_thread_loop_stop)(struct pw_thread_loop *); +static struct pw_loop *(*PIPEWIRE_pw_thread_loop_get_loop)(struct pw_thread_loop *); +static void (*PIPEWIRE_pw_thread_loop_lock)(struct pw_thread_loop *); +static void (*PIPEWIRE_pw_thread_loop_unlock)(struct pw_thread_loop *); +static void (*PIPEWIRE_pw_thread_loop_signal)(struct pw_thread_loop *, bool); +static void (*PIPEWIRE_pw_thread_loop_wait)(struct pw_thread_loop *); +static int (*PIPEWIRE_pw_thread_loop_start)(struct pw_thread_loop *); +static struct pw_context *(*PIPEWIRE_pw_context_new)(struct pw_loop *, struct pw_properties *, size_t); +static void (*PIPEWIRE_pw_context_destroy)(struct pw_context *); +static struct pw_core *(*PIPEWIRE_pw_context_connect)(struct pw_context *, struct pw_properties *, size_t); +static void (*PIPEWIRE_pw_proxy_add_object_listener)(struct pw_proxy *, struct spa_hook *, const void *, void *); +static void (*PIPEWIRE_pw_proxy_add_listener)(struct pw_proxy *, struct spa_hook *, const struct pw_proxy_events *, void *); +static void *(*PIPEWIRE_pw_proxy_get_user_data)(struct pw_proxy *); +static void (*PIPEWIRE_pw_proxy_destroy)(struct pw_proxy *); +static int (*PIPEWIRE_pw_core_disconnect)(struct pw_core *); +static struct pw_node_info * (*PIPEWIRE_pw_node_info_merge)(struct pw_node_info *info, const struct pw_node_info *update, bool reset); +static void (*PIPEWIRE_pw_node_info_free)(struct pw_node_info *info); +static struct pw_stream *(*PIPEWIRE_pw_stream_new)(struct pw_core *, const char *, struct pw_properties *); +static void (*PIPEWIRE_pw_stream_add_listener)(struct pw_stream *stream, struct spa_hook *listener, const struct pw_stream_events *events, void *data); +static void (*PIPEWIRE_pw_stream_destroy)(struct pw_stream *); +static int (*PIPEWIRE_pw_stream_connect)(struct pw_stream *, enum pw_direction, uint32_t, enum pw_stream_flags, + const struct spa_pod **, uint32_t); +static enum pw_stream_state (*PIPEWIRE_pw_stream_get_state)(struct pw_stream 
*stream, const char **error); +static struct pw_buffer *(*PIPEWIRE_pw_stream_dequeue_buffer)(struct pw_stream *); +static int (*PIPEWIRE_pw_stream_queue_buffer)(struct pw_stream *, struct pw_buffer *); +static struct pw_properties *(*PIPEWIRE_pw_properties_new)(const char *, ...)SPA_SENTINEL; +static struct pw_properties *(*PIPEWIRE_pw_properties_new_dict)(const struct spa_dict *dict); +static int (*PIPEWIRE_pw_properties_set)(struct pw_properties *, const char *, const char *); +static int (*PIPEWIRE_pw_properties_setf)(struct pw_properties *, const char *, const char *, ...) SPA_PRINTF_FUNC(3, 4); + +#ifdef SDL_CAMERA_DRIVER_PIPEWIRE_DYNAMIC + +static const char *pipewire_library = SDL_CAMERA_DRIVER_PIPEWIRE_DYNAMIC; +static SDL_SharedObject *pipewire_handle = NULL; + +static bool pipewire_dlsym(const char *fn, void **addr) +{ + *addr = SDL_LoadFunction(pipewire_handle, fn); + if (!*addr) { + // Don't call SDL_SetError(): SDL_LoadFunction already did. + return false; + } + + return true; +} + +#define SDL_PIPEWIRE_SYM(x) \ + if (!pipewire_dlsym(#x, (void **)(char *)&PIPEWIRE_##x)) \ + return false + +static bool load_pipewire_library(void) +{ + pipewire_handle = SDL_LoadObject(pipewire_library); + return pipewire_handle ? true : false; +} + +static void unload_pipewire_library(void) +{ + if (pipewire_handle) { + SDL_UnloadObject(pipewire_handle); + pipewire_handle = NULL; + } +} + +#else + +#define SDL_PIPEWIRE_SYM(x) PIPEWIRE_##x = x + +static bool load_pipewire_library(void) +{ + return true; +} + +static void unload_pipewire_library(void) +{ + // Nothing to do +} + +#endif // SDL_CAMERA_DRIVER_PIPEWIRE_DYNAMIC + +static bool load_pipewire_syms(void) +{ + SDL_PIPEWIRE_SYM(pw_get_library_version); +#if PW_CHECK_VERSION(0, 3, 75) + SDL_PIPEWIRE_SYM(pw_check_library_version); +#endif + SDL_PIPEWIRE_SYM(pw_init); + SDL_PIPEWIRE_SYM(pw_deinit); + SDL_PIPEWIRE_SYM(pw_main_loop_new); + SDL_PIPEWIRE_SYM(pw_main_loop_get_loop); + SDL_PIPEWIRE_SYM(pw_main_loop_run); + SDL_PIPEWIRE_SYM(pw_main_loop_quit); + SDL_PIPEWIRE_SYM(pw_main_loop_destroy); + SDL_PIPEWIRE_SYM(pw_thread_loop_new); + SDL_PIPEWIRE_SYM(pw_thread_loop_destroy); + SDL_PIPEWIRE_SYM(pw_thread_loop_stop); + SDL_PIPEWIRE_SYM(pw_thread_loop_get_loop); + SDL_PIPEWIRE_SYM(pw_thread_loop_lock); + SDL_PIPEWIRE_SYM(pw_thread_loop_unlock); + SDL_PIPEWIRE_SYM(pw_thread_loop_signal); + SDL_PIPEWIRE_SYM(pw_thread_loop_wait); + SDL_PIPEWIRE_SYM(pw_thread_loop_start); + SDL_PIPEWIRE_SYM(pw_context_new); + SDL_PIPEWIRE_SYM(pw_context_destroy); + SDL_PIPEWIRE_SYM(pw_context_connect); + SDL_PIPEWIRE_SYM(pw_proxy_add_listener); + SDL_PIPEWIRE_SYM(pw_proxy_add_object_listener); + SDL_PIPEWIRE_SYM(pw_proxy_get_user_data); + SDL_PIPEWIRE_SYM(pw_proxy_destroy); + SDL_PIPEWIRE_SYM(pw_core_disconnect); + SDL_PIPEWIRE_SYM(pw_node_info_merge); + SDL_PIPEWIRE_SYM(pw_node_info_free); + SDL_PIPEWIRE_SYM(pw_stream_new); + SDL_PIPEWIRE_SYM(pw_stream_add_listener); + SDL_PIPEWIRE_SYM(pw_stream_destroy); + SDL_PIPEWIRE_SYM(pw_stream_connect); + SDL_PIPEWIRE_SYM(pw_stream_get_state); + SDL_PIPEWIRE_SYM(pw_stream_dequeue_buffer); + SDL_PIPEWIRE_SYM(pw_stream_queue_buffer); + SDL_PIPEWIRE_SYM(pw_properties_new); + SDL_PIPEWIRE_SYM(pw_properties_new_dict); + SDL_PIPEWIRE_SYM(pw_properties_set); + SDL_PIPEWIRE_SYM(pw_properties_setf); + + return true; +} + +static bool init_pipewire_library(void) +{ + if (load_pipewire_library()) { + if (load_pipewire_syms()) { + PIPEWIRE_pw_init(NULL, NULL); + return true; + } + } + return false; +} + +static void 
deinit_pipewire_library(void) +{ + PIPEWIRE_pw_deinit(); + unload_pipewire_library(); +} + +// The global hotplug thread and associated objects. +static struct +{ + struct pw_thread_loop *loop; + + struct pw_context *context; + + struct pw_core *core; + struct spa_hook core_listener; + int server_major; + int server_minor; + int server_patch; + int last_seq; + int pending_seq; + + struct pw_registry *registry; + struct spa_hook registry_listener; + + struct spa_list global_list; + + bool have_1_0_5; + bool init_complete; + bool events_enabled; +} hotplug; + +struct global +{ + struct spa_list link; + + const struct global_class *class; + + uint32_t id; + uint32_t permissions; + struct pw_properties *props; + + char *name; + + struct pw_proxy *proxy; + struct spa_hook proxy_listener; + struct spa_hook object_listener; + + int changed; + void *info; + struct spa_list pending_list; + struct spa_list param_list; + + bool added; +}; + +struct global_class +{ + const char *type; + uint32_t version; + const void *events; + int (*init) (struct global *g); + void (*destroy) (struct global *g); +}; + +struct param { + uint32_t id; + int32_t seq; + struct spa_list link; + struct spa_pod *param; +}; + +static uint32_t param_clear(struct spa_list *param_list, uint32_t id) +{ + struct param *p, *t; + uint32_t count = 0; + + spa_list_for_each_safe(p, t, param_list, link) { + if (id == SPA_ID_INVALID || p->id == id) { + spa_list_remove(&p->link); + free(p); // This should NOT be SDL_free() + count++; + } + } + return count; +} + +#if PW_CHECK_VERSION(0,3,60) +#define SPA_PARAMS_INFO_SEQ(p) ((p).seq) +#else +#define SPA_PARAMS_INFO_SEQ(p) ((p).padding[0]) +#endif + +static struct param *param_add(struct spa_list *params, + int seq, uint32_t id, const struct spa_pod *param) +{ + struct param *p; + + if (id == SPA_ID_INVALID) { + if (param == NULL || !spa_pod_is_object(param)) { + errno = EINVAL; + return NULL; + } + id = SPA_POD_OBJECT_ID(param); + } + + p = malloc(sizeof(*p) + (param != NULL ? 
SPA_POD_SIZE(param) : 0)); + if (p == NULL) + return NULL; + + p->id = id; + p->seq = seq; + if (param != NULL) { + p->param = SPA_PTROFF(p, sizeof(*p), struct spa_pod); + SDL_memcpy(p->param, param, SPA_POD_SIZE(param)); + } else { + param_clear(params, id); + p->param = NULL; + } + spa_list_append(params, &p->link); + + return p; +} + +static void param_update(struct spa_list *param_list, struct spa_list *pending_list, + uint32_t n_params, struct spa_param_info *params) +{ + struct param *p, *t; + uint32_t i; + + for (i = 0; i < n_params; i++) { + spa_list_for_each_safe(p, t, pending_list, link) { + if (p->id == params[i].id && + p->seq != SPA_PARAMS_INFO_SEQ(params[i]) && + p->param != NULL) { + spa_list_remove(&p->link); + free(p); // This should NOT be SDL_free() + } + } + } + spa_list_consume(p, pending_list, link) { + spa_list_remove(&p->link); + if (p->param == NULL) { + param_clear(param_list, p->id); + free(p); // This should NOT be SDL_free() + } else { + spa_list_append(param_list, &p->link); + } + } +} + +static struct sdl_video_format { + SDL_PixelFormat format; + SDL_Colorspace colorspace; + uint32_t id; +} sdl_video_formats[] = { + { SDL_PIXELFORMAT_RGBX32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_RGBx }, + { SDL_PIXELFORMAT_XRGB32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_xRGB }, + { SDL_PIXELFORMAT_BGRX32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_BGRx }, + { SDL_PIXELFORMAT_XBGR32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_xBGR }, + { SDL_PIXELFORMAT_RGBA32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_RGBA }, + { SDL_PIXELFORMAT_ARGB32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_ARGB }, + { SDL_PIXELFORMAT_BGRA32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_BGRA }, + { SDL_PIXELFORMAT_ABGR32, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_ABGR }, + { SDL_PIXELFORMAT_RGB24, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_RGB }, + { SDL_PIXELFORMAT_BGR24, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_BGR }, + { SDL_PIXELFORMAT_YV12, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_YV12 }, + { SDL_PIXELFORMAT_IYUV, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_I420 }, + { SDL_PIXELFORMAT_YUY2, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_YUY2 }, + { SDL_PIXELFORMAT_UYVY, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_UYVY }, + { SDL_PIXELFORMAT_YVYU, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_YVYU }, + { SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_NV12 }, + { SDL_PIXELFORMAT_NV21, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_NV21 } +}; + +static uint32_t sdl_format_to_id(SDL_PixelFormat format) +{ + struct sdl_video_format *f; + SPA_FOR_EACH_ELEMENT(sdl_video_formats, f) { + if (f->format == format) + return f->id; + } + return SPA_VIDEO_FORMAT_UNKNOWN; +} + +static void id_to_sdl_format(uint32_t id, SDL_PixelFormat *format, SDL_Colorspace *colorspace) +{ + struct sdl_video_format *f; + SPA_FOR_EACH_ELEMENT(sdl_video_formats, f) { + if (f->id == id) { + *format = f->format; + *colorspace = f->colorspace; + return; + } + } + *format = SDL_PIXELFORMAT_UNKNOWN; + *colorspace = SDL_COLORSPACE_UNKNOWN; +} + +struct SDL_PrivateCameraData +{ + struct pw_stream *stream; + struct spa_hook stream_listener; + + struct pw_array buffers; +}; + +static void on_process(void *data) +{ + PIPEWIRE_pw_thread_loop_signal(hotplug.loop, false); +} + +static void on_stream_state_changed(void *data, enum pw_stream_state old, + enum pw_stream_state state, const char *error) +{ + SDL_Camera *device = data; + switch (state) { + case PW_STREAM_STATE_UNCONNECTED: + break; + case PW_STREAM_STATE_STREAMING: + 
SDL_CameraPermissionOutcome(device, true); + break; + default: + break; + } +} + +static void on_stream_param_changed(void *data, uint32_t id, const struct spa_pod *param) +{ +} + +static void on_add_buffer(void *data, struct pw_buffer *buffer) +{ + SDL_Camera *device = data; + pw_array_add_ptr(&device->hidden->buffers, buffer); +} + +static void on_remove_buffer(void *data, struct pw_buffer *buffer) +{ + SDL_Camera *device = data; + struct pw_buffer **p; + pw_array_for_each(p, &device->hidden->buffers) { + if (*p == buffer) { + pw_array_remove(&device->hidden->buffers, p); + return; + } + } +} + +static const struct pw_stream_events stream_events = { + .version = PW_VERSION_STREAM_EVENTS, + .add_buffer = on_add_buffer, + .remove_buffer = on_remove_buffer, + .state_changed = on_stream_state_changed, + .param_changed = on_stream_param_changed, + .process = on_process, +}; + +static bool PIPEWIRECAMERA_OpenDevice(SDL_Camera *device, const SDL_CameraSpec *spec) +{ + struct pw_properties *props; + const struct spa_pod *params[3]; + int res, n_params = 0; + uint8_t buffer[1024]; + struct spa_pod_builder b = SPA_POD_BUILDER_INIT(buffer, sizeof(buffer)); + + if (!device) { + return false; + } + device->hidden = (struct SDL_PrivateCameraData *) SDL_calloc(1, sizeof (struct SDL_PrivateCameraData)); + if (device->hidden == NULL) { + return false; + } + pw_array_init(&device->hidden->buffers, 64); + + PIPEWIRE_pw_thread_loop_lock(hotplug.loop); + + props = PIPEWIRE_pw_properties_new(PW_KEY_MEDIA_TYPE, "Video", + PW_KEY_MEDIA_CATEGORY, "Capture", + PW_KEY_MEDIA_ROLE, "Camera", + PW_KEY_TARGET_OBJECT, device->name, + NULL); + if (props == NULL) { + return false; + } + + device->hidden->stream = PIPEWIRE_pw_stream_new(hotplug.core, "SDL PipeWire Camera", props); + if (device->hidden->stream == NULL) { + return false; + } + + PIPEWIRE_pw_stream_add_listener(device->hidden->stream, + &device->hidden->stream_listener, + &stream_events, device); + + if (spec->format == SDL_PIXELFORMAT_MJPG) { + params[n_params++] = spa_pod_builder_add_object(&b, + SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat, + SPA_FORMAT_mediaType, SPA_POD_Id(SPA_MEDIA_TYPE_video), + SPA_FORMAT_mediaSubtype, SPA_POD_Id(SPA_MEDIA_SUBTYPE_mjpg), + SPA_FORMAT_VIDEO_size, SPA_POD_Rectangle(&SPA_RECTANGLE(spec->width, spec->height)), + SPA_FORMAT_VIDEO_framerate, + SPA_POD_Fraction(&SPA_FRACTION(spec->framerate_numerator, spec->framerate_denominator))); + } else { + params[n_params++] = spa_pod_builder_add_object(&b, + SPA_TYPE_OBJECT_Format, SPA_PARAM_EnumFormat, + SPA_FORMAT_mediaType, SPA_POD_Id(SPA_MEDIA_TYPE_video), + SPA_FORMAT_mediaSubtype, SPA_POD_Id(SPA_MEDIA_SUBTYPE_raw), + SPA_FORMAT_VIDEO_format, SPA_POD_Id(sdl_format_to_id(spec->format)), + SPA_FORMAT_VIDEO_size, SPA_POD_Rectangle(&SPA_RECTANGLE(spec->width, spec->height)), + SPA_FORMAT_VIDEO_framerate, + SPA_POD_Fraction(&SPA_FRACTION(spec->framerate_numerator, spec->framerate_denominator))); + } + + if ((res = PIPEWIRE_pw_stream_connect(device->hidden->stream, + PW_DIRECTION_INPUT, + PW_ID_ANY, + PW_STREAM_FLAG_AUTOCONNECT | + PW_STREAM_FLAG_MAP_BUFFERS, + params, n_params)) < 0) { + return false; + } + + PIPEWIRE_pw_thread_loop_unlock(hotplug.loop); + + return true; +} + +static void PIPEWIRECAMERA_CloseDevice(SDL_Camera *device) +{ + if (!device) { + return; + } + + PIPEWIRE_pw_thread_loop_lock(hotplug.loop); + if (device->hidden) { + if (device->hidden->stream) + PIPEWIRE_pw_stream_destroy(device->hidden->stream); + pw_array_clear(&device->hidden->buffers); + 
SDL_free(device->hidden); + device->hidden = NULL; + } + PIPEWIRE_pw_thread_loop_unlock(hotplug.loop); +} + +static bool PIPEWIRECAMERA_WaitDevice(SDL_Camera *device) +{ + PIPEWIRE_pw_thread_loop_lock(hotplug.loop); + PIPEWIRE_pw_thread_loop_wait(hotplug.loop); + PIPEWIRE_pw_thread_loop_unlock(hotplug.loop); + return true; +} + +static SDL_CameraFrameResult PIPEWIRECAMERA_AcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS) +{ + struct pw_buffer *b; + + PIPEWIRE_pw_thread_loop_lock(hotplug.loop); + b = NULL; + while (true) { + struct pw_buffer *t; + if ((t = PIPEWIRE_pw_stream_dequeue_buffer(device->hidden->stream)) == NULL) + break; + if (b) + PIPEWIRE_pw_stream_queue_buffer(device->hidden->stream, b); + b = t; + } + if (b == NULL) { + PIPEWIRE_pw_thread_loop_unlock(hotplug.loop); + return SDL_CAMERA_FRAME_SKIP; + } + +#if PW_CHECK_VERSION(1,0,5) + *timestampNS = hotplug.have_1_0_5 ? b->time : SDL_GetTicksNS(); +#else + *timestampNS = SDL_GetTicksNS(); +#endif + frame->pixels = b->buffer->datas[0].data; + if (frame->format == SDL_PIXELFORMAT_MJPG) { + frame->pitch = b->buffer->datas[0].chunk->size; + } else { + frame->pitch = b->buffer->datas[0].chunk->stride; + } + + PIPEWIRE_pw_thread_loop_unlock(hotplug.loop); + + return SDL_CAMERA_FRAME_READY; +} + +static void PIPEWIRECAMERA_ReleaseFrame(SDL_Camera *device, SDL_Surface *frame) +{ + struct pw_buffer **p; + PIPEWIRE_pw_thread_loop_lock(hotplug.loop); + pw_array_for_each(p, &device->hidden->buffers) { + if ((*p)->buffer->datas[0].data == frame->pixels) { + PIPEWIRE_pw_stream_queue_buffer(device->hidden->stream, (*p)); + break; + } + } + PIPEWIRE_pw_thread_loop_unlock(hotplug.loop); +} + +static void collect_rates(CameraFormatAddData *data, struct param *p, SDL_PixelFormat sdlfmt, SDL_Colorspace colorspace, const struct spa_rectangle *size) +{ + const struct spa_pod_prop *prop; + struct spa_pod * values; + uint32_t i, n_vals, choice; + struct spa_fraction *rates; + + prop = spa_pod_find_prop(p->param, NULL, SPA_FORMAT_VIDEO_framerate); + if (prop == NULL) + return; + + values = spa_pod_get_values(&prop->value, &n_vals, &choice); + if (values->type != SPA_TYPE_Fraction || n_vals == 0) + return; + + rates = SPA_POD_BODY(values); + switch (choice) { + case SPA_CHOICE_None: + n_vals = 1; + SDL_FALLTHROUGH; + case SPA_CHOICE_Enum: + for (i = 0; i < n_vals; i++) { + if (!SDL_AddCameraFormat(data, sdlfmt, colorspace, size->width, size->height, rates[i].num, rates[i].denom)) { + return; // Probably out of memory; we'll go with what we have, if anything. 
+ } + } + break; + default: + SDL_Log("CAMERA: unimplemented choice:%d", choice); + break; + } +} + +static void collect_size(CameraFormatAddData *data, struct param *p, SDL_PixelFormat sdlfmt, SDL_Colorspace colorspace) +{ + const struct spa_pod_prop *prop; + struct spa_pod * values; + uint32_t i, n_vals, choice; + struct spa_rectangle *rectangles; + + prop = spa_pod_find_prop(p->param, NULL, SPA_FORMAT_VIDEO_size); + if (prop == NULL) + return; + + values = spa_pod_get_values(&prop->value, &n_vals, &choice); + if (values->type != SPA_TYPE_Rectangle || n_vals == 0) + return; + + rectangles = SPA_POD_BODY(values); + switch (choice) { + case SPA_CHOICE_None: + n_vals = 1; + SDL_FALLTHROUGH; + case SPA_CHOICE_Enum: + for (i = 0; i < n_vals; i++) { + collect_rates(data, p, sdlfmt, colorspace, &rectangles[i]); + } + break; + default: + SDL_Log("CAMERA: unimplemented choice:%d", choice); + break; + } +} + +static void collect_raw(CameraFormatAddData *data, struct param *p) +{ + const struct spa_pod_prop *prop; + SDL_PixelFormat sdlfmt; + SDL_Colorspace colorspace; + struct spa_pod * values; + uint32_t i, n_vals, choice, *ids; + + prop = spa_pod_find_prop(p->param, NULL, SPA_FORMAT_VIDEO_format); + if (prop == NULL) + return; + + values = spa_pod_get_values(&prop->value, &n_vals, &choice); + if (values->type != SPA_TYPE_Id || n_vals == 0) + return; + + ids = SPA_POD_BODY(values); + switch (choice) { + case SPA_CHOICE_None: + n_vals = 1; + SDL_FALLTHROUGH; + case SPA_CHOICE_Enum: + for (i = 0; i < n_vals; i++) { + id_to_sdl_format(ids[i], &sdlfmt, &colorspace); + if (sdlfmt == SDL_PIXELFORMAT_UNKNOWN) { + continue; + } + collect_size(data, p, sdlfmt, colorspace); + } + break; + default: + SDL_Log("CAMERA: unimplemented choice: %d", choice); + break; + } +} + +static void collect_format(CameraFormatAddData *data, struct param *p) +{ + const struct spa_pod_prop *prop; + struct spa_pod * values; + uint32_t i, n_vals, choice, *ids; + + prop = spa_pod_find_prop(p->param, NULL, SPA_FORMAT_mediaSubtype); + if (prop == NULL) + return; + + values = spa_pod_get_values(&prop->value, &n_vals, &choice); + if (values->type != SPA_TYPE_Id || n_vals == 0) + return; + + ids = SPA_POD_BODY(values); + switch (choice) { + case SPA_CHOICE_None: + n_vals = 1; + SDL_FALLTHROUGH; + case SPA_CHOICE_Enum: + for (i = 0; i < n_vals; i++) { + switch (ids[i]) { + case SPA_MEDIA_SUBTYPE_raw: + collect_raw(data, p); + break; + case SPA_MEDIA_SUBTYPE_mjpg: + collect_size(data, p, SDL_PIXELFORMAT_MJPG, SDL_COLORSPACE_JPEG); + break; + default: + // Unsupported format + break; + } + } + break; + default: + SDL_Log("CAMERA: unimplemented choice: %d", choice); + break; + } +} + +static void add_device(struct global *g) +{ + struct param *p; + CameraFormatAddData data; + + SDL_zero(data); + + spa_list_for_each(p, &g->param_list, link) { + if (p->id != SPA_PARAM_EnumFormat) + continue; + + collect_format(&data, p); + } + if (data.num_specs > 0) { + SDL_AddCamera(g->name, SDL_CAMERA_POSITION_UNKNOWN, + data.num_specs, data.specs, g); + } + SDL_free(data.specs); + + g->added = true; +} + +static void PIPEWIRECAMERA_DetectDevices(void) +{ + struct global *g; + + PIPEWIRE_pw_thread_loop_lock(hotplug.loop); + + // Wait until the initial registry enumeration is complete + while (!hotplug.init_complete) { + PIPEWIRE_pw_thread_loop_wait(hotplug.loop); + } + + spa_list_for_each (g, &hotplug.global_list, link) { + if (!g->added) { + add_device(g); + } + } + + hotplug.events_enabled = true; + + PIPEWIRE_pw_thread_loop_unlock(hotplug.loop); +} + 
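+/* Detection flow, for orientation: hotplug_loop_init() (later in this file)
+ * starts the pw_thread_loop and registers hotplug_registry_global_callback(),
+ * which binds every "Video/Source" node it sees.  node_event_info() and
+ * node_event_param() then request and collect each node's SPA_PARAM_EnumFormat
+ * pods, and hotplug_core_done_callback() promotes them from pending_list to
+ * param_list once the matching core sync completes.  add_device() (above)
+ * finally walks those pods via collect_format() and hands the resulting
+ * SDL_CameraSpec list to SDL_AddCamera().
+ */
+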
+static void PIPEWIRECAMERA_FreeDeviceHandle(SDL_Camera *device) +{ +} + +static void do_resync(void) +{ + hotplug.pending_seq = pw_core_sync(hotplug.core, PW_ID_CORE, 0); +} + +/** node */ +static void node_event_info(void *object, const struct pw_node_info *info) +{ + struct global *g = object; + uint32_t i; + + info = g->info = PIPEWIRE_pw_node_info_merge(g->info, info, g->changed == 0); + if (info == NULL) + return; + + if (info->change_mask & PW_NODE_CHANGE_MASK_PARAMS) { + for (i = 0; i < info->n_params; i++) { + uint32_t id = info->params[i].id; + int res; + + if (info->params[i].user == 0) + continue; + info->params[i].user = 0; + + if (id != SPA_PARAM_EnumFormat) + continue; + + param_add(&g->pending_list, SPA_PARAMS_INFO_SEQ(info->params[i]), id, NULL); + if (!(info->params[i].flags & SPA_PARAM_INFO_READ)) + continue; + + res = pw_node_enum_params((struct pw_node*)g->proxy, + ++SPA_PARAMS_INFO_SEQ(info->params[i]), id, 0, -1, NULL); + if (SPA_RESULT_IS_ASYNC(res)) + SPA_PARAMS_INFO_SEQ(info->params[i]) = res; + + g->changed++; + } + } + do_resync(); +} + +static void node_event_param(void *object, int seq, + uint32_t id, uint32_t index, uint32_t next, + const struct spa_pod *param) +{ + struct global *g = object; + param_add(&g->pending_list, seq, id, param); +} + +static const struct pw_node_events node_events = { + .version = PW_VERSION_NODE_EVENTS, + .info = node_event_info, + .param = node_event_param, +}; + +static void node_destroy(struct global *g) +{ + if (g->info) { + PIPEWIRE_pw_node_info_free(g->info); + g->info = NULL; + } +} + + +static const struct global_class node_class = { + .type = PW_TYPE_INTERFACE_Node, + .version = PW_VERSION_NODE, + .events = &node_events, + .destroy = node_destroy, +}; + +/** proxy */ +static void proxy_removed(void *data) +{ + struct global *g = data; + PIPEWIRE_pw_proxy_destroy(g->proxy); +} + +static void proxy_destroy(void *data) +{ + struct global *g = data; + spa_list_remove(&g->link); + g->proxy = NULL; + if (g->class) { + if (g->class->events) + spa_hook_remove(&g->object_listener); + if (g->class->destroy) + g->class->destroy(g); + } + param_clear(&g->param_list, SPA_ID_INVALID); + param_clear(&g->pending_list, SPA_ID_INVALID); + free(g->name); // This should NOT be SDL_free() +} + +static const struct pw_proxy_events proxy_events = { + .version = PW_VERSION_PROXY_EVENTS, + .removed = proxy_removed, + .destroy = proxy_destroy +}; + +// called with thread_loop lock +static void hotplug_registry_global_callback(void *object, uint32_t id, + uint32_t permissions, const char *type, uint32_t version, + const struct spa_dict *props) +{ + const struct global_class *class = NULL; + struct pw_proxy *proxy; + const char *str, *name = NULL; + + if (spa_streq(type, PW_TYPE_INTERFACE_Node)) { + if (props == NULL) + return; + if (((str = spa_dict_lookup(props, PW_KEY_MEDIA_CLASS)) == NULL) || + (!spa_streq(str, "Video/Source"))) + return; + + if ((name = spa_dict_lookup(props, PW_KEY_NODE_DESCRIPTION)) == NULL && + (name = spa_dict_lookup(props, PW_KEY_NODE_NAME)) == NULL) + name = "unnamed camera"; + + class = &node_class; + } + if (class) { + struct global *g; + + proxy = pw_registry_bind(hotplug.registry, + id, class->type, class->version, + sizeof(struct global)); + + g = PIPEWIRE_pw_proxy_get_user_data(proxy); + g->class = class; + g->id = id; + g->permissions = permissions; + g->props = props ? 
PIPEWIRE_pw_properties_new_dict(props) : NULL; + g->proxy = proxy; + g->name = strdup(name); + spa_list_init(&g->pending_list); + spa_list_init(&g->param_list); + spa_list_append(&hotplug.global_list, &g->link); + + PIPEWIRE_pw_proxy_add_listener(proxy, + &g->proxy_listener, + &proxy_events, g); + + if (class->events) { + PIPEWIRE_pw_proxy_add_object_listener(proxy, + &g->object_listener, + class->events, g); + } + if (class->init) + class->init(g); + + do_resync(); + } +} + +// called with thread_loop lock +static void hotplug_registry_global_remove_callback(void *object, uint32_t id) +{ +} + +static const struct pw_registry_events hotplug_registry_events = +{ + .version = PW_VERSION_REGISTRY_EVENTS, + .global = hotplug_registry_global_callback, + .global_remove = hotplug_registry_global_remove_callback +}; + +static void parse_version(const char *str, int *major, int *minor, int *patch) +{ + if (SDL_sscanf(str, "%d.%d.%d", major, minor, patch) < 3) { + *major = 0; + *minor = 0; + *patch = 0; + } +} + +// Core info, called with thread_loop lock +static void hotplug_core_info_callback(void *data, const struct pw_core_info *info) +{ + parse_version(info->version, &hotplug.server_major, &hotplug.server_minor, &hotplug.server_patch); +} + +// Core sync points, called with thread_loop lock +static void hotplug_core_done_callback(void *object, uint32_t id, int seq) +{ + hotplug.last_seq = seq; + if (id == PW_ID_CORE && seq == hotplug.pending_seq) { + struct global *g; + struct pw_node_info *info; + + spa_list_for_each(g, &hotplug.global_list, link) { + if (!g->changed) + continue; + + info = g->info; + param_update(&g->param_list, &g->pending_list, info->n_params, info->params); + + if (!g->added && hotplug.events_enabled) { + add_device(g); + } + } + hotplug.init_complete = true; + PIPEWIRE_pw_thread_loop_signal(hotplug.loop, false); + } +} +static const struct pw_core_events hotplug_core_events = +{ + .version = PW_VERSION_CORE_EVENTS, + .info = hotplug_core_info_callback, + .done = hotplug_core_done_callback +}; + +/* When in a container, the library version can differ from the underlying core version, + * so make sure the underlying Pipewire implementation meets the version requirement. 
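+ * The values compared below are the ones the server reported (recorded by
+ * hotplug_core_info_callback()), not the version of the client library we loaded.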
+ */ +static bool pipewire_server_version_at_least(int major, int minor, int patch) +{ + return (hotplug.server_major >= major) && + (hotplug.server_major > major || hotplug.server_minor >= minor) && + (hotplug.server_major > major || hotplug.server_minor > minor || hotplug.server_patch >= patch); +} + +// The hotplug thread +static bool hotplug_loop_init(void) +{ + int res; + + spa_list_init(&hotplug.global_list); + +#if PW_CHECK_VERSION(0, 3, 75) + hotplug.have_1_0_5 = PIPEWIRE_pw_check_library_version(1,0,5); +#else + hotplug.have_1_0_5 = false; +#endif + + hotplug.loop = PIPEWIRE_pw_thread_loop_new("SDLPwCameraPlug", NULL); + if (!hotplug.loop) { + return SDL_SetError("Pipewire: Failed to create hotplug detection loop (%i)", errno); + } + + hotplug.context = PIPEWIRE_pw_context_new(PIPEWIRE_pw_thread_loop_get_loop(hotplug.loop), NULL, 0); + if (!hotplug.context) { + return SDL_SetError("Pipewire: Failed to create hotplug detection context (%i)", errno); + } + + hotplug.core = PIPEWIRE_pw_context_connect(hotplug.context, NULL, 0); + if (!hotplug.core) { + return SDL_SetError("Pipewire: Failed to connect hotplug detection context (%i)", errno); + } + spa_zero(hotplug.core_listener); + pw_core_add_listener(hotplug.core, &hotplug.core_listener, &hotplug_core_events, NULL); + + hotplug.registry = pw_core_get_registry(hotplug.core, PW_VERSION_REGISTRY, 0); + if (!hotplug.registry) { + return SDL_SetError("Pipewire: Failed to acquire hotplug detection registry (%i)", errno); + } + + spa_zero(hotplug.registry_listener); + pw_registry_add_listener(hotplug.registry, &hotplug.registry_listener, &hotplug_registry_events, NULL); + + do_resync(); + + res = PIPEWIRE_pw_thread_loop_start(hotplug.loop); + if (res != 0) { + return SDL_SetError("Pipewire: Failed to start hotplug detection loop"); + } + + PIPEWIRE_pw_thread_loop_lock(hotplug.loop); + while (!hotplug.init_complete) { + PIPEWIRE_pw_thread_loop_wait(hotplug.loop); + } + PIPEWIRE_pw_thread_loop_unlock(hotplug.loop); + + if (!pipewire_server_version_at_least(PW_REQUIRED_MAJOR, PW_REQUIRED_MINOR, PW_REQUIRED_PATCH)) { + return SDL_SetError("Pipewire: server version is too old %d.%d.%d < %d.%d.%d", + hotplug.server_major, hotplug.server_minor, hotplug.server_patch, + PW_REQUIRED_MAJOR, PW_REQUIRED_MINOR, PW_REQUIRED_PATCH); + } + + return true; +} + + +static void PIPEWIRECAMERA_Deinitialize(void) +{ + if (pipewire_initialized) { + if (hotplug.loop) { + PIPEWIRE_pw_thread_loop_lock(hotplug.loop); + } + if (hotplug.registry) { + spa_hook_remove(&hotplug.registry_listener); + PIPEWIRE_pw_proxy_destroy((struct pw_proxy *)hotplug.registry); + } + if (hotplug.core) { + spa_hook_remove(&hotplug.core_listener); + PIPEWIRE_pw_core_disconnect(hotplug.core); + } + if (hotplug.context) { + PIPEWIRE_pw_context_destroy(hotplug.context); + } + if (hotplug.loop) { + PIPEWIRE_pw_thread_loop_unlock(hotplug.loop); + PIPEWIRE_pw_thread_loop_destroy(hotplug.loop); + } + deinit_pipewire_library(); + spa_zero(hotplug); + pipewire_initialized = false; + } +} + +static bool PIPEWIRECAMERA_Init(SDL_CameraDriverImpl *impl) +{ + if (!pipewire_initialized) { + + if (!init_pipewire_library()) { + return false; + } + + pipewire_initialized = true; + + if (!hotplug_loop_init()) { + PIPEWIRECAMERA_Deinitialize(); + return false; + } + } + + impl->DetectDevices = PIPEWIRECAMERA_DetectDevices; + impl->OpenDevice = PIPEWIRECAMERA_OpenDevice; + impl->CloseDevice = PIPEWIRECAMERA_CloseDevice; + impl->WaitDevice = PIPEWIRECAMERA_WaitDevice; + impl->AcquireFrame = 
PIPEWIRECAMERA_AcquireFrame; + impl->ReleaseFrame = PIPEWIRECAMERA_ReleaseFrame; + impl->FreeDeviceHandle = PIPEWIRECAMERA_FreeDeviceHandle; + impl->Deinitialize = PIPEWIRECAMERA_Deinitialize; + + return true; +} + +CameraBootStrap PIPEWIRECAMERA_bootstrap = { + "pipewire", "SDL PipeWire camera driver", PIPEWIRECAMERA_Init, false +}; + +#endif // SDL_CAMERA_DRIVER_PIPEWIRE diff --git a/contrib/SDL-3.2.8/src/camera/v4l2/SDL_camera_v4l2.c b/contrib/SDL-3.2.8/src/camera/v4l2/SDL_camera_v4l2.c new file mode 100644 index 0000000..9cdb54b --- /dev/null +++ b/contrib/SDL-3.2.8/src/camera/v4l2/SDL_camera_v4l2.c @@ -0,0 +1,929 @@ +/* + Simple DirectMedia Layer + Copyright (C) 1997-2025 Sam Lantinga + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. +*/ +#include "SDL_internal.h" + +#ifdef SDL_CAMERA_DRIVER_V4L2 + +#include +#include +#include // low-level i/o +#include +#include +#include +#include +#include +#include + +#ifndef V4L2_CAP_DEVICE_CAPS +// device_caps was added to struct v4l2_capability as of kernel 3.4. +#define device_caps reserved[0] +SDL_COMPILE_TIME_ASSERT(v4l2devicecaps, offsetof(struct v4l2_capability,device_caps) == offsetof(struct v4l2_capability,capabilities) + 4); +#endif + +#include "../SDL_syscamera.h" +#include "../SDL_camera_c.h" +#include "../../video/SDL_pixels_c.h" +#include "../../video/SDL_surface_c.h" +#include "../../thread/SDL_systhread.h" +#include "../../core/linux/SDL_evdev_capabilities.h" +#include "../../core/linux/SDL_udev.h" + +#ifndef SDL_USE_LIBUDEV +#include +#endif + +typedef struct V4L2DeviceHandle +{ + char *bus_info; + char *path; +} V4L2DeviceHandle; + + +typedef enum io_method { + IO_METHOD_INVALID, + IO_METHOD_READ, + IO_METHOD_MMAP, + IO_METHOD_USERPTR +} io_method; + +struct buffer { + void *start; + size_t length; + int available; // Is available in userspace +}; + +struct SDL_PrivateCameraData +{ + int fd; + io_method io; + int nb_buffers; + struct buffer *buffers; + int driver_pitch; +}; + +static int xioctl(int fh, int request, void *arg) +{ + int r; + + do { + r = ioctl(fh, request, arg); + } while ((r == -1) && (errno == EINTR)); + + return r; +} + +static bool V4L2_WaitDevice(SDL_Camera *device) +{ + const int fd = device->hidden->fd; + + int rc; + + do { + fd_set fds; + FD_ZERO(&fds); + FD_SET(fd, &fds); + + struct timeval tv; + tv.tv_sec = 0; + tv.tv_usec = 100 * 1000; + + rc = select(fd + 1, &fds, NULL, NULL, &tv); + if ((rc == -1) && (errno == EINTR)) { + rc = 0; // pretend it was a timeout, keep looping. 
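+            // (select() is given a 100ms timeout above so the shutdown check below still runs when no frames arrive)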
+ } else if (rc > 0) { + return true; + } + + // Thread is requested to shut down + if (SDL_GetAtomicInt(&device->shutdown)) { + return true; + } + + } while (rc == 0); + + return false; +} + +static SDL_CameraFrameResult V4L2_AcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS) +{ + const int fd = device->hidden->fd; + const io_method io = device->hidden->io; + size_t size = device->hidden->buffers[0].length; + struct v4l2_buffer buf; + ssize_t amount; + + switch (io) { + case IO_METHOD_READ: + if ((amount = read(fd, device->hidden->buffers[0].start, size)) == -1) { + switch (errno) { + case EAGAIN: + return SDL_CAMERA_FRAME_SKIP; + + case EIO: + // Could ignore EIO, see spec. + // fall through + + default: + SDL_SetError("read"); + return SDL_CAMERA_FRAME_ERROR; + } + } + + *timestampNS = SDL_GetTicksNS(); // oh well, close enough. + frame->pixels = device->hidden->buffers[0].start; + if (device->hidden->driver_pitch) { + frame->pitch = device->hidden->driver_pitch; + } else { + frame->pitch = (int)amount; + } + break; + + case IO_METHOD_MMAP: + SDL_zero(buf); + + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + + if (xioctl(fd, VIDIOC_DQBUF, &buf) == -1) { + switch (errno) { + case EAGAIN: + return SDL_CAMERA_FRAME_SKIP; + + case EIO: + // Could ignore EIO, see spec. + // fall through + + default: + SDL_SetError("VIDIOC_DQBUF: %d", errno); + return SDL_CAMERA_FRAME_ERROR; + } + } + + if ((int)buf.index < 0 || (int)buf.index >= device->hidden->nb_buffers) { + SDL_SetError("invalid buffer index"); + return SDL_CAMERA_FRAME_ERROR; + } + + frame->pixels = device->hidden->buffers[buf.index].start; + if (device->hidden->driver_pitch) { + frame->pitch = device->hidden->driver_pitch; + } else { + frame->pitch = buf.bytesused; + } + device->hidden->buffers[buf.index].available = 1; + + *timestampNS = (((Uint64) buf.timestamp.tv_sec) * SDL_NS_PER_SECOND) + SDL_US_TO_NS(buf.timestamp.tv_usec); + + #if DEBUG_CAMERA + SDL_Log("CAMERA: debug mmap: image %d/%d data[0]=%p", buf.index, device->hidden->nb_buffers, (void*)frame->pixels); + #endif + break; + + case IO_METHOD_USERPTR: + SDL_zero(buf); + + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_USERPTR; + + if (xioctl(fd, VIDIOC_DQBUF, &buf) == -1) { + switch (errno) { + case EAGAIN: + return SDL_CAMERA_FRAME_SKIP; + + case EIO: + // Could ignore EIO, see spec. 
+ // fall through + + default: + SDL_SetError("VIDIOC_DQBUF"); + return SDL_CAMERA_FRAME_ERROR; + } + } + + int i; + for (i = 0; i < device->hidden->nb_buffers; ++i) { + if (buf.m.userptr == (unsigned long)device->hidden->buffers[i].start && buf.length == size) { + break; + } + } + + if (i >= device->hidden->nb_buffers) { + SDL_SetError("invalid buffer index"); + return SDL_CAMERA_FRAME_ERROR; + } + + frame->pixels = (void*)buf.m.userptr; + if (device->hidden->driver_pitch) { + frame->pitch = device->hidden->driver_pitch; + } else { + frame->pitch = buf.bytesused; + } + device->hidden->buffers[i].available = 1; + + *timestampNS = (((Uint64) buf.timestamp.tv_sec) * SDL_NS_PER_SECOND) + SDL_US_TO_NS(buf.timestamp.tv_usec); + + #if DEBUG_CAMERA + SDL_Log("CAMERA: debug userptr: image %d/%d data[0]=%p", buf.index, device->hidden->nb_buffers, (void*)frame->pixels); + #endif + break; + + case IO_METHOD_INVALID: + SDL_assert(!"Shouldn't have hit this"); + break; + } + + return SDL_CAMERA_FRAME_READY; +} + +static void V4L2_ReleaseFrame(SDL_Camera *device, SDL_Surface *frame) +{ + struct v4l2_buffer buf; + const int fd = device->hidden->fd; + const io_method io = device->hidden->io; + int i; + + for (i = 0; i < device->hidden->nb_buffers; ++i) { + if (frame->pixels == device->hidden->buffers[i].start) { + break; + } + } + + if (i >= device->hidden->nb_buffers) { + return; // oh well, we didn't own this. + } + + switch (io) { + case IO_METHOD_READ: + break; + + case IO_METHOD_MMAP: + SDL_zero(buf); + + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + buf.index = i; + + if (xioctl(fd, VIDIOC_QBUF, &buf) == -1) { + // !!! FIXME: disconnect the device. + return; //SDL_SetError("VIDIOC_QBUF"); + } + device->hidden->buffers[i].available = 0; + break; + + case IO_METHOD_USERPTR: + SDL_zero(buf); + + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_USERPTR; + buf.index = i; + buf.m.userptr = (unsigned long)frame->pixels; + buf.length = (int) device->hidden->buffers[i].length; + + if (xioctl(fd, VIDIOC_QBUF, &buf) == -1) { + // !!! FIXME: disconnect the device. 
+ return; //SDL_SetError("VIDIOC_QBUF"); + } + device->hidden->buffers[i].available = 0; + break; + + case IO_METHOD_INVALID: + SDL_assert(!"Shouldn't have hit this"); + break; + } +} + +static bool EnqueueBuffers(SDL_Camera *device) +{ + const int fd = device->hidden->fd; + const io_method io = device->hidden->io; + switch (io) { + case IO_METHOD_READ: + break; + + case IO_METHOD_MMAP: + for (int i = 0; i < device->hidden->nb_buffers; ++i) { + if (device->hidden->buffers[i].available == 0) { + struct v4l2_buffer buf; + + SDL_zero(buf); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + buf.index = i; + + if (xioctl(fd, VIDIOC_QBUF, &buf) == -1) { + return SDL_SetError("VIDIOC_QBUF"); + } + } + } + break; + + case IO_METHOD_USERPTR: + for (int i = 0; i < device->hidden->nb_buffers; ++i) { + if (device->hidden->buffers[i].available == 0) { + struct v4l2_buffer buf; + + SDL_zero(buf); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_USERPTR; + buf.index = i; + buf.m.userptr = (unsigned long)device->hidden->buffers[i].start; + buf.length = (int) device->hidden->buffers[i].length; + + if (xioctl(fd, VIDIOC_QBUF, &buf) == -1) { + return SDL_SetError("VIDIOC_QBUF"); + } + } + } + break; + + case IO_METHOD_INVALID: SDL_assert(!"Shouldn't have hit this"); break; + } + return true; +} + +static bool AllocBufferRead(SDL_Camera *device, size_t buffer_size) +{ + device->hidden->buffers[0].length = buffer_size; + device->hidden->buffers[0].start = SDL_calloc(1, buffer_size); + return (device->hidden->buffers[0].start != NULL); +} + +static bool AllocBufferMmap(SDL_Camera *device) +{ + const int fd = device->hidden->fd; + int i; + for (i = 0; i < device->hidden->nb_buffers; ++i) { + struct v4l2_buffer buf; + + SDL_zero(buf); + + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + buf.memory = V4L2_MEMORY_MMAP; + buf.index = i; + + if (xioctl(fd, VIDIOC_QUERYBUF, &buf) == -1) { + return SDL_SetError("VIDIOC_QUERYBUF"); + } + + device->hidden->buffers[i].length = buf.length; + device->hidden->buffers[i].start = + mmap(NULL /* start anywhere */, + buf.length, + PROT_READ | PROT_WRITE /* required */, + MAP_SHARED /* recommended */, + fd, buf.m.offset); + + if (MAP_FAILED == device->hidden->buffers[i].start) { + return SDL_SetError("mmap"); + } + } + return true; +} + +static bool AllocBufferUserPtr(SDL_Camera *device, size_t buffer_size) +{ + int i; + for (i = 0; i < device->hidden->nb_buffers; ++i) { + device->hidden->buffers[i].length = buffer_size; + device->hidden->buffers[i].start = SDL_calloc(1, buffer_size); + + if (!device->hidden->buffers[i].start) { + return false; + } + } + return true; +} + +static void format_v4l2_to_sdl(Uint32 fmt, SDL_PixelFormat *format, SDL_Colorspace *colorspace) +{ + switch (fmt) { + #define CASE(x, y, z) case x: *format = y; *colorspace = z; return + CASE(V4L2_PIX_FMT_YUYV, SDL_PIXELFORMAT_YUY2, SDL_COLORSPACE_BT709_LIMITED); + CASE(V4L2_PIX_FMT_MJPEG, SDL_PIXELFORMAT_MJPG, SDL_COLORSPACE_SRGB); + #undef CASE + default: + #if DEBUG_CAMERA + SDL_Log("CAMERA: Unknown format V4L2_PIX_FORMAT '%c%c%c%c' (0x%x)", + (char)(Uint8)(fmt >> 0), + (char)(Uint8)(fmt >> 8), + (char)(Uint8)(fmt >> 16), + (char)(Uint8)(fmt >> 24), fmt); + #endif + break; + } + *format = SDL_PIXELFORMAT_UNKNOWN; + *colorspace = SDL_COLORSPACE_UNKNOWN; +} + +static Uint32 format_sdl_to_v4l2(SDL_PixelFormat fmt) +{ + switch (fmt) { + #define CASE(y, x) case x: return y + CASE(V4L2_PIX_FMT_YUYV, SDL_PIXELFORMAT_YUY2); + CASE(V4L2_PIX_FMT_MJPEG, SDL_PIXELFORMAT_MJPG); + 
#undef CASE + default: + return 0; + } +} + +static void V4L2_CloseDevice(SDL_Camera *device) +{ + if (!device) { + return; + } + + if (device->hidden) { + const io_method io = device->hidden->io; + const int fd = device->hidden->fd; + + if ((io == IO_METHOD_MMAP) || (io == IO_METHOD_USERPTR)) { + enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + xioctl(fd, VIDIOC_STREAMOFF, &type); + } + + if (device->hidden->buffers) { + switch (io) { + case IO_METHOD_INVALID: + break; + + case IO_METHOD_READ: + SDL_free(device->hidden->buffers[0].start); + break; + + case IO_METHOD_MMAP: + for (int i = 0; i < device->hidden->nb_buffers; ++i) { + if (munmap(device->hidden->buffers[i].start, device->hidden->buffers[i].length) == -1) { + SDL_SetError("munmap"); + } + } + break; + + case IO_METHOD_USERPTR: + for (int i = 0; i < device->hidden->nb_buffers; ++i) { + SDL_free(device->hidden->buffers[i].start); + } + break; + } + + SDL_free(device->hidden->buffers); + } + + if (fd != -1) { + close(fd); + } + SDL_free(device->hidden); + + device->hidden = NULL; + } +} + +static bool V4L2_OpenDevice(SDL_Camera *device, const SDL_CameraSpec *spec) +{ + const V4L2DeviceHandle *handle = (const V4L2DeviceHandle *) device->handle; + struct stat st; + struct v4l2_capability cap; + const int fd = open(handle->path, O_RDWR /* required */ | O_NONBLOCK, 0); + + // most of this probably shouldn't fail unless the filesystem node changed out from under us since MaybeAddDevice(). + if (fd == -1) { + return SDL_SetError("Cannot open '%s': %d, %s", handle->path, errno, strerror(errno)); + } else if (fstat(fd, &st) == -1) { + close(fd); + return SDL_SetError("Cannot identify '%s': %d, %s", handle->path, errno, strerror(errno)); + } else if (!S_ISCHR(st.st_mode)) { + close(fd); + return SDL_SetError("%s is not a character device", handle->path); + } else if (xioctl(fd, VIDIOC_QUERYCAP, &cap) == -1) { + const int err = errno; + close(fd); + if (err == EINVAL) { + return SDL_SetError("%s is unexpectedly not a V4L2 device", handle->path); + } + return SDL_SetError("Error VIDIOC_QUERYCAP errno=%d device%s is no V4L2 device", err, handle->path); + } else if ((cap.device_caps & V4L2_CAP_VIDEO_CAPTURE) == 0) { + close(fd); + return SDL_SetError("%s is unexpectedly not a video capture device", handle->path); + } + + device->hidden = (struct SDL_PrivateCameraData *) SDL_calloc(1, sizeof (struct SDL_PrivateCameraData)); + if (device->hidden == NULL) { + close(fd); + return false; + } + + device->hidden->fd = fd; + device->hidden->io = IO_METHOD_INVALID; + + // Select video input, video standard and tune here. + // errors in the crop code are not fatal. 
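+    // Reset cropping to the driver's default rectangle so the whole sensor frame is captured.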
+    struct v4l2_cropcap cropcap;
+    SDL_zero(cropcap);
+    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (xioctl(fd, VIDIOC_CROPCAP, &cropcap) == 0) {
+        struct v4l2_crop crop;
+        SDL_zero(crop);
+        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        crop.c = cropcap.defrect; // reset to default
+        xioctl(fd, VIDIOC_S_CROP, &crop);
+    }
+
+    struct v4l2_format fmt;
+    SDL_zero(fmt);
+    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    fmt.fmt.pix.width = spec->width;
+    fmt.fmt.pix.height = spec->height;
+    fmt.fmt.pix.pixelformat = format_sdl_to_v4l2(spec->format);
+    //fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
+    fmt.fmt.pix.field = V4L2_FIELD_ANY;
+
+    #if DEBUG_CAMERA
+    SDL_Log("CAMERA: set SDL format %s", SDL_GetPixelFormatName(spec->format));
+    { const Uint32 f = fmt.fmt.pix.pixelformat; SDL_Log("CAMERA: set format V4L2_format=%d %c%c%c%c", f, (f >> 0) & 0xff, (f >> 8) & 0xff, (f >> 16) & 0xff, (f >> 24) & 0xff); }
+    #endif
+
+    if (xioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {
+        return SDL_SetError("Error VIDIOC_S_FMT");
+    }
+
+    if (spec->framerate_numerator && spec->framerate_denominator) {
+        struct v4l2_streamparm setfps;
+        SDL_zero(setfps);
+        setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        if (xioctl(fd, VIDIOC_G_PARM, &setfps) == 0) {
+            if ( (setfps.parm.capture.timeperframe.denominator != spec->framerate_numerator) ||
+                 (setfps.parm.capture.timeperframe.numerator != spec->framerate_denominator) ) {
+                setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+                setfps.parm.capture.timeperframe.numerator = spec->framerate_denominator;
+                setfps.parm.capture.timeperframe.denominator = spec->framerate_numerator;
+                if (xioctl(fd, VIDIOC_S_PARM, &setfps) == -1) {
+                    return SDL_SetError("Error VIDIOC_S_PARM");
+                }
+            }
+        }
+    }
+
+    SDL_zero(fmt);
+    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (xioctl(fd, VIDIOC_G_FMT, &fmt) == -1) {
+        return SDL_SetError("Error VIDIOC_G_FMT");
+    }
+    device->hidden->driver_pitch = fmt.fmt.pix.bytesperline;
+
+    io_method io = IO_METHOD_INVALID;
+    if ((io == IO_METHOD_INVALID) && (cap.device_caps & V4L2_CAP_STREAMING)) {
+        struct v4l2_requestbuffers req;
+        SDL_zero(req);
+        req.count = 8;
+        req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        req.memory = V4L2_MEMORY_MMAP;
+        if ((xioctl(fd, VIDIOC_REQBUFS, &req) == 0) && (req.count >= 2)) {
+            io = IO_METHOD_MMAP;
+            device->hidden->nb_buffers = req.count;
+        } else { // mmap didn't work out? Try USERPTR.
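+            // Fall back to user-pointer buffers; AllocBufferUserPtr() will SDL_calloc() the memory for them.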
+ SDL_zero(req); + req.count = 8; + req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + req.memory = V4L2_MEMORY_USERPTR; + if (xioctl(fd, VIDIOC_REQBUFS, &req) == 0) { + io = IO_METHOD_USERPTR; + device->hidden->nb_buffers = 8; + } + } + } + + if ((io == IO_METHOD_INVALID) && (cap.device_caps & V4L2_CAP_READWRITE)) { + io = IO_METHOD_READ; + device->hidden->nb_buffers = 1; + } + + if (io == IO_METHOD_INVALID) { + return SDL_SetError("Don't have a way to talk to this device"); + } + + device->hidden->io = io; + + device->hidden->buffers = SDL_calloc(device->hidden->nb_buffers, sizeof(*device->hidden->buffers)); + if (!device->hidden->buffers) { + return false; + } + + size_t size, pitch; + if (!SDL_CalculateSurfaceSize(device->spec.format, device->spec.width, device->spec.height, &size, &pitch, false)) { + return false; + } + + bool rc = true; + switch (io) { + case IO_METHOD_READ: + rc = AllocBufferRead(device, size); + break; + + case IO_METHOD_MMAP: + rc = AllocBufferMmap(device); + break; + + case IO_METHOD_USERPTR: + rc = AllocBufferUserPtr(device, size); + break; + + case IO_METHOD_INVALID: + SDL_assert(!"Shouldn't have hit this"); + break; + } + + if (!rc) { + return false; + } else if (!EnqueueBuffers(device)) { + return false; + } else if (io != IO_METHOD_READ) { + enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (xioctl(fd, VIDIOC_STREAMON, &type) == -1) { + return SDL_SetError("VIDIOC_STREAMON"); + } + } + + // Currently there is no user permission prompt for camera access, but maybe there will be a D-Bus portal interface at some point. + SDL_CameraPermissionOutcome(device, true); + + return true; +} + +static bool FindV4L2CameraByBusInfoCallback(SDL_Camera *device, void *userdata) +{ + const V4L2DeviceHandle *handle = (const V4L2DeviceHandle *) device->handle; + return (SDL_strcmp(handle->bus_info, (const char *) userdata) == 0); +} + +static bool AddCameraFormat(const int fd, CameraFormatAddData *data, SDL_PixelFormat sdlfmt, SDL_Colorspace colorspace, Uint32 v4l2fmt, int w, int h) +{ + struct v4l2_frmivalenum frmivalenum; + SDL_zero(frmivalenum); + frmivalenum.pixel_format = v4l2fmt; + frmivalenum.width = (Uint32) w; + frmivalenum.height = (Uint32) h; + + while (ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmivalenum) == 0) { + if (frmivalenum.type == V4L2_FRMIVAL_TYPE_DISCRETE) { + const int numerator = (int) frmivalenum.discrete.numerator; + const int denominator = (int) frmivalenum.discrete.denominator; + #if DEBUG_CAMERA + const float fps = (float) denominator / (float) numerator; + SDL_Log("CAMERA: * Has discrete frame interval (%d / %d), fps=%f", numerator, denominator, fps); + #endif + if (!SDL_AddCameraFormat(data, sdlfmt, colorspace, w, h, denominator, numerator)) { + return false; // Probably out of memory; we'll go with what we have, if anything. + } + frmivalenum.index++; // set up for the next one. + } else if ((frmivalenum.type == V4L2_FRMIVAL_TYPE_STEPWISE) || (frmivalenum.type == V4L2_FRMIVAL_TYPE_CONTINUOUS)) { + int d = frmivalenum.stepwise.min.denominator; + // !!! FIXME: should we step by the numerator...? + for (int n = (int) frmivalenum.stepwise.min.numerator; n <= (int) frmivalenum.stepwise.max.numerator; n += (int) frmivalenum.stepwise.step.numerator) { + #if DEBUG_CAMERA + const float fps = (float) d / (float) n; + SDL_Log("CAMERA: * Has %s frame interval (%d / %d), fps=%f", (frmivalenum.type == V4L2_FRMIVAL_TYPE_STEPWISE) ? 
"stepwise" : "continuous", n, d, fps); + #endif + // SDL expects framerate, V4L2 provides interval + if (!SDL_AddCameraFormat(data, sdlfmt, colorspace, w, h, d, n)) { + return false; // Probably out of memory; we'll go with what we have, if anything. + } + d += (int) frmivalenum.stepwise.step.denominator; + } + break; + } + } + + return true; +} + + +static void MaybeAddDevice(const char *path) +{ + if (!path) { + return; + } + + struct stat st; + const int fd = open(path, O_RDWR /* required */ | O_NONBLOCK, 0); + if (fd == -1) { + return; // can't open it? skip it. + } else if (fstat(fd, &st) == -1) { + close(fd); + return; // can't stat it? skip it. + } else if (!S_ISCHR(st.st_mode)) { + close(fd); + return; // not a character device. + } + + struct v4l2_capability vcap; + const int rc = ioctl(fd, VIDIOC_QUERYCAP, &vcap); + if (rc != 0) { + close(fd); + return; // probably not a v4l2 device at all. + } else if ((vcap.device_caps & V4L2_CAP_VIDEO_CAPTURE) == 0) { + close(fd); + return; // not a video capture device. + } else if (SDL_FindPhysicalCameraByCallback(FindV4L2CameraByBusInfoCallback, vcap.bus_info)) { + close(fd); + return; // already have it. + } + + #if DEBUG_CAMERA + SDL_Log("CAMERA: V4L2 camera path='%s' bus_info='%s' name='%s'", path, (const char *) vcap.bus_info, vcap.card); + #endif + + CameraFormatAddData add_data; + SDL_zero(add_data); + + struct v4l2_fmtdesc fmtdesc; + SDL_zero(fmtdesc); + fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + while (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == 0) { + SDL_PixelFormat sdlfmt = SDL_PIXELFORMAT_UNKNOWN; + SDL_Colorspace colorspace = SDL_COLORSPACE_UNKNOWN; + format_v4l2_to_sdl(fmtdesc.pixelformat, &sdlfmt, &colorspace); + + #if DEBUG_CAMERA + SDL_Log("CAMERA: - Has format '%s'%s%s", SDL_GetPixelFormatName(sdlfmt), + (fmtdesc.flags & V4L2_FMT_FLAG_EMULATED) ? " [EMULATED]" : "", + (fmtdesc.flags & V4L2_FMT_FLAG_COMPRESSED) ? " [COMPRESSED]" : ""); + #endif + + fmtdesc.index++; // prepare for next iteration. + + if (sdlfmt == SDL_PIXELFORMAT_UNKNOWN) { + continue; // unsupported by SDL atm. + } + + struct v4l2_frmsizeenum frmsizeenum; + SDL_zero(frmsizeenum); + frmsizeenum.pixel_format = fmtdesc.pixelformat; + + while (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) == 0) { + if (frmsizeenum.type == V4L2_FRMSIZE_TYPE_DISCRETE) { + const int w = (int) frmsizeenum.discrete.width; + const int h = (int) frmsizeenum.discrete.height; + #if DEBUG_CAMERA + SDL_Log("CAMERA: * Has discrete size %dx%d", w, h); + #endif + if (!AddCameraFormat(fd, &add_data, sdlfmt, colorspace, fmtdesc.pixelformat, w, h)) { + break; // Probably out of memory; we'll go with what we have, if anything. + } + frmsizeenum.index++; // set up for the next one. + } else if ((frmsizeenum.type == V4L2_FRMSIZE_TYPE_STEPWISE) || (frmsizeenum.type == V4L2_FRMSIZE_TYPE_CONTINUOUS)) { + const int minw = (int) frmsizeenum.stepwise.min_width; + const int minh = (int) frmsizeenum.stepwise.min_height; + const int maxw = (int) frmsizeenum.stepwise.max_width; + const int maxh = (int) frmsizeenum.stepwise.max_height; + const int stepw = (int) frmsizeenum.stepwise.step_width; + const int steph = (int) frmsizeenum.stepwise.step_height; + for (int w = minw; w <= maxw; w += stepw) { + for (int h = minh; w <= maxh; w += steph) { + #if DEBUG_CAMERA + SDL_Log("CAMERA: * Has %s size %dx%d", (frmsizeenum.type == V4L2_FRMSIZE_TYPE_STEPWISE) ? 
"stepwise" : "continuous", w, h); + #endif + if (!AddCameraFormat(fd, &add_data, sdlfmt, colorspace, fmtdesc.pixelformat, w, h)) { + break; // Probably out of memory; we'll go with what we have, if anything. + } + } + } + break; + } + } + } + + close(fd); + + #if DEBUG_CAMERA + SDL_Log("CAMERA: (total specs: %d)", add_data.num_specs); + #endif + + if (add_data.num_specs > 0) { + V4L2DeviceHandle *handle = (V4L2DeviceHandle *) SDL_calloc(1, sizeof (V4L2DeviceHandle)); + if (handle) { + handle->path = SDL_strdup(path); + if (handle->path) { + handle->bus_info = SDL_strdup((char *)vcap.bus_info); + if (handle->bus_info) { + if (SDL_AddCamera((const char *) vcap.card, SDL_CAMERA_POSITION_UNKNOWN, add_data.num_specs, add_data.specs, handle)) { + SDL_free(add_data.specs); + return; // good to go. + } + SDL_free(handle->bus_info); + } + SDL_free(handle->path); + } + SDL_free(handle); + } + } + SDL_free(add_data.specs); +} + +static void V4L2_FreeDeviceHandle(SDL_Camera *device) +{ + if (device) { + V4L2DeviceHandle *handle = (V4L2DeviceHandle *) device->handle; + SDL_free(handle->path); + SDL_free(handle->bus_info); + SDL_free(handle); + } +} + +#ifdef SDL_USE_LIBUDEV +static bool FindV4L2CameraByPathCallback(SDL_Camera *device, void *userdata) +{ + const V4L2DeviceHandle *handle = (const V4L2DeviceHandle *) device->handle; + return (SDL_strcmp(handle->path, (const char *) userdata) == 0); +} + +static void MaybeRemoveDevice(const char *path) +{ + if (path) { + SDL_CameraDisconnected(SDL_FindPhysicalCameraByCallback(FindV4L2CameraByPathCallback, (void *) path)); + } +} + +static void CameraUdevCallback(SDL_UDEV_deviceevent udev_type, int udev_class, const char *devpath) +{ + if (devpath && (udev_class & SDL_UDEV_DEVICE_VIDEO_CAPTURE)) { + if (udev_type == SDL_UDEV_DEVICEADDED) { + MaybeAddDevice(devpath); + } else if (udev_type == SDL_UDEV_DEVICEREMOVED) { + MaybeRemoveDevice(devpath); + } + } +} +#endif // SDL_USE_LIBUDEV + +static void V4L2_Deinitialize(void) +{ +#ifdef SDL_USE_LIBUDEV + SDL_UDEV_DelCallback(CameraUdevCallback); + SDL_UDEV_Quit(); +#endif // SDL_USE_LIBUDEV +} + +static void V4L2_DetectDevices(void) +{ +#ifdef SDL_USE_LIBUDEV + if (SDL_UDEV_Init()) { + if (SDL_UDEV_AddCallback(CameraUdevCallback)) { + SDL_UDEV_Scan(); // Force a scan to build the initial device list + } + return; + } +#endif // SDL_USE_LIBUDEV + + DIR *dirp = opendir("/dev"); + if (dirp) { + struct dirent *dent; + while ((dent = readdir(dirp)) != NULL) { + int num = 0; + if (SDL_sscanf(dent->d_name, "video%d", &num) == 1) { + char fullpath[64]; + SDL_snprintf(fullpath, sizeof (fullpath), "/dev/video%d", num); + MaybeAddDevice(fullpath); + } + } + closedir(dirp); + } +} + +static bool V4L2_Init(SDL_CameraDriverImpl *impl) +{ + impl->DetectDevices = V4L2_DetectDevices; + impl->OpenDevice = V4L2_OpenDevice; + impl->CloseDevice = V4L2_CloseDevice; + impl->WaitDevice = V4L2_WaitDevice; + impl->AcquireFrame = V4L2_AcquireFrame; + impl->ReleaseFrame = V4L2_ReleaseFrame; + impl->FreeDeviceHandle = V4L2_FreeDeviceHandle; + impl->Deinitialize = V4L2_Deinitialize; + + return true; +} + +CameraBootStrap V4L2_bootstrap = { + "v4l2", "SDL Video4Linux2 camera driver", V4L2_Init, false +}; + +#endif // SDL_CAMERA_DRIVER_V4L2 + diff --git a/contrib/SDL-3.2.8/src/camera/vita/SDL_camera_vita.c b/contrib/SDL-3.2.8/src/camera/vita/SDL_camera_vita.c new file mode 100644 index 0000000..42a5a89 --- /dev/null +++ b/contrib/SDL-3.2.8/src/camera/vita/SDL_camera_vita.c @@ -0,0 +1,258 @@ +/* + Simple DirectMedia Layer + Copyright (C) 
1997-2025 Sam Lantinga + + This software is provided 'as-is', without any express or implied + warranty. In no event will the authors be held liable for any damages + arising from the use of this software. + + Permission is granted to anyone to use this software for any purpose, + including commercial applications, and to alter it and redistribute it + freely, subject to the following restrictions: + + 1. The origin of this software must not be misrepresented; you must not + claim that you wrote the original software. If you use this software + in a product, an acknowledgment in the product documentation would be + appreciated but is not required. + 2. Altered source versions must be plainly marked as such, and must not be + misrepresented as being the original software. + 3. This notice may not be removed or altered from any source distribution. +*/ +#include "SDL_internal.h" + +#ifdef SDL_CAMERA_DRIVER_VITA + +#include "../SDL_syscamera.h" +#include +#include + +static struct { + Sint32 w; + Sint32 h; + Sint32 res; +} resolutions[] = { + {640, 480, SCE_CAMERA_RESOLUTION_640_480}, + {320, 240, SCE_CAMERA_RESOLUTION_320_240}, + {160, 120, SCE_CAMERA_RESOLUTION_160_120}, + {352, 288, SCE_CAMERA_RESOLUTION_352_288}, + {176, 144, SCE_CAMERA_RESOLUTION_176_144}, + {480, 272, SCE_CAMERA_RESOLUTION_480_272}, + {640, 360, SCE_CAMERA_RESOLUTION_640_360}, + {0, 0, 0} +}; + +static Sint32 fps[] = {5, 10, 15, 20, 24, 25, 30, 60, 0}; + +static void GatherCameraSpecs(Sint32 devid, CameraFormatAddData *add_data, char **fullname, SDL_CameraPosition *position) +{ + SDL_zerop(add_data); + + if (devid == SCE_CAMERA_DEVICE_FRONT) { + *position = SDL_CAMERA_POSITION_FRONT_FACING; + *fullname = SDL_strdup("Front-facing camera"); + } else if (devid == SCE_CAMERA_DEVICE_BACK) { + *position = SDL_CAMERA_POSITION_BACK_FACING; + *fullname = SDL_strdup("Back-facing camera"); + } + + if (!*fullname) { + *fullname = SDL_strdup("Generic camera"); + } + + // Note: there are actually more fps and pixelformats. Planar YUV is fastest. Support only YUV and integer fps for now + Sint32 idx = 0; + while (resolutions[idx].res > 0) { + Sint32 fps_idx = 0; + while (fps[fps_idx] > 0) { + SDL_AddCameraFormat(add_data, SDL_PIXELFORMAT_IYUV, SDL_COLORSPACE_BT601_LIMITED, resolutions[idx].w, resolutions[idx].h, fps[fps_idx], 1); /* SCE_CAMERA_FORMAT_ARGB */ + fps_idx++; + } + idx++; + } +} + +static bool FindVitaCameraByID(SDL_Camera *device, void *userdata) +{ + Sint32 devid = (Sint32) userdata; + return (devid == (Sint32)device->handle); +} + +static void MaybeAddDevice(Sint32 devid) +{ + #if DEBUG_CAMERA + SDL_Log("CAMERA: MaybeAddDevice('%d')", devid); + #endif + + if (SDL_FindPhysicalCameraByCallback(FindVitaCameraByID, (void *) devid)) { + return; // already have this one. 
+    }
+
+    SDL_CameraPosition position = SDL_CAMERA_POSITION_UNKNOWN;
+    char *fullname = NULL;
+    CameraFormatAddData add_data;
+    GatherCameraSpecs(devid, &add_data, &fullname, &position);
+
+    if (add_data.num_specs > 0) {
+        SDL_AddCamera(fullname, position, add_data.num_specs, add_data.specs, (void*)devid);
+    }
+
+    SDL_free(fullname);
+    SDL_free(add_data.specs);
+}
+
+static SceUID imbUid = -1;
+
+static void freeBuffers(SceCameraInfo* info)
+{
+    if (imbUid != -1) {
+        sceKernelFreeMemBlock(imbUid);
+        info->pIBase = NULL;
+        imbUid = -1;
+    }
+}
+
+static bool VITACAMERA_OpenDevice(SDL_Camera *device, const SDL_CameraSpec *spec)
+{
+    // we can't open more than one camera, so error out early.
+    if (imbUid != -1) {
+        return SDL_SetError("Only one camera can be active");
+    }
+
+    SceCameraInfo* info = (SceCameraInfo*)SDL_calloc(1, sizeof(SceCameraInfo));
+    if (!info) {
+        return false;  // out of memory; SDL_calloc has already set the error.
+    }
+
+    info->size = sizeof(SceCameraInfo);
+    info->priority = SCE_CAMERA_PRIORITY_SHARE;
+    info->buffer = 0; // target buffer set by sceCameraOpen
+
+    info->framerate = spec->framerate_numerator / spec->framerate_denominator;
+
+    Sint32 idx = 0;
+    while (resolutions[idx].res > 0) {
+        if (spec->width == resolutions[idx].w && spec->height == resolutions[idx].h) {
+            info->resolution = resolutions[idx].res;
+            break;
+        }
+        idx++;
+    }
+
+    info->range = 1;
+    info->format = SCE_CAMERA_FORMAT_YUV420_PLANE;
+    info->pitch = 0; // same size surface
+
+    info->sizeIBase = spec->width * spec->height;
+    info->sizeUBase = ((spec->width + 1) / 2) * ((spec->height + 1) / 2);
+    info->sizeVBase = ((spec->width + 1) / 2) * ((spec->height + 1) / 2);
+
+    // PHYCONT memory size *must* be a multiple of 1MB; we can just always spend 2MB, since we don't use PHYCONT anywhere else.
+    imbUid = sceKernelAllocMemBlock("CameraI", SCE_KERNEL_MEMBLOCK_TYPE_USER_MAIN_PHYCONT_NC_RW, 2*1024*1024, NULL);
+    if (imbUid < 0) {
+        return SDL_SetError("sceKernelAllocMemBlock error: 0x%08X", imbUid);
+    }
+    sceKernelGetMemBlockBase(imbUid, &(info->pIBase));
+
+    info->pUBase = info->pIBase + info->sizeIBase;
+    info->pVBase = info->pIBase + (info->sizeIBase + info->sizeUBase);
+
+    device->hidden = (struct SDL_PrivateCameraData *)info;
+
+    int ret = sceCameraOpen((int)device->handle, info);
+    if (ret == 0) {
+        ret = sceCameraStart((int)device->handle);
+        if (ret == 0) {
+            SDL_CameraPermissionOutcome(device, true);
+            return true;
+        } else {
+            SDL_SetError("sceCameraStart error: 0x%08X", ret);
+        }
+    } else {
+        SDL_SetError("sceCameraOpen error: 0x%08X", ret);
+    }
+
+    freeBuffers(info);
+
+    return false;
+}
+
+static void VITACAMERA_CloseDevice(SDL_Camera *device)
+{
+    if (device->hidden) {
+        sceCameraStop((int)device->handle);
+        sceCameraClose((int)device->handle);
+        freeBuffers((SceCameraInfo*)device->hidden);
+        SDL_free(device->hidden);
+    }
+}
+
+static bool VITACAMERA_WaitDevice(SDL_Camera *device)
+{
+    while (!sceCameraIsActive((int)device->handle)) {}
+    return true;
+}
+
+static SDL_CameraFrameResult VITACAMERA_AcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS)
+{
+    SceCameraRead read = {0};
+    read.size = sizeof(SceCameraRead);
+    read.mode = 1; // don't wait for the next frame
+
+    int ret = sceCameraRead((int)device->handle, &read);
+
+    if (ret < 0) {
+        SDL_SetError("sceCameraRead error: 0x%08X", ret);
+        return SDL_CAMERA_FRAME_ERROR;
+    }
+
+    *timestampNS = read.timestamp;
+
+    SceCameraInfo* info = (SceCameraInfo*)(device->hidden);
+
+    frame->pitch = info->width;
+    frame->pixels = SDL_aligned_alloc(SDL_GetSIMDAlignment(), info->sizeIBase + info->sizeUBase + info->sizeVBase);
+
+    if (frame->pixels) {
+        SDL_memcpy(frame->pixels, info->pIBase, info->sizeIBase + info->sizeUBase + info->sizeVBase);
+        return SDL_CAMERA_FRAME_READY;
+    }
+
+    return SDL_CAMERA_FRAME_ERROR;
+}
+
+static void VITACAMERA_ReleaseFrame(SDL_Camera *device, SDL_Surface *frame)
+{
+    SDL_aligned_free(frame->pixels);
+}
+
+static void VITACAMERA_DetectDevices(void)
+{
+    MaybeAddDevice(SCE_CAMERA_DEVICE_FRONT);
+    MaybeAddDevice(SCE_CAMERA_DEVICE_BACK);
+}
+
+static void VITACAMERA_FreeDeviceHandle(SDL_Camera *device)
+{
+}
+
+static void VITACAMERA_Deinitialize(void)
+{
+}
+
+static bool VITACAMERA_Init(SDL_CameraDriverImpl *impl)
+{
+    impl->DetectDevices = VITACAMERA_DetectDevices;
+    impl->OpenDevice = VITACAMERA_OpenDevice;
+    impl->CloseDevice = VITACAMERA_CloseDevice;
+    impl->WaitDevice = VITACAMERA_WaitDevice;
+    impl->AcquireFrame = VITACAMERA_AcquireFrame;
+    impl->ReleaseFrame = VITACAMERA_ReleaseFrame;
+    impl->FreeDeviceHandle = VITACAMERA_FreeDeviceHandle;
+    impl->Deinitialize = VITACAMERA_Deinitialize;
+
+    return true;
+}
+
+CameraBootStrap VITACAMERA_bootstrap = {
+    "vita", "SDL PSVita camera driver", VITACAMERA_Init, false
+};
+
+#endif // SDL_CAMERA_DRIVER_VITA
--
cgit v1.2.3
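
Editor's note, not part of the patch above: every backend added here sits behind SDL3's public camera API, and the public calls route into the driver hooks each bootstrap registers (OpenDevice, AcquireFrame, ReleaseFrame, and so on). The sketch below shows roughly how an application drives that API; the device choice (first camera), NULL default spec, loop count, and delay are illustrative assumptions, and real code would handle permission denial and errors more carefully.

#include <SDL3/SDL.h>

int main(void)
{
    if (!SDL_Init(SDL_INIT_CAMERA)) {
        SDL_Log("SDL_Init failed: %s", SDL_GetError());
        return 1;
    }

    int count = 0;
    SDL_CameraID *ids = SDL_GetCameras(&count);
    if (!ids || count == 0) {
        SDL_Log("No cameras available");
        SDL_free(ids);
        SDL_Quit();
        return 1;
    }

    // NULL spec: let SDL pick a reasonable default from the formats the
    // backend reported via SDL_AddCameraFormat.
    SDL_Camera *cam = SDL_OpenCamera(ids[0], NULL);
    SDL_free(ids);
    if (!cam) {
        SDL_Log("SDL_OpenCamera failed: %s", SDL_GetError());
        SDL_Quit();
        return 1;
    }

    // Poll for a while; SDL_AcquireCameraFrame returns NULL until a frame is
    // ready (or while a platform permission prompt is still pending).
    for (int i = 0; i < 100; i++) {
        Uint64 timestampNS = 0;
        SDL_Surface *frame = SDL_AcquireCameraFrame(cam, &timestampNS);
        if (frame) {
            SDL_Log("got %dx%d frame (%s) at %" SDL_PRIu64 " ns",
                    frame->w, frame->h,
                    SDL_GetPixelFormatName(frame->format), timestampNS);
            SDL_ReleaseCameraFrame(cam, frame);
        }
        SDL_Delay(10);
    }

    SDL_CloseCamera(cam);
    SDL_Quit();
    return 0;
}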