| author | 3gg <3gg@shellblade.net> | 2025-12-27 12:03:39 -0800 |
| committer | 3gg <3gg@shellblade.net> | 2025-12-27 12:03:39 -0800 |
| commit | 5a079a2d114f96d4847d1ee305d5b7c16eeec50e (patch) | |
| tree | 8926ab44f168acf787d8e19608857b3af0f82758 | /contrib/SDL-3.2.8/test/testffmpeg.c |
Initial commit
Diffstat (limited to 'contrib/SDL-3.2.8/test/testffmpeg.c')
| -rw-r--r-- | contrib/SDL-3.2.8/test/testffmpeg.c | 1603 |
1 file changed, 1603 insertions, 0 deletions
diff --git a/contrib/SDL-3.2.8/test/testffmpeg.c b/contrib/SDL-3.2.8/test/testffmpeg.c
new file mode 100644
index 0000000..63bc3cb
--- /dev/null
+++ b/contrib/SDL-3.2.8/test/testffmpeg.c
@@ -0,0 +1,1603 @@
| 1 | /* | ||
| 2 | Copyright (C) 1997-2025 Sam Lantinga <slouken@libsdl.org> | ||
| 3 | |||
| 4 | This software is provided 'as-is', without any express or implied | ||
| 5 | warranty. In no event will the authors be held liable for any damages | ||
| 6 | arising from the use of this software. | ||
| 7 | |||
| 8 | Permission is granted to anyone to use this software for any purpose, | ||
| 9 | including commercial applications, and to alter it and redistribute it | ||
| 10 | freely. | ||
| 11 | */ | ||
| 12 | /* Simple program: Display a video with a sprite bouncing around over it | ||
| 13 | * | ||
| 14 | * For a more complete video example, see ffplay.c in the ffmpeg sources. | ||
| 15 | */ | ||
| 16 | |||
| 17 | #include <SDL3/SDL.h> | ||
| 18 | #include <SDL3/SDL_main.h> | ||
| 19 | #include <SDL3/SDL_test.h> | ||
| 20 | |||
| 21 | #include <libavcodec/avcodec.h> | ||
| 22 | #include <libavformat/avformat.h> | ||
| 23 | #include <libavutil/avutil.h> | ||
| 24 | #include <libavutil/mastering_display_metadata.h> | ||
| 25 | #include <libavutil/pixdesc.h> | ||
| 26 | #include <libswscale/swscale.h> | ||
| 27 | |||
| 28 | #ifdef HAVE_EGL | ||
| 29 | #include <SDL3/SDL_egl.h> | ||
| 30 | #include <SDL3/SDL_opengl.h> | ||
| 31 | #include <SDL3/SDL_opengles2.h> | ||
| 32 | |||
| 33 | #include <libavutil/hwcontext_drm.h> | ||
| 34 | |||
| 35 | #ifndef fourcc_code | ||
| 36 | #define fourcc_code(a, b, c, d) ((uint32_t)(a) | ((uint32_t)(b) << 8) | ((uint32_t)(c) << 16) | ((uint32_t)(d) << 24)) | ||
| 37 | #endif | ||
| 38 | #ifndef DRM_FORMAT_R8 | ||
| 39 | #define DRM_FORMAT_R8 fourcc_code('R', '8', ' ', ' ') | ||
| 40 | #endif | ||
| 41 | #ifndef DRM_FORMAT_GR88 | ||
| 42 | #define DRM_FORMAT_GR88 fourcc_code('G', 'R', '8', '8') | ||
| 43 | #endif | ||
| 44 | #endif | ||
| 45 | |||
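| | /* DRM format-modifier helpers matching libdrm's drm_fourcc.h, defined locally so this test builds without libdrm headers. */ | ||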
| 46 | #define DRM_FORMAT_MOD_VENDOR_NONE 0 | ||
| 47 | #define DRM_FORMAT_RESERVED ((1ULL << 56) - 1) | ||
| 48 | |||
| 49 | #define fourcc_mod_get_vendor(modifier) \ | ||
| 50 | (((modifier) >> 56) & 0xff) | ||
| 51 | |||
| 52 | #define fourcc_mod_is_vendor(modifier, vendor) \ | ||
| 53 | (fourcc_mod_get_vendor(modifier) == DRM_FORMAT_MOD_VENDOR_##vendor) | ||
| 54 | |||
| 55 | #define fourcc_mod_code(vendor, val) \ | ||
| 56 | ((((Uint64)DRM_FORMAT_MOD_VENDOR_##vendor) << 56) | ((val) & 0x00ffffffffffffffULL)) | ||
| 57 | |||
| 58 | #define DRM_FORMAT_MOD_INVALID fourcc_mod_code(NONE, DRM_FORMAT_RESERVED) | ||
| 59 | #define DRM_FORMAT_MOD_LINEAR fourcc_mod_code(NONE, 0) | ||
| 60 | |||
| 61 | #ifdef SDL_PLATFORM_APPLE | ||
| 62 | #include <CoreVideo/CoreVideo.h> | ||
| 63 | #endif | ||
| 64 | |||
| 65 | #ifdef SDL_PLATFORM_WIN32 | ||
| 66 | #define COBJMACROS | ||
| 67 | #include <libavutil/hwcontext_d3d11va.h> | ||
| 68 | #endif /* SDL_PLATFORM_WIN32 */ | ||
| 69 | |||
| 70 | #include "testffmpeg_vulkan.h" | ||
| 71 | |||
| 72 | #include "icon.h" | ||
| 73 | |||
| 74 | static SDL_Texture *sprite; | ||
| 75 | static SDL_FRect *positions; | ||
| 76 | static SDL_FRect *velocities; | ||
| 77 | static int sprite_w, sprite_h; | ||
| 78 | static int num_sprites = 0; | ||
| 79 | |||
| 80 | static SDL_Window *window; | ||
| 81 | static SDL_Renderer *renderer; | ||
| 82 | static SDL_AudioStream *audio; | ||
| 83 | static SDL_Texture *video_texture; | ||
| 84 | static Uint64 video_start; | ||
| 85 | static bool software_only; | ||
| 86 | static bool has_eglCreateImage; | ||
| 87 | #ifdef HAVE_EGL | ||
| 88 | static bool has_EGL_EXT_image_dma_buf_import; | ||
| 89 | static bool has_EGL_EXT_image_dma_buf_import_modifiers; | ||
| 90 | static PFNGLACTIVETEXTUREARBPROC glActiveTextureARBFunc; | ||
| 91 | static PFNGLEGLIMAGETARGETTEXTURE2DOESPROC glEGLImageTargetTexture2DOESFunc; | ||
| 92 | #endif | ||
| 93 | #ifdef SDL_PLATFORM_WIN32 | ||
| 94 | static ID3D11Device *d3d11_device; | ||
| 95 | static ID3D11DeviceContext *d3d11_context; | ||
| 96 | static const GUID SDL_IID_ID3D11Resource = { 0xdc8e63f3, 0xd12b, 0x4952, { 0xb4, 0x7b, 0x5e, 0x45, 0x02, 0x6a, 0x86, 0x2d } }; | ||
| 97 | #endif | ||
| 98 | static VulkanVideoContext *vulkan_context; | ||
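| | /* Wraps the swscale conversion context so it can be attached to a texture as a pointer property and released by FreeSwsContextContainer. */ | ||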
| 99 | struct SwsContextContainer | ||
| 100 | { | ||
| 101 | struct SwsContext *context; | ||
| 102 | }; | ||
| 103 | static const char *SWS_CONTEXT_CONTAINER_PROPERTY = "SWS_CONTEXT_CONTAINER"; | ||
| 104 | static bool verbose; | ||
| 105 | |||
| 106 | static bool CreateWindowAndRenderer(SDL_WindowFlags window_flags, const char *driver) | ||
| 107 | { | ||
| 108 | SDL_PropertiesID props; | ||
| 109 | bool useOpenGL = (driver && (SDL_strcmp(driver, "opengl") == 0 || SDL_strcmp(driver, "opengles2") == 0)); | ||
| 110 | bool useEGL = (driver && SDL_strcmp(driver, "opengles2") == 0); | ||
| 111 | bool useVulkan = (driver && SDL_strcmp(driver, "vulkan") == 0); | ||
| 112 | Uint32 flags = SDL_WINDOW_HIDDEN; | ||
| 113 | |||
| 114 | if (useOpenGL) { | ||
| 115 | if (useEGL) { | ||
| 116 | SDL_SetHint(SDL_HINT_VIDEO_FORCE_EGL, "1"); | ||
| 117 | SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_ES); | ||
| 118 | SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 2); | ||
| 119 | SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 0); | ||
| 120 | } else { | ||
| 121 | SDL_SetHint(SDL_HINT_VIDEO_FORCE_EGL, "0"); | ||
| 122 | SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, 0); | ||
| 123 | SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 2); | ||
| 124 | SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1); | ||
| 125 | } | ||
| 126 | SDL_GL_SetAttribute(SDL_GL_RED_SIZE, 5); | ||
| 127 | SDL_GL_SetAttribute(SDL_GL_GREEN_SIZE, 6); | ||
| 128 | SDL_GL_SetAttribute(SDL_GL_BLUE_SIZE, 5); | ||
| 129 | |||
| 130 | flags |= SDL_WINDOW_OPENGL; | ||
| 131 | } | ||
| 132 | if (useVulkan) { | ||
| 133 | flags |= SDL_WINDOW_VULKAN; | ||
| 134 | } | ||
| 135 | |||
| 136 | /* The window will be resized to the video size when it's loaded, in OpenVideoStream() */ | ||
| 137 | window = SDL_CreateWindow("testffmpeg", 1920, 1080, flags); | ||
| 138 | if (!window) { | ||
| 139 | return false; | ||
| 140 | } | ||
| 141 | |||
| 142 | if (useVulkan) { | ||
| 143 | vulkan_context = CreateVulkanVideoContext(window); | ||
| 144 | if (!vulkan_context) { | ||
| 145 | SDL_DestroyWindow(window); | ||
| 146 | window = NULL; | ||
| 147 | return false; | ||
| 148 | } | ||
| 149 | } | ||
| 150 | |||
| 151 | props = SDL_CreateProperties(); | ||
| 152 | SDL_SetStringProperty(props, SDL_PROP_RENDERER_CREATE_NAME_STRING, driver); | ||
| 153 | SDL_SetPointerProperty(props, SDL_PROP_RENDERER_CREATE_WINDOW_POINTER, window); | ||
| 154 | if (useVulkan) { | ||
| 155 | SetupVulkanRenderProperties(vulkan_context, props); | ||
| 156 | } | ||
| 157 | if (SDL_GetBooleanProperty(SDL_GetWindowProperties(window), SDL_PROP_WINDOW_HDR_ENABLED_BOOLEAN, false)) { | ||
| 158 | /* Try to create an HDR capable renderer */ | ||
| 159 | SDL_SetNumberProperty(props, SDL_PROP_RENDERER_CREATE_OUTPUT_COLORSPACE_NUMBER, SDL_COLORSPACE_SRGB_LINEAR); | ||
| 160 | renderer = SDL_CreateRendererWithProperties(props); | ||
| 161 | } | ||
| 162 | if (!renderer) { | ||
| 163 | /* Try again with the sRGB colorspace */ | ||
| 164 | SDL_SetNumberProperty(props, SDL_PROP_RENDERER_CREATE_OUTPUT_COLORSPACE_NUMBER, SDL_COLORSPACE_SRGB); | ||
| 165 | renderer = SDL_CreateRendererWithProperties(props); | ||
| 166 | } | ||
| 167 | SDL_DestroyProperties(props); | ||
| 168 | if (!renderer) { | ||
| 169 | SDL_DestroyWindow(window); | ||
| 170 | window = NULL; | ||
| 171 | return false; | ||
| 172 | } | ||
| 173 | |||
| 174 | SDL_Log("Created renderer %s", SDL_GetRendererName(renderer)); | ||
| 175 | |||
| 176 | #ifdef HAVE_EGL | ||
| 177 | if (useEGL) { | ||
| 178 | const char *egl_extensions = eglQueryString(eglGetCurrentDisplay(), EGL_EXTENSIONS); | ||
| 179 | if (!egl_extensions) { | ||
| 180 | return false; | ||
| 181 | } | ||
| 182 | |||
| 183 | char *extensions = SDL_strdup(egl_extensions); | ||
| 184 | if (!extensions) { | ||
| 185 | return false; | ||
| 186 | } | ||
| 187 | |||
| 188 | char *saveptr, *token; | ||
| 189 | token = SDL_strtok_r(extensions, " ", &saveptr); | ||
| 190 | if (!token) { | ||
| 191 | SDL_free(extensions); | ||
| 192 | return false; | ||
| 193 | } | ||
| 194 | do { | ||
| 195 | if (SDL_strcmp(token, "EGL_EXT_image_dma_buf_import") == 0) { | ||
| 196 | has_EGL_EXT_image_dma_buf_import = true; | ||
| 197 | } else if (SDL_strcmp(token, "EGL_EXT_image_dma_buf_import_modifiers") == 0) { | ||
| 198 | has_EGL_EXT_image_dma_buf_import_modifiers = true; | ||
| 199 | } | ||
| 200 | } while ((token = SDL_strtok_r(NULL, " ", &saveptr)) != NULL); | ||
| 201 | |||
| 202 | SDL_free(extensions); | ||
| 203 | |||
| 204 | if (SDL_GL_ExtensionSupported("GL_OES_EGL_image")) { | ||
| 205 | glEGLImageTargetTexture2DOESFunc = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)eglGetProcAddress("glEGLImageTargetTexture2DOES"); | ||
| 206 | } | ||
| 207 | |||
| 208 | glActiveTextureARBFunc = (PFNGLACTIVETEXTUREARBPROC)SDL_GL_GetProcAddress("glActiveTextureARB"); | ||
| 209 | |||
| 210 | if (has_EGL_EXT_image_dma_buf_import && | ||
| 211 | glEGLImageTargetTexture2DOESFunc && | ||
| 212 | glActiveTextureARBFunc) { | ||
| 213 | has_eglCreateImage = true; | ||
| 214 | } | ||
| 215 | } | ||
| 216 | #endif /* HAVE_EGL */ | ||
| 217 | |||
| 218 | #ifdef SDL_PLATFORM_WIN32 | ||
| 219 | d3d11_device = (ID3D11Device *)SDL_GetPointerProperty(SDL_GetRendererProperties(renderer), SDL_PROP_RENDERER_D3D11_DEVICE_POINTER, NULL); | ||
| 220 | if (d3d11_device) { | ||
| 221 | ID3D11Device_AddRef(d3d11_device); | ||
| 222 | ID3D11Device_GetImmediateContext(d3d11_device, &d3d11_context); | ||
| 223 | } | ||
| 224 | #endif | ||
| 225 | |||
| 226 | return true; | ||
| 227 | } | ||
| 228 | |||
| 229 | static SDL_Texture *CreateTexture(SDL_Renderer *r, unsigned char *data, unsigned int len, int *w, int *h) | ||
| 230 | { | ||
| 231 | SDL_Texture *texture = NULL; | ||
| 232 | SDL_Surface *surface; | ||
| 233 | SDL_IOStream *src = SDL_IOFromConstMem(data, len); | ||
| 234 | if (src) { | ||
| 235 | surface = SDL_LoadBMP_IO(src, true); | ||
| 236 | if (surface) { | ||
| 237 | /* Treat white as transparent */ | ||
| 238 | SDL_SetSurfaceColorKey(surface, true, SDL_MapSurfaceRGB(surface, 255, 255, 255)); | ||
| 239 | |||
| 240 | texture = SDL_CreateTextureFromSurface(r, surface); | ||
| 241 | *w = surface->w; | ||
| 242 | *h = surface->h; | ||
| 243 | SDL_DestroySurface(surface); | ||
| 244 | } | ||
| 245 | } | ||
| 246 | return texture; | ||
| 247 | } | ||
| 248 | |||
| 249 | static void MoveSprite(void) | ||
| 250 | { | ||
| 251 | SDL_Rect viewport; | ||
| 252 | SDL_FRect *position, *velocity; | ||
| 253 | int i; | ||
| 254 | |||
| 255 | SDL_GetRenderViewport(renderer, &viewport); | ||
| 256 | |||
| 257 | for (i = 0; i < num_sprites; ++i) { | ||
| 258 | position = &positions[i]; | ||
| 259 | velocity = &velocities[i]; | ||
| 260 | position->x += velocity->x; | ||
| 261 | if ((position->x < 0) || (position->x >= (viewport.w - sprite_w))) { | ||
| 262 | velocity->x = -velocity->x; | ||
| 263 | position->x += velocity->x; | ||
| 264 | } | ||
| 265 | position->y += velocity->y; | ||
| 266 | if ((position->y < 0) || (position->y >= (viewport.h - sprite_h))) { | ||
| 267 | velocity->y = -velocity->y; | ||
| 268 | position->y += velocity->y; | ||
| 269 | } | ||
| 270 | } | ||
| 271 | |||
| 272 | /* Blit the sprite onto the screen */ | ||
| 273 | for (i = 0; i < num_sprites; ++i) { | ||
| 274 | position = &positions[i]; | ||
| 275 | |||
| 276 | /* Blit the sprite onto the screen */ | ||
| 277 | SDL_RenderTexture(renderer, sprite, NULL, position); | ||
| 278 | } | ||
| 279 | } | ||
| 280 | |||
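| | /* Map FFmpeg system-memory pixel formats to SDL texture formats; SDL_PIXELFORMAT_UNKNOWN means the frame needs a swscale conversion first. */ | ||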
| 281 | static SDL_PixelFormat GetTextureFormat(enum AVPixelFormat format) | ||
| 282 | { | ||
| 283 | switch (format) { | ||
| 284 | case AV_PIX_FMT_RGB8: | ||
| 285 | return SDL_PIXELFORMAT_RGB332; | ||
| 286 | case AV_PIX_FMT_RGB444: | ||
| 287 | return SDL_PIXELFORMAT_XRGB4444; | ||
| 288 | case AV_PIX_FMT_RGB555: | ||
| 289 | return SDL_PIXELFORMAT_XRGB1555; | ||
| 290 | case AV_PIX_FMT_BGR555: | ||
| 291 | return SDL_PIXELFORMAT_XBGR1555; | ||
| 292 | case AV_PIX_FMT_RGB565: | ||
| 293 | return SDL_PIXELFORMAT_RGB565; | ||
| 294 | case AV_PIX_FMT_BGR565: | ||
| 295 | return SDL_PIXELFORMAT_BGR565; | ||
| 296 | case AV_PIX_FMT_RGB24: | ||
| 297 | return SDL_PIXELFORMAT_RGB24; | ||
| 298 | case AV_PIX_FMT_BGR24: | ||
| 299 | return SDL_PIXELFORMAT_BGR24; | ||
| 300 | case AV_PIX_FMT_0RGB32: | ||
| 301 | return SDL_PIXELFORMAT_XRGB8888; | ||
| 302 | case AV_PIX_FMT_0BGR32: | ||
| 303 | return SDL_PIXELFORMAT_XBGR8888; | ||
| 304 | case AV_PIX_FMT_NE(RGB0, 0BGR): | ||
| 305 | return SDL_PIXELFORMAT_RGBX8888; | ||
| 306 | case AV_PIX_FMT_NE(BGR0, 0RGB): | ||
| 307 | return SDL_PIXELFORMAT_BGRX8888; | ||
| 308 | case AV_PIX_FMT_RGB32: | ||
| 309 | return SDL_PIXELFORMAT_ARGB8888; | ||
| 310 | case AV_PIX_FMT_RGB32_1: | ||
| 311 | return SDL_PIXELFORMAT_RGBA8888; | ||
| 312 | case AV_PIX_FMT_BGR32: | ||
| 313 | return SDL_PIXELFORMAT_ABGR8888; | ||
| 314 | case AV_PIX_FMT_BGR32_1: | ||
| 315 | return SDL_PIXELFORMAT_BGRA8888; | ||
| 316 | case AV_PIX_FMT_YUV420P: | ||
| 317 | return SDL_PIXELFORMAT_IYUV; | ||
| 318 | case AV_PIX_FMT_YUYV422: | ||
| 319 | return SDL_PIXELFORMAT_YUY2; | ||
| 320 | case AV_PIX_FMT_UYVY422: | ||
| 321 | return SDL_PIXELFORMAT_UYVY; | ||
| 322 | case AV_PIX_FMT_NV12: | ||
| 323 | return SDL_PIXELFORMAT_NV12; | ||
| 324 | case AV_PIX_FMT_NV21: | ||
| 325 | return SDL_PIXELFORMAT_NV21; | ||
| 326 | case AV_PIX_FMT_P010: | ||
| 327 | return SDL_PIXELFORMAT_P010; | ||
| 328 | default: | ||
| 329 | return SDL_PIXELFORMAT_UNKNOWN; | ||
| 330 | } | ||
| 331 | } | ||
| 332 | |||
| 333 | static bool SupportedPixelFormat(enum AVPixelFormat format) | ||
| 334 | { | ||
| 335 | if (!software_only) { | ||
| 336 | if (has_eglCreateImage && | ||
| 337 | (format == AV_PIX_FMT_VAAPI || format == AV_PIX_FMT_DRM_PRIME)) { | ||
| 338 | return true; | ||
| 339 | } | ||
| 340 | #ifdef SDL_PLATFORM_APPLE | ||
| 341 | if (format == AV_PIX_FMT_VIDEOTOOLBOX) { | ||
| 342 | return true; | ||
| 343 | } | ||
| 344 | #endif | ||
| 345 | #ifdef SDL_PLATFORM_WIN32 | ||
| 346 | if (d3d11_device && format == AV_PIX_FMT_D3D11) { | ||
| 347 | return true; | ||
| 348 | } | ||
| 349 | #endif | ||
| 350 | if (vulkan_context && format == AV_PIX_FMT_VULKAN) { | ||
| 351 | return true; | ||
| 352 | } | ||
| 353 | } | ||
| 354 | |||
| 355 | if (GetTextureFormat(format) != SDL_PIXELFORMAT_UNKNOWN) { | ||
| 356 | return true; | ||
| 357 | } | ||
| 358 | return false; | ||
| 359 | } | ||
| 360 | |||
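| | /* get_format callback for the decoder: pick the first offered format we can handle, either a hardware format we can display or any software format (converted with swscale if necessary). */ | ||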
| 361 | static enum AVPixelFormat GetSupportedPixelFormat(AVCodecContext *s, const enum AVPixelFormat *pix_fmts) | ||
| 362 | { | ||
| 363 | const enum AVPixelFormat *p; | ||
| 364 | |||
| 365 | for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { | ||
| 366 | const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(*p); | ||
| 367 | |||
| 368 | if (!(desc->flags & AV_PIX_FMT_FLAG_HWACCEL)) { | ||
| 369 | /* We support all memory formats using swscale */ | ||
| 370 | break; | ||
| 371 | } | ||
| 372 | |||
| 373 | if (SupportedPixelFormat(*p)) { | ||
| 374 | /* We support this format */ | ||
| 375 | break; | ||
| 376 | } | ||
| 377 | } | ||
| 378 | |||
| 379 | if (*p == AV_PIX_FMT_NONE) { | ||
| 380 | SDL_Log("Couldn't find a supported pixel format:"); | ||
| 381 | for (p = pix_fmts; *p != AV_PIX_FMT_NONE; p++) { | ||
| 382 | SDL_Log(" %s", av_get_pix_fmt_name(*p)); | ||
| 383 | } | ||
| 384 | } | ||
| 385 | |||
| 386 | return *p; | ||
| 387 | } | ||
| 388 | |||
| 389 | static AVCodecContext *OpenVideoStream(AVFormatContext *ic, int stream, const AVCodec *codec) | ||
| 390 | { | ||
| 391 | AVStream *st = ic->streams[stream]; | ||
| 392 | AVCodecParameters *codecpar = st->codecpar; | ||
| 393 | AVCodecContext *context; | ||
| 394 | const AVCodecHWConfig *config; | ||
| 395 | int i; | ||
| 396 | int result; | ||
| 397 | |||
| 398 | SDL_Log("Video stream: %s %dx%d", avcodec_get_name(codec->id), codecpar->width, codecpar->height); | ||
| 399 | |||
| 400 | context = avcodec_alloc_context3(NULL); | ||
| 401 | if (!context) { | ||
| 402 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "avcodec_alloc_context3 failed"); | ||
| 403 | return NULL; | ||
| 404 | } | ||
| 405 | |||
| 406 | result = avcodec_parameters_to_context(context, ic->streams[stream]->codecpar); | ||
| 407 | if (result < 0) { | ||
| 408 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "avcodec_parameters_to_context failed: %s", av_err2str(result)); | ||
| 409 | avcodec_free_context(&context); | ||
| 410 | return NULL; | ||
| 411 | } | ||
| 412 | context->pkt_timebase = ic->streams[stream]->time_base; | ||
| 413 | |||
| 414 | /* Look for supported hardware accelerated configurations */ | ||
| 415 | i = 0; | ||
| 416 | while (!context->hw_device_ctx && | ||
| 417 | (config = avcodec_get_hw_config(codec, i++)) != NULL) { | ||
| 418 | #if 0 | ||
| 419 | SDL_Log("Found %s hardware acceleration with pixel format %s", av_hwdevice_get_type_name(config->device_type), av_get_pix_fmt_name(config->pix_fmt)); | ||
| 420 | #endif | ||
| 421 | |||
| 422 | if (!(config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX) || | ||
| 423 | !SupportedPixelFormat(config->pix_fmt)) { | ||
| 424 | continue; | ||
| 425 | } | ||
| 426 | |||
| 427 | #ifdef SDL_PLATFORM_WIN32 | ||
| 428 | if (d3d11_device && config->device_type == AV_HWDEVICE_TYPE_D3D11VA) { | ||
| 429 | AVD3D11VADeviceContext *device_context; | ||
| 430 | |||
| 431 | context->hw_device_ctx = av_hwdevice_ctx_alloc(config->device_type); | ||
| 432 | |||
| 433 | device_context = (AVD3D11VADeviceContext *)((AVHWDeviceContext *)context->hw_device_ctx->data)->hwctx; | ||
| 434 | device_context->device = d3d11_device; | ||
| 435 | ID3D11Device_AddRef(device_context->device); | ||
| 436 | device_context->device_context = d3d11_context; | ||
| 437 | ID3D11DeviceContext_AddRef(device_context->device_context); | ||
| 438 | |||
| 439 | result = av_hwdevice_ctx_init(context->hw_device_ctx); | ||
| 440 | if (result < 0) { | ||
| 441 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't create %s hardware device context: %s", av_hwdevice_get_type_name(config->device_type), av_err2str(result)); | ||
| 442 | } else { | ||
| 443 | SDL_Log("Using %s hardware acceleration with pixel format %s", av_hwdevice_get_type_name(config->device_type), av_get_pix_fmt_name(config->pix_fmt)); | ||
| 444 | } | ||
| 445 | } else | ||
| 446 | #endif | ||
| 447 | if (vulkan_context && config->device_type == AV_HWDEVICE_TYPE_VULKAN) { | ||
| 448 | AVVulkanDeviceContext *device_context; | ||
| 449 | |||
| 450 | context->hw_device_ctx = av_hwdevice_ctx_alloc(config->device_type); | ||
| 451 | |||
| 452 | device_context = (AVVulkanDeviceContext *)((AVHWDeviceContext *)context->hw_device_ctx->data)->hwctx; | ||
| 453 | SetupVulkanDeviceContextData(vulkan_context, device_context); | ||
| 454 | |||
| 455 | result = av_hwdevice_ctx_init(context->hw_device_ctx); | ||
| 456 | if (result < 0) { | ||
| 457 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't create %s hardware device context: %s", av_hwdevice_get_type_name(config->device_type), av_err2str(result)); | ||
| 458 | } else { | ||
| 459 | SDL_Log("Using %s hardware acceleration with pixel format %s", av_hwdevice_get_type_name(config->device_type), av_get_pix_fmt_name(config->pix_fmt)); | ||
| 460 | } | ||
| 461 | } else { | ||
| 462 | result = av_hwdevice_ctx_create(&context->hw_device_ctx, config->device_type, NULL, NULL, 0); | ||
| 463 | if (result < 0) { | ||
| 464 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't create %s hardware device context: %s", av_hwdevice_get_type_name(config->device_type), av_err2str(result)); | ||
| 465 | } else { | ||
| 466 | SDL_Log("Using %s hardware acceleration with pixel format %s", av_hwdevice_get_type_name(config->device_type), av_get_pix_fmt_name(config->pix_fmt)); | ||
| 467 | } | ||
| 468 | } | ||
| 469 | } | ||
| 470 | |||
| 471 | /* Allow supported hardware accelerated pixel formats */ | ||
| 472 | context->get_format = GetSupportedPixelFormat; | ||
| 473 | |||
| 474 | if (codecpar->codec_id == AV_CODEC_ID_VVC) { | ||
| 475 | context->strict_std_compliance = -2; | ||
| 476 | |||
| 477 | /* Enable threaded decoding, VVC decode is slow */ | ||
| 478 | context->thread_count = SDL_GetNumLogicalCPUCores(); | ||
| 479 | context->thread_type = (FF_THREAD_FRAME | FF_THREAD_SLICE); | ||
| 480 | } | ||
| 481 | |||
| 482 | result = avcodec_open2(context, codec, NULL); | ||
| 483 | if (result < 0) { | ||
| 484 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't open codec %s: %s", avcodec_get_name(context->codec_id), av_err2str(result)); | ||
| 485 | avcodec_free_context(&context); | ||
| 486 | return NULL; | ||
| 487 | } | ||
| 488 | |||
| 489 | SDL_SetWindowSize(window, codecpar->width, codecpar->height); | ||
| 490 | SDL_SetWindowPosition(window, SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED); | ||
| 491 | |||
| 492 | return context; | ||
| 493 | } | ||
| 494 | |||
| 495 | static SDL_Colorspace GetFrameColorspace(AVFrame *frame) | ||
| 496 | { | ||
| 497 | SDL_Colorspace colorspace = SDL_COLORSPACE_SRGB; | ||
| 498 | |||
| 499 | if (frame && frame->colorspace != AVCOL_SPC_RGB) { | ||
| 500 | #ifdef DEBUG_COLORSPACE | ||
| 501 | SDL_Log("Frame colorspace: range: %d, primaries: %d, trc: %d, colorspace: %d, chroma_location: %d", frame->color_range, frame->color_primaries, frame->color_trc, frame->colorspace, frame->chroma_location); | ||
| 502 | #endif | ||
| 503 | colorspace = SDL_DEFINE_COLORSPACE(SDL_COLOR_TYPE_YCBCR, | ||
| 504 | frame->color_range, | ||
| 505 | frame->color_primaries, | ||
| 506 | frame->color_trc, | ||
| 507 | frame->colorspace, | ||
| 508 | frame->chroma_location); | ||
| 509 | } | ||
| 510 | return colorspace; | ||
| 511 | } | ||
| 512 | |||
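| | /* Build texture creation properties from the frame: dimensions, pixel format, colorspace, and HDR white point/headroom when mastering metadata or a PQ transfer indicates content brighter than SDR. */ | ||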
| 513 | static SDL_PropertiesID CreateVideoTextureProperties(AVFrame *frame, SDL_PixelFormat format, int access) | ||
| 514 | { | ||
| 515 | AVFrameSideData *pSideData; | ||
| 516 | SDL_PropertiesID props; | ||
| 517 | int width = frame->width; | ||
| 518 | int height = frame->height; | ||
| 519 | SDL_Colorspace colorspace = GetFrameColorspace(frame); | ||
| 520 | |||
| 521 | /* ITU-R BT.2408-6 recommends using an SDR white point of 203 nits, which is more likely for game content */ | ||
| 522 | static const float k_flSDRWhitePoint = 203.0f; | ||
| 523 | float flMaxLuminance = k_flSDRWhitePoint; | ||
| 524 | |||
| 525 | if (frame->hw_frames_ctx) { | ||
| 526 | AVHWFramesContext *frames = (AVHWFramesContext *)(frame->hw_frames_ctx->data); | ||
| 527 | |||
| 528 | width = frames->width; | ||
| 529 | height = frames->height; | ||
| 530 | if (format == SDL_PIXELFORMAT_UNKNOWN) { | ||
| 531 | format = GetTextureFormat(frames->sw_format); | ||
| 532 | } | ||
| 533 | } else { | ||
| 534 | if (format == SDL_PIXELFORMAT_UNKNOWN) { | ||
| 535 | format = GetTextureFormat(frame->format); | ||
| 536 | } | ||
| 537 | } | ||
| 538 | |||
| 539 | props = SDL_CreateProperties(); | ||
| 540 | SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_COLORSPACE_NUMBER, colorspace); | ||
| 541 | pSideData = av_frame_get_side_data(frame, AV_FRAME_DATA_MASTERING_DISPLAY_METADATA); | ||
| 542 | if (pSideData) { | ||
| 543 | AVMasteringDisplayMetadata *pMasteringDisplayMetadata = (AVMasteringDisplayMetadata *)pSideData->data; | ||
| 544 | flMaxLuminance = (float)pMasteringDisplayMetadata->max_luminance.num / pMasteringDisplayMetadata->max_luminance.den; | ||
| 545 | } else if (SDL_COLORSPACETRANSFER(colorspace) == SDL_TRANSFER_CHARACTERISTICS_PQ) { | ||
| 546 | /* The official definition is 10000, but PQ game content is often mastered for 400 or 1000 nits */ | ||
| 547 | flMaxLuminance = 1000.0f; | ||
| 548 | } | ||
| 549 | if (flMaxLuminance > k_flSDRWhitePoint) { | ||
| 550 | SDL_SetFloatProperty(props, SDL_PROP_TEXTURE_CREATE_SDR_WHITE_POINT_FLOAT, k_flSDRWhitePoint); | ||
| 551 | SDL_SetFloatProperty(props, SDL_PROP_TEXTURE_CREATE_HDR_HEADROOM_FLOAT, flMaxLuminance / k_flSDRWhitePoint); | ||
| 552 | } | ||
| 553 | SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_FORMAT_NUMBER, format); | ||
| 554 | SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_ACCESS_NUMBER, access); | ||
| 555 | SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_WIDTH_NUMBER, width); | ||
| 556 | SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_HEIGHT_NUMBER, height); | ||
| 557 | |||
| 558 | return props; | ||
| 559 | } | ||
| 560 | |||
| 561 | static void SDLCALL FreeSwsContextContainer(void *userdata, void *value) | ||
| 562 | { | ||
| 563 | struct SwsContextContainer *sws_container = (struct SwsContextContainer *)value; | ||
| 564 | if (sws_container->context) { | ||
| 565 | sws_freeContext(sws_container->context); | ||
| 566 | } | ||
| 567 | SDL_free(sws_container); | ||
| 568 | } | ||
| 569 | |||
| 570 | static bool GetTextureForMemoryFrame(AVFrame *frame, SDL_Texture **texture) | ||
| 571 | { | ||
| 572 | int texture_width = 0, texture_height = 0; | ||
| 573 | SDL_PixelFormat texture_format = SDL_PIXELFORMAT_UNKNOWN; | ||
| 574 | SDL_PixelFormat frame_format = GetTextureFormat(frame->format); | ||
| 575 | |||
| 576 | if (*texture) { | ||
| 577 | SDL_PropertiesID props = SDL_GetTextureProperties(*texture); | ||
| 578 | texture_format = (SDL_PixelFormat)SDL_GetNumberProperty(props, SDL_PROP_TEXTURE_FORMAT_NUMBER, SDL_PIXELFORMAT_UNKNOWN); | ||
| 579 | texture_width = (int)SDL_GetNumberProperty(props, SDL_PROP_TEXTURE_WIDTH_NUMBER, 0); | ||
| 580 | texture_height = (int)SDL_GetNumberProperty(props, SDL_PROP_TEXTURE_HEIGHT_NUMBER, 0); | ||
| 581 | } | ||
| 582 | if (!*texture || texture_width != frame->width || texture_height != frame->height || | ||
| 583 | (frame_format != SDL_PIXELFORMAT_UNKNOWN && texture_format != frame_format) || | ||
| 584 | (frame_format == SDL_PIXELFORMAT_UNKNOWN && texture_format != SDL_PIXELFORMAT_ARGB8888)) { | ||
| 585 | if (*texture) { | ||
| 586 | SDL_DestroyTexture(*texture); | ||
| 587 | } | ||
| 588 | |||
| 589 | SDL_PropertiesID props; | ||
| 590 | if (frame_format == SDL_PIXELFORMAT_UNKNOWN) { | ||
| 591 | props = CreateVideoTextureProperties(frame, SDL_PIXELFORMAT_ARGB8888, SDL_TEXTUREACCESS_STREAMING); | ||
| 592 | } else { | ||
| 593 | props = CreateVideoTextureProperties(frame, frame_format, SDL_TEXTUREACCESS_STREAMING); | ||
| 594 | } | ||
| 595 | *texture = SDL_CreateTextureWithProperties(renderer, props); | ||
| 596 | SDL_DestroyProperties(props); | ||
| 597 | if (!*texture) { | ||
| 598 | return false; | ||
| 599 | } | ||
| 600 | |||
| 601 | if (frame_format == SDL_PIXELFORMAT_UNKNOWN || SDL_ISPIXELFORMAT_ALPHA(frame_format)) { | ||
| 602 | SDL_SetTextureBlendMode(*texture, SDL_BLENDMODE_BLEND); | ||
| 603 | } else { | ||
| 604 | SDL_SetTextureBlendMode(*texture, SDL_BLENDMODE_NONE); | ||
| 605 | } | ||
| 606 | SDL_SetTextureScaleMode(*texture, SDL_SCALEMODE_LINEAR); | ||
| 607 | } | ||
| 608 | |||
| 609 | switch (frame_format) { | ||
| 610 | case SDL_PIXELFORMAT_UNKNOWN: | ||
| 611 | { | ||
| 612 | SDL_PropertiesID props = SDL_GetTextureProperties(*texture); | ||
| 613 | struct SwsContextContainer *sws_container = (struct SwsContextContainer *)SDL_GetPointerProperty(props, SWS_CONTEXT_CONTAINER_PROPERTY, NULL); | ||
| 614 | if (!sws_container) { | ||
| 615 | sws_container = (struct SwsContextContainer *)SDL_calloc(1, sizeof(*sws_container)); | ||
| 616 | if (!sws_container) { | ||
| 617 | return false; | ||
| 618 | } | ||
| 619 | SDL_SetPointerPropertyWithCleanup(props, SWS_CONTEXT_CONTAINER_PROPERTY, sws_container, FreeSwsContextContainer, NULL); | ||
| 620 | } | ||
| 621 | sws_container->context = sws_getCachedContext(sws_container->context, frame->width, frame->height, frame->format, frame->width, frame->height, AV_PIX_FMT_BGRA, SWS_POINT, NULL, NULL, NULL); | ||
| 622 | if (sws_container->context) { | ||
| 623 | uint8_t *pixels[4]; | ||
| 624 | int pitch[4]; | ||
| 625 | if (SDL_LockTexture(*texture, NULL, (void **)&pixels[0], &pitch[0])) { | ||
| 626 | sws_scale(sws_container->context, (const uint8_t *const *)frame->data, frame->linesize, 0, frame->height, pixels, pitch); | ||
| 627 | SDL_UnlockTexture(*texture); | ||
| 628 | } | ||
| 629 | } else { | ||
| 630 | SDL_SetError("Can't initialize the conversion context"); | ||
| 631 | return false; | ||
| 632 | } | ||
| 633 | break; | ||
| 634 | } | ||
| 635 | case SDL_PIXELFORMAT_IYUV: | ||
| 636 | if (frame->linesize[0] > 0 && frame->linesize[1] > 0 && frame->linesize[2] > 0) { | ||
| 637 | SDL_UpdateYUVTexture(*texture, NULL, frame->data[0], frame->linesize[0], | ||
| 638 | frame->data[1], frame->linesize[1], | ||
| 639 | frame->data[2], frame->linesize[2]); | ||
| 640 | } else if (frame->linesize[0] < 0 && frame->linesize[1] < 0 && frame->linesize[2] < 0) { | ||
| 641 | SDL_UpdateYUVTexture(*texture, NULL, frame->data[0] + frame->linesize[0] * (frame->height - 1), -frame->linesize[0], | ||
| 642 | frame->data[1] + frame->linesize[1] * (AV_CEIL_RSHIFT(frame->height, 1) - 1), -frame->linesize[1], | ||
| 643 | frame->data[2] + frame->linesize[2] * (AV_CEIL_RSHIFT(frame->height, 1) - 1), -frame->linesize[2]); | ||
| 644 | } | ||
| 645 | break; | ||
| 646 | default: | ||
| 647 | if (frame->linesize[0] < 0) { | ||
| 648 | SDL_UpdateTexture(*texture, NULL, frame->data[0] + frame->linesize[0] * (frame->height - 1), -frame->linesize[0]); | ||
| 649 | } else { | ||
| 650 | SDL_UpdateTexture(*texture, NULL, frame->data[0], frame->linesize[0]); | ||
| 651 | } | ||
| 652 | break; | ||
| 653 | } | ||
| 654 | return true; | ||
| 655 | } | ||
| 656 | |||
| 657 | #ifdef HAVE_EGL | ||
| 658 | |||
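| | /* Import a two-plane (Y + interleaved UV) DRM PRIME frame as an NV12 texture by wrapping each dma-buf plane in an EGLImage and binding it to the texture's GL textures. */ | ||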
| 659 | static bool GetNV12TextureForDRMFrame(AVFrame *frame, SDL_Texture **texture) | ||
| 660 | { | ||
| 661 | AVHWFramesContext *frames = (AVHWFramesContext *)(frame->hw_frames_ctx ? frame->hw_frames_ctx->data : NULL); | ||
| 662 | const AVDRMFrameDescriptor *desc = (const AVDRMFrameDescriptor *)frame->data[0]; | ||
| 663 | int i, j, image_index; | ||
| 664 | EGLDisplay display = eglGetCurrentDisplay(); | ||
| 665 | SDL_PropertiesID props; | ||
| 666 | GLuint textures[2]; | ||
| 667 | |||
| 668 | if (*texture) { | ||
| 669 | /* Free the previous texture now that we're about to render a new one */ | ||
| 670 | SDL_DestroyTexture(*texture); | ||
| 671 | } else { | ||
| 672 | /* First time set up for NV12 textures */ | ||
| 673 | SDL_SetHint("SDL_RENDER_OPENGL_NV12_RG_SHADER", "1"); | ||
| 674 | } | ||
| 675 | |||
| 676 | props = CreateVideoTextureProperties(frame, SDL_PIXELFORMAT_NV12, SDL_TEXTUREACCESS_STATIC); | ||
| 677 | *texture = SDL_CreateTextureWithProperties(renderer, props); | ||
| 678 | SDL_DestroyProperties(props); | ||
| 679 | if (!*texture) { | ||
| 680 | return false; | ||
| 681 | } | ||
| 682 | SDL_SetTextureBlendMode(*texture, SDL_BLENDMODE_NONE); | ||
| 683 | SDL_SetTextureScaleMode(*texture, SDL_SCALEMODE_LINEAR); | ||
| 684 | |||
| 685 | props = SDL_GetTextureProperties(*texture); | ||
| 686 | textures[0] = (GLuint)SDL_GetNumberProperty(props, SDL_PROP_TEXTURE_OPENGLES2_TEXTURE_NUMBER, 0); | ||
| 687 | textures[1] = (GLuint)SDL_GetNumberProperty(props, SDL_PROP_TEXTURE_OPENGLES2_TEXTURE_UV_NUMBER, 0); | ||
| 688 | if (!textures[0] || !textures[1]) { | ||
| 689 | SDL_SetError("Couldn't get NV12 OpenGL textures"); | ||
| 690 | return false; | ||
| 691 | } | ||
| 692 | |||
| 693 | /* import the frame into OpenGL */ | ||
| 694 | image_index = 0; | ||
| 695 | for (i = 0; i < desc->nb_layers; ++i) { | ||
| 696 | const AVDRMLayerDescriptor *layer = &desc->layers[i]; | ||
| 697 | for (j = 0; j < layer->nb_planes; ++j) { | ||
| 698 | const AVDRMPlaneDescriptor *plane = &layer->planes[j]; | ||
| 699 | const AVDRMObjectDescriptor *object = &desc->objects[plane->object_index]; | ||
| 700 | |||
| 701 | EGLAttrib attr[32]; | ||
| 702 | size_t k = 0; | ||
| 703 | |||
| 704 | attr[k++] = EGL_LINUX_DRM_FOURCC_EXT; | ||
| 705 | attr[k++] = layer->format; | ||
| 706 | |||
| 707 | attr[k++] = EGL_WIDTH; | ||
| 708 | attr[k++] = (frames ? frames->width : frame->width) / (image_index + 1); /* half size for chroma */ | ||
| 709 | |||
| 710 | attr[k++] = EGL_HEIGHT; | ||
| 711 | attr[k++] = (frames ? frames->height : frame->height) / (image_index + 1); | ||
| 712 | |||
| 713 | attr[k++] = EGL_DMA_BUF_PLANE0_FD_EXT; | ||
| 714 | attr[k++] = object->fd; | ||
| 715 | |||
| 716 | attr[k++] = EGL_DMA_BUF_PLANE0_OFFSET_EXT; | ||
| 717 | attr[k++] = plane->offset; | ||
| 718 | |||
| 719 | attr[k++] = EGL_DMA_BUF_PLANE0_PITCH_EXT; | ||
| 720 | attr[k++] = plane->pitch; | ||
| 721 | |||
| 722 | if (has_EGL_EXT_image_dma_buf_import_modifiers) { | ||
| 723 | attr[k++] = EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT; | ||
| 724 | attr[k++] = (object->format_modifier >> 0) & 0xFFFFFFFF; | ||
| 725 | |||
| 726 | attr[k++] = EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT; | ||
| 727 | attr[k++] = (object->format_modifier >> 32) & 0xFFFFFFFF; | ||
| 728 | } | ||
| 729 | |||
| 730 | attr[k++] = EGL_NONE; | ||
| 731 | |||
| 732 | EGLImage image = eglCreateImage(display, EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, attr); | ||
| 733 | if (image == EGL_NO_IMAGE) { | ||
| 734 | SDL_Log("Couldn't create image: %d", glGetError()); | ||
| 735 | return false; | ||
| 736 | } | ||
| 737 | |||
| 738 | glActiveTextureARBFunc(GL_TEXTURE0_ARB + image_index); | ||
| 739 | glBindTexture(GL_TEXTURE_2D, textures[image_index]); | ||
| 740 | glEGLImageTargetTexture2DOESFunc(GL_TEXTURE_2D, image); | ||
| 741 | ++image_index; | ||
| 742 | } | ||
| 743 | } | ||
| 744 | |||
| 745 | return true; | ||
| 746 | } | ||
| 747 | |||
| 748 | static bool GetOESTextureForDRMFrame(AVFrame *frame, SDL_Texture **texture) | ||
| 749 | { | ||
| 750 | AVHWFramesContext *frames = (AVHWFramesContext *)(frame->hw_frames_ctx ? frame->hw_frames_ctx->data : NULL); | ||
| 751 | const AVDRMFrameDescriptor *desc = (const AVDRMFrameDescriptor *)frame->data[0]; | ||
| 752 | int i, j, k, image_index; | ||
| 753 | EGLDisplay display = eglGetCurrentDisplay(); | ||
| 754 | SDL_PropertiesID props; | ||
| 755 | GLuint textureID; | ||
| 756 | EGLAttrib attr[64]; | ||
| 757 | SDL_Colorspace colorspace; | ||
| 758 | |||
| 759 | if (*texture) { | ||
| 760 | /* Free the previous texture now that we're about to render a new one */ | ||
| 761 | SDL_DestroyTexture(*texture); | ||
| 762 | } | ||
| 763 | |||
| 764 | props = CreateVideoTextureProperties(frame, SDL_PIXELFORMAT_EXTERNAL_OES, SDL_TEXTUREACCESS_STATIC); | ||
| 765 | *texture = SDL_CreateTextureWithProperties(renderer, props); | ||
| 766 | SDL_DestroyProperties(props); | ||
| 767 | if (!*texture) { | ||
| 768 | return false; | ||
| 769 | } | ||
| 770 | SDL_SetTextureBlendMode(*texture, SDL_BLENDMODE_NONE); | ||
| 771 | SDL_SetTextureScaleMode(*texture, SDL_SCALEMODE_LINEAR); | ||
| 772 | |||
| 773 | props = SDL_GetTextureProperties(*texture); | ||
| 774 | textureID = (GLuint)SDL_GetNumberProperty(props, SDL_PROP_TEXTURE_OPENGLES2_TEXTURE_NUMBER, 0); | ||
| 775 | if (!textureID) { | ||
| 776 | SDL_SetError("Couldn't get OpenGL texture"); | ||
| 777 | return false; | ||
| 778 | } | ||
| 779 | colorspace = (SDL_Colorspace)SDL_GetNumberProperty(props, SDL_PROP_TEXTURE_COLORSPACE_NUMBER, SDL_COLORSPACE_UNKNOWN); | ||
| 780 | |||
| 781 | /* import the frame into OpenGL */ | ||
| 782 | k = 0; | ||
| 783 | attr[k++] = EGL_LINUX_DRM_FOURCC_EXT; | ||
| 784 | attr[k++] = desc->layers[0].format; | ||
| 785 | attr[k++] = EGL_WIDTH; | ||
| 786 | attr[k++] = frames ? frames->width : frame->width; | ||
| 787 | attr[k++] = EGL_HEIGHT; | ||
| 788 | attr[k++] = frames ? frames->height : frame->height; | ||
| 789 | image_index = 0; | ||
| 790 | for (i = 0; i < desc->nb_layers; ++i) { | ||
| 791 | const AVDRMLayerDescriptor *layer = &desc->layers[i]; | ||
| 792 | for (j = 0; j < layer->nb_planes; ++j) { | ||
| 793 | const AVDRMPlaneDescriptor *plane = &layer->planes[j]; | ||
| 794 | const AVDRMObjectDescriptor *object = &desc->objects[plane->object_index]; | ||
| 795 | |||
| 796 | switch (image_index) { | ||
| 797 | case 0: | ||
| 798 | attr[k++] = EGL_DMA_BUF_PLANE0_FD_EXT; | ||
| 799 | attr[k++] = object->fd; | ||
| 800 | attr[k++] = EGL_DMA_BUF_PLANE0_OFFSET_EXT; | ||
| 801 | attr[k++] = plane->offset; | ||
| 802 | attr[k++] = EGL_DMA_BUF_PLANE0_PITCH_EXT; | ||
| 803 | attr[k++] = plane->pitch; | ||
| 804 | if (has_EGL_EXT_image_dma_buf_import_modifiers && object->format_modifier != DRM_FORMAT_MOD_INVALID) { | ||
| 805 | attr[k++] = EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT; | ||
| 806 | attr[k++] = (object->format_modifier & 0xFFFFFFFF); | ||
| 807 | attr[k++] = EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT; | ||
| 808 | attr[k++] = (object->format_modifier >> 32); | ||
| 809 | } | ||
| 810 | break; | ||
| 811 | case 1: | ||
| 812 | attr[k++] = EGL_DMA_BUF_PLANE1_FD_EXT; | ||
| 813 | attr[k++] = object->fd; | ||
| 814 | attr[k++] = EGL_DMA_BUF_PLANE1_OFFSET_EXT; | ||
| 815 | attr[k++] = plane->offset; | ||
| 816 | attr[k++] = EGL_DMA_BUF_PLANE1_PITCH_EXT; | ||
| 817 | attr[k++] = plane->pitch; | ||
| 818 | if (has_EGL_EXT_image_dma_buf_import_modifiers && object->format_modifier != DRM_FORMAT_MOD_INVALID) { | ||
| 819 | attr[k++] = EGL_DMA_BUF_PLANE1_MODIFIER_LO_EXT; | ||
| 820 | attr[k++] = (object->format_modifier & 0xFFFFFFFF); | ||
| 821 | attr[k++] = EGL_DMA_BUF_PLANE1_MODIFIER_HI_EXT; | ||
| 822 | attr[k++] = (object->format_modifier >> 32); | ||
| 823 | } | ||
| 824 | break; | ||
| 825 | case 2: | ||
| 826 | attr[k++] = EGL_DMA_BUF_PLANE2_FD_EXT; | ||
| 827 | attr[k++] = object->fd; | ||
| 828 | attr[k++] = EGL_DMA_BUF_PLANE2_OFFSET_EXT; | ||
| 829 | attr[k++] = plane->offset; | ||
| 830 | attr[k++] = EGL_DMA_BUF_PLANE2_PITCH_EXT; | ||
| 831 | attr[k++] = plane->pitch; | ||
| 832 | if (has_EGL_EXT_image_dma_buf_import_modifiers && object->format_modifier != DRM_FORMAT_MOD_INVALID) { | ||
| 833 | attr[k++] = EGL_DMA_BUF_PLANE2_MODIFIER_LO_EXT; | ||
| 834 | attr[k++] = (object->format_modifier & 0xFFFFFFFF); | ||
| 835 | attr[k++] = EGL_DMA_BUF_PLANE2_MODIFIER_HI_EXT; | ||
| 836 | attr[k++] = (object->format_modifier >> 32); | ||
| 837 | } | ||
| 838 | break; | ||
| 839 | case 3: | ||
| 840 | attr[k++] = EGL_DMA_BUF_PLANE3_FD_EXT; | ||
| 841 | attr[k++] = object->fd; | ||
| 842 | attr[k++] = EGL_DMA_BUF_PLANE3_OFFSET_EXT; | ||
| 843 | attr[k++] = plane->offset; | ||
| 844 | attr[k++] = EGL_DMA_BUF_PLANE3_PITCH_EXT; | ||
| 845 | attr[k++] = plane->pitch; | ||
| 846 | if (has_EGL_EXT_image_dma_buf_import_modifiers && object->format_modifier != DRM_FORMAT_MOD_INVALID) { | ||
| 847 | attr[k++] = EGL_DMA_BUF_PLANE3_MODIFIER_LO_EXT; | ||
| 848 | attr[k++] = (object->format_modifier & 0xFFFFFFFF); | ||
| 849 | attr[k++] = EGL_DMA_BUF_PLANE3_MODIFIER_HI_EXT; | ||
| 850 | attr[k++] = (object->format_modifier >> 32); | ||
| 851 | } | ||
| 852 | break; | ||
| 853 | |||
| 854 | default: | ||
| 855 | break; | ||
| 856 | } | ||
| 857 | ++image_index; | ||
| 858 | } | ||
| 859 | } | ||
| 860 | |||
| 861 | switch (SDL_COLORSPACEPRIMARIES(colorspace)) { | ||
| 862 | case SDL_COLOR_PRIMARIES_BT601: | ||
| 863 | case SDL_COLOR_PRIMARIES_SMPTE240: | ||
| 864 | attr[k++] = EGL_YUV_COLOR_SPACE_HINT_EXT; | ||
| 865 | attr[k++] = EGL_ITU_REC601_EXT; | ||
| 866 | break; | ||
| 867 | case SDL_COLOR_PRIMARIES_BT709: | ||
| 868 | attr[k++] = EGL_YUV_COLOR_SPACE_HINT_EXT; | ||
| 869 | attr[k++] = EGL_ITU_REC709_EXT; | ||
| 870 | break; | ||
| 871 | case SDL_COLOR_PRIMARIES_BT2020: | ||
| 872 | attr[k++] = EGL_YUV_COLOR_SPACE_HINT_EXT; | ||
| 873 | attr[k++] = EGL_ITU_REC2020_EXT; | ||
| 874 | break; | ||
| 875 | default: | ||
| 876 | break; | ||
| 877 | } | ||
| 878 | |||
| 879 | switch (SDL_COLORSPACERANGE(colorspace)) { | ||
| 880 | case SDL_COLOR_RANGE_FULL: | ||
| 881 | attr[k++] = EGL_SAMPLE_RANGE_HINT_EXT; | ||
| 882 | attr[k++] = EGL_YUV_FULL_RANGE_EXT; | ||
| 883 | break; | ||
| 884 | case SDL_COLOR_RANGE_LIMITED: | ||
| 885 | default: | ||
| 886 | attr[k++] = EGL_SAMPLE_RANGE_HINT_EXT; | ||
| 887 | attr[k++] = EGL_YUV_NARROW_RANGE_EXT; | ||
| 888 | break; | ||
| 889 | } | ||
| 890 | |||
| 891 | switch (SDL_COLORSPACECHROMA(colorspace)) { | ||
| 892 | case SDL_CHROMA_LOCATION_LEFT: | ||
| 893 | attr[k++] = EGL_YUV_CHROMA_HORIZONTAL_SITING_HINT_EXT; | ||
| 894 | attr[k++] = EGL_YUV_CHROMA_SITING_0_EXT; | ||
| 895 | attr[k++] = EGL_YUV_CHROMA_VERTICAL_SITING_HINT_EXT; | ||
| 896 | attr[k++] = EGL_YUV_CHROMA_SITING_0_5_EXT; | ||
| 897 | break; | ||
| 898 | case SDL_CHROMA_LOCATION_CENTER: | ||
| 899 | attr[k++] = EGL_YUV_CHROMA_HORIZONTAL_SITING_HINT_EXT; | ||
| 900 | attr[k++] = EGL_YUV_CHROMA_SITING_0_5_EXT; | ||
| 901 | attr[k++] = EGL_YUV_CHROMA_VERTICAL_SITING_HINT_EXT; | ||
| 902 | attr[k++] = EGL_YUV_CHROMA_SITING_0_5_EXT; | ||
| 903 | break; | ||
| 904 | case SDL_CHROMA_LOCATION_TOPLEFT: | ||
| 905 | attr[k++] = EGL_YUV_CHROMA_HORIZONTAL_SITING_HINT_EXT; | ||
| 906 | attr[k++] = EGL_YUV_CHROMA_SITING_0_EXT; | ||
| 907 | attr[k++] = EGL_YUV_CHROMA_VERTICAL_SITING_HINT_EXT; | ||
| 908 | attr[k++] = EGL_YUV_CHROMA_SITING_0_EXT; | ||
| 909 | break; | ||
| 910 | default: | ||
| 911 | break; | ||
| 912 | } | ||
| 913 | |||
| 914 | SDL_assert(k < SDL_arraysize(attr)); | ||
| 915 | attr[k++] = EGL_NONE; | ||
| 916 | |||
| 917 | EGLImage image = eglCreateImage(display, EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, NULL, attr); | ||
| 918 | if (image == EGL_NO_IMAGE) { | ||
| 919 | SDL_Log("Couldn't create image: %d", glGetError()); | ||
| 920 | return false; | ||
| 921 | } | ||
| 922 | |||
| 923 | glActiveTextureARBFunc(GL_TEXTURE0_ARB); | ||
| 924 | glBindTexture(GL_TEXTURE_EXTERNAL_OES, textureID); | ||
| 925 | glEGLImageTargetTexture2DOESFunc(GL_TEXTURE_EXTERNAL_OES, image); | ||
| 926 | return true; | ||
| 927 | } | ||
| 928 | #endif // HAVE_EGL | ||
| 929 | |||
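| | /* A DRM PRIME frame with separate R8 and GR88 layers is NV12; anything else is imported as a single GL_TEXTURE_EXTERNAL_OES image. */ | ||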
| 930 | static bool GetTextureForDRMFrame(AVFrame *frame, SDL_Texture **texture) | ||
| 931 | { | ||
| 932 | #ifdef HAVE_EGL | ||
| 933 | const AVDRMFrameDescriptor *desc = (const AVDRMFrameDescriptor *)frame->data[0]; | ||
| 934 | |||
| 935 | if (desc->nb_layers == 2 && | ||
| 936 | desc->layers[0].format == DRM_FORMAT_R8 && | ||
| 937 | desc->layers[1].format == DRM_FORMAT_GR88) { | ||
| 938 | return GetNV12TextureForDRMFrame(frame, texture); | ||
| 939 | } else { | ||
| 940 | return GetOESTextureForDRMFrame(frame, texture); | ||
| 941 | } | ||
| 942 | #else | ||
| 943 | return false; | ||
| 944 | #endif | ||
| 945 | } | ||
| 946 | |||
| 947 | static bool GetTextureForVAAPIFrame(AVFrame *frame, SDL_Texture **texture) | ||
| 948 | { | ||
| 949 | AVFrame *drm_frame; | ||
| 950 | bool result = false; | ||
| 951 | |||
| 952 | drm_frame = av_frame_alloc(); | ||
| 953 | if (drm_frame) { | ||
| 954 | drm_frame->format = AV_PIX_FMT_DRM_PRIME; | ||
| 955 | if (av_hwframe_map(drm_frame, frame, 0) == 0) { | ||
| 956 | result = GetTextureForDRMFrame(drm_frame, texture); | ||
| 957 | } else { | ||
| 958 | SDL_SetError("Couldn't map hardware frame"); | ||
| 959 | } | ||
| 960 | av_frame_free(&drm_frame); | ||
| 961 | } | ||
| 962 | return result; | ||
| 963 | } | ||
| 964 | |||
| 965 | static bool GetTextureForD3D11Frame(AVFrame *frame, SDL_Texture **texture) | ||
| 966 | { | ||
| 967 | #ifdef SDL_PLATFORM_WIN32 | ||
| 968 | AVHWFramesContext *frames = (AVHWFramesContext *)(frame->hw_frames_ctx->data); | ||
| 969 | int texture_width = 0, texture_height = 0; | ||
| 970 | ID3D11Texture2D *pTexture = (ID3D11Texture2D *)frame->data[0]; | ||
| 971 | UINT iSliceIndex = (UINT)(uintptr_t)frame->data[1]; | ||
| 972 | |||
| 973 | if (*texture) { | ||
| 974 | SDL_PropertiesID props = SDL_GetTextureProperties(*texture); | ||
| 975 | texture_width = (int)SDL_GetNumberProperty(props, SDL_PROP_TEXTURE_WIDTH_NUMBER, 0); | ||
| 976 | texture_height = (int)SDL_GetNumberProperty(props, SDL_PROP_TEXTURE_HEIGHT_NUMBER, 0); | ||
| 977 | } | ||
| 978 | if (!*texture || texture_width != frames->width || texture_height != frames->height) { | ||
| 979 | if (*texture) { | ||
| 980 | SDL_DestroyTexture(*texture); | ||
| 981 | } | ||
| 982 | |||
| 983 | SDL_PropertiesID props = CreateVideoTextureProperties(frame, SDL_PIXELFORMAT_UNKNOWN, SDL_TEXTUREACCESS_STATIC); | ||
| 984 | *texture = SDL_CreateTextureWithProperties(renderer, props); | ||
| 985 | SDL_DestroyProperties(props); | ||
| 986 | if (!*texture) { | ||
| 987 | return false; | ||
| 988 | } | ||
| 989 | } | ||
| 990 | |||
| 991 | ID3D11Resource *dx11_resource = SDL_GetPointerProperty(SDL_GetTextureProperties(*texture), SDL_PROP_TEXTURE_D3D11_TEXTURE_POINTER, NULL); | ||
| 992 | if (!dx11_resource) { | ||
| 993 | SDL_SetError("Couldn't get texture ID3D11Resource interface"); | ||
| 994 | return false; | ||
| 995 | } | ||
| 996 | ID3D11DeviceContext_CopySubresourceRegion(d3d11_context, dx11_resource, 0, 0, 0, 0, (ID3D11Resource *)pTexture, iSliceIndex, NULL); | ||
| 997 | |||
| 998 | return true; | ||
| 999 | #else | ||
| 1000 | return false; | ||
| 1001 | #endif | ||
| 1002 | } | ||
| 1003 | |||
| 1004 | static bool GetTextureForVideoToolboxFrame(AVFrame *frame, SDL_Texture **texture) | ||
| 1005 | { | ||
| 1006 | #ifdef SDL_PLATFORM_APPLE | ||
| 1007 | CVPixelBufferRef pPixelBuffer = (CVPixelBufferRef)frame->data[3]; | ||
| 1008 | SDL_PropertiesID props; | ||
| 1009 | |||
| 1010 | if (*texture) { | ||
| 1011 | /* Free the previous texture now that we're about to render a new one */ | ||
| 1012 | /* FIXME: We can actually keep a cache of textures that map to pixel buffers */ | ||
| 1013 | SDL_DestroyTexture(*texture); | ||
| 1014 | } | ||
| 1015 | |||
| 1016 | props = CreateVideoTextureProperties(frame, SDL_PIXELFORMAT_UNKNOWN, SDL_TEXTUREACCESS_STATIC); | ||
| 1017 | SDL_SetPointerProperty(props, SDL_PROP_TEXTURE_CREATE_METAL_PIXELBUFFER_POINTER, pPixelBuffer); | ||
| 1018 | *texture = SDL_CreateTextureWithProperties(renderer, props); | ||
| 1019 | SDL_DestroyProperties(props); | ||
| 1020 | if (!*texture) { | ||
| 1021 | return false; | ||
| 1022 | } | ||
| 1023 | |||
| 1024 | return true; | ||
| 1025 | #else | ||
| 1026 | return false; | ||
| 1027 | #endif | ||
| 1028 | } | ||
| 1029 | |||
| 1030 | static bool GetTextureForVulkanFrame(AVFrame *frame, SDL_Texture **texture) | ||
| 1031 | { | ||
| 1032 | SDL_PropertiesID props; | ||
| 1033 | |||
| 1034 | if (*texture) { | ||
| 1035 | SDL_DestroyTexture(*texture); | ||
| 1036 | } | ||
| 1037 | |||
| 1038 | props = CreateVideoTextureProperties(frame, SDL_PIXELFORMAT_UNKNOWN, SDL_TEXTUREACCESS_STATIC); | ||
| 1039 | *texture = CreateVulkanVideoTexture(vulkan_context, frame, renderer, props); | ||
| 1040 | SDL_DestroyProperties(props); | ||
| 1041 | if (!*texture) { | ||
| 1042 | return false; | ||
| 1043 | } | ||
| 1044 | return true; | ||
| 1045 | } | ||
| 1046 | |||
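| | /* Dispatch on the frame's pixel format: hardware frames use their platform-specific import path, everything else is uploaded from system memory. */ | ||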
| 1047 | static bool GetTextureForFrame(AVFrame *frame, SDL_Texture **texture) | ||
| 1048 | { | ||
| 1049 | switch (frame->format) { | ||
| 1050 | case AV_PIX_FMT_VAAPI: | ||
| 1051 | return GetTextureForVAAPIFrame(frame, texture); | ||
| 1052 | case AV_PIX_FMT_DRM_PRIME: | ||
| 1053 | return GetTextureForDRMFrame(frame, texture); | ||
| 1054 | case AV_PIX_FMT_D3D11: | ||
| 1055 | return GetTextureForD3D11Frame(frame, texture); | ||
| 1056 | case AV_PIX_FMT_VIDEOTOOLBOX: | ||
| 1057 | return GetTextureForVideoToolboxFrame(frame, texture); | ||
| 1058 | case AV_PIX_FMT_VULKAN: | ||
| 1059 | return GetTextureForVulkanFrame(frame, texture); | ||
| 1060 | default: | ||
| 1061 | return GetTextureForMemoryFrame(frame, texture); | ||
| 1062 | } | ||
| 1063 | } | ||
| 1064 | |||
| 1065 | static int BeginFrameRendering(AVFrame *frame) | ||
| 1066 | { | ||
| 1067 | if (frame->format == AV_PIX_FMT_VULKAN) { | ||
| 1068 | return BeginVulkanFrameRendering(vulkan_context, frame, renderer); | ||
| 1069 | } | ||
| 1070 | return 0; | ||
| 1071 | } | ||
| 1072 | |||
| 1073 | static int FinishFrameRendering(AVFrame *frame) | ||
| 1074 | { | ||
| 1075 | if (frame->format == AV_PIX_FMT_VULKAN) { | ||
| 1076 | return FinishVulkanFrameRendering(vulkan_context, frame, renderer); | ||
| 1077 | } | ||
| 1078 | return 0; | ||
| 1079 | } | ||
| 1080 | |||
| 1081 | static void DisplayVideoTexture(AVFrame *frame) | ||
| 1082 | { | ||
| 1083 | /* Update the video texture */ | ||
| 1084 | if (!GetTextureForFrame(frame, &video_texture)) { | ||
| 1085 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't get texture for frame: %s", SDL_GetError()); | ||
| 1086 | return; | ||
| 1087 | } | ||
| 1088 | |||
| 1089 | SDL_FRect src; | ||
| 1090 | src.x = 0.0f; | ||
| 1091 | src.y = 0.0f; | ||
| 1092 | src.w = (float)frame->width; | ||
| 1093 | src.h = (float)frame->height; | ||
| 1094 | if (frame->linesize[0] < 0) { | ||
| 1095 | SDL_RenderTextureRotated(renderer, video_texture, &src, NULL, 0.0, NULL, SDL_FLIP_VERTICAL); | ||
| 1096 | } else { | ||
| 1097 | SDL_RenderTexture(renderer, video_texture, &src, NULL); | ||
| 1098 | } | ||
| 1099 | } | ||
| 1100 | |||
| 1101 | static void DisplayVideoFrame(AVFrame *frame) | ||
| 1102 | { | ||
| 1103 | DisplayVideoTexture(frame); | ||
| 1104 | } | ||
| 1105 | |||
| 1106 | static void HandleVideoFrame(AVFrame *frame, double pts) | ||
| 1107 | { | ||
| 1108 | /* Quick and dirty PTS handling */ | ||
| 1109 | if (!video_start) { | ||
| 1110 | video_start = SDL_GetTicks(); | ||
| 1111 | } | ||
| 1112 | double now = (double)(SDL_GetTicks() - video_start) / 1000.0; | ||
| 1113 | if (now < pts) { | ||
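| | /* pts and now are in seconds; SDL_DelayPrecise() takes nanoseconds. */ | ||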
| 1114 | SDL_DelayPrecise((Uint64)((pts - now) * SDL_NS_PER_SECOND)); | ||
| 1115 | } | ||
| 1116 | |||
| 1117 | if (BeginFrameRendering(frame) < 0) { | ||
| 1118 | return; | ||
| 1119 | } | ||
| 1120 | |||
| 1121 | SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255); | ||
| 1122 | SDL_RenderClear(renderer); | ||
| 1123 | |||
| 1124 | DisplayVideoFrame(frame); | ||
| 1125 | |||
| 1126 | /* Render any bouncing balls */ | ||
| 1127 | MoveSprite(); | ||
| 1128 | |||
| 1129 | SDL_RenderPresent(renderer); | ||
| 1130 | |||
| 1131 | FinishFrameRendering(frame); | ||
| 1132 | } | ||
| 1133 | |||
| 1134 | static AVCodecContext *OpenAudioStream(AVFormatContext *ic, int stream, const AVCodec *codec) | ||
| 1135 | { | ||
| 1136 | AVStream *st = ic->streams[stream]; | ||
| 1137 | AVCodecParameters *codecpar = st->codecpar; | ||
| 1138 | AVCodecContext *context; | ||
| 1139 | int result; | ||
| 1140 | |||
| 1141 | SDL_Log("Audio stream: %s %d channels, %d Hz", avcodec_get_name(codec->id), codecpar->ch_layout.nb_channels, codecpar->sample_rate); | ||
| 1142 | |||
| 1143 | context = avcodec_alloc_context3(NULL); | ||
| 1144 | if (!context) { | ||
| 1145 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "avcodec_alloc_context3 failed"); | ||
| 1146 | return NULL; | ||
| 1147 | } | ||
| 1148 | |||
| 1149 | result = avcodec_parameters_to_context(context, ic->streams[stream]->codecpar); | ||
| 1150 | if (result < 0) { | ||
| 1151 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "avcodec_parameters_to_context failed: %s", av_err2str(result)); | ||
| 1152 | avcodec_free_context(&context); | ||
| 1153 | return NULL; | ||
| 1154 | } | ||
| 1155 | context->pkt_timebase = ic->streams[stream]->time_base; | ||
| 1156 | |||
| 1157 | result = avcodec_open2(context, codec, NULL); | ||
| 1158 | if (result < 0) { | ||
| 1159 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't open codec %s: %s", avcodec_get_name(context->codec_id), av_err2str(result)); | ||
| 1160 | avcodec_free_context(&context); | ||
| 1161 | return NULL; | ||
| 1162 | } | ||
| 1163 | |||
| 1164 | SDL_AudioSpec spec = { SDL_AUDIO_F32, codecpar->ch_layout.nb_channels, codecpar->sample_rate }; | ||
| 1165 | audio = SDL_OpenAudioDeviceStream(SDL_AUDIO_DEVICE_DEFAULT_PLAYBACK, &spec, NULL, NULL); | ||
| 1166 | if (audio) { | ||
| 1167 | SDL_ResumeAudioStreamDevice(audio); | ||
| 1168 | } else { | ||
| 1169 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't open audio: %s", SDL_GetError()); | ||
| 1170 | } | ||
| 1171 | return context; | ||
| 1172 | } | ||
| 1173 | |||
| 1174 | static SDL_AudioFormat GetAudioFormat(int format) | ||
| 1175 | { | ||
| 1176 | switch (format) { | ||
| 1177 | case AV_SAMPLE_FMT_U8: | ||
| 1178 | case AV_SAMPLE_FMT_U8P: | ||
| 1179 | return SDL_AUDIO_U8; | ||
| 1180 | case AV_SAMPLE_FMT_S16: | ||
| 1181 | case AV_SAMPLE_FMT_S16P: | ||
| 1182 | return SDL_AUDIO_S16; | ||
| 1183 | case AV_SAMPLE_FMT_S32: | ||
| 1184 | case AV_SAMPLE_FMT_S32P: | ||
| 1185 | return SDL_AUDIO_S32; | ||
| 1186 | case AV_SAMPLE_FMT_FLT: | ||
| 1187 | case AV_SAMPLE_FMT_FLTP: | ||
| 1188 | return SDL_AUDIO_F32; | ||
| 1189 | default: | ||
| 1190 | /* Unsupported */ | ||
| 1191 | return SDL_AUDIO_UNKNOWN; | ||
| 1192 | } | ||
| 1193 | } | ||
| 1194 | |||
| 1195 | static bool IsPlanarAudioFormat(int format) | ||
| 1196 | { | ||
| 1197 | switch (format) { | ||
| 1198 | case AV_SAMPLE_FMT_U8P: | ||
| 1199 | case AV_SAMPLE_FMT_S16P: | ||
| 1200 | case AV_SAMPLE_FMT_S32P: | ||
| 1201 | case AV_SAMPLE_FMT_FLTP: | ||
| 1202 | case AV_SAMPLE_FMT_DBLP: | ||
| 1203 | case AV_SAMPLE_FMT_S64P: | ||
| 1204 | return true; | ||
| 1205 | default: | ||
| 1206 | return false; | ||
| 1207 | } | ||
| 1208 | } | ||
| 1209 | |||
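| | /* Convert planar audio (one buffer per channel) into the interleaved layout that SDL audio streams expect. */ | ||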
| 1210 | static void InterleaveAudio(AVFrame *frame, const SDL_AudioSpec *spec) | ||
| 1211 | { | ||
| 1212 | int c, n; | ||
| 1213 | int samplesize = SDL_AUDIO_BYTESIZE(spec->format); | ||
| 1214 | int framesize = SDL_AUDIO_FRAMESIZE(*spec); | ||
| 1215 | Uint8 *data = (Uint8 *)SDL_malloc(frame->nb_samples * framesize); | ||
| 1216 | if (!data) { | ||
| 1217 | return; | ||
| 1218 | } | ||
| 1219 | |||
| 1220 | /* This could be optimized with SIMD and not allocating memory each time */ | ||
| 1221 | for (c = 0; c < spec->channels; ++c) { | ||
| 1222 | const Uint8 *src = frame->data[c]; | ||
| 1223 | Uint8 *dst = data + c * samplesize; | ||
| 1224 | for (n = frame->nb_samples; n--;) { | ||
| 1225 | SDL_memcpy(dst, src, samplesize); | ||
| 1226 | src += samplesize; | ||
| 1227 | dst += framesize; | ||
| 1228 | } | ||
| 1229 | } | ||
| 1230 | SDL_PutAudioStreamData(audio, data, frame->nb_samples * framesize); | ||
| 1231 | SDL_free(data); | ||
| 1232 | } | ||
| 1233 | |||
| 1234 | static void HandleAudioFrame(AVFrame *frame) | ||
| 1235 | { | ||
| 1236 | if (audio) { | ||
| 1237 | SDL_AudioSpec spec = { GetAudioFormat(frame->format), frame->ch_layout.nb_channels, frame->sample_rate }; | ||
| 1238 | SDL_SetAudioStreamFormat(audio, &spec, NULL); | ||
| 1239 | |||
| 1240 | if (frame->ch_layout.nb_channels > 1 && IsPlanarAudioFormat(frame->format)) { | ||
| 1241 | InterleaveAudio(frame, &spec); | ||
| 1242 | } else { | ||
| 1243 | SDL_PutAudioStreamData(audio, frame->data[0], frame->nb_samples * SDL_AUDIO_FRAMESIZE(spec)); | ||
| 1244 | } | ||
| 1245 | } | ||
| 1246 | } | ||
| 1247 | |||
| 1248 | static void av_log_callback(void *avcl, int level, const char *fmt, va_list vl) | ||
| 1249 | { | ||
| 1250 | const char *pszCategory = NULL; | ||
| 1251 | char *message; | ||
| 1252 | |||
| 1253 | switch (level) { | ||
| 1254 | case AV_LOG_PANIC: | ||
| 1255 | case AV_LOG_FATAL: | ||
| 1256 | pszCategory = "fatal error"; | ||
| 1257 | break; | ||
| 1258 | case AV_LOG_ERROR: | ||
| 1259 | pszCategory = "error"; | ||
| 1260 | break; | ||
| 1261 | case AV_LOG_WARNING: | ||
| 1262 | pszCategory = "warning"; | ||
| 1263 | break; | ||
| 1264 | case AV_LOG_INFO: | ||
| 1265 | pszCategory = "info"; | ||
| 1266 | break; | ||
| 1267 | case AV_LOG_VERBOSE: | ||
| 1268 | pszCategory = "verbose"; | ||
| 1269 | break; | ||
| 1270 | case AV_LOG_DEBUG: | ||
| 1271 | if (verbose) { | ||
| 1272 | pszCategory = "debug"; | ||
| 1273 | } | ||
| 1274 | break; | ||
| 1275 | } | ||
| 1276 | |||
| 1277 | if (!pszCategory) { | ||
| 1278 | // We don't care about this message | ||
| 1279 | return; | ||
| 1280 | } | ||
| 1281 | |||
| 1282 | SDL_vasprintf(&message, fmt, vl); | ||
| 1283 | SDL_Log("ffmpeg %s: %s", pszCategory, message); | ||
| 1284 | SDL_free(message); | ||
| 1285 | } | ||
| 1286 | |||
| 1287 | static void print_usage(SDLTest_CommonState *state, const char *argv0) | ||
| 1288 | { | ||
| 1289 | static const char *options[] = { "[--verbose]", "[--sprites N]", "[--audio-codec codec]", "[--video-codec codec]", "[--software]", "video_file", NULL }; | ||
| 1290 | SDLTest_CommonLogUsage(state, argv0, options); | ||
| 1291 | } | ||
| 1292 | |||
| 1293 | int main(int argc, char *argv[]) | ||
| 1294 | { | ||
| 1295 | const char *file = NULL; | ||
| 1296 | AVFormatContext *ic = NULL; | ||
| 1297 | int audio_stream = -1; | ||
| 1298 | int video_stream = -1; | ||
| 1299 | const char *audio_codec_name = NULL; | ||
| 1300 | const char *video_codec_name = NULL; | ||
| 1301 | const AVCodec *audio_codec = NULL; | ||
| 1302 | const AVCodec *video_codec = NULL; | ||
| 1303 | AVCodecContext *audio_context = NULL; | ||
| 1304 | AVCodecContext *video_context = NULL; | ||
| 1305 | AVPacket *pkt = NULL; | ||
| 1306 | AVFrame *frame = NULL; | ||
| 1307 | double first_pts = -1.0; | ||
| 1308 | int i; | ||
| 1309 | int result; | ||
| 1310 | int return_code = -1; | ||
| 1311 | SDL_WindowFlags window_flags; | ||
| 1312 | bool flushing = false; | ||
| 1313 | bool decoded = false; | ||
| 1314 | bool done = false; | ||
| 1315 | SDLTest_CommonState *state; | ||
| 1316 | |||
| 1317 | /* Initialize test framework */ | ||
| 1318 | state = SDLTest_CommonCreateState(argv, 0); | ||
| 1319 | |||
| 1320 | /* Enable standard application logging */ | ||
| 1321 | SDL_SetLogPriority(SDL_LOG_CATEGORY_APPLICATION, SDL_LOG_PRIORITY_INFO); | ||
| 1322 | |||
| 1323 | /* Log ffmpeg messages */ | ||
| 1324 | av_log_set_callback(av_log_callback); | ||
| 1325 | |||
| 1326 | /* Parse commandline */ | ||
| 1327 | for (i = 1; i < argc;) { | ||
| 1328 | int consumed; | ||
| 1329 | |||
| 1330 | consumed = SDLTest_CommonArg(state, i); | ||
| 1331 | if (!consumed) { | ||
| 1332 | if (SDL_strcmp(argv[i], "--verbose") == 0) { | ||
| 1333 | verbose = true; | ||
| 1334 | consumed = 1; | ||
| 1335 | } else if (SDL_strcmp(argv[i], "--sprites") == 0 && argv[i + 1]) { | ||
| 1336 | num_sprites = SDL_atoi(argv[i + 1]); | ||
| 1337 | consumed = 2; | ||
| 1338 | } else if (SDL_strcmp(argv[i], "--audio-codec") == 0 && argv[i + 1]) { | ||
| 1339 | audio_codec_name = argv[i + 1]; | ||
| 1340 | consumed = 2; | ||
| 1341 | } else if (SDL_strcmp(argv[i], "--video-codec") == 0 && argv[i + 1]) { | ||
| 1342 | video_codec_name = argv[i + 1]; | ||
| 1343 | consumed = 2; | ||
| 1344 | } else if (SDL_strcmp(argv[i], "--software") == 0) { | ||
| 1345 | software_only = true; | ||
| 1346 | consumed = 1; | ||
| 1347 | } else if (!file) { | ||
| 1348 | /* We'll try to open this as a media file */ | ||
| 1349 | file = argv[i]; | ||
| 1350 | consumed = 1; | ||
| 1351 | } | ||
| 1352 | } | ||
| 1353 | if (consumed <= 0) { | ||
| 1354 | print_usage(state, argv[0]); | ||
| 1355 | return_code = 1; | ||
| 1356 | goto quit; | ||
| 1357 | } | ||
| 1358 | |||
| 1359 | i += consumed; | ||
| 1360 | } | ||
| 1361 | |||
| 1362 | if (!file) { | ||
| 1363 | print_usage(state, argv[0]); | ||
| 1364 | return_code = 1; | ||
| 1365 | goto quit; | ||
| 1366 | } | ||
| 1367 | |||
| 1368 | if (!SDL_Init(SDL_INIT_AUDIO | SDL_INIT_VIDEO)) { | ||
| 1369 | return_code = 2; | ||
| 1370 | goto quit; | ||
| 1371 | } | ||
| 1372 | |||
| 1373 | window_flags = SDL_WINDOW_HIDDEN | SDL_WINDOW_RESIZABLE | SDL_WINDOW_HIGH_PIXEL_DENSITY; | ||
| 1374 | #ifdef SDL_PLATFORM_APPLE | ||
| 1375 | window_flags |= SDL_WINDOW_METAL; | ||
| 1376 | #elif !defined(SDL_PLATFORM_WIN32) | ||
| 1377 | window_flags |= SDL_WINDOW_OPENGL; | ||
| 1378 | #endif | ||
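| | /* Honor an explicit SDL_HINT_RENDER_DRIVER request before trying the platform-specific fallbacks below. */ | ||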
| 1379 | if (SDL_GetHint(SDL_HINT_RENDER_DRIVER) != NULL) { | ||
| 1380 | CreateWindowAndRenderer(window_flags, SDL_GetHint(SDL_HINT_RENDER_DRIVER)); | ||
| 1381 | } | ||
| 1382 | #ifdef HAVE_EGL | ||
| 1383 | /* Try to create an EGL compatible window for DRM hardware frame support */ | ||
| 1384 | if (!window) { | ||
| 1385 | CreateWindowAndRenderer(window_flags, "opengles2"); | ||
| 1386 | } | ||
| 1387 | #endif | ||
| 1388 | #ifdef SDL_PLATFORM_APPLE | ||
| 1389 | if (!window) { | ||
| 1390 | CreateWindowAndRenderer(window_flags, "metal"); | ||
| 1391 | } | ||
| 1392 | #endif | ||
| 1393 | #ifdef SDL_PLATFORM_WIN32 | ||
| 1394 | if (!window) { | ||
| 1395 | CreateWindowAndRenderer(window_flags, "direct3d11"); | ||
| 1396 | } | ||
| 1397 | #endif | ||
| 1398 | if (!window) { | ||
| 1399 | if (!CreateWindowAndRenderer(window_flags, NULL)) { | ||
| 1400 | return_code = 2; | ||
| 1401 | goto quit; | ||
| 1402 | } | ||
| 1403 | } | ||
| 1404 | |||
| 1405 | if (!SDL_SetWindowTitle(window, file)) { | ||
| 1406 | SDL_Log("SDL_SetWindowTitle: %s", SDL_GetError()); | ||
| 1407 | } | ||
| 1408 | |||
| 1409 | /* Open the media file */ | ||
| 1410 | result = avformat_open_input(&ic, file, NULL, NULL); | ||
| 1411 | if (result < 0) { | ||
| 1412 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't open %s: %d", file, result); | ||
| 1413 | return_code = 4; | ||
| 1414 | goto quit; | ||
| 1415 | } | ||
| 1416 | video_stream = av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO, -1, -1, &video_codec, 0); | ||
| 1417 | if (video_stream >= 0) { | ||
| 1418 | if (video_codec_name) { | ||
| 1419 | video_codec = avcodec_find_decoder_by_name(video_codec_name); | ||
| 1420 | if (!video_codec) { | ||
| 1421 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't find codec '%s'", video_codec_name); | ||
| 1422 | return_code = 4; | ||
| 1423 | goto quit; | ||
| 1424 | } | ||
| 1425 | } | ||
| 1426 | video_context = OpenVideoStream(ic, video_stream, video_codec); | ||
| 1427 | if (!video_context) { | ||
| 1428 | return_code = 4; | ||
| 1429 | goto quit; | ||
| 1430 | } | ||
| 1431 | } | ||
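| | /* Pick the audio stream that best matches the chosen video stream (passed as the related stream). */ | ||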
| 1432 | audio_stream = av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO, -1, video_stream, &audio_codec, 0); | ||
| 1433 | if (audio_stream >= 0) { | ||
| 1434 | if (audio_codec_name) { | ||
| 1435 | audio_codec = avcodec_find_decoder_by_name(audio_codec_name); | ||
| 1436 | if (!audio_codec) { | ||
| 1437 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't find codec '%s'", audio_codec_name); | ||
| 1438 | return_code = 4; | ||
| 1439 | goto quit; | ||
| 1440 | } | ||
| 1441 | } | ||
| 1442 | audio_context = OpenAudioStream(ic, audio_stream, audio_codec); | ||
| 1443 | if (!audio_context) { | ||
| 1444 | return_code = 4; | ||
| 1445 | goto quit; | ||
| 1446 | } | ||
| 1447 | } | ||
| 1448 | pkt = av_packet_alloc(); | ||
| 1449 | if (!pkt) { | ||
| 1450 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "av_packet_alloc failed"); | ||
| 1451 | return_code = 4; | ||
| 1452 | goto quit; | ||
| 1453 | } | ||
| 1454 | frame = av_frame_alloc(); | ||
| 1455 | if (!frame) { | ||
| 1456 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "av_frame_alloc failed"); | ||
| 1457 | return_code = 4; | ||
| 1458 | goto quit; | ||
| 1459 | } | ||
| 1460 | |||
| 1461 | /* Create the sprite */ | ||
| 1462 | sprite = CreateTexture(renderer, icon_bmp, icon_bmp_len, &sprite_w, &sprite_h); | ||
| 1463 | |||
| 1464 | if (!sprite) { | ||
| 1465 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Couldn't create texture (%s)", SDL_GetError()); | ||
| 1466 | return_code = 3; | ||
| 1467 | goto quit; | ||
| 1468 | } | ||
| 1469 | |||
| 1470 | /* Allocate memory for the sprite info */ | ||
| 1471 | positions = (SDL_FRect *)SDL_malloc(num_sprites * sizeof(*positions)); | ||
| 1472 | velocities = (SDL_FRect *)SDL_malloc(num_sprites * sizeof(*velocities)); | ||
| 1473 | if (!positions || !velocities) { | ||
| 1474 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Out of memory!"); | ||
| 1475 | return_code = 3; | ||
| 1476 | goto quit; | ||
| 1477 | } | ||
| 1478 | |||
| 1479 | /* Position sprites and set their velocities */ | ||
| 1480 | SDL_Rect viewport; | ||
| 1481 | SDL_GetRenderViewport(renderer, &viewport); | ||
| 1482 | for (i = 0; i < num_sprites; ++i) { | ||
| 1483 | positions[i].x = (float)SDL_rand(viewport.w - sprite_w); | ||
| 1484 | positions[i].y = (float)SDL_rand(viewport.h - sprite_h); | ||
| 1485 | positions[i].w = (float)sprite_w; | ||
| 1486 | positions[i].h = (float)sprite_h; | ||
| 1487 | velocities[i].x = 0.0f; | ||
| 1488 | velocities[i].y = 0.0f; | ||
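| | /* Re-roll until both components are nonzero so every sprite actually moves. */ | ||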
| 1489 | while (velocities[i].x == 0.f || velocities[i].y == 0.f) { | ||
| 1490 | velocities[i].x = (float)(SDL_rand(2 + 1) - 1); | ||
| 1491 | velocities[i].y = (float)(SDL_rand(2 + 1) - 1); | ||
| 1492 | } | ||
| 1493 | } | ||
| 1494 | |||
| 1495 | /* We're ready to go! */ | ||
| 1496 | SDL_ShowWindow(window); | ||
| 1497 | |||
| 1498 | /* Main render loop */ | ||
| 1499 | while (!done) { | ||
| 1500 | SDL_Event event; | ||
| 1501 | |||
| 1502 | /* Check for events */ | ||
| 1503 | while (SDL_PollEvent(&event)) { | ||
| 1504 | if (event.type == SDL_EVENT_QUIT || | ||
| 1505 | (event.type == SDL_EVENT_KEY_DOWN && event.key.key == SDLK_ESCAPE)) { | ||
| 1506 | done = true; | ||
| 1507 | } | ||
| 1508 | } | ||
| 1509 | |||
| 1510 | if (!flushing) { | ||
| 1511 | result = av_read_frame(ic, pkt); | ||
| 1512 | if (result < 0) { | ||
| 1513 | SDL_Log("End of stream, finishing decode"); | ||
| 1514 | if (audio_context) { | ||
| 1515 | avcodec_flush_buffers(audio_context); | ||
| 1516 | } | ||
| 1517 | if (video_context) { | ||
| 1518 | avcodec_flush_buffers(video_context); | ||
| 1519 | } | ||
| 1520 | flushing = true; | ||
| 1521 | } else { | ||
| 1522 | if (pkt->stream_index == audio_stream) { | ||
| 1523 | result = avcodec_send_packet(audio_context, pkt); | ||
| 1524 | if (result < 0) { | ||
| 1525 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "avcodec_send_packet(audio_context) failed: %s", av_err2str(result)); | ||
| 1526 | } | ||
| 1527 | } else if (pkt->stream_index == video_stream) { | ||
| 1528 | result = avcodec_send_packet(video_context, pkt); | ||
| 1529 | if (result < 0) { | ||
| 1530 | SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "avcodec_send_packet(video_context) failed: %s", av_err2str(result)); | ||
| 1531 | } | ||
| 1532 | } | ||
| 1533 | av_packet_unref(pkt); | ||
| 1534 | } | ||
| 1535 | } | ||
| 1536 | |||
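| | /* Drain all frames the decoders currently have ready and hand them to the audio/video paths. */ | ||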
| 1537 | decoded = false; | ||
| 1538 | if (audio_context) { | ||
| 1539 | while (avcodec_receive_frame(audio_context, frame) >= 0) { | ||
| 1540 | HandleAudioFrame(frame); | ||
| 1541 | decoded = true; | ||
| 1542 | } | ||
| 1543 | if (flushing) { | ||
| 1544 | /* Let SDL know we're done sending audio */ | ||
| 1545 | SDL_FlushAudioStream(audio); | ||
| 1546 | } | ||
| 1547 | } | ||
| 1548 | if (video_context) { | ||
| 1549 | while (avcodec_receive_frame(video_context, frame) >= 0) { | ||
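| | /* Convert the frame's pts from the stream timebase to seconds, relative to the first decoded frame. */ | ||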
| 1550 | double pts = ((double)frame->pts * video_context->pkt_timebase.num) / video_context->pkt_timebase.den; | ||
| 1551 | if (first_pts < 0.0) { | ||
| 1552 | first_pts = pts; | ||
| 1553 | } | ||
| 1554 | pts -= first_pts; | ||
| 1555 | |||
| 1556 | HandleVideoFrame(frame, pts); | ||
| 1557 | decoded = true; | ||
| 1558 | } | ||
| 1559 | } else { | ||
| 1560 | /* Update video rendering */ | ||
| 1561 | SDL_SetRenderDrawColor(renderer, 0xA0, 0xA0, 0xA0, 0xFF); | ||
| 1562 | SDL_RenderClear(renderer); | ||
| 1563 | MoveSprite(); | ||
| 1564 | SDL_RenderPresent(renderer); | ||
| 1565 | } | ||
| 1566 | |||
| 1567 | if (flushing && !decoded) { | ||
| 1568 | if (audio && SDL_GetAudioStreamQueued(audio) > 0) { | ||
| 1569 | /* Wait a little bit for the audio to finish */ | ||
| 1570 | SDL_Delay(10); | ||
| 1571 | } else { | ||
| 1572 | done = true; | ||
| 1573 | } | ||
| 1574 | } | ||
| 1575 | } | ||
| 1576 | return_code = 0; | ||
| 1577 | quit: | ||
| 1578 | #ifdef SDL_PLATFORM_WIN32 | ||
| 1579 | if (d3d11_context) { | ||
| 1580 | ID3D11DeviceContext_Release(d3d11_context); | ||
| 1581 | d3d11_context = NULL; | ||
| 1582 | } | ||
| 1583 | if (d3d11_device) { | ||
| 1584 | ID3D11Device_Release(d3d11_device); | ||
| 1585 | d3d11_device = NULL; | ||
| 1586 | } | ||
| 1587 | #endif | ||
| 1588 | SDL_free(positions); | ||
| 1589 | SDL_free(velocities); | ||
| 1590 | av_frame_free(&frame); | ||
| 1591 | av_packet_free(&pkt); | ||
| 1592 | avcodec_free_context(&audio_context); | ||
| 1593 | avcodec_free_context(&video_context); | ||
| 1594 | avformat_close_input(&ic); | ||
| 1595 | SDL_DestroyRenderer(renderer); | ||
| 1596 | if (vulkan_context) { | ||
| 1597 | DestroyVulkanVideoContext(vulkan_context); | ||
| 1598 | } | ||
| 1599 | SDL_DestroyWindow(window); | ||
| 1600 | SDL_Quit(); | ||
| 1601 | SDLTest_CommonDestroyState(state); | ||
| 1602 | return return_code; | ||
| 1603 | } | ||
