Added colorspace to SDL_CameraSpec

Sam Lantinga 2024-06-12 11:18:26 -07:00
parent 98499d6818
commit aeea819494
10 changed files with 407 additions and 167 deletions

View File

@@ -79,6 +79,7 @@ typedef struct SDL_Camera SDL_Camera;
typedef struct SDL_CameraSpec
{
SDL_PixelFormatEnum format; /**< Frame format */
SDL_Colorspace colorspace; /**< Frame colorspace */
int width; /**< Frame width */
int height; /**< Frame height */
int interval_numerator; /**< Frame rate numerator ((dom / num) == fps, (num / dom) == duration) */
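
As a usage note: with this change an app can state the colorspace it wants alongside the pixel format when opening a camera. A minimal sketch, assuming the interval_denominator member that follows this truncated hunk and the SDL_OpenCameraDevice entry point shown later in the diff; the concrete values are illustrative only:

#include <SDL3/SDL.h>

/* Sketch: request 1280x720 NV12 at 30fps, tagged as BT.709 limited range.
   The values here are examples, not taken from the commit. */
static SDL_Camera *OpenCameraBT709(SDL_CameraDeviceID instance_id)
{
    SDL_CameraSpec spec;
    SDL_zero(spec);
    spec.format = SDL_PIXELFORMAT_NV12;
    spec.colorspace = SDL_COLORSPACE_BT709_LIMITED;  /* the field this commit adds */
    spec.width = 1280;
    spec.height = 720;
    spec.interval_numerator = 1;     /* 1/30th of a second per frame == 30 fps */
    spec.interval_denominator = 30;
    return SDL_OpenCameraDevice(instance_id, &spec);
}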

View File

@@ -482,7 +482,7 @@ typedef enum SDL_ColorPrimaries
SDL_COLOR_PRIMARIES_UNSPECIFIED = 2,
SDL_COLOR_PRIMARIES_BT470M = 4, /**< ITU-R BT.470-6 System M */
SDL_COLOR_PRIMARIES_BT470BG = 5, /**< ITU-R BT.470-6 System B, G / ITU-R BT.601-7 625 */
SDL_COLOR_PRIMARIES_BT601 = 6, /**< ITU-R BT.601-7 525 */
SDL_COLOR_PRIMARIES_BT601 = 6, /**< ITU-R BT.601-7 525, SMPTE 170M */
SDL_COLOR_PRIMARIES_SMPTE240 = 7, /**< SMPTE 240M, functionally the same as SDL_COLOR_PRIMARIES_BT601 */
SDL_COLOR_PRIMARIES_GENERIC_FILM = 8, /**< Generic film (color filters using Illuminant C) */
SDL_COLOR_PRIMARIES_BT2020 = 9, /**< ITU-R BT.2020-2 / ITU-R BT.2100-0 */
@@ -535,7 +535,7 @@ typedef enum SDL_MatrixCoefficients
SDL_MATRIX_COEFFICIENTS_IDENTITY = 0,
SDL_MATRIX_COEFFICIENTS_BT709 = 1, /**< ITU-R BT.709-6 */
SDL_MATRIX_COEFFICIENTS_UNSPECIFIED = 2,
SDL_MATRIX_COEFFICIENTS_FCC = 4, /**< US FCC */
SDL_MATRIX_COEFFICIENTS_FCC = 4, /**< US FCC Title 47 */
SDL_MATRIX_COEFFICIENTS_BT470BG = 5, /**< ITU-R BT.470-6 System B, G / ITU-R BT.601-7 625, functionally the same as SDL_MATRIX_COEFFICIENTS_BT601 */
SDL_MATRIX_COEFFICIENTS_BT601 = 6, /**< ITU-R BT.601-7 525 */
SDL_MATRIX_COEFFICIENTS_SMPTE240 = 7, /**< SMPTE 240M */

View File

@@ -84,7 +84,7 @@ char *SDL_GetCameraThreadName(SDL_CameraDevice *device, char *buf, size_t buflen
return buf;
}
int SDL_AddCameraFormat(CameraFormatAddData *data, SDL_PixelFormatEnum fmt, int w, int h, int interval_numerator, int interval_denominator)
int SDL_AddCameraFormat(CameraFormatAddData *data, SDL_PixelFormatEnum format, SDL_Colorspace colorspace, int w, int h, int interval_numerator, int interval_denominator)
{
SDL_assert(data != NULL);
if (data->allocated_specs <= data->num_specs) {
@@ -98,7 +98,8 @@ int SDL_AddCameraFormat(CameraFormatAddData *data, SDL_PixelFormatEnum fmt, int
}
SDL_CameraSpec *spec = &data->specs[data->num_specs];
spec->format = fmt;
spec->format = format;
spec->colorspace = colorspace;
spec->width = w;
spec->height = h;
spec->interval_numerator = interval_numerator;
@@ -129,7 +130,7 @@ static size_t GetFrameBufLen(const SDL_CameraSpec *spec)
const size_t w = (const size_t) spec->width;
const size_t h = (const size_t) spec->height;
const size_t wxh = w * h;
const Uint32 fmt = spec->format;
const SDL_PixelFormatEnum fmt = spec->format;
switch (fmt) {
// Some YUV formats have a larger Y plane than their U or V planes.
@@ -366,8 +367,8 @@ static int SDLCALL CameraSpecCmp(const void *vpa, const void *vpb)
SDL_assert(b->width > 0);
SDL_assert(b->height > 0);
const Uint32 afmt = a->format;
const Uint32 bfmt = b->format;
const SDL_PixelFormatEnum afmt = a->format;
const SDL_PixelFormatEnum bfmt = b->format;
if (SDL_ISPIXELFORMAT_FOURCC(afmt) && !SDL_ISPIXELFORMAT_FOURCC(bfmt)) {
return -1;
} else if (!SDL_ISPIXELFORMAT_FOURCC(afmt) && SDL_ISPIXELFORMAT_FOURCC(bfmt)) {
@@ -401,6 +402,15 @@ static int SDLCALL CameraSpecCmp(const void *vpa, const void *vpb)
return 1;
}
if (SDL_COLORSPACERANGE(a->colorspace) == SDL_COLOR_RANGE_FULL &&
SDL_COLORSPACERANGE(b->colorspace) != SDL_COLOR_RANGE_FULL) {
return -1;
}
if (SDL_COLORSPACERANGE(a->colorspace) != SDL_COLOR_RANGE_FULL &&
SDL_COLORSPACERANGE(b->colorspace) == SDL_COLOR_RANGE_FULL) {
return 1;
}
return 0; // apparently, they're equal.
}
@@ -1115,6 +1125,7 @@ SDL_Camera *SDL_OpenCameraDevice(SDL_CameraDeviceID instance_id, const SDL_Camer
ReleaseCameraDevice(device);
return NULL;
}
SDL_SetSurfaceColorspace(device->acquire_surface, closest.colorspace);
// if we have to scale _and_ convert, we need a middleman surface, since we can't do both changes at once.
if (device->needs_scaling && device->needs_conversion) {
@@ -1122,6 +1133,12 @@ SDL_Camera *SDL_OpenCameraDevice(SDL_CameraDeviceID instance_id, const SDL_Camer
const SDL_CameraSpec *s = downsampling_first ? &device->spec : &closest;
const SDL_PixelFormatEnum fmt = downsampling_first ? closest.format : device->spec.format;
device->conversion_surface = SDL_CreateSurface(s->width, s->height, fmt);
if (!device->conversion_surface) {
ClosePhysicalCameraDevice(device);
ReleaseCameraDevice(device);
return NULL;
}
SDL_SetSurfaceColorspace(device->conversion_surface, closest.colorspace);
}
// output surfaces are in the app-requested format. If no conversion is necessary, we'll just use the pointers
@@ -1140,12 +1157,12 @@ SDL_Camera *SDL_OpenCameraDevice(SDL_CameraDeviceID instance_id, const SDL_Camer
} else {
surf = SDL_CreateSurfaceFrom(NULL, device->spec.width, device->spec.height, 0, device->spec.format);
}
if (!surf) {
ClosePhysicalCameraDevice(device);
ReleaseCameraDevice(device);
return NULL;
}
SDL_SetSurfaceColorspace(surf, closest.colorspace);
device->output_surfaces[i].surface = surf;
}
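
Since the opened device now tags its surfaces with the negotiated colorspace, an app can read it back from any acquired frame. A rough sketch, assuming the SDL_AcquireCameraFrame/SDL_ReleaseCameraFrame calls from the camera API of this era and the two-argument SDL_GetSurfaceColorspace used elsewhere in this diff:

/* Sketch: grab one frame and log how it is tagged. */
static void LogFrameColorspace(SDL_Camera *camera)
{
    Uint64 timestampNS = 0;
    SDL_Surface *frame = SDL_AcquireCameraFrame(camera, &timestampNS);
    if (frame) {
        SDL_Colorspace colorspace = SDL_COLORSPACE_UNKNOWN;
        SDL_GetSurfaceColorspace(frame, &colorspace);  /* set via SDL_SetSurfaceColorspace above */
        SDL_Log("Frame %dx%d, colorspace 0x%08x", frame->w, frame->h, (unsigned int) colorspace);
        SDL_ReleaseCameraFrame(camera, frame);
    }
}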

View File

@@ -64,7 +64,7 @@ typedef struct CameraFormatAddData
int allocated_specs;
} CameraFormatAddData;
int SDL_AddCameraFormat(CameraFormatAddData *data, SDL_PixelFormatEnum fmt, int w, int h, int interval_numerator, int interval_denominator);
int SDL_AddCameraFormat(CameraFormatAddData *data, SDL_PixelFormatEnum format, SDL_Colorspace colorspace, int w, int h, int interval_numerator, int interval_denominator);
typedef struct SurfaceList
{

View File

@@ -252,18 +252,16 @@ static void DestroyCameraManager(void)
}
}
static Uint32 format_android_to_sdl(Uint32 fmt)
static void format_android_to_sdl(Uint32 fmt, SDL_PixelFormatEnum *format, SDL_Colorspace *colorspace)
{
switch (fmt) {
#define CASE(x, y) case x: return y
CASE(AIMAGE_FORMAT_YUV_420_888, SDL_PIXELFORMAT_NV12);
CASE(AIMAGE_FORMAT_RGB_565, SDL_PIXELFORMAT_RGB565);
CASE(AIMAGE_FORMAT_RGB_888, SDL_PIXELFORMAT_XRGB8888);
CASE(AIMAGE_FORMAT_RGBA_8888, SDL_PIXELFORMAT_RGBA8888);
CASE(AIMAGE_FORMAT_RGBX_8888, SDL_PIXELFORMAT_RGBX8888);
//CASE(AIMAGE_FORMAT_RGBA_FP16, SDL_PIXELFORMAT_UNKNOWN); // 64bits
//CASE(AIMAGE_FORMAT_RAW_PRIVATE, SDL_PIXELFORMAT_UNKNOWN);
//CASE(AIMAGE_FORMAT_JPEG, SDL_PIXELFORMAT_UNKNOWN);
#define CASE(x, y, z) case x: *format = y; *colorspace = z; return
CASE(AIMAGE_FORMAT_YUV_420_888, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_LIMITED);
CASE(AIMAGE_FORMAT_RGB_565, SDL_PIXELFORMAT_RGB565, SDL_COLORSPACE_SRGB);
CASE(AIMAGE_FORMAT_RGB_888, SDL_PIXELFORMAT_XRGB8888, SDL_COLORSPACE_SRGB);
CASE(AIMAGE_FORMAT_RGBA_8888, SDL_PIXELFORMAT_RGBA8888, SDL_COLORSPACE_SRGB);
CASE(AIMAGE_FORMAT_RGBX_8888, SDL_PIXELFORMAT_RGBX8888, SDL_COLORSPACE_SRGB);
CASE(AIMAGE_FORMAT_RGBA_FP16, SDL_PIXELFORMAT_RGBA64_FLOAT, SDL_COLORSPACE_SRGB);
#undef CASE
default: break;
}
@@ -272,10 +270,11 @@ static Uint32 format_android_to_sdl(Uint32 fmt)
//SDL_Log("Unknown format AIMAGE_FORMAT '%d'", fmt);
#endif
return SDL_PIXELFORMAT_UNKNOWN;
*format = SDL_PIXELFORMAT_UNKNOWN;
*colorspace = SDL_COLORSPACE_UNKNOWN;
}
static Uint32 format_sdl_to_android(Uint32 fmt)
static Uint32 format_sdl_to_android(SDL_PixelFormatEnum fmt)
{
switch (fmt) {
#define CASE(x, y) case y: return x
@@ -633,14 +632,18 @@ static void GatherCameraSpecs(const char *devid, CameraFormatAddData *add_data,
const int w = (int) i32ptr[1];
const int h = (int) i32ptr[2];
const int32_t type = i32ptr[3];
Uint32 sdlfmt;
SDL_PixelFormatEnum sdlfmt = SDL_PIXELFORMAT_UNKNOWN;
SDL_Colorspace colorspace = SDL_COLORSPACE_UNKNOWN;
if (type == ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT) {
continue;
} else if ((w <= 0) || (h <= 0)) {
continue;
} else if ((sdlfmt = format_android_to_sdl(fmt)) == SDL_PIXELFORMAT_UNKNOWN) {
continue;
} else {
format_android_to_sdl(fmt, &sdlfmt, &colorspace);
if (sdlfmt == SDL_PIXELFORMAT_UNKNOWN) {
continue;
}
}
#if 0 // !!! FIXME: these all come out with 0 durations on my test phone. :(
@@ -650,13 +653,13 @@ static void GatherCameraSpecs(const char *devid, CameraFormatAddData *add_data,
const int fpsw = (int) i64ptr[1];
const int fpsh = (int) i64ptr[2];
const long long duration = (long long) i64ptr[3];
SDL_Log("CAMERA: possible fps %s %dx%d duration=%lld", SDL_GetPixelFormatName(format_android_to_sdl(fpsfmt)), fpsw, fpsh, duration);
SDL_Log("CAMERA: possible fps %s %dx%d duration=%lld", SDL_GetPixelFormatName(sdlfmt), fpsw, fpsh, duration);
if ((duration > 0) && (fpsfmt == fmt) && (fpsw == w) && (fpsh == h)) {
SDL_AddCameraFormat(add_data, sdlfmt, w, h, duration, 1000000000);
SDL_AddCameraFormat(add_data, sdlfmt, colorspace, w, h, duration, 1000000000);
}
}
#else
SDL_AddCameraFormat(add_data, sdlfmt, w, h, 1, 30);
SDL_AddCameraFormat(add_data, sdlfmt, colorspace, w, h, 1, 30);
#endif
}
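
Every backend in this commit follows the same shape: map the native format id to an (SDL_PixelFormatEnum, SDL_Colorspace) pair, then pass both through the widened SDL_AddCameraFormat. A condensed sketch of that pattern, with a hypothetical native-format table standing in for the platform enums:

typedef struct NativeFormatMap
{
    Uint32 native_fmt;               /* hypothetical platform format id */
    SDL_PixelFormatEnum format;
    SDL_Colorspace colorspace;
} NativeFormatMap;

static const NativeFormatMap native_map[] = {
    { 1, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_LIMITED },   /* a planar YUV format */
    { 2, SDL_PIXELFORMAT_RGBA8888, SDL_COLORSPACE_SRGB }         /* a packed RGB format */
};

static int AddNativeFormat(CameraFormatAddData *data, Uint32 native_fmt, int w, int h, int num, int denom)
{
    size_t i;
    for (i = 0; i < SDL_arraysize(native_map); i++) {
        if (native_map[i].native_fmt == native_fmt) {
            return SDL_AddCameraFormat(data, native_map[i].format, native_map[i].colorspace, w, h, num, denom);
        }
    }
    return -1;  /* unknown native format; callers just skip it */
}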

View File

@@ -41,24 +41,24 @@
* <key>com.apple.security.device.camera</key> <true/>
*/
static SDL_PixelFormatEnum CoreMediaFormatToSDL(FourCharCode fmt)
static void CoreMediaFormatToSDL(FourCharCode fmt, SDL_PixelFormatEnum *pixel_format, SDL_Colorspace *colorspace)
{
switch (fmt) {
#define CASE(x, y) case x: return y
#define CASE(x, y, z) case x: *pixel_format = y; *colorspace = z; return
// the 16LE ones should use 16BE if we're on a Bigendian system like PowerPC,
// but at current time there is no bigendian Apple platform that has CoreMedia.
CASE(kCMPixelFormat_16LE555, SDL_PIXELFORMAT_XRGB1555);
CASE(kCMPixelFormat_16LE5551, SDL_PIXELFORMAT_RGBA5551);
CASE(kCMPixelFormat_16LE565, SDL_PIXELFORMAT_RGB565);
CASE(kCMPixelFormat_24RGB, SDL_PIXELFORMAT_RGB24);
CASE(kCMPixelFormat_32ARGB, SDL_PIXELFORMAT_ARGB32);
CASE(kCMPixelFormat_32BGRA, SDL_PIXELFORMAT_BGRA32);
CASE(kCMPixelFormat_422YpCbCr8, SDL_PIXELFORMAT_UYVY);
CASE(kCMPixelFormat_422YpCbCr8_yuvs, SDL_PIXELFORMAT_YUY2);
CASE(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, SDL_PIXELFORMAT_NV12);
CASE(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, SDL_PIXELFORMAT_NV12);
CASE(kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange, SDL_PIXELFORMAT_P010);
CASE(kCVPixelFormatType_420YpCbCr10BiPlanarFullRange, SDL_PIXELFORMAT_P010);
CASE(kCMPixelFormat_16LE555, SDL_PIXELFORMAT_XRGB1555, SDL_COLORSPACE_SRGB);
CASE(kCMPixelFormat_16LE5551, SDL_PIXELFORMAT_RGBA5551, SDL_COLORSPACE_SRGB);
CASE(kCMPixelFormat_16LE565, SDL_PIXELFORMAT_RGB565, SDL_COLORSPACE_SRGB);
CASE(kCMPixelFormat_24RGB, SDL_PIXELFORMAT_RGB24, SDL_COLORSPACE_SRGB);
CASE(kCMPixelFormat_32ARGB, SDL_PIXELFORMAT_ARGB32, SDL_COLORSPACE_SRGB);
CASE(kCMPixelFormat_32BGRA, SDL_PIXELFORMAT_BGRA32, SDL_COLORSPACE_SRGB);
CASE(kCMPixelFormat_422YpCbCr8, SDL_PIXELFORMAT_UYVY, SDL_COLORSPACE_BT709_LIMITED);
CASE(kCMPixelFormat_422YpCbCr8_yuvs, SDL_PIXELFORMAT_YUY2, SDL_COLORSPACE_BT709_LIMITED);
CASE(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_LIMITED);
CASE(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_FULL);
CASE(kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange, SDL_PIXELFORMAT_P010, SDL_COLORSPACE_BT2020_LIMITED);
CASE(kCVPixelFormatType_420YpCbCr10BiPlanarFullRange, SDL_PIXELFORMAT_P010, SDL_COLORSPACE_BT2020_FULL);
#undef CASE
default:
#if DEBUG_CAMERA
@@ -66,7 +66,8 @@ static SDL_PixelFormatEnum CoreMediaFormatToSDL(FourCharCode fmt)
#endif
break;
}
return SDL_PIXELFORMAT_UNKNOWN;
*pixel_format = SDL_PIXELFORMAT_UNKNOWN;
*colorspace = SDL_COLORSPACE_UNKNOWN;
}
@class SDLCaptureVideoDataOutputSampleBufferDelegate;
@@ -252,7 +253,6 @@ static int COREMEDIA_OpenDevice(SDL_CameraDevice *device, const SDL_CameraSpec *
AVCaptureDevice *avdevice = (__bridge AVCaptureDevice *) device->handle;
// Pick format that matches the spec
const SDL_PixelFormatEnum sdlfmt = spec->format;
const int w = spec->width;
const int h = spec->height;
const int rate = spec->interval_denominator;
@@ -260,7 +260,10 @@ static int COREMEDIA_OpenDevice(SDL_CameraDevice *device, const SDL_CameraSpec *
NSArray<AVCaptureDeviceFormat *> *formats = [avdevice formats];
for (AVCaptureDeviceFormat *format in formats) {
CMFormatDescriptionRef formatDescription = [format formatDescription];
if (CoreMediaFormatToSDL(CMFormatDescriptionGetMediaSubType(formatDescription)) != sdlfmt) {
SDL_PixelFormatEnum device_format = SDL_PIXELFORMAT_UNKNOWN;
SDL_Colorspace device_colorspace = SDL_COLORSPACE_UNKNOWN;
CoreMediaFormatToSDL(CMFormatDescriptionGetMediaSubType(formatDescription), &device_format, &device_colorspace);
if (device_format != spec->format || device_colorspace != spec->colorspace) {
continue;
}
@@ -296,6 +299,11 @@ static int COREMEDIA_OpenDevice(SDL_CameraDevice *device, const SDL_CameraSpec *
}
session.sessionPreset = AVCaptureSessionPresetHigh;
#if defined(SDL_PLATFORM_IOS)
if (@available(iOS 10.0, tvOS 17.0, *)) {
session.automaticallyConfiguresCaptureDeviceForWideColor = NO;
}
#endif
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:avdevice error:&error];
@@ -373,8 +381,11 @@ static void GatherCameraSpecs(AVCaptureDevice *device, CameraFormatAddData *add_
continue;
}
const SDL_PixelFormatEnum sdlfmt = CoreMediaFormatToSDL(CMFormatDescriptionGetMediaSubType(fmt.formatDescription));
if (sdlfmt == SDL_PIXELFORMAT_UNKNOWN) {
NSLog(@"Available camera format: %@\n", fmt);
SDL_PixelFormatEnum device_format = SDL_PIXELFORMAT_UNKNOWN;
SDL_Colorspace device_colorspace = SDL_COLORSPACE_UNKNOWN;
CoreMediaFormatToSDL(CMFormatDescriptionGetMediaSubType(fmt.formatDescription), &device_format, &device_colorspace);
if (device_format == SDL_PIXELFORMAT_UNKNOWN) {
continue;
}
@@ -386,11 +397,11 @@ static void GatherCameraSpecs(AVCaptureDevice *device, CameraFormatAddData *add_
rate = (int) SDL_ceil((double) framerate.minFrameRate);
if (rate) {
SDL_AddCameraFormat(add_data, sdlfmt, w, h, 1, rate);
SDL_AddCameraFormat(add_data, device_format, device_colorspace, w, h, 1, rate);
}
rate = (int) SDL_floor((double) framerate.maxFrameRate);
if (rate) {
SDL_AddCameraFormat(add_data, sdlfmt, w, h, 1, rate);
SDL_AddCameraFormat(add_data, device_format, device_colorspace, w, h, 1, rate);
}
}
}

View File

@@ -44,6 +44,11 @@ static const IID SDL_IID_IMF2DBuffer2 = { 0x33ae5ea6, 0x4316, 0x436f, { 0x8d, 0x
static const GUID SDL_MF_MT_DEFAULT_STRIDE = { 0x644b4e48, 0x1e02, 0x4516, { 0xb0, 0xeb, 0xc0, 0x1c, 0xa9, 0xd4, 0x9a, 0xc6 } };
static const GUID SDL_MF_MT_MAJOR_TYPE = { 0x48eba18e, 0xf8c9, 0x4687, { 0xbf, 0x11, 0x0a, 0x74, 0xc9, 0xf9, 0x6a, 0x8f } };
static const GUID SDL_MF_MT_SUBTYPE = { 0xf7e34c9a, 0x42e8, 0x4714, { 0xb7, 0x4b, 0xcb, 0x29, 0xd7, 0x2c, 0x35, 0xe5 } };
static const GUID SDL_MF_MT_VIDEO_NOMINAL_RANGE = { 0xc21b8ee5, 0xb956, 0x4071, { 0x8d, 0xaf, 0x32, 0x5e, 0xdf, 0x5c, 0xab, 0x11 } };
static const GUID SDL_MF_MT_VIDEO_PRIMARIES = { 0xdbfbe4d7, 0x0740, 0x4ee0, { 0x81, 0x92, 0x85, 0x0a, 0xb0, 0xe2, 0x19, 0x35 } };
static const GUID SDL_MF_MT_TRANSFER_FUNCTION = { 0x5fb0fce9, 0xbe5c, 0x4935, { 0xa8, 0x11, 0xec, 0x83, 0x8f, 0x8e, 0xed, 0x93 } };
static const GUID SDL_MF_MT_YUV_MATRIX = { 0x3e23d450, 0x2c75, 0x4d25, { 0xa0, 0x0e, 0xb9, 0x16, 0x70, 0xd1, 0x23, 0x27 } };
static const GUID SDL_MF_MT_VIDEO_CHROMA_SITING = { 0x65df2370, 0xc773, 0x4c33, { 0xaa, 0x64, 0x84, 0x3e, 0x06, 0x8e, 0xfb, 0x0c } };
static const GUID SDL_MF_MT_FRAME_SIZE = { 0x1652c33d, 0xd6b2, 0x4012, { 0xb8, 0x34, 0x72, 0x03, 0x08, 0x49, 0xa3, 0x7d } };
static const GUID SDL_MF_MT_FRAME_RATE = { 0xc459a2e8, 0x3d2c, 0x4e44, { 0xb1, 0x32, 0xfe, 0xe5, 0x15, 0x6c, 0x7b, 0xb0 } };
static const GUID SDL_MFMediaType_Video = { 0x73646976, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 } };
@@ -80,39 +85,225 @@ SDL_DEFINE_MEDIATYPE_GUID(MFVideoFormat_NV21, FCC('NV21'));
static const struct
{
const GUID *guid;
const SDL_PixelFormatEnum sdlfmt;
SDL_PixelFormatEnum format;
SDL_Colorspace colorspace;
} fmtmappings[] = {
// This is not every possible format, just popular ones that SDL can reasonably handle.
// (and we should probably trim this list more.)
{ &SDL_MFVideoFormat_RGB555, SDL_PIXELFORMAT_XRGB1555 },
{ &SDL_MFVideoFormat_RGB565, SDL_PIXELFORMAT_RGB565 },
{ &SDL_MFVideoFormat_RGB24, SDL_PIXELFORMAT_RGB24 },
{ &SDL_MFVideoFormat_RGB32, SDL_PIXELFORMAT_XRGB8888 },
{ &SDL_MFVideoFormat_ARGB32, SDL_PIXELFORMAT_ARGB8888 },
{ &SDL_MFVideoFormat_A2R10G10B10, SDL_PIXELFORMAT_ARGB2101010 },
{ &SDL_MFVideoFormat_YV12, SDL_PIXELFORMAT_YV12 },
{ &SDL_MFVideoFormat_IYUV, SDL_PIXELFORMAT_IYUV },
{ &SDL_MFVideoFormat_YUY2, SDL_PIXELFORMAT_YUY2 },
{ &SDL_MFVideoFormat_UYVY, SDL_PIXELFORMAT_UYVY },
{ &SDL_MFVideoFormat_YVYU, SDL_PIXELFORMAT_YVYU },
{ &SDL_MFVideoFormat_NV12, SDL_PIXELFORMAT_NV12 },
{ &SDL_MFVideoFormat_NV21, SDL_PIXELFORMAT_NV21 }
{ &SDL_MFVideoFormat_RGB555, SDL_PIXELFORMAT_XRGB1555, SDL_COLORSPACE_SRGB },
{ &SDL_MFVideoFormat_RGB565, SDL_PIXELFORMAT_RGB565, SDL_COLORSPACE_SRGB },
{ &SDL_MFVideoFormat_RGB24, SDL_PIXELFORMAT_RGB24, SDL_COLORSPACE_SRGB },
{ &SDL_MFVideoFormat_RGB32, SDL_PIXELFORMAT_XRGB8888, SDL_COLORSPACE_SRGB },
{ &SDL_MFVideoFormat_ARGB32, SDL_PIXELFORMAT_ARGB8888, SDL_COLORSPACE_SRGB },
{ &SDL_MFVideoFormat_A2R10G10B10, SDL_PIXELFORMAT_ARGB2101010, SDL_COLORSPACE_SRGB },
{ &SDL_MFVideoFormat_YV12, SDL_PIXELFORMAT_YV12, SDL_COLORSPACE_BT709_LIMITED },
{ &SDL_MFVideoFormat_IYUV, SDL_PIXELFORMAT_IYUV, SDL_COLORSPACE_BT709_LIMITED },
{ &SDL_MFVideoFormat_YUY2, SDL_PIXELFORMAT_YUY2, SDL_COLORSPACE_BT709_LIMITED },
{ &SDL_MFVideoFormat_UYVY, SDL_PIXELFORMAT_UYVY, SDL_COLORSPACE_BT709_LIMITED },
{ &SDL_MFVideoFormat_YVYU, SDL_PIXELFORMAT_YVYU, SDL_COLORSPACE_BT709_LIMITED },
{ &SDL_MFVideoFormat_NV12, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_LIMITED },
{ &SDL_MFVideoFormat_NV21, SDL_PIXELFORMAT_NV21, SDL_COLORSPACE_BT709_LIMITED }
};
static SDL_PixelFormatEnum MFVidFmtGuidToSDLFmt(const GUID *guid)
static SDL_Colorspace GetMediaTypeColorspace(IMFMediaType *mediatype, SDL_Colorspace default_colorspace)
{
for (size_t i = 0; i < SDL_arraysize(fmtmappings); i++) {
if (WIN_IsEqualGUID(guid, fmtmappings[i].guid)) {
return fmtmappings[i].sdlfmt;
SDL_Colorspace colorspace = default_colorspace;
if (SDL_COLORSPACETYPE(colorspace) == SDL_COLOR_TYPE_YCBCR) {
HRESULT ret;
UINT32 range = 0, primaries = 0, transfer = 0, matrix = 0, chroma = 0;
ret = IMFMediaType_GetUINT32(mediatype, &SDL_MF_MT_VIDEO_NOMINAL_RANGE, &range);
if (SUCCEEDED(ret)) {
switch (range) {
case MFNominalRange_0_255:
range = SDL_COLOR_RANGE_FULL;
break;
case MFNominalRange_16_235:
range = SDL_COLOR_RANGE_LIMITED;
break;
default:
range = (UINT32)SDL_COLORSPACERANGE(default_colorspace);
break;
}
} else {
range = (UINT32)SDL_COLORSPACERANGE(default_colorspace);
}
ret = IMFMediaType_GetUINT32(mediatype, &SDL_MF_MT_VIDEO_PRIMARIES, &primaries);
if (SUCCEEDED(ret)) {
switch (primaries) {
case MFVideoPrimaries_BT709:
primaries = SDL_COLOR_PRIMARIES_BT709;
break;
case MFVideoPrimaries_BT470_2_SysM:
primaries = SDL_COLOR_PRIMARIES_BT470M;
break;
case MFVideoPrimaries_BT470_2_SysBG:
primaries = SDL_COLOR_PRIMARIES_BT470BG;
break;
case MFVideoPrimaries_SMPTE170M:
primaries = SDL_COLOR_PRIMARIES_BT601;
break;
case MFVideoPrimaries_SMPTE240M:
primaries = SDL_COLOR_PRIMARIES_SMPTE240;
break;
case MFVideoPrimaries_EBU3213:
primaries = SDL_COLOR_PRIMARIES_EBU3213;
break;
case MFVideoPrimaries_BT2020:
primaries = SDL_COLOR_PRIMARIES_BT2020;
break;
case MFVideoPrimaries_XYZ:
primaries = SDL_COLOR_PRIMARIES_XYZ;
break;
case MFVideoPrimaries_DCI_P3:
primaries = SDL_COLOR_PRIMARIES_SMPTE432;
break;
default:
primaries = (UINT32)SDL_COLORSPACEPRIMARIES(default_colorspace);
break;
}
} else {
primaries = (UINT32)SDL_COLORSPACEPRIMARIES(default_colorspace);
}
ret = IMFMediaType_GetUINT32(mediatype, &SDL_MF_MT_TRANSFER_FUNCTION, &transfer);
if (SUCCEEDED(ret)) {
switch (transfer) {
case MFVideoTransFunc_10:
transfer = SDL_TRANSFER_CHARACTERISTICS_LINEAR;
break;
case MFVideoTransFunc_22:
transfer = SDL_TRANSFER_CHARACTERISTICS_GAMMA22;
break;
case MFVideoTransFunc_709:
transfer = SDL_TRANSFER_CHARACTERISTICS_BT709;
break;
case MFVideoTransFunc_240M:
transfer = SDL_TRANSFER_CHARACTERISTICS_SMPTE240;
break;
case MFVideoTransFunc_sRGB:
transfer = SDL_TRANSFER_CHARACTERISTICS_SRGB;
break;
case MFVideoTransFunc_28:
transfer = SDL_TRANSFER_CHARACTERISTICS_GAMMA28;
break;
case MFVideoTransFunc_Log_100:
transfer = SDL_TRANSFER_CHARACTERISTICS_LOG100;
break;
case MFVideoTransFunc_2084:
transfer = SDL_TRANSFER_CHARACTERISTICS_PQ;
break;
case MFVideoTransFunc_HLG:
transfer = SDL_TRANSFER_CHARACTERISTICS_HLG;
break;
case 18 /* MFVideoTransFunc_BT1361_ECG */:
transfer = SDL_TRANSFER_CHARACTERISTICS_BT1361;
break;
case 19 /* MFVideoTransFunc_SMPTE428 */:
transfer = SDL_TRANSFER_CHARACTERISTICS_SMPTE428;
break;
default:
transfer = (UINT32)SDL_COLORSPACETRANSFER(default_colorspace);
break;
}
} else {
transfer = (UINT32)SDL_COLORSPACETRANSFER(default_colorspace);
}
ret = IMFMediaType_GetUINT32(mediatype, &SDL_MF_MT_YUV_MATRIX, &matrix);
if (SUCCEEDED(ret)) {
switch (matrix) {
case MFVideoTransferMatrix_BT709:
matrix = SDL_MATRIX_COEFFICIENTS_BT709;
break;
case MFVideoTransferMatrix_BT601:
matrix = SDL_MATRIX_COEFFICIENTS_BT601;
break;
case MFVideoTransferMatrix_SMPTE240M:
matrix = SDL_MATRIX_COEFFICIENTS_SMPTE240;
break;
case MFVideoTransferMatrix_BT2020_10:
matrix = SDL_MATRIX_COEFFICIENTS_BT2020_NCL;
break;
case 6 /* MFVideoTransferMatrix_Identity */:
matrix = SDL_MATRIX_COEFFICIENTS_IDENTITY;
break;
case 7 /* MFVideoTransferMatrix_FCC47 */:
matrix = SDL_MATRIX_COEFFICIENTS_FCC;
break;
case 8 /* MFVideoTransferMatrix_YCgCo */:
matrix = SDL_MATRIX_COEFFICIENTS_YCGCO;
break;
case 9 /* MFVideoTransferMatrix_SMPTE2085 */:
matrix = SDL_MATRIX_COEFFICIENTS_SMPTE2085;
break;
case 10 /* MFVideoTransferMatrix_Chroma */:
matrix = SDL_MATRIX_COEFFICIENTS_CHROMA_DERIVED_NCL;
break;
case 11 /* MFVideoTransferMatrix_Chroma_const */:
matrix = SDL_MATRIX_COEFFICIENTS_CHROMA_DERIVED_CL;
break;
case 12 /* MFVideoTransferMatrix_ICtCp */:
matrix = SDL_MATRIX_COEFFICIENTS_ICTCP;
break;
default:
matrix = (UINT32)SDL_COLORSPACEMATRIX(default_colorspace);
break;
}
} else {
matrix = (UINT32)SDL_COLORSPACEMATRIX(default_colorspace);
}
ret = IMFMediaType_GetUINT32(mediatype, &SDL_MF_MT_VIDEO_CHROMA_SITING, &chroma);
if (SUCCEEDED(ret)) {
switch (chroma) {
case MFVideoChromaSubsampling_MPEG2:
chroma = SDL_CHROMA_LOCATION_LEFT;
break;
case MFVideoChromaSubsampling_MPEG1:
chroma = SDL_CHROMA_LOCATION_CENTER;
break;
case MFVideoChromaSubsampling_DV_PAL:
chroma = SDL_CHROMA_LOCATION_TOPLEFT;
break;
default:
chroma = (UINT32)SDL_COLORSPACECHROMA(default_colorspace);
break;
}
} else {
chroma = (UINT32)SDL_COLORSPACECHROMA(default_colorspace);
}
colorspace = SDL_DEFINE_COLORSPACE(SDL_COLOR_TYPE_YCBCR, range, primaries, transfer, matrix, chroma);
}
return SDL_PIXELFORMAT_UNKNOWN;
return colorspace;
}
static const GUID *SDLFmtToMFVidFmtGuid(SDL_PixelFormatEnum sdlfmt)
static void MediaTypeToSDLFmt(IMFMediaType *mediatype, SDL_PixelFormatEnum *format, SDL_Colorspace *colorspace)
{
HRESULT ret;
GUID type;
ret = IMFMediaType_GetGUID(mediatype, &SDL_MF_MT_SUBTYPE, &type);
if (SUCCEEDED(ret)) {
for (size_t i = 0; i < SDL_arraysize(fmtmappings); i++) {
if (WIN_IsEqualGUID(&type, fmtmappings[i].guid)) {
*format = fmtmappings[i].format;
*colorspace = GetMediaTypeColorspace(mediatype, fmtmappings[i].colorspace);
return;
}
}
}
*format = SDL_PIXELFORMAT_UNKNOWN;
*colorspace = SDL_COLORSPACE_UNKNOWN;
}
static const GUID *SDLFmtToMFVidFmtGuid(SDL_PixelFormatEnum format)
{
for (size_t i = 0; i < SDL_arraysize(fmtmappings); i++) {
if (fmtmappings[i].sdlfmt == sdlfmt) {
if (fmtmappings[i].format == format) {
return fmtmappings[i].guid;
}
}
@@ -739,23 +930,22 @@ static void GatherCameraSpecs(IMFMediaSource *source, CameraFormatAddData *add_d
GUID type;
ret = IMFMediaType_GetGUID(mediatype, &SDL_MF_MT_MAJOR_TYPE, &type);
if (SUCCEEDED(ret) && WIN_IsEqualGUID(&type, &SDL_MFMediaType_Video)) {
ret = IMFMediaType_GetGUID(mediatype, &SDL_MF_MT_SUBTYPE, &type);
if (SUCCEEDED(ret)) {
const SDL_PixelFormatEnum sdlfmt = MFVidFmtGuidToSDLFmt(&type);
if (sdlfmt != SDL_PIXELFORMAT_UNKNOWN) {
UINT64 val = 0;
UINT32 w = 0, h = 0;
ret = IMFMediaType_GetUINT64(mediatype, &SDL_MF_MT_FRAME_SIZE, &val);
w = (UINT32)(val >> 32);
h = (UINT32)val;
if (SUCCEEDED(ret) && w && h) {
UINT32 interval_numerator = 0, interval_denominator = 0;
ret = IMFMediaType_GetUINT64(mediatype, &SDL_MF_MT_FRAME_RATE, &val);
interval_numerator = (UINT32)(val >> 32);
interval_denominator = (UINT32)val;
if (SUCCEEDED(ret) && interval_numerator && interval_denominator) {
SDL_AddCameraFormat(add_data, sdlfmt, (int) w, (int) h, (int) interval_numerator, (int) interval_denominator); // whew.
}
SDL_PixelFormatEnum sdlfmt = SDL_PIXELFORMAT_UNKNOWN;
SDL_Colorspace colorspace = SDL_COLORSPACE_UNKNOWN;
MediaTypeToSDLFmt(mediatype, &sdlfmt, &colorspace);
if (sdlfmt != SDL_PIXELFORMAT_UNKNOWN) {
UINT64 val = 0;
UINT32 w = 0, h = 0;
ret = IMFMediaType_GetUINT64(mediatype, &SDL_MF_MT_FRAME_SIZE, &val);
w = (UINT32)(val >> 32);
h = (UINT32)val;
if (SUCCEEDED(ret) && w && h) {
UINT32 interval_numerator = 0, interval_denominator = 0;
ret = IMFMediaType_GetUINT64(mediatype, &SDL_MF_MT_FRAME_RATE, &val);
interval_numerator = (UINT32)(val >> 32);
interval_denominator = (UINT32)val;
if (SUCCEEDED(ret) && interval_numerator && interval_denominator) {
SDL_AddCameraFormat(add_data, sdlfmt, colorspace, (int) w, (int) h, (int)interval_numerator, (int)interval_denominator);
}
}
}
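
The Media Foundation path reassembles the colorspace from its individual attributes with SDL_DEFINE_COLORSPACE, falling back to the mapping table's default for anything the media type doesn't report. A small sketch of composing a colorspace from components and querying one back, using only macros that appear in this diff (the particular components, such as the left chroma siting, are illustrative):

/* Sketch: build a limited-range BT.709 YCbCr colorspace from parts, and
   check range the same way CameraSpecCmp and this backend do. */
static SDL_Colorspace ComposeBT709Limited(void)
{
    return SDL_DEFINE_COLORSPACE(SDL_COLOR_TYPE_YCBCR,
                                 SDL_COLOR_RANGE_LIMITED,
                                 SDL_COLOR_PRIMARIES_BT709,
                                 SDL_TRANSFER_CHARACTERISTICS_BT709,
                                 SDL_MATRIX_COEFFICIENTS_BT709,
                                 SDL_CHROMA_LOCATION_LEFT);
}

static SDL_bool IsFullRange(SDL_Colorspace colorspace)
{
    return (SDL_COLORSPACERANGE(colorspace) == SDL_COLOR_RANGE_FULL) ? SDL_TRUE : SDL_FALSE;
}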

View File

@@ -357,55 +357,60 @@ static void param_update(struct spa_list *param_list, struct spa_list *pending_l
}
static struct sdl_video_format {
Uint32 format;
uint32_t id;
SDL_PixelFormatEnum format;
SDL_Colorspace colorspace;
uint32_t id;
} sdl_video_formats[] = {
#if SDL_BYTEORDER == SDL_BIG_ENDIAN
{ SDL_PIXELFORMAT_RGBX8888, SPA_VIDEO_FORMAT_RGBx,},
{ SDL_PIXELFORMAT_BGRX8888, SPA_VIDEO_FORMAT_BGRx,},
{ SDL_PIXELFORMAT_RGBA8888, SPA_VIDEO_FORMAT_RGBA,},
{ SDL_PIXELFORMAT_ARGB8888, SPA_VIDEO_FORMAT_ARGB,},
{ SDL_PIXELFORMAT_BGRA8888, SPA_VIDEO_FORMAT_BGRA,},
{ SDL_PIXELFORMAT_ABGR8888, SPA_VIDEO_FORMAT_ABGR,},
{ SDL_PIXELFORMAT_RGBX8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_RGBx },
{ SDL_PIXELFORMAT_BGRX8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_BGRx },
{ SDL_PIXELFORMAT_RGBA8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_RGBA },
{ SDL_PIXELFORMAT_ARGB8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_ARGB },
{ SDL_PIXELFORMAT_BGRA8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_BGRA },
{ SDL_PIXELFORMAT_ABGR8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_ABGR },
#else
{ SDL_PIXELFORMAT_RGBX8888, SPA_VIDEO_FORMAT_xBGR,},
{ SDL_PIXELFORMAT_BGRX8888, SPA_VIDEO_FORMAT_xRGB,},
{ SDL_PIXELFORMAT_RGBA8888, SPA_VIDEO_FORMAT_ABGR,},
{ SDL_PIXELFORMAT_ARGB8888, SPA_VIDEO_FORMAT_BGRA,},
{ SDL_PIXELFORMAT_BGRA8888, SPA_VIDEO_FORMAT_ARGB,},
{ SDL_PIXELFORMAT_ABGR8888, SPA_VIDEO_FORMAT_RGBA,},
{ SDL_PIXELFORMAT_RGBX8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_xBGR },
{ SDL_PIXELFORMAT_BGRX8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_xRGB },
{ SDL_PIXELFORMAT_RGBA8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_ABGR },
{ SDL_PIXELFORMAT_ARGB8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_BGRA },
{ SDL_PIXELFORMAT_BGRA8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_ARGB },
{ SDL_PIXELFORMAT_ABGR8888, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_RGBA },
#endif
{ SDL_PIXELFORMAT_RGB24, SPA_VIDEO_FORMAT_RGB,},
{ SDL_PIXELFORMAT_BGR24, SPA_VIDEO_FORMAT_BGR,},
{ SDL_PIXELFORMAT_YV12, SPA_VIDEO_FORMAT_YV12,},
{ SDL_PIXELFORMAT_IYUV, SPA_VIDEO_FORMAT_I420,},
{ SDL_PIXELFORMAT_YUY2, SPA_VIDEO_FORMAT_YUY2,},
{ SDL_PIXELFORMAT_UYVY, SPA_VIDEO_FORMAT_UYVY,},
{ SDL_PIXELFORMAT_YVYU, SPA_VIDEO_FORMAT_YVYU,},
{ SDL_PIXELFORMAT_RGB24, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_RGB },
{ SDL_PIXELFORMAT_BGR24, SDL_COLORSPACE_SRGB, SPA_VIDEO_FORMAT_BGR },
{ SDL_PIXELFORMAT_YV12, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_YV12 },
{ SDL_PIXELFORMAT_IYUV, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_I420 },
{ SDL_PIXELFORMAT_YUY2, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_YUY2 },
{ SDL_PIXELFORMAT_UYVY, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_UYVY },
{ SDL_PIXELFORMAT_YVYU, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_YVYU },
#if SDL_VERSION_ATLEAST(2,0,4)
{ SDL_PIXELFORMAT_NV12, SPA_VIDEO_FORMAT_NV12,},
{ SDL_PIXELFORMAT_NV21, SPA_VIDEO_FORMAT_NV21,},
{ SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_NV12 },
{ SDL_PIXELFORMAT_NV21, SDL_COLORSPACE_BT709_LIMITED, SPA_VIDEO_FORMAT_NV21 },
#endif
};
static inline uint32_t sdl_format_to_id(Uint32 format)
static uint32_t sdl_format_to_id(SDL_PixelFormatEnum format)
{
struct sdl_video_format *f;
SPA_FOR_EACH_ELEMENT(sdl_video_formats, f) {
if (f->format == format)
return f->id;
}
return SPA_VIDEO_FORMAT_UNKNOWN;
struct sdl_video_format *f;
SPA_FOR_EACH_ELEMENT(sdl_video_formats, f) {
if (f->format == format)
return f->id;
}
return SPA_VIDEO_FORMAT_UNKNOWN;
}
static inline Uint32 id_to_sdl_format(uint32_t id)
static void id_to_sdl_format(uint32_t id, SDL_PixelFormatEnum *format, SDL_Colorspace *colorspace)
{
struct sdl_video_format *f;
SPA_FOR_EACH_ELEMENT(sdl_video_formats, f) {
if (f->id == id)
return f->format;
}
return SDL_PIXELFORMAT_UNKNOWN;
struct sdl_video_format *f;
SPA_FOR_EACH_ELEMENT(sdl_video_formats, f) {
if (f->id == id) {
*format = f->format;
*colorspace = f->colorspace;
return;
}
}
*format = SDL_PIXELFORMAT_UNKNOWN;
*colorspace = SDL_COLORSPACE_UNKNOWN;
}
struct SDL_PrivateCameraData
@@ -592,14 +597,13 @@ static void PIPEWIRECAMERA_ReleaseFrame(SDL_CameraDevice *device, SDL_Surface *f
pw_array_for_each(p, &device->hidden->buffers) {
if ((*p)->buffer->datas[0].data == frame->pixels) {
PIPEWIRE_pw_stream_queue_buffer(device->hidden->stream, (*p));
break;
break;
}
}
PIPEWIRE_pw_thread_loop_unlock(hotplug.loop);
}
static void collect_rates(CameraFormatAddData *data, struct param *p, const Uint32 sdlfmt,
const struct spa_rectangle *size)
static void collect_rates(CameraFormatAddData *data, struct param *p, SDL_PixelFormatEnum sdlfmt, SDL_Colorspace colorspace, const struct spa_rectangle *size)
{
const struct spa_pod_prop *prop;
struct spa_pod * values;
@@ -618,23 +622,22 @@ static void collect_rates(CameraFormatAddData *data, struct param *p, const Uint
switch (choice) {
case SPA_CHOICE_None:
n_vals = 1;
SPA_FALLTHROUGH;
SPA_FALLTHROUGH;
case SPA_CHOICE_Enum:
for (i = 0; i < n_vals; i++) {
for (i = 0; i < n_vals; i++) {
// denom and num are switched, because SDL expects an interval, while pw provides a rate
if (SDL_AddCameraFormat(data, sdlfmt, size->width, size->height,
rates[i].denom, rates[i].num) == -1) {
if (SDL_AddCameraFormat(data, sdlfmt, colorspace, size->width, size->height, rates[i].denom, rates[i].num) < 0) {
return; // Probably out of memory; we'll go with what we have, if anything.
}
}
break;
}
break;
default:
SDL_Log("CAMERA: unimplemented choice:%d", choice);
break;
break;
}
}
static void collect_size(CameraFormatAddData *data, struct param *p, const Uint32 sdlfmt)
static void collect_size(CameraFormatAddData *data, struct param *p, SDL_PixelFormatEnum sdlfmt, SDL_Colorspace colorspace)
{
const struct spa_pod_prop *prop;
struct spa_pod * values;
@@ -653,22 +656,23 @@ static void collect_size(CameraFormatAddData *data, struct param *p, const Uint3
switch (choice) {
case SPA_CHOICE_None:
n_vals = 1;
SPA_FALLTHROUGH;
SPA_FALLTHROUGH;
case SPA_CHOICE_Enum:
for (i = 0; i < n_vals; i++) {
collect_rates(data, p, sdlfmt, &rectangles[i]);
}
break;
for (i = 0; i < n_vals; i++) {
collect_rates(data, p, sdlfmt, colorspace, &rectangles[i]);
}
break;
default:
SDL_Log("CAMERA: unimplemented choice:%d", choice);
break;
break;
}
}
static void collect_format(CameraFormatAddData *data, struct param *p)
{
const struct spa_pod_prop *prop;
Uint32 sdlfmt;
SDL_PixelFormatEnum sdlfmt;
SDL_Colorspace colorspace;
struct spa_pod * values;
uint32_t i, n_vals, choice, *ids;
@@ -686,16 +690,17 @@ static void collect_format(CameraFormatAddData *data, struct param *p)
n_vals = 1;
SPA_FALLTHROUGH;
case SPA_CHOICE_Enum:
for (i = 0; i < n_vals; i++) {
sdlfmt = id_to_sdl_format(ids[i]);
if (sdlfmt == SDL_PIXELFORMAT_UNKNOWN)
for (i = 0; i < n_vals; i++) {
id_to_sdl_format(ids[i], &sdlfmt, &colorspace);
if (sdlfmt == SDL_PIXELFORMAT_UNKNOWN) {
continue;
collect_size(data, p, sdlfmt);
}
break;
}
collect_size(data, p, sdlfmt, colorspace);
}
break;
default:
SDL_Log("CAMERA: unimplemented choice:%d", choice);
break;
break;
}
}
@@ -710,7 +715,7 @@ static void add_device(struct global *g)
if (p->id != SPA_PARAM_EnumFormat)
continue;
collect_format(&data, p);
collect_format(&data, p);
}
if (data.num_specs > 0) {
SDL_AddCameraDevice(g->name, SDL_CAMERA_POSITION_UNKNOWN,

View File

@@ -391,22 +391,23 @@ static int AllocBufferUserPtr(SDL_CameraDevice *device, size_t buffer_size)
return 0;
}
static Uint32 format_v4l2_to_sdl(Uint32 fmt)
static void format_v4l2_to_sdl(Uint32 fmt, SDL_PixelFormatEnum *format, SDL_Colorspace *colorspace)
{
switch (fmt) {
#define CASE(x, y) case x: return y
CASE(V4L2_PIX_FMT_YUYV, SDL_PIXELFORMAT_YUY2);
CASE(V4L2_PIX_FMT_MJPEG, SDL_PIXELFORMAT_UNKNOWN);
#undef CASE
default:
#if DEBUG_CAMERA
SDL_Log("CAMERA: Unknown format V4L2_PIX_FORMAT '%d'", fmt);
#endif
return SDL_PIXELFORMAT_UNKNOWN;
#define CASE(x, y, z) case x: *format = y; *colorspace = z; return
CASE(V4L2_PIX_FMT_YUYV, SDL_PIXELFORMAT_YUY2, SDL_COLORSPACE_BT709_LIMITED);
#undef CASE
default:
#if DEBUG_CAMERA
SDL_Log("CAMERA: Unknown format V4L2_PIX_FORMAT '%d'", fmt);
#endif
break;
}
*format = SDL_PIXELFORMAT_UNKNOWN;
*colorspace = SDL_COLORSPACE_UNKNOWN;
}
static Uint32 format_sdl_to_v4l2(Uint32 fmt)
static Uint32 format_sdl_to_v4l2(SDL_PixelFormatEnum fmt)
{
switch (fmt) {
#define CASE(y, x) case x: return y
@@ -644,7 +645,7 @@ static SDL_bool FindV4L2CameraDeviceByBusInfoCallback(SDL_CameraDevice *device,
return (SDL_strcmp(handle->bus_info, (const char *) userdata) == 0);
}
static int AddCameraFormat(const int fd, CameraFormatAddData *data, Uint32 sdlfmt, Uint32 v4l2fmt, int w, int h)
static int AddCameraFormat(const int fd, CameraFormatAddData *data, SDL_PixelFormatEnum sdlfmt, SDL_Colorspace colorspace, Uint32 v4l2fmt, int w, int h)
{
struct v4l2_frmivalenum frmivalenum;
SDL_zero(frmivalenum);
@@ -660,7 +661,7 @@ static int AddCameraFormat(const int fd, CameraFormatAddData *data, Uint32 sdlfm
const float fps = (float) denominator / (float) numerator;
SDL_Log("CAMERA: * Has discrete frame interval (%d / %d), fps=%f", numerator, denominator, fps);
#endif
if (SDL_AddCameraFormat(data, sdlfmt, w, h, numerator, denominator) == -1) {
if (SDL_AddCameraFormat(data, sdlfmt, colorspace, w, h, numerator, denominator) == -1) {
return -1; // Probably out of memory; we'll go with what we have, if anything.
}
frmivalenum.index++; // set up for the next one.
@@ -672,7 +673,7 @@ static int AddCameraFormat(const int fd, CameraFormatAddData *data, Uint32 sdlfm
const float fps = (float) d / (float) n;
SDL_Log("CAMERA: * Has %s frame interval (%d / %d), fps=%f", (frmivalenum.type == V4L2_FRMIVAL_TYPE_STEPWISE) ? "stepwise" : "continuous", n, d, fps);
#endif
if (SDL_AddCameraFormat(data, sdlfmt, w, h, n, d) == -1) {
if (SDL_AddCameraFormat(data, sdlfmt, colorspace, w, h, n, d) == -1) {
return -1; // Probably out of memory; we'll go with what we have, if anything.
}
d += (int) frmivalenum.stepwise.step.denominator;
@@ -727,7 +728,9 @@ static void MaybeAddDevice(const char *path)
SDL_zero(fmtdesc);
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
while (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == 0) {
const Uint32 sdlfmt = format_v4l2_to_sdl(fmtdesc.pixelformat);
SDL_PixelFormatEnum sdlfmt = SDL_PIXELFORMAT_UNKNOWN;
SDL_Colorspace colorspace = SDL_COLORSPACE_UNKNOWN;
format_v4l2_to_sdl(fmtdesc.pixelformat, &sdlfmt, &colorspace);
#if DEBUG_CAMERA
SDL_Log("CAMERA: - Has format '%s'%s%s", SDL_GetPixelFormatName(sdlfmt),
@@ -752,7 +755,7 @@ static void MaybeAddDevice(const char *path)
#if DEBUG_CAMERA
SDL_Log("CAMERA: * Has discrete size %dx%d", w, h);
#endif
if (AddCameraFormat(fd, &add_data, sdlfmt, fmtdesc.pixelformat, w, h) == -1) {
if (AddCameraFormat(fd, &add_data, sdlfmt, colorspace, fmtdesc.pixelformat, w, h) == -1) {
break; // Probably out of memory; we'll go with what we have, if anything.
}
frmsizeenum.index++; // set up for the next one.
@@ -768,7 +771,7 @@ static void MaybeAddDevice(const char *path)
#if DEBUG_CAMERA
SDL_Log("CAMERA: * Has %s size %dx%d", (frmsizeenum.type == V4L2_FRMSIZE_TYPE_STEPWISE) ? "stepwise" : "continuous", w, h);
#endif
if (AddCameraFormat(fd, &add_data, sdlfmt, fmtdesc.pixelformat, w, h) == -1) {
if (AddCameraFormat(fd, &add_data, sdlfmt, colorspace, fmtdesc.pixelformat, w, h) == -1) {
break; // Probably out of memory; we'll go with what we have, if anything.
}
}

View File

@@ -274,8 +274,18 @@ int SDL_AppIterate(void *appstate)
SDL_DestroyTexture(texture);
}
SDL_Colorspace colorspace = SDL_COLORSPACE_UNKNOWN;
SDL_GetSurfaceColorspace(frame_current, &colorspace);
/* Create texture with appropriate format */
texture = SDL_CreateTexture(renderer, frame_current->format->format, SDL_TEXTUREACCESS_STREAMING, frame_current->w, frame_current->h);
SDL_PropertiesID props = SDL_CreateProperties();
SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_FORMAT_NUMBER, frame_current->format->format);
SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_COLORSPACE_NUMBER, colorspace);
SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_ACCESS_NUMBER, SDL_TEXTUREACCESS_STREAMING);
SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_WIDTH_NUMBER, frame_current->w);
SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_HEIGHT_NUMBER, frame_current->h);
texture = SDL_CreateTextureWithProperties(renderer, props);
SDL_DestroyProperties(props);
if (!texture) {
SDL_Log("Couldn't create texture: %s", SDL_GetError());
return SDL_APP_FAILURE;
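
Beyond this hunk, the same property-based creation can be wrapped in a small helper so the texture always inherits whatever colorspace the camera frame carries; a sketch using only the calls that appear above:

/* Sketch: create a streaming texture matching a camera frame's format,
   size and colorspace. Returns NULL on failure; the caller logs SDL_GetError(). */
static SDL_Texture *CreateTextureForFrame(SDL_Renderer *renderer, SDL_Surface *frame)
{
    SDL_Colorspace colorspace = SDL_COLORSPACE_UNKNOWN;
    SDL_GetSurfaceColorspace(frame, &colorspace);

    SDL_PropertiesID props = SDL_CreateProperties();
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_FORMAT_NUMBER, frame->format->format);
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_COLORSPACE_NUMBER, colorspace);
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_ACCESS_NUMBER, SDL_TEXTUREACCESS_STREAMING);
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_WIDTH_NUMBER, frame->w);
    SDL_SetNumberProperty(props, SDL_PROP_TEXTURE_CREATE_HEIGHT_NUMBER, frame->h);
    SDL_Texture *texture = SDL_CreateTextureWithProperties(renderer, props);
    SDL_DestroyProperties(props);
    return texture;
}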