Atlas - SDL_camera_coremedia.m

Home / ext / SDL / src / camera / coremedia Lines: 2 | Size: 27353 bytes [Download] [Show on GitHub] [Search similar files] [Raw] [Raw (proxy)]
[FILE BEGIN]
/*
  Simple DirectMedia Layer
  Copyright (C) 1997-2025 Sam Lantinga <[email protected]>

  This software is provided 'as-is', without any express or implied
  warranty. In no event will the authors be held liable for any damages
  arising from the use of this software.

  Permission is granted to anyone to use this software for any purpose,
  including commercial applications, and to alter it and redistribute it
  freely, subject to the following restrictions:

  1. The origin of this software must not be misrepresented; you must not
     claim that you wrote the original software. If you use this software
     in a product, an acknowledgment in the product documentation would be
     appreciated but is not required.
  2. Altered source versions must be plainly marked as such, and must not be
     misrepresented as being the original software.
  3. This notice may not be removed or altered from any source distribution.
*/
#include "SDL_internal.h"

#ifdef SDL_CAMERA_DRIVER_COREMEDIA

#include "../SDL_syscamera.h"
#include "../SDL_camera_c.h"
#include "../../thread/SDL_systhread.h"

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

// Device-orientation tracking through UIKit only makes sense on iPhone/iPad
// (tvOS has UIKit but no meaningful device orientation).
#if defined(SDL_PLATFORM_IOS) && !defined(SDL_PLATFORM_TVOS)
#define USE_UIKIT_DEVICE_ROTATION
#endif

#ifdef USE_UIKIT_DEVICE_ROTATION
#import <UIKit/UIKit.h>
#endif

/*
 * Need to link with: CoreMedia CoreVideo
 *
 * Add in Info.plist:
 * <key>NSCameraUsageDescription</key> <string>Access camera</string>
 *
 *
 * MACOSX:
 * Add to the Code Sign Entitlement file:
 * <key>com.apple.security.device.camera</key> <true/>
 */

// Map a CoreMedia/CoreVideo pixel format FourCC to the equivalent SDL pixel
// format and colorspace. On an unrecognized FourCC, both outputs are set to
// their respective *_UNKNOWN values.
static void CoreMediaFormatToSDL(FourCharCode fmt, SDL_PixelFormat *pixel_format, SDL_Colorspace *colorspace)
{
    switch (fmt) {
        #define CASE(x, y, z) case x: *pixel_format = y; *colorspace = z; return
        // the 16LE ones should use 16BE if we're on a Bigendian system like PowerPC,
        // but at current time there is no bigendian Apple platform that has CoreMedia.
        CASE(kCMPixelFormat_16LE555, SDL_PIXELFORMAT_XRGB1555, SDL_COLORSPACE_SRGB);
        CASE(kCMPixelFormat_16LE5551, SDL_PIXELFORMAT_RGBA5551, SDL_COLORSPACE_SRGB);
        CASE(kCMPixelFormat_16LE565, SDL_PIXELFORMAT_RGB565, SDL_COLORSPACE_SRGB);
        CASE(kCMPixelFormat_24RGB, SDL_PIXELFORMAT_RGB24, SDL_COLORSPACE_SRGB);
        CASE(kCMPixelFormat_32ARGB, SDL_PIXELFORMAT_ARGB32, SDL_COLORSPACE_SRGB);
        CASE(kCMPixelFormat_32BGRA, SDL_PIXELFORMAT_BGRA32, SDL_COLORSPACE_SRGB);
        CASE(kCMPixelFormat_422YpCbCr8, SDL_PIXELFORMAT_UYVY, SDL_COLORSPACE_BT709_LIMITED);
        CASE(kCMPixelFormat_422YpCbCr8_yuvs, SDL_PIXELFORMAT_YUY2, SDL_COLORSPACE_BT709_LIMITED);
        CASE(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_LIMITED);
        CASE(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, SDL_PIXELFORMAT_NV12, SDL_COLORSPACE_BT709_FULL);
        CASE(kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange, SDL_PIXELFORMAT_P010, SDL_COLORSPACE_BT2020_LIMITED);
        CASE(kCVPixelFormatType_420YpCbCr10BiPlanarFullRange, SDL_PIXELFORMAT_P010, SDL_COLORSPACE_BT2020_FULL);
        #undef CASE
        default:
            #if DEBUG_CAMERA
            SDL_Log("CAMERA: Unknown format FourCharCode '%d'", (int) fmt);
            #endif
            break;
    }
    *pixel_format = SDL_PIXELFORMAT_UNKNOWN;
    *colorspace = SDL_COLORSPACE_UNKNOWN;
}

@class SDLCaptureVideoDataOutputSampleBufferDelegate;

// just a simple wrapper to help ARC manage memory...
84@interface SDLPrivateCameraData : NSObject 85@property(nonatomic, retain) AVCaptureSession *session; 86@property(nonatomic, retain) SDLCaptureVideoDataOutputSampleBufferDelegate *delegate; 87@property(nonatomic, assign) CMSampleBufferRef current_sample; 88#ifdef USE_UIKIT_DEVICE_ROTATION 89@property(nonatomic, assign) UIDeviceOrientation last_device_orientation; 90#endif 91@end 92 93@implementation SDLPrivateCameraData 94@end 95 96 97static bool CheckCameraPermissions(SDL_Camera *device) 98{ 99 if (device->permission == SDL_CAMERA_PERMISSION_STATE_PENDING) { // still expecting a permission result. 100 if (@available(macOS 14, *)) { 101 const AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]; 102 if (status != AVAuthorizationStatusNotDetermined) { // NotDetermined == still waiting for an answer from the user. 103 SDL_CameraPermissionOutcome(device, (status == AVAuthorizationStatusAuthorized) ? true : false); 104 } 105 } else { 106 SDL_CameraPermissionOutcome(device, true); // always allowed (or just unqueryable...?) on older macOS. 107 } 108 } 109 110 return (device->permission > SDL_CAMERA_PERMISSION_STATE_PENDING); 111} 112 113// this delegate just receives new video frames on a Grand Central Dispatch queue, and fires off the 114// main device thread iterate function directly to consume it. 
@interface SDLCaptureVideoDataOutputSampleBufferDelegate : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate>
    @property SDL_Camera *device;
    -(id) init:(SDL_Camera *) dev;
    -(void) captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
@end

@implementation SDLCaptureVideoDataOutputSampleBufferDelegate

    -(id) init:(SDL_Camera *) dev {
        if ( self = [super init] ) {
            _device = dev;
        }
        return self;
    }

    // New frame arrived on the GCD queue: park it in hidden.current_sample just
    // long enough to run one iteration of the SDL camera thread loop, which
    // will consume it via COREMEDIA_AcquireFrame.
    - (void) captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
    {
        SDL_Camera *device = self.device;
        if (!device || !device->hidden) {
            return; // oh well.
        }

        if (!CheckCameraPermissions(device)) {
            return; // nothing to do right now, dump what is probably a completely black frame.
        }

        SDLPrivateCameraData *hidden = (__bridge SDLPrivateCameraData *) device->hidden;
        hidden.current_sample = sampleBuffer;
        SDL_CameraThreadIterate(device);
        hidden.current_sample = NULL; // only valid for the duration of the iterate call above.
    }

    - (void)captureOutput:(AVCaptureOutput *)output didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
    {
        #if DEBUG_CAMERA
        SDL_Log("CAMERA: Drop frame.");
        #endif
    }
@end

static bool COREMEDIA_WaitDevice(SDL_Camera *device)
{
    return true; // this isn't used atm, since we run our own thread out of Grand Central Dispatch.
}

// Copy the pending CMSampleBuffer (stashed by the delegate) into `frame`,
// report its presentation timestamp in nanoseconds, and (on iOS) the display
// rotation in degrees. Returns SDL_CAMERA_FRAME_READY on success or
// SDL_CAMERA_FRAME_ERROR if the pixel buffer couldn't be allocated.
static SDL_CameraFrameResult COREMEDIA_AcquireFrame(SDL_Camera *device, SDL_Surface *frame, Uint64 *timestampNS, float *rotation)
{
    SDL_CameraFrameResult result = SDL_CAMERA_FRAME_READY;
    SDLPrivateCameraData *hidden = (__bridge SDLPrivateCameraData *) device->hidden;
    CMSampleBufferRef sample_buffer = hidden.current_sample;
    hidden.current_sample = NULL;
    SDL_assert(sample_buffer != NULL); // should only have been called from our delegate with a new frame.

    CMSampleTimingInfo timinginfo;
    if (CMSampleBufferGetSampleTimingInfo(sample_buffer, 0, &timinginfo) == noErr) {
        *timestampNS = (Uint64) (CMTimeGetSeconds(timinginfo.presentationTimeStamp) * ((Float64) SDL_NS_PER_SECOND));
    } else {
        SDL_assert(!"this shouldn't happen, I think.");
        *timestampNS = 0;
    }

    CVImageBufferRef image = CMSampleBufferGetImageBuffer(sample_buffer); // does not retain `image` (and we don't want it to).
    const int numPlanes = (int) CVPixelBufferGetPlaneCount(image);
    const int planar = (int) CVPixelBufferIsPlanar(image);

    #if DEBUG_CAMERA
    const int w = (int) CVPixelBufferGetWidth(image);
    const int h = (int) CVPixelBufferGetHeight(image);
    const int sz = (int) CVPixelBufferGetDataSize(image);
    const int pitch = (int) CVPixelBufferGetBytesPerRow(image);
    SDL_Log("CAMERA: buffer planar=%d numPlanes=%d %d x %d sz=%d pitch=%d", planar, numPlanes, w, h, sz, pitch);
    #endif

    // !!! FIXME: this currently copies the data to the surface (see FIXME about non-contiguous planar surfaces, but in theory we could just keep this locked until ReleaseFrame...
    CVPixelBufferLockBaseAddress(image, 0);

    frame->w = (int)CVPixelBufferGetWidth(image);
    frame->h = (int)CVPixelBufferGetHeight(image);

    if ((planar == 0) && (numPlanes == 0)) {
        // Packed (non-planar) formats: one contiguous copy.
        const int pitch = (int) CVPixelBufferGetBytesPerRow(image);
        const size_t buflen = pitch * frame->h;
        frame->pixels = SDL_aligned_alloc(SDL_GetSIMDAlignment(), buflen);
        if (frame->pixels == NULL) {
            result = SDL_CAMERA_FRAME_ERROR;
        } else {
            frame->pitch = pitch;
            SDL_memcpy(frame->pixels, CVPixelBufferGetBaseAddress(image), buflen);
        }
    } else {
        // !!! FIXME: we have an open issue in SDL3 to allow SDL_Surface to support non-contiguous planar data, but we don't have it yet.
        // First pass: total size of all planes; second pass: pack them
        // back-to-back into one allocation.
        size_t buflen = 0;
        for (int i = 0; i < numPlanes; i++) {
            size_t plane_height = CVPixelBufferGetHeightOfPlane(image, i);
            size_t plane_pitch = CVPixelBufferGetBytesPerRowOfPlane(image, i);
            size_t plane_size = (plane_pitch * plane_height);
            buflen += plane_size;
        }

        frame->pitch = (int)CVPixelBufferGetBytesPerRowOfPlane(image, 0); // this is what SDL3 currently expects
        frame->pixels = SDL_aligned_alloc(SDL_GetSIMDAlignment(), buflen);
        if (frame->pixels == NULL) {
            result = SDL_CAMERA_FRAME_ERROR;
        } else {
            Uint8 *dst = frame->pixels;
            for (int i = 0; i < numPlanes; i++) {
                const void *src = CVPixelBufferGetBaseAddressOfPlane(image, i);
                size_t plane_height = CVPixelBufferGetHeightOfPlane(image, i);
                size_t plane_pitch = CVPixelBufferGetBytesPerRowOfPlane(image, i);
                size_t plane_size = (plane_pitch * plane_height);
                SDL_memcpy(dst, src, plane_size);
                dst += plane_size;
            }
        }
    }

    CVPixelBufferUnlockBaseAddress(image, 0);

    #ifdef USE_UIKIT_DEVICE_ROTATION
    UIDeviceOrientation device_orientation = [[UIDevice currentDevice] orientation];
    if (!UIDeviceOrientationIsValidInterfaceOrientation(device_orientation)) {
        device_orientation = hidden.last_device_orientation; // possible the phone is laying flat or something went wrong, just stay with the last known-good orientation.
    } else {
        hidden.last_device_orientation = device_orientation; // update the last known-good orientation for later.
    }

    const UIInterfaceOrientation ui_orientation = [UIApplication sharedApplication].statusBarOrientation;

    // there is probably math for this, but this is easy to slap into a table.
    // rotation = rotations[uiorientation-1][devorientation-1];
    if (device->position == SDL_CAMERA_POSITION_BACK_FACING) {
        static const Uint16 back_rotations[4][4] = {
            { 90, 90, 90, 90 },     // ui portrait
            { 270, 270, 270, 270 }, // ui portrait upside down
            { 0, 0, 0, 0 },         // ui landscape left
            { 180, 180, 180, 180 }  // ui landscape right
        };
        *rotation = (float) back_rotations[ui_orientation - 1][device_orientation - 1];
    } else {
        static const Uint16 front_rotations[4][4] = {
            { 90, 90, 270, 270 }, // ui portrait
            { 270, 270, 90, 90 }, // ui portrait upside down
            { 0, 0, 180, 180 },   // ui landscape left
            { 180, 180, 0, 0 }    // ui landscape right
        };
        *rotation = (float) front_rotations[ui_orientation - 1][device_orientation - 1];
    }
    #endif

    return result;
}

// Free the pixel copy made in AcquireFrame.
static void COREMEDIA_ReleaseFrame(SDL_Camera *device, SDL_Surface *frame)
{
    // !!! FIXME: this currently copies the data to the surface, but in theory we could just keep this locked until ReleaseFrame...
    SDL_aligned_free(frame->pixels);
}

// Stop the capture session and release everything OpenDevice created.
// Safe to call with a NULL/never-opened device.
static void COREMEDIA_CloseDevice(SDL_Camera *device)
{
    if (device && device->hidden) {
        #ifdef USE_UIKIT_DEVICE_ROTATION
        // balances beginGeneratingDeviceOrientationNotifications in OpenDevice (these calls nest).
        [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
        #endif

        // CFBridgingRelease hands ownership back to ARC, which will release
        // `hidden` when it goes out of scope at the end of this function.
        SDLPrivateCameraData *hidden = (SDLPrivateCameraData *) CFBridgingRelease(device->hidden);
        device->hidden = NULL;

        AVCaptureSession *session = hidden.session;
        if (session) {
            hidden.session = nil;
            [session stopRunning];
            [session removeInput:[session.inputs objectAtIndex:0]];
            [session removeOutput:(AVCaptureVideoDataOutput *)[session.outputs objectAtIndex:0]];
            session = nil;
        }

        hidden.delegate = NULL;
        hidden.current_sample = NULL;
    }
}

// Find an AVCaptureDeviceFormat matching `spec` (format, colorspace, size,
// frame rate), configure the device and an AVCaptureSession around it, and
// start capturing. Returns true on success, false (with SDL error set) on failure.
static bool COREMEDIA_OpenDevice(SDL_Camera *device, const SDL_CameraSpec *spec)
{
    AVCaptureDevice *avdevice = (__bridge AVCaptureDevice *) device->handle;

    // Pick format that matches the spec
    const int w = spec->width;
    const int h = spec->height;
    const float rate = (float)spec->framerate_numerator / spec->framerate_denominator;
    AVCaptureDeviceFormat *spec_format = nil;
    NSArray<AVCaptureDeviceFormat *> *formats = [avdevice formats];
    for (AVCaptureDeviceFormat *format in formats) {
        CMFormatDescriptionRef formatDescription = [format formatDescription];
        SDL_PixelFormat device_format = SDL_PIXELFORMAT_UNKNOWN;
        SDL_Colorspace device_colorspace = SDL_COLORSPACE_UNKNOWN;
        CoreMediaFormatToSDL(CMFormatDescriptionGetMediaSubType(formatDescription), &device_format, &device_colorspace);
        if (device_format != spec->format || device_colorspace != spec->colorspace) {
            continue;
        }

        const CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(formatDescription);
        if ((int)dim.width != w || (int)dim.height != h) {
            continue;
        }

        const float FRAMERATE_EPSILON = 0.01f;
        for (AVFrameRateRange *framerate in format.videoSupportedFrameRateRanges) {
            // Check if the requested rate is within the supported range
            if (rate >= (framerate.minFrameRate - FRAMERATE_EPSILON) &&
                rate <= (framerate.maxFrameRate + FRAMERATE_EPSILON)) {

                // Prefer formats with narrower frame rate ranges that are closer to our target
                // This helps avoid formats that support a wide range (like 10-60 FPS)
                // when we want a specific rate (like 30 FPS)
                bool should_select = false;
                if (spec_format == nil) {
                    should_select = true;
                } else {
                    AVFrameRateRange *current_range = spec_format.videoSupportedFrameRateRanges.firstObject;
                    float current_range_width = current_range.maxFrameRate - current_range.minFrameRate;
                    float new_range_width = framerate.maxFrameRate - framerate.minFrameRate;

                    // Prefer formats with narrower ranges, or if ranges are similar, prefer closer to target
                    if (new_range_width < current_range_width) {
                        should_select = true;
                    } else if (SDL_fabsf(new_range_width - current_range_width) < 0.1f) {
                        // Similar range width, prefer the one closer to our target rate
                        float current_distance = SDL_fabsf(rate - current_range.minFrameRate);
                        float new_distance = SDL_fabsf(rate - framerate.minFrameRate);
                        if (new_distance < current_distance) {
                            should_select = true;
                        }
                    }
                }

                if (should_select) {
                    spec_format = format;
                }
            }
        }

        if (spec_format != nil) {
            break;
        }
    }

    if (spec_format == nil) {
        return SDL_SetError("camera spec format not available");
    } else if (![avdevice lockForConfiguration:NULL]) {
        return SDL_SetError("Cannot lockForConfiguration");
    }

    avdevice.activeFormat = spec_format;

    // Try to set the frame duration to enforce the requested frame rate
    const float frameRate = (float)spec->framerate_numerator / spec->framerate_denominator;
    const CMTime frameDuration = CMTimeMake(1, (int32_t)frameRate);

    // Check if the device supports setting frame duration
    if ([avdevice respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] &&
        [avdevice respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) {
        @try {
            avdevice.activeVideoMinFrameDuration = frameDuration;
            avdevice.activeVideoMaxFrameDuration = frameDuration;
        } @catch (NSException *exception) {
            // Some devices don't support setting frame duration, that's okay
        }
    }

    [avdevice unlockForConfiguration];

    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    if (session == nil) {
        return SDL_SetError("Failed to allocate/init AVCaptureSession");
    }

    session.sessionPreset = AVCaptureSessionPresetHigh;
#if defined(SDL_PLATFORM_IOS)
    // Wide-color auto-config could silently override the pixel format we just chose.
    if (@available(iOS 10.0, tvOS 17.0, *)) {
        session.automaticallyConfiguresCaptureDeviceForWideColor = NO;
    }
#endif

    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:avdevice error:&error];
    if (!input) {
        return SDL_SetError("Cannot create AVCaptureDeviceInput");
    }

    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    if (!output) {
        return SDL_SetError("Cannot create AVCaptureVideoDataOutput");
    }

    output.videoSettings = @{
        (id)kCVPixelBufferWidthKey : @(spec->width),
        (id)kCVPixelBufferHeightKey : @(spec->height),
        (id)kCVPixelBufferPixelFormatTypeKey : @(CMFormatDescriptionGetMediaSubType([spec_format formatDescription]))
    };

    // Frames are delivered on our own serial GCD queue, named like an SDL camera thread.
    char threadname[64];
    SDL_GetCameraThreadName(device, threadname, sizeof (threadname));
    dispatch_queue_t queue = dispatch_queue_create(threadname, NULL);
    //dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
    if (!queue) {
        return SDL_SetError("dispatch_queue_create() failed");
    }

    SDLCaptureVideoDataOutputSampleBufferDelegate *delegate = [[SDLCaptureVideoDataOutputSampleBufferDelegate alloc] init:device];
    if (delegate == nil) {
        return SDL_SetError("Cannot create SDLCaptureVideoDataOutputSampleBufferDelegate");
    }
    [output setSampleBufferDelegate:delegate queue:queue];

    if (![session canAddInput:input]) {
        return SDL_SetError("Cannot add AVCaptureDeviceInput");
    }
    [session addInput:input];

    if (![session canAddOutput:output]) {
        return SDL_SetError("Cannot add AVCaptureVideoDataOutput");
    }
    [session addOutput:output];

    // Try to set the frame rate on the connection
    AVCaptureConnection *connection = [output connectionWithMediaType:AVMediaTypeVideo];
    if (connection && connection.isVideoMinFrameDurationSupported) {
        connection.videoMinFrameDuration = frameDuration;
        if (connection.isVideoMaxFrameDurationSupported) {
            connection.videoMaxFrameDuration = frameDuration;
        }
    }

    [session commitConfiguration];

    SDLPrivateCameraData *hidden = [[SDLPrivateCameraData alloc] init];
    if (hidden == nil) {
        return SDL_SetError("Cannot create SDLPrivateCameraData");
    }

    hidden.session = session;
    hidden.delegate = delegate;
    hidden.current_sample = NULL;

    #ifdef USE_UIKIT_DEVICE_ROTATION
    // When using a camera, we turn on device orientation tracking. The docs note that this turns on
    // the device's accelerometer, so I assume this burns power, so we don't leave this running all
    // the time. These calls nest, so we just need to call the matching `end` message when we close.
    // You _can_ get an actual events through this mechanism, but we just want to be able to call
    // -[UIDevice orientation], which will update with real info while notifications are enabled.
    UIDevice *uidevice = [UIDevice currentDevice];
    [uidevice beginGeneratingDeviceOrientationNotifications];
    hidden.last_device_orientation = uidevice.orientation;
    if (!UIDeviceOrientationIsValidInterfaceOrientation(hidden.last_device_orientation)) {
        // accelerometer isn't ready yet or the phone is laying flat or something. Just try to guess from how the UI is oriented at the moment.
        switch ([UIApplication sharedApplication].statusBarOrientation) {
            case UIInterfaceOrientationPortrait: hidden.last_device_orientation = UIDeviceOrientationPortrait; break;
            case UIInterfaceOrientationPortraitUpsideDown: hidden.last_device_orientation = UIDeviceOrientationPortraitUpsideDown; break;
            case UIInterfaceOrientationLandscapeLeft: hidden.last_device_orientation = UIDeviceOrientationLandscapeRight; break; // Apple docs say UI and device orientations are reversed in landscape.
            case UIInterfaceOrientationLandscapeRight: hidden.last_device_orientation = UIDeviceOrientationLandscapeLeft; break;
            default: hidden.last_device_orientation = UIDeviceOrientationPortrait; break; // oh well.
        }
    }
    #endif

    // CFBridgingRetain: ARC hands ownership to us; released in CloseDevice.
    device->hidden = (struct SDL_PrivateCameraData *)CFBridgingRetain(hidden);

    [session startRunning]; // !!! FIXME: docs say this can block while camera warms up and shouldn't be done on main thread. Maybe push through `queue`?

    CheckCameraPermissions(device); // check right away, in case the process is already granted permission.

    return true;
}

// Release the AVCaptureDevice reference retained in MaybeAddDevice.
static void COREMEDIA_FreeDeviceHandle(SDL_Camera *device)
{
    if (device && device->handle) {
        CFBridgingRelease(device->handle);
    }
}

// Enumerate a device's video formats into `add_data` as SDL camera specs,
// adding one entry per (format, size, min-rate) and another for the max rate
// when the range isn't a single rate.
static void GatherCameraSpecs(AVCaptureDevice *device, CameraFormatAddData *add_data)
{
    SDL_zerop(add_data);

    for (AVCaptureDeviceFormat *fmt in device.formats) {
        if (CMFormatDescriptionGetMediaType(fmt.formatDescription) != kCMMediaType_Video) {
            continue;
        }

//NSLog(@"Available camera format: %@\n", fmt);
        SDL_PixelFormat device_format = SDL_PIXELFORMAT_UNKNOWN;
        SDL_Colorspace device_colorspace = SDL_COLORSPACE_UNKNOWN;
        CoreMediaFormatToSDL(CMFormatDescriptionGetMediaSubType(fmt.formatDescription), &device_format, &device_colorspace);
        if (device_format == SDL_PIXELFORMAT_UNKNOWN) {
            continue; // no SDL equivalent for this FourCC; skip it.
        }

        const CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(fmt.formatDescription);
        const int w = (int) dims.width;
        const int h = (int) dims.height;
        for (AVFrameRateRange *framerate in fmt.videoSupportedFrameRateRanges) {
            int min_numerator = 0, min_denominator = 1;
            int max_numerator = 0, max_denominator = 1;

            SDL_CalculateFraction(framerate.minFrameRate, &min_numerator, &min_denominator);
            SDL_AddCameraFormat(add_data, device_format, device_colorspace, w, h, min_numerator, min_denominator);
            SDL_CalculateFraction(framerate.maxFrameRate, &max_numerator, &max_denominator);
            if (max_numerator != min_numerator || max_denominator != min_denominator) {
                SDL_AddCameraFormat(add_data, device_format, device_colorspace, w, h, max_numerator, max_denominator);
            }
        }
    }
}

// SDL_FindPhysicalCameraByCallback predicate: does this SDL camera wrap the
// AVCaptureDevice with the given uniqueID (passed as userdata)?
static bool FindCoreMediaCameraByUniqueID(SDL_Camera *device, void *userdata)
{
    NSString *uniqueid = (__bridge NSString *) userdata;
    AVCaptureDevice *avdev = (__bridge AVCaptureDevice *) device->handle;
    return ([uniqueid isEqualToString:avdev.uniqueID]) ? true : false;
}

// Register `avdevice` with SDL if it is a connected camera we haven't seen yet
// and it reports at least one format we can use.
static void MaybeAddDevice(AVCaptureDevice *avdevice)
{
    if (!avdevice.connected) {
        return; // not connected.
    } else if (![avdevice hasMediaType:AVMediaTypeVideo]) {
        return; // not a camera.
    } else if (SDL_FindPhysicalCameraByCallback(FindCoreMediaCameraByUniqueID, (__bridge void *) avdevice.uniqueID)) {
        return; // already have this one.
    }

    CameraFormatAddData add_data;
    GatherCameraSpecs(avdevice, &add_data);
    if (add_data.num_specs > 0) {
        SDL_CameraPosition position = SDL_CAMERA_POSITION_UNKNOWN;
        if (avdevice.position == AVCaptureDevicePositionFront) {
            position = SDL_CAMERA_POSITION_FRONT_FACING;
        } else if (avdevice.position == AVCaptureDevicePositionBack) {
            position = SDL_CAMERA_POSITION_BACK_FACING;
        }
        // the handle keeps a retained reference to the AVCaptureDevice; released in FreeDeviceHandle.
        SDL_AddCamera(avdevice.localizedName.UTF8String, position, add_data.num_specs, add_data.specs, (void *) CFBridgingRetain(avdevice));
    }

    SDL_free(add_data.specs);
}

// Enumerate all attached cameras and register each with SDL.
static void COREMEDIA_DetectDevices(void)
{
    NSArray<AVCaptureDevice *> *devices = nil;

    if (@available(macOS 10.15, iOS 13, *)) {
        // kind of annoying that there isn't a "give me anything that looks like a camera" option,
        // so this list will need to be updated when Apple decides to add
        // AVCaptureDeviceTypeBuiltInQuadrupleCamera some day.
        NSArray *device_types = @[
            #ifdef SDL_PLATFORM_IOS
            AVCaptureDeviceTypeBuiltInTelephotoCamera,
            AVCaptureDeviceTypeBuiltInDualCamera,
            AVCaptureDeviceTypeBuiltInDualWideCamera,
            AVCaptureDeviceTypeBuiltInTripleCamera,
            AVCaptureDeviceTypeBuiltInUltraWideCamera,
            #else
            AVCaptureDeviceTypeExternalUnknown,
            #endif
            AVCaptureDeviceTypeBuiltInWideAngleCamera
        ];

        AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession
                                                             discoverySessionWithDeviceTypes:device_types
                                                             mediaType:AVMediaTypeVideo
                                                             position:AVCaptureDevicePositionUnspecified];

        devices = discoverySession.devices;
        // !!! FIXME: this can use Key Value Observation to get hotplug events.
    } else {
        // this is deprecated but works back to macOS 10.7; 10.15 added AVCaptureDeviceDiscoverySession as a replacement.
        devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        // !!! FIXME: this can use AVCaptureDeviceWasConnectedNotification and AVCaptureDeviceWasDisconnectedNotification with NSNotificationCenter to get hotplug events.
    }

    for (AVCaptureDevice *device in devices) {
        MaybeAddDevice(device);
    }
}

static void COREMEDIA_Deinitialize(void)
{
    // !!! FIXME: disable hotplug.
}

// Fill in the SDL camera driver function table for this backend.
static bool COREMEDIA_Init(SDL_CameraDriverImpl *impl)
{
    impl->DetectDevices = COREMEDIA_DetectDevices;
    impl->OpenDevice = COREMEDIA_OpenDevice;
    impl->CloseDevice = COREMEDIA_CloseDevice;
    impl->WaitDevice = COREMEDIA_WaitDevice;
    impl->AcquireFrame = COREMEDIA_AcquireFrame;
    impl->ReleaseFrame = COREMEDIA_ReleaseFrame;
    impl->FreeDeviceHandle = COREMEDIA_FreeDeviceHandle;
    impl->Deinitialize = COREMEDIA_Deinitialize;

    // frames arrive via our GCD queue, so SDL should not spin its own camera thread.
    impl->ProvidesOwnCallbackThread = true;

    return true;
}

CameraBootStrap COREMEDIA_bootstrap = {
    "coremedia", "SDL Apple CoreMedia camera driver", COREMEDIA_Init, false
};

#endif // SDL_CAMERA_DRIVER_COREMEDIA
[FILE END]
(C) 2025 0x4248. (C) 2025 4248 Media and 4248 Systems, all part of 0x4248. See the LICENCE files for more information. Not all files are by 0x4248; always check licensing.