// NOTE: Removed code-viewer extraction artifacts (file-size banner and concatenated
// line-number gutter) that were not part of the original source file.
//
//  OBSAVCapture.m
//  mac-avcapture
//
//  Created by Patrick Heyer on 2023-03-07.
//
  7. #import "OBSAVCapture.h"
  8. #import "AVCaptureDeviceFormat+OBSListable.h"
  9. /// Tthe maximum number of frame rate ranges to show complete information for before providing a more generic description of the supported frame rates inside of a device format description.
  10. static const UInt32 kMaxFrameRateRangesInDescription = 10;
  11. @implementation OBSAVCapture
  12. - (instancetype)init
  13. {
  14. return [self initWithCaptureInfo:nil];
  15. }
  16. - (instancetype)initWithCaptureInfo:(OBSAVCaptureInfo *)capture_info
  17. {
  18. self = [super init];
  19. if (self) {
  20. CMIOObjectPropertyAddress propertyAddress = {kCMIOHardwarePropertyAllowScreenCaptureDevices,
  21. kCMIOObjectPropertyScopeGlobal, kCMIOObjectPropertyElementMain};
  22. UInt32 allow = 1;
  23. CMIOObjectSetPropertyData(kCMIOObjectSystemObject, &propertyAddress, 0, NULL, sizeof(allow), &allow);
  24. _errorDomain = @"com.obsproject.obs-studio.av-capture";
  25. _presetList = @{
  26. AVCaptureSessionPresetLow: @"Low",
  27. AVCaptureSessionPresetMedium: @"Medium",
  28. AVCaptureSessionPresetHigh: @"High",
  29. AVCaptureSessionPreset320x240: @"320x240",
  30. AVCaptureSessionPreset352x288: @"352x288",
  31. AVCaptureSessionPreset640x480: @"640x480",
  32. AVCaptureSessionPreset960x540: @"960x540",
  33. AVCaptureSessionPreset1280x720: @"1280x720",
  34. AVCaptureSessionPreset1920x1080: @"1920x1080",
  35. AVCaptureSessionPreset3840x2160: @"3840x2160",
  36. };
  37. _sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
  38. OBSAVCaptureVideoInfo newInfo = {0};
  39. _videoInfo = newInfo;
  40. [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(deviceDisconnected:)
  41. name:AVCaptureDeviceWasDisconnectedNotification
  42. object:nil];
  43. [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(deviceConnected:)
  44. name:AVCaptureDeviceWasConnectedNotification
  45. object:nil];
  46. if (capture_info) {
  47. _captureInfo = capture_info;
  48. NSString *UUID = [OBSAVCapture stringFromSettings:_captureInfo->settings withSetting:@"device"];
  49. NSString *presetName = [OBSAVCapture stringFromSettings:_captureInfo->settings withSetting:@"preset"];
  50. BOOL isPresetEnabled = obs_data_get_bool(_captureInfo->settings, "use_preset");
  51. if (capture_info->isFastPath) {
  52. _isFastPath = YES;
  53. _isPresetBased = NO;
  54. } else {
  55. BOOL isBufferingEnabled = obs_data_get_bool(_captureInfo->settings, "buffering");
  56. obs_source_set_async_unbuffered(_captureInfo->source, !isBufferingEnabled);
  57. }
  58. __weak OBSAVCapture *weakSelf = self;
  59. dispatch_async(_sessionQueue, ^{
  60. NSError *error = nil;
  61. OBSAVCapture *instance = weakSelf;
  62. if ([instance createSession:&error]) {
  63. if ([instance switchCaptureDevice:UUID withError:nil]) {
  64. BOOL isSessionConfigured = NO;
  65. if (isPresetEnabled) {
  66. isSessionConfigured = [instance configureSessionWithPreset:presetName withError:nil];
  67. } else {
  68. isSessionConfigured = [instance configureSession:nil];
  69. }
  70. if (isSessionConfigured) {
  71. [instance startCaptureSession];
  72. }
  73. }
  74. } else {
  75. [instance AVCaptureLog:LOG_ERROR withFormat:error.localizedDescription];
  76. }
  77. });
  78. }
  79. }
  80. return self;
  81. }
  82. #pragma mark - Capture Session Handling
  83. - (BOOL)createSession:(NSError *__autoreleasing *)error
  84. {
  85. AVCaptureSession *session = [[AVCaptureSession alloc] init];
  86. [session beginConfiguration];
  87. if (!session) {
  88. if (error) {
  89. NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create AVCaptureSession"};
  90. *error = [NSError errorWithDomain:self.errorDomain code:-101 userInfo:userInfo];
  91. }
  92. return NO;
  93. }
  94. AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
  95. if (!videoOutput) {
  96. if (error) {
  97. NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create AVCaptureVideoDataOutput"};
  98. *error = [NSError errorWithDomain:self.errorDomain code:-102 userInfo:userInfo];
  99. }
  100. return NO;
  101. }
  102. AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
  103. if (!audioOutput) {
  104. if (error) {
  105. NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create AVCaptureAudioDataOutput"};
  106. *error = [NSError errorWithDomain:self.errorDomain code:-103 userInfo:userInfo];
  107. }
  108. return NO;
  109. }
  110. dispatch_queue_t videoQueue = dispatch_queue_create(nil, nil);
  111. if (!videoQueue) {
  112. if (error) {
  113. NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create video dispatch queue"};
  114. *error = [NSError errorWithDomain:self.errorDomain code:-104 userInfo:userInfo];
  115. }
  116. return NO;
  117. }
  118. dispatch_queue_t audioQueue = dispatch_queue_create(nil, nil);
  119. if (!audioQueue) {
  120. if (error) {
  121. NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create audio dispatch queue"};
  122. *error = [NSError errorWithDomain:self.errorDomain code:-105 userInfo:userInfo];
  123. }
  124. return NO;
  125. }
  126. if ([session canAddOutput:videoOutput]) {
  127. [session addOutput:videoOutput];
  128. [videoOutput setSampleBufferDelegate:self queue:videoQueue];
  129. }
  130. if ([session canAddOutput:audioOutput]) {
  131. [session addOutput:audioOutput];
  132. [audioOutput setSampleBufferDelegate:self queue:audioQueue];
  133. }
  134. [session commitConfiguration];
  135. self.session = session;
  136. self.videoOutput = videoOutput;
  137. self.videoQueue = videoQueue;
  138. self.audioOutput = audioOutput;
  139. self.audioQueue = audioQueue;
  140. return YES;
  141. }
  142. - (BOOL)switchCaptureDevice:(NSString *)uuid withError:(NSError *__autoreleasing *)error
  143. {
  144. AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:uuid];
  145. if (self.deviceInput.device || !device) {
  146. [self stopCaptureSession];
  147. [self.session removeInput:self.deviceInput];
  148. [self.deviceInput.device unlockForConfiguration];
  149. self.deviceInput = nil;
  150. self.isDeviceLocked = NO;
  151. self.presetFormat = nil;
  152. }
  153. if (!device) {
  154. if (uuid.length < 1) {
  155. [self AVCaptureLog:LOG_INFO withFormat:@"No device selected"];
  156. self.deviceUUID = uuid;
  157. return NO;
  158. } else {
  159. [self AVCaptureLog:LOG_WARNING withFormat:@"Unable to initialize device with unique ID '%@'", uuid];
  160. return NO;
  161. }
  162. }
  163. const char *deviceName = device.localizedName.UTF8String;
  164. obs_data_set_string(self.captureInfo->settings, "device_name", deviceName);
  165. obs_data_set_string(self.captureInfo->settings, "device", device.uniqueID.UTF8String);
  166. [self AVCaptureLog:LOG_INFO withFormat:@"Selected device '%@'", device.localizedName];
  167. self.deviceUUID = device.uniqueID;
  168. BOOL isAudioSupported = [device hasMediaType:AVMediaTypeAudio] || [device hasMediaType:AVMediaTypeMuxed];
  169. obs_source_set_audio_active(self.captureInfo->source, isAudioSupported);
  170. AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:error];
  171. if (!deviceInput) {
  172. return NO;
  173. }
  174. [self.session beginConfiguration];
  175. if ([self.session canAddInput:deviceInput]) {
  176. [self.session addInput:deviceInput];
  177. self.deviceInput = deviceInput;
  178. } else {
  179. if (error) {
  180. NSDictionary *userInfo = @{
  181. NSLocalizedDescriptionKey: [NSString
  182. stringWithFormat:@"Unable to add device '%@' as deviceInput to capture session", self.deviceUUID]
  183. };
  184. *error = [NSError errorWithDomain:self.errorDomain code:-107 userInfo:userInfo];
  185. }
  186. [self.session commitConfiguration];
  187. return NO;
  188. }
  189. AVCaptureDeviceFormat *deviceFormat = device.activeFormat;
  190. CMMediaType mediaType = CMFormatDescriptionGetMediaType(deviceFormat.formatDescription);
  191. if (mediaType != kCMMediaType_Video && mediaType != kCMMediaType_Muxed) {
  192. if (error) {
  193. NSDictionary *userInfo = @{
  194. NSLocalizedDescriptionKey: [NSString stringWithFormat:@"CMMediaType '%@' is not supported",
  195. [OBSAVCapture stringFromFourCharCode:mediaType]]
  196. };
  197. *error = [NSError errorWithDomain:self.errorDomain code:-108 userInfo:userInfo];
  198. }
  199. [self.session removeInput:deviceInput];
  200. [self.session commitConfiguration];
  201. return NO;
  202. }
  203. if (self.isFastPath) {
  204. self.videoOutput.videoSettings = nil;
  205. NSMutableDictionary *videoSettings =
  206. [NSMutableDictionary dictionaryWithDictionary:self.videoOutput.videoSettings];
  207. FourCharCode targetPixelFormatType = kCVPixelFormatType_32BGRA;
  208. [videoSettings setObject:@(targetPixelFormatType)
  209. forKey:(__bridge NSString *) kCVPixelBufferPixelFormatTypeKey];
  210. self.videoOutput.videoSettings = videoSettings;
  211. } else {
  212. self.videoOutput.videoSettings = nil;
  213. FourCharCode subType = [[self.videoOutput.videoSettings
  214. objectForKey:(__bridge NSString *) kCVPixelBufferPixelFormatTypeKey] unsignedIntValue];
  215. if ([OBSAVCapture formatFromSubtype:subType] != VIDEO_FORMAT_NONE) {
  216. [self AVCaptureLog:LOG_DEBUG
  217. withFormat:@"Using native fourcc '%@'", [OBSAVCapture stringFromFourCharCode:subType]];
  218. } else {
  219. [self AVCaptureLog:LOG_DEBUG withFormat:@"Using fallback fourcc '%@' ('%@', 0x%08x unsupported)",
  220. [OBSAVCapture stringFromFourCharCode:kCVPixelFormatType_32BGRA],
  221. [OBSAVCapture stringFromFourCharCode:subType], subType];
  222. NSMutableDictionary *videoSettings =
  223. [NSMutableDictionary dictionaryWithDictionary:self.videoOutput.videoSettings];
  224. [videoSettings setObject:@(kCVPixelFormatType_32BGRA)
  225. forKey:(__bridge NSString *) kCVPixelBufferPixelFormatTypeKey];
  226. self.videoOutput.videoSettings = videoSettings;
  227. }
  228. }
  229. [self.session commitConfiguration];
  230. return YES;
  231. }
  232. - (void)startCaptureSession
  233. {
  234. if (!self.session.running) {
  235. [self.session startRunning];
  236. }
  237. }
  238. - (void)stopCaptureSession
  239. {
  240. if (self.session.running) {
  241. [self.session stopRunning];
  242. }
  243. if (self.captureInfo->isFastPath) {
  244. if (self.captureInfo->texture) {
  245. obs_enter_graphics();
  246. gs_texture_destroy(self.captureInfo->texture);
  247. obs_leave_graphics();
  248. self.captureInfo->texture = NULL;
  249. }
  250. if (self.captureInfo->currentSurface) {
  251. IOSurfaceDecrementUseCount(self.captureInfo->currentSurface);
  252. CFRelease(self.captureInfo->currentSurface);
  253. self.captureInfo->currentSurface = NULL;
  254. }
  255. if (self.captureInfo->previousSurface) {
  256. IOSurfaceDecrementUseCount(self.captureInfo->previousSurface);
  257. CFRelease(self.captureInfo->previousSurface);
  258. self.captureInfo->previousSurface = NULL;
  259. }
  260. } else {
  261. if (self.captureInfo->source) {
  262. obs_source_output_video(self.captureInfo->source, NULL);
  263. }
  264. }
  265. }
  266. - (BOOL)configureSessionWithPreset:(AVCaptureSessionPreset)preset withError:(NSError *__autoreleasing *)error
  267. {
  268. if (!self.deviceInput.device) {
  269. if (error) {
  270. NSDictionary *userInfo =
  271. @{NSLocalizedDescriptionKey: @"Unable to set session preset without capture device"};
  272. *error = [NSError errorWithDomain:self.errorDomain code:-108 userInfo:userInfo];
  273. }
  274. return NO;
  275. }
  276. if (![self.deviceInput.device supportsAVCaptureSessionPreset:preset]) {
  277. if (error) {
  278. NSDictionary *userInfo = @{
  279. NSLocalizedDescriptionKey: [NSString stringWithFormat:@"Preset %@ not supported by device %@",
  280. [OBSAVCapture stringFromCapturePreset:preset],
  281. self.deviceInput.device.localizedName]
  282. };
  283. *error = [NSError errorWithDomain:self.errorDomain code:-201 userInfo:userInfo];
  284. }
  285. return NO;
  286. }
  287. if ([self.session canSetSessionPreset:preset]) {
  288. if (self.isDeviceLocked) {
  289. if ([preset isEqualToString:self.session.sessionPreset]) {
  290. if (self.deviceInput.device.activeFormat) {
  291. self.deviceInput.device.activeFormat = self.presetFormat.activeFormat;
  292. self.deviceInput.device.activeVideoMinFrameDuration = self.presetFormat.minFrameRate;
  293. self.deviceInput.device.activeVideoMaxFrameDuration = self.presetFormat.maxFrameRate;
  294. }
  295. self.presetFormat = nil;
  296. }
  297. [self.deviceInput.device unlockForConfiguration];
  298. self.isDeviceLocked = NO;
  299. }
  300. if ([self.session canSetSessionPreset:preset]) {
  301. self.session.sessionPreset = preset;
  302. }
  303. } else {
  304. if (error) {
  305. NSDictionary *userInfo = @{
  306. NSLocalizedDescriptionKey: [NSString stringWithFormat:@"Preset %@ not supported by capture session",
  307. [OBSAVCapture stringFromCapturePreset:preset]]
  308. };
  309. *error = [NSError errorWithDomain:self.errorDomain code:-202 userInfo:userInfo];
  310. }
  311. return NO;
  312. }
  313. self.isPresetBased = YES;
  314. return YES;
  315. }
  316. - (BOOL)configureSession:(NSError *__autoreleasing *)error
  317. {
  318. OBSAVCaptureMediaFPS fps;
  319. if (!obs_data_get_frames_per_second(self.captureInfo->settings, "frame_rate", &fps, NULL)) {
  320. [self AVCaptureLog:LOG_DEBUG withFormat:@"No valid framerate found in settings"];
  321. return NO;
  322. }
  323. CMTime time = {.value = fps.denominator, .timescale = fps.numerator, .flags = 1};
  324. const char *selectedFormat = obs_data_get_string(self.captureInfo->settings, "supported_format");
  325. NSString *selectedFormatNSString = selectedFormat != NULL ? @(selectedFormat) : @"";
  326. AVCaptureDeviceFormat *format = nil;
  327. FourCharCode subtype;
  328. OBSAVCaptureColorSpace colorSpace;
  329. bool fpsSupported = false;
  330. if (![selectedFormatNSString isEqualToString:@""]) {
  331. for (AVCaptureDeviceFormat *formatCandidate in [self.deviceInput.device.formats reverseObjectEnumerator]) {
  332. if ([selectedFormatNSString isEqualToString:formatCandidate.obsPropertyListInternalRepresentation]) {
  333. CMFormatDescriptionRef formatDescription = formatCandidate.formatDescription;
  334. FourCharCode formatFourCC = CMFormatDescriptionGetMediaSubType(formatDescription);
  335. format = formatCandidate;
  336. subtype = formatFourCC;
  337. colorSpace = [OBSAVCapture colorspaceFromDescription:formatDescription];
  338. break;
  339. }
  340. }
  341. } else {
  342. //try to migrate from the legacy suite of properties
  343. int legacyVideoRange = (int) obs_data_get_int(self.captureInfo->settings, "video_range");
  344. int legacyInputFormat = (int) obs_data_get_int(self.captureInfo->settings, "input_format");
  345. int legacyColorSpace = (int) obs_data_get_int(self.captureInfo->settings, "color_space");
  346. CMVideoDimensions legacyDimensions = [OBSAVCapture legacyDimensionsFromSettings:self.captureInfo->settings];
  347. for (AVCaptureDeviceFormat *formatCandidate in [self.deviceInput.device.formats reverseObjectEnumerator]) {
  348. CMFormatDescriptionRef formatDescription = formatCandidate.formatDescription;
  349. CMVideoDimensions formatDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
  350. int formatColorSpace = [OBSAVCapture colorspaceFromDescription:formatDescription];
  351. int formatInputFormat =
  352. [OBSAVCapture formatFromSubtype:CMFormatDescriptionGetMediaSubType(formatDescription)];
  353. int formatVideoRange = [OBSAVCapture isFullRangeFormat:formatInputFormat] ? VIDEO_RANGE_FULL
  354. : VIDEO_RANGE_PARTIAL;
  355. bool foundFormat = legacyVideoRange == formatVideoRange && legacyInputFormat == formatInputFormat &&
  356. legacyColorSpace == formatColorSpace &&
  357. legacyDimensions.width == formatDimensions.width &&
  358. legacyDimensions.height == formatDimensions.height;
  359. if (foundFormat) {
  360. format = formatCandidate;
  361. subtype = formatInputFormat;
  362. colorSpace = formatColorSpace;
  363. break;
  364. }
  365. }
  366. }
  367. if (!format) {
  368. [self AVCaptureLog:LOG_WARNING withFormat:@"Configured format not found on device"];
  369. return NO;
  370. }
  371. for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
  372. if (CMTimeCompare(range.maxFrameDuration, time) >= 0 && CMTimeCompare(range.minFrameDuration, time) <= 0) {
  373. fpsSupported = true;
  374. break;
  375. }
  376. }
  377. if (!fpsSupported) {
  378. OBSAVCaptureMediaFPS fallbackFPS = [OBSAVCapture fallbackFrameRateForFormat:format];
  379. if (fallbackFPS.denominator > 0 && fallbackFPS.numerator > 0) {
  380. [self AVCaptureLog:LOG_WARNING withFormat:@"Frame rate is not supported: %g FPS (%u/%u), \n"
  381. " falling back to value supported by device: %G FPS (%u/%u)",
  382. media_frames_per_second_to_fps(fps), fps.numerator,
  383. fps.denominator, media_frames_per_second_to_fps(fallbackFPS),
  384. fallbackFPS.numerator, fallbackFPS.denominator];
  385. obs_data_set_frames_per_second(self.captureInfo->settings, "frame_rate", fallbackFPS, NULL);
  386. time.value = fallbackFPS.denominator;
  387. time.timescale = fallbackFPS.numerator;
  388. } else {
  389. [self AVCaptureLog:LOG_WARNING
  390. withFormat:@"Frame rate is not supported: %g FPS (%u/%u), \n"
  391. " no supported fallback FPS found",
  392. media_frames_per_second_to_fps(fps), fps.numerator, fps.denominator];
  393. return NO;
  394. }
  395. }
  396. [self.session beginConfiguration];
  397. self.isDeviceLocked = [self.deviceInput.device lockForConfiguration:error];
  398. if (!self.isDeviceLocked) {
  399. [self AVCaptureLog:LOG_WARNING withFormat:@"Could not lock device for configuration"];
  400. return NO;
  401. }
  402. [self AVCaptureLog:LOG_INFO
  403. withFormat:@"Capturing '%@' (%@):\n"
  404. " Using Format : %@ \n"
  405. " FPS : %g (%u/%u)\n"
  406. " Frame Interval : %g\u00a0s\n",
  407. self.deviceInput.device.localizedName, self.deviceInput.device.uniqueID,
  408. format.obsPropertyListDescription, media_frames_per_second_to_fps(fps), fps.numerator,
  409. fps.denominator, media_frames_per_second_to_frame_interval(fps)];
  410. OBSAVCaptureVideoInfo newInfo = {.colorSpace = _videoInfo.colorSpace,
  411. .fourCC = _videoInfo.fourCC,
  412. .isValid = false};
  413. self.videoInfo = newInfo;
  414. self.captureInfo->configuredColorSpace = colorSpace;
  415. self.captureInfo->configuredFourCC = subtype;
  416. self.isPresetBased = NO;
  417. if (!self.presetFormat) {
  418. OBSAVCapturePresetInfo *presetInfo = [[OBSAVCapturePresetInfo alloc] init];
  419. presetInfo.activeFormat = self.deviceInput.device.activeFormat;
  420. presetInfo.minFrameRate = self.deviceInput.device.activeVideoMinFrameDuration;
  421. presetInfo.maxFrameRate = self.deviceInput.device.activeVideoMaxFrameDuration;
  422. self.presetFormat = presetInfo;
  423. }
  424. self.deviceInput.device.activeFormat = format;
  425. self.deviceInput.device.activeVideoMinFrameDuration = time;
  426. self.deviceInput.device.activeVideoMaxFrameDuration = time;
  427. [self.session commitConfiguration];
  428. return YES;
  429. }
  430. - (BOOL)updateSessionwithError:(NSError *__autoreleasing *)error
  431. {
  432. switch (self.captureInfo->lastError) {
  433. case OBSAVCaptureError_SampleBufferFormat:
  434. if (self.captureInfo->sampleBufferDescription) {
  435. FourCharCode mediaSubType =
  436. CMFormatDescriptionGetMediaSubType(self.captureInfo->sampleBufferDescription);
  437. [self AVCaptureLog:LOG_ERROR
  438. withFormat:@"Incompatible sample buffer format received for sync AVCapture source: %@ (0x%x)",
  439. [OBSAVCapture stringFromFourCharCode:mediaSubType], mediaSubType];
  440. }
  441. break;
  442. case OBSAVCaptureError_ColorSpace: {
  443. if (self.captureInfo->sampleBufferDescription) {
  444. FourCharCode mediaSubType =
  445. CMFormatDescriptionGetMediaSubType(self.captureInfo->sampleBufferDescription);
  446. BOOL isSampleBufferFullRange = [OBSAVCapture isFullRangeFormat:mediaSubType];
  447. OBSAVCaptureColorSpace sampleBufferColorSpace =
  448. [OBSAVCapture colorspaceFromDescription:self.captureInfo->sampleBufferDescription];
  449. OBSAVCaptureVideoRange sampleBufferRangeType = isSampleBufferFullRange ? VIDEO_RANGE_FULL
  450. : VIDEO_RANGE_PARTIAL;
  451. [self AVCaptureLog:LOG_ERROR
  452. withFormat:@"Failed to get colorspace parameters for colorspace %u and range %u",
  453. sampleBufferColorSpace, sampleBufferRangeType];
  454. }
  455. break;
  456. default:
  457. self.captureInfo->lastError = OBSAVCaptureError_NoError;
  458. self.captureInfo->sampleBufferDescription = NULL;
  459. break;
  460. }
  461. }
  462. switch (self.captureInfo->lastAudioError) {
  463. case OBSAVCaptureError_AudioBuffer: {
  464. [OBSAVCapture AVCaptureLog:LOG_ERROR
  465. withFormat:@"Unable to retrieve required AudioBufferList size from sample buffer."];
  466. break;
  467. }
  468. default:
  469. self.captureInfo->lastAudioError = OBSAVCaptureError_NoError;
  470. break;
  471. }
  472. NSString *newDeviceUUID = [OBSAVCapture stringFromSettings:self.captureInfo->settings withSetting:@"device"];
  473. NSString *presetName = [OBSAVCapture stringFromSettings:self.captureInfo->settings withSetting:@"preset"];
  474. BOOL isPresetEnabled = obs_data_get_bool(self.captureInfo->settings, "use_preset");
  475. BOOL updateSession = YES;
  476. if (![self.deviceUUID isEqualToString:newDeviceUUID]) {
  477. if (![self switchCaptureDevice:newDeviceUUID withError:error]) {
  478. obs_source_update_properties(self.captureInfo->source);
  479. return NO;
  480. }
  481. } else if (self.isPresetBased && isPresetEnabled && [presetName isEqualToString:self.session.sessionPreset]) {
  482. updateSession = NO;
  483. }
  484. if (updateSession) {
  485. if (isPresetEnabled) {
  486. [self configureSessionWithPreset:presetName withError:error];
  487. } else {
  488. if (![self configureSession:error]) {
  489. obs_source_update_properties(self.captureInfo->source);
  490. return NO;
  491. }
  492. }
  493. __weak OBSAVCapture *weakSelf = self;
  494. dispatch_async(self.sessionQueue, ^{
  495. [weakSelf startCaptureSession];
  496. });
  497. }
  498. BOOL isAudioAvailable = [self.deviceInput.device hasMediaType:AVMediaTypeAudio] ||
  499. [self.deviceInput.device hasMediaType:AVMediaTypeMuxed];
  500. obs_source_set_audio_active(self.captureInfo->source, isAudioAvailable);
  501. if (!self.isFastPath) {
  502. BOOL isBufferingEnabled = obs_data_get_bool(self.captureInfo->settings, "buffering");
  503. obs_source_set_async_unbuffered(self.captureInfo->source, !isBufferingEnabled);
  504. }
  505. return YES;
  506. }
  507. #pragma mark - OBS Settings Helpers
  508. + (CMVideoDimensions)legacyDimensionsFromSettings:(void *)settings
  509. {
  510. CMVideoDimensions zero = {0};
  511. NSString *jsonString = [OBSAVCapture stringFromSettings:settings withSetting:@"resolution"];
  512. NSDictionary *data = [NSJSONSerialization JSONObjectWithData:[jsonString dataUsingEncoding:NSUTF8StringEncoding]
  513. options:0
  514. error:nil];
  515. if (data.count == 0) {
  516. return zero;
  517. }
  518. NSInteger width = [[data objectForKey:@"width"] intValue];
  519. NSInteger height = [[data objectForKey:@"height"] intValue];
  520. if (!width || !height) {
  521. return zero;
  522. }
  523. CMVideoDimensions dimensions = {.width = (int32_t) clamp_Uint(width, 0, UINT32_MAX),
  524. .height = (int32_t) clamp_Uint(height, 0, UINT32_MAX)};
  525. return dimensions;
  526. }
  527. + (OBSAVCaptureMediaFPS)fallbackFrameRateForFormat:(AVCaptureDeviceFormat *)format
  528. {
  529. struct obs_video_info video_info;
  530. bool result = obs_get_video_info(&video_info);
  531. double outputFPS = result ? ((double) video_info.fps_num / (double) video_info.fps_den) : 0;
  532. double closestUpTo = 0;
  533. double closestAbove = DBL_MAX;
  534. OBSAVCaptureMediaFPS closestUpToMFPS = {};
  535. OBSAVCaptureMediaFPS closestAboveMFPS = {};
  536. for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
  537. if (range.maxFrameRate > closestUpTo && range.maxFrameRate <= outputFPS) {
  538. closestUpTo = range.maxFrameRate;
  539. closestUpToMFPS.numerator = (uint32_t) clamp_Uint(range.minFrameDuration.timescale, 0, UINT32_MAX);
  540. closestUpToMFPS.denominator = (uint32_t) clamp_Uint(range.minFrameDuration.value, 0, UINT32_MAX);
  541. }
  542. if (range.minFrameRate > outputFPS && range.minFrameRate < closestAbove) {
  543. closestAbove = range.minFrameRate;
  544. closestAboveMFPS.numerator = (uint32_t) clamp_Uint(range.maxFrameDuration.timescale, 0, UINT32_MAX);
  545. closestAboveMFPS.denominator = (uint32_t) clamp_Uint(range.maxFrameDuration.value, 0, UINT32_MAX);
  546. }
  547. }
  548. if (closestUpTo > 0) {
  549. return closestUpToMFPS;
  550. } else {
  551. return closestAboveMFPS;
  552. }
  553. }
  554. + (NSString *)aspectRatioStringFromDimensions:(CMVideoDimensions)dimensions
  555. {
  556. if (dimensions.width <= 0 || dimensions.height <= 0) {
  557. return @"";
  558. }
  559. double divisor = (double) gcd(dimensions.width, dimensions.height);
  560. if (divisor <= 50) {
  561. if (dimensions.width > dimensions.height) {
  562. double x = (double) dimensions.width / (double) dimensions.height;
  563. return [NSString stringWithFormat:@"%.2f:1", x];
  564. } else {
  565. double y = (double) dimensions.height / (double) dimensions.width;
  566. return [NSString stringWithFormat:@"1:%.2f", y];
  567. }
  568. } else {
  569. SInt32 x = dimensions.width / (SInt32) divisor;
  570. SInt32 y = dimensions.height / (SInt32) divisor;
  571. if (x == 8 && y == 5) {
  572. x = 16;
  573. y = 10;
  574. }
  575. return [NSString stringWithFormat:@"%i:%i", x, y];
  576. }
  577. }
  578. + (NSString *)stringFromSettings:(void *)settings withSetting:(NSString *)setting
  579. {
  580. return [OBSAVCapture stringFromSettings:settings withSetting:setting withDefault:@""];
  581. }
  582. + (NSString *)stringFromSettings:(void *)settings withSetting:(NSString *)setting withDefault:(NSString *)defaultValue
  583. {
  584. NSString *result;
  585. if (settings) {
  586. const char *setting_value = obs_data_get_string(settings, setting.UTF8String);
  587. if (!setting_value) {
  588. result = [NSString stringWithString:defaultValue];
  589. } else {
  590. result = @(setting_value);
  591. }
  592. } else {
  593. result = [NSString stringWithString:defaultValue];
  594. }
  595. return result;
  596. }
/// Checks whether any macOS system video effect is active on the given capture device and returns
/// the corresponding warning text lookup key.
///
/// Each check is gated on the macOS version that introduced the associated API. When more than one
/// effect is active, a combined "multiple effects" key replaces the individual effect's key.
/// @param device The capture device to inspect
/// @return A localization lookup key for the warning text, or nil if no effect is active
+ (NSString *)effectsWarningForDevice:(AVCaptureDevice *)device
{
    int effectsCount = 0;
    NSString *effectWarning = nil;

    if (@available(macOS 12.0, *)) {
        if (device.portraitEffectActive) {
            effectWarning = @"Warning.Effect.Portrait";
            effectsCount++;
        }
    }

    if (@available(macOS 12.3, *)) {
        if (device.centerStageActive) {
            effectWarning = @"Warning.Effect.CenterStage";
            effectsCount++;
        }
    }

    if (@available(macOS 13.0, *)) {
        if (device.studioLightActive) {
            effectWarning = @"Warning.Effect.StudioLight";
            effectsCount++;
        }
    }

    if (@available(macOS 14.0, *)) {
        /// Reaction effects do not follow the same paradigm as other effects in terms of checking whether they are active. According to Apple, this is because a device instance property `reactionEffectsActive` would have been ambiguous (conflicting with whether a reaction is currently rendering).
        ///
        /// Instead, Apple exposes the `AVCaptureDevice.reactionEffectGesturesEnabled` class property (an equivalent exists for all other effects, but is hidden/private) to tell us whether the effect is enabled application-wide, as well as the `device.canPerformReactionEffects` instance property to tell us whether the device's active format currently supports the effect.
        ///
        /// The logical conjunction of these two properties tells us whether the effect is 'active'; i.e. whether putting our thumbs inside the video frame will make fireworks appear. The device instance properties for other effects are a convenience 'shorthand' for this private class/instance property combination.
        if (device.canPerformReactionEffects && AVCaptureDevice.reactionEffectGesturesEnabled) {
            effectWarning = @"Warning.Effect.Reactions";
            effectsCount++;
        }
    }

    if (@available(macOS 15.0, *)) {
        if (device.backgroundReplacementActive) {
            effectWarning = @"Warning.Effect.BackgroundReplacement";
            effectsCount++;
        }
    }

    // With multiple active effects a single combined warning is more useful than the last one won.
    if (effectsCount > 1) {
        effectWarning = @"Warning.Effect.Multiple";
    }

    return effectWarning;
}
  641. #pragma mark - Format Conversion Helpers
  642. + (NSString *)stringFromSubType:(FourCharCode)subtype
  643. {
  644. switch (subtype) {
  645. case kCVPixelFormatType_422YpCbCr8:
  646. return @"UYVY (2vuy)";
  647. case kCVPixelFormatType_422YpCbCr8_yuvs:
  648. return @"YUY2 (yuvs)";
  649. case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
  650. return @"NV12 (420v)";
  651. case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
  652. return @"NV12 (420f)";
  653. case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange:
  654. return @"P010 (xf20)";
  655. case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange:
  656. return @"P010 (x420)";
  657. case kCVPixelFormatType_32ARGB:
  658. return @"ARGB - 32ARGB";
  659. case kCVPixelFormatType_32BGRA:
  660. return @"BGRA - 32BGRA";
  661. case kCMVideoCodecType_Animation:
  662. return @"Apple Animation";
  663. case kCMVideoCodecType_Cinepak:
  664. return @"Cinepak";
  665. case kCMVideoCodecType_JPEG:
  666. return @"JPEG";
  667. case kCMVideoCodecType_JPEG_OpenDML:
  668. return @"MJPEG - JPEG OpenDML";
  669. case kCMVideoCodecType_SorensonVideo:
  670. return @"Sorenson Video";
  671. case kCMVideoCodecType_SorensonVideo3:
  672. return @"Sorenson Video 3";
  673. case kCMVideoCodecType_H263:
  674. return @"H.263";
  675. case kCMVideoCodecType_H264:
  676. return @"H.264";
  677. case kCMVideoCodecType_MPEG4Video:
  678. return @"MPEG-4";
  679. case kCMVideoCodecType_MPEG2Video:
  680. return @"MPEG-2";
  681. case kCMVideoCodecType_MPEG1Video:
  682. return @"MPEG-1";
  683. case kCMVideoCodecType_DVCNTSC:
  684. return @"DV NTSC";
  685. case kCMVideoCodecType_DVCPAL:
  686. return @"DV PAL";
  687. case kCMVideoCodecType_DVCProPAL:
  688. return @"Panasonic DVCPro Pal";
  689. case kCMVideoCodecType_DVCPro50NTSC:
  690. return @"Panasonic DVCPro-50 NTSC";
  691. case kCMVideoCodecType_DVCPro50PAL:
  692. return @"Panasonic DVCPro-50 PAL";
  693. case kCMVideoCodecType_DVCPROHD720p60:
  694. return @"Panasonic DVCPro-HD 720p60";
  695. case kCMVideoCodecType_DVCPROHD720p50:
  696. return @"Panasonic DVCPro-HD 720p50";
  697. case kCMVideoCodecType_DVCPROHD1080i60:
  698. return @"Panasonic DVCPro-HD 1080i60";
  699. case kCMVideoCodecType_DVCPROHD1080i50:
  700. return @"Panasonic DVCPro-HD 1080i50";
  701. case kCMVideoCodecType_DVCPROHD1080p30:
  702. return @"Panasonic DVCPro-HD 1080p30";
  703. case kCMVideoCodecType_DVCPROHD1080p25:
  704. return @"Panasonic DVCPro-HD 1080p25";
  705. case kCMVideoCodecType_AppleProRes4444:
  706. return @"Apple ProRes 4444";
  707. case kCMVideoCodecType_AppleProRes422HQ:
  708. return @"Apple ProRes 422 HQ";
  709. case kCMVideoCodecType_AppleProRes422:
  710. return @"Apple ProRes 422";
  711. case kCMVideoCodecType_AppleProRes422LT:
  712. return @"Apple ProRes 422 LT";
  713. case kCMVideoCodecType_AppleProRes422Proxy:
  714. return @"Apple ProRes 422 Proxy";
  715. default:
  716. return @"Unknown";
  717. }
  718. }
  719. + (NSString *)stringFromColorspace:(enum video_colorspace)colorspace
  720. {
  721. switch (colorspace) {
  722. case VIDEO_CS_DEFAULT:
  723. return @"Default";
  724. case VIDEO_CS_601:
  725. return @"CS 601";
  726. case VIDEO_CS_709:
  727. return @"CS 709";
  728. case VIDEO_CS_SRGB:
  729. return @"sRGB";
  730. case VIDEO_CS_2100_PQ:
  731. return @"CS 2100 (PQ)";
  732. case VIDEO_CS_2100_HLG:
  733. return @"CS 2100 (HLG)";
  734. default:
  735. return @"Unknown";
  736. }
  737. }
  738. + (NSString *)stringFromVideoRange:(enum video_range_type)videoRange
  739. {
  740. switch (videoRange) {
  741. case VIDEO_RANGE_FULL:
  742. return @"Full";
  743. case VIDEO_RANGE_PARTIAL:
  744. return @"Partial";
  745. case VIDEO_RANGE_DEFAULT:
  746. return @"Default";
  747. }
  748. }
  749. + (NSString *)stringFromCapturePreset:(AVCaptureSessionPreset)preset
  750. {
  751. NSDictionary *presetDescriptions = @{
  752. AVCaptureSessionPresetLow: @"Low",
  753. AVCaptureSessionPresetMedium: @"Medium",
  754. AVCaptureSessionPresetHigh: @"High",
  755. AVCaptureSessionPreset320x240: @"320x240",
  756. AVCaptureSessionPreset352x288: @"352x288",
  757. AVCaptureSessionPreset640x480: @"640x480",
  758. AVCaptureSessionPreset960x540: @"960x460",
  759. AVCaptureSessionPreset1280x720: @"1280x720",
  760. AVCaptureSessionPreset1920x1080: @"1920x1080",
  761. AVCaptureSessionPreset3840x2160: @"3840x2160",
  762. };
  763. NSString *presetDescription = [presetDescriptions objectForKey:preset];
  764. if (!presetDescription) {
  765. return [NSString stringWithFormat:@"Unknown (%@)", preset];
  766. } else {
  767. return presetDescription;
  768. }
  769. }
  770. + (NSString *)stringFromFourCharCode:(OSType)fourCharCode
  771. {
  772. char cString[5] = {(fourCharCode >> 24) & 0xFF, (fourCharCode >> 16) & 0xFF, (fourCharCode >> 8) & 0xFF,
  773. fourCharCode & 0xFF, 0};
  774. NSString *codeString = @(cString);
  775. return codeString;
  776. }
  777. + (FourCharCode)fourCharCodeFromString:(NSString *)codeString
  778. {
  779. FourCharCode fourCharCode;
  780. const char *cString = codeString.UTF8String;
  781. fourCharCode = (cString[0] << 24) | (cString[1] << 16) | (cString[2] << 8) | cString[3];
  782. return fourCharCode;
  783. }
  784. + (BOOL)isValidColorspace:(enum video_colorspace)colorspace
  785. {
  786. switch (colorspace) {
  787. case VIDEO_CS_DEFAULT:
  788. case VIDEO_CS_601:
  789. case VIDEO_CS_709:
  790. return YES;
  791. default:
  792. return NO;
  793. }
  794. }
  795. + (BOOL)isValidVideoRange:(enum video_range_type)videoRange
  796. {
  797. switch (videoRange) {
  798. case VIDEO_RANGE_DEFAULT:
  799. case VIDEO_RANGE_PARTIAL:
  800. case VIDEO_RANGE_FULL:
  801. return YES;
  802. default:
  803. return NO;
  804. }
  805. }
  806. + (BOOL)isFullRangeFormat:(FourCharCode)pixelFormat
  807. {
  808. switch (pixelFormat) {
  809. case kCVPixelFormatType_420YpCbCr8PlanarFullRange:
  810. case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
  811. case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange:
  812. case kCVPixelFormatType_422YpCbCr8FullRange:
  813. return YES;
  814. default:
  815. return NO;
  816. }
  817. }
  818. + (OBSAVCaptureVideoFormat)formatFromSubtype:(FourCharCode)subtype
  819. {
  820. switch (subtype) {
  821. case kCVPixelFormatType_422YpCbCr8:
  822. return VIDEO_FORMAT_UYVY;
  823. case kCVPixelFormatType_422YpCbCr8_yuvs:
  824. return VIDEO_FORMAT_YUY2;
  825. case kCVPixelFormatType_32BGRA:
  826. return VIDEO_FORMAT_BGRA;
  827. case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
  828. case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
  829. return VIDEO_FORMAT_NV12;
  830. case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange:
  831. case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange:
  832. return VIDEO_FORMAT_P010;
  833. default:
  834. return VIDEO_FORMAT_NONE;
  835. }
  836. }
  837. + (FourCharCode)fourCharCodeFromFormat:(OBSAVCaptureVideoFormat)format withRange:(enum video_range_type)videoRange
  838. {
  839. switch (format) {
  840. case VIDEO_FORMAT_UYVY:
  841. return kCVPixelFormatType_422YpCbCr8;
  842. case VIDEO_FORMAT_YUY2:
  843. return kCVPixelFormatType_422YpCbCr8_yuvs;
  844. case VIDEO_FORMAT_NV12:
  845. if (videoRange == VIDEO_RANGE_FULL) {
  846. return kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
  847. } else {
  848. return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
  849. }
  850. case VIDEO_FORMAT_P010:
  851. if (videoRange == VIDEO_RANGE_FULL) {
  852. return kCVPixelFormatType_420YpCbCr10BiPlanarFullRange;
  853. } else {
  854. return kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange;
  855. }
  856. case VIDEO_FORMAT_BGRA:
  857. return kCVPixelFormatType_32BGRA;
  858. default:
  859. return 0;
  860. }
  861. }
  862. + (FourCharCode)fourCharCodeFromFormat:(OBSAVCaptureVideoFormat)format
  863. {
  864. return [OBSAVCapture fourCharCodeFromFormat:format withRange:VIDEO_RANGE_PARTIAL];
  865. }
/// Builds a human-readable description of the supplied frame rate ranges, e.g. "25, 30, 50-60 FPS".
///
/// Ranges are sorted (descending by maximum, then minimum frame rate) and then emitted in
/// ascending order. Whole-number rates are printed without decimals, fractional rates with two
/// decimal places. When more than kMaxFrameRateRangesInDescription ranges exist, a compact
/// "min-max FPS (n values)" summary is returned instead.
/// @param ranges The frame rate ranges reported by a device format
/// @return The formatted description, or nil if ranges is empty
+ (NSString *)frameRateDescription:(NSArray<AVFrameRateRange *> *)ranges
{
    // The videoSupportedFrameRateRanges property seems to provide frame rate ranges in this order, but since that
    // ordering does not seem to be guaranteed, ensure they are sorted anyway.
    NSArray<AVFrameRateRange *> *sortedRangesDescending = [ranges
        sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *_Nonnull lhs, AVFrameRateRange *_Nonnull rhs) {
            if (lhs.maxFrameRate > rhs.maxFrameRate) {
                return NSOrderedAscending;
            } else if (lhs.maxFrameRate < rhs.maxFrameRate) {
                return NSOrderedDescending;
            }
            // Equal maximums: break the tie on the minimum frame rate.
            if (lhs.minFrameRate > rhs.minFrameRate) {
                return NSOrderedAscending;
            } else if (lhs.minFrameRate < rhs.minFrameRate) {
                return NSOrderedDescending;
            }
            return NSOrderedSame;
        }];

    NSString *frameRateDescription;
    NSMutableArray *frameRateDescriptions = [[NSMutableArray alloc] initWithCapacity:ranges.count];

    // Iterate in reverse (ascending frame rate) so the description reads from lowest to highest.
    for (AVFrameRateRange *range in [sortedRangesDescending reverseObjectEnumerator]) {
        // Round to two decimal places so near-integer rates (e.g. 29.999) display cleanly.
        double minFrameRate = round(range.minFrameRate * 100) / 100;
        double maxFrameRate = round(range.maxFrameRate * 100) / 100;

        if (minFrameRate == maxFrameRate) {
            // Degenerate range: a single frame rate value.
            if (fmod(minFrameRate, 1.0) == 0 && fmod(maxFrameRate, 1.0) == 0) {
                [frameRateDescriptions addObject:[NSString stringWithFormat:@"%.0f", maxFrameRate]];
            } else {
                [frameRateDescriptions addObject:[NSString stringWithFormat:@"%.2f", maxFrameRate]];
            }
        } else {
            if (fmod(minFrameRate, 1.0) == 0 && fmod(maxFrameRate, 1.0) == 0) {
                [frameRateDescriptions addObject:[NSString stringWithFormat:@"%.0f-%.0f", minFrameRate, maxFrameRate]];
            } else {
                [frameRateDescriptions addObject:[NSString stringWithFormat:@"%.2f-%.2f", minFrameRate, maxFrameRate]];
            }
        }
    }

    if (frameRateDescriptions.count > 0 && frameRateDescriptions.count <= kMaxFrameRateRangesInDescription) {
        frameRateDescription = [frameRateDescriptions componentsJoinedByString:@", "];
        frameRateDescription = [frameRateDescription stringByAppendingString:@" FPS"];
    } else if (frameRateDescriptions.count > kMaxFrameRateRangesInDescription) {
        // Too many entries for a list: summarize with the overall span and a value count.
        frameRateDescription =
            [NSString stringWithFormat:@"%.0f-%.0f FPS (%lu values)", sortedRangesDescending.lastObject.minFrameRate,
                                       sortedRangesDescending.firstObject.maxFrameRate, sortedRangesDescending.count];
    }

    // Remains nil when ranges is empty.
    return frameRateDescription;
}
  913. + (OBSAVCaptureColorSpace)colorspaceFromDescription:(CMFormatDescriptionRef)description
  914. {
  915. CFPropertyListRef matrix = CMFormatDescriptionGetExtension(description, kCMFormatDescriptionExtension_YCbCrMatrix);
  916. if (!matrix) {
  917. return VIDEO_CS_DEFAULT;
  918. }
  919. CFComparisonResult is601 = CFStringCompare(matrix, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0);
  920. CFComparisonResult is709 = CFStringCompare(matrix, kCVImageBufferYCbCrMatrix_ITU_R_709_2, 0);
  921. CFComparisonResult is2020 = CFStringCompare(matrix, kCVImageBufferYCbCrMatrix_ITU_R_2020, 0);
  922. if (is601 == kCFCompareEqualTo) {
  923. return VIDEO_CS_601;
  924. } else if (is709 == kCFCompareEqualTo) {
  925. return VIDEO_CS_709;
  926. } else if (is2020 == kCFCompareEqualTo) {
  927. CFPropertyListRef transferFunction =
  928. CMFormatDescriptionGetExtension(description, kCMFormatDescriptionExtension_TransferFunction);
  929. if (!matrix) {
  930. return VIDEO_CS_DEFAULT;
  931. }
  932. CFComparisonResult isPQ = CFStringCompare(transferFunction, kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ, 0);
  933. CFComparisonResult isHLG = CFStringCompare(transferFunction, kCVImageBufferTransferFunction_ITU_R_2100_HLG, 0);
  934. if (isPQ == kCFCompareEqualTo) {
  935. return VIDEO_CS_2100_PQ;
  936. } else if (isHLG == kCFCompareEqualTo) {
  937. return VIDEO_CS_2100_HLG;
  938. }
  939. }
  940. return VIDEO_CS_DEFAULT;
  941. }
  942. #pragma mark - Notification Handlers
/// Notification handler for AVCaptureDevice connection events.
///
/// If the connected device is not the one configured for this source, or a device input is already
/// active, only the source's property list is refreshed. Otherwise the newly available device is
/// switched to, the session is configured (with a preset when enabled and not on the fast path),
/// and capture is started asynchronously on the session queue.
/// @param notification Notification whose object is the newly connected AVCaptureDevice
- (void)deviceConnected:(NSNotification *)notification
{
    AVCaptureDevice *device = notification.object;

    if (!device) {
        return;
    }

    // A different device appeared: refresh properties so it becomes selectable, nothing more.
    if (![[device uniqueID] isEqualTo:self.deviceUUID]) {
        obs_source_update_properties(self.captureInfo->source);
        return;
    }

    // The configured device already has an active input; no reconnection needed.
    if (self.deviceInput.device) {
        [self AVCaptureLog:LOG_INFO withFormat:@"Received connect event with active device '%@' (UUID %@)",
                                               self.deviceInput.device.localizedName, self.deviceInput.device.uniqueID];
        obs_source_update_properties(self.captureInfo->source);
        return;
    }

    [self AVCaptureLog:LOG_INFO
            withFormat:@"Received connect event for device '%@' (UUID %@)", device.localizedName, device.uniqueID];

    NSError *error;
    NSString *presetName = [OBSAVCapture stringFromSettings:self.captureInfo->settings withSetting:@"preset"];
    BOOL isPresetEnabled = obs_data_get_bool(self.captureInfo->settings, "use_preset");
    BOOL isFastPath = self.captureInfo->isFastPath;

    if ([self switchCaptureDevice:device.uniqueID withError:&error]) {
        BOOL success;

        // Presets only apply to the non-fast-path pipeline.
        if (isPresetEnabled && !isFastPath) {
            success = [self configureSessionWithPreset:presetName withError:&error];
        } else {
            success = [self configureSession:&error];
        }

        if (success) {
            // Session start must happen on the dedicated session queue.
            dispatch_async(self.sessionQueue, ^{
                [self startCaptureSession];
            });
        } else {
            [self AVCaptureLog:LOG_ERROR withFormat:error.localizedDescription];
        }
    } else {
        [self AVCaptureLog:LOG_ERROR withFormat:error.localizedDescription];
    }

    obs_source_update_properties(self.captureInfo->source);
}
/// Notification handler for AVCaptureDevice disconnection events.
///
/// If the disconnected device is not this source's configured device, or no input is active, only
/// the property list is refreshed. Otherwise capture is stopped and the device input is removed
/// asynchronously on the session queue.
/// @param notification Notification whose object is the disconnected AVCaptureDevice
- (void)deviceDisconnected:(NSNotification *)notification
{
    AVCaptureDevice *device = notification.object;

    if (!device) {
        return;
    }

    // Some other device disappeared: refresh properties so it is removed from the selection list.
    if (![[device uniqueID] isEqualTo:self.deviceUUID]) {
        obs_source_update_properties(self.captureInfo->source);
        return;
    }

    // Disconnect event for our device while no input is active — unexpected, log and bail.
    if (!self.deviceInput.device) {
        [self AVCaptureLog:LOG_ERROR withFormat:@"Received disconnect event for inactive device '%@' (UUID %@)",
                                                device.localizedName, device.uniqueID];
        obs_source_update_properties(self.captureInfo->source);
        return;
    }

    [self AVCaptureLog:LOG_INFO
            withFormat:@"Received disconnect event for device '%@' (UUID %@)", device.localizedName, device.uniqueID];

    // Tear down on the session queue; weak reference avoids retaining self past its lifetime.
    __weak OBSAVCapture *weakSelf = self;

    dispatch_async(self.sessionQueue, ^{
        OBSAVCapture *instance = weakSelf;

        [instance stopCaptureSession];
        [instance.session removeInput:instance.deviceInput];
        instance.deviceInput = nil;

        instance = nil;
    });

    obs_source_update_properties(self.captureInfo->source);
}
  1012. #pragma mark - AVCapture Delegate Methods
  1013. - (void)captureOutput:(AVCaptureOutput *)output
  1014. didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
  1015. fromConnection:(AVCaptureConnection *)connection
  1016. {
  1017. return;
  1018. }
/// Delegate callback for new sample buffers delivered by the active capture session.
///
/// Video buffers take one of two routes:
/// * Fast path: the buffer's backing IOSurface is retained (replacing the previously held surface)
///   under the capture-info mutex for direct rendering. Only BGRA/ARGB2101010 buffers qualify.
/// * Default path: the buffer's pixel data is wrapped in the shared OBS video frame; color matrix
///   parameters are recomputed only when the buffer's colorspace or pixel format no longer matches
///   the last processed frame, then the frame is pushed via obs_source_output_video.
/// Audio buffers are copied into the shared OBS audio frame and pushed via obs_source_output_audio.
- (void)captureOutput:(AVCaptureOutput *)output
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection
{
    CMItemCount sampleCount = CMSampleBufferGetNumSamples(sampleBuffer);

    // Ignore empty buffers and callbacks that arrive after capture info has been torn down.
    if (!_captureInfo || sampleCount < 1) {
        return;
    }

    // Presentation timestamp converted to nanoseconds for OBS frame timestamps.
    CMTime presentationTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer);
    CMTime presentationNanoTimeStamp = CMTimeConvertScale(presentationTimeStamp, 1E9, kCMTimeRoundingMethod_Default);

    CMFormatDescriptionRef description = CMSampleBufferGetFormatDescription(sampleBuffer);
    CMMediaType mediaType = CMFormatDescriptionGetMediaType(description);

    switch (mediaType) {
        case kCMMediaType_Video: {
            CMVideoDimensions sampleBufferDimensions = CMVideoFormatDescriptionGetDimensions(description);
            CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
            FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(description);

            // Start from the last known format state; marked valid only once this buffer checks out.
            OBSAVCaptureVideoInfo newInfo = {.fourCC = _videoInfo.fourCC,
                                             .colorSpace = _videoInfo.colorSpace,
                                             .isValid = false};

            BOOL usePreset = obs_data_get_bool(_captureInfo->settings, "use_preset");

            if (_isFastPath) {
                // Fast path can only render formats backed by a directly usable IOSurface.
                if (mediaSubType != kCVPixelFormatType_32BGRA &&
                    mediaSubType != kCVPixelFormatType_ARGB2101010LEPacked) {
                    _captureInfo->lastError = OBSAVCaptureError_SampleBufferFormat;
                    CMFormatDescriptionCreate(kCFAllocatorDefault, mediaType, mediaSubType, NULL,
                                              &_captureInfo->sampleBufferDescription);
                    obs_source_update_properties(_captureInfo->source);
                    break;
                } else {
                    _captureInfo->lastError = OBSAVCaptureError_NoError;
                    _captureInfo->sampleBufferDescription = NULL;
                }

                CVPixelBufferLockBaseAddress(imageBuffer, 0);
                IOSurfaceRef frameSurface = CVPixelBufferGetIOSurface(imageBuffer);
                CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

                IOSurfaceRef previousSurface = NULL;

                // Swap in the new surface under the mutex shared with the consumer of currentSurface.
                if (frameSurface && !pthread_mutex_lock(&_captureInfo->mutex)) {
                    NSRect frameSize = _captureInfo->frameSize;

                    if (frameSize.size.width != sampleBufferDimensions.width ||
                        frameSize.size.height != sampleBufferDimensions.height) {
                        frameSize = CGRectMake(0, 0, sampleBufferDimensions.width, sampleBufferDimensions.height);
                    }

                    previousSurface = _captureInfo->currentSurface;
                    _captureInfo->currentSurface = frameSurface;
                    // Retain and mark in-use so the surface stays alive until the next swap.
                    CFRetain(_captureInfo->currentSurface);
                    IOSurfaceIncrementUseCount(_captureInfo->currentSurface);
                    pthread_mutex_unlock(&_captureInfo->mutex);

                    newInfo.isValid = true;

                    if (_videoInfo.isValid != newInfo.isValid) {
                        obs_source_update_properties(_captureInfo->source);
                    }

                    _captureInfo->frameSize = frameSize;
                    _videoInfo = newInfo;
                }

                // Balance the retain/use-count taken when the displaced surface was swapped in.
                if (previousSurface) {
                    IOSurfaceDecrementUseCount(previousSurface);
                    CFRelease(previousSurface);
                }
                break;
            } else {
                OBSAVCaptureVideoFrame *frame = _captureInfo->videoFrame;
                frame->timestamp = presentationNanoTimeStamp.value;

                enum video_format videoFormat = [OBSAVCapture formatFromSubtype:mediaSubType];

                if (videoFormat == VIDEO_FORMAT_NONE) {
                    // Unsupported pixel format: record the error and the offending description.
                    _captureInfo->lastError = OBSAVCaptureError_SampleBufferFormat;
                    CMFormatDescriptionCreate(kCFAllocatorDefault, mediaType, mediaSubType, NULL,
                                              &_captureInfo->sampleBufferDescription);
                } else {
                    _captureInfo->lastError = OBSAVCaptureError_NoError;
                    _captureInfo->sampleBufferDescription = NULL;

#ifdef DEBUG
                    if (frame->format != VIDEO_FORMAT_NONE && frame->format != videoFormat) {
                        [self AVCaptureLog:LOG_DEBUG
                                withFormat:@"Switching fourcc: '%@' (0x%x) -> '%@' (0x%x)",
                                           [OBSAVCapture stringFromFourCharCode:frame->format], frame->format,
                                           [OBSAVCapture stringFromFourCharCode:mediaSubType], mediaSubType];
                    }
#endif

                    bool isFrameYuv = format_is_yuv(frame->format);
                    bool isSampleBufferYuv = format_is_yuv(videoFormat);

                    frame->format = videoFormat;
                    frame->width = sampleBufferDimensions.width;
                    frame->height = sampleBufferDimensions.height;

                    BOOL isSampleBufferFullRange = [OBSAVCapture isFullRangeFormat:mediaSubType];

                    if (isSampleBufferYuv) {
                        OBSAVCaptureColorSpace sampleBufferColorSpace =
                            [OBSAVCapture colorspaceFromDescription:description];
                        OBSAVCaptureVideoRange sampleBufferRangeType = isSampleBufferFullRange ? VIDEO_RANGE_FULL
                                                                                              : VIDEO_RANGE_PARTIAL;

                        // Color parameters only need recomputation when colorspace or fourCC changed
                        // relative to the last processed frame (against the buffer's values when
                        // using presets, against the configured values otherwise).
                        BOOL isColorSpaceMatching = NO;
                        SInt64 configuredColorSpace = _captureInfo->configuredColorSpace;

                        if (usePreset) {
                            isColorSpaceMatching = sampleBufferColorSpace == _videoInfo.colorSpace;
                        } else {
                            isColorSpaceMatching = configuredColorSpace == _videoInfo.colorSpace;
                        }

                        BOOL isFourCCMatching = NO;
                        SInt64 configuredFourCC = _captureInfo->configuredFourCC;

                        if (usePreset) {
                            isFourCCMatching = mediaSubType == _videoInfo.fourCC;
                        } else {
                            isFourCCMatching = configuredFourCC == _videoInfo.fourCC;
                        }

                        if (isColorSpaceMatching && isFourCCMatching) {
                            newInfo.isValid = true;
                        } else {
                            frame->full_range = isSampleBufferFullRange;

                            bool success = video_format_get_parameters_for_format(
                                sampleBufferColorSpace, sampleBufferRangeType, frame->format, frame->color_matrix,
                                frame->color_range_min, frame->color_range_max);

                            if (!success) {
                                _captureInfo->lastError = OBSAVCaptureError_ColorSpace;
                                CMFormatDescriptionCreate(kCFAllocatorDefault, mediaType, mediaSubType, NULL,
                                                          &_captureInfo->sampleBufferDescription);
                                newInfo.isValid = false;
                            } else {
                                newInfo.colorSpace = sampleBufferColorSpace;
                                newInfo.fourCC = mediaSubType;
                                newInfo.isValid = true;
                            }
                        }
                    } else if (!isFrameYuv && !isSampleBufferYuv) {
                        // RGB-style formats need no color matrix setup.
                        newInfo.isValid = true;
                    }
                }

                if (newInfo.isValid != _videoInfo.isValid) {
                    obs_source_update_properties(_captureInfo->source);
                }

                _videoInfo = newInfo;

                if (newInfo.isValid) {
                    // Frame data points directly into the locked pixel buffer while OBS copies it.
                    CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);

                    if (!CVPixelBufferIsPlanar(imageBuffer)) {
                        frame->linesize[0] = (UInt32) CVPixelBufferGetBytesPerRow(imageBuffer);
                        frame->data[0] = CVPixelBufferGetBaseAddress(imageBuffer);
                    } else {
                        size_t planeCount = CVPixelBufferGetPlaneCount(imageBuffer);

                        for (size_t i = 0; i < planeCount; i++) {
                            frame->linesize[i] = (UInt32) CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, i);
                            frame->data[i] = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, i);
                        }
                    }

                    obs_source_output_video(_captureInfo->source, frame);
                    CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
                } else {
                    // Invalid state: push NULL so OBS clears the source's last frame.
                    obs_source_output_video(_captureInfo->source, NULL);
                }
                break;
            }
        }
        case kCMMediaType_Audio: {
            // First call only queries the required AudioBufferList size for this sample buffer.
            size_t requiredBufferListSize;
            OSStatus status = noErr;

            status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
                sampleBuffer, &requiredBufferListSize, NULL, 0, NULL, NULL,
                kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, NULL);

            if (status != noErr) {
                _captureInfo->lastAudioError = OBSAVCaptureError_AudioBuffer;
                obs_source_update_properties(_captureInfo->source);
                break;
            }

            AudioBufferList *bufferList = (AudioBufferList *) malloc(requiredBufferListSize);
            CMBlockBufferRef blockBuffer = NULL;
            OSStatus error = noErr;

            // Second call fills the buffer list and retains the backing block buffer.
            error = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
                sampleBuffer, NULL, bufferList, requiredBufferListSize, kCFAllocatorSystemDefault,
                kCFAllocatorSystemDefault, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer);

            if (error == noErr) {
                _captureInfo->lastAudioError = OBSAVCaptureError_NoError;
                OBSAVCaptureAudioFrame *audio = _captureInfo->audioFrame;

                for (size_t i = 0; i < bufferList->mNumberBuffers; i++) {
                    audio->data[i] = bufferList->mBuffers[i].mData;
                }

                audio->timestamp = presentationNanoTimeStamp.value;
                audio->frames = (uint32_t) CMSampleBufferGetNumSamples(sampleBuffer);

                const AudioStreamBasicDescription *basicDescription =
                    CMAudioFormatDescriptionGetStreamBasicDescription(description);

                audio->samples_per_sec = (uint32_t) basicDescription->mSampleRate;
                // NOTE(review): casts the channel count directly to an OBS speaker layout — this
                // assumes the speaker_layout enum values correspond to channel counts; confirm for
                // layouts beyond stereo.
                audio->speakers = (enum speaker_layout) basicDescription->mChannelsPerFrame;

                switch (basicDescription->mBitsPerChannel) {
                    case 8:
                        audio->format = AUDIO_FORMAT_U8BIT;
                        break;
                    case 16:
                        audio->format = AUDIO_FORMAT_16BIT;
                        break;
                    case 32:
                        audio->format = AUDIO_FORMAT_32BIT;
                        break;
                    default:
                        audio->format = AUDIO_FORMAT_UNKNOWN;
                        break;
                }

                obs_source_output_audio(_captureInfo->source, audio);
            } else {
                _captureInfo->lastAudioError = OBSAVCaptureError_AudioBuffer;
                obs_source_output_audio(_captureInfo->source, NULL);
            }

            // Release the retained block buffer and the temporary buffer list.
            if (blockBuffer != NULL) {
                CFRelease(blockBuffer);
            }

            if (bufferList != NULL) {
                free(bufferList);
                bufferList = NULL;
            }
            break;
        }
        default:
            break;
    }
}
  1230. #pragma mark - Log Helpers
  1231. - (void)AVCaptureLog:(int)logLevel withFormat:(NSString *)format, ...
  1232. {
  1233. va_list args;
  1234. va_start(args, format);
  1235. NSString *logMessage = [[NSString alloc] initWithFormat:format arguments:args];
  1236. va_end(args);
  1237. const char *name_value = obs_source_get_name(self.captureInfo->source);
  1238. NSString *sourceName = @((name_value) ? name_value : "");
  1239. blog(logLevel, "%s: %s", sourceName.UTF8String, logMessage.UTF8String);
  1240. }
  1241. + (void)AVCaptureLog:(int)logLevel withFormat:(NSString *)format, ...
  1242. {
  1243. va_list args;
  1244. va_start(args, format);
  1245. NSString *logMessage = [[NSString alloc] initWithFormat:format arguments:args];
  1246. va_end(args);
  1247. blog(logLevel, "%s", logMessage.UTF8String);
  1248. }
  1249. @end