OBSAVCapture.m 59 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515
  1. //
  2. // OBSAVCapture.m
  3. // mac-avcapture
  4. //
  5. // Created by Patrick Heyer on 2023-03-07.
  6. //
  7. #import "OBSAVCapture.h"
  8. #import "AVCaptureDeviceFormat+OBSListable.h"
/// The maximum number of frame rate ranges to show complete information for before providing a more generic description of the supported frame rates inside of a device format description.
  10. static const UInt32 kMaxFrameRateRangesInDescription = 10;
  11. @implementation OBSAVCapture
/// Convenience initializer; creates an instance without capture info by forwarding to the
/// designated initializer.
- (instancetype)init
{
    return [self initWithCaptureInfo:nil];
}
/// Designated initializer. Enables CoreMediaIO screen capture devices, sets up the serial
/// session queue and device (dis-)connection observers, and — when capture info is provided —
/// asynchronously creates, configures, and starts the capture session.
///
/// @param capture_info Pointer to the capture info struct shared with the OBS source
///        (settings, source pointer, fast-path flag). May be nil, in which case session
///        setup is skipped entirely.
/// @return The initialized instance.
- (instancetype)initWithCaptureInfo:(OBSAVCaptureInfo *)capture_info
{
    self = [super init];
    if (self) {
        // Opt in to CMIO "screen capture" devices so they appear as capture devices.
        CMIOObjectPropertyAddress propertyAddress = {kCMIOHardwarePropertyAllowScreenCaptureDevices,
                                                     kCMIOObjectPropertyScopeGlobal, kCMIOObjectPropertyElementMain};
        UInt32 allow = 1;
        CMIOObjectSetPropertyData(kCMIOObjectSystemObject, &propertyAddress, 0, NULL, sizeof(allow), &allow);
        _errorDomain = @"com.obsproject.obs-studio.av-capture";
        // Human-readable labels for the AVCaptureSession presets supported by the source.
        _presetList = @{
            AVCaptureSessionPresetLow: @"Low",
            AVCaptureSessionPresetMedium: @"Medium",
            AVCaptureSessionPresetHigh: @"High",
            AVCaptureSessionPreset320x240: @"320x240",
            AVCaptureSessionPreset352x288: @"352x288",
            AVCaptureSessionPreset640x480: @"640x480",
            AVCaptureSessionPreset960x540: @"960x540",
            AVCaptureSessionPreset1280x720: @"1280x720",
            AVCaptureSessionPreset1920x1080: @"1920x1080",
            AVCaptureSessionPreset3840x2160: @"3840x2160",
        };
        // Serial queue on which all session creation/configuration/start work runs.
        _sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
        OBSAVCaptureVideoInfo newInfo = {0};
        _videoInfo = newInfo;
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(deviceDisconnected:)
                                                     name:AVCaptureDeviceWasDisconnectedNotification
                                                   object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(deviceConnected:)
                                                     name:AVCaptureDeviceWasConnectedNotification
                                                   object:nil];
        if (capture_info) {
            _captureInfo = capture_info;
            NSString *UUID = [OBSAVCapture stringFromSettings:_captureInfo->settings withSetting:@"device"];
            NSString *presetName = [OBSAVCapture stringFromSettings:_captureInfo->settings withSetting:@"preset"];
            BOOL isPresetEnabled = obs_data_get_bool(_captureInfo->settings, "use_preset");
            if (capture_info->isFastPath) {
                _isFastPath = YES;
                _isPresetBased = NO;
            } else {
                // Sync (non-fast-path) sources honor the user's frame buffering preference.
                BOOL isBufferingEnabled = obs_data_get_bool(_captureInfo->settings, "buffering");
                obs_source_set_async_unbuffered(_captureInfo->source, !isBufferingEnabled);
            }
            // Session setup happens off the current thread on the serial session queue;
            // the weak reference avoids the block retaining self.
            __weak OBSAVCapture *weakSelf = self;
            dispatch_async(_sessionQueue, ^{
                NSError *error = nil;
                OBSAVCapture *instance = weakSelf;
                if ([instance createSession:&error]) {
                    if ([instance switchCaptureDevice:UUID withError:nil]) {
                        BOOL isSessionConfigured = NO;
                        if (isPresetEnabled) {
                            isSessionConfigured = [instance configureSessionWithPreset:presetName withError:nil];
                        } else {
                            isSessionConfigured = [instance configureSession:nil];
                        }
                        if (isSessionConfigured) {
                            [instance startCaptureSession];
                        }
                    }
                } else {
                    [instance AVCaptureLog:LOG_ERROR withFormat:error.localizedDescription];
                }
            });
        }
    }
    return self;
}
  82. #pragma mark - Capture Session Handling
  83. - (BOOL)createSession:(NSError *__autoreleasing *)error
  84. {
  85. AVCaptureSession *session = [[AVCaptureSession alloc] init];
  86. [session beginConfiguration];
  87. if (!session) {
  88. if (error) {
  89. NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create AVCaptureSession"};
  90. *error = [NSError errorWithDomain:self.errorDomain code:-101 userInfo:userInfo];
  91. }
  92. return NO;
  93. }
  94. AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
  95. if (!videoOutput) {
  96. if (error) {
  97. NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create AVCaptureVideoDataOutput"};
  98. *error = [NSError errorWithDomain:self.errorDomain code:-102 userInfo:userInfo];
  99. }
  100. return NO;
  101. }
  102. AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
  103. if (!audioOutput) {
  104. if (error) {
  105. NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create AVCaptureAudioDataOutput"};
  106. *error = [NSError errorWithDomain:self.errorDomain code:-103 userInfo:userInfo];
  107. }
  108. return NO;
  109. }
  110. dispatch_queue_t videoQueue = dispatch_queue_create(nil, nil);
  111. if (!videoQueue) {
  112. if (error) {
  113. NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create video dispatch queue"};
  114. *error = [NSError errorWithDomain:self.errorDomain code:-104 userInfo:userInfo];
  115. }
  116. return NO;
  117. }
  118. dispatch_queue_t audioQueue = dispatch_queue_create(nil, nil);
  119. if (!audioQueue) {
  120. if (error) {
  121. NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create audio dispatch queue"};
  122. *error = [NSError errorWithDomain:self.errorDomain code:-105 userInfo:userInfo];
  123. }
  124. return NO;
  125. }
  126. if ([session canAddOutput:videoOutput]) {
  127. [session addOutput:videoOutput];
  128. [videoOutput setSampleBufferDelegate:self queue:videoQueue];
  129. }
  130. if ([session canAddOutput:audioOutput]) {
  131. [session addOutput:audioOutput];
  132. [audioOutput setSampleBufferDelegate:self queue:audioQueue];
  133. }
  134. [session commitConfiguration];
  135. self.session = session;
  136. self.videoOutput = videoOutput;
  137. self.videoQueue = videoQueue;
  138. self.audioOutput = audioOutput;
  139. self.audioQueue = audioQueue;
  140. return YES;
  141. }
/// Switches the capture session to the device identified by the given unique ID, tearing
/// down any currently attached device input first.
///
/// @param uuid The unique ID of the device to switch to. An empty string deselects the
///        current device without treating it as an error condition worth warning about.
/// @param error Populated on failure (may come from AVCaptureDeviceInput creation or from
///        this method with code -107/-108).
/// @return YES if the device was attached and the output pixel format configured, NO otherwise.
- (BOOL)switchCaptureDevice:(NSString *)uuid withError:(NSError *__autoreleasing *)error
{
    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:uuid];
    // Tear down the existing input both when replacing it and when the new device could
    // not be resolved (deselection). Order matters: stop before mutating the session.
    if (self.deviceInput.device || !device) {
        [self stopCaptureSession];
        [self.session removeInput:self.deviceInput];
        [self.deviceInput.device unlockForConfiguration];
        self.deviceInput = nil;
        self.isDeviceLocked = NO;
        self.presetFormat = nil;
    }
    if (!device) {
        if (uuid.length < 1) {
            [self AVCaptureLog:LOG_INFO withFormat:@"No device selected"];
            self.deviceUUID = uuid;
            return NO;
        } else {
            [self AVCaptureLog:LOG_WARNING withFormat:@"Unable to initialize device with unique ID '%@'", uuid];
            return NO;
        }
    }
    // Persist the resolved device name/ID back into the source settings.
    const char *deviceName = device.localizedName.UTF8String;
    obs_data_set_string(self.captureInfo->settings, "device_name", deviceName);
    obs_data_set_string(self.captureInfo->settings, "device", device.uniqueID.UTF8String);
    [self AVCaptureLog:LOG_INFO withFormat:@"Selected device '%@'", device.localizedName];
    self.deviceUUID = device.uniqueID;
    BOOL isAudioSupported = [device hasMediaType:AVMediaTypeAudio] || [device hasMediaType:AVMediaTypeMuxed];
    obs_source_set_audio_active(self.captureInfo->source, isAudioSupported);
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:error];
    if (!deviceInput) {
        return NO;
    }
    [self.session beginConfiguration];
    if ([self.session canAddInput:deviceInput]) {
        [self.session addInput:deviceInput];
        self.deviceInput = deviceInput;
    } else {
        if (error) {
            NSDictionary *userInfo = @{
                NSLocalizedDescriptionKey: [NSString
                    stringWithFormat:@"Unable to add device '%@' as deviceInput to capture session", self.deviceUUID]
            };
            *error = [NSError errorWithDomain:self.errorDomain code:-107 userInfo:userInfo];
        }
        [self.session commitConfiguration];
        return NO;
    }
    // Only video-producing devices (video or muxed audio/video) are supported.
    AVCaptureDeviceFormat *deviceFormat = device.activeFormat;
    CMMediaType mediaType = CMFormatDescriptionGetMediaType(deviceFormat.formatDescription);
    if (mediaType != kCMMediaType_Video && mediaType != kCMMediaType_Muxed) {
        if (error) {
            NSDictionary *userInfo = @{
                NSLocalizedDescriptionKey: [NSString stringWithFormat:@"CMMediaType '%@' is not supported",
                                                                      [OBSAVCapture stringFromFourCharCode:mediaType]]
            };
            *error = [NSError errorWithDomain:self.errorDomain code:-108 userInfo:userInfo];
        }
        [self.session removeInput:deviceInput];
        [self.session commitConfiguration];
        return NO;
    }
    if (self.isFastPath) {
        // Fast path renders via IOSurface/texture; force BGRA output. Assigning nil first
        // resets videoSettings to the device defaults before overriding the pixel format.
        self.videoOutput.videoSettings = nil;
        NSMutableDictionary *videoSettings =
            [NSMutableDictionary dictionaryWithDictionary:self.videoOutput.videoSettings];
        FourCharCode targetPixelFormatType = kCVPixelFormatType_32BGRA;
        [videoSettings setObject:@(targetPixelFormatType)
                          forKey:(__bridge NSString *) kCVPixelBufferPixelFormatTypeKey];
        self.videoOutput.videoSettings = videoSettings;
    } else {
        // Reset to device defaults, then keep the device-native fourcc if OBS can convert
        // it directly; otherwise fall back to BGRA.
        self.videoOutput.videoSettings = nil;
        FourCharCode subType = [[self.videoOutput.videoSettings
            objectForKey:(__bridge NSString *) kCVPixelBufferPixelFormatTypeKey] unsignedIntValue];
        if ([OBSAVCapture formatFromSubtype:subType] != VIDEO_FORMAT_NONE) {
            [self AVCaptureLog:LOG_DEBUG
                    withFormat:@"Using native fourcc '%@'", [OBSAVCapture stringFromFourCharCode:subType]];
        } else {
            [self AVCaptureLog:LOG_DEBUG withFormat:@"Using fallback fourcc '%@' ('%@', 0x%08x unsupported)",
                                                    [OBSAVCapture stringFromFourCharCode:kCVPixelFormatType_32BGRA],
                                                    [OBSAVCapture stringFromFourCharCode:subType], subType];
            NSMutableDictionary *videoSettings =
                [NSMutableDictionary dictionaryWithDictionary:self.videoOutput.videoSettings];
            [videoSettings setObject:@(kCVPixelFormatType_32BGRA)
                              forKey:(__bridge NSString *) kCVPixelBufferPixelFormatTypeKey];
            self.videoOutput.videoSettings = videoSettings;
        }
    }
    [self.session commitConfiguration];
    return YES;
}
  232. - (void)startCaptureSession
  233. {
  234. if (!self.session.running) {
  235. [self.session startRunning];
  236. }
  237. }
/// Stops the capture session (if running) and releases per-source output state.
///
/// For fast-path (texture-based) sources the cached GPU texture and retained IOSurfaces
/// are released; for async sources a NULL frame is output so OBS clears the last
/// displayed video frame.
- (void)stopCaptureSession
{
    if (self.session.running) {
        [self.session stopRunning];
    }
    if (self.captureInfo->isFastPath) {
        if (self.captureInfo->texture) {
            // Texture destruction must happen inside the graphics context.
            obs_enter_graphics();
            gs_texture_destroy(self.captureInfo->texture);
            obs_leave_graphics();
            self.captureInfo->texture = NULL;
        }
        if (self.captureInfo->currentSurface) {
            // NOTE(review): presumably balances a use-count increment and CFRetain taken
            // when the frame was received — confirm against the sample buffer delegate.
            IOSurfaceDecrementUseCount(self.captureInfo->currentSurface);
            CFRelease(self.captureInfo->currentSurface);
            self.captureInfo->currentSurface = NULL;
        }
        if (self.captureInfo->previousSurface) {
            IOSurfaceDecrementUseCount(self.captureInfo->previousSurface);
            CFRelease(self.captureInfo->previousSurface);
            self.captureInfo->previousSurface = NULL;
        }
    } else {
        if (self.captureInfo->source) {
            // Outputting NULL clears the source's last async video frame.
            obs_source_output_video(self.captureInfo->source, NULL);
        }
    }
}
/// Configures the capture session using an AVCaptureSession preset.
///
/// If the device was previously locked for manual format configuration, the saved
/// pre-configuration format and frame durations are restored (only when the requested
/// preset matches the session's current preset) and the device is unlocked, since preset
/// based capture must not run with a locked device.
///
/// @param preset The AVCaptureSessionPreset to apply.
/// @param error Populated on failure (codes -108/-201/-202).
/// @return YES if the preset was applied, NO otherwise.
///
/// NOTE(review): error code -108 is also used by -switchCaptureDevice:withError: for an
/// unsupported media type; consider a distinct code. The inner canSetSessionPreset: check
/// duplicates the outer condition and appears redundant — confirm whether unlocking the
/// device can change the result between the two checks.
- (BOOL)configureSessionWithPreset:(AVCaptureSessionPreset)preset withError:(NSError *__autoreleasing *)error
{
    if (!self.deviceInput.device) {
        if (error) {
            NSDictionary *userInfo =
                @{NSLocalizedDescriptionKey: @"Unable to set session preset without capture device"};
            *error = [NSError errorWithDomain:self.errorDomain code:-108 userInfo:userInfo];
        }
        return NO;
    }
    if (![self.deviceInput.device supportsAVCaptureSessionPreset:preset]) {
        if (error) {
            NSDictionary *userInfo = @{
                NSLocalizedDescriptionKey: [NSString stringWithFormat:@"Preset %@ not supported by device %@",
                                                                      [OBSAVCapture stringFromCapturePreset:preset],
                                                                      self.deviceInput.device.localizedName]
            };
            *error = [NSError errorWithDomain:self.errorDomain code:-201 userInfo:userInfo];
        }
        return NO;
    }
    if ([self.session canSetSessionPreset:preset]) {
        if (self.isDeviceLocked) {
            if ([preset isEqualToString:self.session.sessionPreset]) {
                // Restore the format/frame durations captured before manual configuration.
                if (self.deviceInput.device.activeFormat) {
                    self.deviceInput.device.activeFormat = self.presetFormat.activeFormat;
                    self.deviceInput.device.activeVideoMinFrameDuration = self.presetFormat.minFrameRate;
                    self.deviceInput.device.activeVideoMaxFrameDuration = self.presetFormat.maxFrameRate;
                }
                self.presetFormat = nil;
            }
            [self.deviceInput.device unlockForConfiguration];
            self.isDeviceLocked = NO;
        }
        if ([self.session canSetSessionPreset:preset]) {
            self.session.sessionPreset = preset;
        }
    } else {
        if (error) {
            NSDictionary *userInfo = @{
                NSLocalizedDescriptionKey: [NSString stringWithFormat:@"Preset %@ not supported by capture session",
                                                                      [OBSAVCapture stringFromCapturePreset:preset]]
            };
            *error = [NSError errorWithDomain:self.errorDomain code:-202 userInfo:userInfo];
        }
        return NO;
    }
    self.isPresetBased = YES;
    return YES;
}
  316. - (BOOL)configureSession:(NSError *__autoreleasing *)error
  317. {
  318. OBSAVCaptureMediaFPS fps;
  319. if (!obs_data_get_frames_per_second(self.captureInfo->settings, "frame_rate", &fps, NULL)) {
  320. [self AVCaptureLog:LOG_DEBUG withFormat:@"No valid framerate found in settings"];
  321. return NO;
  322. }
  323. CMTime time = {.value = fps.denominator, .timescale = fps.numerator, .flags = 1};
  324. const char *selectedFormat = obs_data_get_string(self.captureInfo->settings, "supported_format");
  325. NSString *selectedFormatNSString = selectedFormat != NULL ? @(selectedFormat) : @"";
  326. AVCaptureDeviceFormat *format = nil;
  327. FourCharCode subtype;
  328. OBSAVCaptureColorSpace colorSpace;
  329. bool fpsSupported = false;
  330. if (![selectedFormatNSString isEqualToString:@""]) {
  331. for (AVCaptureDeviceFormat *formatCandidate in [self.deviceInput.device.formats reverseObjectEnumerator]) {
  332. if ([selectedFormatNSString isEqualToString:formatCandidate.obsPropertyListInternalRepresentation]) {
  333. CMFormatDescriptionRef formatDescription = formatCandidate.formatDescription;
  334. FourCharCode formatFourCC = CMFormatDescriptionGetMediaSubType(formatDescription);
  335. format = formatCandidate;
  336. subtype = formatFourCC;
  337. colorSpace = [OBSAVCapture colorspaceFromDescription:formatDescription];
  338. break;
  339. }
  340. }
  341. } else {
  342. //try to migrate from the legacy suite of properties
  343. int legacyVideoRange = (int) obs_data_get_int(self.captureInfo->settings, "video_range");
  344. int legacyInputFormat = (int) obs_data_get_int(self.captureInfo->settings, "input_format");
  345. int legacyColorSpace = (int) obs_data_get_int(self.captureInfo->settings, "color_space");
  346. CMVideoDimensions legacyDimensions = [OBSAVCapture legacyDimensionsFromSettings:self.captureInfo->settings];
  347. for (AVCaptureDeviceFormat *formatCandidate in [self.deviceInput.device.formats reverseObjectEnumerator]) {
  348. CMFormatDescriptionRef formatDescription = formatCandidate.formatDescription;
  349. CMVideoDimensions formatDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
  350. int formatColorSpace = [OBSAVCapture colorspaceFromDescription:formatDescription];
  351. int formatInputFormat =
  352. [OBSAVCapture formatFromSubtype:CMFormatDescriptionGetMediaSubType(formatDescription)];
  353. int formatVideoRange = [OBSAVCapture isFullRangeFormat:formatInputFormat] ? VIDEO_RANGE_FULL
  354. : VIDEO_RANGE_PARTIAL;
  355. bool foundFormat = legacyVideoRange == formatVideoRange && legacyInputFormat == formatInputFormat &&
  356. legacyColorSpace == formatColorSpace &&
  357. legacyDimensions.width == formatDimensions.width &&
  358. legacyDimensions.height == formatDimensions.height;
  359. if (foundFormat) {
  360. format = formatCandidate;
  361. subtype = formatInputFormat;
  362. colorSpace = formatColorSpace;
  363. break;
  364. }
  365. }
  366. }
  367. if (!format) {
  368. [self AVCaptureLog:LOG_WARNING withFormat:@"Configured format not found on device"];
  369. return NO;
  370. }
  371. for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
  372. if (CMTimeCompare(range.maxFrameDuration, time) >= 0 && CMTimeCompare(range.minFrameDuration, time) <= 0) {
  373. fpsSupported = true;
  374. break;
  375. }
  376. }
  377. if (!fpsSupported) {
  378. OBSAVCaptureMediaFPS fallbackFPS = [OBSAVCapture fallbackFrameRateForFormat:format];
  379. if (fallbackFPS.denominator > 0 && fallbackFPS.numerator > 0) {
  380. [self AVCaptureLog:LOG_WARNING withFormat:@"Frame rate is not supported: %g FPS (%u/%u), \n"
  381. " falling back to value supported by device: %G FPS (%u/%u)",
  382. media_frames_per_second_to_fps(fps), fps.numerator,
  383. fps.denominator, media_frames_per_second_to_fps(fallbackFPS),
  384. fallbackFPS.numerator, fallbackFPS.denominator];
  385. obs_data_set_frames_per_second(self.captureInfo->settings, "frame_rate", fallbackFPS, NULL);
  386. time.value = fallbackFPS.denominator;
  387. time.timescale = fallbackFPS.numerator;
  388. } else {
  389. [self AVCaptureLog:LOG_WARNING
  390. withFormat:@"Frame rate is not supported: %g FPS (%u/%u), \n"
  391. " no supported fallback FPS found",
  392. media_frames_per_second_to_fps(fps), fps.numerator, fps.denominator];
  393. return NO;
  394. }
  395. }
  396. [self.session beginConfiguration];
  397. self.isDeviceLocked = [self.deviceInput.device lockForConfiguration:error];
  398. if (!self.isDeviceLocked) {
  399. [self AVCaptureLog:LOG_WARNING withFormat:@"Could not lock device for configuration"];
  400. return NO;
  401. }
  402. [self AVCaptureLog:LOG_INFO
  403. withFormat:@"Capturing '%@' (%@):\n"
  404. " Using Format : %@ \n"
  405. " FPS : %g (%u/%u)\n"
  406. " Frame Interval : %g\u00a0s\n",
  407. self.deviceInput.device.localizedName, self.deviceInput.device.uniqueID,
  408. format.obsPropertyListDescription, media_frames_per_second_to_fps(fps), fps.numerator,
  409. fps.denominator, media_frames_per_second_to_frame_interval(fps)];
  410. OBSAVCaptureVideoInfo newInfo = {.colorSpace = _videoInfo.colorSpace,
  411. .fourCC = _videoInfo.fourCC,
  412. .isValid = false};
  413. self.videoInfo = newInfo;
  414. self.captureInfo->configuredColorSpace = colorSpace;
  415. self.captureInfo->configuredFourCC = subtype;
  416. self.isPresetBased = NO;
  417. if (!self.presetFormat) {
  418. OBSAVCapturePresetInfo *presetInfo = [[OBSAVCapturePresetInfo alloc] init];
  419. presetInfo.activeFormat = self.deviceInput.device.activeFormat;
  420. presetInfo.minFrameRate = self.deviceInput.device.activeVideoMinFrameDuration;
  421. presetInfo.maxFrameRate = self.deviceInput.device.activeVideoMaxFrameDuration;
  422. self.presetFormat = presetInfo;
  423. }
  424. self.deviceInput.device.activeFormat = format;
  425. self.deviceInput.device.activeVideoMinFrameDuration = time;
  426. self.deviceInput.device.activeVideoMaxFrameDuration = time;
  427. [self.session commitConfiguration];
  428. return YES;
  429. }
/// Reacts to settings changes: logs and clears any pending capture errors, switches the
/// device if the configured UUID changed, and re-applies preset or manual configuration
/// as required before (re)starting the session on the session queue.
///
/// @param error Forwarded to -switchCaptureDevice:withError: and the configure methods.
/// @return YES if the session reflects the current settings, NO on failure.
- (BOOL)updateSessionwithError:(NSError *__autoreleasing *)error
{
    // NOTE(review): the `default:` label below sits inside the compound block opened by
    // the ColorSpace case. This is legal C and behaves like a normal default case, but it
    // is unconventional — consider moving it out to the switch level.
    switch (self.captureInfo->lastError) {
        case OBSAVCaptureError_SampleBufferFormat:
            if (self.captureInfo->sampleBufferDescription) {
                FourCharCode mediaSubType =
                    CMFormatDescriptionGetMediaSubType(self.captureInfo->sampleBufferDescription);
                [self AVCaptureLog:LOG_ERROR
                        withFormat:@"Incompatible sample buffer format received for sync AVCapture source: %@ (0x%x)",
                                   [OBSAVCapture stringFromFourCharCode:mediaSubType], mediaSubType];
            }
            break;
        case OBSAVCaptureError_ColorSpace: {
            if (self.captureInfo->sampleBufferDescription) {
                FourCharCode mediaSubType =
                    CMFormatDescriptionGetMediaSubType(self.captureInfo->sampleBufferDescription);
                BOOL isSampleBufferFullRange = [OBSAVCapture isFullRangeFormat:mediaSubType];
                OBSAVCaptureColorSpace sampleBufferColorSpace =
                    [OBSAVCapture colorspaceFromDescription:self.captureInfo->sampleBufferDescription];
                OBSAVCaptureVideoRange sampleBufferRangeType = isSampleBufferFullRange ? VIDEO_RANGE_FULL
                                                                                       : VIDEO_RANGE_PARTIAL;
                [self AVCaptureLog:LOG_ERROR
                        withFormat:@"Failed to get colorspace parameters for colorspace %u and range %u",
                                   sampleBufferColorSpace, sampleBufferRangeType];
            }
            break;
            default:
                // Clear any previously recorded error state.
                self.captureInfo->lastError = OBSAVCaptureError_NoError;
                self.captureInfo->sampleBufferDescription = NULL;
                break;
        }
    }
    switch (self.captureInfo->lastAudioError) {
        case OBSAVCaptureError_AudioBuffer: {
            // NOTE(review): class-method log call here, instance-method -AVCaptureLog:
            // elsewhere — confirm both variants exist and behave identically.
            [OBSAVCapture AVCaptureLog:LOG_ERROR
                            withFormat:@"Unable to retrieve required AudioBufferList size from sample buffer."];
            break;
        }
        default:
            self.captureInfo->lastAudioError = OBSAVCaptureError_NoError;
            break;
    }
    NSString *newDeviceUUID = [OBSAVCapture stringFromSettings:self.captureInfo->settings withSetting:@"device"];
    NSString *presetName = [OBSAVCapture stringFromSettings:self.captureInfo->settings withSetting:@"preset"];
    BOOL isPresetEnabled = obs_data_get_bool(self.captureInfo->settings, "use_preset");
    BOOL updateSession = YES;
    if (![self.deviceUUID isEqualToString:newDeviceUUID]) {
        if (![self switchCaptureDevice:newDeviceUUID withError:error]) {
            obs_source_update_properties(self.captureInfo->source);
            return NO;
        }
    } else if (self.isPresetBased && isPresetEnabled && [presetName isEqualToString:self.session.sessionPreset]) {
        // Same device, same preset: nothing to reconfigure.
        updateSession = NO;
    }
    if (updateSession) {
        if (isPresetEnabled) {
            [self configureSessionWithPreset:presetName withError:error];
        } else {
            if (![self configureSession:error]) {
                obs_source_update_properties(self.captureInfo->source);
                return NO;
            }
        }
        // (Re)start the session on the serial session queue; weak reference avoids a cycle.
        __weak OBSAVCapture *weakSelf = self;
        dispatch_async(self.sessionQueue, ^{
            [weakSelf startCaptureSession];
        });
    }
    BOOL isAudioAvailable = [self.deviceInput.device hasMediaType:AVMediaTypeAudio] ||
                            [self.deviceInput.device hasMediaType:AVMediaTypeMuxed];
    obs_source_set_audio_active(self.captureInfo->source, isAudioAvailable);
    if (!self.isFastPath) {
        BOOL isBufferingEnabled = obs_data_get_bool(self.captureInfo->settings, "buffering");
        obs_source_set_async_unbuffered(self.captureInfo->source, !isBufferingEnabled);
    }
    return YES;
}
  507. #pragma mark - OBS Settings Helpers
  508. + (CMVideoDimensions)legacyDimensionsFromSettings:(void *)settings
  509. {
  510. CMVideoDimensions zero = {0};
  511. NSString *jsonString = [OBSAVCapture stringFromSettings:settings withSetting:@"resolution"];
  512. NSDictionary *data = [NSJSONSerialization JSONObjectWithData:[jsonString dataUsingEncoding:NSUTF8StringEncoding]
  513. options:0
  514. error:nil];
  515. if (data.count == 0) {
  516. return zero;
  517. }
  518. NSInteger width = [[data objectForKey:@"width"] intValue];
  519. NSInteger height = [[data objectForKey:@"height"] intValue];
  520. if (!width || !height) {
  521. return zero;
  522. }
  523. CMVideoDimensions dimensions = {.width = (int32_t) clamp_Uint(width, 0, UINT32_MAX),
  524. .height = (int32_t) clamp_Uint(height, 0, UINT32_MAX)};
  525. return dimensions;
  526. }
  527. + (OBSAVCaptureMediaFPS)fallbackFrameRateForFormat:(AVCaptureDeviceFormat *)format
  528. {
  529. struct obs_video_info video_info;
  530. bool result = obs_get_video_info(&video_info);
  531. double outputFPS = result ? ((double) video_info.fps_num / (double) video_info.fps_den) : 0;
  532. double closestUpTo = 0;
  533. double closestAbove = DBL_MAX;
  534. OBSAVCaptureMediaFPS closestUpToMFPS = {};
  535. OBSAVCaptureMediaFPS closestAboveMFPS = {};
  536. for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
  537. if (range.maxFrameRate > closestUpTo && range.maxFrameRate <= outputFPS) {
  538. closestUpTo = range.maxFrameRate;
  539. closestUpToMFPS.numerator = (uint32_t) clamp_Uint(range.minFrameDuration.timescale, 0, UINT32_MAX);
  540. closestUpToMFPS.denominator = (uint32_t) clamp_Uint(range.minFrameDuration.value, 0, UINT32_MAX);
  541. }
  542. if (range.minFrameRate > outputFPS && range.minFrameRate < closestAbove) {
  543. closestAbove = range.minFrameRate;
  544. closestAboveMFPS.numerator = (uint32_t) clamp_Uint(range.maxFrameDuration.timescale, 0, UINT32_MAX);
  545. closestAboveMFPS.denominator = (uint32_t) clamp_Uint(range.maxFrameDuration.value, 0, UINT32_MAX);
  546. }
  547. }
  548. if (closestUpTo > 0) {
  549. return closestUpToMFPS;
  550. } else {
  551. return closestAboveMFPS;
  552. }
  553. }
  554. + (NSString *)aspectRatioStringFromDimensions:(CMVideoDimensions)dimensions
  555. {
  556. if (dimensions.width <= 0 || dimensions.height <= 0) {
  557. return @"";
  558. }
  559. double divisor = (double) gcd(dimensions.width, dimensions.height);
  560. if (divisor <= 50) {
  561. if (dimensions.width > dimensions.height) {
  562. double x = (double) dimensions.width / (double) dimensions.height;
  563. return [NSString stringWithFormat:@"%.2f:1", x];
  564. } else {
  565. double y = (double) dimensions.height / (double) dimensions.width;
  566. return [NSString stringWithFormat:@"1:%.2f", y];
  567. }
  568. } else {
  569. SInt32 x = dimensions.width / (SInt32) divisor;
  570. SInt32 y = dimensions.height / (SInt32) divisor;
  571. if (x == 8 && y == 5) {
  572. x = 16;
  573. y = 10;
  574. }
  575. return [NSString stringWithFormat:@"%i:%i", x, y];
  576. }
  577. }
/// Reads a string setting from an OBS data object, substituting an empty string when the
/// setting is unset or the settings object is nil.
///
/// @param settings Opaque pointer to an obs_data_t settings object (may be NULL).
/// @param setting The name of the setting to read.
/// @return The setting's value, or an empty string.
+ (NSString *)stringFromSettings:(void *)settings withSetting:(NSString *)setting
{
    return [OBSAVCapture stringFromSettings:settings withSetting:setting withDefault:@""];
}
  582. + (NSString *)stringFromSettings:(void *)settings withSetting:(NSString *)setting withDefault:(NSString *)defaultValue
  583. {
  584. NSString *result;
  585. if (settings) {
  586. const char *setting_value = obs_data_get_string(settings, setting.UTF8String);
  587. if (!setting_value) {
  588. result = [NSString stringWithString:defaultValue];
  589. } else {
  590. result = @(setting_value);
  591. }
  592. } else {
  593. result = [NSString stringWithString:defaultValue];
  594. }
  595. return result;
  596. }
/// Returns a localization key describing the macOS system video effect currently active on the
/// device, `@"Warning.Effect.Multiple"` when more than one effect is active, or nil when none are.
/// Each availability-gated check overwrites `effectWarning`, so with a single active effect the
/// last match wins; with several, the count collapses them into the combined warning below.
+ (NSString *)effectsWarningForDevice:(AVCaptureDevice *)device
{
    int effectsCount = 0;
    NSString *effectWarning = nil;
    if (@available(macOS 12.0, *)) {
        if (device.portraitEffectActive) {
            effectWarning = @"Warning.Effect.Portrait";
            effectsCount++;
        }
    }
    if (@available(macOS 12.3, *)) {
        if (device.centerStageActive) {
            effectWarning = @"Warning.Effect.CenterStage";
            effectsCount++;
        }
    }
    if (@available(macOS 13.0, *)) {
        if (device.studioLightActive) {
            effectWarning = @"Warning.Effect.StudioLight";
            effectsCount++;
        }
    }
    if (@available(macOS 14.0, *)) {
        /// Reaction effects do not follow the same paradigm as other effects in terms of checking whether they are active. According to Apple, this is because a device instance property `reactionEffectsActive` would have been ambiguous (conflicting with whether a reaction is currently rendering).
        ///
        /// Instead, Apple exposes the `AVCaptureDevice.reactionEffectGesturesEnabled` class property (an equivalent exists for all other effects, but is hidden/private) to tell us whether the effect is enabled application-wide, as well as the `device.canPerformReactionEffects` instance property to tell us whether the device's active format currently supports the effect.
        ///
        /// The logical conjunction of these two properties tells us whether the effect is 'active'; i.e. whether putting our thumbs inside the video frame will make fireworks appear. The device instance properties for other effects are a convenience 'shorthand' for this private class/instance property combination.
        if (device.canPerformReactionEffects && AVCaptureDevice.reactionEffectGesturesEnabled) {
            effectWarning = @"Warning.Effect.Reactions";
            effectsCount++;
        }
    }
    // Compile-time guard: `backgroundReplacementActive` only exists in the macOS 15 SDK.
#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 150000
    if (@available(macOS 15.0, *)) {
        if (device.backgroundReplacementActive) {
            effectWarning = @"Warning.Effect.BackgroundReplacement";
            effectsCount++;
        }
    }
#endif
    if (effectsCount > 1) {
        effectWarning = @"Warning.Effect.Multiple";
    }
    return effectWarning;
}
  643. #pragma mark - Format Conversion Helpers
  644. + (NSString *)stringFromSubType:(FourCharCode)subtype
  645. {
  646. switch (subtype) {
  647. case kCVPixelFormatType_422YpCbCr8:
  648. return @"UYVY (2vuy)";
  649. case kCVPixelFormatType_422YpCbCr8_yuvs:
  650. return @"YUY2 (yuvs)";
  651. case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
  652. return @"NV12 (420v)";
  653. case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
  654. return @"NV12 (420f)";
  655. case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange:
  656. return @"P010 (xf20)";
  657. case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange:
  658. return @"P010 (x420)";
  659. case kCVPixelFormatType_32ARGB:
  660. return @"ARGB - 32ARGB";
  661. case kCVPixelFormatType_32BGRA:
  662. return @"BGRA - 32BGRA";
  663. case kCMVideoCodecType_Animation:
  664. return @"Apple Animation";
  665. case kCMVideoCodecType_Cinepak:
  666. return @"Cinepak";
  667. case kCMVideoCodecType_JPEG:
  668. return @"JPEG";
  669. case kCMVideoCodecType_JPEG_OpenDML:
  670. return @"MJPEG - JPEG OpenDML";
  671. case kCMVideoCodecType_SorensonVideo:
  672. return @"Sorenson Video";
  673. case kCMVideoCodecType_SorensonVideo3:
  674. return @"Sorenson Video 3";
  675. case kCMVideoCodecType_H263:
  676. return @"H.263";
  677. case kCMVideoCodecType_H264:
  678. return @"H.264";
  679. case kCMVideoCodecType_MPEG4Video:
  680. return @"MPEG-4";
  681. case kCMVideoCodecType_MPEG2Video:
  682. return @"MPEG-2";
  683. case kCMVideoCodecType_MPEG1Video:
  684. return @"MPEG-1";
  685. case kCMVideoCodecType_DVCNTSC:
  686. return @"DV NTSC";
  687. case kCMVideoCodecType_DVCPAL:
  688. return @"DV PAL";
  689. case kCMVideoCodecType_DVCProPAL:
  690. return @"Panasonic DVCPro Pal";
  691. case kCMVideoCodecType_DVCPro50NTSC:
  692. return @"Panasonic DVCPro-50 NTSC";
  693. case kCMVideoCodecType_DVCPro50PAL:
  694. return @"Panasonic DVCPro-50 PAL";
  695. case kCMVideoCodecType_DVCPROHD720p60:
  696. return @"Panasonic DVCPro-HD 720p60";
  697. case kCMVideoCodecType_DVCPROHD720p50:
  698. return @"Panasonic DVCPro-HD 720p50";
  699. case kCMVideoCodecType_DVCPROHD1080i60:
  700. return @"Panasonic DVCPro-HD 1080i60";
  701. case kCMVideoCodecType_DVCPROHD1080i50:
  702. return @"Panasonic DVCPro-HD 1080i50";
  703. case kCMVideoCodecType_DVCPROHD1080p30:
  704. return @"Panasonic DVCPro-HD 1080p30";
  705. case kCMVideoCodecType_DVCPROHD1080p25:
  706. return @"Panasonic DVCPro-HD 1080p25";
  707. case kCMVideoCodecType_AppleProRes4444:
  708. return @"Apple ProRes 4444";
  709. case kCMVideoCodecType_AppleProRes422HQ:
  710. return @"Apple ProRes 422 HQ";
  711. case kCMVideoCodecType_AppleProRes422:
  712. return @"Apple ProRes 422";
  713. case kCMVideoCodecType_AppleProRes422LT:
  714. return @"Apple ProRes 422 LT";
  715. case kCMVideoCodecType_AppleProRes422Proxy:
  716. return @"Apple ProRes 422 Proxy";
  717. default:
  718. return @"Unknown";
  719. }
  720. }
  721. + (NSString *)stringFromColorspace:(enum video_colorspace)colorspace
  722. {
  723. switch (colorspace) {
  724. case VIDEO_CS_DEFAULT:
  725. return @"Default";
  726. case VIDEO_CS_601:
  727. return @"CS 601";
  728. case VIDEO_CS_709:
  729. return @"CS 709";
  730. case VIDEO_CS_SRGB:
  731. return @"sRGB";
  732. case VIDEO_CS_2100_PQ:
  733. return @"CS 2100 (PQ)";
  734. case VIDEO_CS_2100_HLG:
  735. return @"CS 2100 (HLG)";
  736. default:
  737. return @"Unknown";
  738. }
  739. }
  740. + (NSString *)stringFromVideoRange:(enum video_range_type)videoRange
  741. {
  742. switch (videoRange) {
  743. case VIDEO_RANGE_FULL:
  744. return @"Full";
  745. case VIDEO_RANGE_PARTIAL:
  746. return @"Partial";
  747. case VIDEO_RANGE_DEFAULT:
  748. return @"Default";
  749. }
  750. }
  751. + (NSString *)stringFromCapturePreset:(AVCaptureSessionPreset)preset
  752. {
  753. NSDictionary *presetDescriptions = @{
  754. AVCaptureSessionPresetLow: @"Low",
  755. AVCaptureSessionPresetMedium: @"Medium",
  756. AVCaptureSessionPresetHigh: @"High",
  757. AVCaptureSessionPreset320x240: @"320x240",
  758. AVCaptureSessionPreset352x288: @"352x288",
  759. AVCaptureSessionPreset640x480: @"640x480",
  760. AVCaptureSessionPreset960x540: @"960x460",
  761. AVCaptureSessionPreset1280x720: @"1280x720",
  762. AVCaptureSessionPreset1920x1080: @"1920x1080",
  763. AVCaptureSessionPreset3840x2160: @"3840x2160",
  764. };
  765. NSString *presetDescription = [presetDescriptions objectForKey:preset];
  766. if (!presetDescription) {
  767. return [NSString stringWithFormat:@"Unknown (%@)", preset];
  768. } else {
  769. return presetDescription;
  770. }
  771. }
  772. + (NSString *)stringFromFourCharCode:(OSType)fourCharCode
  773. {
  774. char cString[5] = {(fourCharCode >> 24) & 0xFF, (fourCharCode >> 16) & 0xFF, (fourCharCode >> 8) & 0xFF,
  775. fourCharCode & 0xFF, 0};
  776. NSString *codeString = @(cString);
  777. return codeString;
  778. }
  779. + (FourCharCode)fourCharCodeFromString:(NSString *)codeString
  780. {
  781. FourCharCode fourCharCode;
  782. const char *cString = codeString.UTF8String;
  783. fourCharCode = (cString[0] << 24) | (cString[1] << 16) | (cString[2] << 8) | cString[3];
  784. return fourCharCode;
  785. }
  786. + (BOOL)isValidColorspace:(enum video_colorspace)colorspace
  787. {
  788. switch (colorspace) {
  789. case VIDEO_CS_DEFAULT:
  790. case VIDEO_CS_601:
  791. case VIDEO_CS_709:
  792. return YES;
  793. default:
  794. return NO;
  795. }
  796. }
  797. + (BOOL)isValidVideoRange:(enum video_range_type)videoRange
  798. {
  799. switch (videoRange) {
  800. case VIDEO_RANGE_DEFAULT:
  801. case VIDEO_RANGE_PARTIAL:
  802. case VIDEO_RANGE_FULL:
  803. return YES;
  804. default:
  805. return NO;
  806. }
  807. }
  808. + (BOOL)isFullRangeFormat:(FourCharCode)pixelFormat
  809. {
  810. switch (pixelFormat) {
  811. case kCVPixelFormatType_420YpCbCr8PlanarFullRange:
  812. case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
  813. case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange:
  814. case kCVPixelFormatType_422YpCbCr8FullRange:
  815. return YES;
  816. default:
  817. return NO;
  818. }
  819. }
  820. + (OBSAVCaptureVideoFormat)formatFromSubtype:(FourCharCode)subtype
  821. {
  822. switch (subtype) {
  823. case kCVPixelFormatType_422YpCbCr8:
  824. return VIDEO_FORMAT_UYVY;
  825. case kCVPixelFormatType_422YpCbCr8_yuvs:
  826. return VIDEO_FORMAT_YUY2;
  827. case kCVPixelFormatType_32BGRA:
  828. return VIDEO_FORMAT_BGRA;
  829. case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
  830. case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
  831. return VIDEO_FORMAT_NV12;
  832. case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange:
  833. case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange:
  834. return VIDEO_FORMAT_P010;
  835. default:
  836. return VIDEO_FORMAT_NONE;
  837. }
  838. }
  839. + (FourCharCode)fourCharCodeFromFormat:(OBSAVCaptureVideoFormat)format withRange:(enum video_range_type)videoRange
  840. {
  841. switch (format) {
  842. case VIDEO_FORMAT_UYVY:
  843. return kCVPixelFormatType_422YpCbCr8;
  844. case VIDEO_FORMAT_YUY2:
  845. return kCVPixelFormatType_422YpCbCr8_yuvs;
  846. case VIDEO_FORMAT_NV12:
  847. if (videoRange == VIDEO_RANGE_FULL) {
  848. return kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
  849. } else {
  850. return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
  851. }
  852. case VIDEO_FORMAT_P010:
  853. if (videoRange == VIDEO_RANGE_FULL) {
  854. return kCVPixelFormatType_420YpCbCr10BiPlanarFullRange;
  855. } else {
  856. return kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange;
  857. }
  858. case VIDEO_FORMAT_BGRA:
  859. return kCVPixelFormatType_32BGRA;
  860. default:
  861. return 0;
  862. }
  863. }
  864. + (FourCharCode)fourCharCodeFromFormat:(OBSAVCaptureVideoFormat)format
  865. {
  866. return [OBSAVCapture fourCharCodeFromFormat:format withRange:VIDEO_RANGE_PARTIAL];
  867. }
/// Builds a human-readable summary of a format's supported frame-rate ranges, e.g.
/// "25, 30, 50-60 FPS". Ranges are listed ascending; when there are more than
/// kMaxFrameRateRangesInDescription ranges, the output collapses to an overall
/// "min-max FPS (N values)" summary.
/// NOTE(review): returns nil when `ranges` is empty (frameRateDescription is never assigned) —
/// callers appear to tolerate a nil description; confirm before changing.
+ (NSString *)frameRateDescription:(NSArray<AVFrameRateRange *> *)ranges
{
    // The videoSupportedFrameRateRanges property seems to provide frame rate ranges in this order, but since that
    // ordering does not seem to be guaranteed, ensure they are sorted anyway.
    // Sort descending by maxFrameRate, then by minFrameRate as tiebreaker.
    NSArray<AVFrameRateRange *> *sortedRangesDescending = [ranges
        sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *_Nonnull lhs, AVFrameRateRange *_Nonnull rhs) {
            if (lhs.maxFrameRate > rhs.maxFrameRate) {
                return NSOrderedAscending;
            } else if (lhs.maxFrameRate < rhs.maxFrameRate) {
                return NSOrderedDescending;
            }
            if (lhs.minFrameRate > rhs.minFrameRate) {
                return NSOrderedAscending;
            } else if (lhs.minFrameRate < rhs.minFrameRate) {
                return NSOrderedDescending;
            }
            return NSOrderedSame;
        }];
    NSString *frameRateDescription;
    NSMutableArray *frameRateDescriptions = [[NSMutableArray alloc] initWithCapacity:ranges.count];
    // Iterate in ascending order; render whole-number rates without decimals.
    for (AVFrameRateRange *range in [sortedRangesDescending reverseObjectEnumerator]) {
        // Round to two decimal places so e.g. 29.97 survives but float noise does not.
        double minFrameRate = round(range.minFrameRate * 100) / 100;
        double maxFrameRate = round(range.maxFrameRate * 100) / 100;
        if (minFrameRate == maxFrameRate) {
            // Fixed frame rate: render a single value.
            if (fmod(minFrameRate, 1.0) == 0 && fmod(maxFrameRate, 1.0) == 0) {
                [frameRateDescriptions addObject:[NSString stringWithFormat:@"%.0f", maxFrameRate]];
            } else {
                [frameRateDescriptions addObject:[NSString stringWithFormat:@"%.2f", maxFrameRate]];
            }
        } else {
            // Variable frame rate: render as "min-max".
            if (fmod(minFrameRate, 1.0) == 0 && fmod(maxFrameRate, 1.0) == 0) {
                [frameRateDescriptions addObject:[NSString stringWithFormat:@"%.0f-%.0f", minFrameRate, maxFrameRate]];
            } else {
                [frameRateDescriptions addObject:[NSString stringWithFormat:@"%.2f-%.2f", minFrameRate, maxFrameRate]];
            }
        }
    }
    if (frameRateDescriptions.count > 0 && frameRateDescriptions.count <= kMaxFrameRateRangesInDescription) {
        frameRateDescription = [frameRateDescriptions componentsJoinedByString:@", "];
        frameRateDescription = [frameRateDescription stringByAppendingString:@" FPS"];
    } else if (frameRateDescriptions.count > kMaxFrameRateRangesInDescription) {
        // Too many ranges to list: summarize with the overall bounds.
        // NOTE(review): lastObject has the smallest maxFrameRate, but its minFrameRate is not
        // necessarily the overall minimum across all ranges — looks acceptable in practice; verify.
        frameRateDescription =
            [NSString stringWithFormat:@"%.0f-%.0f FPS (%lu values)", sortedRangesDescending.lastObject.minFrameRate,
                                       sortedRangesDescending.firstObject.maxFrameRate, sortedRangesDescending.count];
    }
    return frameRateDescription;
}
  915. + (OBSAVCaptureColorSpace)colorspaceFromDescription:(CMFormatDescriptionRef)description
  916. {
  917. CFPropertyListRef matrix = CMFormatDescriptionGetExtension(description, kCMFormatDescriptionExtension_YCbCrMatrix);
  918. if (!matrix) {
  919. return VIDEO_CS_DEFAULT;
  920. }
  921. CFComparisonResult is601 = CFStringCompare(matrix, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0);
  922. CFComparisonResult is709 = CFStringCompare(matrix, kCVImageBufferYCbCrMatrix_ITU_R_709_2, 0);
  923. CFComparisonResult is2020 = CFStringCompare(matrix, kCVImageBufferYCbCrMatrix_ITU_R_2020, 0);
  924. if (is601 == kCFCompareEqualTo) {
  925. return VIDEO_CS_601;
  926. } else if (is709 == kCFCompareEqualTo) {
  927. return VIDEO_CS_709;
  928. } else if (is2020 == kCFCompareEqualTo) {
  929. CFPropertyListRef transferFunction =
  930. CMFormatDescriptionGetExtension(description, kCMFormatDescriptionExtension_TransferFunction);
  931. if (!matrix) {
  932. return VIDEO_CS_DEFAULT;
  933. }
  934. CFComparisonResult isPQ = CFStringCompare(transferFunction, kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ, 0);
  935. CFComparisonResult isHLG = CFStringCompare(transferFunction, kCVImageBufferTransferFunction_ITU_R_2100_HLG, 0);
  936. if (isPQ == kCFCompareEqualTo) {
  937. return VIDEO_CS_2100_PQ;
  938. } else if (isHLG == kCFCompareEqualTo) {
  939. return VIDEO_CS_2100_HLG;
  940. }
  941. }
  942. return VIDEO_CS_DEFAULT;
  943. }
  944. #pragma mark - Notification Handlers
/// Handles a device-connected notification. If the newly connected device matches this
/// source's configured UUID and no input is currently active, switches to the device,
/// configures the session (preset-based or format-based), and starts capture on the
/// session queue. In all other cases only the source's properties are refreshed so the
/// device list in the UI stays current.
- (void)deviceConnected:(NSNotification *)notification
{
    AVCaptureDevice *device = notification.object;
    if (!device) {
        return;
    }
    // A different device appeared: just refresh the properties (device list) and bail.
    if (![[device uniqueID] isEqualTo:self.deviceUUID]) {
        obs_source_update_properties(self.captureInfo->source);
        return;
    }
    // Our device, but an input is already active — nothing to (re)configure.
    if (self.deviceInput.device) {
        [self AVCaptureLog:LOG_INFO withFormat:@"Received connect event with active device '%@' (UUID %@)",
                                               self.deviceInput.device.localizedName, self.deviceInput.device.uniqueID];
        obs_source_update_properties(self.captureInfo->source);
        return;
    }
    [self AVCaptureLog:LOG_INFO
            withFormat:@"Received connect event for device '%@' (UUID %@)", device.localizedName, device.uniqueID];
    NSError *error;
    // Read configuration up front; fast-path capture always uses explicit format configuration.
    NSString *presetName = [OBSAVCapture stringFromSettings:self.captureInfo->settings withSetting:@"preset"];
    BOOL isPresetEnabled = obs_data_get_bool(self.captureInfo->settings, "use_preset");
    BOOL isFastPath = self.captureInfo->isFastPath;
    if ([self switchCaptureDevice:device.uniqueID withError:&error]) {
        BOOL success;
        if (isPresetEnabled && !isFastPath) {
            success = [self configureSessionWithPreset:presetName withError:&error];
        } else {
            success = [self configureSession:&error];
        }
        if (success) {
            // Session start must happen on the dedicated session queue.
            dispatch_async(self.sessionQueue, ^{
                [self startCaptureSession];
            });
        } else {
            [self AVCaptureLog:LOG_ERROR withFormat:error.localizedDescription];
        }
    } else {
        [self AVCaptureLog:LOG_ERROR withFormat:error.localizedDescription];
    }
    obs_source_update_properties(self.captureInfo->source);
}
/// Handles a device-disconnected notification. If the disconnected device is the one this
/// source is actively capturing from, tears down the session asynchronously on the session
/// queue (stop capture, remove input). Otherwise only refreshes the source's properties.
- (void)deviceDisconnected:(NSNotification *)notification
{
    AVCaptureDevice *device = notification.object;
    if (!device) {
        return;
    }
    // A different device disappeared: just refresh the device list shown in properties.
    if (![[device uniqueID] isEqualTo:self.deviceUUID]) {
        obs_source_update_properties(self.captureInfo->source);
        return;
    }
    // Disconnect for our configured device while no input is active is unexpected.
    if (!self.deviceInput.device) {
        [self AVCaptureLog:LOG_ERROR withFormat:@"Received disconnect event for inactive device '%@' (UUID %@)",
                                                device.localizedName, device.uniqueID];
        obs_source_update_properties(self.captureInfo->source);
        return;
    }
    [self AVCaptureLog:LOG_INFO
            withFormat:@"Received disconnect event for device '%@' (UUID %@)", device.localizedName, device.uniqueID];
    // Weakly capture self: the source may be destroyed before the queued teardown runs.
    __weak OBSAVCapture *weakSelf = self;
    dispatch_async(self.sessionQueue, ^{
        OBSAVCapture *instance = weakSelf;
        [instance stopCaptureSession];
        [instance.session removeInput:instance.deviceInput];
        instance.deviceInput = nil;
        instance = nil;
    });
    obs_source_update_properties(self.captureInfo->source);
}
  1014. #pragma mark - AVCapture Delegate Methods
  1015. - (void)captureOutput:(AVCaptureOutput *)output
  1016. didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
  1017. fromConnection:(AVCaptureConnection *)connection
  1018. {
  1019. return;
  1020. }
/// Delegate callback for every captured sample buffer. Dispatches on the buffer's media type:
/// video buffers are either handed to OBS as shared IOSurfaces (fast path) or copied out as an
/// OBS video frame with colorspace/range metadata; audio buffers are unpacked into an OBS
/// audio frame. Invalid buffers record an error state on the capture info and refresh the
/// source properties so the UI can surface the problem.
- (void)captureOutput:(AVCaptureOutput *)output
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection
{
    CMItemCount sampleCount = CMSampleBufferGetNumSamples(sampleBuffer);
    if (!_captureInfo || sampleCount < 1) {
        return;
    }
    // OBS timestamps are in nanoseconds; rescale the presentation timestamp accordingly.
    CMTime presentationTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer);
    CMTime presentationNanoTimeStamp = CMTimeConvertScale(presentationTimeStamp, 1E9, kCMTimeRoundingMethod_Default);
    CMFormatDescriptionRef description = CMSampleBufferGetFormatDescription(sampleBuffer);
    CMMediaType mediaType = CMFormatDescriptionGetMediaType(description);
    switch (mediaType) {
        case kCMMediaType_Video: {
            CMVideoDimensions sampleBufferDimensions = CMVideoFormatDescriptionGetDimensions(description);
            CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
            FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(description);
            // Start from the last known video info; isValid is only set once this buffer checks out.
            OBSAVCaptureVideoInfo newInfo = {.fourCC = _videoInfo.fourCC,
                                             .colorSpace = _videoInfo.colorSpace,
                                             .isValid = false};
            BOOL usePreset = obs_data_get_bool(_captureInfo->settings, "use_preset");
            if (_isFastPath) {
                // Fast path shares the pixel buffer's IOSurface directly; only packed
                // BGRA/10-bit-RGB buffers are supported here.
                if (mediaSubType != kCVPixelFormatType_32BGRA &&
                    mediaSubType != kCVPixelFormatType_ARGB2101010LEPacked) {
                    _captureInfo->lastError = OBSAVCaptureError_SampleBufferFormat;
                    // Keep a description of the offending buffer around for error reporting.
                    CMFormatDescriptionCreate(kCFAllocatorDefault, mediaType, mediaSubType, NULL,
                                              &_captureInfo->sampleBufferDescription);
                    obs_source_update_properties(_captureInfo->source);
                    break;
                } else {
                    _captureInfo->lastError = OBSAVCaptureError_NoError;
                    _captureInfo->sampleBufferDescription = NULL;
                }
                CVPixelBufferLockBaseAddress(imageBuffer, 0);
                IOSurfaceRef frameSurface = CVPixelBufferGetIOSurface(imageBuffer);
                CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
                IOSurfaceRef previousSurface = NULL;
                // Swap the current surface under the mutex; the renderer reads it concurrently.
                if (frameSurface && !pthread_mutex_lock(&_captureInfo->mutex)) {
                    NSRect frameSize = _captureInfo->frameSize;
                    if (frameSize.size.width != sampleBufferDimensions.width ||
                        frameSize.size.height != sampleBufferDimensions.height) {
                        frameSize = CGRectMake(0, 0, sampleBufferDimensions.width, sampleBufferDimensions.height);
                    }
                    previousSurface = _captureInfo->currentSurface;
                    _captureInfo->currentSurface = frameSurface;
                    // Retain + increment use count so the surface stays alive until rendered.
                    CFRetain(_captureInfo->currentSurface);
                    IOSurfaceIncrementUseCount(_captureInfo->currentSurface);
                    pthread_mutex_unlock(&_captureInfo->mutex);
                    newInfo.isValid = true;
                    if (_videoInfo.isValid != newInfo.isValid) {
                        obs_source_update_properties(_captureInfo->source);
                    }
                    _captureInfo->frameSize = frameSize;
                    _videoInfo = newInfo;
                }
                // Balance the retain/use count taken when the previous surface was stored.
                if (previousSurface) {
                    IOSurfaceDecrementUseCount(previousSurface);
                    CFRelease(previousSurface);
                }
                break;
            } else {
                OBSAVCaptureVideoFrame *frame = _captureInfo->videoFrame;
                frame->timestamp = presentationNanoTimeStamp.value;
                enum video_format videoFormat = [OBSAVCapture formatFromSubtype:mediaSubType];
                if (videoFormat == VIDEO_FORMAT_NONE) {
                    // Unsupported pixel format: record the error and fall through so the
                    // (invalid) frame state below pushes a NULL frame to OBS.
                    _captureInfo->lastError = OBSAVCaptureError_SampleBufferFormat;
                    CMFormatDescriptionCreate(kCFAllocatorDefault, mediaType, mediaSubType, NULL,
                                              &_captureInfo->sampleBufferDescription);
                } else {
                    _captureInfo->lastError = OBSAVCaptureError_NoError;
                    _captureInfo->sampleBufferDescription = NULL;
#ifdef DEBUG
                    if (frame->format != VIDEO_FORMAT_NONE && frame->format != videoFormat) {
                        [self AVCaptureLog:LOG_DEBUG
                                withFormat:@"Switching fourcc: '%@' (0x%x) -> '%@' (0x%x)",
                                           [OBSAVCapture stringFromFourCharCode:frame->format], frame->format,
                                           [OBSAVCapture stringFromFourCharCode:mediaSubType], mediaSubType];
                    }
#endif
                    bool isFrameYuv = format_is_yuv(frame->format);
                    bool isSampleBufferYuv = format_is_yuv(videoFormat);
                    frame->format = videoFormat;
                    frame->width = sampleBufferDimensions.width;
                    frame->height = sampleBufferDimensions.height;
                    BOOL isSampleBufferFullRange = [OBSAVCapture isFullRangeFormat:mediaSubType];
                    if (isSampleBufferYuv) {
                        OBSAVCaptureColorSpace sampleBufferColorSpace =
                            [OBSAVCapture colorspaceFromDescription:description];
                        OBSAVCaptureVideoRange sampleBufferRangeType = isSampleBufferFullRange ? VIDEO_RANGE_FULL
                                                                                              : VIDEO_RANGE_PARTIAL;
                        // With a preset the buffer's own metadata is authoritative; with manual
                        // configuration the user-configured values are compared instead.
                        BOOL isColorSpaceMatching = NO;
                        SInt64 configuredColorSpace = _captureInfo->configuredColorSpace;
                        if (usePreset) {
                            isColorSpaceMatching = sampleBufferColorSpace == _videoInfo.colorSpace;
                        } else {
                            isColorSpaceMatching = configuredColorSpace == _videoInfo.colorSpace;
                        }
                        BOOL isFourCCMatching = NO;
                        SInt64 configuredFourCC = _captureInfo->configuredFourCC;
                        if (usePreset) {
                            isFourCCMatching = mediaSubType == _videoInfo.fourCC;
                        } else {
                            isFourCCMatching = configuredFourCC == _videoInfo.fourCC;
                        }
                        if (isColorSpaceMatching && isFourCCMatching) {
                            // Nothing changed since the last frame; reuse cached conversion parameters.
                            newInfo.isValid = true;
                        } else {
                            // Format or colorspace changed: recompute the YUV conversion parameters.
                            frame->full_range = isSampleBufferFullRange;
                            bool success = video_format_get_parameters_for_format(
                                sampleBufferColorSpace, sampleBufferRangeType, frame->format, frame->color_matrix,
                                frame->color_range_min, frame->color_range_max);
                            if (!success) {
                                _captureInfo->lastError = OBSAVCaptureError_ColorSpace;
                                CMFormatDescriptionCreate(kCFAllocatorDefault, mediaType, mediaSubType, NULL,
                                                          &_captureInfo->sampleBufferDescription);
                                newInfo.isValid = false;
                            } else {
                                newInfo.colorSpace = sampleBufferColorSpace;
                                newInfo.fourCC = mediaSubType;
                                newInfo.isValid = true;
                            }
                        }
                    } else if (!isFrameYuv && !isSampleBufferYuv) {
                        // RGB-to-RGB needs no conversion parameters.
                        newInfo.isValid = true;
                    }
                }
                if (newInfo.isValid != _videoInfo.isValid) {
                    obs_source_update_properties(_captureInfo->source);
                }
                _videoInfo = newInfo;
                if (newInfo.isValid) {
                    // Hand the (planar or packed) pixel data to OBS while the base address is locked.
                    CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
                    if (!CVPixelBufferIsPlanar(imageBuffer)) {
                        frame->linesize[0] = (UInt32) CVPixelBufferGetBytesPerRow(imageBuffer);
                        frame->data[0] = CVPixelBufferGetBaseAddress(imageBuffer);
                    } else {
                        size_t planeCount = CVPixelBufferGetPlaneCount(imageBuffer);
                        for (size_t i = 0; i < planeCount; i++) {
                            frame->linesize[i] = (UInt32) CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, i);
                            frame->data[i] = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, i);
                        }
                    }
                    obs_source_output_video(_captureInfo->source, frame);
                    CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
                } else {
                    // Invalid frame: clear the source's video output.
                    obs_source_output_video(_captureInfo->source, NULL);
                }
                break;
            }
        }
        case kCMMediaType_Audio: {
            // Two-call pattern: first query the required AudioBufferList size, then fetch the
            // buffer list together with a retained block buffer backing the audio data.
            size_t requiredBufferListSize;
            OSStatus status = noErr;
            status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
                sampleBuffer, &requiredBufferListSize, NULL, 0, NULL, NULL,
                kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, NULL);
            if (status != noErr) {
                _captureInfo->lastAudioError = OBSAVCaptureError_AudioBuffer;
                obs_source_update_properties(_captureInfo->source);
                break;
            }
            // NOTE(review): malloc result is used unchecked; an allocation failure would crash
            // in the call below — consider guarding.
            AudioBufferList *bufferList = (AudioBufferList *) malloc(requiredBufferListSize);
            CMBlockBufferRef blockBuffer = NULL;
            OSStatus error = noErr;
            error = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
                sampleBuffer, NULL, bufferList, requiredBufferListSize, kCFAllocatorSystemDefault,
                kCFAllocatorSystemDefault, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer);
            if (error == noErr) {
                _captureInfo->lastAudioError = OBSAVCaptureError_NoError;
                OBSAVCaptureAudioFrame *audio = _captureInfo->audioFrame;
                for (size_t i = 0; i < bufferList->mNumberBuffers; i++) {
                    audio->data[i] = bufferList->mBuffers[i].mData;
                }
                audio->timestamp = presentationNanoTimeStamp.value;
                audio->frames = (uint32_t) CMSampleBufferGetNumSamples(sampleBuffer);
                const AudioStreamBasicDescription *basicDescription =
                    CMAudioFormatDescriptionGetStreamBasicDescription(description);
                audio->samples_per_sec = (uint32_t) basicDescription->mSampleRate;
                // NOTE(review): channel count is cast straight to speaker_layout — assumes the
                // OBS enum values line up with channel counts; confirm for >2 channels.
                audio->speakers = (enum speaker_layout) basicDescription->mChannelsPerFrame;
                switch (basicDescription->mBitsPerChannel) {
                    case 8:
                        audio->format = AUDIO_FORMAT_U8BIT;
                        break;
                    case 16:
                        audio->format = AUDIO_FORMAT_16BIT;
                        break;
                    case 32:
                        audio->format = AUDIO_FORMAT_32BIT;
                        break;
                    default:
                        audio->format = AUDIO_FORMAT_UNKNOWN;
                        break;
                }
                obs_source_output_audio(_captureInfo->source, audio);
            } else {
                _captureInfo->lastAudioError = OBSAVCaptureError_AudioBuffer;
                obs_source_output_audio(_captureInfo->source, NULL);
            }
            // Release the retained block buffer and the temporary buffer list.
            if (blockBuffer != NULL) {
                CFRelease(blockBuffer);
            }
            if (bufferList != NULL) {
                free(bufferList);
                bufferList = NULL;
            }
            break;
        }
        default:
            break;
    }
}
  1232. #pragma mark - Log Helpers
  1233. - (void)AVCaptureLog:(int)logLevel withFormat:(NSString *)format, ...
  1234. {
  1235. va_list args;
  1236. va_start(args, format);
  1237. NSString *logMessage = [[NSString alloc] initWithFormat:format arguments:args];
  1238. va_end(args);
  1239. const char *name_value = obs_source_get_name(self.captureInfo->source);
  1240. NSString *sourceName = @((name_value) ? name_value : "");
  1241. blog(logLevel, "%s: %s", sourceName.UTF8String, logMessage.UTF8String);
  1242. }
  1243. + (void)AVCaptureLog:(int)logLevel withFormat:(NSString *)format, ...
  1244. {
  1245. va_list args;
  1246. va_start(args, format);
  1247. NSString *logMessage = [[NSString alloc] initWithFormat:format arguments:args];
  1248. va_end(args);
  1249. blog(logLevel, "%s", logMessage.UTF8String);
  1250. }
  1251. @end