plugin-properties.m
//
// plugin-properties.m
// mac-avcapture
//
// Created by Patrick Heyer on 2023-03-07.
//

#import "OBSAVCapture.h"
#import "plugin-properties.h"

extern const char *av_capture_get_text(const char *text_id);
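
/// Helper to configure a single property: sets its enabled and visible state and, if a callback is provided,
/// attaches it as a modified callback with the OBSAVCapture instance as its private data.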
void configure_property(obs_property_t *property, bool enable, bool visible, void *callback, OBSAVCapture *capture)
{
    if (property) {
        obs_property_set_enabled(property, enable);
        obs_property_set_visible(property, visible);

        if (callback) {
            obs_property_set_modified_callback2(property, callback, (__bridge void *) (capture));
        }
    }
}
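
/// Modified callback for the main source properties: refreshes the device list, then updates either the preset
/// list or the manual configuration properties, depending on whether "use_preset" is enabled.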
bool properties_changed(OBSAVCapture *capture, obs_properties_t *properties, obs_property_t *property __unused,
                        obs_data_t *settings)
{
    OBSAVCaptureInfo *captureInfo = capture.captureInfo;

    obs_property_t *prop_use_preset = obs_properties_get(properties, "use_preset");
    obs_property_t *prop_device = obs_properties_get(properties, "device");
    obs_property_t *prop_presets = obs_properties_get(properties, "preset");

    if (captureInfo) {
        obs_property_set_enabled(prop_use_preset, !captureInfo->isFastPath);
    }

    if (captureInfo && settings) {
        properties_update_device(capture, prop_device, settings);

        bool use_preset = obs_data_get_bool(settings, "use_preset");

        if (use_preset) {
            properties_update_preset(capture, prop_presets, settings);
        } else {
            properties_update_config(capture, properties, settings);
        }
    }

    return true;
}
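
/// Modified callback for the "preset" property: rebuilds the list of session presets supported by the currently
/// selected device, keeping an unsupported but currently selected preset as a disabled entry.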
bool properties_changed_preset(OBSAVCapture *capture, obs_properties_t *properties __unused, obs_property_t *property,
                               obs_data_t *settings)
{
    bool use_preset = obs_data_get_bool(settings, "use_preset");

    if (capture && settings && use_preset) {
        NSArray *presetKeys =
            [capture.presetList keysSortedByValueUsingComparator:^NSComparisonResult(NSString *obj1, NSString *obj2) {
                NSNumber *obj1Resolution;
                NSNumber *obj2Resolution;

                if ([obj1 isEqualToString:@"High"]) {
                    obj1Resolution = @3;
                } else if ([obj1 isEqualToString:@"Medium"]) {
                    obj1Resolution = @2;
                } else if ([obj1 isEqualToString:@"Low"]) {
                    obj1Resolution = @1;
                } else {
                    NSArray<NSString *> *obj1Dimensions = [obj1 componentsSeparatedByString:@"x"];
                    obj1Resolution = [NSNumber numberWithInt:([[obj1Dimensions objectAtIndex:0] intValue] *
                                                              [[obj1Dimensions objectAtIndex:1] intValue])];
                }

                if ([obj2 isEqualToString:@"High"]) {
                    obj2Resolution = @3;
                } else if ([obj2 isEqualToString:@"Medium"]) {
                    obj2Resolution = @2;
                } else if ([obj2 isEqualToString:@"Low"]) {
                    obj2Resolution = @1;
                } else {
                    NSArray<NSString *> *obj2Dimensions = [obj2 componentsSeparatedByString:@"x"];
                    obj2Resolution = [NSNumber numberWithInt:([[obj2Dimensions objectAtIndex:0] intValue] *
                                                              [[obj2Dimensions objectAtIndex:1] intValue])];
                }

                NSComparisonResult result = [obj1Resolution compare:obj2Resolution];

                if (result == NSOrderedAscending) {
                    return (NSComparisonResult) NSOrderedDescending;
                } else if (result == NSOrderedDescending) {
                    return (NSComparisonResult) NSOrderedAscending;
                } else {
                    return (NSComparisonResult) NSOrderedSame;
                }
            }];

        NSString *UUID = [OBSAVCapture stringFromSettings:settings withSetting:@"device"];
        AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:UUID];
        NSString *currentPreset = [OBSAVCapture stringFromSettings:settings withSetting:@"preset"];

        obs_property_list_clear(property);

        if (device) {
            for (NSString *presetName in presetKeys) {
                NSString *presetDescription = capture.presetList[presetName];

                if ([device supportsAVCaptureSessionPreset:presetName]) {
                    obs_property_list_add_string(property, presetDescription.UTF8String, presetName.UTF8String);
                } else if ([currentPreset isEqualToString:presetName]) {
                    size_t index =
                        obs_property_list_add_string(property, presetDescription.UTF8String, presetName.UTF8String);
                    obs_property_list_item_disable(property, index, true);
                }
            }
        } else if (UUID.length) {
            size_t index = obs_property_list_add_string(property, capture.presetList[currentPreset].UTF8String,
                                                        currentPreset.UTF8String);
            obs_property_list_item_disable(property, index, true);
        }

        return YES;
    } else {
        return NO;
    }
}
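
/// Modified callback for the "use_preset" property: toggles visibility between the preset list and the manual
/// configuration properties (resolution, frame rate, color space, video range, input format).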
bool properties_changed_use_preset(OBSAVCapture *capture, obs_properties_t *properties,
                                   obs_property_t *property __unused, obs_data_t *settings)
{
    bool use_preset = obs_data_get_bool(settings, "use_preset");
    obs_property_t *preset_list = obs_properties_get(properties, "preset");

    obs_property_set_visible(preset_list, use_preset);

    if (use_preset) {
        properties_changed_preset(capture, properties, preset_list, settings);
    }

    const char *update_properties[5] = {"resolution", "frame_rate", "color_space", "video_range", "input_format"};
    size_t number_of_properties = sizeof(update_properties) / sizeof(update_properties[0]);

    for (size_t i = 0; i < number_of_properties; i++) {
        obs_property_t *update_property = obs_properties_get(properties, update_properties[i]);

        if (update_property) {
            obs_property_set_visible(update_property, !use_preset);
            obs_property_set_enabled(update_property, !use_preset);
        }
    }

    return true;
}
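
/// Rebuilds the preset list for the selected device, sorted from highest to lowest resolution, and disables the
/// currently selected preset if the device does not support it or is not available.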
bool properties_update_preset(OBSAVCapture *capture, obs_property_t *property, obs_data_t *settings)
{
    NSArray *presetKeys =
        [capture.presetList keysSortedByValueUsingComparator:^NSComparisonResult(NSString *obj1, NSString *obj2) {
            NSNumber *obj1Resolution;
            NSNumber *obj2Resolution;

            if ([obj1 isEqualToString:@"High"]) {
                obj1Resolution = @3;
            } else if ([obj1 isEqualToString:@"Medium"]) {
                obj1Resolution = @2;
            } else if ([obj1 isEqualToString:@"Low"]) {
                obj1Resolution = @1;
            } else {
                NSArray<NSString *> *obj1Dimensions = [obj1 componentsSeparatedByString:@"x"];
                obj1Resolution = [NSNumber numberWithInt:([[obj1Dimensions objectAtIndex:0] intValue] *
                                                          [[obj1Dimensions objectAtIndex:1] intValue])];
            }

            if ([obj2 isEqualToString:@"High"]) {
                obj2Resolution = @3;
            } else if ([obj2 isEqualToString:@"Medium"]) {
                obj2Resolution = @2;
            } else if ([obj2 isEqualToString:@"Low"]) {
                obj2Resolution = @1;
            } else {
                NSArray<NSString *> *obj2Dimensions = [obj2 componentsSeparatedByString:@"x"];
                obj2Resolution = [NSNumber numberWithInt:([[obj2Dimensions objectAtIndex:0] intValue] *
                                                          [[obj2Dimensions objectAtIndex:1] intValue])];
            }

            NSComparisonResult result = [obj1Resolution compare:obj2Resolution];

            if (result == NSOrderedAscending) {
                return (NSComparisonResult) NSOrderedDescending;
            } else if (result == NSOrderedDescending) {
                return (NSComparisonResult) NSOrderedAscending;
            } else {
                return (NSComparisonResult) NSOrderedSame;
            }
        }];

    NSString *deviceUUID = [OBSAVCapture stringFromSettings:settings withSetting:@"device"];
    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:deviceUUID];
    NSString *currentPreset = [OBSAVCapture stringFromSettings:settings withSetting:@"preset"];

    obs_property_list_clear(property);

    if (device) {
        for (NSString *presetName in presetKeys) {
            NSString *presetDescription = capture.presetList[presetName];

            if ([device supportsAVCaptureSessionPreset:presetName]) {
                obs_property_list_add_string(property, presetDescription.UTF8String, presetName.UTF8String);
            } else if ([currentPreset isEqualToString:presetName]) {
                size_t index =
                    obs_property_list_add_string(property, presetDescription.UTF8String, presetName.UTF8String);
                obs_property_list_item_disable(property, index, true);
            }
        }
    } else if (deviceUUID.length) {
        size_t index = obs_property_list_add_string(property, capture.presetList[currentPreset].UTF8String,
                                                    currentPreset.UTF8String);
        obs_property_list_item_disable(property, index, true);
    }

    return true;
}
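
/// Rebuilds the device list from AVCaptureDeviceDiscoverySession results (video and muxed devices), adding the
/// currently configured device as a disabled entry if it is no longer present.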
bool properties_update_device(OBSAVCapture *capture __unused, obs_property_t *property, obs_data_t *settings)
{
    obs_property_list_clear(property);

    NSString *currentDeviceUUID = [OBSAVCapture stringFromSettings:settings withSetting:@"device"];
    NSString *currentDeviceName = [OBSAVCapture stringFromSettings:settings withSetting:@"device_name"];
    BOOL isDeviceFound = NO;

    obs_property_list_add_string(property, "", "");

    NSArray *deviceTypes;

    if (@available(macOS 13, *)) {
        deviceTypes = @[
            AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeExternalUnknown,
            AVCaptureDeviceTypeDeskViewCamera
        ];
    } else {
        deviceTypes = @[AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeExternalUnknown];
    }

    AVCaptureDeviceDiscoverySession *videoDiscoverySession =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes
                                                               mediaType:AVMediaTypeVideo
                                                                position:AVCaptureDevicePositionUnspecified];
    AVCaptureDeviceDiscoverySession *muxedDiscoverySession =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes
                                                               mediaType:AVMediaTypeMuxed
                                                                position:AVCaptureDevicePositionUnspecified];

    for (AVCaptureDevice *device in [videoDiscoverySession devices]) {
        obs_property_list_add_string(property, device.localizedName.UTF8String, device.uniqueID.UTF8String);

        if (!isDeviceFound && [currentDeviceUUID isEqualToString:device.uniqueID]) {
            isDeviceFound = YES;
        }
    }

    for (AVCaptureDevice *device in [muxedDiscoverySession devices]) {
        obs_property_list_add_string(property, device.localizedName.UTF8String, device.uniqueID.UTF8String);

        if (!isDeviceFound && [currentDeviceUUID isEqualToString:device.uniqueID]) {
            isDeviceFound = YES;
        }
    }

    if (!isDeviceFound && currentDeviceUUID.length > 0) {
        size_t index =
            obs_property_list_add_string(property, currentDeviceName.UTF8String, currentDeviceUUID.UTF8String);
        obs_property_list_item_disable(property, index, true);
    }

    return true;
}
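
/// Rebuilds the manual configuration properties (resolution, frame rate, input format and, for the non-fast-path
/// source, color space and video range) from the formats reported by the selected device. Currently configured
/// values that the device does not report are added as disabled entries.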
bool properties_update_config(OBSAVCapture *capture, obs_properties_t *properties, obs_data_t *settings)
{
    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:[OBSAVCapture stringFromSettings:settings
                                                                                        withSetting:@"device"]];

    obs_property_t *prop_resolution = obs_properties_get(properties, "resolution");
    obs_property_t *prop_framerate = obs_properties_get(properties, "frame_rate");

    obs_property_list_clear(prop_resolution);
    obs_property_frame_rate_clear(prop_framerate);

    obs_property_t *prop_input_format = NULL;
    obs_property_t *prop_color_space = NULL;
    obs_property_t *prop_video_range = NULL;

    prop_input_format = obs_properties_get(properties, "input_format");
    obs_property_list_clear(prop_input_format);

    if (!capture.isFastPath) {
        prop_color_space = obs_properties_get(properties, "color_space");
        prop_video_range = obs_properties_get(properties, "video_range");

        obs_property_list_clear(prop_video_range);
        obs_property_list_clear(prop_color_space);
    }

    CMVideoDimensions resolution = [OBSAVCapture dimensionsFromSettings:settings];

    if (resolution.width == 0 || resolution.height == 0) {
        [capture AVCaptureLog:LOG_DEBUG withFormat:@"No valid resolution found in settings"];
    }

    struct media_frames_per_second fps;

    if (!obs_data_get_frames_per_second(settings, "frame_rate", &fps, NULL)) {
        [capture AVCaptureLog:LOG_DEBUG withFormat:@"No valid framerate found in settings"];
    }

    CMTime time = {.value = fps.denominator, .timescale = fps.numerator, .flags = 1};

    int input_format = 0;
    int color_space = 0;
    int video_range = 0;

    NSMutableArray *inputFormats = NULL;
    NSMutableArray *colorSpaces = NULL;
    NSMutableArray *videoRanges = NULL;

    input_format = (int) obs_data_get_int(settings, "input_format");
    inputFormats = [[NSMutableArray alloc] init];

    if (!capture.isFastPath) {
        color_space = (int) obs_data_get_int(settings, "color_space");
        video_range = (int) obs_data_get_int(settings, "video_range");

        colorSpaces = [[NSMutableArray alloc] init];
        videoRanges = [[NSMutableArray alloc] init];
    }

    NSMutableArray *resolutions = [[NSMutableArray alloc] init];
    NSMutableArray *frameRates = [[NSMutableArray alloc] init];

    BOOL hasFoundResolution = NO;
    BOOL hasFoundFramerate = NO;
    BOOL hasFoundInputFormat = NO;
    BOOL hasFoundColorSpace = capture.isFastPath;
    BOOL hasFoundVideoRange = capture.isFastPath;

    CFPropertyListRef priorColorPrimary = (__bridge CFPropertyListRef) @"";

    if (device) {
        // Iterate over all formats reported by the device and gather them for property lists
        for (AVCaptureDeviceFormat *format in device.formats) {
            if (!capture.isFastPath) {
                FourCharCode formatSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
                NSString *formatDescription = [OBSAVCapture stringFromSubType:formatSubType];
                int device_format = [OBSAVCapture formatFromSubtype:formatSubType];

                int device_range;
                const char *range_description;

                if ([OBSAVCapture isFullRangeFormat:formatSubType]) {
                    device_range = VIDEO_RANGE_FULL;
                    range_description = av_capture_get_text("VideoRange.Full");
                } else {
                    device_range = VIDEO_RANGE_PARTIAL;
                    range_description = av_capture_get_text("VideoRange.Partial");
                }

                if (!hasFoundInputFormat && input_format == device_format) {
                    hasFoundInputFormat = YES;
                }

                if (!hasFoundVideoRange && video_range == device_range) {
                    hasFoundVideoRange = YES;
                }

                if (![inputFormats containsObject:@(formatSubType)]) {
                    obs_property_list_add_int(prop_input_format, formatDescription.UTF8String, device_format);
                    [inputFormats addObject:@(formatSubType)];
                }

                if (![videoRanges containsObject:@(range_description)]) {
                    obs_property_list_add_int(prop_video_range, range_description, device_range);
                    [videoRanges addObject:@(range_description)];
                }

                int device_color_space = [OBSAVCapture colorspaceFromDescription:format.formatDescription];

                if (![colorSpaces containsObject:@(device_color_space)]) {
                    obs_property_list_add_int(prop_color_space,
                                              [OBSAVCapture stringFromColorspace:device_color_space].UTF8String,
                                              device_color_space);
                    [colorSpaces addObject:@(device_color_space)];
                }

                if (!hasFoundColorSpace && device_color_space == color_space) {
                    hasFoundColorSpace = YES;
                }
            } else {
                FourCharCode formatSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
                NSString *formatDescription = [OBSAVCapture stringFromSubType:formatSubType];
                int device_format = [OBSAVCapture formatFromSubtype:formatSubType];

                if (!hasFoundInputFormat && input_format == device_format) {
                    hasFoundInputFormat = YES;
                }

                if (![inputFormats containsObject:@(formatSubType)]) {
                    obs_property_list_add_int(prop_input_format, formatDescription.UTF8String, device_format);
                    [inputFormats addObject:@(formatSubType)];
                }
            }

            CMVideoDimensions formatDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);

            NSDictionary *resolutionData = @{@"width": @(formatDimensions.width),
                                             @"height": @(formatDimensions.height)};

            if (![resolutions containsObject:resolutionData]) {
                [resolutions addObject:resolutionData];
            }

            if (!hasFoundResolution && formatDimensions.width == resolution.width &&
                formatDimensions.height == resolution.height) {
                hasFoundResolution = YES;
            }

            // Only iterate over available framerates if input format, color space, and resolution are matching
            if (hasFoundInputFormat && hasFoundColorSpace && hasFoundResolution) {
                CFComparisonResult isColorPrimaryMatch = kCFCompareEqualTo;

                CFPropertyListRef colorPrimary = CMFormatDescriptionGetExtension(
                    format.formatDescription, kCMFormatDescriptionExtension_ColorPrimaries);

                if (colorPrimary) {
                    isColorPrimaryMatch = CFStringCompare(colorPrimary, priorColorPrimary, 0);
                }

                if (isColorPrimaryMatch != kCFCompareEqualTo || !hasFoundFramerate) {
                    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges.reverseObjectEnumerator) {
                        FourCharCode formatSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
                        int device_format = [OBSAVCapture formatFromSubtype:formatSubType];

                        if (input_format == device_format) {
                            struct media_frames_per_second min_fps = {
                                .numerator = (uint32_t) clamp_Uint(range.maxFrameDuration.timescale, 0, UINT32_MAX),
                                .denominator = (uint32_t) clamp_Uint(range.maxFrameDuration.value, 0, UINT32_MAX)};
                            struct media_frames_per_second max_fps = {
                                .numerator = (uint32_t) clamp_Uint(range.minFrameDuration.timescale, 0, UINT32_MAX),
                                .denominator = (uint32_t) clamp_Uint(range.minFrameDuration.value, 0, UINT32_MAX)};

                            if (![frameRates containsObject:range]) {
                                obs_property_frame_rate_fps_range_add(prop_framerate, min_fps, max_fps);
                                [frameRates addObject:range];
                            }

                            if (!hasFoundFramerate && CMTimeCompare(range.maxFrameDuration, time) >= 0 &&
                                CMTimeCompare(range.minFrameDuration, time) <= 0) {
                                hasFoundFramerate = YES;
                            }
                        }
                    }

                    priorColorPrimary = colorPrimary;
                }
            }
        }

        // Add resolutions in reverse order (formats reported by macOS are sorted with lowest resolution first)
        for (NSDictionary *resolutionData in resolutions.reverseObjectEnumerator) {
            NSError *error;
            NSData *jsonData = [NSJSONSerialization dataWithJSONObject:resolutionData options:0 error:&error];

            int width = [[resolutionData objectForKey:@"width"] intValue];
            int height = [[resolutionData objectForKey:@"height"] intValue];

            obs_property_list_add_string(
                prop_resolution, [NSString stringWithFormat:@"%dx%d", width, height].UTF8String,
                [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding].UTF8String);
        }

        // Add currently selected values in disabled state if they are not supported by the device
        size_t index;

        FourCharCode formatSubType = [OBSAVCapture fourCharCodeFromFormat:input_format withRange:video_range];

        if (!hasFoundInputFormat) {
            NSString *formatDescription = [OBSAVCapture stringFromSubType:formatSubType];

            index = obs_property_list_add_int(prop_input_format, formatDescription.UTF8String, input_format);
            obs_property_list_item_disable(prop_input_format, index, true);
        }

        if (!capture.isFastPath) {
            if (!hasFoundVideoRange) {
                int device_range;
                const char *range_description;

                if ([OBSAVCapture isFullRangeFormat:formatSubType]) {
                    device_range = VIDEO_RANGE_FULL;
                    range_description = av_capture_get_text("VideoRange.Full");
                } else {
                    device_range = VIDEO_RANGE_PARTIAL;
                    range_description = av_capture_get_text("VideoRange.Partial");
                }

                index = obs_property_list_add_int(prop_video_range, range_description, device_range);
                obs_property_list_item_disable(prop_video_range, index, true);
            }

            if (!hasFoundColorSpace) {
                index = obs_property_list_add_int(
                    prop_color_space, [OBSAVCapture stringFromColorspace:color_space].UTF8String, color_space);
                obs_property_list_item_disable(prop_color_space, index, true);
            }
        }

        if (!hasFoundResolution) {
            NSDictionary *resolutionData = @{@"width": @(resolution.width), @"height": @(resolution.height)};

            NSError *error;
            NSData *jsonData = [NSJSONSerialization dataWithJSONObject:resolutionData options:0 error:&error];

            index = obs_property_list_add_string(
                prop_resolution, [NSString stringWithFormat:@"%dx%d", resolution.width, resolution.height].UTF8String,
                [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding].UTF8String);
            obs_property_list_item_disable(prop_resolution, index, true);
        }
    }

    return true;
}