Browse Source

mac-avcapture: Use list-based format selector

jcm 1 year ago
parent
commit
bf7e82de80

+ 26 - 0
plugins/mac-avcapture/AVCaptureDeviceFormat+OBSListable.h

@@ -0,0 +1,26 @@
+//
+//  AVCaptureDeviceFormat+OBSListable.h
+//  obs-studio
+//
+//  Created by jcm on 7/9/24.
+//
+
+//  Import AVFoundation so this header is self-contained: the category below
+//  extends AVCaptureDeviceFormat, which is declared by AVFoundation.
+#import <AVFoundation/AVFoundation.h>
+
+/// [AVCaptureDeviceFormat](https://developer.apple.com/documentation/avfoundation/avcapturedeviceformat) class customization adding comparison functionality as well as stored properties for sort order and descriptions. Used to present multiple `AVCaptureDeviceFormat`s in a sorted OBS property list.
+@interface AVCaptureDeviceFormat (OBSListable)
+
+/// Lazily computed property representing a scalar for the potential 'pixel throughput' of the format. Computed by multiplying together the resolution area and maximum frames per second.
+@property (nonatomic, strong) NSNumber *pixelBandwidthComparisonValue;
+
+/// Lazily computed property representing the aspect ratio value used for comparing to other format aspect ratios. Given by the width divided by height, to only two decimal places. Low precision is used to account for resolutions with a precise aspect ratio that is not equal to their semantic aspect ratio. For example, 854x480 and 1366x768 are semantically both 16:9, but not precisely 16:9.
+@property (nonatomic, strong) NSNumber *aspectRatioComparisonValue;
+
+/// Lazily computed property containing the bits per pixel for the format. For planar formats, accounts for the number of bytes of chrominance and luminance per pixel block.
+@property (nonatomic, strong) NSNumber *bitsPerPixel;
+
+/// Lazily computed property containing a general localized description of the format, suitable for use in picking a format in the OBS source properties window.
+@property (nonatomic, strong) NSString *obsPropertyListDescription;
+
+/// Lazily computed property containing the string value used by OBS to represent and uniquely identify the format by its dimensions, supported frame rate ranges, color space and [four character code](https://developer.apple.com/documentation/coremedia/1489255-cmformatdescriptiongetmediasubty?language=objc).
+@property (nonatomic, strong) NSString *obsPropertyListInternalRepresentation;
+
+@end

+ 163 - 0
plugins/mac-avcapture/AVCaptureDeviceFormat+OBSListable.m

@@ -0,0 +1,163 @@
+//
+//  AVCaptureDeviceFormat+OBSListable.m
+//  obs-studio
+//
+//  Created by jcm on 7/9/24.
+//
+
+#import "OBSAVCapture.h"
+
+@implementation AVCaptureDeviceFormat (OBSListable)
+
+- (NSString *)obsPropertyListDescription
+{
+    // Computed once, then cached on the instance as an associated object keyed
+    // by this getter's selector (a category cannot add stored ivars).
+    if (!objc_getAssociatedObject(self, @selector(obsPropertyListDescription))) {
+        CMVideoDimensions formatDimensions = CMVideoFormatDescriptionGetDimensions(self.formatDescription);
+        FourCharCode formatSubType = CMFormatDescriptionGetMediaSubType(self.formatDescription);
+        NSString *pixelFormatDescription = [OBSAVCapture stringFromSubType:formatSubType];
+        OBSAVCaptureColorSpace deviceColorSpace = [OBSAVCapture colorspaceFromDescription:self.formatDescription];
+        NSString *colorspaceDescription = [OBSAVCapture stringFromColorspace:deviceColorSpace];
+        NSString *fpsRangesDescription = [OBSAVCapture frameRateDescription:self.videoSupportedFrameRateRanges];
+        NSString *aspectRatioString = [OBSAVCapture aspectRatioStringFromDimensions:formatDimensions];
+
+        // Collect only informative components; each optional component is
+        // skipped when it is missing, unknown, or a default value.
+        NSMutableArray *propertyListDescriptionArray = [[NSMutableArray alloc] init];
+        NSString *resolutionDescriptionComponent;
+        if (formatDimensions.width > 0 && formatDimensions.height > 0) {
+            resolutionDescriptionComponent = [NSString
+                stringWithFormat:@"%dx%d (%@)", formatDimensions.width, formatDimensions.height, aspectRatioString];
+            [propertyListDescriptionArray addObject:resolutionDescriptionComponent];
+        }
+        if (fpsRangesDescription) {
+            [propertyListDescriptionArray addObject:fpsRangesDescription];
+        }
+        if (deviceColorSpace != VIDEO_CS_DEFAULT) {
+            [propertyListDescriptionArray addObject:colorspaceDescription];
+        }
+        if (![pixelFormatDescription isEqualToString:@"Unknown"]) {
+            [propertyListDescriptionArray addObject:pixelFormatDescription];
+        }
+
+        NSString *propertyListDescription = [propertyListDescriptionArray componentsJoinedByString:@" - "];
+        // Fall back to a generic label when every component was skipped.
+        if ([propertyListDescription isEqualToString:@""]) {
+            propertyListDescription = @"Default";
+        }
+        objc_setAssociatedObject(self, @selector(obsPropertyListDescription), propertyListDescription,
+                                 OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+    }
+    return objc_getAssociatedObject(self, @selector(obsPropertyListDescription));
+}
+
+- (NSString *)obsPropertyListInternalRepresentation
+{
+    // Computed once, then cached as an associated object keyed by this
+    // getter's selector (a category cannot add stored ivars).
+    if (!objc_getAssociatedObject(self, @selector(obsPropertyListInternalRepresentation))) {
+        CMVideoDimensions formatDimensions = CMVideoFormatDescriptionGetDimensions(self.formatDescription);
+        FourCharCode formatSubType = CMFormatDescriptionGetMediaSubType(self.formatDescription);
+        OBSAVCaptureColorSpace deviceColorSpace = [OBSAVCapture colorspaceFromDescription:self.formatDescription];
+        NSString *frameRatesInternalDescription = @"";
+        // Sort the frame rate ranges (by max rate, then min rate, ascending)
+        // so the identifier is stable regardless of the order the device
+        // reports its ranges in.
+        NSArray *sortedRanges = [self.videoSupportedFrameRateRanges
+            sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *_Nonnull lhs,
+                                                           AVFrameRateRange *_Nonnull rhs) {
+                if (lhs.maxFrameRate > rhs.maxFrameRate) {
+                    return NSOrderedDescending;
+                } else if (lhs.maxFrameRate < rhs.maxFrameRate) {
+                    return NSOrderedAscending;
+                }
+                if (lhs.minFrameRate > rhs.minFrameRate) {
+                    return NSOrderedDescending;
+                } else if (lhs.minFrameRate < rhs.minFrameRate) {
+                    return NSOrderedAscending;
+                }
+                return NSOrderedSame;
+            }];
+        // Concatenate "min-max" for every range (no separator between ranges).
+        for (AVFrameRateRange *range in sortedRanges) {
+            frameRatesInternalDescription = [frameRatesInternalDescription
+                stringByAppendingFormat:@"%.3f-%.3f", range.minFrameRate, range.maxFrameRate];
+        }
+        // NOTE(review): %i renders the FourCharCode subtype and the color
+        // space enum as signed decimal integers; fine for uniqueness, but the
+        // value is not human-readable — presumably intentional. TODO confirm.
+        NSString *internalRepresentation =
+            [NSString stringWithFormat:@"%dx%d %@ %i %i", formatDimensions.width, formatDimensions.height,
+                                       frameRatesInternalDescription, deviceColorSpace, formatSubType];
+        // Base64-encode so the stored settings value is a single opaque token.
+        NSData *internalRepresentationBytes = [internalRepresentation dataUsingEncoding:NSUTF8StringEncoding];
+        NSString *base64String = [internalRepresentationBytes base64EncodedStringWithOptions:0];
+        objc_setAssociatedObject(self, @selector(obsPropertyListInternalRepresentation), base64String,
+                                 OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+    }
+    return objc_getAssociatedObject(self, @selector(obsPropertyListInternalRepresentation));
+}
+
+- (NSNumber *)bitsPerPixel
+{
+    // Computed once, then cached as an associated object keyed by this
+    // getter's selector. For subsampled/planar formats the value is the
+    // average bits per pixel over a 2x2 pixel block.
+    if (!objc_getAssociatedObject(self, @selector(bitsPerPixel))) {
+        CMFormatDescriptionRef formatDescription = self.formatDescription;
+        FourCharCode subtype = CMFormatDescriptionGetMediaSubType(formatDescription);
+        UInt64 value;
+        // Each case group must end in a break: without them the switch falls
+        // through and every format is reported as the default 32 bpp.
+        switch (subtype) {
+            case kCVPixelFormatType_422YpCbCr8:
+            case kCVPixelFormatType_422YpCbCr8_yuvs:
+                //2x2 block, 4 bytes luma, 2 bytes Cr, 2 bytes Cb; 64 / 4
+                value = 16;
+                break;
+            case kCVPixelFormatType_32BGRA:
+                value = 32;
+                break;
+            case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
+            case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
+                //2x2 block, 4 bytes luma, 1 byte Cr, 1 byte Cb; 48 / 4
+                value = 12;
+                break;
+            case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange:
+            case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange:
+                //2x2 block, 8 bytes luma, 2 bytes Cr, 2 bytes Cb; 96 / 4
+                value = 24;
+                break;
+            default:
+                //what other formats do we need to possibly account for?
+                value = 32;
+                break;
+        }
+        objc_setAssociatedObject(self, @selector(bitsPerPixel), @(value), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+    }
+    return objc_getAssociatedObject(self, @selector(bitsPerPixel));
+}
+
+- (NSNumber *)pixelBandwidthComparisonValue
+{
+    // Computed once, then cached as an associated object keyed by this
+    // getter's selector (a category cannot add stored ivars).
+    if (!objc_getAssociatedObject(self, @selector(pixelBandwidthComparisonValue))) {
+        CMFormatDescriptionRef formatDescription = self.formatDescription;
+        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
+        // Sort ranges ascending (by max rate, then min rate) so lastObject
+        // holds the range with the highest supported max frame rate.
+        NSArray<AVFrameRateRange *> *sortedRangesAscending = [self.videoSupportedFrameRateRanges
+            sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *_Nonnull lhs,
+                                                           AVFrameRateRange *_Nonnull rhs) {
+                if (lhs.maxFrameRate > rhs.maxFrameRate) {
+                    return NSOrderedDescending;
+                } else if (lhs.maxFrameRate < rhs.maxFrameRate) {
+                    return NSOrderedAscending;
+                }
+                if (lhs.minFrameRate > rhs.minFrameRate) {
+                    return NSOrderedDescending;
+                } else if (lhs.minFrameRate < rhs.minFrameRate) {
+                    return NSOrderedAscending;
+                }
+                return NSOrderedSame;
+            }];
+        // 'Pixel bandwidth' scalar: resolution area times the highest max
+        // frame rate. Messaging a nil lastObject yields 0 when no ranges exist.
+        NSNumber *bandwidth = @(dimensions.width * dimensions.height * sortedRangesAscending.lastObject.maxFrameRate);
+        objc_setAssociatedObject(self, @selector(pixelBandwidthComparisonValue), bandwidth,
+                                 OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+    }
+    return objc_getAssociatedObject(self, @selector(pixelBandwidthComparisonValue));
+}
+
+- (NSNumber *)aspectRatioComparisonValue
+{
+    // Computed once, then cached as an associated object keyed by this
+    // getter's selector (a category cannot add stored ivars).
+    if (!objc_getAssociatedObject(self, @selector(aspectRatioComparisonValue))) {
+        CMFormatDescriptionRef formatDescription = self.formatDescription;
+        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
+
+        if (dimensions.height > 0) {
+            //for sorting purposes, treat aspect ratios sufficiently close together as equivalent
+            // Round width/height to two decimal places so e.g. 854x480 and
+            // 1366x768 compare equal even though neither is exactly 16:9.
+            double ratio = (double) dimensions.width / (double) dimensions.height;
+            double roundedRatio = round(ratio * 100) / 100;
+            NSNumber *compareRatio = @(roundedRatio);
+
+            objc_setAssociatedObject(self, @selector(aspectRatioComparisonValue), compareRatio,
+                                     OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+        }
+        // NOTE(review): when height <= 0 nothing is cached, so this getter
+        // returns nil and re-runs on every call — presumably intentional for
+        // degenerate formats; confirm callers tolerate a nil comparison value.
+    }
+    return objc_getAssociatedObject(self, @selector(aspectRatioComparisonValue));
+}
+
+@end

+ 2 - 0
plugins/mac-avcapture/CMakeLists.txt

@@ -33,6 +33,8 @@ add_library(OBS::avcapture ALIAS mac-avcapture)
 target_sources(
   mac-avcapture
   PRIVATE
+    AVCaptureDeviceFormat+OBSListable.m
+    AVCaptureDeviceFormat+OBSListable.h
     plugin-main.m
     plugin-main.h
     plugin-properties.m

+ 41 - 4
plugins/mac-avcapture/OBSAVCapture.h

@@ -25,6 +25,7 @@ typedef struct obs_source_frame OBSAVCaptureVideoFrame;
 typedef struct obs_source_audio OBSAVCaptureAudioFrame;
 typedef struct gs_texture OBSAVCaptureTexture;
 typedef struct gs_effect OBSAVCaptureEffect;
+typedef struct media_frames_per_second OBSAVCaptureMediaFPS;
 
 /// C struct for errors encountered in capture callback
 typedef enum : NSUInteger {
@@ -46,6 +47,9 @@ typedef struct av_capture {
 
     pthread_mutex_t mutex;
 
+    OBSAVCaptureColorSpace configuredColorSpace;
+    OBSAVCaptureVideoRange configuredFourCC;
+
     void *settings;
     void *source;
     bool isFastPath;
@@ -58,7 +62,7 @@ typedef struct av_capture {
 /// C struct for sample buffer validity checks in capture callback
 typedef struct av_capture_info {
     OBSAVCaptureColorSpace colorSpace;
-    OBSAVCaptureVideoRange videoRange;
+    FourCharCode fourCC;
     bool isValid;
 } OBSAVCaptureVideoInfo;
 
@@ -70,7 +74,7 @@ typedef struct av_capture_info {
 ///
 /// Devices can be configured either via [presets](https://developer.apple.com/documentation/avfoundation/avcapturesessionpreset?language=objc) (usually 3 quality-based presets in addition to resolution based presets). The resolution defined by the preset does not necessarily switch the actual device to the same resolution, instead the device is automatically switched to the best possible resolution and the [CMSampleBuffer](https://developer.apple.com/documentation/coremedia/cmsamplebuffer?language=objc) provided via [AVCaptureVideoDataOutput](https://developer.apple.com/documentation/avfoundation/avcapturevideodataoutput?language=objc) will be resized accordingly. If necessary the actual frame will be pillar-boxed to fit into a widescreen sample buffer in an attempt to fit the content into it.
 ///
-/// Alternatively, devices can be configured manually by specifying resolution, frame-rate, color format and color space. If a device was **not** configured via a preset originally, the size of the [CMSampleBuffer](https://developer.apple.com/documentation/coremedia/cmsamplebuffer?language=objc) will be adjusted to the selected resolution.
+/// Alternatively, devices can be configured manually by specifying a particular [AVCaptureDeviceFormat](https://developer.apple.com/documentation/avfoundation/avcapturedeviceformat?language=objc) representing a specific combination of resolution, frame-rate, color format and color space supported by the device. If a device was **not** configured via a preset originally, the size of the [CMSampleBuffer](https://developer.apple.com/documentation/coremedia/cmsamplebuffer?language=objc) will be adjusted to the selected resolution.
 ///
 /// > Important: If a preset was configured before, the resolution of the last valid preset-based buffer will be retained and the frame will be fit into it with the selected resolution.
 ///
@@ -144,10 +148,27 @@ typedef struct av_capture_info {
 
 #pragma mark - OBS Settings Helpers
 
-/// Reads source dimensions from user settings and converts them into a [CMVideoDimensions](https://developer.apple.com/documentation/coremedia/cmvideodimensions?language=objc) struct for convenience when interacting with the [CoreMediaIO](https://developer.apple.com/documentation/coremediaio?language=objc) framework.
+/// Reads source dimensions from the legacy user settings and converts them into a [CMVideoDimensions](https://developer.apple.com/documentation/coremedia/cmvideodimensions?language=objc) struct for convenience when interacting with the [CoreMediaIO](https://developer.apple.com/documentation/coremediaio?language=objc) framework.
 /// - Parameter settings: Pointer to settings struct used by ``libobs``
 /// - Returns: [CMVideoDimensions](https://developer.apple.com/documentation/coremedia/cmvideodimensions?language=objc) struct with resolution from user settings
-+ (CMVideoDimensions)dimensionsFromSettings:(void *)settings;
++ (CMVideoDimensions)legacyDimensionsFromSettings:(void *)settings;
+
+/// Generates an appropriate frame rate value to fall back to (based on OBS's configured output framerate) when the user selects a format that does not support their previously configured frame rate.
+///
+/// This function fetches OBS's configured output frame rate and uses it to determine the appropriate default frame rate supported by the format. It will return:
+///
+/// * The frame rate nearest up to and including OBS's configured output frame rate.
+/// * If that does not exist on the format, the frame rate nearest above OBS's configured output frame rate.
+/// * If that does not exist, a struct representing an invalid frame rate.
+/// - Parameter format: [AVCaptureDeviceFormat](https://developer.apple.com/documentation/avfoundation/avcapturedevice/format?language=objc) instance that we are determining a fallback FPS for.
+/// - Returns: Struct representing a frames per second value as defined in ``libobs``.
++ (OBSAVCaptureMediaFPS)fallbackFrameRateForFormat:(AVCaptureDeviceFormat *)format;
+
+/// Generates a new [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance containing a human-readable aspect ratio for a given pixel width and height.
+/// - Parameter dimensions: [CMVideoDimensions](https://developer.apple.com/documentation/coremedia/cmvideodimensions?language=objc) struct containing the width and height in pixels.
+/// - Returns: New [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance containing the aspect ratio description.
+/// For resolutions with too low of a common divisor (i.e. 2.35:1, resolutions slightly off of a common aspect ratio), this function provides the ratio between 1 and the larger float value.
++ (NSString *)aspectRatioStringFromDimensions:(CMVideoDimensions)dimensions;
 
 /// Reads a C-character pointer from user settings and converts it into an [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance.
 /// - Parameters:
@@ -164,6 +185,11 @@ typedef struct av_capture_info {
 /// - Returns: New [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance created from user setting if setting represented a valid C character pointer.
 + (NSString *)stringFromSettings:(void *)settings withSetting:(NSString *)setting withDefault:(NSString *)defaultValue;
 
+/// Generates an NSString representing the name of the warning to display in the properties window for macOS system effects that are active on a particular `AVCaptureDevice`.
+/// - Parameter device: The [AVCaptureDevice](https://developer.apple.com/documentation/avfoundation/avcapturedevice?language=objc) to generate an effects warning string for.
+/// - Returns: `nil` if there are no effects active on the device. If effects are found, returns a new [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance containing the `libobs` key used to retrieve the appropriate localized warning string.
++ (NSString *)effectsWarningForDevice:(AVCaptureDevice *)device;
+
 #pragma mark - Format Conversion Helpers
 
 /// Converts a FourCC-based color format identifier into a human-readable string represented as an [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance.
@@ -235,6 +261,11 @@ typedef struct av_capture_info {
 
 + (FourCharCode)fourCharCodeFromFormat:(OBSAVCaptureVideoFormat)format withRange:(enum video_range_type)videoRange;
 
+/// Generates a string describing an array of frame rate ranges. The frame rate ranges are described in ascending order.
+/// - Parameter ranges: [NSArray](https://developer.apple.com/documentation/foundation/nsarray?language=objc) of [AVFrameRateRange](https://developer.apple.com/documentation/avfoundation/avframeraterange?language=objc), such as might be provided by an `AVCaptureDeviceFormat` instance's [videoSupportedFrameRateRanges](https://developer.apple.com/documentation/avfoundation/avcapturedeviceformat/1387592-videosupportedframerateranges) property.
+/// - Returns: A new [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance that describes the frame rate ranges.
++ (NSString *)frameRateDescription:(NSArray<AVFrameRateRange *> *)ranges;
+
 /// Converts a [CMFormatDescription](https://developer.apple.com/documentation/coremedia/cmformatdescription?language=objc) into a ``libobs``-based color space value
 /// - Parameter description: A [CMFormatDescription](https://developer.apple.com/documentation/coremedia/cmformatdescription?language=objc) media format descriptor
 /// - Returns: A ``libobs``-based color space value
@@ -351,3 +382,9 @@ static inline SInt64 clamp_Sint(SInt64 value, SInt64 min, SInt64 max)
 
     return clamped > max ? max : value;
 }
+
+/// Compute the greatest common divisor of two signed 32-bit integers using
+/// the iterative Euclidean algorithm.
+static inline SInt32 gcd(SInt32 a, SInt32 b)
+{
+    while (b != 0) {
+        SInt32 remainder = a % b;
+        a = b;
+        b = remainder;
+    }
+    return a;
+}

+ 245 - 109
plugins/mac-avcapture/OBSAVCapture.m

@@ -6,6 +6,10 @@
 //
 
 #import "OBSAVCapture.h"
+#import "AVCaptureDeviceFormat+OBSListable.h"
+
+/// The maximum number of frame rate ranges to show complete information for before providing a more generic description of the supported frame rates inside of a device format description.
+static const UInt32 kMaxFrameRateRangesInDescription = 10;
 
 @implementation OBSAVCapture
 
@@ -229,24 +233,6 @@
         return NO;
     }
 
-    if (@available(macOS 12.0, *)) {
-        if (device.portraitEffectActive) {
-            [self AVCaptureLog:LOG_WARNING withFormat:@"Portrait effect is active on selected device"];
-        }
-    }
-
-    if (@available(macOS 12.3, *)) {
-        if (device.centerStageActive) {
-            [self AVCaptureLog:LOG_WARNING withFormat:@"Center Stage effect is active on selected device"];
-        }
-    }
-
-    if (@available(macOS 13.0, *)) {
-        if (device.studioLightActive) {
-            [self AVCaptureLog:LOG_WARNING withFormat:@"Studio Light effect is active on selected device"];
-        }
-    }
-
     [self.session beginConfiguration];
 
     if ([self.session canAddInput:deviceInput]) {
@@ -428,46 +414,7 @@
 
 - (BOOL)configureSession:(NSError *__autoreleasing *)error
 {
-    int videoRange;
-    int colorSpace;
-    FourCharCode inputFourCC;
-
-    if (!self.isFastPath) {
-        videoRange = (int) obs_data_get_int(self.captureInfo->settings, "video_range");
-
-        if (![OBSAVCapture isValidVideoRange:videoRange]) {
-            [self AVCaptureLog:LOG_WARNING withFormat:@"Unsupported video range: %d", videoRange];
-            return NO;
-        }
-
-        int inputFormat;
-        inputFormat = (int) obs_data_get_int(self.captureInfo->settings, "input_format");
-        inputFourCC = [OBSAVCapture fourCharCodeFromFormat:inputFormat withRange:videoRange];
-
-        colorSpace = (int) obs_data_get_int(self.captureInfo->settings, "color_space");
-
-        if (![OBSAVCapture isValidColorspace:colorSpace]) {
-            [self AVCaptureLog:LOG_DEBUG withFormat:@"Unsupported color space: %d", colorSpace];
-            return NO;
-        }
-    } else {
-        int inputFormat;
-        CMFormatDescriptionRef formatDescription = self.deviceInput.device.activeFormat.formatDescription;
-        inputFormat = (int) obs_data_get_int(self.captureInfo->settings, "input_format");
-        inputFourCC = [OBSAVCapture fourCharCodeFromFormat:inputFormat withRange:VIDEO_RANGE_DEFAULT];
-
-        colorSpace = [OBSAVCapture colorspaceFromDescription:formatDescription];
-        videoRange = ([OBSAVCapture isFullRangeFormat:inputFourCC]) ? VIDEO_RANGE_FULL : VIDEO_RANGE_PARTIAL;
-    }
-
-    CMVideoDimensions dimensions = [OBSAVCapture dimensionsFromSettings:self.captureInfo->settings];
-
-    if (dimensions.width == 0 || dimensions.height == 0) {
-        [self AVCaptureLog:LOG_DEBUG withFormat:@"No valid resolution found in settings"];
-        return NO;
-    }
-
-    struct media_frames_per_second fps;
+    OBSAVCaptureMediaFPS fps;
     if (!obs_data_get_frames_per_second(self.captureInfo->settings, "frame_rate", &fps, NULL)) {
         [self AVCaptureLog:LOG_DEBUG withFormat:@"No valid framerate found in settings"];
         return NO;
@@ -475,68 +422,109 @@
 
     CMTime time = {.value = fps.denominator, .timescale = fps.numerator, .flags = 1};
 
-    AVCaptureDeviceFormat *format = nil;
-
-    for (AVCaptureDeviceFormat *formatCandidate in [self.deviceInput.device.formats reverseObjectEnumerator]) {
-        CMVideoDimensions formatDimensions = CMVideoFormatDescriptionGetDimensions(formatCandidate.formatDescription);
+    const char *selectedFormat = obs_data_get_string(self.captureInfo->settings, "supported_format");
+    NSString *selectedFormatNSString = selectedFormat != NULL ? @(selectedFormat) : @"";
 
-        if (!(formatDimensions.width == dimensions.width) || !(formatDimensions.height == dimensions.height)) {
-            continue;
-        }
+    AVCaptureDeviceFormat *format = nil;
+    FourCharCode subtype;
+    OBSAVCaptureColorSpace colorSpace;
+    bool fpsSupported = false;
 
-        for (AVFrameRateRange *range in formatCandidate.videoSupportedFrameRateRanges) {
-            if (CMTimeCompare(range.maxFrameDuration, time) >= 0 && CMTimeCompare(range.minFrameDuration, time) <= 0) {
+    if (![selectedFormatNSString isEqualToString:@""]) {
+        for (AVCaptureDeviceFormat *formatCandidate in [self.deviceInput.device.formats reverseObjectEnumerator]) {
+            if ([selectedFormatNSString isEqualToString:formatCandidate.obsPropertyListInternalRepresentation]) {
                 CMFormatDescriptionRef formatDescription = formatCandidate.formatDescription;
                 FourCharCode formatFourCC = CMFormatDescriptionGetMediaSubType(formatDescription);
-
-                if (inputFourCC == formatFourCC) {
-                    format = formatCandidate;
-                    inputFourCC = formatFourCC;
-                    break;
-                }
+                format = formatCandidate;
+                subtype = formatFourCC;
+                colorSpace = [OBSAVCapture colorspaceFromDescription:formatDescription];
+                break;
             }
         }
-
-        if (format) {
-            break;
+    } else {
+        //try to migrate from the legacy suite of properties
+        int legacyVideoRange = (int) obs_data_get_int(self.captureInfo->settings, "video_range");
+        int legacyInputFormat = (int) obs_data_get_int(self.captureInfo->settings, "input_format");
+        int legacyColorSpace = (int) obs_data_get_int(self.captureInfo->settings, "color_space");
+        CMVideoDimensions legacyDimensions = [OBSAVCapture legacyDimensionsFromSettings:self.captureInfo->settings];
+        for (AVCaptureDeviceFormat *formatCandidate in [self.deviceInput.device.formats reverseObjectEnumerator]) {
+            CMFormatDescriptionRef formatDescription = formatCandidate.formatDescription;
+            CMVideoDimensions formatDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
+            int formatColorSpace = [OBSAVCapture colorspaceFromDescription:formatDescription];
+            int formatInputFormat =
+                [OBSAVCapture formatFromSubtype:CMFormatDescriptionGetMediaSubType(formatDescription)];
+            int formatVideoRange = [OBSAVCapture isFullRangeFormat:formatInputFormat] ? VIDEO_RANGE_FULL
+                                                                                      : VIDEO_RANGE_PARTIAL;
+            bool foundFormat = legacyVideoRange == formatVideoRange && legacyInputFormat == formatInputFormat &&
+                               legacyColorSpace == formatColorSpace &&
+                               legacyDimensions.width == formatDimensions.width &&
+                               legacyDimensions.height == formatDimensions.height;
+            if (foundFormat) {
+                format = formatCandidate;
+                subtype = formatInputFormat;
+                colorSpace = formatColorSpace;
+                break;
+            }
         }
     }
 
     if (!format) {
-        [self AVCaptureLog:LOG_WARNING withFormat:@"Frame rate is not supported: %g FPS (%u/%u)",
-                                                  media_frames_per_second_to_fps(fps), fps.numerator, fps.denominator];
+        [self AVCaptureLog:LOG_WARNING withFormat:@"Configured format not found on device"];
         return NO;
     }
 
+    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
+        if (CMTimeCompare(range.maxFrameDuration, time) >= 0 && CMTimeCompare(range.minFrameDuration, time) <= 0) {
+            fpsSupported = true;
+            break;
+        }
+    }
+
+    if (!fpsSupported) {
+        OBSAVCaptureMediaFPS fallbackFPS = [OBSAVCapture fallbackFrameRateForFormat:format];
+        if (fallbackFPS.denominator > 0 && fallbackFPS.numerator > 0) {
+            [self AVCaptureLog:LOG_WARNING withFormat:@"Frame rate is not supported: %g FPS (%u/%u), \n"
+                                                       " falling back to value supported by device: %G FPS (%u/%u)",
+                                                      media_frames_per_second_to_fps(fps), fps.numerator,
+                                                      fps.denominator, media_frames_per_second_to_fps(fallbackFPS),
+                                                      fallbackFPS.numerator, fallbackFPS.denominator];
+            obs_data_set_frames_per_second(self.captureInfo->settings, "frame_rate", fallbackFPS, NULL);
+            time.value = fallbackFPS.denominator;
+            time.timescale = fallbackFPS.numerator;
+        } else {
+            [self AVCaptureLog:LOG_WARNING
+                    withFormat:@"Frame rate is not supported: %g FPS (%u/%u), \n"
+                                " no supported fallback FPS found",
+                               media_frames_per_second_to_fps(fps), fps.numerator, fps.denominator];
+            return NO;
+        }
+    }
+
     [self.session beginConfiguration];
 
     self.isDeviceLocked = [self.deviceInput.device lockForConfiguration:error];
 
     if (!self.isDeviceLocked) {
-        [self AVCaptureLog:LOG_WARNING withFormat:@"Could not lock devie for configuration"];
+        [self AVCaptureLog:LOG_WARNING withFormat:@"Could not lock device for configuration"];
         return NO;
     }
 
     [self AVCaptureLog:LOG_INFO
             withFormat:@"Capturing '%@' (%@):\n"
-                        " Resolution            : %ux%u\n"
+                        " Using Format          : %@ \n"
                         " FPS                   : %g (%u/%u)\n"
-                        " Frame Interval        : %g\u00a0s\n"
-                        " Input Format          : %@\n"
-                        " Requested Color Space : %@ (%d)\n"
-                        " Requested Video Range : %@ (%d)\n"
-                        " Using Format          : %@",
-                       self.deviceInput.device.localizedName, self.deviceInput.device.uniqueID, dimensions.width,
-                       dimensions.height, media_frames_per_second_to_fps(fps), fps.numerator, fps.denominator,
-                       media_frames_per_second_to_frame_interval(fps), [OBSAVCapture stringFromSubType:inputFourCC],
-                       [OBSAVCapture stringFromColorspace:colorSpace], colorSpace,
-                       [OBSAVCapture stringFromVideoRange:videoRange], videoRange, format.description];
+                        " Frame Interval        : %g\u00a0s\n",
+                       self.deviceInput.device.localizedName, self.deviceInput.device.uniqueID,
+                       format.obsPropertyListDescription, media_frames_per_second_to_fps(fps), fps.numerator,
+                       fps.denominator, media_frames_per_second_to_frame_interval(fps)];
 
     OBSAVCaptureVideoInfo newInfo = {.colorSpace = _videoInfo.colorSpace,
-                                     .videoRange = _videoInfo.videoRange,
+                                     .fourCC = _videoInfo.fourCC,
                                      .isValid = false};
 
     self.videoInfo = newInfo;
+    self.captureInfo->configuredColorSpace = colorSpace;
+    self.captureInfo->configuredFourCC = subtype;
 
     self.isPresetBased = NO;
 
@@ -649,16 +637,14 @@
 
 #pragma mark - OBS Settings Helpers
 
-+ (CMVideoDimensions)dimensionsFromSettings:(void *)settings
++ (CMVideoDimensions)legacyDimensionsFromSettings:(void *)settings
 {
     CMVideoDimensions zero = {0};
 
     NSString *jsonString = [OBSAVCapture stringFromSettings:settings withSetting:@"resolution"];
-
     NSDictionary *data = [NSJSONSerialization JSONObjectWithData:[jsonString dataUsingEncoding:NSUTF8StringEncoding]
                                                          options:0
                                                            error:nil];
-
     if (data.count == 0) {
         return zero;
     }
@@ -672,10 +658,65 @@
 
     CMVideoDimensions dimensions = {.width = (int32_t) clamp_Uint(width, 0, UINT32_MAX),
                                     .height = (int32_t) clamp_Uint(height, 0, UINT32_MAX)};
-
     return dimensions;
 }
 
++ (OBSAVCaptureMediaFPS)fallbackFrameRateForFormat:(AVCaptureDeviceFormat *)format
+{
+    struct obs_video_info video_info;
+    bool result = obs_get_video_info(&video_info);
+
+    double outputFPS = result ? ((double) video_info.fps_num / (double) video_info.fps_den) : 0;
+
+    double closestUpTo = 0;
+    double closestAbove = DBL_MAX;
+    OBSAVCaptureMediaFPS closestUpToMFPS = {};
+    OBSAVCaptureMediaFPS closestAboveMFPS = {};
+
+    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
+        if (range.maxFrameRate > closestUpTo && range.maxFrameRate <= outputFPS) {
+            closestUpTo = range.maxFrameRate;
+            closestUpToMFPS.numerator = (uint32_t) clamp_Uint(range.minFrameDuration.timescale, 0, UINT32_MAX);
+            closestUpToMFPS.denominator = (uint32_t) clamp_Uint(range.minFrameDuration.value, 0, UINT32_MAX);
+        }
+        if (range.minFrameRate > outputFPS && range.minFrameRate < closestAbove) {
+            closestAbove = range.minFrameRate;
+            closestAboveMFPS.numerator = (uint32_t) clamp_Uint(range.maxFrameDuration.timescale, 0, UINT32_MAX);
+            closestAboveMFPS.denominator = (uint32_t) clamp_Uint(range.maxFrameDuration.value, 0, UINT32_MAX);
+        }
+    }
+    if (closestUpTo > 0) {
+        return closestUpToMFPS;
+    } else {
+        return closestAboveMFPS;
+    }
+}
+
++ (NSString *)aspectRatioStringFromDimensions:(CMVideoDimensions)dimensions
+{
+    if (dimensions.width <= 0 || dimensions.height <= 0) {
+        return @"";
+    }
+    double divisor = (double) gcd(dimensions.width, dimensions.height);
+    if (divisor <= 50) {
+        if (dimensions.width > dimensions.height) {
+            double x = (double) dimensions.width / (double) dimensions.height;
+            return [NSString stringWithFormat:@"%.2f:1", x];
+        } else {
+            double y = (double) dimensions.height / (double) dimensions.width;
+            return [NSString stringWithFormat:@"1:%.2f", y];
+        }
+    } else {
+        SInt32 x = dimensions.width / (SInt32) divisor;
+        SInt32 y = dimensions.height / (SInt32) divisor;
+        if (x == 8 && y == 5) {
+            x = 16;
+            y = 10;
+        }
+        return [NSString stringWithFormat:@"%i:%i", x, y];
+    }
+}
+
 + (NSString *)stringFromSettings:(void *)settings withSetting:(NSString *)setting
 {
     return [OBSAVCapture stringFromSettings:settings withSetting:setting withDefault:@""];
@@ -700,21 +741,70 @@
     return result;
 }
 
++ (NSString *)effectsWarningForDevice:(AVCaptureDevice *)device
+{
+    int effectsCount = 0;
+    NSString *effectWarning = nil;
+    if (@available(macOS 12.0, *)) {
+        if (device.portraitEffectActive) {
+            effectWarning = @"Warning.Effect.Portrait";
+            effectsCount++;
+        }
+    }
+    if (@available(macOS 12.3, *)) {
+        if (device.centerStageActive) {
+            effectWarning = @"Warning.Effect.CenterStage";
+            effectsCount++;
+        }
+    }
+    if (@available(macOS 13.0, *)) {
+        if (device.studioLightActive) {
+            effectWarning = @"Warning.Effect.StudioLight";
+            effectsCount++;
+        }
+    }
+    if (@available(macOS 14.0, *)) {
+        /// Reaction effects do not follow the same paradigm as other effects in terms of checking whether they are active. According to Apple, this is because a device instance property `reactionEffectsActive` would have been ambiguous (conflicting with whether a reaction is currently rendering).
+        ///
+        /// Instead, Apple exposes the `AVCaptureDevice.reactionEffectGesturesEnabled` class property (an equivalent exists for all other effects, but is hidden/private) to tell us whether the effect is enabled application-wide, as well as the `device.canPerformReactionEffects` instance property to tell us whether the device's active format currently supports the effect.
+        ///
+        /// The logical conjunction of these two properties tells us whether the effect is 'active'; i.e. whether putting our thumbs inside the video frame will make fireworks appear. The device instance properties for other effects are a convenience 'shorthand' for this private class/instance property combination.
+        if (device.canPerformReactionEffects && AVCaptureDevice.reactionEffectGesturesEnabled) {
+            effectWarning = @"Warning.Effect.Reactions";
+            effectsCount++;
+        }
+    }
+#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 150000
+    if (@available(macOS 15.0, *)) {
+        if (device.backgroundReplacementActive) {
+            effectWarning = @"Warning.Effect.BackgroundReplacement";
+            effectsCount++;
+        }
+    }
+#endif
+    if (effectsCount > 1) {
+        effectWarning = @"Warning.Effect.Multiple";
+    }
+    return effectWarning;
+}
+
 #pragma mark - Format Conversion Helpers
 
 + (NSString *)stringFromSubType:(FourCharCode)subtype
 {
     switch (subtype) {
         case kCVPixelFormatType_422YpCbCr8:
-            return @"UYVY - 422YpCbCr8";
+            return @"UYVY (2vuy)";
         case kCVPixelFormatType_422YpCbCr8_yuvs:
-            return @"YUY2 - 422YpCbCr8_yuvs";
+            return @"YUY2 (yuvs)";
         case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
+            return @"NV12 (420v)";
         case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
-            return @"NV12 - 420YpCbCr8BiPlanar";
+            return @"NV12 (420f)";
         case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange:
+            return @"P010 (xf20)";
         case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange:
-            return @"P010 - 420YpCbCr10BiPlanar";
+            return @"P010 (x420)";
         case kCVPixelFormatType_32ARGB:
             return @"ARGB - 32ARGB";
         case kCVPixelFormatType_32BGRA:
@@ -946,6 +1036,54 @@
     return [OBSAVCapture fourCharCodeFromFormat:format withRange:VIDEO_RANGE_PARTIAL];
 }
 
++ (NSString *)frameRateDescription:(NSArray<AVFrameRateRange *> *)ranges
+{
+    // The videoSupportedFrameRateRanges property seems to provide frame rate ranges in this order, but since that
+    // ordering does not seem to be guaranteed, ensure they are sorted anyway.
+    NSArray<AVFrameRateRange *> *sortedRangesDescending = [ranges
+        sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *_Nonnull lhs, AVFrameRateRange *_Nonnull rhs) {
+            if (lhs.maxFrameRate > rhs.maxFrameRate) {
+                return NSOrderedAscending;
+            } else if (lhs.maxFrameRate < rhs.maxFrameRate) {
+                return NSOrderedDescending;
+            }
+            if (lhs.minFrameRate > rhs.minFrameRate) {
+                return NSOrderedAscending;
+            } else if (lhs.minFrameRate < rhs.minFrameRate) {
+                return NSOrderedDescending;
+            }
+            return NSOrderedSame;
+        }];
+    NSString *frameRateDescription;
+    NSMutableArray *frameRateDescriptions = [[NSMutableArray alloc] initWithCapacity:ranges.count];
+    for (AVFrameRateRange *range in [sortedRangesDescending reverseObjectEnumerator]) {
+        double minFrameRate = round(range.minFrameRate * 100) / 100;
+        double maxFrameRate = round(range.maxFrameRate * 100) / 100;
+        if (minFrameRate == maxFrameRate) {
+            if (fmod(minFrameRate, 1.0) == 0 && fmod(maxFrameRate, 1.0) == 0) {
+                [frameRateDescriptions addObject:[NSString stringWithFormat:@"%.0f", maxFrameRate]];
+            } else {
+                [frameRateDescriptions addObject:[NSString stringWithFormat:@"%.2f", maxFrameRate]];
+            }
+        } else {
+            if (fmod(minFrameRate, 1.0) == 0 && fmod(maxFrameRate, 1.0) == 0) {
+                [frameRateDescriptions addObject:[NSString stringWithFormat:@"%.0f-%.0f", minFrameRate, maxFrameRate]];
+            } else {
+                [frameRateDescriptions addObject:[NSString stringWithFormat:@"%.2f-%.2f", minFrameRate, maxFrameRate]];
+            }
+        }
+    }
+    if (frameRateDescriptions.count > 0 && frameRateDescriptions.count <= kMaxFrameRateRangesInDescription) {
+        frameRateDescription = [frameRateDescriptions componentsJoinedByString:@", "];
+        frameRateDescription = [frameRateDescription stringByAppendingString:@" FPS"];
+    } else if (frameRateDescriptions.count > kMaxFrameRateRangesInDescription) {
+        frameRateDescription =
+            [NSString stringWithFormat:@"%.0f-%.0f FPS (%lu values)", sortedRangesDescending.lastObject.minFrameRate,
+                                       sortedRangesDescending.firstObject.maxFrameRate, sortedRangesDescending.count];
+    }
+    return frameRateDescription;
+}
+
 + (OBSAVCaptureColorSpace)colorspaceFromDescription:(CMFormatDescriptionRef)description
 {
     CFPropertyListRef matrix = CMFormatDescriptionGetExtension(description, kCMFormatDescriptionExtension_YCbCrMatrix);
@@ -1104,7 +1242,7 @@
             CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
             FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(description);
 
-            OBSAVCaptureVideoInfo newInfo = {.videoRange = _videoInfo.videoRange,
+            OBSAVCaptureVideoInfo newInfo = {.fourCC = _videoInfo.fourCC,
                                              .colorSpace = _videoInfo.colorSpace,
                                              .isValid = false};
 
@@ -1196,10 +1334,9 @@
                             [OBSAVCapture colorspaceFromDescription:description];
                         OBSAVCaptureVideoRange sampleBufferRangeType = isSampleBufferFullRange ? VIDEO_RANGE_FULL
                                                                                                : VIDEO_RANGE_PARTIAL;
-
                         BOOL isColorSpaceMatching = NO;
 
-                        SInt64 configuredColorSpace = obs_data_get_int(_captureInfo->settings, "color_space");
+                        SInt64 configuredColorSpace = _captureInfo->configuredColorSpace;
 
                         if (usePreset) {
                             isColorSpaceMatching = sampleBufferColorSpace == _videoInfo.colorSpace;
@@ -1207,17 +1344,16 @@
                             isColorSpaceMatching = configuredColorSpace == _videoInfo.colorSpace;
                         }
 
-                        BOOL isVideoRangeMatching = NO;
-                        SInt64 configuredVideoRangeType = obs_data_get_int(_captureInfo->settings, "video_range");
+                        BOOL isFourCCMatching = NO;
+                        SInt64 configuredFourCC = _captureInfo->configuredFourCC;
 
                         if (usePreset) {
-                            isVideoRangeMatching = sampleBufferRangeType == _videoInfo.videoRange;
+                            isFourCCMatching = mediaSubType == _videoInfo.fourCC;
                         } else {
-                            isVideoRangeMatching = configuredVideoRangeType == _videoInfo.videoRange;
-                            isSampleBufferFullRange = configuredVideoRangeType == VIDEO_RANGE_FULL;
+                            isFourCCMatching = configuredFourCC == _videoInfo.fourCC;
                         }
 
-                        if (isColorSpaceMatching && isVideoRangeMatching) {
+                        if (isColorSpaceMatching && isFourCCMatching) {
                             newInfo.isValid = true;
                         } else {
                             frame->full_range = isSampleBufferFullRange;
@@ -1233,7 +1369,7 @@
                                 newInfo.isValid = false;
                             } else {
                                 newInfo.colorSpace = sampleBufferColorSpace;
-                                newInfo.videoRange = sampleBufferRangeType;
+                                newInfo.fourCC = mediaSubType;
                                 newInfo.isValid = true;
                             }
                         }

+ 6 - 0
plugins/mac-avcapture/data/locale/en-US.ini

@@ -15,3 +15,9 @@ Auto="Auto"
 Unknown="Unknown (%1)"
 EnableAudio="Enable audio if supported by device"
 Resolution="Resolution"
+Warning.Effect.Portrait="Portrait system effect is active on the selected device"
+Warning.Effect.CenterStage="Center Stage system effect is active on the selected device"
+Warning.Effect.StudioLight="Studio Light system effect is active on the selected device"
+Warning.Effect.Reactions="Reactions system effect is active on the selected device"
+Warning.Effect.BackgroundReplacement="Background replacement system effect is active on the selected device"
+Warning.Effect.Multiple="Multiple system effects are active on the selected device"

+ 18 - 12
plugins/mac-avcapture/plugin-main.m

@@ -79,42 +79,48 @@ static obs_properties_t *av_capture_properties(void *av_capture)
 {
     OBSAVCapture *capture = (__bridge OBSAVCapture *) (av_capture);
     OBSAVCaptureInfo *capture_info = capture.captureInfo;
+    AVCaptureDevice *device = capture.deviceInput.device;
+    NSString *effectsWarningKey = [OBSAVCapture effectsWarningForDevice:device];
 
     obs_properties_t *properties = obs_properties_create();
 
     // Create Properties
     obs_property_t *device_list = obs_properties_add_list(properties, "device", obs_module_text("Device"),
                                                           OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING);
+
+    obs_property_t *effects_warning;
+    if (effectsWarningKey) {
+        effects_warning = obs_properties_add_text(properties, "effects_warning",
+                                                  obs_module_text(effectsWarningKey.UTF8String), OBS_TEXT_INFO);
+        obs_property_text_set_info_type(effects_warning, OBS_TEXT_INFO_WARNING);
+    }
+
     obs_property_t *use_preset = obs_properties_add_bool(properties, "use_preset", obs_module_text("UsePreset"));
     obs_property_t *preset_list = obs_properties_add_list(properties, "preset", obs_module_text("Preset"),
                                                           OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING);
-    obs_property_t *resolutions = obs_properties_add_list(properties, "resolution", obs_module_text("Resolution"),
-                                                          OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING);
+    obs_property_t *supported_formats = obs_properties_add_list(
+        properties, "supported_format", obs_module_text("InputFormat"), OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING);
     obs_property_t *use_buffering = obs_properties_add_bool(properties, "buffering", obs_module_text("Buffering"));
     obs_property_t *frame_rates = obs_properties_add_frame_rate(properties, "frame_rate", obs_module_text("FrameRate"));
-    obs_property_t *input_format = obs_properties_add_list(properties, "input_format", obs_module_text("InputFormat"),
-                                                           OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT);
-    obs_property_t *color_space = obs_properties_add_list(properties, "color_space", obs_module_text("ColorSpace"),
-                                                          OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT);
-    obs_property_t *video_range = obs_properties_add_list(properties, "video_range", obs_module_text("VideoRange"),
-                                                          OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT);
 
     if (capture_info) {
         bool isFastPath = capture_info->isFastPath;
 
         // Add Property Visibility and Callbacks
         configure_property(device_list, true, true, properties_changed, capture);
+        if (effectsWarningKey) {
+            configure_property(effects_warning, true, true, NULL, NULL);
+        }
+
         configure_property(use_preset, !isFastPath, !isFastPath, (!isFastPath) ? properties_changed_use_preset : NULL,
                            capture);
         configure_property(preset_list, !isFastPath, !isFastPath, (!isFastPath) ? properties_changed_preset : NULL,
                            capture);
 
-        configure_property(resolutions, isFastPath, isFastPath, NULL, NULL);
+        configure_property(supported_formats, true, true, properties_changed, capture);
+
         configure_property(use_buffering, !isFastPath, !isFastPath, NULL, NULL);
         configure_property(frame_rates, isFastPath, isFastPath, NULL, NULL);
-        configure_property(color_space, !isFastPath, !isFastPath, NULL, NULL);
-        configure_property(video_range, !isFastPath, !isFastPath, NULL, NULL);
-        configure_property(input_format, true, true, NULL, NULL);
     }
 
     return properties;

+ 2 - 5
plugins/mac-avcapture/plugin-properties.h

@@ -67,12 +67,9 @@ bool properties_update_device(OBSAVCapture *capture, obs_property_t *property, o
 /// Updates available values for all properties required in manual device configuration.
 ///
 /// Properties updated by this call include:
-/// * Resolutions
+/// * Device formats
 /// * Frame rates and frame rate ranges
-/// * Color formats
-/// * Color range
-///
-///  In CoreMediaIO color format, resolution and frame rate ranges are always coupled into a single format, while color range is always contained in the color format. The formats are thus compiled and de-duplicated to create a selection of all properties.
+/// * Effects warnings
 ///
 ///  Frame rate ranges will be limited to ranges only available for a specific combination of resolution and color format.
 ///

+ 33 - 228
plugins/mac-avcapture/plugin-properties.m

@@ -7,6 +7,7 @@
 
 #import "OBSAVCapture.h"
 #import "plugin-properties.h"
+#import "AVCaptureDeviceFormat+OBSListable.h"
 
 extern const char *av_capture_get_text(const char *text_id);
 
@@ -135,7 +136,7 @@ bool properties_changed_use_preset(OBSAVCapture *capture, obs_properties_t *prop
         properties_changed_preset(capture, properties, preset_list, settings);
     }
 
-    const char *update_properties[5] = {"resolution", "frame_rate", "color_space", "video_range", "input_format"};
+    const char *update_properties[2] = {"frame_rate", "supported_format"};
 
     size_t number_of_properties = sizeof(update_properties) / sizeof(update_properties[0]);
 
@@ -276,250 +277,54 @@ bool properties_update_config(OBSAVCapture *capture, obs_properties_t *propertie
     AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:[OBSAVCapture stringFromSettings:settings
                                                                                        withSetting:@"device"]];
 
-    obs_property_t *prop_resolution = obs_properties_get(properties, "resolution");
     obs_property_t *prop_framerate = obs_properties_get(properties, "frame_rate");
+    obs_property_t *prop_format = obs_properties_get(properties, "supported_format");
+    obs_property_t *prop_effects_warning = obs_properties_get(properties, "effects_warning");
 
-    obs_property_list_clear(prop_resolution);
     obs_property_frame_rate_clear(prop_framerate);
-
-    obs_property_t *prop_input_format = NULL;
-    obs_property_t *prop_color_space = NULL;
-    obs_property_t *prop_video_range = NULL;
-
-    prop_input_format = obs_properties_get(properties, "input_format");
-    obs_property_list_clear(prop_input_format);
-
-    if (!capture.isFastPath) {
-        prop_color_space = obs_properties_get(properties, "color_space");
-        prop_video_range = obs_properties_get(properties, "video_range");
-
-        obs_property_list_clear(prop_video_range);
-        obs_property_list_clear(prop_color_space);
-    }
-
-    CMVideoDimensions resolution = [OBSAVCapture dimensionsFromSettings:settings];
-
-    if (resolution.width == 0 || resolution.height == 0) {
-        [capture AVCaptureLog:LOG_DEBUG withFormat:@"No valid resolution found in settings"];
-    }
+    obs_property_list_clear(prop_format);
+    obs_property_list_clear(prop_effects_warning);
 
     struct media_frames_per_second fps;
     if (!obs_data_get_frames_per_second(settings, "frame_rate", &fps, NULL)) {
         [capture AVCaptureLog:LOG_DEBUG withFormat:@"No valid framerate found in settings"];
     }
 
-    CMTime time = {.value = fps.denominator, .timescale = fps.numerator, .flags = 1};
-
-    int input_format = 0;
-    int color_space = 0;
-    int video_range = 0;
+    const char *selectedFormatData = obs_data_get_string(settings, "supported_format");
+    NSString *selectedFormatString = [NSString stringWithCString:selectedFormatData encoding:NSUTF8StringEncoding];
 
-    NSMutableArray *inputFormats = NULL;
-    NSMutableArray *colorSpaces = NULL;
-    NSMutableArray *videoRanges = NULL;
-
-    input_format = (int) obs_data_get_int(settings, "input_format");
-    inputFormats = [[NSMutableArray alloc] init];
-
-    if (!capture.isFastPath) {
-        color_space = (int) obs_data_get_int(settings, "color_space");
-        video_range = (int) obs_data_get_int(settings, "video_range");
-
-        colorSpaces = [[NSMutableArray alloc] init];
-        videoRanges = [[NSMutableArray alloc] init];
-    }
-
-    NSMutableArray *resolutions = [[NSMutableArray alloc] init];
     NSMutableArray *frameRates = [[NSMutableArray alloc] init];
 
-    BOOL hasFoundResolution = NO;
-    BOOL hasFoundFramerate = NO;
-    BOOL hasFoundInputFormat = NO;
-    BOOL hasFoundColorSpace = capture.isFastPath;
-    BOOL hasFoundVideoRange = capture.isFastPath;
-
-    CFPropertyListRef priorColorPrimary = @"";
-
     if (device) {
-        // Iterate over all formats reported by the device and gather them for property lists
-        for (AVCaptureDeviceFormat *format in device.formats) {
-            if (!capture.isFastPath) {
-                FourCharCode formatSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
-
-                NSString *formatDescription = [OBSAVCapture stringFromSubType:formatSubType];
-                int device_format = [OBSAVCapture formatFromSubtype:formatSubType];
-                int device_range;
-                const char *range_description;
-
-                if ([OBSAVCapture isFullRangeFormat:formatSubType]) {
-                    device_range = VIDEO_RANGE_FULL;
-                    range_description = av_capture_get_text("VideoRange.Full");
-                } else {
-                    device_range = VIDEO_RANGE_PARTIAL;
-                    range_description = av_capture_get_text("VideoRange.Partial");
-                }
-
-                if (!hasFoundInputFormat && input_format == device_format) {
-                    hasFoundInputFormat = YES;
-                }
-
-                if (!hasFoundVideoRange && video_range == device_range) {
-                    hasFoundVideoRange = YES;
-                }
-
-                if (![inputFormats containsObject:@(formatSubType)]) {
-                    obs_property_list_add_int(prop_input_format, formatDescription.UTF8String, device_format);
-                    [inputFormats addObject:@(formatSubType)];
-                }
-
-                if (![videoRanges containsObject:@(range_description)]) {
-                    obs_property_list_add_int(prop_video_range, range_description, device_range);
-                    [videoRanges addObject:@(range_description)];
-                }
-
-                int device_color_space = [OBSAVCapture colorspaceFromDescription:format.formatDescription];
-
-                if (![colorSpaces containsObject:@(device_color_space)]) {
-                    obs_property_list_add_int(prop_color_space,
-                                              [OBSAVCapture stringFromColorspace:device_color_space].UTF8String,
-                                              device_color_space);
-                    [colorSpaces addObject:@(device_color_space)];
-                }
-
-                if (!hasFoundColorSpace && device_color_space == color_space) {
-                    hasFoundColorSpace = YES;
-                }
-            } else {
-                FourCharCode formatSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
-
-                NSString *formatDescription = [OBSAVCapture stringFromSubType:formatSubType];
-                int device_format = [OBSAVCapture formatFromSubtype:formatSubType];
-
-                if (!hasFoundInputFormat && input_format == device_format) {
-                    hasFoundInputFormat = YES;
-                }
-
-                if (![inputFormats containsObject:@(formatSubType)]) {
-                    obs_property_list_add_int(prop_input_format, formatDescription.UTF8String, device_format);
-                    [inputFormats addObject:@(formatSubType)];
-                }
-            }
-
-            CMVideoDimensions formatDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
-
-            NSDictionary *resolutionData =
-                @{@"width": @(formatDimensions.width),
-                  @"height": @(formatDimensions.height)};
-
-            if (![resolutions containsObject:resolutionData]) {
-                [resolutions addObject:resolutionData];
-            }
-
-            if (!hasFoundResolution && formatDimensions.width == resolution.width &&
-                formatDimensions.height == resolution.height) {
-                hasFoundResolution = YES;
-            }
-
-            // Only iterate over available framerates if input format, color space, and resolution are matching
-            if (hasFoundInputFormat && hasFoundColorSpace && hasFoundResolution) {
-                CFComparisonResult isColorPrimaryMatch = kCFCompareEqualTo;
-
-                CFPropertyListRef colorPrimary = CMFormatDescriptionGetExtension(
-                    format.formatDescription, kCMFormatDescriptionExtension_ColorPrimaries);
-
-                if (colorPrimary) {
-                    isColorPrimaryMatch = CFStringCompare(colorPrimary, priorColorPrimary, 0);
-                }
-
-                if (isColorPrimaryMatch != kCFCompareEqualTo || !hasFoundFramerate) {
-                    for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges.reverseObjectEnumerator) {
-                        FourCharCode formatSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
-                        int device_format = [OBSAVCapture formatFromSubtype:formatSubType];
-
-                        if (input_format == device_format) {
-                            struct media_frames_per_second min_fps = {
-                                .numerator = (uint32_t) clamp_Uint(range.maxFrameDuration.timescale, 0, UINT32_MAX),
-                                .denominator = (uint32_t) clamp_Uint(range.maxFrameDuration.value, 0, UINT32_MAX)};
-                            struct media_frames_per_second max_fps = {
-                                .numerator = (uint32_t) clamp_Uint(range.minFrameDuration.timescale, 0, UINT32_MAX),
-                                .denominator = (uint32_t) clamp_Uint(range.minFrameDuration.value, 0, UINT32_MAX)};
-
-                            if (![frameRates containsObject:range]) {
-                                obs_property_frame_rate_fps_range_add(prop_framerate, min_fps, max_fps);
-                                [frameRates addObject:range];
-                            }
-
-                            if (!hasFoundFramerate && CMTimeCompare(range.maxFrameDuration, time) >= 0 &&
-                                CMTimeCompare(range.minFrameDuration, time) <= 0) {
-                                hasFoundFramerate = YES;
-                            }
-                        }
+        NSSortDescriptor *aspectSort = [[NSSortDescriptor alloc] initWithKey:@"aspectRatioComparisonValue"
+                                                                   ascending:false];
+        NSSortDescriptor *pixelBandwidthSort = [[NSSortDescriptor alloc] initWithKey:@"pixelBandwidthComparisonValue"
+                                                                           ascending:false];
+        NSSortDescriptor *bppSort = [[NSSortDescriptor alloc] initWithKey:@"bitsPerPixel" ascending:true];
+        NSArray<NSSortDescriptor *> *sortArray =
+            [NSArray arrayWithObjects:aspectSort, pixelBandwidthSort, bppSort, nil];
+        for (AVCaptureDeviceFormat *format in [device.formats sortedArrayUsingDescriptors:sortArray]) {
+            NSString *enumeratedFormatString = format.obsPropertyListDescription;
+            NSString *internalRepresentation = format.obsPropertyListInternalRepresentation;
+            obs_property_list_add_string(prop_format, enumeratedFormatString.UTF8String,
+                                         internalRepresentation.UTF8String);
+            if ([selectedFormatString isEqualToString:internalRepresentation]) {
+                for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
+                    struct media_frames_per_second min_fps = {
+                        .numerator = (uint32_t) clamp_Uint(range.maxFrameDuration.timescale, 0, UINT32_MAX),
+                        .denominator = (uint32_t) clamp_Uint(range.maxFrameDuration.value, 0, UINT32_MAX)};
+                    struct media_frames_per_second max_fps = {
+                        .numerator = (uint32_t) clamp_Uint(range.minFrameDuration.timescale, 0, UINT32_MAX),
+                        .denominator = (uint32_t) clamp_Uint(range.minFrameDuration.value, 0, UINT32_MAX)};
+
+                    if (![frameRates containsObject:range]) {
+                        obs_property_frame_rate_fps_range_add(prop_framerate, min_fps, max_fps);
+                        [frameRates addObject:range];
                     }
-
-                    priorColorPrimary = colorPrimary;
-                }
-            }
-        }
-
-        // Add resolutions in reverse order (formats reported by macOS are sorted with lowest resolution first)
-        for (NSDictionary *resolutionData in resolutions.reverseObjectEnumerator) {
-            NSError *error;
-            NSData *jsonData = [NSJSONSerialization dataWithJSONObject:resolutionData options:0 error:&error];
-
-            int width = [[resolutionData objectForKey:@"width"] intValue];
-            int height = [[resolutionData objectForKey:@"height"] intValue];
-
-            obs_property_list_add_string(
-                prop_resolution, [NSString stringWithFormat:@"%dx%d", width, height].UTF8String,
-                [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding].UTF8String);
-        }
-
-        // Add currently selected values in disabled state if they are not supported by the device
-        size_t index;
-
-        FourCharCode formatSubType = [OBSAVCapture fourCharCodeFromFormat:input_format withRange:video_range];
-        if (!hasFoundInputFormat) {
-            NSString *formatDescription = [OBSAVCapture stringFromSubType:formatSubType];
-
-            index = obs_property_list_add_int(prop_input_format, formatDescription.UTF8String, input_format);
-            obs_property_list_item_disable(prop_input_format, index, true);
-        }
-
-        if (!capture.isFastPath) {
-            if (!hasFoundVideoRange) {
-                int device_range;
-                const char *range_description;
-
-                if ([OBSAVCapture isFullRangeFormat:formatSubType]) {
-                    device_range = VIDEO_RANGE_FULL;
-                    range_description = av_capture_get_text("VideoRange.Full");
-                } else {
-                    device_range = VIDEO_RANGE_PARTIAL;
-                    range_description = av_capture_get_text("VideoRange.Partial");
                 }
-
-                index = obs_property_list_add_int(prop_video_range, range_description, device_range);
-                obs_property_list_item_disable(prop_video_range, index, true);
-            }
-
-            if (!hasFoundColorSpace) {
-                index = obs_property_list_add_int(
-                    prop_color_space, [OBSAVCapture stringFromColorspace:color_space].UTF8String, color_space);
-                obs_property_list_item_disable(prop_color_space, index, true);
             }
         }
-
-        if (!hasFoundResolution) {
-            NSDictionary *resolutionData = @{@"width": @(resolution.width), @"height": @(resolution.height)};
-
-            NSError *error;
-            NSData *jsonData = [NSJSONSerialization dataWithJSONObject:resolutionData options:0 error:&error];
-
-            index = obs_property_list_add_string(
-                prop_resolution, [NSString stringWithFormat:@"%dx%d", resolution.width, resolution.height].UTF8String,
-                [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding].UTF8String);
-            obs_property_list_item_disable(prop_resolution, index, true);
-        }
     }
+
     return true;
 }