瀏覽代碼

mac-avcapture: Update plugin to ObjC and modern APIs

Marks old AVFoundation capture source as deprecated.
PatTheMav 2 年之前
父節點
當前提交
e284a79b48

+ 41 - 5
plugins/mac-avcapture/CMakeLists.txt

@@ -1,12 +1,13 @@
 cmake_minimum_required(VERSION 3.24...3.25)
 
-add_library(mac-avcapture MODULE)
-add_library(OBS::avcapture ALIAS mac-avcapture)
+add_library(mac-avcapture-legacy MODULE)
+add_library(OBS::avcapture-legacy ALIAS mac-avcapture-legacy)
 
-target_sources(mac-avcapture PRIVATE av-capture.mm left-right.hpp scope-guard.hpp)
+target_sources(mac-avcapture-legacy PRIVATE legacy/av-capture.mm legacy/left-right.hpp legacy/scope-guard.hpp)
+target_include_directories(mac-avcapture-legacy PRIVATE legacy)
 
 target_link_libraries(
-  mac-avcapture
+  mac-avcapture-legacy
   PRIVATE OBS::libobs
           "$<LINK_LIBRARY:FRAMEWORK,AVFoundation.framework>"
           "$<LINK_LIBRARY:FRAMEWORK,Cocoa.framework>"
@@ -17,10 +18,45 @@ target_link_libraries(
 
 # cmake-format: off
 set_target_properties_obs(
-  mac-avcapture
+  mac-avcapture-legacy
   PROPERTIES FOLDER plugins
              PREFIX ""
              XCODE_ATTRIBUTE_CLANG_ENABLE_OBJC_ARC YES
              XCODE_ATTRIBUTE_CLANG_WARN_SUSPICIOUS_IMPLICIT_CONVERSION YES
              XCODE_ATTRIBUTE_GCC_WARN_SHADOW YES)
 # cmake-format: on
+
+add_library(mac-avcapture MODULE)
+add_library(OBS::avcapture ALIAS mac-avcapture)
+
+target_sources(
+  mac-avcapture
+  PRIVATE plugin-main.m
+          plugin-main.h
+          plugin-properties.m
+          plugin-properties.h
+          OBSAVCapture.m
+          OBSAVCapture.h
+          OBSAVCapturePresetInfo.m
+          OBSAVCapturePresetInfo.h)
+
+target_link_libraries(mac-avcapture PRIVATE OBS::libobs)
+
+# cmake-format: off
+set_target_properties_obs(
+  mac-avcapture
+  PROPERTIES FOLDER plugins
+             PREFIX ""
+             XCODE_ATTRIBUTE_CLANG_ENABLE_OBJC_ARC YES
+             XCODE_ATTRIBUTE_CLANG_WARN_SUSPICIOUS_IMPLICIT_CONVERSION YES
+             XCODE_ATTRIBUTE_GCC_WARN_SHADOW YES
+             XCODE_ATTRIBUTE_CLANG_ENABLE_MODULES YES
+             XCODE_ATTRIBUTE_CLANG_MODULES_AUTOLINK YES
+             XCODE_ATTRIBUTE_GCC_STRICT_ALIASING YES)
+
+
+string(TIMESTAMP CURRENT_YEAR "%Y")
+set_target_xcode_properties(
+  mac-avcapture
+  PROPERTIES INFOPLIST_KEY_NSHumanReadableCopyright "(c) 2023-${CURRENT_YEAR} Patrick Heyer")
+# cmake-format: on

+ 354 - 0
plugins/mac-avcapture/OBSAVCapture.h

@@ -0,0 +1,354 @@
+//
+//  OBSAVCapture.h
+//  mac-avcapture
+//
+//  Created by Patrick Heyer on 2023-03-07.
+//
+
+@import Foundation;
+@import AVFoundation;
+@import CoreMediaIO;
+
+#import "OBSAVCapturePresetInfo.h"
+
+// Import everything as libobs source data types are not available in a specific header.
+#import <obs.h>
+#import <pthread.h>
+#import <media-io/video-io.h>
+
+#pragma mark - Type aliases and type definitions
+
+typedef enum video_colorspace OBSAVCaptureColorSpace;
+typedef enum video_range_type OBSAVCaptureVideoRange;
+typedef enum video_format OBSAVCaptureVideoFormat;
+typedef struct obs_source_frame OBSAVCaptureVideoFrame;
+typedef struct obs_source_audio OBSAVCaptureAudioFrame;
+typedef struct gs_texture OBSAVCaptureTexture;
+typedef struct gs_effect OBSAVCaptureEffect;
+
+/// Enumeration of error states encountered in the capture callback
+typedef enum : NSUInteger {
+    OBSAVCaptureError_NoError,
+    OBSAVCaptureError_SampleBufferFormat,
+    OBSAVCaptureError_ColorSpace,
+    OBSAVCaptureError_AudioBuffer,
+} OBSAVCaptureError;
+
+/// C struct for interaction with obs-module functions
+typedef struct av_capture {
+    id capture;
+    IOSurfaceRef previousSurface;
+    IOSurfaceRef currentSurface;
+    OBSAVCaptureTexture *texture;
+    OBSAVCaptureEffect *effect;
+    OBSAVCaptureVideoFrame *videoFrame;
+    OBSAVCaptureAudioFrame *audioFrame;
+    NSRect frameSize;
+
+    pthread_mutex_t mutex;
+
+    void *settings;
+    void *source;
+    bool isFastPath;
+
+    OBSAVCaptureError lastError;
+    CMFormatDescriptionRef sampleBufferDescription;
+    OBSAVCaptureError lastAudioError;
+} OBSAVCaptureInfo;
+
+/// C struct for sample buffer validity checks in capture callback
+typedef struct av_capture_info {
+    OBSAVCaptureColorSpace colorSpace;
+    OBSAVCaptureVideoRange videoRange;
+    bool isValid;
+} OBSAVCaptureVideoInfo;
+
+#pragma mark - OBSAVCapture Class
+
+/// Video Capture implementation for [CoreMediaIO](https://developer.apple.com/documentation/coremediaio?language=objc)-based devices
+///
+/// Provides access to camera devices recognized by macOS via its [CoreMediaIO](https://developer.apple.com/documentation/coremediaio?language=objc) framework. Devices can be either entirely video-based or a "muxed" device that provides audio and video at the same time.
+///
+/// Devices can be configured either via [presets](https://developer.apple.com/documentation/avfoundation/avcapturesessionpreset?language=objc) (usually 3 quality-based presets in addition to resolution based presets). The resolution defined by the preset does not necessarily switch the actual device to the same resolution, instead the device is automatically switched to the best possible resolution and the [CMSampleBuffer](https://developer.apple.com/documentation/coremedia/cmsamplebuffer?language=objc) provided via [AVCaptureVideoDataOutput](https://developer.apple.com/documentation/avfoundation/avcapturevideodataoutput?language=objc) will be resized accordingly. If necessary the actual frame will be pillar-boxed to fit into a widescreen sample buffer in an attempt to fit the content into it.
+///
+/// Alternatively, devices can be configured manually by specifying resolution, frame-rate, color format and color space. If a device was **not** configured via a preset originally, the size of the [CMSampleBuffer](https://developer.apple.com/documentation/coremedia/cmsamplebuffer?language=objc) will be adjusted to the selected resolution.
+///
+/// > Important: If a preset was configured before, the resolution of the last valid preset-based buffer will be retained and the frame will be fit into it with the selected resolution.
+///
+/// If a device is switched back from manual configuration to a preset-based output, the preset's original settings will be restored, as device configuration is not mutually exclusive with the settings of a preset on macOS.
+@interface OBSAVCapture
+    : NSObject <AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>
+
+/// Bare initialiser for ``OBSAVCapture`` class.
+///
+/// > Tip: Use ``OBSAVCapture/initWithCaptureInfo:capture_info:`` instead.
+///
+/// - Returns: A new instance of ``OBSAVCapture`` without settings or a source attached and without an active [AVCaptureSession](https://developer.apple.com/documentation/avfoundation/avcapturesession?language=objc).
+- (instancetype)init;
+
+/// Creates new ``OBSAVCapture`` instance with provided ``mac_avcapture/OBSAVCaptureInfo`` struct. If settings and source pointers in the provided struct are valid, a new capture session will be created immediately and capture of a valid device will begin.
+///
+/// If the device specified in the settings is not valid (e.g., because the device has been disconnected in the meantime) the source will retain the settings and can be reconfigured. If no valid device is specified, the source will stay empty.
+/// - Parameters:
+///   - capture_info: ``OBSAVCaptureInfo`` struct containing source and settings pointers provided by ``libobs``
+/// - Returns: A new instance of an ``OBSAVCapture``
+- (instancetype)initWithCaptureInfo:(OBSAVCaptureInfo *)capture_info NS_DESIGNATED_INITIALIZER;
+
+#pragma mark - Capture Session Handling
+
+/// Creates a new [AVCaptureSession](https://developer.apple.com/documentation/avfoundation/avcapturesession?language=objc) for the device configured in the ``OBSAVCapture`` instance.
+/// - Parameters:
+///   - error: Optional pointer to a valid [NSError](https://developer.apple.com/documentation/foundation/nserror?language=objc) instance to retain possible errors that occurred
+/// - Returns: `YES` if session was created successfully, `NO` otherwise
+- (BOOL)createSession:(NSError **)error;
+
+/// Switches the active capture device used by ``OBSAVCapture`` instance.
+/// - Parameters:
+///   - uuid: UUID of new device to switch to (as provided by [CoreMediaIO](https://developer.apple.com/documentation/coremediaio?language=objc))
+///   - error: Optional pointer to a valid [NSError](https://developer.apple.com/documentation/foundation/nserror?language=objc) instance to retain possible errors that occurred
+/// - Returns: `YES` if the device was successfully switched, `NO` otherwise
+- (BOOL)switchCaptureDevice:(NSString *)uuid withError:(NSError **)error;
+
+/// Starts a capture session with the active capture device used by ``OBSAVCapture`` instance.
+- (void)startCaptureSession;
+
+/// Stops a capture session with the active capture device used by ``OBSAVCapture`` instance. Also sends an empty frame to clear any frames provided by the source.
+- (void)stopCaptureSession;
+
+/// Configures the current [AVCaptureSession](https://developer.apple.com/documentation/avfoundation/avcapturesession?language=objc) to use the [AVCaptureSessionPreset](https://developer.apple.com/documentation/avfoundation/avcapturesessionpreset?language=objc) value selected in the source's property window.
+/// - Parameter error: Optional pointer to a valid [NSError](https://developer.apple.com/documentation/foundation/nserror?language=objc) instance to retain possible errors that occurred
+/// - Returns: `YES` if configuration finished successfully, `NO` otherwise
+- (BOOL)configureSessionWithPreset:(AVCaptureSessionPreset)preset withError:(NSError **)error;
+
+/// Configures the device used in the current [AVCaptureSession](https://developer.apple.com/documentation/avfoundation/avcapturesession?language=objc) directly by attempting to apply the values selected in the source's property window.
+///
+/// The available values are commonly filled by all settings available for the current [AVCaptureDevice](https://developer.apple.com/documentation/avfoundation/avcapturedevice?language=objc). This includes:
+/// * Resolution
+/// * Frame rate
+/// * Color Format
+/// * Color Space
+///
+/// If the combination of property values read from the settings does not match any format supported by the [AVCaptureDevice](https://developer.apple.com/documentation/avfoundation/avcapturedevice?language=objc), the session will not be configured and if a valid [AVCaptureSession](https://developer.apple.com/documentation/avfoundation/avcapturesession?language=objc) exist, it will be kept active.
+/// - Parameter error: Optional pointer to a valid [NSError](https://developer.apple.com/documentation/foundation/nserror?language=objc) instance to retain possible errors that occurred
+/// - Returns: `YES` if configuration finished successfully, `NO` otherwise
+- (BOOL)configureSession:(NSError **)error;
+
+/// Triggers an update of the current ``OBSAVCapture`` source using the property values currently set up on the source.
+///
+/// The function will automatically call ``switchCaptureDevice:withError:`` if the device property was changed, and also call ``configureSession:`` or ``configureSessionWithPreset:withError:`` based on the value of the associated source property.
+///
+/// A new [AVCaptureSession](https://developer.apple.com/documentation/avfoundation/avcapturesession?language=objc) will be created and started to reflect any changes.
+///
+/// - Parameter error: Optional pointer to a valid [NSError](https://developer.apple.com/documentation/foundation/nserror?language=objc) instance to retain possible errors that occurred
+/// - Returns: `YES` if session was updated successfully, `NO` otherwise
+- (BOOL)updateSessionwithError:(NSError **)error;
+
+#pragma mark - OBS Settings Helpers
+
+/// Reads source dimensions from user settings and converts them into a [CMVideoDimensions](https://developer.apple.com/documentation/coremedia/cmvideodimensions?language=objc) struct for convenience when interacting with the [CoreMediaIO](https://developer.apple.com/documentation/coremediaio?language=objc) framework.
+/// - Parameter settings: Pointer to settings struct used by ``libobs``
+/// - Returns: [CMVideoDimensions](https://developer.apple.com/documentation/coremedia/cmvideodimensions?language=objc) struct with resolution from user settings
++ (CMVideoDimensions)dimensionsFromSettings:(void *)settings;
+
+/// Reads a C-character pointer from user settings and converts it into an [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance.
+/// - Parameters:
+///   - settings: Pointer to user settings struct used by ``libobs``
+///   - setting: String identifier for setting
+/// - Returns: New [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance created from user setting if setting represented a valid C character pointer.
++ (NSString *)stringFromSettings:(void *)settings withSetting:(NSString *)setting;
+
+/// Reads a C-character pointer from user settings and converts it into an [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance.
+/// - Parameters:
+///   - settings: Pointer to user settings struct used by ``libobs``
+///   - setting: String identifier for setting
+///   - defaultValue: Optional fallback value to use if C-character pointer read from settings is invalid
+/// - Returns: New [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance created from user setting if setting represented a valid C character pointer.
++ (NSString *)stringFromSettings:(void *)settings withSetting:(NSString *)setting withDefault:(NSString *)defaultValue;
+
+#pragma mark - Format Conversion Helpers
+
+/// Converts a FourCC-based color format identifier into a human-readable string represented as an [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance.
+/// - Parameter subtype: FourCC-based color format identifier
+/// - Returns: Human-readable representation of the color format
++ (NSString *)stringFromSubType:(FourCharCode)subtype;
+
+/// Converts a ``libobs``-based color space value into a human-readable string represented as an [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance.
+/// - Parameter colorspace: ``libobs``-based colorspace value
+/// - Returns: Human-readable representation of the color space
++ (NSString *)stringFromColorspace:(enum video_colorspace)colorspace;
+
+/// Converts a ``libobs``-based video range value into a human-readable string represented as an [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance.
+/// - Parameter videoRange: ``libobs``-based video range value
+/// - Returns: Human-readable representation of the video range
++ (NSString *)stringFromVideoRange:(enum video_range_type)videoRange;
+
+/// Converts a FourCC value into a human-readable string represented as an [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance.
+/// - Parameter fourCharCode: Arbitrary FourCC code in big-endian format
+/// - Returns: Human-readable representation of the FourCC code
++ (NSString *)stringFromFourCharCode:(OSType)fourCharCode;
+
+/// Converts a [AVCaptureSessionPreset](https://developer.apple.com/documentation/avfoundation/avcapturesessionpreset?language=objc) into a human-readable string represented as an [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) instance.
+///
+/// Supported presets include:
+/// * High preset
+/// * Medium preset
+/// * Low preset
+/// * Common resolutions ranging from 320x240 up to and including 3840x2160 (4k)
+/// - Parameter preset: Supported [AVCaptureSessionPreset](https://developer.apple.com/documentation/avfoundation/avcapturesessionpreset?language=objc)
+/// - Returns: Human-readable representation of the [AVCaptureSessionPreset](https://developer.apple.com/documentation/avfoundation/avcapturesessionpreset?language=objc)
++ (NSString *)stringFromCapturePreset:(AVCaptureSessionPreset)preset;
+
+/// Converts an [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) into a big-endian FourCC value.
+/// - Parameter codeString: [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc) representation of a big-endian FourCC value
+/// - Returns: Big-endian FourCC value of the provided [NSString](https://developer.apple.com/documentation/foundation/nsstring?language=objc)
++ (FourCharCode)fourCharCodeFromString:(NSString *)codeString;
+
+/// Checks whether the provided colorspace value is actually supported by ``libobs``.
+/// - Parameter colorSpace: ``libobs``-based color-space value
+/// - Returns: `YES` if provided color space value is supported, `NO` otherwise
++ (BOOL)isValidColorspace:(enum video_colorspace)colorSpace;
+
+/// Checks whether the provided video range value is actually supported by ``libobs``.
+/// - Parameter videoRange: ``libobs``-based video range value
+/// - Returns: `YES` if provided video range value is supported, `NO` otherwise
++ (BOOL)isValidVideoRange:(enum video_range_type)videoRange;
+
+/// Checks whether the provided FourCC-based pixel format is a full video range variant.
+/// - Parameter pixelFormat: FourCC code of the pixel format in big-endian format
+/// - Returns: `YES` if provided pixel format has full video range, `NO` otherwise
++ (BOOL)isFullRangeFormat:(FourCharCode)pixelFormat;
+
+/// Converts a FourCC-based media subtype in big-endian format to a video format understood by ``libobs``.
+/// - Parameter subtype: FourCC code of the media subtype in big-endian format
+/// - Returns: Video format identifier understood by ``libobs``
++ (OBSAVCaptureVideoFormat)formatFromSubtype:(FourCharCode)subtype;
+
+/// Converts a ``libobs``-based video format into its FourCC-based media subtype in big-endian format
+/// - Parameter format: ``libobs``-based video format
+/// - Returns: FourCC-based media subtype in big-endian format
++ (FourCharCode)fourCharCodeFromFormat:(OBSAVCaptureVideoFormat)format;
+
+/// Converts a ``libobs``-based video format with a provided video range into its FourCC-based media subtype in big-endian format
+/// - Parameters:
+///   -  format: ``libobs``-based video format
+///   - videoRange: ``libobs``-based video range
+/// - Returns: FourCC-based media subtype in big-endian format
+
++ (FourCharCode)fourCharCodeFromFormat:(OBSAVCaptureVideoFormat)format withRange:(enum video_range_type)videoRange;
+
+/// Converts a [CMFormatDescription](https://developer.apple.com/documentation/coremedia/cmformatdescription?language=objc) into a ``libobs``-based color space value
+/// - Parameter description: A [CMFormatDescription](https://developer.apple.com/documentation/coremedia/cmformatdescription?language=objc) media format descriptor
+/// - Returns: A ``libobs``-based color space value
++ (OBSAVCaptureColorSpace)colorspaceFromDescription:(CMFormatDescriptionRef)description;
+
+#pragma mark - Notification Handlers
+
+/// Notification center callback function for [AVCaptureDeviceWasDisconnected](https://developer.apple.com/documentation/avfoundation/avcapturedevicewasdisconnectednotification?language=objc) notification.
+/// - Parameter notification: [NSNotification](https://developer.apple.com/documentation/foundation/nsnotification?language=objc) container for notification
+- (void)deviceDisconnected:(NSNotification *)notification;
+
+/// Notification center callback function for [AVCaptureDeviceWasConnected](https://developer.apple.com/documentation/avfoundation/avcapturedevicewasconnectednotification?language=objc) notification.
+/// - Parameter notification: [NSNotification](https://developer.apple.com/documentation/foundation/nsnotification?language=objc) container for notification
+- (void)deviceConnected:(NSNotification *)notification;
+
+#pragma mark - Log Helpers
+
+/// ObjC-based wrapper for ``libobs`` logging. This instance function automatically adds the localized ``OBSAVCapture`` source name to all provided log strings.
+///
+/// The signature for string composition is similar to [NSString:stringWithFormat](https://developer.apple.com/documentation/foundation/nsstring/1497275-stringwithformat?language=objc), accepting a `printf`-like format string with added support for ObjC types:
+///
+/// ```objc
+/// [self AVCaptureLog:LOG_WARNING withFormat:@"%@ - %i", @"Some String", 12];
+/// ```
+///
+/// - Parameters:
+///   - logLevel: ``libobs``-based log severity level
+///   - format: [NSString:stringWithFormat:](https://developer.apple.com/documentation/foundation/nsstring/1497275-stringwithformat?language=objc)-compatible format string
+- (void)AVCaptureLog:(int)logLevel withFormat:(NSString *)format, ...;
+
+/// ObjC-based wrapper for ``libobs`` logging. This class function is available for ObjC code without access to an existing ``OBSAVCapture`` instance.
+///
+/// The signature for string composition is similar to [NSString:stringWithFormat](https://developer.apple.com/documentation/foundation/nsstring/1497275-stringwithformat?language=objc), accepting a `printf`-like format string with added support for ObjC types:
+///
+/// ```objc
+/// [self AVCaptureLog:LOG_WARNING withFormat:@"%@ - %i", @"Some String", 12];
+/// ```
+///
+/// - Parameters:
+///   - logLevel: ``libobs``-based log severity level
+///   - format: [NSString:stringWithFormat:](https://developer.apple.com/documentation/foundation/nsstring/1497275-stringwithformat?language=objc)-compatible format string
++ (void)AVCaptureLog:(int)logLevel withFormat:(NSString *)format, ...;
+
+#pragma mark - Instance Properties
+/// Internal reference to ``OBSAVCaptureInfo`` struct created by ``libobs`` module code.
+@property (nonatomic) OBSAVCaptureInfo *captureInfo;
+
+/// ``OBSVideoCaptureVideoInfo`` struct used to hold state information of the video configuration
+@property OBSAVCaptureVideoInfo videoInfo;
+
+/// ``libobs``-based frame struct represented by a [NSMutableData](https://developer.apple.com/documentation/foundation/nsmutabledata?language=objc) instance
+@property NSMutableData *obsFrame;
+
+/// ``libobs``-based audio frame struct represented by a [NSMutableData](https://developer.apple.com/documentation/foundation/nsmutabledata?language=objc) instance
+@property NSMutableData *obsAudioFrame;
+
+/// Dictionary of human-readable descriptions of [AVCaptureSession](https://developer.apple.com/documentation/avfoundation/avcapturesession?language=objc) values
+@property NSDictionary<NSString *, NSString *> *presetList;
+
+/// UUID of [AVCaptureDevice](https://developer.apple.com/documentation/avfoundation/avcapturedevice?language=objc) currently used by the ``OBSAVCapture`` instance
+@property NSString *deviceUUID;
+
+/// Instance of ``OBSAVCapturePresetInfo`` to store format and frame rate of a [AVCaptureSessionPreset](https://developer.apple.com/documentation/avfoundation/avcapturesessionpreset?language=objc).
+@property OBSAVCapturePresetInfo *presetFormat;
+
+/// Instance of [AVCaptureSession](https://developer.apple.com/documentation/avfoundation/avcapturesession?language=objc) used by ``OBSAVCapture``
+@property AVCaptureSession *session;
+
+/// Instance of [AVCaptureDeviceInput](https://developer.apple.com/documentation/avfoundation/avcapturesession?language=objc) used by ``OBSAVCapture``
+@property AVCaptureDeviceInput *deviceInput;
+
+/// Instance of [AVCaptureVideoDataOutput](https://developer.apple.com/documentation/avfoundation/avcapturevideodataoutput?language=objc) used by ``OBSAVCapture``
+@property AVCaptureVideoDataOutput *videoOutput;
+
+/// Instance of [AVCaptureAudioDataOutput](https://developer.apple.com/documentation/avfoundation/avcaptureaudiodataoutput?language=objc) used by ``OBSAVCapture``
+@property AVCaptureAudioDataOutput *audioOutput;
+
+/// [Dispatch Queue](https://developer.apple.com/documentation/dispatch/dispatch_queue?language=objc) used by the [AVCaptureVideoDataOutput](https://developer.apple.com/documentation/avfoundation/avcapturevideodataoutput?language=objc) instance
+@property dispatch_queue_t videoQueue;
+/// [Dispatch Queue](https://developer.apple.com/documentation/dispatch/dispatch_queue?language=objc) used by the [AVCaptureAudioDataOutput](https://developer.apple.com/documentation/avfoundation/avcaptureaudiodataoutput?language=objc) instance
+@property dispatch_queue_t audioQueue;
+
+/// [Dispatch Queue](https://developer.apple.com/documentation/dispatch/dispatch_queue?language=objc) used by asynchronous blocks for functions that are required to not block the main thread.
+@property (nonatomic) dispatch_queue_t sessionQueue;
+
+/// `YES` if the device's active format is set and is locked against configuration changes, `NO` otherwise
+@property BOOL isDeviceLocked;
+
+/// `YES` if the device's configuration is based on a preset, `NO` otherwise
+@property BOOL isPresetBased;
+
+/// `YES` if the ``OBSAVCapture`` instance handles frame rendering by itself (used by the Capture Card variant), `NO` otherwise
+@property (readonly) BOOL isFastPath;
+
+/// Error domain identifier used for [NSError](https://developer.apple.com/documentation/foundation/nserror?language=objc) instances
+@property (readonly) NSString *errorDomain;
+
+@end
+
+#pragma mark - Static helper functions
+
+/// Clamp an unsigned 64-bit integer value to the specified minimum and maximum values.
+/// - Returns: `min` if `value < min`, `max` if `value > max`, otherwise `value` itself.
+static inline UInt64 clamp_Uint(UInt64 value, UInt64 min, UInt64 max)
+{
+    const UInt64 clamped = value < min ? min : value;
+
+    // Return the clamped intermediate, not the raw input, so the lower bound is honored.
+    return clamped > max ? max : clamped;
+}
+
+/// Clamp a signed 64-bit integer value to the specified minimum and maximum values.
+/// - Returns: `min` if `value < min`, `max` if `value > max`, otherwise `value` itself.
+static inline SInt64 clamp_Sint(SInt64 value, SInt64 min, SInt64 max)
+{
+    const SInt64 clamped = value < min ? min : value;
+
+    // Return the clamped intermediate, not the raw input, so the lower bound is honored.
+    return clamped > max ? max : clamped;
+}

+ 1373 - 0
plugins/mac-avcapture/OBSAVCapture.m

@@ -0,0 +1,1373 @@
+//
+//  OBSAVCapture.m
+//  mac-avcapture
+//
+//  Created by Patrick Heyer on 2023-03-07.
+//
+
+#import "OBSAVCapture.h"
+
+@implementation OBSAVCapture
+
+/// Bare initialiser — forwards to the designated initialiser with no capture info,
+/// yielding an instance without settings, source, or an active capture session.
+- (instancetype)init
+{
+    return [self initWithCaptureInfo:nil];
+}
+
+/// Designated initialiser. Enables CoreMediaIO screen-capture devices, registers
+/// device (dis)connection observers, and — if valid capture info is provided —
+/// asynchronously creates, configures, and starts a capture session on the
+/// serial session queue so the main thread is never blocked.
+- (instancetype)initWithCaptureInfo:(OBSAVCaptureInfo *)capture_info
+{
+    self = [super init];
+
+    if (self) {
+        // Opt in to CoreMediaIO "screen capture" devices (e.g. tethered iOS devices).
+        CMIOObjectPropertyAddress propertyAddress = {kCMIOHardwarePropertyAllowScreenCaptureDevices,
+                                                     kCMIOObjectPropertyScopeGlobal, kCMIOObjectPropertyElementMaster};
+
+        UInt32 allow = 1;
+        CMIOObjectSetPropertyData(kCMIOObjectSystemObject, &propertyAddress, 0, NULL, sizeof(allow), &allow);
+
+        _errorDomain = @"com.obsproject.obs-studio.av-capture";
+
+        _presetList = @{
+            AVCaptureSessionPresetLow: @"Low",
+            AVCaptureSessionPresetMedium: @"Medium",
+            AVCaptureSessionPresetHigh: @"High",
+            AVCaptureSessionPreset320x240: @"320x240",
+            AVCaptureSessionPreset352x288: @"352x288",
+            AVCaptureSessionPreset640x480: @"640x480",
+            AVCaptureSessionPreset960x540: @"960x540",
+            AVCaptureSessionPreset1280x720: @"1280x720",
+            AVCaptureSessionPreset1920x1080: @"1920x1080",
+            AVCaptureSessionPreset3840x2160: @"3840x2160",
+        };
+
+        _sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL);
+
+        OBSAVCaptureVideoInfo newInfo = {0};
+        _videoInfo = newInfo;
+
+        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(deviceDisconnected:)
+                                                     name:AVCaptureDeviceWasDisconnectedNotification
+                                                   object:nil];
+
+        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(deviceConnected:)
+                                                     name:AVCaptureDeviceWasConnectedNotification
+                                                   object:nil];
+
+        if (capture_info) {
+            _captureInfo = capture_info;
+
+            NSString *UUID = [OBSAVCapture stringFromSettings:_captureInfo->settings withSetting:@"device"];
+            NSString *presetName = [OBSAVCapture stringFromSettings:_captureInfo->settings withSetting:@"preset"];
+
+            BOOL isPresetEnabled = obs_data_get_bool(_captureInfo->settings, "use_preset");
+
+            if (capture_info->isFastPath) {
+                _isFastPath = YES;
+                _isPresetBased = NO;
+            } else {
+                BOOL isBufferingEnabled = obs_data_get_bool(_captureInfo->settings, "buffering");
+
+                obs_source_set_async_unbuffered(_captureInfo->source, !isBufferingEnabled);
+            }
+
+            // Weak capture avoids a retain cycle between the instance and the block
+            // queued on its own session queue.
+            __weak OBSAVCapture *weakSelf = self;
+
+            dispatch_async(_sessionQueue, ^{
+                NSError *error = nil;
+
+                OBSAVCapture *instance = weakSelf;
+
+                if ([instance createSession:&error]) {
+                    if ([instance switchCaptureDevice:UUID withError:nil]) {
+                        BOOL isSessionConfigured = NO;
+
+                        if (isPresetEnabled) {
+                            isSessionConfigured = [instance configureSessionWithPreset:presetName withError:nil];
+                        } else {
+                            isSessionConfigured = [instance configureSession:nil];
+                        }
+
+                        if (isSessionConfigured) {
+                            [instance startCaptureSession];
+                        }
+                    }
+                } else {
+                    // Pass the description as a format *argument*, never as the format
+                    // string itself — it may contain '%' sequences.
+                    [instance AVCaptureLog:LOG_ERROR withFormat:@"%@", error.localizedDescription];
+                }
+            });
+        }
+    }
+
+    return self;
+}
+
+#pragma mark - Capture Session Handling
+
+/// Creates the AVCaptureSession together with its video/audio data outputs and
+/// their dispatch queues. On any failure an NSError is stored in `error` (if a
+/// pointer was provided) and `NO` is returned; `YES` on success.
+- (BOOL)createSession:(NSError *__autoreleasing *)error
+{
+    AVCaptureSession *session = [[AVCaptureSession alloc] init];
+
+    // Validate the session *before* touching it — the original ordering sent
+    // beginConfiguration to a potentially nil session ahead of this check.
+    if (!session) {
+        if (error) {
+            NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create AVCaptureSession"};
+
+            *error = [NSError errorWithDomain:self.errorDomain code:-101 userInfo:userInfo];
+        }
+
+        return NO;
+    }
+
+    AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
+
+    if (!videoOutput) {
+        if (error) {
+            NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create AVCaptureVideoDataOutput"};
+            *error = [NSError errorWithDomain:self.errorDomain code:-102 userInfo:userInfo];
+        }
+
+        return NO;
+    }
+
+    AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
+
+    if (!audioOutput) {
+        if (error) {
+            NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create AVCaptureAudioDataOutput"};
+            *error = [NSError errorWithDomain:self.errorDomain code:-103 userInfo:userInfo];
+        }
+
+        return NO;
+    }
+
+    dispatch_queue_t videoQueue = dispatch_queue_create(nil, nil);
+
+    if (!videoQueue) {
+        if (error) {
+            NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create video dispatch queue"};
+
+            *error = [NSError errorWithDomain:self.errorDomain code:-104 userInfo:userInfo];
+        }
+
+        return NO;
+    }
+
+    dispatch_queue_t audioQueue = dispatch_queue_create(nil, nil);
+
+    if (!audioQueue) {
+        if (error) {
+            NSDictionary *userInfo = @{NSLocalizedDescriptionKey: @"Failed to create audio dispatch queue"};
+
+            *error = [NSError errorWithDomain:self.errorDomain code:-105 userInfo:userInfo];
+        }
+
+        return NO;
+    }
+
+    // Begin configuration only after every collaborator exists, so this
+    // beginConfiguration is always balanced by the commitConfiguration below
+    // (the original left it unbalanced on each early-error return).
+    [session beginConfiguration];
+
+    if ([session canAddOutput:videoOutput]) {
+        [session addOutput:videoOutput];
+        [videoOutput setSampleBufferDelegate:self queue:videoQueue];
+    }
+
+    if ([session canAddOutput:audioOutput]) {
+        [session addOutput:audioOutput];
+        [audioOutput setSampleBufferDelegate:self queue:audioQueue];
+    }
+
+    [session commitConfiguration];
+
+    self.session = session;
+    self.videoOutput = videoOutput;
+    self.videoQueue = videoQueue;
+    self.audioOutput = audioOutput;
+    self.audioQueue = audioQueue;
+
+    return YES;
+}
+
+/// Switches the capture session over to the device identified by the given
+/// unique ID. Any currently attached device input is torn down first (its
+/// configuration lock released). Passing an empty or unknown UUID deselects
+/// the device; NO is returned in that case and on any configuration failure.
+- (BOOL)switchCaptureDevice:(NSString *)uuid withError:(NSError *__autoreleasing *)error
+{
+    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:uuid];
+
+    // Tear down the existing input when a device is already active or the
+    // requested device could not be resolved.
+    if (self.deviceInput.device || !device) {
+        [self stopCaptureSession];
+        [self.session removeInput:self.deviceInput];
+
+        [self.deviceInput.device unlockForConfiguration];
+        self.deviceInput = nil;
+        self.isDeviceLocked = NO;
+    }
+
+    if (!device) {
+        if (uuid.length < 1) {
+            // Empty UUID means the user explicitly selected "no device".
+            [self AVCaptureLog:LOG_INFO withFormat:@"No device selected"];
+            self.deviceUUID = uuid;
+            return NO;
+        } else {
+            [self AVCaptureLog:LOG_WARNING withFormat:@"Unable to initialize device with unique ID '%@'", uuid];
+            return NO;
+        }
+    }
+
+    // Persist the resolved device name/ID back into the source settings.
+    const char *deviceName = device.localizedName.UTF8String;
+    obs_data_set_string(self.captureInfo->settings, "device_name", deviceName);
+    obs_data_set_string(self.captureInfo->settings, "device", device.uniqueID.UTF8String);
+
+    [self AVCaptureLog:LOG_INFO withFormat:@"Selected device '%@'", device.localizedName];
+
+    self.deviceUUID = device.uniqueID;
+
+    BOOL isAudioSupported = [device hasMediaType:AVMediaTypeAudio] || [device hasMediaType:AVMediaTypeMuxed];
+
+    obs_source_set_audio_active(self.captureInfo->source, isAudioSupported);
+
+    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:error];
+
+    if (!deviceInput) {
+        return NO;
+    }
+
+    // Warn about system-level video effects that alter the raw camera image;
+    // each is gated on the OS version that introduced the respective API.
+    if (@available(macOS 12.0, *)) {
+        if (device.portraitEffectActive) {
+            [self AVCaptureLog:LOG_WARNING withFormat:@"Portrait effect is active on selected device"];
+        }
+    }
+
+    if (@available(macOS 12.3, *)) {
+        if (device.centerStageActive) {
+            [self AVCaptureLog:LOG_WARNING withFormat:@"Center Stage effect is active on selected device"];
+        }
+    }
+
+    if (@available(macOS 13.0, *)) {
+        if (device.studioLightActive) {
+            [self AVCaptureLog:LOG_WARNING withFormat:@"Studio Light effect is active on selected device"];
+        }
+    }
+
+    [self.session beginConfiguration];
+
+    if ([self.session canAddInput:deviceInput]) {
+        [self.session addInput:deviceInput];
+        self.deviceInput = deviceInput;
+    } else {
+        if (error) {
+            NSDictionary *userInfo = @{
+                NSLocalizedDescriptionKey: [NSString
+                    stringWithFormat:@"Unable to add device '%@' as deviceInput to capture session", self.deviceUUID]
+            };
+
+            *error = [NSError errorWithDomain:self.errorDomain code:-107 userInfo:userInfo];
+        }
+
+        [self.session commitConfiguration];
+        return NO;
+    }
+
+    AVCaptureDeviceFormat *deviceFormat = device.activeFormat;
+
+    CMMediaType mediaType = CMFormatDescriptionGetMediaType(deviceFormat.formatDescription);
+
+    if (mediaType != kCMMediaType_Video && mediaType != kCMMediaType_Muxed) {
+        if (error) {
+            NSDictionary *userInfo = @{
+                NSLocalizedDescriptionKey: [NSString stringWithFormat:@"CMMediaType '%@' is not supported",
+                                                                      [OBSAVCapture stringFromFourCharCode:mediaType]]
+            };
+            *error = [NSError errorWithDomain:self.errorDomain code:-108 userInfo:userInfo];
+        }
+
+        [self.session removeInput:deviceInput];
+        [self.session commitConfiguration];
+        return NO;
+    }
+
+    if (self.isFastPath) {
+        // Fast (IOSurface) path: always request BGRA from the output.
+        // NOTE(review): assigning nil first appears intended to reset the
+        // output to its device-native defaults before re-applying settings —
+        // confirm against AVCaptureVideoDataOutput documentation.
+        self.videoOutput.videoSettings = nil;
+
+        NSMutableDictionary *videoSettings =
+            [NSMutableDictionary dictionaryWithDictionary:self.videoOutput.videoSettings];
+
+        FourCharCode targetPixelFormatType = kCVPixelFormatType_32BGRA;
+
+        [videoSettings setObject:@(targetPixelFormatType)
+                          forKey:(__bridge NSString *) kCVPixelBufferPixelFormatTypeKey];
+
+        self.videoOutput.videoSettings = videoSettings;
+    } else {
+        // Async path: keep the device-native pixel format when OBS can
+        // consume it directly, otherwise fall back to BGRA conversion.
+        self.videoOutput.videoSettings = nil;
+
+        FourCharCode subType = [[self.videoOutput.videoSettings
+            objectForKey:(__bridge NSString *) kCVPixelBufferPixelFormatTypeKey] unsignedIntValue];
+
+        if ([OBSAVCapture formatFromSubtype:subType] != VIDEO_FORMAT_NONE) {
+            [self AVCaptureLog:LOG_DEBUG
+                    withFormat:@"Using native fourcc '%@'", [OBSAVCapture stringFromFourCharCode:subType]];
+        } else {
+            [self AVCaptureLog:LOG_DEBUG withFormat:@"Using fallback fourcc '%@' ('%@', 0x%08x unsupported)",
+                                                    [OBSAVCapture stringFromFourCharCode:kCVPixelFormatType_32BGRA],
+                                                    [OBSAVCapture stringFromFourCharCode:subType], subType];
+
+            NSMutableDictionary *videoSettings =
+                [NSMutableDictionary dictionaryWithDictionary:self.videoOutput.videoSettings];
+
+            [videoSettings setObject:@(kCVPixelFormatType_32BGRA)
+                              forKey:(__bridge NSString *) kCVPixelBufferPixelFormatTypeKey];
+
+            self.videoOutput.videoSettings = videoSettings;
+        }
+    }
+
+    [self.session commitConfiguration];
+
+    return YES;
+}
+
+/// Starts the capture session unless it is already delivering frames.
+- (void)startCaptureSession
+{
+    BOOL isAlreadyRunning = self.session.running;
+
+    if (!isAlreadyRunning) {
+        [self.session startRunning];
+    }
+}
+
+/// Stops a running capture session and releases any frame data still owned
+/// by the source: the GPU texture and both IOSurfaces on the fast path, or
+/// the last async video frame otherwise.
+- (void)stopCaptureSession
+{
+    if (self.session.running) {
+        [self.session stopRunning];
+    }
+
+    if (!self.captureInfo->isFastPath) {
+        // Async path: push an empty frame so OBS clears the last image.
+        obs_source_output_video(self.captureInfo->source, NULL);
+        return;
+    }
+
+    if (self.captureInfo->texture) {
+        obs_enter_graphics();
+        gs_texture_destroy(self.captureInfo->texture);
+        obs_leave_graphics();
+
+        self.captureInfo->texture = NULL;
+    }
+
+    // Balance the use-count increment and retain taken when each surface
+    // was received from the capture output.
+    if (self.captureInfo->currentSurface) {
+        IOSurfaceDecrementUseCount(self.captureInfo->currentSurface);
+        CFRelease(self.captureInfo->currentSurface);
+        self.captureInfo->currentSurface = NULL;
+    }
+
+    if (self.captureInfo->previousSurface) {
+        IOSurfaceDecrementUseCount(self.captureInfo->previousSurface);
+        CFRelease(self.captureInfo->previousSurface);
+        self.captureInfo->previousSurface = NULL;
+    }
+}
+
+/// Configures the capture session using an AVCaptureSessionPreset, restoring
+/// any device format/frame-rate state that a prior manual configuration
+/// saved, and releasing the device configuration lock if held.
+///
+/// Returns NO (with an NSError via the out-parameter) when no device is
+/// attached, the device does not support the preset, or the session cannot
+/// switch to it.
+///
+/// Fix: canSetSessionPreset: was previously checked a second time after
+/// unlocking the device; if that redundant inner check ever failed, the
+/// preset assignment was silently skipped while YES was still returned.
+- (BOOL)configureSessionWithPreset:(AVCaptureSessionPreset)preset withError:(NSError *__autoreleasing *)error
+{
+    if (!self.deviceInput.device) {
+        if (error) {
+            NSDictionary *userInfo =
+                @{NSLocalizedDescriptionKey: @"Unable to set session preset without capture device"};
+
+            *error = [NSError errorWithDomain:self.errorDomain code:-108 userInfo:userInfo];
+        }
+        return NO;
+    }
+
+    if (![self.deviceInput.device supportsAVCaptureSessionPreset:preset]) {
+        if (error) {
+            NSDictionary *userInfo = @{
+                NSLocalizedDescriptionKey: [NSString stringWithFormat:@"Preset %@ not supported by device %@",
+                                                                      [OBSAVCapture stringFromCapturePreset:preset],
+                                                                      self.deviceInput.device.localizedName]
+            };
+
+            *error = [NSError errorWithDomain:self.errorDomain code:-201 userInfo:userInfo];
+        }
+
+        return NO;
+    }
+
+    if (![self.session canSetSessionPreset:preset]) {
+        if (error) {
+            NSDictionary *userInfo = @{
+                NSLocalizedDescriptionKey: [NSString stringWithFormat:@"Preset %@ not supported by capture session",
+                                                                      [OBSAVCapture stringFromCapturePreset:preset]]
+            };
+
+            *error = [NSError errorWithDomain:self.errorDomain code:-202 userInfo:userInfo];
+        }
+
+        return NO;
+    }
+
+    if (self.isDeviceLocked) {
+        // A manual configuration locked the device earlier; when switching
+        // back to the same preset, restore the format and frame durations
+        // that were saved before the manual configuration took over.
+        if ([preset isEqualToString:self.session.sessionPreset]) {
+            if (self.deviceInput.device.activeFormat) {
+                self.deviceInput.device.activeFormat = self.presetFormat.activeFormat;
+                self.deviceInput.device.activeVideoMinFrameDuration = self.presetFormat.minFrameRate;
+                self.deviceInput.device.activeVideoMaxFrameDuration = self.presetFormat.maxFrameRate;
+            }
+            self.presetFormat = nil;
+        }
+
+        [self.deviceInput.device unlockForConfiguration];
+        self.isDeviceLocked = NO;
+    }
+
+    self.session.sessionPreset = preset;
+
+    self.isPresetBased = YES;
+    return YES;
+}
+
+/// Manually configures the capture device (bypassing session presets) from
+/// the source settings: resolution, frame rate, pixel format, color space
+/// and video range. On the fast path these parameters are derived from the
+/// device's active format instead of the settings.
+///
+/// Returns NO when the settings are invalid, no matching device format
+/// exists, or the device cannot be locked for configuration.
+///
+/// Fixes: the lock-failure path previously returned without committing the
+/// configuration transaction opened just above, leaving the session in a
+/// permanently "begun" state; the log message typo "devie" is corrected; a
+/// no-op self-assignment of inputFourCC is removed.
+- (BOOL)configureSession:(NSError *__autoreleasing *)error
+{
+    int videoRange;
+    int colorSpace;
+    FourCharCode inputFourCC;
+
+    if (!self.isFastPath) {
+        videoRange = (int) obs_data_get_int(self.captureInfo->settings, "video_range");
+
+        if (![OBSAVCapture isValidVideoRange:videoRange]) {
+            [self AVCaptureLog:LOG_WARNING withFormat:@"Unsupported video range: %d", videoRange];
+            return NO;
+        }
+
+        int inputFormat;
+        inputFormat = (int) obs_data_get_int(self.captureInfo->settings, "input_format");
+        inputFourCC = [OBSAVCapture fourCharCodeFromFormat:inputFormat withRange:videoRange];
+
+        colorSpace = (int) obs_data_get_int(self.captureInfo->settings, "color_space");
+
+        if (![OBSAVCapture isValidColorspace:colorSpace]) {
+            [self AVCaptureLog:LOG_DEBUG withFormat:@"Unsupported color space: %d", colorSpace];
+            return NO;
+        }
+    } else {
+        // Fast path: derive format parameters from the device itself.
+        CMFormatDescriptionRef formatDescription = self.deviceInput.device.activeFormat.formatDescription;
+        inputFourCC = CMFormatDescriptionGetMediaSubType(formatDescription);
+        colorSpace = [OBSAVCapture colorspaceFromDescription:formatDescription];
+        videoRange = ([OBSAVCapture isFullRangeFormat:inputFourCC]) ? VIDEO_RANGE_FULL : VIDEO_RANGE_PARTIAL;
+    }
+
+    CMVideoDimensions dimensions = [OBSAVCapture dimensionsFromSettings:self.captureInfo->settings];
+
+    if (dimensions.width == 0 || dimensions.height == 0) {
+        [self AVCaptureLog:LOG_DEBUG withFormat:@"No valid resolution found in settings"];
+        return NO;
+    }
+
+    struct media_frames_per_second fps;
+    if (!obs_data_get_frames_per_second(self.captureInfo->settings, "frame_rate", &fps, NULL)) {
+        [self AVCaptureLog:LOG_DEBUG withFormat:@"No valid framerate found in settings"];
+        return NO;
+    }
+
+    // Frame *duration* is the inverse of the frame rate, hence the swapped
+    // numerator/denominator. flags = 1 marks the CMTime as valid.
+    CMTime time = {.value = fps.denominator, .timescale = fps.numerator, .flags = 1};
+
+    AVCaptureDeviceFormat *format = nil;
+
+    // Search device formats (highest-priority last, hence reversed) for one
+    // matching the requested dimensions, frame rate and pixel format.
+    for (AVCaptureDeviceFormat *formatCandidate in [self.deviceInput.device.formats reverseObjectEnumerator]) {
+        CMVideoDimensions formatDimensions = CMVideoFormatDescriptionGetDimensions(formatCandidate.formatDescription);
+
+        if (!(formatDimensions.width == dimensions.width) || !(formatDimensions.height == dimensions.height)) {
+            continue;
+        }
+
+        for (AVFrameRateRange *range in formatCandidate.videoSupportedFrameRateRanges) {
+            if (CMTimeCompare(range.maxFrameDuration, time) >= 0 && CMTimeCompare(range.minFrameDuration, time) <= 0) {
+                CMFormatDescriptionRef formatDescription = formatCandidate.formatDescription;
+                FourCharCode formatFourCC = CMFormatDescriptionGetMediaSubType(formatDescription);
+
+                if (inputFourCC == formatFourCC) {
+                    format = formatCandidate;
+                    break;
+                }
+            }
+        }
+
+        if (format) {
+            break;
+        }
+    }
+
+    if (!format) {
+        [self AVCaptureLog:LOG_WARNING withFormat:@"Frame rate is not supported: %g FPS (%u/%u)",
+                                                  media_frames_per_second_to_fps(fps), fps.numerator, fps.denominator];
+        return NO;
+    }
+
+    [self.session beginConfiguration];
+
+    self.isDeviceLocked = [self.deviceInput.device lockForConfiguration:error];
+
+    if (!self.isDeviceLocked) {
+        [self AVCaptureLog:LOG_WARNING withFormat:@"Could not lock device for configuration"];
+        [self.session commitConfiguration];
+        return NO;
+    }
+
+    [self AVCaptureLog:LOG_INFO
+            withFormat:@"Capturing '%@' (%@):\n"
+                        " Resolution            : %ux%u\n"
+                        " FPS                   : %g (%u/%u)\n"
+                        " Frame Interval        : %g\u00a0s\n"
+                        " Input Format          : %@\n"
+                        " Requested Color Space : %@ (%d)\n"
+                        " Requested Video Range : %@ (%d)\n"
+                        " Using Format          : %@",
+                       self.deviceInput.device.localizedName, self.deviceInput.device.uniqueID, dimensions.width,
+                       dimensions.height, media_frames_per_second_to_fps(fps), fps.numerator, fps.denominator,
+                       media_frames_per_second_to_frame_interval(fps), [OBSAVCapture stringFromSubType:inputFourCC],
+                       [OBSAVCapture stringFromColorspace:colorSpace], colorSpace,
+                       [OBSAVCapture stringFromVideoRange:videoRange], videoRange, format.description];
+
+    // Invalidate cached video info so the next sample buffer re-derives it.
+    OBSAVCaptureVideoInfo newInfo = {.colorSpace = _videoInfo.colorSpace,
+                                     .videoRange = _videoInfo.videoRange,
+                                     .isValid = false};
+
+    self.videoInfo = newInfo;
+
+    self.isPresetBased = NO;
+
+    // Remember the preset-driven device state so a later switch back to a
+    // preset can restore it (see configureSessionWithPreset:withError:).
+    if (!self.presetFormat) {
+        OBSAVCapturePresetInfo *presetInfo = [[OBSAVCapturePresetInfo alloc] init];
+        presetInfo.activeFormat = self.deviceInput.device.activeFormat;
+        presetInfo.minFrameRate = self.deviceInput.device.activeVideoMinFrameDuration;
+        presetInfo.maxFrameRate = self.deviceInput.device.activeVideoMaxFrameDuration;
+        self.presetFormat = presetInfo;
+    }
+
+    self.deviceInput.device.activeFormat = format;
+    self.deviceInput.device.activeVideoMinFrameDuration = time;
+    self.deviceInput.device.activeVideoMaxFrameDuration = time;
+
+    [self.session commitConfiguration];
+
+    return YES;
+}
+
+/// Applies the current OBS source settings to the running session: reports
+/// errors recorded by the sample-buffer delegates, switches the device when
+/// the selection changed, (re)configures via preset or manual format, and
+/// updates audio/buffering state. Returns NO when device selection or
+/// configuration fails.
+///
+/// Fixes: the default label of the first switch was nested inside the
+/// ColorSpace case's braces (functional only by accident of C label rules);
+/// a preset configuration failure was previously ignored and the session
+/// started regardless, unlike the manual-configuration branch.
+- (BOOL)updateSessionwithError:(NSError *__autoreleasing *)error
+{
+    switch (self.captureInfo->lastError) {
+        case OBSAVCaptureError_SampleBufferFormat:
+            if (self.captureInfo->sampleBufferDescription) {
+                FourCharCode mediaSubType =
+                    CMFormatDescriptionGetMediaSubType(self.captureInfo->sampleBufferDescription);
+
+                [self AVCaptureLog:LOG_ERROR
+                        withFormat:@"Incompatible sample buffer format received for sync AVCapture source: %@ (0x%x)",
+                                   [OBSAVCapture stringFromFourCharCode:mediaSubType], mediaSubType];
+            }
+            break;
+        case OBSAVCaptureError_ColorSpace: {
+            if (self.captureInfo->sampleBufferDescription) {
+                FourCharCode mediaSubType =
+                    CMFormatDescriptionGetMediaSubType(self.captureInfo->sampleBufferDescription);
+                BOOL isSampleBufferFullRange = [OBSAVCapture isFullRangeFormat:mediaSubType];
+                OBSAVCaptureColorSpace sampleBufferColorSpace =
+                    [OBSAVCapture colorspaceFromDescription:self.captureInfo->sampleBufferDescription];
+                OBSAVCaptureVideoRange sampleBufferRangeType = isSampleBufferFullRange ? VIDEO_RANGE_FULL
+                                                                                       : VIDEO_RANGE_PARTIAL;
+
+                [self AVCaptureLog:LOG_ERROR
+                        withFormat:@"Failed to get colorspace parameters for colorspace %u and range %u",
+                                   sampleBufferColorSpace, sampleBufferRangeType];
+            }
+            break;
+        }
+        default:
+            // No pending video error: clear the state for the next update.
+            self.captureInfo->lastError = OBSAVCaptureError_NoError;
+            self.captureInfo->sampleBufferDescription = NULL;
+            break;
+    }
+
+    switch (self.captureInfo->lastAudioError) {
+        case OBSAVCaptureError_AudioBuffer: {
+            [OBSAVCapture AVCaptureLog:LOG_ERROR
+                            withFormat:@"Unable to retrieve required AudioBufferList size from sample buffer."];
+            break;
+        }
+        default:
+            self.captureInfo->lastAudioError = OBSAVCaptureError_NoError;
+            break;
+    }
+
+    NSString *newDeviceUUID = [OBSAVCapture stringFromSettings:self.captureInfo->settings withSetting:@"device"];
+    NSString *presetName = [OBSAVCapture stringFromSettings:self.captureInfo->settings withSetting:@"preset"];
+    BOOL isPresetEnabled = obs_data_get_bool(self.captureInfo->settings, "use_preset");
+
+    BOOL updateSession = YES;
+
+    if (![self.deviceUUID isEqualToString:newDeviceUUID]) {
+        if (![self switchCaptureDevice:newDeviceUUID withError:error]) {
+            obs_source_update_properties(self.captureInfo->source);
+            return NO;
+        }
+    } else if (self.isPresetBased && isPresetEnabled && [presetName isEqualToString:self.session.sessionPreset]) {
+        // Same device, same active preset: nothing to reconfigure.
+        updateSession = NO;
+    }
+
+    if (updateSession) {
+        if (isPresetEnabled) {
+            if (![self configureSessionWithPreset:presetName withError:error]) {
+                obs_source_update_properties(self.captureInfo->source);
+                return NO;
+            }
+        } else {
+            if (![self configureSession:error]) {
+                obs_source_update_properties(self.captureInfo->source);
+                return NO;
+            }
+        }
+
+        // Start the session asynchronously on the session queue; weak
+        // capture avoids retaining self in the dispatched block.
+        __weak OBSAVCapture *weakSelf = self;
+        dispatch_async(self.sessionQueue, ^{
+            [weakSelf startCaptureSession];
+        });
+    }
+
+    BOOL isAudioAvailable = [self.deviceInput.device hasMediaType:AVMediaTypeAudio] ||
+                            [self.deviceInput.device hasMediaType:AVMediaTypeMuxed];
+
+    obs_source_set_audio_active(self.captureInfo->source, isAudioAvailable);
+
+    if (!self.isFastPath) {
+        BOOL isBufferingEnabled = obs_data_get_bool(self.captureInfo->settings, "buffering");
+        obs_source_set_async_unbuffered(self.captureInfo->source, !isBufferingEnabled);
+    }
+
+    return YES;
+}
+
+#pragma mark - OBS Settings Helpers
+
++ (CMVideoDimensions)dimensionsFromSettings:(void *)settings
+{
+    /// Parses the "resolution" setting (a JSON object with "width" and
+    /// "height" keys) into CMVideoDimensions. Returns {0, 0} when the
+    /// setting is missing, malformed, or either dimension is zero.
+    CMVideoDimensions fallback = {0};
+
+    NSString *resolutionJSON = [OBSAVCapture stringFromSettings:settings withSetting:@"resolution"];
+    NSData *jsonData = [resolutionJSON dataUsingEncoding:NSUTF8StringEncoding];
+    NSDictionary *resolution = [NSJSONSerialization JSONObjectWithData:jsonData options:0 error:nil];
+
+    if (resolution.count == 0) {
+        return fallback;
+    }
+
+    NSInteger width = [resolution[@"width"] intValue];
+    NSInteger height = [resolution[@"height"] intValue];
+
+    if (!width || !height) {
+        return fallback;
+    }
+
+    CMVideoDimensions dimensions = {.width = (int32_t) clamp_Uint(width, 0, UINT32_MAX),
+                                    .height = (int32_t) clamp_Uint(height, 0, UINT32_MAX)};
+
+    return dimensions;
+}
+
++ (NSString *)stringFromSettings:(void *)settings withSetting:(NSString *)setting
+{
+    /// Convenience overload that falls back to an empty string when the
+    /// setting is absent.
+    return [OBSAVCapture stringFromSettings:settings withSetting:setting withDefault:@""];
+}
+
++ (NSString *)stringFromSettings:(void *)settings withSetting:(NSString *)setting withDefault:(NSString *)defaultValue
+{
+    /// Reads a string value from an obs_data_t settings object, returning a
+    /// copy of the default when the settings pointer or the value is NULL.
+    if (!settings) {
+        return [NSString stringWithString:defaultValue];
+    }
+
+    const char *settingValue = obs_data_get_string(settings, setting.UTF8String);
+
+    if (!settingValue) {
+        return [NSString stringWithString:defaultValue];
+    }
+
+    return @(settingValue);
+}
+
+#pragma mark - Format Conversion Helpers
+
++ (NSString *)stringFromSubType:(FourCharCode)mediaSubType
+{
+    /// Human-readable name for a CoreVideo pixel format or CoreMedia video
+    /// codec FourCC, used in log output. Unrecognized codes yield "Unknown".
+    switch (mediaSubType) {
+        // Raw pixel formats
+        case kCVPixelFormatType_422YpCbCr8: return @"UYVY - 422YpCbCr8";
+        case kCVPixelFormatType_422YpCbCr8_yuvs: return @"YUY2 - 422YpCbCr8_yuvs";
+        case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
+        case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: return @"NV12 - 420YpCbCr8BiPlanar";
+        case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange:
+        case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange: return @"P010 - 420YpCbCr10BiPlanar";
+        case kCVPixelFormatType_32ARGB: return @"ARGB - 32ARGB";
+        case kCVPixelFormatType_32BGRA: return @"BGRA - 32BGRA";
+        // Compressed codecs
+        case kCMVideoCodecType_Animation: return @"Apple Animation";
+        case kCMVideoCodecType_Cinepak: return @"Cinepak";
+        case kCMVideoCodecType_JPEG: return @"JPEG";
+        case kCMVideoCodecType_JPEG_OpenDML: return @"MJPEG - JPEG OpenDML";
+        case kCMVideoCodecType_SorensonVideo: return @"Sorenson Video";
+        case kCMVideoCodecType_SorensonVideo3: return @"Sorenson Video 3";
+        case kCMVideoCodecType_H263: return @"H.263";
+        case kCMVideoCodecType_H264: return @"H.264";
+        case kCMVideoCodecType_MPEG4Video: return @"MPEG-4";
+        case kCMVideoCodecType_MPEG2Video: return @"MPEG-2";
+        case kCMVideoCodecType_MPEG1Video: return @"MPEG-1";
+        // DV family
+        case kCMVideoCodecType_DVCNTSC: return @"DV NTSC";
+        case kCMVideoCodecType_DVCPAL: return @"DV PAL";
+        case kCMVideoCodecType_DVCProPAL: return @"Panasonic DVCPro Pal";
+        case kCMVideoCodecType_DVCPro50NTSC: return @"Panasonic DVCPro-50 NTSC";
+        case kCMVideoCodecType_DVCPro50PAL: return @"Panasonic DVCPro-50 PAL";
+        case kCMVideoCodecType_DVCPROHD720p60: return @"Panasonic DVCPro-HD 720p60";
+        case kCMVideoCodecType_DVCPROHD720p50: return @"Panasonic DVCPro-HD 720p50";
+        case kCMVideoCodecType_DVCPROHD1080i60: return @"Panasonic DVCPro-HD 1080i60";
+        case kCMVideoCodecType_DVCPROHD1080i50: return @"Panasonic DVCPro-HD 1080i50";
+        case kCMVideoCodecType_DVCPROHD1080p30: return @"Panasonic DVCPro-HD 1080p30";
+        case kCMVideoCodecType_DVCPROHD1080p25: return @"Panasonic DVCPro-HD 1080p25";
+        // ProRes family
+        case kCMVideoCodecType_AppleProRes4444: return @"Apple ProRes 4444";
+        case kCMVideoCodecType_AppleProRes422HQ: return @"Apple ProRes 422 HQ";
+        case kCMVideoCodecType_AppleProRes422: return @"Apple ProRes 422";
+        case kCMVideoCodecType_AppleProRes422LT: return @"Apple ProRes 422 LT";
+        case kCMVideoCodecType_AppleProRes422Proxy: return @"Apple ProRes 422 Proxy";
+        default: return @"Unknown";
+    }
+}
+
++ (NSString *)stringFromColorspace:(enum video_colorspace)colorspace
+{
+    /// Human-readable label for an OBS colorspace value, used in log output.
+    NSDictionary<NSNumber *, NSString *> *labels = @{
+        @(VIDEO_CS_DEFAULT): @"Default",
+        @(VIDEO_CS_601): @"CS 601",
+        @(VIDEO_CS_709): @"CS 709",
+        @(VIDEO_CS_SRGB): @"sRGB",
+        @(VIDEO_CS_2100_PQ): @"CS 2100 (PQ)",
+        @(VIDEO_CS_2100_HLG): @"CS 2100 (HLG)",
+    };
+
+    NSString *label = labels[@(colorspace)];
+
+    return label ? label : @"Unknown";
+}
+
++ (NSString *)stringFromVideoRange:(enum video_range_type)videoRange
+{
+    /// Human-readable label for an OBS video range value, used in log output.
+    ///
+    /// Fix: the switch previously had no default, so an out-of-enum value
+    /// (e.g. from an int cast) fell off the end of a non-void function —
+    /// undefined behavior. Falls back to "Unknown" like stringFromColorspace:.
+    switch (videoRange) {
+        case VIDEO_RANGE_FULL:
+            return @"Full";
+        case VIDEO_RANGE_PARTIAL:
+            return @"Partial";
+        case VIDEO_RANGE_DEFAULT:
+            return @"Default";
+        default:
+            return @"Unknown";
+    }
+}
+
++ (NSString *)stringFromCapturePreset:(AVCaptureSessionPreset)preset
+{
+    /// Human-readable label for an AVCaptureSessionPreset, used in log and
+    /// error messages. Unknown presets are echoed back verbatim.
+    ///
+    /// Fix: AVCaptureSessionPreset960x540 was mislabeled "960x460".
+    NSDictionary *presetDescriptions = @{
+        AVCaptureSessionPresetLow: @"Low",
+        AVCaptureSessionPresetMedium: @"Medium",
+        AVCaptureSessionPresetHigh: @"High",
+        AVCaptureSessionPreset320x240: @"320x240",
+        AVCaptureSessionPreset352x288: @"352x288",
+        AVCaptureSessionPreset640x480: @"640x480",
+        AVCaptureSessionPreset960x540: @"960x540",
+        AVCaptureSessionPreset1280x720: @"1280x720",
+        AVCaptureSessionPreset1920x1080: @"1920x1080",
+        AVCaptureSessionPreset3840x2160: @"3840x2160",
+    };
+
+    NSString *presetDescription = [presetDescriptions objectForKey:preset];
+
+    if (!presetDescription) {
+        return [NSString stringWithFormat:@"Unknown (%@)", preset];
+    } else {
+        return presetDescription;
+    }
+}
+
++ (NSString *)stringFromFourCharCode:(OSType)fourCharCode
+{
+    /// Unpacks a big-endian FourCC into its four character bytes and returns
+    /// them as an NSString (e.g. 'avc1' -> @"avc1").
+    char bytes[5];
+
+    bytes[0] = (fourCharCode >> 24) & 0xFF;
+    bytes[1] = (fourCharCode >> 16) & 0xFF;
+    bytes[2] = (fourCharCode >> 8) & 0xFF;
+    bytes[3] = fourCharCode & 0xFF;
+    bytes[4] = '\0';
+
+    return @(bytes);
+}
+
++ (FourCharCode)fourCharCodeFromString:(NSString *)codeString
+{
+    /// Packs the first four bytes of the string into a big-endian FourCC.
+    ///
+    /// Fix: the previous version unconditionally indexed cString[1..3],
+    /// reading past the terminating NUL for strings shorter than four bytes,
+    /// and shifted (possibly negative) signed chars. Short or nil strings
+    /// are now zero-padded and bytes are treated as unsigned.
+    const char *cString = codeString.UTF8String;
+    size_t length = cString ? strlen(cString) : 0;
+
+    FourCharCode fourCharCode = 0;
+
+    for (size_t index = 0; index < 4; index++) {
+        unsigned char byte = (index < length) ? (unsigned char) cString[index] : 0;
+        fourCharCode = (fourCharCode << 8) | byte;
+    }
+
+    return fourCharCode;
+}
+
++ (BOOL)isValidColorspace:(enum video_colorspace)colorspace
+{
+    /// Whether the colorspace is one the async capture path supports
+    /// (default, Rec. 601, or Rec. 709).
+    return colorspace == VIDEO_CS_DEFAULT || colorspace == VIDEO_CS_601 || colorspace == VIDEO_CS_709;
+}
+
++ (BOOL)isValidVideoRange:(enum video_range_type)videoRange
+{
+    /// Whether the value is one of the three defined OBS video range types.
+    return videoRange == VIDEO_RANGE_DEFAULT || videoRange == VIDEO_RANGE_PARTIAL || videoRange == VIDEO_RANGE_FULL;
+}
+
++ (BOOL)isFullRangeFormat:(FourCharCode)pixelFormat
+{
+    /// Whether the CoreVideo pixel format is one of the full-range YCbCr
+    /// variants (as opposed to video/partial range).
+    return pixelFormat == kCVPixelFormatType_420YpCbCr8PlanarFullRange ||
+           pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
+           pixelFormat == kCVPixelFormatType_420YpCbCr10BiPlanarFullRange ||
+           pixelFormat == kCVPixelFormatType_422YpCbCr8FullRange;
+}
+
++ (OBSAVCaptureVideoFormat)formatFromSubtype:(FourCharCode)subtype
+{
+    /// Maps a CoreVideo pixel format FourCC to the matching OBS video
+    /// format; VIDEO_FORMAT_NONE when OBS cannot consume it directly.
+    switch (subtype) {
+        case kCVPixelFormatType_422YpCbCr8: return VIDEO_FORMAT_UYVY;
+        case kCVPixelFormatType_422YpCbCr8_yuvs: return VIDEO_FORMAT_YUY2;
+        case kCVPixelFormatType_32BGRA: return VIDEO_FORMAT_BGRA;
+        case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
+        case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: return VIDEO_FORMAT_NV12;
+        case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange:
+        case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange: return VIDEO_FORMAT_P010;
+        default: return VIDEO_FORMAT_NONE;
+    }
+}
+
++ (FourCharCode)fourCharCodeFromFormat:(OBSAVCaptureVideoFormat)format withRange:(enum video_range_type)videoRange
+{
+    /// Maps an OBS video format (plus range for the bi-planar formats) to
+    /// the corresponding CoreVideo pixel format FourCC; 0 when unmapped.
+    BOOL isFullRange = (videoRange == VIDEO_RANGE_FULL);
+
+    switch (format) {
+        case VIDEO_FORMAT_UYVY:
+            return kCVPixelFormatType_422YpCbCr8;
+        case VIDEO_FORMAT_YUY2:
+            return kCVPixelFormatType_422YpCbCr8_yuvs;
+        case VIDEO_FORMAT_NV12:
+            return isFullRange ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
+                               : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
+        case VIDEO_FORMAT_P010:
+            return isFullRange ? kCVPixelFormatType_420YpCbCr10BiPlanarFullRange
+                               : kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange;
+        case VIDEO_FORMAT_BGRA:
+            return kCVPixelFormatType_32BGRA;
+        default:
+            return 0;
+    }
+}
+
++ (FourCharCode)fourCharCodeFromFormat:(OBSAVCaptureVideoFormat)format
+{
+    /// Convenience overload that assumes partial (video) range.
+    return [OBSAVCapture fourCharCodeFromFormat:format withRange:VIDEO_RANGE_PARTIAL];
+}
+
++ (OBSAVCaptureColorSpace)colorspaceFromDescription:(CMFormatDescriptionRef)description
+{
+    /// Derives the OBS colorspace from a CMFormatDescription's YCbCr matrix
+    /// extension, distinguishing PQ and HLG via the transfer function for
+    /// Rec. 2020 sources. Returns VIDEO_CS_DEFAULT when undetermined.
+    ///
+    /// Fix: the Rec. 2020 branch re-checked `matrix` (always non-NULL at
+    /// that point) instead of `transferFunction`; a format without a
+    /// transfer-function extension would then crash CFStringCompare.
+    CFPropertyListRef matrix = CMFormatDescriptionGetExtension(description, kCMFormatDescriptionExtension_YCbCrMatrix);
+
+    if (!matrix) {
+        return VIDEO_CS_DEFAULT;
+    }
+
+    CFComparisonResult is601 = CFStringCompare(matrix, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0);
+    CFComparisonResult is709 = CFStringCompare(matrix, kCVImageBufferYCbCrMatrix_ITU_R_709_2, 0);
+    CFComparisonResult is2020 = CFStringCompare(matrix, kCVImageBufferYCbCrMatrix_ITU_R_2020, 0);
+
+    if (is601 == kCFCompareEqualTo) {
+        return VIDEO_CS_601;
+    } else if (is709 == kCFCompareEqualTo) {
+        return VIDEO_CS_709;
+    } else if (is2020 == kCFCompareEqualTo) {
+        CFPropertyListRef transferFunction =
+            CMFormatDescriptionGetExtension(description, kCMFormatDescriptionExtension_TransferFunction);
+
+        if (!transferFunction) {
+            return VIDEO_CS_DEFAULT;
+        }
+
+        CFComparisonResult isPQ = CFStringCompare(transferFunction, kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ, 0);
+        CFComparisonResult isHLG = CFStringCompare(transferFunction, kCVImageBufferTransferFunction_ITU_R_2100_HLG, 0);
+
+        if (isPQ == kCFCompareEqualTo) {
+            return VIDEO_CS_2100_PQ;
+        } else if (isHLG == kCFCompareEqualTo) {
+            return VIDEO_CS_2100_HLG;
+        }
+    }
+
+    return VIDEO_CS_DEFAULT;
+}
+
+#pragma mark - Notification Handlers
+
+/// Notification handler for AVCaptureDeviceWasConnected. When the connected
+/// device matches the source's configured UUID and no input is active yet,
+/// the device is re-attached and the session reconfigured (preset or manual,
+/// matching the stored settings) and restarted; otherwise the source
+/// properties are merely refreshed so the device list updates.
+///
+/// Fix: error.localizedDescription was previously passed as the *format*
+/// string, so any '%' in a system-provided message would be interpreted as
+/// a format specifier; it is now passed as an argument to @"%@".
+- (void)deviceConnected:(NSNotification *)notification
+{
+    AVCaptureDevice *device = notification.object;
+
+    if (!device) {
+        return;
+    }
+
+    if (![[device uniqueID] isEqualTo:self.deviceUUID]) {
+        obs_source_update_properties(self.captureInfo->source);
+        return;
+    }
+
+    if (self.deviceInput.device) {
+        // Our device is already attached; nothing to re-activate.
+        [self AVCaptureLog:LOG_INFO withFormat:@"Received connect event with active device '%@' (UUID %@)",
+                                               self.deviceInput.device.localizedName, self.deviceInput.device.uniqueID];
+
+        obs_source_update_properties(self.captureInfo->source);
+        return;
+    }
+
+    [self AVCaptureLog:LOG_INFO
+            withFormat:@"Received connect event for device '%@' (UUID %@)", device.localizedName, device.uniqueID];
+
+    NSError *error;
+    NSString *presetName = [OBSAVCapture stringFromSettings:self.captureInfo->settings withSetting:@"preset"];
+    BOOL isPresetEnabled = obs_data_get_bool(self.captureInfo->settings, "use_preset");
+    BOOL isFastPath = self.captureInfo->isFastPath;
+
+    if ([self switchCaptureDevice:device.uniqueID withError:&error]) {
+        BOOL success;
+        if (isPresetEnabled && !isFastPath) {
+            success = [self configureSessionWithPreset:presetName withError:&error];
+        } else {
+            success = [self configureSession:&error];
+        }
+
+        if (success) {
+            dispatch_async(self.sessionQueue, ^{
+                [self startCaptureSession];
+            });
+        } else {
+            [self AVCaptureLog:LOG_ERROR withFormat:@"%@", error.localizedDescription];
+        }
+    } else {
+        [self AVCaptureLog:LOG_ERROR withFormat:@"%@", error.localizedDescription];
+    }
+
+    obs_source_update_properties(self.captureInfo->source);
+}
+
+/// Handles AVFoundation's device-disconnected notification.
+///
+/// Ignores devices other than the configured one. For the active device the
+/// capture session is stopped and its input removed asynchronously on the
+/// session queue; the block captures self weakly so a pending teardown does
+/// not keep the capture object alive.
+- (void)deviceDisconnected:(NSNotification *)notification
+{
+    AVCaptureDevice *device = notification.object;
+
+    if (!device) {
+        return;
+    }
+
+    // Some other device disappeared: just refresh the device list.
+    if (![[device uniqueID] isEqualTo:self.deviceUUID]) {
+        obs_source_update_properties(self.captureInfo->source);
+        return;
+    }
+
+    // Disconnect event for a device that has no active input — log and bail.
+    if (!self.deviceInput.device) {
+        [self AVCaptureLog:LOG_ERROR withFormat:@"Received disconnect event for inactive device '%@' (UUID %@)",
+                                                device.localizedName, device.uniqueID];
+        obs_source_update_properties(self.captureInfo->source);
+        return;
+    }
+
+    [self AVCaptureLog:LOG_INFO
+            withFormat:@"Received disconnect event for device '%@' (UUID %@)", device.localizedName, device.uniqueID];
+
+    __weak OBSAVCapture *weakSelf = self;
+    dispatch_async(self.sessionQueue, ^{
+        OBSAVCapture *instance = weakSelf;
+
+        // Messaging nil is a no-op if the capture object was deallocated
+        // before this block ran.
+        [instance stopCaptureSession];
+        [instance.session removeInput:instance.deviceInput];
+
+        instance.deviceInput = nil;
+        instance = nil;
+    });
+
+    obs_source_update_properties(self.captureInfo->source);
+}
+
+#pragma mark - AVCapture Delegate Methods
+
+/// Delegate callback for sample buffers dropped by AVFoundation.
+/// Intentionally a no-op — dropped frames are simply ignored.
+- (void)captureOutput:(AVCaptureOutput *)output
+    didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
+         fromConnection:(AVCaptureConnection *)connection
+{
+    return;
+}
+
+/// Delegate callback for every sample buffer delivered by the capture
+/// session; dispatches on the buffer's media type.
+///
+/// Video buffers are either published as IOSurfaces for direct rendering
+/// (fast path) or copied into an OBSAVCaptureVideoFrame and pushed through
+/// obs_source_output_video (buffered path). Audio buffers are unpacked into
+/// an OBSAVCaptureAudioFrame and pushed through obs_source_output_audio.
+- (void)captureOutput:(AVCaptureOutput *)output
+    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+           fromConnection:(AVCaptureConnection *)connection
+{
+    CMItemCount sampleCount = CMSampleBufferGetNumSamples(sampleBuffer);
+
+    // Bail out early on empty buffers or when the source is gone.
+    if (!_captureInfo || sampleCount < 1) {
+        return;
+    }
+
+    // Convert the presentation timestamp to a nanosecond timescale (1E9).
+    CMTime presentationTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer);
+    CMTime presentationNanoTimeStamp = CMTimeConvertScale(presentationTimeStamp, 1E9, kCMTimeRoundingMethod_Default);
+
+    CMFormatDescriptionRef description = CMSampleBufferGetFormatDescription(sampleBuffer);
+    CMMediaType mediaType = CMFormatDescriptionGetMediaType(description);
+
+    switch (mediaType) {
+        case kCMMediaType_Video: {
+            CMVideoDimensions sampleBufferDimensions = CMVideoFormatDescriptionGetDimensions(description);
+            CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+            FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(description);
+
+            // Start from the previously negotiated state but assume invalid
+            // until this buffer has been validated.
+            OBSAVCaptureVideoInfo newInfo = {.videoRange = _videoInfo.videoRange,
+                                             .colorSpace = _videoInfo.colorSpace,
+                                             .isValid = false};
+
+            BOOL usePreset = obs_data_get_bool(_captureInfo->settings, "use_preset");
+
+            if (_isFastPath) {
+                // Fast path only accepts the two pixel formats checked here;
+                // anything else is reported as a sample-buffer format error.
+                if (mediaSubType != kCVPixelFormatType_32BGRA &&
+                    mediaSubType != kCVPixelFormatType_ARGB2101010LEPacked) {
+                    _captureInfo->lastError = OBSAVCaptureError_SampleBufferFormat;
+                    CMFormatDescriptionCreate(kCFAllocatorDefault, mediaType, mediaSubType, NULL,
+                                              &_captureInfo->sampleBufferDescription);
+                    obs_source_update_properties(_captureInfo->source);
+                    break;
+                } else {
+                    _captureInfo->lastError = OBSAVCaptureError_NoError;
+                    // NOTE(review): a previously created sampleBufferDescription is
+                    // replaced with NULL here without a visible CFRelease — confirm
+                    // ownership is handled elsewhere.
+                    _captureInfo->sampleBufferDescription = NULL;
+                }
+
+                // Lock the pixel buffer while querying its backing IOSurface.
+                CVPixelBufferLockBaseAddress(imageBuffer, 0);
+                IOSurfaceRef frameSurface = CVPixelBufferGetIOSurface(imageBuffer);
+                CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
+
+                IOSurfaceRef previousSurface = NULL;
+
+                if (frameSurface && !pthread_mutex_lock(&_captureInfo->mutex)) {
+                    NSRect frameSize = _captureInfo->frameSize;
+
+                    // Adopt the buffer's dimensions when they changed.
+                    if (frameSize.size.width != sampleBufferDimensions.width ||
+                        frameSize.size.height != sampleBufferDimensions.height) {
+                        frameSize = CGRectMake(0, 0, sampleBufferDimensions.width, sampleBufferDimensions.height);
+                    }
+
+                    // Swap in the new surface; the old one is released below,
+                    // outside the mutex.
+                    previousSurface = _captureInfo->currentSurface;
+                    _captureInfo->currentSurface = frameSurface;
+
+                    // Keep the published surface alive until the render path
+                    // has consumed it.
+                    CFRetain(_captureInfo->currentSurface);
+                    IOSurfaceIncrementUseCount(_captureInfo->currentSurface);
+                    pthread_mutex_unlock(&_captureInfo->mutex);
+
+                    newInfo.isValid = true;
+
+                    if (_videoInfo.isValid != newInfo.isValid) {
+                        obs_source_update_properties(_captureInfo->source);
+                    }
+
+                    _captureInfo->frameSize = frameSize;
+                    _videoInfo = newInfo;
+                }
+
+                if (previousSurface) {
+                    IOSurfaceDecrementUseCount(previousSurface);
+                    CFRelease(previousSurface);
+                }
+
+                break;
+            } else {
+                OBSAVCaptureVideoFrame *frame = _captureInfo->videoFrame;
+
+                frame->timestamp = presentationNanoTimeStamp.value;
+
+                enum video_format videoFormat = [OBSAVCapture formatFromSubtype:mediaSubType];
+
+                if (videoFormat == VIDEO_FORMAT_NONE) {
+                    // Unsupported fourcc: remember the description for the
+                    // properties UI error display.
+                    _captureInfo->lastError = OBSAVCaptureError_SampleBufferFormat;
+                    CMFormatDescriptionCreate(kCFAllocatorDefault, mediaType, mediaSubType, NULL,
+                                              &_captureInfo->sampleBufferDescription);
+                } else {
+                    _captureInfo->lastError = OBSAVCaptureError_NoError;
+                    _captureInfo->sampleBufferDescription = NULL;
+#ifdef DEBUG
+                    if (frame->format != VIDEO_FORMAT_NONE && frame->format != videoFormat) {
+                        [self AVCaptureLog:LOG_DEBUG
+                                withFormat:@"Switching fourcc: '%@' (0x%x) -> '%@' (0x%x)",
+                                           [OBSAVCapture stringFromFourCharCode:frame->format], frame -> format,
+                                           [OBSAVCapture stringFromFourCharCode:mediaSubType], mediaSubType];
+                    }
+#endif
+                    bool isFrameYuv = format_is_yuv(frame->format);
+                    bool isSampleBufferYuv = format_is_yuv(videoFormat);
+
+                    frame->format = videoFormat;
+                    frame->width = sampleBufferDimensions.width;
+                    frame->height = sampleBufferDimensions.height;
+
+                    BOOL isSampleBufferFullRange = [OBSAVCapture isFullRangeFormat:mediaSubType];
+
+                    if (isSampleBufferYuv) {
+                        // Determine whether colorspace/range still match the
+                        // previously negotiated state; only recompute the
+                        // conversion parameters when they changed.
+                        OBSAVCaptureColorSpace sampleBufferColorSpace =
+                            [OBSAVCapture colorspaceFromDescription:description];
+                        OBSAVCaptureVideoRange sampleBufferRangeType = isSampleBufferFullRange ? VIDEO_RANGE_FULL
+                                                                                               : VIDEO_RANGE_PARTIAL;
+
+                        BOOL isColorSpaceMatching = NO;
+
+                        SInt64 configuredColorSpace = obs_data_get_int(_captureInfo->settings, "color_space");
+
+                        // With a preset the buffer's own metadata is
+                        // authoritative; otherwise the user settings are.
+                        if (usePreset) {
+                            isColorSpaceMatching = sampleBufferColorSpace == _videoInfo.colorSpace;
+                        } else {
+                            isColorSpaceMatching = configuredColorSpace == _videoInfo.colorSpace;
+                        }
+
+                        BOOL isVideoRangeMatching = NO;
+                        SInt64 configuredVideoRangeType = obs_data_get_int(_captureInfo->settings, "video_range");
+
+                        if (usePreset) {
+                            isVideoRangeMatching = sampleBufferRangeType == _videoInfo.videoRange;
+                        } else {
+                            isVideoRangeMatching = configuredVideoRangeType == _videoInfo.videoRange;
+                            isSampleBufferFullRange = configuredVideoRangeType == VIDEO_RANGE_FULL;
+                        }
+
+                        if (isColorSpaceMatching && isVideoRangeMatching) {
+                            newInfo.isValid = true;
+                        } else {
+                            frame->full_range = isSampleBufferFullRange;
+
+                            bool success = video_format_get_parameters_for_format(
+                                sampleBufferColorSpace, sampleBufferRangeType, frame->format, frame->color_matrix,
+                                frame->color_range_min, frame->color_range_max);
+
+                            if (!success) {
+                                _captureInfo->lastError = OBSAVCaptureError_ColorSpace;
+                                CMFormatDescriptionCreate(kCFAllocatorDefault, mediaType, mediaSubType, NULL,
+                                                          &_captureInfo->sampleBufferDescription);
+                                newInfo.isValid = false;
+                            } else {
+                                newInfo.colorSpace = sampleBufferColorSpace;
+                                newInfo.videoRange = sampleBufferRangeType;
+                                newInfo.isValid = true;
+                            }
+                        }
+                    } else if (!isFrameYuv && !isSampleBufferYuv) {
+                        newInfo.isValid = true;
+                    }
+                }
+
+                if (newInfo.isValid != _videoInfo.isValid) {
+                    obs_source_update_properties(_captureInfo->source);
+                }
+
+                _videoInfo = newInfo;
+
+                if (newInfo.isValid) {
+                    // Copy plane pointers/strides while the buffer is locked
+                    // read-only, emit the frame, then unlock.
+                    CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
+
+                    if (!CVPixelBufferIsPlanar(imageBuffer)) {
+                        frame->linesize[0] = (UInt32) CVPixelBufferGetBytesPerRow(imageBuffer);
+                        frame->data[0] = CVPixelBufferGetBaseAddress(imageBuffer);
+                    } else {
+                        size_t planeCount = CVPixelBufferGetPlaneCount(imageBuffer);
+
+                        for (size_t i = 0; i < planeCount; i++) {
+                            frame->linesize[i] = (UInt32) CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, i);
+                            frame->data[i] = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, i);
+                        }
+                    }
+
+                    obs_source_output_video(_captureInfo->source, frame);
+                    CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
+                } else {
+                    // Invalid frame state: clear the source's video output.
+                    obs_source_output_video(_captureInfo->source, NULL);
+                }
+
+                break;
+            }
+        }
+        case kCMMediaType_Audio: {
+            size_t requiredBufferListSize;
+            OSStatus status = noErr;
+
+            // First call only computes the required AudioBufferList size;
+            // the second call below fills the allocated list.
+            status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
+                sampleBuffer, &requiredBufferListSize, NULL, 0, NULL, NULL,
+                kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, NULL);
+
+            if (status != noErr) {
+                _captureInfo->lastAudioError = OBSAVCaptureError_AudioBuffer;
+                obs_source_update_properties(_captureInfo->source);
+                break;
+            }
+
+            AudioBufferList *bufferList = (AudioBufferList *) malloc(requiredBufferListSize);
+            CMBlockBufferRef blockBuffer = NULL;
+
+            OSStatus error = noErr;
+            error = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
+                sampleBuffer, NULL, bufferList, requiredBufferListSize, kCFAllocatorSystemDefault,
+                kCFAllocatorSystemDefault, kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment, &blockBuffer);
+
+            if (error == noErr) {
+                _captureInfo->lastAudioError = OBSAVCaptureError_NoError;
+
+                OBSAVCaptureAudioFrame *audio = _captureInfo->audioFrame;
+
+                for (size_t i = 0; i < bufferList->mNumberBuffers; i++) {
+                    audio->data[i] = bufferList->mBuffers[i].mData;
+                }
+
+                audio->timestamp = presentationNanoTimeStamp.value;
+                audio->frames = (uint32_t) CMSampleBufferGetNumSamples(sampleBuffer);
+
+                const AudioStreamBasicDescription *basicDescription =
+                    CMAudioFormatDescriptionGetStreamBasicDescription(description);
+
+                audio->samples_per_sec = (uint32_t) basicDescription->mSampleRate;
+                audio->speakers = (enum speaker_layout) basicDescription->mChannelsPerFrame;
+
+                // Map bit depth to the corresponding libobs audio format.
+                switch (basicDescription->mBitsPerChannel) {
+                    case 8:
+                        audio->format = AUDIO_FORMAT_U8BIT;
+                        break;
+                    case 16:
+                        audio->format = AUDIO_FORMAT_16BIT;
+                        break;
+                    case 32:
+                        audio->format = AUDIO_FORMAT_32BIT;
+                        break;
+                    default:
+                        audio->format = AUDIO_FORMAT_UNKNOWN;
+                        break;
+                }
+
+                obs_source_output_audio(_captureInfo->source, audio);
+            } else {
+                _captureInfo->lastAudioError = OBSAVCaptureError_AudioBuffer;
+                obs_source_output_audio(_captureInfo->source, NULL);
+            }
+
+            // Release the retained block buffer and the temporary list.
+            if (blockBuffer != NULL) {
+                CFRelease(blockBuffer);
+            }
+
+            if (bufferList != NULL) {
+                free(bufferList);
+                bufferList = NULL;
+            }
+
+            break;
+        }
+        default:
+            break;
+    }
+}
+
+#pragma mark - Log Helpers
+
+/// Logs a formatted message via blog(), prefixed with this source's name.
+///
+/// - Parameters:
+///   - logLevel: libobs log level (e.g. LOG_INFO, LOG_ERROR)
+///   - format: NSString format followed by its variadic arguments
+- (void)AVCaptureLog:(int)logLevel withFormat:(NSString *)format, ...
+{
+    va_list args;
+    va_start(args, format);
+
+    NSString *logMessage = [[NSString alloc] initWithFormat:format arguments:args];
+    va_end(args);
+
+    // Guard against a NULL source name before bridging to NSString.
+    const char *name_value = obs_source_get_name(self.captureInfo->source);
+    NSString *sourceName = @((name_value) ? name_value : "");
+
+    blog(logLevel, "%s: %s", sourceName.UTF8String, logMessage.UTF8String);
+}
+
+/// Class-level variant of ``AVCaptureLog:withFormat:`` used when no source
+/// instance is available; logs the formatted message without a name prefix.
++ (void)AVCaptureLog:(int)logLevel withFormat:(NSString *)format, ...
+{
+    va_list args;
+    va_start(args, format);
+
+    NSString *logMessage = [[NSString alloc] initWithFormat:format arguments:args];
+    va_end(args);
+
+    blog(logLevel, "%s", logMessage.UTF8String);
+}
+
+@end

+ 24 - 0
plugins/mac-avcapture/OBSAVCapturePresetInfo.h

@@ -0,0 +1,24 @@
+//
+//  OBSAVCapturePresetInfo.h
+//  mac-avcapture
+//
+//  Created by Patrick Heyer on 2023-03-07.
+//
+
+@import Foundation;
+@import AVFoundation;
+
+/// Stores format and framerate of a [AVCaptureSessionPreset](https://developer.apple.com/documentation/avfoundation/avcapturesessionpreset?language=objc).
+///
+/// Changing the [activeFormat](https://developer.apple.com/documentation/avfoundation/avcapturedevice/1389221-activeformat?language=objc) of a device takes precedence over the configuration contained in a [AVCaptureSessionPreset](https://developer.apple.com/documentation/avfoundation/avcapturesessionpreset?language=objc). To restore a preset's configuration after changing to a different format, the values of a configured preset are stored in this object and restored when the source is switched back to a preset-based configuration.
+@interface OBSAVCapturePresetInfo : NSObject
+
+/// [activeFormat](https://developer.apple.com/documentation/avfoundation/avcapturedevice/1389221-activeformat?language=objc) used by the preset
+/// (held strongly — `(nonatomic)` defaults to `strong` under ARC)
+@property (nonatomic) AVCaptureDeviceFormat *activeFormat;
+
+/// Minimum framerate supported by the preset
+@property (nonatomic) CMTime minFrameRate;
+
+/// Maximum framerate supported by the preset
+@property (nonatomic) CMTime maxFrameRate;
+@end

+ 25 - 0
plugins/mac-avcapture/OBSAVCapturePresetInfo.m

@@ -0,0 +1,25 @@
+//
+//  OBSAVCapturePresetInfo.m
+//  mac-avcapture
+//
+//  Created by Patrick Heyer on 2023-03-07.
+//
+
+#import <OBSAVCapturePresetInfo.h>
+
+@implementation OBSAVCapturePresetInfo
+
+/// Initializes with no stored format and both framerate bounds set to a
+/// CMTime of 10000/300.
+// NOTE(review): the meaning of the 10000/300 default (value/timescale)
+// is not evident from this file — confirm against the restoring code.
+- (instancetype)init
+{
+    self = [super init];
+
+    if (self) {
+        _activeFormat = nil;
+        _minFrameRate = CMTimeMake(10000, 300);
+        _maxFrameRate = CMTimeMake(10000, 300);
+    }
+
+    return self;
+}
+
+@end

+ 2 - 0
plugins/mac-avcapture/data/locale/en-US.ini

@@ -1,4 +1,6 @@
 AVCapture="Video Capture Device"
+AVCapture_Fast="Capture Card Device"
+AVCapture_Legacy="Video Capture Device (Legacy)"
 Device="Device"
 UsePreset="Use Preset"
 Preset="Preset"

+ 28 - 0
plugins/mac-avcapture/legacy/CMakeLists.txt

@@ -0,0 +1,28 @@
+# Build definition for the legacy AVFoundation capture plugin sources.
+#
+# NOTE(review): this file defines targets named mac-avcapture/OBS::avcapture,
+# while the top-level CMakeLists.txt in this commit defines
+# mac-avcapture-legacy and compiles the legacy sources directly — confirm
+# which of the two is actually included by the build.
+cmake_minimum_required(VERSION 3.24...3.25)
+
+add_library(mac-avcapture MODULE)
+add_library(OBS::avcapture ALIAS mac-avcapture)
+
+target_sources(mac-avcapture PRIVATE av-capture.mm left-right.hpp scope-guard.hpp)
+
+# Outside Xcode the XCODE_ATTRIBUTE_* below has no effect, so ARC must be
+# enabled explicitly for the Objective-C++ source.
+if(NOT XCODE)
+  set_source_files_properties(av-capture.mm PROPERTIES COMPILE_FLAGS -fobjc-arc)
+endif()
+
+target_link_libraries(
+  mac-avcapture
+  PRIVATE OBS::libobs
+          "$<LINK_LIBRARY:FRAMEWORK,AVFoundation.framework>"
+          "$<LINK_LIBRARY:FRAMEWORK,Cocoa.framework>"
+          "$<LINK_LIBRARY:FRAMEWORK,CoreFoundation.framework>"
+          "$<LINK_LIBRARY:FRAMEWORK,CoreMedia.framework>"
+          "$<LINK_LIBRARY:FRAMEWORK,CoreVideo.framework>"
+          "$<LINK_LIBRARY:FRAMEWORK,CoreMediaIO.framework>")
+
+set_target_properties_obs(
+  mac-avcapture
+  PROPERTIES FOLDER plugins
+             PREFIX ""
+             XCODE_ATTRIBUTE_CLANG_ENABLE_OBJC_ARC YES
+             XCODE_ATTRIBUTE_CLANG_WARN_SUSPICIOUS_IMPLICIT_CONVERSION YES
+             XCODE_ATTRIBUTE_GCC_WARN_SHADOW YES)

+ 24 - 22
plugins/mac-avcapture/av-capture.mm → plugins/mac-avcapture/legacy/av-capture.mm

@@ -43,7 +43,7 @@ namespace std {
 
 }  // namespace std
 
-#define TEXT_AVCAPTURE     obs_module_text("AVCapture")
+#define TEXT_AVCAPTURE     obs_module_text("AVCapture_Legacy")
 #define TEXT_DEVICE        obs_module_text("Device")
 #define TEXT_USE_PRESET    obs_module_text("UsePreset")
 #define TEXT_PRESET        obs_module_text("Preset")
@@ -79,7 +79,7 @@ struct av_capture;
 
 #define AVLOG(level, format, ...) blog(level, "%s: " format, obs_source_get_name(capture->source), ##__VA_ARGS__)
 
-@interface OBSAVCaptureDelegate
+@interface OBSLegacyAVCaptureDelegate
     : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate> {
       @public
     struct av_capture *capture;
@@ -113,7 +113,7 @@ namespace {
 }  // namespace
 
 struct av_capture {
-    OBSAVCaptureDelegate *delegate;
+    OBSLegacyAVCaptureDelegate *delegate;
     dispatch_queue_t queue;
     dispatch_queue_t audioQueue;
     bool has_clock;
@@ -198,7 +198,7 @@ static bool get_input_format(obs_data_t *settings, FourCharCode &fourcc)
     if (!item)
         return false;
 
-    fourcc = (FourCharCode) obs_data_item_get_int(item.get());
+    fourcc = static_cast<FourCharCode>(obs_data_item_get_int(item.get()));
     return true;
 }
 
@@ -564,14 +564,14 @@ static inline bool update_frame(av_capture *capture, obs_source_frame *frame, CM
     CVPixelBufferLockBaseAddress(img, kCVPixelBufferLock_ReadOnly);
 
     if (!CVPixelBufferIsPlanar(img)) {
-        frame->linesize[0] = (uint32_t) CVPixelBufferGetBytesPerRow(img);
+        frame->linesize[0] = static_cast<uint32_t>(CVPixelBufferGetBytesPerRow(img));
         frame->data[0] = static_cast<uint8_t *>(CVPixelBufferGetBaseAddress(img));
         return true;
     }
 
     size_t count = CVPixelBufferGetPlaneCount(img);
     for (size_t i = 0; i < count; i++) {
-        frame->linesize[i] = (uint32_t) CVPixelBufferGetBytesPerRowOfPlane(img, i);
+        frame->linesize[i] = static_cast<uint32_t>(CVPixelBufferGetBytesPerRowOfPlane(img, i));
         frame->data[i] = static_cast<uint8_t *>(CVPixelBufferGetBaseAddressOfPlane(img, i));
     }
     return true;
@@ -589,7 +589,7 @@ static inline bool update_audio(obs_source_audio *audio, CMSampleBufferRef sampl
         return false;
     }
 
-    AudioBufferList *list = (AudioBufferList *) bmalloc(requiredSize);
+    AudioBufferList *list = static_cast<AudioBufferList *>(bmalloc(requiredSize));
     CMBlockBufferRef buffer;
 
     status = CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(
@@ -605,13 +605,13 @@ static inline bool update_audio(obs_source_audio *audio, CMSampleBufferRef sampl
     }
 
     for (size_t i = 0; i < list->mNumberBuffers; i++)
-        audio->data[i] = (uint8_t *) list->mBuffers[i].mData;
+        audio->data[i] = static_cast<uint8_t *>(list->mBuffers[i].mData);
 
-    audio->frames = (uint32_t) CMSampleBufferGetNumSamples(sample_buffer);
+    audio->frames = static_cast<uint32_t>(CMSampleBufferGetNumSamples(sample_buffer));
     CMFormatDescriptionRef desc = CMSampleBufferGetFormatDescription(sample_buffer);
     const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(desc);
-    audio->samples_per_sec = (uint32_t) asbd->mSampleRate;
-    audio->speakers = (enum speaker_layout) asbd->mChannelsPerFrame;
+    audio->samples_per_sec = static_cast<uint32_t>(asbd->mSampleRate);
+    audio->speakers = static_cast<enum speaker_layout>(asbd->mChannelsPerFrame);
     switch (asbd->mBitsPerChannel) {
         case 8:
             audio->format = AUDIO_FORMAT_U8BIT;
@@ -634,7 +634,7 @@ static inline bool update_audio(obs_source_audio *audio, CMSampleBufferRef sampl
     return true;
 }
 
-@implementation OBSAVCaptureDelegate
+@implementation OBSLegacyAVCaptureDelegate
 
 - (void)captureOutput:(AVCaptureOutput *)out
     didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
@@ -754,7 +754,7 @@ static bool init_session(av_capture *capture)
         return false;
     }
 
-    auto delegate = [[OBSAVCaptureDelegate alloc] init];
+    auto delegate = [[OBSLegacyAVCaptureDelegate alloc] init];
     if (!delegate) {
         AVLOG(LOG_ERROR, "Could not create OBSAVCaptureDelegate");
         return false;
@@ -826,7 +826,7 @@ static bool init_device_input(av_capture *capture, AVCaptureDevice *dev)
 
 static uint32_t uint_from_dict(NSDictionary *dict, CFStringRef key)
 {
-    return ((NSNumber *) dict[(__bridge NSString *) key]).unsignedIntValue;
+    return static_cast<NSNumber *>(dict[(__bridge NSString *) key]).unsignedIntValue;
 }
 
 static bool init_format(av_capture *capture, AVCaptureDevice *dev)
@@ -1010,7 +1010,7 @@ static bool init_manual(av_capture *capture, AVCaptureDevice *dev, obs_data_t *s
     clear_capture(capture);
 
     auto input_format = obs_data_get_int(settings, "input_format");
-    FourCharCode actual_format = (FourCharCode) input_format;
+    FourCharCode actual_format = static_cast<FourCharCode>(input_format);
 
     SCOPE_EXIT
     {
@@ -1027,13 +1027,13 @@ static bool init_manual(av_capture *capture, AVCaptureDevice *dev, obs_data_t *s
             obs_source_update_properties(capture->source);
     };
 
-    capture->requested_colorspace = (int) obs_data_get_int(settings, "color_space");
+    capture->requested_colorspace = static_cast<int>(obs_data_get_int(settings, "color_space"));
     if (!color_space_valid(capture->requested_colorspace)) {
         AVLOG(LOG_WARNING, "Unsupported color space: %d", capture->requested_colorspace);
         return false;
     }
 
-    capture->requested_video_range = (int) obs_data_get_int(settings, "video_range");
+    capture->requested_video_range = static_cast<int>(obs_data_get_int(settings, "video_range"));
     if (!video_range_valid(capture->requested_video_range)) {
         AVLOG(LOG_WARNING, "Unsupported color range: %d", capture->requested_video_range);
         return false;
@@ -1074,7 +1074,8 @@ static bool init_manual(av_capture *capture, AVCaptureDevice *dev, obs_data_t *s
     if (!lock_device(capture, dev))
         return false;
 
-    const char *if_name = input_format == INPUT_FORMAT_AUTO ? "Auto" : fourcc_subtype_name((FourCharCode) input_format);
+    const char *if_name =
+        input_format == INPUT_FORMAT_AUTO ? "Auto" : fourcc_subtype_name(static_cast<FourCharCode>(input_format));
 
 #define IF_AUTO(x) (input_format != INPUT_FORMAT_AUTO ? "" : x)
     AVLOG(LOG_INFO,
@@ -1702,7 +1703,7 @@ static bool input_format_property_needs_update(obs_property_t *p, const vector<A
 
     auto num = obs_property_list_item_count(p);
     for (size_t i = 1; i < num; i++) {  // skip auto entry
-        FourCharCode fourcc = (FourCharCode) obs_property_list_item_int(p, i);
+        FourCharCode fourcc = static_cast<FourCharCode>(obs_property_list_item_int(p, i));
         fourcc_found = fourcc_found || fourcc == *fourcc_;
 
         auto pos = find_if(begin(formats), end(formats), [&](AVCaptureDeviceFormat *format) {
@@ -1803,7 +1804,7 @@ static bool update_int_list_property(const char *prop_name, const char *localiza
     if (!p)
         p = obs_properties_get(props, prop_name);
 
-    int val = (int) obs_data_get_int(conf.settings, prop_name);
+    int val = static_cast<int>(obs_data_get_int(conf.settings, prop_name));
 
     av_video_info vi;
     if (ref)
@@ -2181,7 +2182,7 @@ bool obs_module_load(void)
         .id = "av_capture_input",
         .type = OBS_SOURCE_TYPE_INPUT,
         .output_flags = OBS_SOURCE_ASYNC_VIDEO | OBS_SOURCE_AUDIO | OBS_SOURCE_DO_NOT_DUPLICATE |
-                        OBS_SOURCE_CAP_OBSOLETE,
+                        OBS_SOURCE_CAP_OBSOLETE | OBS_SOURCE_DEPRECATED,
         .get_name = av_capture_getname,
         .create = av_capture_create,
         .destroy = av_capture_destroy,
@@ -2193,7 +2194,8 @@ bool obs_module_load(void)
     obs_register_source(&av_capture_info);
 
     av_capture_info.version = 2;
-    av_capture_info.output_flags = OBS_SOURCE_ASYNC_VIDEO | OBS_SOURCE_AUDIO | OBS_SOURCE_DO_NOT_DUPLICATE;
+    av_capture_info.output_flags = OBS_SOURCE_ASYNC_VIDEO | OBS_SOURCE_AUDIO | OBS_SOURCE_DO_NOT_DUPLICATE |
+                                   OBS_SOURCE_DEPRECATED;
     av_capture_info.get_defaults = av_capture_defaults_v2;
 
     obs_register_source(&av_capture_info);

+ 0 - 0
plugins/mac-avcapture/left-right.hpp → plugins/mac-avcapture/legacy/left-right.hpp


+ 0 - 0
plugins/mac-avcapture/scope-guard.hpp → plugins/mac-avcapture/legacy/scope-guard.hpp


+ 119 - 0
plugins/mac-avcapture/plugin-main.h

@@ -0,0 +1,119 @@
+//
+//  plugin-main.h
+//  mac-avcapture
+//
+//  Created by Patrick Heyer on 2023-03-07.
+//
+
+#import <obs-module.h>
+#import <pthread.h>
+#import "OBSAVCapture.h"
+#import "plugin-properties.h"
+
+/// Get localized text for a module string identifier
+/// - Parameters:
+///   - text_id: Localization string identifier
+/// - Returns: Pointer to localized text variant
+extern const char *av_capture_get_text(const char *text_id);
+
+/// Create a macOS ``OBSAVCapture`` source using asynchronous video frames
+/// - Parameters:
+///   - settings: Pointer to ``libobs`` data struct with possible user settings read from configuration file
+///   - source: Pointer to ``libobs`` source struct
+/// - Returns: Pointer to created ``OBSAVCaptureInfo`` struct
+static void *av_capture_create(obs_data_t *settings, obs_source_t *source);
+
+/// Create a macOS ``OBSAVCapture`` source rendering video frames directly.
+/// - Parameters:
+///   - settings: Pointer to ``libobs`` data struct with possible user settings read from configuration file
+///   - source: Pointer to ``libobs`` source struct
+/// - Returns: Pointer to created ``OBSAVCaptureInfo`` struct
+static void *av_fast_capture_create(obs_data_t *settings, obs_source_t *source);
+
+/// Get localized ``OBSAVCapture`` source name.
+///
+/// The value returned by this function will be used to identify the ``OBSAVCapture`` source throughout the OBS Studio user interface.
+///
+/// - Parameters:
+///   - capture_info: Pointer to ``OBSAVCaptureInfo`` struct as returned by ``av_capture_create``
+/// - Returns: Pointer to localized source type name
+static const char *av_capture_get_name(void *capture_info);
+
+/// Get localized source name of the fast capture variant of the ``OBSAVCapture`` source.
+///
+/// The value returned by this function will be used to identify the ``OBSAVCapture`` source throughout the OBS Studio user interface.
+///
+/// - Parameters:
+///   - capture_info: Pointer to ``OBSAVCaptureInfo`` struct as returned by ``av_capture_create``
+/// - Returns: Pointer to localized source type name
+static const char *av_fast_capture_get_name(void *capture_info);
+
+/// Set default values used by the ``OBSAVCapture`` source
+///
+/// While this function sets default values for specific properties in the user settings, the function is also called to _get_ defaults by ``libobs``
+///
+/// - Parameters:
+///   - settings: Pointer to obs settings struct
+static void av_capture_set_defaults(obs_data_t *settings);
+
+/// Set default values used by the fast capture variant of the ``OBSAVCapture`` source
+///
+/// While this function sets default values for specific properties in the user settings, the function is also called to _get_ defaults by ``libobs``
+///
+/// - Parameters:
+///   - settings: Pointer to obs settings struct
+static void av_fast_capture_set_defaults(obs_data_t *settings);
+
+/// Creates a new properties struct with all the properties used by the ``OBSAVCapture`` source.
+///
+/// This function is commonly used to set up all the properties that a source type has and can be used by the internal API to discover which properties exist, but is also commonly used to set up the properties used for the OBS Studio user interface.
+///
+/// If the source type was created successfully, the associated ``OBSAVCaptureInfo`` struct is passed to this function as well, which allows setting up property visibility and availability depending on the source state.
+///
+/// - Parameters:
+///   - capture_info: Pointer to ``OBSAVCaptureInfo`` struct
+/// - Returns: Pointer to created OBS properties struct
+static obs_properties_t *av_capture_properties(void *capture_info);
+
+/// Update ``OBSAVCapture`` source
+/// - Parameters:
+///   - capture_info: Pointer to ``OBSAVCaptureInfo`` struct
+///   - settings: Pointer to settings struct
+static void av_capture_update(void *capture_info, obs_data_t *settings);
+
+/// Handle ``tick`` of ``libobs`` compositing engine.
+///
+/// ``libobs`` sends a tick at an internal refresh rate to allow sources to prepare data for output. This function works in conjunction with the ``render`` function, which is called at a later point at the frame rate specified in the `Output` configuration of OBS Studio.
+///
+/// ``OBSAVCapture`` keeps a reference to the ``IOSurface`` of the last two frames provided by ``CoreMediaIO`` and converts a valid ``IOSurface`` into a ``libobs``-recognized texture in this function.
+///
+/// - Parameters:
+///   - capture_info: Pointer to ``OBSAVCaptureInfo`` struct
+///   - seconds: Delta since the last call to this function
+static void av_fast_capture_tick(void *capture_info, float seconds);
+
+/// Handle ``render`` of ``libobs`` compositing engine.
+///
+/// ``libobs`` sends a render call at the frame rate specified in the `Output` configuration of OBS Studio, which requires the source to do the actual rendering work using the ``libobs`` graphics engine.
+///
+/// - Parameters:
+///   - capture_info: Pointer to ``OBSAVCaptureInfo`` struct
+///   - effect: Draw function used by ``libobs``
+static void av_fast_capture_render(void *capture_info, gs_effect_t *effect);
+
+/// Get width of texture currently managed by the ``OBSAVCapture`` source
+/// - Parameters:
+///   - capture_info: Pointer to ``OBSAVCaptureInfo`` struct
+static UInt32 av_fast_capture_get_width(void *capture_info);
+
+/// Get height of texture currently managed by the ``OBSAVCapture`` source
+/// - Parameters:
+///   - capture_info: Pointer to ``OBSAVCaptureInfo`` struct
+static UInt32 av_fast_capture_get_height(void *capture_info);
+
+/// Tear down ``OBSAVCapture`` source.
+///
+/// This function implements all the necessary cleanup functions to ensure a clean exit of the program. Any resources or references held by the source need to be deleted or destroyed to avoid memory leaks. Any shutdown or cleanup functions required by resources used by the source also need to be called to ensure a clean shutdown.
+/// - Parameters:
+///   - capture_info: Pointer to ``OBSAVCaptureInfo`` struct
+static void av_capture_destroy(void *capture_info);

+ 307 - 0
plugins/mac-avcapture/plugin-main.m

@@ -0,0 +1,307 @@
+//
+//  plugin-main.m
+//  mac-avcapture
+//
+//  Created by Patrick Heyer on 2023-03-07.
+//
+
+#import "plugin-main.h"
+
+#pragma mark av-capture API
+
+/// Resolves a localization key to the module's localized text.
+///
+/// Non-static on purpose: plugin-properties.m declares this function `extern`
+/// to reuse the module's locale lookup outside this translation unit.
+const char *av_capture_get_text(const char *text_id)
+{
+    return obs_module_text(text_id);
+}
+
+/// Creates the async (preset-capable) capture source.
+///
+/// - Parameters:
+///   - settings: Settings associated with the source
+///   - source: The ``libobs`` source being created
+/// - Returns: Pointer to a heap-allocated ``OBSAVCaptureInfo`` struct
+static void *av_capture_create(obs_data_t *settings, obs_source_t *source)
+{
+    // Use bzalloc (not bmalloc) so every field not explicitly set here —
+    // texture, currentSurface, previousSurface, sampleBufferDescription, etc. —
+    // starts out zeroed; av_capture_destroy inspects those fields
+    // unconditionally and must not see uninitialized garbage.
+    OBSAVCaptureInfo *capture_data = bzalloc(sizeof(OBSAVCaptureInfo));
+    capture_data->isFastPath = false;
+    capture_data->settings = settings;
+    capture_data->source = source;
+    capture_data->videoFrame = bzalloc(sizeof(OBSAVCaptureVideoFrame));
+    capture_data->audioFrame = bzalloc(sizeof(OBSAVCaptureAudioFrame));
+
+    OBSAVCapture *capture = [[OBSAVCapture alloc] initWithCaptureInfo:capture_data];
+
+    capture_data->capture = capture;
+
+    return capture_data;
+}
+
+/// Creates the fast-path (IOSurface texture-sharing) capture source.
+///
+/// - Parameters:
+///   - settings: Settings associated with the source
+///   - source: The ``libobs`` source being created
+/// - Returns: Pointer to a heap-allocated ``OBSAVCaptureInfo`` struct, or NULL
+///   if the default rect effect is unavailable
+static void *av_fast_capture_create(obs_data_t *settings, obs_source_t *source)
+{
+    // Zero-allocate so videoFrame/audioFrame/texture/surface pointers are NULL:
+    // av_capture_destroy bfree()s videoFrame and audioFrame unconditionally
+    // and would otherwise free uninitialized garbage for the fast path.
+    OBSAVCaptureInfo *capture_info = bzalloc(sizeof(OBSAVCaptureInfo));
+    capture_info->isFastPath = true;
+    capture_info->settings = settings;
+    capture_info->source = source;
+    capture_info->effect = obs_get_base_effect(OBS_EFFECT_DEFAULT_RECT);
+    capture_info->frameSize = CGRectZero;
+
+    if (!capture_info->effect) {
+        // Do not leak the info struct on failure.
+        bfree(capture_info);
+        return NULL;
+    }
+
+    pthread_mutex_init(&capture_info->mutex, NULL);
+
+    OBSAVCapture *capture = [[OBSAVCapture alloc] initWithCaptureInfo:capture_info];
+
+    capture_info->capture = capture;
+
+    return capture_info;
+}
+
+/// Localized display name of the async (preset-capable) capture source.
+static const char *av_capture_get_name(void *capture_info_aliased __unused)
+{
+    return obs_module_text("AVCapture");
+}
+
+/// Localized display name of the fast-path capture source.
+static const char *av_fast_capture_get_name(void *capture_info_aliased __unused)
+{
+    return obs_module_text("AVCapture_Fast");
+}
+
+/// Default settings for the async capture source: no device selected,
+/// preset-based configuration with the "High" session preset, audio enabled.
+static void av_capture_set_defaults(obs_data_t *settings)
+{
+    obs_data_set_default_string(settings, "device", "");
+
+    obs_data_set_default_bool(settings, "use_preset", true);
+    obs_data_set_default_string(settings, "preset", AVCaptureSessionPresetHigh.UTF8String);
+
+    obs_data_set_default_bool(settings, "enable_audio", true);
+}
+
+/// Default settings for the fast-path capture source: no device selected,
+/// manual (non-preset) configuration, audio enabled.
+static void av_fast_capture_set_defaults(obs_data_t *settings)
+{
+    obs_data_set_default_string(settings, "device", "");
+
+    obs_data_set_default_bool(settings, "use_preset", false);
+    obs_data_set_default_bool(settings, "enable_audio", true);
+}
+
+/// Builds the property sheet shared by both capture source variants.
+///
+/// All properties are created unconditionally; visibility, enabled state and
+/// modification callbacks are then configured depending on whether the source
+/// is the fast path (manual resolution/frame rate only) or the async path
+/// (preset or manual configuration with format/color-space/range selection).
+static obs_properties_t *av_capture_properties(void *capture_info_aliased)
+{
+    OBSAVCaptureInfo *capture_info = capture_info_aliased;
+
+    obs_properties_t *properties = obs_properties_create();
+
+    // Create Properties
+    obs_property_t *device_list = obs_properties_add_list(properties, "device", obs_module_text("Device"),
+                                                          OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING);
+    obs_property_t *use_preset = obs_properties_add_bool(properties, "use_preset", obs_module_text("UsePreset"));
+    obs_property_t *preset_list = obs_properties_add_list(properties, "preset", obs_module_text("Preset"),
+                                                          OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING);
+    obs_property_t *resolutions = obs_properties_add_list(properties, "resolution", obs_module_text("Resolution"),
+                                                          OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_STRING);
+    obs_property_t *use_buffering = obs_properties_add_bool(properties, "buffering", obs_module_text("Buffering"));
+    obs_property_t *frame_rates = obs_properties_add_frame_rate(properties, "frame_rate", obs_module_text("FrameRate"));
+    obs_property_t *input_format = obs_properties_add_list(properties, "input_format", obs_module_text("InputFormat"),
+                                                           OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT);
+    obs_property_t *color_space = obs_properties_add_list(properties, "color_space", obs_module_text("ColorSpace"),
+                                                          OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT);
+    obs_property_t *video_range = obs_properties_add_list(properties, "video_range", obs_module_text("VideoRange"),
+                                                          OBS_COMBO_TYPE_LIST, OBS_COMBO_FORMAT_INT);
+
+    // capture_info may be NULL when libobs queries the default property sheet
+    // without an instantiated source; the bare properties are returned then.
+    if (capture_info) {
+        bool isFastPath = capture_info->isFastPath;
+
+        // Add Property Visibility and Callbacks
+        // Preset controls apply only to the async path; resolution and frame
+        // rate lists are the fast path's manual configuration controls.
+        configure_property(device_list, true, true, properties_changed, capture_info);
+        configure_property(use_preset, !isFastPath, !isFastPath, (!isFastPath) ? properties_changed_use_preset : NULL,
+                           capture_info);
+        configure_property(preset_list, !isFastPath, !isFastPath, (!isFastPath) ? properties_changed_preset : NULL,
+                           capture_info);
+
+        configure_property(resolutions, isFastPath, isFastPath, NULL, NULL);
+        configure_property(use_buffering, !isFastPath, !isFastPath, NULL, NULL);
+        configure_property(frame_rates, isFastPath, isFastPath, NULL, NULL);
+        configure_property(color_space, !isFastPath, !isFastPath, NULL, NULL);
+        configure_property(video_range, !isFastPath, !isFastPath, NULL, NULL);
+        configure_property(input_format, !isFastPath, !isFastPath, NULL, NULL);
+    }
+
+    return properties;
+}
+
+/// Applies updated settings to the source.
+///
+/// Stores the latest settings on the info struct, then asks the capture
+/// instance to reconfigure its capture session accordingly.
+static void av_capture_update(void *capture_info_aliased, obs_data_t *settings)
+{
+    OBSAVCaptureInfo *info = capture_info_aliased;
+
+    info->settings = settings;
+    [info->capture updateSessionwithError:NULL];
+}
+
+/// Per-tick surface swap for the fast path: promotes the most recently
+/// captured IOSurface to "previous" under the mutex and (re-)binds it to the
+/// source's texture, releasing the surface it replaced.
+///
+/// NOTE(review): currentSurface is read before the mutex is taken; assumes a
+/// stale read merely delays the swap by one tick — confirm against the
+/// OBSAVCapture sample-buffer handler that writes currentSurface.
+static void av_fast_capture_tick(void *capture_info_aliased, float seconds __unused)
+{
+    OBSAVCaptureInfo *capture_info = capture_info_aliased;
+
+    // Nothing new captured since the last swap.
+    if (!capture_info->currentSurface) {
+        return;
+    }
+
+    // Skip texture work entirely while the source is not visible anywhere.
+    if (!obs_source_showing(capture_info->source)) {
+        return;
+    }
+
+    IOSurfaceRef previousSurface = capture_info->previousSurface;
+
+    // A failed lock leaves both surfaces untouched; retry on the next tick.
+    if (pthread_mutex_lock(&capture_info->mutex)) {
+        return;
+    }
+
+    capture_info->previousSurface = capture_info->currentSurface;
+    capture_info->currentSurface = NULL;
+    pthread_mutex_unlock(&capture_info->mutex);
+
+    // Same surface as last tick: nothing to rebind or release.
+    if (previousSurface == capture_info->previousSurface) {
+        return;
+    }
+
+    if (capture_info->previousSurface) {
+        obs_enter_graphics();
+        // Rebind the existing texture when possible; only create one on first use.
+        if (capture_info->texture) {
+            gs_texture_rebind_iosurface(capture_info->texture, capture_info->previousSurface);
+        } else {
+            capture_info->texture = gs_texture_create_from_iosurface(capture_info->previousSurface);
+        }
+        obs_leave_graphics();
+    }
+
+    // Balance the use count/retain taken when the surface was captured.
+    if (previousSurface) {
+        IOSurfaceDecrementUseCount(previousSurface);
+        CFRelease(previousSurface);
+    }
+}
+
+/// Renders the fast path's texture via the default rect effect.
+///
+/// - Parameters:
+///   - capture_info_aliased: Pointer to ``OBSAVCaptureInfo`` struct
+///   - effect: Draw effect supplied by ``libobs`` (unused; the source's own
+///     base rect effect is used instead)
+static void av_fast_capture_render(void *capture_info_aliased, gs_effect_t *effect __unused)
+{
+    OBSAVCaptureInfo *capture_info = capture_info_aliased;
+
+    if (!capture_info->texture) {
+        return;
+    }
+
+    const bool linear_srgb = gs_get_linear_srgb();
+
+    // Preserve the caller's sRGB framebuffer state across the draw.
+    const bool previous = gs_framebuffer_srgb_enabled();
+    gs_enable_framebuffer_srgb(linear_srgb);
+
+    gs_eparam_t *param = gs_effect_get_param_by_name(capture_info->effect, "image");
+
+    // Bind the texture exactly once, matching the requested color handling
+    // (the previous unconditional sRGB bind before this branch was redundant
+    // and contradicted the non-sRGB path).
+    if (linear_srgb) {
+        gs_effect_set_texture_srgb(param, capture_info->texture);
+    } else {
+        gs_effect_set_texture(param, capture_info->texture);
+    }
+
+    while (gs_effect_loop(capture_info->effect, "Draw")) {
+        gs_draw_sprite(capture_info->texture, 0, 0, 0);
+    }
+
+    gs_enable_framebuffer_srgb(previous);
+}
+
+/// Width (in pixels) of the frame currently tracked by the fast-path source.
+static UInt32 av_fast_capture_get_width(void *capture_info_aliased)
+{
+    OBSAVCaptureInfo *info = capture_info_aliased;
+
+    return (UInt32) info->frameSize.size.width;
+}
+
+/// Height (in pixels) of the frame currently tracked by the fast-path source.
+static UInt32 av_fast_capture_get_height(void *capture_info_aliased)
+{
+    OBSAVCaptureInfo *info = capture_info_aliased;
+
+    return (UInt32) info->frameSize.size.height;
+}
+
+/// Tears down either capture source variant: stops the session, releases the
+/// device configuration lock, destroys the fast path's mutex, and frees all
+/// heap allocations owned by the info struct.
+static void av_capture_destroy(void *capture_info_aliased)
+{
+    OBSAVCaptureInfo *capture_info = capture_info_aliased;
+
+    if (!capture_info) {
+        return;
+    }
+
+    OBSAVCapture *capture = capture_info->capture;
+    [capture stopCaptureSession];
+    // Messaging nil is a no-op, so this is safe even without a device input.
+    [capture.deviceInput.device unlockForConfiguration];
+
+    // Only the fast path initializes the mutex (see av_fast_capture_create).
+    if (capture_info->isFastPath) {
+        pthread_mutex_destroy(&capture_info->mutex);
+    }
+
+    if (capture_info->videoFrame) {
+        bfree(capture_info->videoFrame);
+        capture_info->videoFrame = NULL;
+    }
+
+    if (capture_info->audioFrame) {
+        bfree(capture_info->audioFrame);
+        capture_info->audioFrame = NULL;
+    }
+
+    // Cleared, not released, here; ARC-managed ownership lives on the
+    // OBSAVCapture instance.
+    if (capture_info->sampleBufferDescription) {
+        capture_info->sampleBufferDescription = NULL;
+    }
+
+    // Dropping the last strong reference lets ARC deallocate the instance.
+    capture_info->capture = NULL;
+    bfree(capture_info);
+}
+
+#pragma mark - OBS Module API
+
+OBS_DECLARE_MODULE()
+OBS_MODULE_USE_DEFAULT_LOCALE("macOS-avcapture", "en-US")
+
+/// Human-readable module description reported to libobs.
+MODULE_EXPORT const char *obs_module_description(void)
+{
+    return "macOS AVFoundation Capture Source";
+}
+
+/// Module entry point: registers both capture source variants.
+///
+/// "macos-avcapture" is the async variant (frames pushed through libobs'
+/// async video pipeline); "macos-avcapture-fast" renders its own texture via
+/// custom draw and therefore also wires up tick/render/width/height handlers.
+bool obs_module_load(void)
+{
+    struct obs_source_info av_capture_info = {
+        .id = "macos-avcapture",
+        .type = OBS_SOURCE_TYPE_INPUT,
+        .output_flags = OBS_SOURCE_ASYNC_VIDEO | OBS_SOURCE_AUDIO | OBS_SOURCE_DO_NOT_DUPLICATE,
+        .create = av_capture_create,
+        .get_name = av_capture_get_name,
+        .get_defaults = av_capture_set_defaults,
+        .get_properties = av_capture_properties,
+        .update = av_capture_update,
+        .destroy = av_capture_destroy,
+        .icon_type = OBS_ICON_TYPE_CAMERA,
+    };
+
+    obs_register_source(&av_capture_info);
+
+    // Fast path shares properties/update/destroy with the async variant but
+    // draws synchronously (OBS_SOURCE_VIDEO | OBS_SOURCE_CUSTOM_DRAW).
+    struct obs_source_info av_capture_sync_info = {.id = "macos-avcapture-fast",
+                                                   .type = OBS_SOURCE_TYPE_INPUT,
+                                                   .output_flags = OBS_SOURCE_VIDEO | OBS_SOURCE_CUSTOM_DRAW |
+                                                                   OBS_SOURCE_AUDIO | OBS_SOURCE_SRGB |
+                                                                   OBS_SOURCE_DO_NOT_DUPLICATE,
+                                                   .create = av_fast_capture_create,
+                                                   .get_name = av_fast_capture_get_name,
+                                                   .get_defaults = av_fast_capture_set_defaults,
+                                                   .get_properties = av_capture_properties,
+                                                   .update = av_capture_update,
+                                                   .destroy = av_capture_destroy,
+                                                   .video_tick = av_fast_capture_tick,
+                                                   .video_render = av_fast_capture_render,
+                                                   .get_width = av_fast_capture_get_width,
+                                                   .get_height = av_fast_capture_get_height,
+                                                   .icon_type = OBS_ICON_TYPE_CAMERA
+
+    };
+
+    obs_register_source(&av_capture_sync_info);
+
+    return true;
+}

+ 84 - 0
plugins/mac-avcapture/plugin-properties.h

@@ -0,0 +1,84 @@
+//
+//  plugin-properties.h
+//  obs-studio
+//
+//  Created by Patrick Heyer on 2023-03-07.
+//
+
+@import Foundation;
+
+#import <obs-properties.h>
+
+/// Configures a single source property's visibility, an optional modification callback with its payload data, and whether the property should be enabled
+/// - Parameters:
+///   - property: The source property to change
+///   - enable: Whether the source property should be enabled (user-changeable)
+///   - visible: Whether the source property should be visible
+///   - callback: Pointer to a function that will be called if this property has been modified or the properties are reloaded
+///   - callback_data: Optional payload data for the callback function
+void configure_property(obs_property_t *property, bool enable, bool visible, void *callback, void *callback_data);
+
+/// Generic callback handler for changed properties. Will update all properties of an OBSAVCapture source at once
+/// - Parameters:
+///   - captureInfo: Pointer to capture info struct associated with the source (``OBSAVCaptureInfo``)
+///   - properties: Pointer to properties struct associated with the source
+///   - property: Pointer to the property that the callback is attached to
+///   - settings: Pointer to settings associated with the source
+/// - Returns: Always returns true
+bool properties_changed(OBSAVCaptureInfo *captureInfo, obs_properties_t *properties, obs_property_t *property,
+                        obs_data_t *settings);
+
+/// Callback handler for preset changes.
+/// - Parameters:
+///   - captureInfo: Pointer to capture info struct associated with the source
+///   - properties: Pointer to properties struct associated with the source
+///   - property: Pointer to the property that the callback is attached to
+///   - settings: Pointer to settings associated with the source
+/// - Returns: Always returns true
+bool properties_changed_preset(OBSAVCaptureInfo *captureInfo, obs_properties_t *properties, obs_property_t *property,
+                               obs_data_t *settings);
+
+/// Callback handler for changing preset usage for an OBSAVCapture source. Switches between preset-based configuration and manual configuration
+/// - Parameters:
+///   - captureInfo: Pointer to capture info struct associated with the source
+///   - properties: Pointer to properties struct associated with the source
+///   - property: Pointer to the property that the callback is attached to
+///   - settings: Pointer to settings associated with the source
+/// - Returns: Always returns true
+bool properties_changed_use_preset(OBSAVCaptureInfo *captureInfo, obs_properties_t *properties,
+                                   obs_property_t *property, obs_data_t *settings);
+
+/// Updates preset property with description-value-pairs of presets supported by the currently selected device
+/// - Parameters:
+///   - captureInfo: Pointer to capture info struct associated with the source
+///   - property: Pointer to the property that the callback is attached to
+///   - settings: Pointer to settings associated with the source
+/// - Returns: Always returns true
+bool properties_update_preset(OBSAVCaptureInfo *captureInfo, obs_property_t *property, obs_data_t *settings);
+
+/// Updates device property with description-value-pairs of devices available via CoreMediaIO
+/// - Parameters:
+///   - captureInfo: Pointer to capture info struct associated with the source
+///   - property: Pointer to the property that the callback is attached to
+///   - settings: Pointer to settings associated with the source
+/// - Returns: Always returns true
+bool properties_update_device(OBSAVCaptureInfo *captureInfo, obs_property_t *property, obs_data_t *settings);
+
+/// Updates available values for all properties required in manual device configuration.
+///
+/// Properties updated by this call include:
+/// * Resolutions
+/// * Frame rates and frame rate ranges
+/// * Color formats
+/// * Color range
+///
+///  In CoreMediaIO color format, resolution and frame rate ranges are always coupled into a single format, while color range is always contained in the color format. The formats are thus compiled and de-duplicated to create a selection of all properties.
+///
+///  Frame rate ranges will be limited to ranges only available for a specific combination of resolution and color format.
+///
+/// - Parameters:
+///   - captureInfo: Pointer to capture info struct associated with the source
+///   - property: Pointer to the property that the callback is attached to
+///   - settings: Pointer to settings associated with the source
+/// - Returns: Always returns true
+bool properties_update_config(OBSAVCaptureInfo *captureInfo, obs_properties_t *properties, obs_data_t *settings);

+ 491 - 0
plugins/mac-avcapture/plugin-properties.m

@@ -0,0 +1,491 @@
+//
+//  plugin-properties.m
+//  mac-avcapture
+//
+//  Created by Patrick Heyer on 2023-03-07.
+//
+
+#import "OBSAVCapture.h"
+#import "plugin-properties.h"
+
+extern const char *av_capture_get_text(const char *text_id);
+
+void configure_property(obs_property_t *property, bool enable, bool visible, void *callback, void *callback_data)
+{
+    // NULL properties are tolerated so callers can pass the result of
+    // obs_properties_get without checking it first.
+    if (!property) {
+        return;
+    }
+
+    obs_property_set_enabled(property, enable);
+    obs_property_set_visible(property, visible);
+
+    if (callback) {
+        obs_property_set_modified_callback2(property, callback, callback_data);
+    }
+}
+
+bool properties_changed(OBSAVCaptureInfo *captureInfo, obs_properties_t *properties, obs_property_t *property __unused,
+                        obs_data_t *settings)
+{
+    obs_property_t *prop_use_preset = obs_properties_get(properties, "use_preset");
+    obs_property_t *prop_device = obs_properties_get(properties, "device");
+    obs_property_t *prop_presets = obs_properties_get(properties, "preset");
+
+    // Guard the isFastPath access: captureInfo may be NULL (the code below
+    // already null-checks it, so dereferencing it unconditionally here was a
+    // potential NULL dereference).
+    if (captureInfo) {
+        obs_property_set_enabled(prop_use_preset, !captureInfo->isFastPath);
+    }
+
+    if (captureInfo && captureInfo->capture && settings) {
+        properties_update_device(captureInfo, prop_device, settings);
+
+        // settings is known non-NULL inside this branch.
+        bool use_preset = obs_data_get_bool(settings, "use_preset");
+
+        if (use_preset) {
+            properties_update_preset(captureInfo, prop_presets, settings);
+        } else {
+            properties_update_config(captureInfo, properties, settings);
+        }
+    }
+
+    return true;
+}
+
+bool properties_changed_preset(OBSAVCaptureInfo *captureInfo, obs_properties_t *properties __unused,
+                               obs_property_t *property, obs_data_t *settings)
+{
+    // The preset list contents (sorted by resolution, stale entries disabled)
+    // are computed by properties_update_preset; this callback's only extra
+    // responsibility is gating the update on preset mode being active. The
+    // previous implementation duplicated that function's body verbatim.
+    bool use_preset = obs_data_get_bool(settings, "use_preset");
+
+    if (captureInfo && captureInfo->capture && settings && use_preset) {
+        return properties_update_preset(captureInfo, property, settings);
+    } else {
+        return false;
+    }
+}
+
+bool properties_changed_use_preset(OBSAVCaptureInfo *captureInfo, obs_properties_t *properties,
+                                   obs_property_t *property __unused, obs_data_t *settings)
+{
+    // Toggle between preset-based and manual configuration UI.
+    bool use_preset = obs_data_get_bool(settings, "use_preset");
+    obs_property_t *preset_list = obs_properties_get(properties, "preset");
+
+    obs_property_set_visible(preset_list, use_preset);
+
+    // Refresh the preset list contents when switching into preset mode.
+    if (use_preset) {
+        properties_changed_preset(captureInfo, properties, preset_list, settings);
+    }
+
+    // Manual-configuration properties mirror the preset list's state inverted.
+    static const char *manual_properties[] = {"resolution", "frame_rate", "color_space", "video_range",
+                                              "input_format"};
+
+    for (size_t i = 0; i < sizeof(manual_properties) / sizeof(manual_properties[0]); i++) {
+        obs_property_t *manual_property = obs_properties_get(properties, manual_properties[i]);
+
+        if (manual_property) {
+            obs_property_set_visible(manual_property, !use_preset);
+            obs_property_set_enabled(manual_property, !use_preset);
+        }
+    }
+
+    return true;
+}
+
+bool properties_update_preset(OBSAVCaptureInfo *captureInfo, obs_property_t *property, obs_data_t *settings)
+{
+    OBSAVCapture *captureInstance = captureInfo->capture;
+
+    // Sort preset keys by their description values, descending: "WxH" entries
+    // rank by pixel count (width * height), while the named presets map to the
+    // small sentinel ranks High=3, Medium=2, Low=1 and therefore sort after
+    // any numeric resolution. The final compare result is inverted to turn
+    // NSComparisonResult's ascending order into descending.
+    NSArray *presetKeys = [captureInstance.presetList
+        keysSortedByValueUsingComparator:^NSComparisonResult(NSString *obj1, NSString *obj2) {
+            NSNumber *obj1Resolution;
+            NSNumber *obj2Resolution;
+            if ([obj1 isEqualToString:@"High"]) {
+                obj1Resolution = @3;
+            } else if ([obj1 isEqualToString:@"Medium"]) {
+                obj1Resolution = @2;
+            } else if ([obj1 isEqualToString:@"Low"]) {
+                obj1Resolution = @1;
+            } else {
+                NSArray<NSString *> *obj1Dimensions = [obj1 componentsSeparatedByString:@"x"];
+                obj1Resolution = [NSNumber numberWithInt:([[obj1Dimensions objectAtIndex:0] intValue] *
+                                                          [[obj1Dimensions objectAtIndex:1] intValue])];
+            }
+
+            if ([obj2 isEqualToString:@"High"]) {
+                obj2Resolution = @3;
+            } else if ([obj2 isEqualToString:@"Medium"]) {
+                obj2Resolution = @2;
+            } else if ([obj2 isEqualToString:@"Low"]) {
+                obj2Resolution = @1;
+            } else {
+                NSArray<NSString *> *obj2Dimensions = [obj2 componentsSeparatedByString:@"x"];
+                obj2Resolution = [NSNumber numberWithInt:([[obj2Dimensions objectAtIndex:0] intValue] *
+                                                          [[obj2Dimensions objectAtIndex:1] intValue])];
+            }
+
+            NSComparisonResult result = [obj1Resolution compare:obj2Resolution];
+
+            // Invert the ordering so larger resolutions appear first.
+            if (result == NSOrderedAscending) {
+                return (NSComparisonResult) NSOrderedDescending;
+            } else if (result == NSOrderedDescending) {
+                return (NSComparisonResult) NSOrderedAscending;
+            } else {
+                return (NSComparisonResult) NSOrderedSame;
+            }
+        }];
+
+    NSString *deviceUUID = [OBSAVCapture stringFromSettings:settings withSetting:@"device"];
+    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:deviceUUID];
+    NSString *currentPreset = [OBSAVCapture stringFromSettings:settings withSetting:@"preset"];
+
+    obs_property_list_clear(property);
+
+    if (device) {
+        for (NSString *presetName in presetKeys) {
+            NSString *presetDescription = captureInstance.presetList[presetName];
+
+            // Unsupported presets are listed only if currently selected, and
+            // then disabled, so a user's saved choice remains visible.
+            if ([device supportsAVCaptureSessionPreset:presetName]) {
+                obs_property_list_add_string(property, presetDescription.UTF8String, presetName.UTF8String);
+            } else if ([currentPreset isEqualToString:presetName]) {
+                size_t index =
+                    obs_property_list_add_string(property, presetDescription.UTF8String, presetName.UTF8String);
+                obs_property_list_item_disable(property, index, true);
+            }
+        };
+    } else if (deviceUUID.length) {
+        // Device is configured but currently unavailable: show the saved
+        // preset as a single disabled entry.
+        size_t index = obs_property_list_add_string(property, captureInstance.presetList[currentPreset].UTF8String,
+                                                    currentPreset.UTF8String);
+        obs_property_list_item_disable(property, index, true);
+    }
+
+    return true;
+}
+
+bool properties_update_device(OBSAVCaptureInfo *captureInfo __unused, obs_property_t *property, obs_data_t *settings)
+{
+    // Rebuild the device list from scratch on every call.
+    obs_property_list_clear(property);
+
+    NSString *currentDeviceUUID = [OBSAVCapture stringFromSettings:settings withSetting:@"device"];
+    NSString *currentDeviceName = [OBSAVCapture stringFromSettings:settings withSetting:@"device_name"];
+    BOOL isDeviceFound = NO;
+
+    // Leading empty entry lets the user deselect any device.
+    obs_property_list_add_string(property, "", "");
+
+    NSArray *deviceTypes;
+    if (@available(macOS 13, *)) {
+        deviceTypes = @[
+            AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeExternalUnknown,
+            AVCaptureDeviceTypeDeskViewCamera
+        ];
+    } else {
+        deviceTypes = @[AVCaptureDeviceTypeBuiltInWideAngleCamera, AVCaptureDeviceTypeExternalUnknown];
+    }
+
+    // Pure video devices are listed first, then muxed (audio+video) devices.
+    NSArray *mediaTypes = @[AVMediaTypeVideo, AVMediaTypeMuxed];
+
+    for (AVMediaType mediaType in mediaTypes) {
+        AVCaptureDeviceDiscoverySession *discoverySession =
+            [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes mediaType:mediaType
+                                                                    position:AVCaptureDevicePositionUnspecified];
+
+        for (AVCaptureDevice *device in discoverySession.devices) {
+            obs_property_list_add_string(property, device.localizedName.UTF8String, device.uniqueID.UTF8String);
+
+            if (!isDeviceFound && [currentDeviceUUID isEqualToString:device.uniqueID]) {
+                isDeviceFound = YES;
+            }
+        }
+    }
+
+    // Keep a stale selection visible (but disabled) so user settings survive
+    // a temporarily disconnected device.
+    if (!isDeviceFound && currentDeviceUUID.length > 0) {
+        size_t index =
+            obs_property_list_add_string(property, currentDeviceName.UTF8String, currentDeviceUUID.UTF8String);
+        obs_property_list_item_disable(property, index, true);
+    }
+
+    return true;
+}
+
+bool properties_update_config(OBSAVCaptureInfo *capture, obs_properties_t *properties, obs_data_t *settings)
+{
+    OBSAVCapture *captureInstance = capture->capture;
+    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:[OBSAVCapture stringFromSettings:settings
+                                                                                       withSetting:@"device"]];
+
+    obs_property_t *prop_resolution = obs_properties_get(properties, "resolution");
+    obs_property_t *prop_framerate = obs_properties_get(properties, "frame_rate");
+
+    obs_property_list_clear(prop_resolution);
+    obs_property_frame_rate_clear(prop_framerate);
+
+    obs_property_t *prop_input_format = NULL;
+    obs_property_t *prop_color_space = NULL;
+    obs_property_t *prop_video_range = NULL;
+
+    if (!captureInstance.isFastPath) {
+        prop_input_format = obs_properties_get(properties, "input_format");
+        prop_color_space = obs_properties_get(properties, "color_space");
+        prop_video_range = obs_properties_get(properties, "video_range");
+
+        obs_property_list_clear(prop_input_format);
+        obs_property_list_clear(prop_video_range);
+        obs_property_list_clear(prop_color_space);
+    }
+
+    CMVideoDimensions resolution = [OBSAVCapture dimensionsFromSettings:settings];
+
+    if (resolution.width == 0 || resolution.height == 0) {
+        [captureInstance AVCaptureLog:LOG_DEBUG withFormat:@"No valid resolution found in settings"];
+    }
+
+    struct media_frames_per_second fps;
+    if (!obs_data_get_frames_per_second(settings, "frame_rate", &fps, NULL)) {
+        [captureInstance AVCaptureLog:LOG_DEBUG withFormat:@"No valid framerate found in settings"];
+    }
+
+    CMTime time = {.value = fps.denominator, .timescale = fps.numerator, .flags = 1};
+
+    int input_format = 0;
+    int color_space = 0;
+    int video_range = 0;
+
+    NSMutableArray *inputFormats = NULL;
+    NSMutableArray *colorSpaces = NULL;
+    NSMutableArray *videoRanges = NULL;
+
+    if (!captureInstance.isFastPath) {
+        input_format = (int) obs_data_get_int(settings, "input_format");
+        color_space = (int) obs_data_get_int(settings, "color_space");
+        video_range = (int) obs_data_get_int(settings, "video_range");
+
+        inputFormats = [[NSMutableArray alloc] init];
+        colorSpaces = [[NSMutableArray alloc] init];
+        videoRanges = [[NSMutableArray alloc] init];
+    }
+
+    NSMutableArray *resolutions = [[NSMutableArray alloc] init];
+    NSMutableArray *frameRates = [[NSMutableArray alloc] init];
+
+    BOOL hasFoundResolution = NO;
+    BOOL hasFoundFramerate = NO;
+    BOOL hasFoundInputFormat = captureInstance.isFastPath;
+    BOOL hasFoundColorSpace = captureInstance.isFastPath;
+    BOOL hasFoundVideoRange = captureInstance.isFastPath;
+
+    if (device) {
+        // Iterate over all formats reported by the device and gather them for property lists
+        for (AVCaptureDeviceFormat *format in device.formats) {
+            if (!captureInstance.isFastPath) {
+                FourCharCode formatSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+
+                NSString *formatDescription = [OBSAVCapture stringFromSubType:formatSubType];
+                int device_format = [OBSAVCapture formatFromSubtype:formatSubType];
+                int device_range;
+                const char *range_description;
+
+                if ([OBSAVCapture isFullRangeFormat:formatSubType]) {
+                    device_range = VIDEO_RANGE_FULL;
+                    range_description = av_capture_get_text("VideoRange.Full");
+                } else {
+                    device_range = VIDEO_RANGE_PARTIAL;
+                    range_description = av_capture_get_text("VideoRange.Partial");
+                }
+
+                if (!hasFoundInputFormat && input_format == device_format) {
+                    hasFoundInputFormat = YES;
+                }
+
+                if (!hasFoundVideoRange && video_range == device_range) {
+                    hasFoundVideoRange = YES;
+                }
+
+                if (![inputFormats containsObject:@(formatSubType)]) {
+                    obs_property_list_add_int(prop_input_format, formatDescription.UTF8String, device_format);
+                    [inputFormats addObject:@(formatSubType)];
+                }
+
+                if (![videoRanges containsObject:@(range_description)]) {
+                    obs_property_list_add_int(prop_video_range, range_description, device_range);
+                    [videoRanges addObject:@(range_description)];
+                }
+
+                int device_color_space = [OBSAVCapture colorspaceFromDescription:format.formatDescription];
+
+                if (![colorSpaces containsObject:@(device_color_space)]) {
+                    obs_property_list_add_int(prop_color_space,
+                                              [OBSAVCapture stringFromColorspace:device_color_space].UTF8String,
+                                              device_color_space);
+                    [colorSpaces addObject:@(device_color_space)];
+                }
+
+                if (!hasFoundColorSpace && device_color_space == color_space) {
+                    hasFoundColorSpace = YES;
+                }
+            }
+
+            CMVideoDimensions formatDimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+
+            NSDictionary *resolutionData =
+                @{@"width": @(formatDimensions.width),
+                  @"height": @(formatDimensions.height)};
+
+            if (![resolutions containsObject:resolutionData]) {
+                [resolutions addObject:resolutionData];
+            }
+
+            if (!hasFoundResolution && formatDimensions.width == resolution.width &&
+                formatDimensions.height == resolution.height) {
+                hasFoundResolution = YES;
+            }
+
+            // Only iterate over available framerates if input format, color space, and resolution are matching
+            if (hasFoundInputFormat && hasFoundColorSpace && hasFoundResolution) {
+                for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges.reverseObjectEnumerator) {
+                    struct media_frames_per_second min_fps = {
+                        .numerator = (uint32_t) clamp_Uint(range.maxFrameDuration.timescale, 0, UINT32_MAX),
+                        .denominator = (uint32_t) clamp_Uint(range.maxFrameDuration.value, 0, UINT32_MAX)};
+                    struct media_frames_per_second max_fps = {
+                        .numerator = (uint32_t) clamp_Uint(range.minFrameDuration.timescale, 0, UINT32_MAX),
+                        .denominator = (uint32_t) clamp_Uint(range.minFrameDuration.value, 0, UINT32_MAX)};
+
+                    if (![frameRates containsObject:range]) {
+                        obs_property_frame_rate_fps_range_add(prop_framerate, min_fps, max_fps);
+                        [frameRates addObject:range];
+                    }
+
+                    if (!hasFoundFramerate && CMTimeCompare(range.maxFrameDuration, time) >= 0 &&
+                        CMTimeCompare(range.minFrameDuration, time) <= 0) {
+                        hasFoundFramerate = YES;
+                    }
+                }
+            }
+        }
+
+        // Add resolutions in reverse order (formats reported by macOS are sorted with lowest resolution first)
+        for (NSDictionary *resolutionData in resolutions.reverseObjectEnumerator) {
+            NSError *error;
+            NSData *jsonData = [NSJSONSerialization dataWithJSONObject:resolutionData options:0 error:&error];
+
+            int width = [[resolutionData objectForKey:@"width"] intValue];
+            int height = [[resolutionData objectForKey:@"height"] intValue];
+
+            obs_property_list_add_string(
+                prop_resolution, [NSString stringWithFormat:@"%dx%d", width, height].UTF8String,
+                [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding].UTF8String);
+        }
+
+        // Add currently selected values in disabled state if they are not supported by the device
+        size_t index;
+        if (!captureInstance.isFastPath) {
+            FourCharCode formatSubType = [OBSAVCapture fourCharCodeFromFormat:input_format withRange:video_range];
+            if (!hasFoundInputFormat) {
+                NSString *formatDescription = [OBSAVCapture stringFromSubType:formatSubType];
+
+                index = obs_property_list_add_int(prop_input_format, formatDescription.UTF8String, input_format);
+                obs_property_list_item_disable(prop_input_format, index, true);
+            }
+
+            if (!hasFoundVideoRange) {
+                int device_range;
+                const char *range_description;
+
+                if ([OBSAVCapture isFullRangeFormat:formatSubType]) {
+                    device_range = VIDEO_RANGE_FULL;
+                    range_description = av_capture_get_text("VideoRange.Full");
+                } else {
+                    device_range = VIDEO_RANGE_PARTIAL;
+                    range_description = av_capture_get_text("VideoRange.Partial");
+                }
+
+                index = obs_property_list_add_int(prop_video_range, range_description, device_range);
+                obs_property_list_item_disable(prop_video_range, index, true);
+            }
+
+            if (!hasFoundColorSpace) {
+                index = obs_property_list_add_int(
+                    prop_color_space, [OBSAVCapture stringFromColorspace:color_space].UTF8String, color_space);
+                obs_property_list_item_disable(prop_color_space, index, true);
+            }
+        }
+
+        if (!hasFoundResolution) {
+            NSDictionary *resolutionData = @{@"width": @(resolution.width), @"height": @(resolution.height)};
+
+            NSError *error;
+            NSData *jsonData = [NSJSONSerialization dataWithJSONObject:resolutionData options:0 error:&error];
+
+            index = obs_property_list_add_string(
+                prop_resolution, [NSString stringWithFormat:@"%dx%d", resolution.width, resolution.height].UTF8String,
+                [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding].UTF8String);
+            obs_property_list_item_disable(prop_resolution, index, true);
+        }
+    }
+    return true;
+}