mac-sck-common.m

#include "mac-sck-common.h"
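
// ScreenCaptureKit-based capture requires macOS 12.5 or later; callers use this
// check to gate the SCK code paths.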
bool is_screen_capture_available(void)
{
    if (@available(macOS 12.5, *)) {
        return true;
    } else {
        return false;
    }
}

#pragma mark - ScreenCaptureDelegate

@implementation ScreenCaptureDelegate
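
// Dispatches incoming SCStream sample buffers: screen frames go to screen_stream_video_update()
// unless the source is audio-only, and audio buffers go to screen_stream_audio_update() on
// macOS 13.0+ where SCK audio output is available.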
- (void)stream:(SCStream *)stream didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(SCStreamOutputType)type
{
    if (self.sc != NULL) {
        if (type == SCStreamOutputTypeScreen && !self.sc->audio_only) {
            screen_stream_video_update(self.sc, sampleBuffer);
        } else if (@available(macOS 13.0, *)) {
            if (type == SCStreamOutputTypeAudio) {
                screen_stream_audio_update(self.sc, sampleBuffer);
            }
        }
    }
}
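
// Called when the stream stops; logs a human-readable reason for the SCStreamError code,
// marks the capture as failed, and asks OBS to refresh the source's properties.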
- (void)stream:(SCStream *)stream didStopWithError:(NSError *)error
{
    NSString *errorMessage;

    switch (error.code) {
        case SCStreamErrorUserStopped:
            errorMessage = @"User stopped stream.";
            break;
        case SCStreamErrorNoCaptureSource:
            errorMessage = @"Stream stopped as no capture source was found.";
            break;
        default:
            errorMessage = [NSString stringWithFormat:@"Stream stopped with error %ld (\"%s\")", error.code,
                                                      error.localizedDescription.UTF8String];
            break;
    }

    MACCAP_LOG(LOG_WARNING, "%s", errorMessage.UTF8String);
    self.sc->capture_failed = true;
    obs_source_update_properties(self.sc->source);
}

@end

#pragma mark - obs_properties
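
// Refreshes the cached SCShareableContent snapshot. The shareable_content_available
// semaphore serializes access: it is taken before the asynchronous request to
// getShareableContentExcludingDesktopWindows: and posted again from the completion
// handler once the new snapshot (or an error) has arrived.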
void screen_capture_build_content_list(struct screen_capture *sc, bool display_capture)
{
    typedef void (^shareable_content_callback)(SCShareableContent *, NSError *);
    shareable_content_callback new_content_received = ^void(SCShareableContent *shareable_content, NSError *error) {
        if (error == nil && sc->shareable_content_available != NULL) {
            sc->shareable_content = [shareable_content retain];
        } else {
#ifdef DEBUG
            MACCAP_ERR("screen_capture_properties: Failed to get shareable content with error %s\n",
                       [[error localizedFailureReason] cStringUsingEncoding:NSUTF8StringEncoding]);
#endif
            MACCAP_LOG(LOG_WARNING, "Unable to get list of available applications or windows. "
                                    "Please check if OBS has necessary screen capture permissions.");
        }
        os_sem_post(sc->shareable_content_available);
    };

    os_sem_wait(sc->shareable_content_available);
    [sc->shareable_content release];

    BOOL onScreenWindowsOnly = (display_capture) ? NO : !sc->show_hidden_windows;
    [SCShareableContent getShareableContentExcludingDesktopWindows:YES
                                               onScreenWindowsOnly:onScreenWindowsOnly
                                                 completionHandler:new_content_received];
}
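
// Populates the "display_uuid" list property: each SCDisplay is matched to its NSScreen
// (via NSScreenNumber) to build a readable "name: WxH @ X,Y" label, and the stored value
// is the display's CGDisplay UUID string.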
bool build_display_list(struct screen_capture *sc, obs_properties_t *props)
{
    os_sem_wait(sc->shareable_content_available);

    obs_property_t *display_list = obs_properties_get(props, "display_uuid");
    obs_property_list_clear(display_list);

    // Add null entry to the top of the content list, to avoid inadvertent capture of the first enumerated display
    // when opening the source's properties window
    obs_property_list_add_string(display_list, " ", NULL);

    for (SCDisplay *display in sc->shareable_content.displays) {
        NSScreen *display_screen = nil;
        for (NSScreen *screen in NSScreen.screens) {
            NSNumber *screen_num = screen.deviceDescription[@"NSScreenNumber"];
            CGDirectDisplayID screen_display_id = (CGDirectDisplayID) screen_num.intValue;
            if (screen_display_id == display.displayID) {
                display_screen = screen;
                break;
            }
        }
        if (!display_screen) {
            continue;
        }

        char dimension_buffer[4][12] = {};
        char name_buffer[256] = {};
        snprintf(dimension_buffer[0], sizeof(dimension_buffer[0]), "%u", (uint32_t) display_screen.frame.size.width);
        snprintf(dimension_buffer[1], sizeof(dimension_buffer[0]), "%u", (uint32_t) display_screen.frame.size.height);
        snprintf(dimension_buffer[2], sizeof(dimension_buffer[0]), "%d", (int32_t) display_screen.frame.origin.x);
        snprintf(dimension_buffer[3], sizeof(dimension_buffer[0]), "%d", (int32_t) display_screen.frame.origin.y);
        snprintf(name_buffer, sizeof(name_buffer), "%.200s: %.12sx%.12s @ %.12s,%.12s",
                 display_screen.localizedName.UTF8String, dimension_buffer[0], dimension_buffer[1],
                 dimension_buffer[2], dimension_buffer[3]);

        CFUUIDRef display_uuid = CGDisplayCreateUUIDFromDisplayID(display.displayID);
        CFStringRef uuid_string = CFUUIDCreateString(kCFAllocatorDefault, display_uuid);
        obs_property_list_add_string(display_list, name_buffer,
                                     CFStringGetCStringPtr(uuid_string, kCFStringEncodingUTF8));
        CFRelease(uuid_string);
        CFRelease(display_uuid);
    }

    os_sem_post(sc->shareable_content_available);
    return true;
}
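
// Populates the "window" list property. Windows without an application name or title are
// filtered out unless show_empty_names is enabled, and the remaining entries are sorted
// case-insensitively by application name, then by window title.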
bool build_window_list(struct screen_capture *sc, obs_properties_t *props)
{
    os_sem_wait(sc->shareable_content_available);

    obs_property_t *window_list = obs_properties_get(props, "window");
    obs_property_list_clear(window_list);

    // Add null entry to the top of the content list, to avoid inadvertent capture of the first enumerated window
    // when opening the source's properties window
    obs_property_list_add_int(window_list, " ", kCGNullWindowID);

    NSPredicate *filteredWindowPredicate =
        [NSPredicate predicateWithBlock:^BOOL(SCWindow *window, NSDictionary *bindings __unused) {
            NSString *app_name = window.owningApplication.applicationName;
            NSString *title = window.title;
            if (!sc->show_empty_names) {
                return (app_name.length > 0) && (title.length > 0);
            } else {
                return YES;
            }
        }];
    NSArray<SCWindow *> *filteredWindows;
    filteredWindows = [sc->shareable_content.windows filteredArrayUsingPredicate:filteredWindowPredicate];

    NSArray<SCWindow *> *sortedWindows;
    sortedWindows = [filteredWindows sortedArrayUsingComparator:^NSComparisonResult(SCWindow *window, SCWindow *other) {
        NSComparisonResult appNameCmp = [window.owningApplication.applicationName
            compare:other.owningApplication.applicationName
            options:NSCaseInsensitiveSearch];
        if (appNameCmp == NSOrderedAscending) {
            return NSOrderedAscending;
        } else if (appNameCmp == NSOrderedSame) {
            return [window.title compare:other.title options:NSCaseInsensitiveSearch];
        } else {
            return NSOrderedDescending;
        }
    }];

    for (SCWindow *window in sortedWindows) {
        NSString *app_name = window.owningApplication.applicationName;
        NSString *title = window.title;

        const char *list_text = [[NSString stringWithFormat:@"[%@] %@", app_name, title] UTF8String];
        obs_property_list_add_int(window_list, list_text, window.windowID);
    }

    os_sem_post(sc->shareable_content_available);
    return true;
}
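
// Populates the "application" list property with running applications that report a
// non-empty name, sorted case-insensitively; the stored value is the bundle identifier.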
bool build_application_list(struct screen_capture *sc, obs_properties_t *props)
{
    os_sem_wait(sc->shareable_content_available);

    obs_property_t *application_list = obs_properties_get(props, "application");
    obs_property_list_clear(application_list);

    // Add null entry to the top of the content list, to avoid inadvertent capture of the first enumerated application
    // when opening the source's properties window
    obs_property_list_add_string(application_list, " ", 0);

    NSArray<SCRunningApplication *> *filteredApplications;
    filteredApplications = [sc->shareable_content.applications
        filteredArrayUsingPredicate:[NSPredicate predicateWithBlock:^BOOL(SCRunningApplication *app,
                                                                          NSDictionary *bindings __unused) {
            return app.applicationName.length > 0;
        }]];

    NSArray<SCRunningApplication *> *sortedApplications;
    sortedApplications = [filteredApplications
        sortedArrayUsingComparator:^NSComparisonResult(SCRunningApplication *app, SCRunningApplication *other) {
            return [app.applicationName compare:other.applicationName options:NSCaseInsensitiveSearch];
        }];

    for (SCRunningApplication *application in sortedApplications) {
        const char *name = [application.applicationName UTF8String];
        const char *bundle_id = [application.bundleIdentifier UTF8String];
        obs_property_list_add_string(application_list, name, bundle_id);
    }

    os_sem_post(sc->shareable_content_available);
    return true;
}

#pragma mark - audio/video
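
// Converts an SCStream video sample buffer into an IOSurface for rendering. For window
// streams, the frame's attachment dictionary (content rect, content scale, and scale
// factor) is used to derive the window's size in output pixels; when those dimensions
// change, the stream configuration is updated to match.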
API_AVAILABLE(macos(12.5)) void screen_stream_video_update(struct screen_capture *sc, CMSampleBufferRef sample_buffer)
{
    bool frame_detail_errored = false;
    float scale_factor = 1.0f;
    CGRect window_rect = {};

    CFArrayRef attachments_array = CMSampleBufferGetSampleAttachmentsArray(sample_buffer, false);
    if (sc->capture_type == ScreenCaptureWindowStream && attachments_array != NULL &&
        CFArrayGetCount(attachments_array) > 0) {
        CFDictionaryRef attachments_dict = CFArrayGetValueAtIndex(attachments_array, 0);
        if (attachments_dict != NULL) {
            CFTypeRef frame_scale_factor = CFDictionaryGetValue(attachments_dict, SCStreamFrameInfoScaleFactor);
            if (frame_scale_factor != NULL) {
                Boolean result = CFNumberGetValue((CFNumberRef) frame_scale_factor, kCFNumberFloatType, &scale_factor);
                if (result == false) {
                    scale_factor = 1.0f;
                    frame_detail_errored = true;
                }
            }

            CFTypeRef content_rect_dict = CFDictionaryGetValue(attachments_dict, SCStreamFrameInfoContentRect);
            CFTypeRef content_scale_factor = CFDictionaryGetValue(attachments_dict, SCStreamFrameInfoContentScale);
            if ((content_rect_dict != NULL) && (content_scale_factor != NULL)) {
                CGRect content_rect = {};
                float points_to_pixels = 0.0f;

                Boolean result =
                    CGRectMakeWithDictionaryRepresentation((__bridge CFDictionaryRef) content_rect_dict, &content_rect);
                if (result == false) {
                    content_rect = CGRectZero;
                    frame_detail_errored = true;
                }
                result = CFNumberGetValue((CFNumberRef) content_scale_factor, kCFNumberFloatType, &points_to_pixels);
                if (result == false) {
                    points_to_pixels = 1.0f;
                    frame_detail_errored = true;
                }

                window_rect.origin = content_rect.origin;
                window_rect.size.width = content_rect.size.width / points_to_pixels * scale_factor;
                window_rect.size.height = content_rect.size.height / points_to_pixels * scale_factor;
            }
        }
    }
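
    // Pull the IOSurface backing the frame; the swap into sc->current happens under
    // sc->mutex, and the previously displayed surface is released after the lock is dropped.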
    CVImageBufferRef image_buffer = CMSampleBufferGetImageBuffer(sample_buffer);

    CVPixelBufferLockBaseAddress(image_buffer, 0);
    IOSurfaceRef frame_surface = CVPixelBufferGetIOSurface(image_buffer);
    CVPixelBufferUnlockBaseAddress(image_buffer, 0);

    IOSurfaceRef prev_current = NULL;

    if (frame_surface && !pthread_mutex_lock(&sc->mutex)) {
        bool needs_to_update_properties = false;

        if (!frame_detail_errored) {
            if (sc->capture_type == ScreenCaptureWindowStream) {
                if ((sc->frame.size.width != window_rect.size.width) ||
                    (sc->frame.size.height != window_rect.size.height)) {
                    sc->frame.size.width = window_rect.size.width;
                    sc->frame.size.height = window_rect.size.height;

                    needs_to_update_properties = true;
                }
            } else {
                size_t width = CVPixelBufferGetWidth(image_buffer);
                size_t height = CVPixelBufferGetHeight(image_buffer);

                if ((sc->frame.size.width != width) || (sc->frame.size.height != height)) {
                    sc->frame.size.width = width;
                    sc->frame.size.height = height;

                    needs_to_update_properties = true;
                }
            }
        }

        if (needs_to_update_properties) {
            [sc->stream_properties setWidth:(size_t) sc->frame.size.width];
            [sc->stream_properties setHeight:(size_t) sc->frame.size.height];

            [sc->disp updateConfiguration:sc->stream_properties completionHandler:^(NSError *_Nullable error) {
                if (error) {
                    MACCAP_ERR("screen_stream_video_update: Failed to update stream properties with error %s\n",
                               [[error localizedFailureReason] cStringUsingEncoding:NSUTF8StringEncoding]);
                }
            }];
        }

        prev_current = sc->current;
        sc->current = frame_surface;
        CFRetain(sc->current);
        IOSurfaceIncrementUseCount(sc->current);

        pthread_mutex_unlock(&sc->mutex);
    }

    if (prev_current) {
        IOSurfaceDecrementUseCount(prev_current);
        CFRelease(prev_current);
    }
}
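
// Converts an SCStream audio sample buffer into an obs_source_audio packet. The block
// buffer is treated as planar float samples (AUDIO_FORMAT_FLOAT_PLANAR) and split into
// equally sized per-channel regions before being handed to obs_source_output_audio().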
void screen_stream_audio_update(struct screen_capture *sc, CMSampleBufferRef sample_buffer)
{
    CMFormatDescriptionRef format_description = CMSampleBufferGetFormatDescription(sample_buffer);
    const AudioStreamBasicDescription *audio_description =
        CMAudioFormatDescriptionGetStreamBasicDescription(format_description);

    if (audio_description->mChannelsPerFrame < 1) {
        MACCAP_ERR(
            "screen_stream_audio_update: Received sample buffer has less than 1 channel per frame (mChannelsPerFrame set to '%d')\n",
            audio_description->mChannelsPerFrame);
        return;
    }

    char *_Nullable bytes = NULL;
    CMBlockBufferRef data_buffer = CMSampleBufferGetDataBuffer(sample_buffer);
    size_t data_buffer_length = CMBlockBufferGetDataLength(data_buffer);
    CMBlockBufferGetDataPointer(data_buffer, 0, &data_buffer_length, NULL, &bytes);

    CMTime presentation_time = CMSampleBufferGetOutputPresentationTimeStamp(sample_buffer);

    struct obs_source_audio audio_data = {};
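
    // Planar layout: each of the mChannelsPerFrame channels occupies a contiguous region
    // of data_buffer_length / mChannelsPerFrame bytes within the block buffer.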
    for (uint32_t channel_idx = 0; channel_idx < audio_description->mChannelsPerFrame; ++channel_idx) {
        uint32_t offset = (uint32_t) (data_buffer_length / audio_description->mChannelsPerFrame) * channel_idx;
        audio_data.data[channel_idx] = (uint8_t *) bytes + offset;
    }

    audio_data.frames =
        (uint32_t) (data_buffer_length / audio_description->mBytesPerFrame / audio_description->mChannelsPerFrame);
    audio_data.speakers = audio_description->mChannelsPerFrame;
    audio_data.samples_per_sec = (uint32_t) audio_description->mSampleRate;
    audio_data.timestamp = (uint64_t) (CMTimeGetSeconds(presentation_time) * NSEC_PER_SEC);
    audio_data.format = AUDIO_FORMAT_FLOAT_PLANAR;
    obs_source_output_audio(sc->source, &audio_data);
}