/*
 * AVFoundation input device
 * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * AVFoundation input device
 * @author Thilo Borgmann <thilo.borgmann@mail.de>
 */
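
/*
 * Typical invocations (mirroring the examples documented in doc/indevs.texi;
 * listed here for orientation only):
 *
 *   ffmpeg -f avfoundation -list_devices true -i ""
 *   ffmpeg -f avfoundation -i "0:0" out.mkv
 *   ffmpeg -f avfoundation -framerate 30 -video_size 640x480 -i "default:none" out.mkv
 *   ffmpeg -f avfoundation -capture_cursor 1 -i "Capture screen 0:none" out.mkv
 */
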
#import <AVFoundation/AVFoundation.h>
#include <pthread.h>

#include "libavutil/avstring.h"
#include "libavutil/channel_layout.h"
#include "libavutil/imgutils.h"
#include "libavutil/internal.h"
#include "libavutil/opt.h"
#include "libavutil/parseutils.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"
#include "libavformat/demux.h"
#include "libavformat/internal.h"
#include "avdevice.h"

static const int avf_time_base = 100;

static const AVRational avf_time_base_q = {
    .num = 1,
    .den = avf_time_base
};

struct AVFPixelFormatSpec {
    enum AVPixelFormat ff_id;
    OSType avf_id;
};

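/* Mapping between FFmpeg pixel formats and the CoreVideo pixel format
 * constants accepted by AVCaptureVideoDataOutput. */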
static const struct AVFPixelFormatSpec avf_pixel_formats[] = {
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR },
    { AV_PIX_FMT_0RGB,         kCVPixelFormatType_32ARGB },
    { AV_PIX_FMT_BGR0,         kCVPixelFormatType_32BGRA },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8 },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    /* ... further format pairs elided in this excerpt ... */
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8 },
#endif
    { AV_PIX_FMT_NONE, 0 }
};

typedef struct
{
    AVClass         *class;

    int              frames_captured;
    int              audio_frames_captured;
    pthread_mutex_t  frame_lock;
    id               avf_delegate;
    id               avf_audio_delegate;

    AVRational       framerate;
    int              width, height;

    int              capture_cursor, capture_mouse_clicks;
    int              capture_raw_data, drop_late_frames;
    int              video_is_muxed, video_is_screen;

    int              list_devices;
    int              video_device_index, video_stream_index;
    int              audio_device_index, audio_stream_index;

    char            *video_filename, *audio_filename;
    int              num_video_devices;

    int              audio_channels, audio_bits_per_sample;
    int              audio_float, audio_be, audio_signed_integer;
    int              audio_packed, audio_non_interleaved;
    int32_t         *audio_buffer;
    int              audio_buffer_size;

    enum AVPixelFormat pixel_format;

    AVCaptureSession         *capture_session;
    AVCaptureVideoDataOutput *video_output;
    AVCaptureAudioDataOutput *audio_output;
    CMSampleBufferRef         current_frame;
    CMSampleBufferRef         current_audio_frame;

    AVCaptureDevice *observed_device;
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    AVCaptureDeviceTransportControlsPlaybackMode observed_mode;
#endif
    int              observed_quit;
} AVFContext;

static void lock_frames(AVFContext* ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}

static void unlock_frames(AVFContext* ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}

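/* AVFoundation delivers sample buffers to the delegates below on a private
 * dispatch queue. Each receiver keeps only the most recent buffer, guarded
 * by ctx->frame_lock; the demuxer thread picks it up in avf_read_packet(). */
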
/** FrameReceiver class - delegate for AVCaptureSession
 */
@interface AVFFrameReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void) captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection;

@end

@implementation AVFFrameReceiver

- (id)initWithContext:(AVFContext*)context
{
    if (self = [super init]) {
        _context = context;

        // start observing if a device is set for it
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
        if (_context->observed_device) {
            NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
            NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew;

            [_context->observed_device addObserver: self
                                        forKeyPath: keyPath
                                           options: options
                                           context: _context];
        }
#endif
    }
    return self;
}

- (void)dealloc {
    // stop observing if a device is set for it
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    if (_context->observed_device) {
        NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
        [_context->observed_device removeObserver: self forKeyPath: keyPath];
    }
#endif
    [super dealloc];
}

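// KVO callback for the transport controls of DV-type devices: once the
// observed device leaves "playing" mode, observed_quit makes
// avf_read_packet() report end of stream instead of EAGAIN.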
- (void)observeValueForKeyPath:(NSString *)keyPath
                      ofObject:(id)object
                        change:(NSDictionary *)change
                       context:(void *)context {
    if (context == _context) {
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
        AVCaptureDeviceTransportControlsPlaybackMode mode =
            [change[NSKeyValueChangeNewKey] integerValue];

        if (mode != _context->observed_mode) {
            if (mode == AVCaptureDeviceTransportControlsNotPlayingMode) {
                _context->observed_quit = 1;
            }
            _context->observed_mode = mode;
        }
#endif
    } else {
        [super observeValueForKeyPath: keyPath
                             ofObject: object
                               change: change
                              context: context];
    }
}

- (void) captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
         fromConnection:(AVCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_frame != nil) {
        CFRelease(_context->current_frame);
    }

    _context->current_frame = (CMSampleBufferRef)CFRetain(videoFrame);

    unlock_frames(_context);

    ++_context->frames_captured;
}

@end

/** AudioReceiver class - delegate for AVCaptureSession
 */
@interface AVFAudioReceiver : NSObject
{
    AVFContext* _context;
}

- (id)initWithContext:(AVFContext*)context;

- (void) captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection;

@end

@implementation AVFAudioReceiver

- (id)initWithContext:(AVFContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

- (void) captureOutput:(AVCaptureOutput *)captureOutput
  didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
         fromConnection:(AVCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_audio_frame != nil) {
        CFRelease(_context->current_audio_frame);
    }

    _context->current_audio_frame = (CMSampleBufferRef)CFRetain(audioFrame);

    unlock_frames(_context);

    ++_context->audio_frames_captured;
}

@end

static void destroy_context(AVFContext* ctx)
{
    [ctx->capture_session stopRunning];

    [ctx->capture_session release];
    [ctx->video_output    release];
    [ctx->audio_output    release];
    [ctx->avf_delegate    release];
    [ctx->avf_audio_delegate release];

    ctx->capture_session = NULL;
    ctx->video_output    = NULL;
    ctx->audio_output    = NULL;
    ctx->avf_delegate    = NULL;
    ctx->avf_audio_delegate = NULL;

    av_freep(&ctx->audio_buffer);

    pthread_mutex_destroy(&ctx->frame_lock);

    if (ctx->current_frame) {
        CFRelease(ctx->current_frame);
    }
}

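// The input "filename" has the form "<video device>:<audio device>"; a
// string starting with ':' selects an audio-only input.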
static int parse_device_name(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    char *tmp = av_strdup(s->url);
    char *save;

    if (tmp[0] != ':') {
        ctx->video_filename = av_strtok(tmp,  ":", &save);
        ctx->audio_filename = av_strtok(NULL, ":", &save);
    } else {
        ctx->audio_filename = av_strtok(tmp,  ":", &save);
    }
    return 0;
}

/**
 * Configure the video device.
 *
 * Configure the video device using a run-time approach to access properties
 * since formats and activeFormat are available since iOS >= 7.0 or OSX >= 10.7
 * and activeVideoMaxFrameDuration is available since iOS >= 7.0 and OSX >= 10.9.
 *
 * The NSUndefinedKeyException must be handled by the caller of this function.
 *
 */
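/* Illustration of the run-time access pattern used below: instead of the
 * compile-time property syntax
 *
 *     AVCaptureDeviceFormat *fmt = video_device.activeFormat;
 *
 * the properties are reached through key-value coding and selectors,
 *
 *     NSObject *fmt = [video_device valueForKey:@"activeFormat"];
 *
 * so the file still compiles against SDKs that predate these properties. */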
static int configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;

    double framerate = av_q2d(ctx->framerate);
    NSObject *range = nil;
    NSObject *format = nil;
    NSObject *selected_range = nil;
    NSObject *selected_format = nil;

    // try to configure format by formats list
    // might raise an exception if no format list is given
    // (then fallback to default, no configuration)
    @try {
        for (format in [video_device valueForKey:@"formats"]) {
            CMFormatDescriptionRef formatDescription;
            CMVideoDimensions dimensions;

            formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
            dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

            if ((ctx->width == 0 && ctx->height == 0) ||
                (dimensions.width == ctx->width && dimensions.height == ctx->height)) {

                selected_format = format;

                for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
                    double max_framerate;

                    [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
                    if (fabs(framerate - max_framerate) < 0.01) {
                        selected_range = range;
                        break;
                    }
                }
            }
        }

        if (!selected_format) {
            av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device.\n",
                   ctx->width, ctx->height);
            goto unsupported_format;
        }

        if (!selected_range) {
            av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device.\n",
                   framerate);
            if (ctx->video_is_muxed) {
                av_log(s, AV_LOG_ERROR, "Falling back to default.\n");
            } else {
                goto unsupported_format;
            }
        }

        if ([video_device lockForConfiguration:NULL] == YES) {
            if (selected_format) {
                [video_device setValue:selected_format forKey:@"activeFormat"];
            }
            if (selected_range) {
                NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
                [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
            }
        } else {
            av_log(s, AV_LOG_ERROR, "Could not lock device for configuration.\n");
            return AVERROR(EINVAL);
        }
    } @catch(NSException *e) {
        av_log(s, AV_LOG_WARNING, "Configuration of video device failed, falling back to default.\n");
    }

    return 0;

unsupported_format:

    av_log(s, AV_LOG_ERROR, "Supported modes:\n");
    for (format in [video_device valueForKey:@"formats"]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

        for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
            double min_framerate;
            double max_framerate;

            [[range valueForKey:@"minFrameRate"] getValue:&min_framerate];
            [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
            av_log(s, AV_LOG_ERROR, "  %dx%d@[%f %f]fps\n",
                   dimensions.width, dimensions.height,
                   min_framerate, max_framerate);
        }
    }
    return AVERROR(EINVAL);
}

static int add_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    int ret;
    NSError *error = nil;
    AVCaptureInput* capture_input = nil;
    struct AVFPixelFormatSpec pxl_fmt_spec;
    NSNumber *pixel_format;
    NSDictionary *capture_dict;
    dispatch_queue_t queue;

    if (ctx->video_device_index < ctx->num_video_devices) {
        capture_input = (AVCaptureInput*) [[[AVCaptureDeviceInput alloc] initWithDevice:video_device error:&error] autorelease];
    } else {
        capture_input = (AVCaptureInput*) video_device;
    }

    if (!capture_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:capture_input]) {
        [ctx->capture_session addInput:capture_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->video_output = [[AVCaptureVideoDataOutput alloc] init];

    if (!ctx->video_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
        return 1;
    }

    // Configure device framerate and video size
    @try {
        if ((ret = configure_video_device(s, video_device)) < 0) {
            return ret;
        }
    } @catch (NSException *exception) {
        if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
            av_log(s, AV_LOG_ERROR, "An error occurred: %s\n", [[exception reason] UTF8String]);
            return AVERROR_EXTERNAL;
        }
    }

    // select pixel format
    pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

    for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
        if (ctx->pixel_format == avf_pixel_formats[i].ff_id) {
            pxl_fmt_spec = avf_pixel_formats[i];
            break;
        }
    }

    // check if selected pixel format is supported by AVFoundation
    if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by AVFoundation.\n",
               av_get_pix_fmt_name(ctx->pixel_format));
        return 1;
    }

    // check if the pixel format is available for this device
    if ([[ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) {
        av_log(s, AV_LOG_ERROR, "Selected pixel format (%s) is not supported by the input device.\n",
               av_get_pix_fmt_name(pxl_fmt_spec.ff_id));

        pxl_fmt_spec.ff_id = AV_PIX_FMT_NONE;

        av_log(s, AV_LOG_ERROR, "Supported pixel formats:\n");
        for (NSNumber *pxl_fmt in [ctx->video_output availableVideoCVPixelFormatTypes]) {
            struct AVFPixelFormatSpec pxl_fmt_dummy;
            pxl_fmt_dummy.ff_id = AV_PIX_FMT_NONE;
            for (int i = 0; avf_pixel_formats[i].ff_id != AV_PIX_FMT_NONE; i++) {
                if ([pxl_fmt intValue] == avf_pixel_formats[i].avf_id) {
                    pxl_fmt_dummy = avf_pixel_formats[i];
                    break;
                }
            }

            if (pxl_fmt_dummy.ff_id != AV_PIX_FMT_NONE) {
                av_log(s, AV_LOG_ERROR, "  %s\n", av_get_pix_fmt_name(pxl_fmt_dummy.ff_id));

                // select first supported pixel format instead of user selected (or default) pixel format
                if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
                    pxl_fmt_spec = pxl_fmt_dummy;
                }
            }
        }

        // fail if there is no appropriate pixel format or print a warning about overriding the pixel format
        if (pxl_fmt_spec.ff_id == AV_PIX_FMT_NONE) {
            return 1;
        } else {
            av_log(s, AV_LOG_WARNING, "Overriding selected pixel format to use %s instead.\n",
                   av_get_pix_fmt_name(pxl_fmt_spec.ff_id));
        }
    }

    // set videoSettings to an empty dict for receiving raw data of muxed devices
    if (ctx->capture_raw_data) {
        ctx->pixel_format = pxl_fmt_spec.ff_id;
        ctx->video_output.videoSettings = @{ };
    } else {
        ctx->pixel_format = pxl_fmt_spec.ff_id;
        pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
        capture_dict = [NSDictionary dictionaryWithObject:pixel_format
                                                   forKey:(id)kCVPixelBufferPixelFormatTypeKey];

        [ctx->video_output setVideoSettings:capture_dict];
    }
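    // If requested, let AVFoundation drop frames that arrive while an
    // earlier frame is still being processed instead of queueing them.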
    [ctx->video_output setAlwaysDiscardsLateVideoFrames:ctx->drop_late_frames];

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    // check for transport control support and set observer device if supported
    if (!ctx->video_is_screen) {
        int trans_ctrl = [video_device transportControlsSupported];
        AVCaptureDeviceTransportControlsPlaybackMode trans_mode = [video_device transportControlsPlaybackMode];

        if (trans_ctrl) {
            ctx->observed_mode   = trans_mode;
            ctx->observed_device = video_device;
        }
    }
#endif

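    // The sample buffer delegate runs on a private dispatch queue; the queue
    // can be released right away because the output retains it.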
    ctx->avf_delegate = [[AVFFrameReceiver alloc] initWithContext:ctx];

    queue = dispatch_queue_create("avf_queue", NULL);
    [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->video_output]) {
        [ctx->capture_session addOutput:ctx->video_output];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
        return 1;
    }

    return 0;
}

static int add_audio_device(AVFormatContext *s, AVCaptureDevice *audio_device)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    NSError *error = nil;
    AVCaptureDeviceInput* audio_dev_input = [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device error:&error] autorelease];
    dispatch_queue_t queue;

    if (!audio_dev_input) {
        av_log(s, AV_LOG_ERROR, "Failed to create AV capture input device: %s\n",
               [[error localizedDescription] UTF8String]);
        return 1;
    }

    if ([ctx->capture_session canAddInput:audio_dev_input]) {
        [ctx->capture_session addInput:audio_dev_input];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add audio input to capture session\n");
        return 1;
    }

    // Attaching output
    ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init];

    if (!ctx->audio_output) {
        av_log(s, AV_LOG_ERROR, "Failed to init AV audio output\n");
        return 1;
    }

    ctx->avf_audio_delegate = [[AVFAudioReceiver alloc] initWithContext:ctx];

    queue = dispatch_queue_create("avf_audio_queue", NULL);
    [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate queue:queue];
    dispatch_release(queue);

    if ([ctx->capture_session canAddOutput:ctx->audio_output]) {
        [ctx->capture_session addOutput:ctx->audio_output];
    } else {
        av_log(s, AV_LOG_ERROR, "can't add audio output to capture session\n");
        return 1;
    }

    return 0;
}

static int get_video_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    CVImageBufferRef image_buffer;
    CMBlockBufferRef block_buffer;
    CGSize image_buffer_size;
    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    ctx->video_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
    block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);

    if (image_buffer) {
        image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);

        stream->codecpar->codec_id   = AV_CODEC_ID_RAWVIDEO;
        stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
        stream->codecpar->width      = (int)image_buffer_size.width;
        stream->codecpar->height     = (int)image_buffer_size.height;
        stream->codecpar->format     = ctx->pixel_format;
    } else {
        stream->codecpar->codec_id   = AV_CODEC_ID_DVVIDEO;
        stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
        stream->codecpar->format     = ctx->pixel_format;
    }

    CFRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    return 0;
}

static int get_audio_config(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    CMFormatDescriptionRef format_desc;
    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        return 1;
    }

    // Take stream info from the first frame.
    while (ctx->audio_frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }

    lock_frames(ctx);

    ctx->audio_stream_index = stream->index;

    avpriv_set_pts_info(stream, 64, 1, avf_time_base);

    format_desc = CMSampleBufferGetFormatDescription(ctx->current_audio_frame);
    const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc);

    if (!basic_desc) {
        unlock_frames(ctx);
        av_log(s, AV_LOG_ERROR, "audio format not available\n");
        return 1;
    }

    stream->codecpar->codec_type  = AVMEDIA_TYPE_AUDIO;
    stream->codecpar->sample_rate = basic_desc->mSampleRate;
    av_channel_layout_default(&stream->codecpar->ch_layout, basic_desc->mChannelsPerFrame);

    ctx->audio_channels        = basic_desc->mChannelsPerFrame;
    ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
    ctx->audio_float           = basic_desc->mFormatFlags & kAudioFormatFlagIsFloat;
    ctx->audio_be              = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian;
    ctx->audio_signed_integer  = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger;
    ctx->audio_packed          = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked;
    ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved;

    if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_float &&
        ctx->audio_bits_per_sample == 32 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_F32BE : AV_CODEC_ID_PCM_F32LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 16 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S16BE : AV_CODEC_ID_PCM_S16LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 24 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S24BE : AV_CODEC_ID_PCM_S24LE;
    } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
        ctx->audio_signed_integer &&
        ctx->audio_bits_per_sample == 32 &&
        ctx->audio_packed) {
        stream->codecpar->codec_id = ctx->audio_be ? AV_CODEC_ID_PCM_S32BE : AV_CODEC_ID_PCM_S32LE;
    } else {
        unlock_frames(ctx);
        av_log(s, AV_LOG_ERROR, "audio format is not supported\n");
        return 1;
    }

    if (ctx->audio_non_interleaved) {
        CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
        ctx->audio_buffer_size        = CMBlockBufferGetDataLength(block_buffer);
        ctx->audio_buffer             = av_malloc(ctx->audio_buffer_size);
        if (!ctx->audio_buffer) {
            unlock_frames(ctx);
            av_log(s, AV_LOG_ERROR, "error allocating audio buffer\n");
            return 1;
        }
    }

    CFRelease(ctx->current_audio_frame);
    ctx->current_audio_frame = nil;

    unlock_frames(ctx);

    return 0;
}

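// Enumerate the capture devices for one media type, preferring the
// AVCaptureDeviceDiscoverySession API where the deployment target allows it.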
static NSArray* getDevicesWithMediaType(AVMediaType mediaType) {
#if ((TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 100000) || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101500))
    NSMutableArray *deviceTypes = nil;
    if (mediaType == AVMediaTypeVideo) {
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]];
#if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 100000)
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInDualCamera];
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTelephotoCamera];
#endif
#if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110100)
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];
#endif
#if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 130000)
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTripleCamera];
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInDualWideCamera];
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInUltraWideCamera];
#endif
#if (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 130000)
        [deviceTypes addObject: AVCaptureDeviceTypeDeskViewCamera];
#endif
#if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 150400)
        [deviceTypes addObject: AVCaptureDeviceTypeBuiltInLiDARDepthCamera];
#endif
#if ((TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000) || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
        [deviceTypes addObject: AVCaptureDeviceTypeContinuityCamera];
        [deviceTypes addObject: AVCaptureDeviceTypeExternal];
#elif (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED < 140000)
        [deviceTypes addObject: AVCaptureDeviceTypeExternalUnknown];
#endif
    } else if (mediaType == AVMediaTypeAudio) {
#if ((TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000) || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeMicrophone]];
#else
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeBuiltInMicrophone]];
#endif
    } else if (mediaType == AVMediaTypeMuxed) {
#if ((TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000) || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeExternal]];
#elif (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED < 140000)
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeExternalUnknown]];
#else
        return nil;
#endif
    } else {
        return nil;
    }

    AVCaptureDeviceDiscoverySession *captureDeviceDiscoverySession =
        [AVCaptureDeviceDiscoverySession
        discoverySessionWithDeviceTypes:deviceTypes
                              mediaType:mediaType
                               position:AVCaptureDevicePositionUnspecified];
    return [captureDeviceDiscoverySession devices];
#else
    return [AVCaptureDevice devicesWithMediaType:mediaType];
#endif
}

static int avf_read_header(AVFormatContext *s)
{
    AVFContext *ctx = (AVFContext*)s->priv_data;
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
    uint32_t num_screens = 0;
    AVCaptureDevice *video_device = nil;
    AVCaptureDevice *audio_device = nil;
    // Find capture device
    NSArray *devices       = getDevicesWithMediaType(AVMediaTypeVideo);
    NSArray *devices_muxed = getDevicesWithMediaType(AVMediaTypeMuxed);

    ctx->num_video_devices = [devices count] + [devices_muxed count];

    pthread_mutex_init(&ctx->frame_lock, NULL);

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
    CGGetActiveDisplayList(0, NULL, &num_screens);
#endif

    // List devices if requested
    if (ctx->list_devices) {
        int index = 0;
        av_log(ctx, AV_LOG_INFO, "AVFoundation video devices:\n");
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            index            = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        for (AVCaptureDevice *device in devices_muxed) {
            const char *name = [[device localizedName] UTF8String];
            index            = [devices count] + [devices_muxed indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
        if (num_screens > 0) {
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            for (int i = 0; i < num_screens; i++) {
                av_log(ctx, AV_LOG_INFO, "[%d] Capture screen %d\n", ctx->num_video_devices + i, i);
            }
        }
#endif

        av_log(ctx, AV_LOG_INFO, "AVFoundation audio devices:\n");
        devices = getDevicesWithMediaType(AVMediaTypeAudio);
        for (AVCaptureDevice *device in devices) {
            const char *name = [[device localizedName] UTF8String];
            int index = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        goto fail;
    }

    // parse input filename for video and audio device
    parse_device_name(s);

    // check for device index given in filename
    if (ctx->video_device_index == -1 && ctx->video_filename) {
        sscanf(ctx->video_filename, "%d", &ctx->video_device_index);
    }
    if (ctx->audio_device_index == -1 && ctx->audio_filename) {
        sscanf(ctx->audio_filename, "%d", &ctx->audio_device_index);
    }

    if (ctx->video_device_index >= 0) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            if (ctx->video_device_index < [devices count]) {
                video_device = [devices objectAtIndex:ctx->video_device_index];
            } else {
                video_device = [devices_muxed objectAtIndex:(ctx->video_device_index - [devices count])];
                ctx->video_is_muxed = 1;
            }
        } else if (ctx->video_device_index < ctx->num_video_devices + num_screens) {
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            CGDirectDisplayID screens[num_screens];
            CGGetActiveDisplayList(num_screens, screens, &num_screens);
            AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->num_video_devices]] autorelease];

            if (ctx->framerate.num > 0) {
                capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
            }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
            if (ctx->capture_cursor) {
                capture_screen_input.capturesCursor = YES;
            } else {
                capture_screen_input.capturesCursor = NO;
            }
#endif

            if (ctx->capture_mouse_clicks) {
                capture_screen_input.capturesMouseClicks = YES;
            } else {
                capture_screen_input.capturesMouseClicks = NO;
            }

            video_device = (AVCaptureDevice*) capture_screen_input;
            ctx->video_is_screen = 1;
#endif
        } else {
            av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
            goto fail;
        }
    } else if (ctx->video_filename &&
               strncmp(ctx->video_filename, "none", 4)) {
        if (!strncmp(ctx->video_filename, "default", 7)) {
            video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        } else {
            // looking for video inputs
            for (AVCaptureDevice *device in devices) {
                if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
                    video_device = device;
                    break;
                }
            }
            // looking for muxed inputs
            for (AVCaptureDevice *device in devices_muxed) {
                if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
                    video_device = device;
                    ctx->video_is_muxed = 1;
                    break;
                }
            }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
            // looking for screen inputs
            if (!video_device) {
                int idx;
                if (sscanf(ctx->video_filename, "Capture screen %d", &idx) && idx < num_screens) {
                    CGDirectDisplayID screens[num_screens];
                    CGGetActiveDisplayList(num_screens, screens, &num_screens);
                    AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
                    video_device = (AVCaptureDevice*) capture_screen_input;
                    ctx->video_device_index = ctx->num_video_devices + idx;
                    ctx->video_is_screen = 1;

                    if (ctx->framerate.num > 0) {
                        capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
                    }

#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
                    if (ctx->capture_cursor) {
                        capture_screen_input.capturesCursor = YES;
                    } else {
                        capture_screen_input.capturesCursor = NO;
                    }
#endif

                    if (ctx->capture_mouse_clicks) {
                        capture_screen_input.capturesMouseClicks = YES;
                    } else {
                        capture_screen_input.capturesMouseClicks = NO;
                    }
                }
            }
#endif
        }

        if (!video_device) {
            av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
            goto fail;
        }
    }

    // get audio device
    if (ctx->audio_device_index >= 0) {
        NSArray *devices = getDevicesWithMediaType(AVMediaTypeAudio);

        if (ctx->audio_device_index >= [devices count]) {
            av_log(ctx, AV_LOG_ERROR, "Invalid audio device index\n");
            goto fail;
        }

        audio_device = [devices objectAtIndex:ctx->audio_device_index];
    } else if (ctx->audio_filename &&
               strncmp(ctx->audio_filename, "none", 4)) {
        if (!strncmp(ctx->audio_filename, "default", 7)) {
            audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        } else {
            NSArray *devices = getDevicesWithMediaType(AVMediaTypeAudio);

            for (AVCaptureDevice *device in devices) {
                if (!strncmp(ctx->audio_filename, [[device localizedName] UTF8String], strlen(ctx->audio_filename))) {
                    audio_device = device;
                    break;
                }
            }
        }

        if (!audio_device) {
            av_log(ctx, AV_LOG_ERROR, "Audio device not found\n");
            goto fail;
        }
    }

    // Bail out if neither a video nor an audio capture device was found
    if (!video_device && !audio_device) {
        av_log(s, AV_LOG_ERROR, "No AV capture device found\n");
        goto fail;
    }

    if (video_device) {
        if (ctx->video_device_index < ctx->num_video_devices) {
            av_log(s, AV_LOG_DEBUG, "video device '%s' opened\n", [[video_device localizedName] UTF8String]);
        } else {
            av_log(s, AV_LOG_DEBUG, "video device '%s' opened\n", [[video_device description] UTF8String]);
        }
    }
    if (audio_device) {
        av_log(s, AV_LOG_DEBUG, "audio device '%s' opened\n", [[audio_device localizedName] UTF8String]);
    }

    // Initialize capture session
    ctx->capture_session = [[AVCaptureSession alloc] init];

    if (video_device && add_video_device(s, video_device)) {
        goto fail;
    }
    if (audio_device && add_audio_device(s, audio_device)) {
        goto fail;
    }

    [ctx->capture_session startRunning];

    /* Unlock device configuration only after the session is started so it
     * does not reset the capture formats */
    if (!ctx->video_is_screen) {
        [video_device unlockForConfiguration];
    }

    // set video stream
    if (video_device && get_video_config(s)) {
        goto fail;
    }

    // set audio stream
    if (audio_device && get_audio_config(s)) {
        goto fail;
    }

    [pool release];
    return 0;

fail:
    [pool release];
    destroy_context(ctx);
    return AVERROR(EIO);
}

static int copy_cvpixelbuffer(AVFormatContext *s,
                              CVPixelBufferRef image_buffer,
                              AVPacket *pkt)
{
    AVFContext *ctx = s->priv_data;
    int src_linesize[4];
    const uint8_t *src_data[4];
    int width  = CVPixelBufferGetWidth(image_buffer);
    int height = CVPixelBufferGetHeight(image_buffer);
    int status;

    memset(src_linesize, 0, sizeof(src_linesize));
    memset(src_data, 0, sizeof(src_data));

    status = CVPixelBufferLockBaseAddress(image_buffer, 0);
    if (status != kCVReturnSuccess) {
        av_log(s, AV_LOG_ERROR, "Could not lock base address: %d\n", status);
        return AVERROR_EXTERNAL;
    }

    if (CVPixelBufferIsPlanar(image_buffer)) {
        size_t plane_count = CVPixelBufferGetPlaneCount(image_buffer);
        int i;
        for (i = 0; i < plane_count; i++) {
            src_linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(image_buffer, i);
            src_data[i]     = CVPixelBufferGetBaseAddressOfPlane(image_buffer, i);
        }
    } else {
        src_linesize[0] = CVPixelBufferGetBytesPerRow(image_buffer);
        src_data[0]     = CVPixelBufferGetBaseAddress(image_buffer);
    }

    status = av_image_copy_to_buffer(pkt->data, pkt->size,
                                     src_data, src_linesize,
                                     ctx->pixel_format, width, height, 1);

    CVPixelBufferUnlockBaseAddress(image_buffer, 0);

    return status;
}
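
// Demuxer read callback: hands out the most recently captured video or
// audio frame, EAGAIN when nothing is pending yet, and EOF once an
// observed device reports that playback stopped.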
static int avf_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;

    do {
        CVImageBufferRef image_buffer;
        CMBlockBufferRef block_buffer;
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            int status;
            int length = 0;

            image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
            block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);

            if (image_buffer != nil) {
                length = (int)CVPixelBufferGetDataSize(image_buffer);
            } else if (block_buffer != nil) {
                length = (int)CMBlockBufferGetDataLength(block_buffer);
            } else {
                unlock_frames(ctx);
                return AVERROR(EINVAL);
            }

            if (av_new_packet(pkt, length) < 0) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            CMItemCount count;
            CMSampleTimingInfo timing_info;

            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_frame, 1, &timing_info, &count) == noErr) {
                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
            }

            pkt->stream_index  = ctx->video_stream_index;
            pkt->flags        |= AV_PKT_FLAG_KEY;

            if (image_buffer) {
                status = copy_cvpixelbuffer(s, image_buffer, pkt);
            } else {
                status = 0;
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    status = AVERROR(EIO);
                }
            }
            CFRelease(ctx->current_frame);
            ctx->current_frame = nil;

            if (status < 0) {
                unlock_frames(ctx);
                return status;
            }
        } else if (ctx->current_audio_frame != nil) {
            CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(ctx->current_audio_frame);
            int block_buffer_size         = CMBlockBufferGetDataLength(block_buffer);

            if (!block_buffer || !block_buffer_size) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            if (ctx->audio_non_interleaved && block_buffer_size > ctx->audio_buffer_size) {
                unlock_frames(ctx);
                return AVERROR_BUFFER_TOO_SMALL;
            }

            if (av_new_packet(pkt, block_buffer_size) < 0) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }

            CMItemCount count;
            CMSampleTimingInfo timing_info;

            if (CMSampleBufferGetOutputSampleTimingInfoArray(ctx->current_audio_frame, 1, &timing_info, &count) == noErr) {
                AVRational timebase_q = av_make_q(1, timing_info.presentationTimeStamp.timescale);
                pkt->pts = pkt->dts = av_rescale_q(timing_info.presentationTimeStamp.value, timebase_q, avf_time_base_q);
            }

            pkt->stream_index  = ctx->audio_stream_index;
            pkt->flags        |= AV_PKT_FLAG_KEY;

            if (ctx->audio_non_interleaved) {
                int sample, c, shift, num_samples;

                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, ctx->audio_buffer);
                if (ret != kCMBlockBufferNoErr) {
                    unlock_frames(ctx);
                    return AVERROR(EIO);
                }

                num_samples = pkt->size / (ctx->audio_channels * (ctx->audio_bits_per_sample >> 3));

                // transform decoded frame into output format
            #define INTERLEAVE_OUTPUT(bps) \
            { \
                int##bps##_t **src; \
                int##bps##_t *dest; \
                src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*)); \
                if (!src) { \
                    unlock_frames(ctx); \
                    return AVERROR(EIO); \
                } \
                \
                for (c = 0; c < ctx->audio_channels; c++) { \
                    src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \
                } \
                dest  = (int##bps##_t*)pkt->data; \
                shift = bps - ctx->audio_bits_per_sample; \
                for (sample = 0; sample < num_samples; sample++) \
                    for (c = 0; c < ctx->audio_channels; c++) \
                        *dest++ = src[c][sample] << shift; \
                av_freep(&src); \
            }
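
                // Worked example: 2 channels, 24-bit samples in 32-bit slots
                // (shift = 32 - 24 = 8). Planar input L0 L1 ... | R0 R1 ...
                // becomes interleaved output (L0<<8) (R0<<8) (L1<<8) (R1<<8) ...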
                if (ctx->audio_bits_per_sample <= 16) {
                    INTERLEAVE_OUTPUT(16)
                } else {
                    INTERLEAVE_OUTPUT(32)
                }
            } else {
                OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
                if (ret != kCMBlockBufferNoErr) {
                    unlock_frames(ctx);
                    return AVERROR(EIO);
                }
            }

            CFRelease(ctx->current_audio_frame);
            ctx->current_audio_frame = nil;
        } else {
            pkt->data = NULL;
            unlock_frames(ctx);
            if (ctx->observed_quit) {
                return AVERROR_EOF;
            } else {
                return AVERROR(EAGAIN);
            }
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}

static int avf_close(AVFormatContext *s)
{
    AVFContext* ctx = (AVFContext*)s->priv_data;
    destroy_context(ctx);
    return 0;
}

static const AVOption options[] = {
    { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "framerate", "set frame rate", offsetof(AVFContext, framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "ntsc"}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
    { "video_size", "set video size", offsetof(AVFContext, width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_cursor", "capture the screen cursor", offsetof(AVFContext, capture_cursor), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_mouse_clicks", "capture the screen mouse clicks", offsetof(AVFContext, capture_mouse_clicks), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "capture_raw_data", "capture the raw data from device connection", offsetof(AVFContext, capture_raw_data), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { "drop_late_frames", "drop frames that are available later than expected", offsetof(AVFContext, drop_late_frames), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { NULL },
};

static const AVClass avf_class = {
    .class_name = "AVFoundation indev",
    .item_name  = av_default_item_name,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};

const FFInputFormat ff_avfoundation_demuxer = {
    .p.name         = "avfoundation",
    .p.long_name    = NULL_IF_CONFIG_SMALL("AVFoundation input device"),
    .p.flags        = AVFMT_NOFILE,
    .p.priv_class   = &avf_class,
    .priv_data_size = sizeof(AVFContext),
    .read_header    = avf_read_header,
    .read_packet    = avf_read_packet,
    .read_close     = avf_close,
};