1 /*
2 * AVFoundation input device
3 * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de>
4 *
5 * This file is part of FFmpeg.
6 *
7 * FFmpeg is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2.1 of the License, or (at your option) any later version.
11 *
12 * FFmpeg is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with FFmpeg; if not, write to the Free Software
19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 */
21
22 /**
23 * @file
24 * AVFoundation input device
25 * @author Thilo Borgmann <thilo.borgmann@mail.de>
26 */
27
28 #import <AVFoundation/AVFoundation.h>
29 #include <pthread.h>
30
42
44
48 };
49
53 };
54
78 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
80 #endif
82 };
83
85 {
87
93
96
103
109
113
115
123
126
128
134
136 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
137 AVCaptureDeviceTransportControlsPlaybackMode observed_mode;
138 #endif
141
143 {
145 }
146
148 {
150 }
151
/** FrameReceiver class - delegate for AVCaptureSession
 */
155 {
157 }
158
160
161 - (void) captureOutput:(AVCaptureOutput *)captureOutput
162 didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
163 fromConnection:(AVCaptureConnection *)connection;
164
165 @end
166
168
170 {
171 if (
self = [super
init]) {
173
174 // start observing if a device is set for it
175 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
177 NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
178 NSKeyValueObservingOptions
options = NSKeyValueObservingOptionNew;
179
180 [
_context->observed_device addObserver: self
181 forKeyPath: keyPath
182 options: options
183 context: _context];
184 }
185 #endif
186 }
187 return self;
188 }
189
190 - (void)dealloc {
191 // stop observing if a device is set for it
192 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
194 NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
195 [_context->observed_device removeObserver: self forKeyPath: keyPath];
196 }
197 #endif
198 [super dealloc];
199 }
200
201 - (void)observeValueForKeyPath:(NSString *)keyPath
202 ofObject:(id)object
203 change:(NSDictionary *)change
204 context:(void *)context {
206 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
207 AVCaptureDeviceTransportControlsPlaybackMode
mode =
208 [change[NSKeyValueChangeNewKey] integerValue];
209
211 if (
mode == AVCaptureDeviceTransportControlsNotPlayingMode) {
213 }
215 }
216 #endif
217 } else {
218 [super observeValueForKeyPath: keyPath
219 ofObject: object
220 change: change
221 context: context];
222 }
223 }
224
225 - (void) captureOutput:(AVCaptureOutput *)captureOutput
226 didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
227 fromConnection:(AVCaptureConnection *)connection
228 {
230
233 }
234
236
238
240 }
241
242 @end
243
/** AudioReceiver class - delegate for AVCaptureSession
 */
247 {
249 }
250
252
253 - (void) captureOutput:(AVCaptureOutput *)captureOutput
254 didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
255 fromConnection:(AVCaptureConnection *)connection;
256
257 @end
258
260
262 {
263 if (
self = [super
init]) {
265 }
266 return self;
267 }
268
269 - (void) captureOutput:(AVCaptureOutput *)captureOutput
270 didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
271 fromConnection:(AVCaptureConnection *)connection
272 {
274
277 }
278
280
282
284 }
285
286 @end
287
289 {
290 [ctx->capture_session stopRunning];
291
292 [ctx->capture_session release];
293 [ctx->video_output release];
294 [ctx->audio_output release];
295 [ctx->avf_delegate release];
296 [ctx->avf_audio_delegate release];
297
302 ctx->avf_audio_delegate =
NULL;
303
306
308
309 if (
ctx->current_frame) {
310 CFRelease(
ctx->current_frame);
311 }
312 }
313
315 {
317 char *save;
318
320
326 } else {
328 }
329 return 0;
330 }
331
332 /**
333 * Configure the video device.
334 *
 * Configure the video device using a run-time approach to access properties
 * since formats and activeFormat are only available since iOS >= 7.0 or OSX >= 10.7,
 * and activeVideoMaxFrameDuration is available since iOS >= 7.0 and OSX >= 10.9.
338 *
339 * The NSUndefinedKeyException must be handled by the caller of this function.
340 *
341 */
343 {
345
347 NSObject *
range = nil;
349 NSObject *selected_range = nil;
350 NSObject *selected_format = nil;
351
352 // try to configure format by formats list
353 // might raise an exception if no format list is given
354 // (then fallback to default, no configuration)
355 @try {
356 for (
format in [video_device valueForKey:
@"formats"]) {
357 CMFormatDescriptionRef formatDescription;
358 CMVideoDimensions dimensions;
359
360 formatDescription = (CMFormatDescriptionRef) [
format performSelector:
@selector(formatDescription)];
361 dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
362
363 if ((
ctx->width == 0 &&
ctx->height == 0) ||
364 (dimensions.width ==
ctx->width && dimensions.height ==
ctx->height)) {
365
367
368 for (
range in [
format valueForKey:
@"videoSupportedFrameRateRanges"]) {
369 double max_framerate;
370
371 [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
373 selected_range =
range;
374 break;
375 }
376 }
377 }
378 }
379
380 if (!selected_format) {
383 goto unsupported_format;
384 }
385
386 if (!selected_range) {
389 if (
ctx->video_is_muxed) {
391 } else {
392 goto unsupported_format;
393 }
394 }
395
396 if ([video_device lockForConfiguration:
NULL] == YES) {
397 if (selected_format) {
398 [video_device setValue:selected_format forKey:@"activeFormat"];
399 }
400 if (selected_range) {
401 NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];
402 [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
403 [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
404 }
405 } else {
408 }
409 } @catch(NSException *e) {
411 }
412
413 return 0;
414
415 unsupported_format:
416
418 for (
format in [video_device valueForKey:
@"formats"]) {
419 CMFormatDescriptionRef formatDescription;
420 CMVideoDimensions dimensions;
421
422 formatDescription = (CMFormatDescriptionRef) [
format performSelector:
@selector(formatDescription)];
423 dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
424
425 for (
range in [
format valueForKey:
@"videoSupportedFrameRateRanges"]) {
426 double min_framerate;
427 double max_framerate;
428
429 [[range valueForKey:@"minFrameRate"] getValue:&min_framerate];
430 [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
432 dimensions.width, dimensions.height,
433 min_framerate, max_framerate);
434 }
435 }
437 }
438
440 {
443 NSError *
error = nil;
444 AVCaptureInput* capture_input = nil;
446 NSNumber *pixel_format;
447 NSDictionary *capture_dict;
448 dispatch_queue_t queue;
449
450 if (
ctx->video_device_index <
ctx->num_video_devices) {
451 capture_input = (AVCaptureInput*) [[[AVCaptureDeviceInput alloc] initWithDevice:video_device
error:&
error] autorelease];
452 } else {
453 capture_input = (AVCaptureInput*) video_device;
454 }
455
456 if (!capture_input) {
458 [[
error localizedDescription] UTF8String]);
459 return 1;
460 }
461
462 if ([
ctx->capture_session canAddInput:capture_input]) {
463 [ctx->capture_session addInput:capture_input];
464 } else {
466 return 1;
467 }
468
469 // Attaching output
470 ctx->video_output = [[AVCaptureVideoDataOutput alloc] init];
471
472 if (!
ctx->video_output) {
474 return 1;
475 }
476
477 // Configure device framerate and video size
478 @try {
481 }
482 } @catch (NSException *exception) {
483 if (![[exception
name] isEqualToString:NSUndefinedKeyException]) {
486 }
487 }
488
489 // select pixel format
491
495 break;
496 }
497 }
498
499 // check if selected pixel format is supported by AVFoundation
503 return 1;
504 }
505
506 // check if the pixel format is available for this device
507 if ([[
ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) {
508 av_log(
s,
AV_LOG_ERROR,
"Selected pixel format (%s) is not supported by the input device.\n",
510
512
514 for (NSNumber *pxl_fmt in [
ctx->video_output availableVideoCVPixelFormatTypes]) {
520 break;
521 }
522 }
523
526
527 // select first supported pixel format instead of user selected (or default) pixel format
529 pxl_fmt_spec = pxl_fmt_dummy;
530 }
531 }
532 }
533
534 // fail if there is no appropriate pixel format or print a warning about overriding the pixel format
536 return 1;
537 } else {
540 }
541 }
542
543 // set videoSettings to an empty dict for receiving raw data of muxed devices
544 if (
ctx->capture_raw_data) {
545 ctx->pixel_format = pxl_fmt_spec.ff_id;
546 ctx->video_output.videoSettings = @{ };
547 } else {
548 ctx->pixel_format = pxl_fmt_spec.ff_id;
549 pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
550 capture_dict = [NSDictionary dictionaryWithObject:pixel_format
551 forKey:(id)kCVPixelBufferPixelFormatTypeKey];
552
553 [ctx->video_output setVideoSettings:capture_dict];
554 }
555 [ctx->video_output setAlwaysDiscardsLateVideoFrames:ctx->drop_late_frames];
556
557 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
558 // check for transport control support and set observer device if supported
559 if (!
ctx->video_is_screen) {
560 int trans_ctrl = [video_device transportControlsSupported];
561 AVCaptureDeviceTransportControlsPlaybackMode trans_mode = [video_device transportControlsPlaybackMode];
562
563 if (trans_ctrl) {
564 ctx->observed_mode = trans_mode;
565 ctx->observed_device = video_device;
566 }
567 }
568 #endif
569
571
572 queue = dispatch_queue_create(
"avf_queue",
NULL);
573 [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue];
574 dispatch_release(queue);
575
576 if ([
ctx->capture_session canAddOutput:
ctx->video_output]) {
577 [ctx->capture_session addOutput:ctx->video_output];
578 } else {
580 return 1;
581 }
582
583 return 0;
584 }
585
587 {
589 NSError *
error = nil;
590 AVCaptureDeviceInput* audio_dev_input = [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device
error:&
error] autorelease];
591 dispatch_queue_t queue;
592
593 if (!audio_dev_input) {
595 [[
error localizedDescription] UTF8String]);
596 return 1;
597 }
598
599 if ([
ctx->capture_session canAddInput:audio_dev_input]) {
600 [ctx->capture_session addInput:audio_dev_input];
601 } else {
603 return 1;
604 }
605
606 // Attaching output
607 ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init];
608
609 if (!
ctx->audio_output) {
611 return 1;
612 }
613
615
616 queue = dispatch_queue_create(
"avf_audio_queue",
NULL);
617 [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate queue:queue];
618 dispatch_release(queue);
619
620 if ([
ctx->capture_session canAddOutput:
ctx->audio_output]) {
621 [ctx->capture_session addOutput:ctx->audio_output];
622 } else {
624 return 1;
625 }
626
627 return 0;
628 }
629
631 {
633 CVImageBufferRef image_buffer;
634 CMBlockBufferRef block_buffer;
635 CGSize image_buffer_size;
637
638 if (!stream) {
639 return 1;
640 }
641
642 // Take stream info from the first frame.
643 while (
ctx->frames_captured < 1) {
644 CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
645 }
646
648
649 ctx->video_stream_index = stream->index;
650
652
653 image_buffer = CMSampleBufferGetImageBuffer(
ctx->current_frame);
654 block_buffer = CMSampleBufferGetDataBuffer(
ctx->current_frame);
655
656 if (image_buffer) {
657 image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);
658
661 stream->codecpar->width = (
int)image_buffer_size.width;
662 stream->codecpar->height = (int)image_buffer_size.height;
663 stream->codecpar->format =
ctx->pixel_format;
664 } else {
667 stream->codecpar->format =
ctx->pixel_format;
668 }
669
670 CFRelease(
ctx->current_frame);
671 ctx->current_frame = nil;
672
674
675 return 0;
676 }
677
679 {
681 CMFormatDescriptionRef format_desc;
683
684 if (!stream) {
685 return 1;
686 }
687
688 // Take stream info from the first frame.
689 while (
ctx->audio_frames_captured < 1) {
690 CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
691 }
692
694
695 ctx->audio_stream_index = stream->index;
696
698
699 format_desc = CMSampleBufferGetFormatDescription(
ctx->current_audio_frame);
700 const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc);
701
702 if (!basic_desc) {
705 return 1;
706 }
707
709 stream->codecpar->sample_rate = basic_desc->mSampleRate;
711
712 ctx->audio_channels = basic_desc->mChannelsPerFrame;
713 ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
714 ctx->audio_float = basic_desc->mFormatFlags & kAudioFormatFlagIsFloat;
715 ctx->audio_be = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian;
716 ctx->audio_signed_integer = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger;
717 ctx->audio_packed = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked;
718 ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved;
719
720 if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
722 ctx->audio_bits_per_sample == 32 &&
725 } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
726 ctx->audio_signed_integer &&
727 ctx->audio_bits_per_sample == 16 &&
730 } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
731 ctx->audio_signed_integer &&
732 ctx->audio_bits_per_sample == 24 &&
735 } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
736 ctx->audio_signed_integer &&
737 ctx->audio_bits_per_sample == 32 &&
740 } else {
743 return 1;
744 }
745
746 if (
ctx->audio_non_interleaved) {
747 CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(
ctx->current_audio_frame);
748 ctx->audio_buffer_size = CMBlockBufferGetDataLength(block_buffer);
750 if (!
ctx->audio_buffer) {
753 return 1;
754 }
755 }
756
757 CFRelease(
ctx->current_audio_frame);
758 ctx->current_audio_frame = nil;
759
761
762 return 0;
763 }
764
// Build the list of AVCaptureDeviceType constants appropriate for the
// requested media type, then enumerate matching devices via a discovery
// session. On SDKs that predate AVCaptureDeviceDiscoverySession
// (iOS < 10.0, macOS < 10.15) fall back to the legacy
// +devicesWithMediaType: API instead.
#if ((TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 100000) || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 101500))
    NSMutableArray *deviceTypes = nil;
    if (mediaType == AVMediaTypeVideo) {
        // The wide-angle camera constant exists on every OS level this
        // branch compiles for; further camera types are appended only when
        // the minimum deployment target guarantees the constant is defined.
        deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]];
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 100000)
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInDualCamera];
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTelephotoCamera];
        #endif
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 110100)
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTrueDepthCamera];
        #endif
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 130000)
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInTripleCamera];
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInDualWideCamera];
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInUltraWideCamera];
        #endif
        #if (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 130000)
            [deviceTypes addObject: AVCaptureDeviceTypeDeskViewCamera];
        #endif
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 150400)
            [deviceTypes addObject: AVCaptureDeviceTypeBuiltInLiDARDepthCamera];
        #endif
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000 || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
            [deviceTypes addObject: AVCaptureDeviceTypeContinuityCamera];
            [deviceTypes addObject: AVCaptureDeviceTypeExternal];
        #elif (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED < 140000)
            // AVCaptureDeviceTypeExternalUnknown was superseded by
            // AVCaptureDeviceTypeExternal in the macOS 14 SDK.
            [deviceTypes addObject: AVCaptureDeviceTypeExternalUnknown];
        #endif
    } else if (mediaType == AVMediaTypeAudio) {
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000 || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
            deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeMicrophone]];
        #else
            deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeBuiltInMicrophone]];
        #endif
    } else if (mediaType == AVMediaTypeMuxed) {
        #if (TARGET_OS_IPHONE && __IPHONE_OS_VERSION_MIN_REQUIRED >= 170000 || (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED >= 140000))
            deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeExternal]];
        #elif (TARGET_OS_OSX && __MAC_OS_X_VERSION_MIN_REQUIRED < 140000)
            deviceTypes = [NSMutableArray arrayWithArray:@[AVCaptureDeviceTypeExternalUnknown]];
        #else
            // No device-type constant exists for muxed capture on this
            // platform/SDK combination.
            return nil;
        #endif
    } else {
        // Unsupported media type (neither video, audio nor muxed).
        return nil;
    }

    AVCaptureDeviceDiscoverySession *captureDeviceDiscoverySession =
        [AVCaptureDeviceDiscoverySession
        discoverySessionWithDeviceTypes:deviceTypes
                              mediaType:mediaType
                               position:AVCaptureDevicePositionUnspecified];
    return [captureDeviceDiscoverySession devices];
#else
    return [AVCaptureDevice devicesWithMediaType:mediaType];
#endif
}
822
824 {
826 NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
827 uint32_t num_screens = 0;
829 AVCaptureDevice *video_device = nil;
830 AVCaptureDevice *audio_device = nil;
831 // Find capture device
834
835 ctx->num_video_devices = [devices count] + [devices_muxed count];
836
838
839 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
840 CGGetActiveDisplayList(0,
NULL, &num_screens);
841 #endif
842
843 // List devices if requested
844 if (
ctx->list_devices) {
847 for (AVCaptureDevice *device in devices) {
848 const char *
name = [[device localizedName] UTF8String];
849 index = [devices indexOfObject:device];
851 }
852 for (AVCaptureDevice *device in devices_muxed) {
853 const char *
name = [[device localizedName] UTF8String];
854 index = [devices count] + [devices_muxed indexOfObject:device];
856 }
857 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
858 if (num_screens > 0) {
859 CGDirectDisplayID screens[num_screens];
860 CGGetActiveDisplayList(num_screens, screens, &num_screens);
861 for (
int i = 0;
i < num_screens;
i++) {
863 }
864 }
865 #endif
866
869 for (AVCaptureDevice *device in devices) {
870 const char *
name = [[device localizedName] UTF8String];
871 int index = [devices indexOfObject:device];
873 }
875 }
876
877 // parse input filename for video and audio device
881
882 // check for device index given in filename
883 if (
ctx->video_device_index == -1 &&
ctx->video_filename) {
884 sscanf(
ctx->video_filename,
"%d", &
ctx->video_device_index);
885 }
886 if (
ctx->audio_device_index == -1 &&
ctx->audio_filename) {
887 sscanf(
ctx->audio_filename,
"%d", &
ctx->audio_device_index);
888 }
889
890 if (
ctx->video_device_index >= 0) {
891 if (
ctx->video_device_index <
ctx->num_video_devices) {
892 if (
ctx->video_device_index < [devices count]) {
893 video_device = [devices objectAtIndex:ctx->video_device_index];
894 } else {
895 video_device = [devices_muxed objectAtIndex:(ctx->video_device_index - [devices count])];
896 ctx->video_is_muxed = 1;
897 }
898 }
else if (
ctx->video_device_index <
ctx->num_video_devices + num_screens) {
899 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
900 CGDirectDisplayID screens[num_screens];
901 CGGetActiveDisplayList(num_screens, screens, &num_screens);
902 AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->
num_video_devices]] autorelease];
903
904 if (
ctx->framerate.num > 0) {
905 capture_screen_input.minFrameDuration = CMTimeMake(
ctx->framerate.den,
ctx->framerate.num);
906 }
907
908 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
909 if (
ctx->capture_cursor) {
910 capture_screen_input.capturesCursor = YES;
911 } else {
912 capture_screen_input.capturesCursor = NO;
913 }
914 #endif
915
916 if (
ctx->capture_mouse_clicks) {
917 capture_screen_input.capturesMouseClicks = YES;
918 } else {
919 capture_screen_input.capturesMouseClicks = NO;
920 }
921
922 video_device = (AVCaptureDevice*) capture_screen_input;
923 ctx->video_is_screen = 1;
924 #endif
925 } else {
928 }
929 }
else if (
ctx->video_filename &&
930 strncmp(
ctx->video_filename,
"none", 4)) {
931 if (!strncmp(
ctx->video_filename,
"default", 7)) {
932 video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
933 } else {
934 // looking for video inputs
935 for (AVCaptureDevice *device in devices) {
936 if (!strncmp(
ctx->video_filename, [[device localizedName] UTF8String], strlen(
ctx->video_filename))) {
937 video_device = device;
938 break;
939 }
940 }
941 // looking for muxed inputs
942 for (AVCaptureDevice *device in devices_muxed) {
943 if (!strncmp(
ctx->video_filename, [[device localizedName] UTF8String], strlen(
ctx->video_filename))) {
944 video_device = device;
945 ctx->video_is_muxed = 1;
946 break;
947 }
948 }
949
950 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
951 // looking for screen inputs
952 if (!video_device) {
953 int idx;
954 if(sscanf(
ctx->video_filename,
"Capture screen %d", &idx) && idx < num_screens) {
955 CGDirectDisplayID screens[num_screens];
956 CGGetActiveDisplayList(num_screens, screens, &num_screens);
957 AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
958 video_device = (AVCaptureDevice*) capture_screen_input;
959 ctx->video_device_index =
ctx->num_video_devices + idx;
960 ctx->video_is_screen = 1;
961
962 if (
ctx->framerate.num > 0) {
963 capture_screen_input.minFrameDuration = CMTimeMake(
ctx->framerate.den,
ctx->framerate.num);
964 }
965
966 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
967 if (
ctx->capture_cursor) {
968 capture_screen_input.capturesCursor = YES;
969 } else {
970 capture_screen_input.capturesCursor = NO;
971 }
972 #endif
973
974 if (
ctx->capture_mouse_clicks) {
975 capture_screen_input.capturesMouseClicks = YES;
976 } else {
977 capture_screen_input.capturesMouseClicks = NO;
978 }
979 }
980 }
981 #endif
982 }
983
984 if (!video_device) {
987 }
988 }
989
990 // get audio device
991 if (
ctx->audio_device_index >= 0) {
993
994 if (
ctx->audio_device_index >= [devices count]) {
997 }
998
999 audio_device = [devices objectAtIndex:ctx->audio_device_index];
1000 }
else if (
ctx->audio_filename &&
1001 strncmp(
ctx->audio_filename,
"none", 4)) {
1002 if (!strncmp(
ctx->audio_filename,
"default", 7)) {
1003 audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
1004 } else {
1006
1007 for (AVCaptureDevice *device in devices) {
1008 if (!strncmp(
ctx->audio_filename, [[device localizedName] UTF8String], strlen(
ctx->audio_filename))) {
1009 audio_device = device;
1010 break;
1011 }
1012 }
1013 }
1014
1015 if (!audio_device) {
1018 }
1019 }
1020
1021 // Video nor Audio capture device not found, looking for AVMediaTypeVideo/Audio
1022 if (!video_device && !audio_device) {
1025 }
1026
1027 if (video_device) {
1028 if (
ctx->video_device_index <
ctx->num_video_devices) {
1030 } else {
1032 }
1033 }
1034 if (audio_device) {
1035 av_log(
s,
AV_LOG_DEBUG,
"audio device '%s' opened\n", [[audio_device localizedName] UTF8String]);
1036 }
1037
1038 // Initialize capture session
1039 ctx->capture_session = [[AVCaptureSession alloc] init];
1040
1043 }
1045 }
1046
1047 [ctx->capture_session startRunning];
1048
1049 /* Unlock device configuration only after the session is started so it
1050 * does not reset the capture formats */
1051 if (!
ctx->video_is_screen) {
1052 [video_device unlockForConfiguration];
1053 }
1054
1057 }
1058
1059 // set audio stream
1062 }
1063
1064 [pool release];
1065 return 0;
1066
1068 [pool release];
1073 }
1074
1076 CVPixelBufferRef image_buffer,
1078 {
1080 int src_linesize[4];
1081 const uint8_t *src_data[4];
1082 int width = CVPixelBufferGetWidth(image_buffer);
1083 int height = CVPixelBufferGetHeight(image_buffer);
1085
1086 memset(src_linesize, 0, sizeof(src_linesize));
1087 memset(src_data, 0, sizeof(src_data));
1088
1089 status = CVPixelBufferLockBaseAddress(image_buffer, 0);
1090 if (
status != kCVReturnSuccess) {
1093 }
1094
1095 if (CVPixelBufferIsPlanar(image_buffer)) {
1096 size_t plane_count = CVPixelBufferGetPlaneCount(image_buffer);
1098 for(
i = 0;
i < plane_count;
i++){
1099 src_linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(image_buffer,
i);
1100 src_data[i] = CVPixelBufferGetBaseAddressOfPlane(image_buffer,
i);
1101 }
1102 } else {
1103 src_linesize[0] = CVPixelBufferGetBytesPerRow(image_buffer);
1104 src_data[0] = CVPixelBufferGetBaseAddress(image_buffer);
1105 }
1106
1108 src_data, src_linesize,
1110
1111
1112
1113 CVPixelBufferUnlockBaseAddress(image_buffer, 0);
1114
1116 }
1117
1119 {
1121
1122 do {
1123 CVImageBufferRef image_buffer;
1124 CMBlockBufferRef block_buffer;
1126
1127 if (
ctx->current_frame != nil) {
1129 int length = 0;
1130
1131 image_buffer = CMSampleBufferGetImageBuffer(
ctx->current_frame);
1132 block_buffer = CMSampleBufferGetDataBuffer(
ctx->current_frame);
1133
1134 if (image_buffer != nil) {
1135 length = (
int)CVPixelBufferGetDataSize(image_buffer);
1136 } else if (block_buffer != nil) {
1137 length = (
int)CMBlockBufferGetDataLength(block_buffer);
1138 } else {
1141 }
1142
1146 }
1147
1148 CMItemCount count;
1150
1151 if (CMSampleBufferGetOutputSampleTimingInfoArray(
ctx->current_frame, 1, &
timing_info, &count) == noErr) {
1154 }
1155
1158
1159 if (image_buffer) {
1161 } else {
1163 OSStatus
ret = CMBlockBufferCopyDataBytes(block_buffer, 0,
pkt->
size,
pkt->
data);
1164 if (
ret != kCMBlockBufferNoErr) {
1166 }
1167 }
1168 CFRelease(
ctx->current_frame);
1169 ctx->current_frame = nil;
1170
1174 }
1175 }
else if (
ctx->current_audio_frame != nil) {
1176 CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(
ctx->current_audio_frame);
1177 int block_buffer_size = CMBlockBufferGetDataLength(block_buffer);
1178
1179 if (!block_buffer || !block_buffer_size) {
1182 }
1183
1184 if (
ctx->audio_non_interleaved && block_buffer_size >
ctx->audio_buffer_size) {
1187 }
1188
1192 }
1193
1194 CMItemCount count;
1196
1197 if (CMSampleBufferGetOutputSampleTimingInfoArray(
ctx->current_audio_frame, 1, &
timing_info, &count) == noErr) {
1200 }
1201
1204
1205 if (
ctx->audio_non_interleaved) {
1207
1208 OSStatus
ret = CMBlockBufferCopyDataBytes(block_buffer, 0,
pkt->
size,
ctx->audio_buffer);
1209 if (
ret != kCMBlockBufferNoErr) {
1212 }
1213
1214 num_samples =
pkt->
size / (
ctx->audio_channels * (
ctx->audio_bits_per_sample >> 3));
1215
1216 // transform decoded frame into output format
            // Interleave a planar (non-interleaved) CoreAudio buffer into
            // pkt->data. src[c] points at channel c's contiguous plane inside
            // ctx->audio_buffer; the output is written frame-major with
            // channels interleaved. `shift` left-aligns samples whose valid
            // bit count is smaller than the container width (e.g. 24-bit
            // samples carried in 32-bit words).
            // NOTE(review): the free names ctx, pkt, num_samples, c, sample
            // and shift are bound at the expansion site in the enclosing
            // read-packet function.
            #define INTERLEAVE_OUTPUT(bps) \
            { \
                int##bps##_t **src; \
                int##bps##_t *dest; \
                src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*)); \
                if (!src) { \
                    unlock_frames(ctx); \
                    return AVERROR(EIO); \
                } \
                \
                for (c = 0; c < ctx->audio_channels; c++) { \
                    src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \
                } \
                dest = (int##bps##_t*)pkt->data; \
                shift = bps - ctx->audio_bits_per_sample; \
                for (sample = 0; sample < num_samples; sample++) \
                    for (c = 0; c < ctx->audio_channels; c++) \
                        *dest++ = src[c][sample] << shift; \
                av_freep(&src); \
            }
1237
1238 if (
ctx->audio_bits_per_sample <= 16) {
1240 } else {
1242 }
1243 } else {
1244 OSStatus
ret = CMBlockBufferCopyDataBytes(block_buffer, 0,
pkt->
size,
pkt->
data);
1245 if (
ret != kCMBlockBufferNoErr) {
1248 }
1249 }
1250
1251 CFRelease(
ctx->current_audio_frame);
1252 ctx->current_audio_frame = nil;
1253 } else {
1256 if (
ctx->observed_quit) {
1258 } else {
1260 }
1261 }
1262
1265
1266 return 0;
1267 }
1268
1270 {
1273 return 0;
1274 }
1275
1287
1289 };
1290
1297 };
1298
1300 .
p.
name =
"avfoundation",
1308 };