1 /*
2 * AVFoundation input device
3 * Copyright (c) 2014 Thilo Borgmann <thilo.borgmann@mail.de>
4 *
5 * This file is part of FFmpeg.
6 *
7 * FFmpeg is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2.1 of the License, or (at your option) any later version.
11 *
12 * FFmpeg is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with FFmpeg; if not, write to the Free Software
19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 */
21
22 /**
23 * @file
24 * AVFoundation input device
25 * @author Thilo Borgmann <thilo.borgmann@mail.de>
26 */
27
28 #import <AVFoundation/AVFoundation.h>
29 #include <pthread.h>
30
41
43
47 };
48
52 };
53
77 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
79 #endif
81 };
82
84 {
86
92
95
102
108
112
114
122
125
127
133
135 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
136 AVCaptureDeviceTransportControlsPlaybackMode observed_mode;
137 #endif
140
142 {
144 }
145
147 {
149 }
150
151 /** FrameReceiver class - delegate for AVCaptureSession
152 */
154 {
156 }
157
159
160 - (void) captureOutput:(AVCaptureOutput *)captureOutput
161 didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
162 fromConnection:(AVCaptureConnection *)connection;
163
164 @end
165
167
169 {
170 if (
self = [super
init]) {
172
173 // start observing if a device is set for it
174 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
176 NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
177 NSKeyValueObservingOptions
options = NSKeyValueObservingOptionNew;
178
179 [
_context->observed_device addObserver: self
180 forKeyPath: keyPath
181 options: options
182 context: _context];
183 }
184 #endif
185 }
186 return self;
187 }
188
189 - (void)dealloc {
190 // stop observing if a device is set for it
191 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
193 NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
194 [_context->observed_device removeObserver: self forKeyPath: keyPath];
195 }
196 #endif
197 [super dealloc];
198 }
199
200 - (void)observeValueForKeyPath:(NSString *)keyPath
201 ofObject:(id)object
202 change:(NSDictionary *)change
203 context:(void *)context {
205 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
206 AVCaptureDeviceTransportControlsPlaybackMode
mode =
207 [change[NSKeyValueChangeNewKey] integerValue];
208
210 if (
mode == AVCaptureDeviceTransportControlsNotPlayingMode) {
212 }
214 }
215 #endif
216 } else {
217 [super observeValueForKeyPath: keyPath
218 ofObject: object
219 change: change
220 context: context];
221 }
222 }
223
224 - (void) captureOutput:(AVCaptureOutput *)captureOutput
225 didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
226 fromConnection:(AVCaptureConnection *)connection
227 {
229
232 }
233
235
237
239 }
240
241 @end
242
243 /** AudioReceiver class - delegate for AVCaptureSession
244 */
246 {
248 }
249
251
252 - (void) captureOutput:(AVCaptureOutput *)captureOutput
253 didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
254 fromConnection:(AVCaptureConnection *)connection;
255
256 @end
257
259
261 {
262 if (
self = [super
init]) {
264 }
265 return self;
266 }
267
268 - (void) captureOutput:(AVCaptureOutput *)captureOutput
269 didOutputSampleBuffer:(CMSampleBufferRef)audioFrame
270 fromConnection:(AVCaptureConnection *)connection
271 {
273
276 }
277
279
281
283 }
284
285 @end
286
288 {
289 [ctx->capture_session stopRunning];
290
291 [ctx->capture_session release];
292 [ctx->video_output release];
293 [ctx->audio_output release];
294 [ctx->avf_delegate release];
295 [ctx->avf_audio_delegate release];
296
301 ctx->avf_audio_delegate =
NULL;
302
305
307
308 if (
ctx->current_frame) {
309 CFRelease(
ctx->current_frame);
310 }
311 }
312
314 {
316 char *save;
317
319
325 } else {
327 }
328 return 0;
329 }
330
331 /**
332 * Configure the video device.
333 *
334 * Configure the video device using a run-time approach to access properties
335 * since formats, activeFormat are available since iOS >= 7.0 or OSX >= 10.7
336 * and activeVideoMaxFrameDuration is available since iOS >= 7.0 and OSX >= 10.9.
337 *
338 * The NSUndefinedKeyException must be handled by the caller of this function.
339 *
340 */
342 {
344
346 NSObject *range = nil;
348 NSObject *selected_range = nil;
349 NSObject *selected_format = nil;
350
351 // try to configure format by formats list
352 // might raise an exception if no format list is given
353 // (then fallback to default, no configuration)
354 @try {
355 for (
format in [video_device valueForKey:
@"formats"]) {
356 CMFormatDescriptionRef formatDescription;
357 CMVideoDimensions dimensions;
358
359 formatDescription = (CMFormatDescriptionRef) [
format performSelector:
@selector(formatDescription)];
360 dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
361
362 if ((
ctx->width == 0 &&
ctx->height == 0) ||
363 (dimensions.width ==
ctx->width && dimensions.height ==
ctx->height)) {
364
366
367 for (range in [
format valueForKey:
@"videoSupportedFrameRateRanges"]) {
368 double max_framerate;
369
370 [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
372 selected_range = range;
373 break;
374 }
375 }
376 }
377 }
378
379 if (!selected_format) {
382 goto unsupported_format;
383 }
384
385 if (!selected_range) {
388 if (
ctx->video_is_muxed) {
390 } else {
391 goto unsupported_format;
392 }
393 }
394
395 if ([video_device lockForConfiguration:
NULL] == YES) {
396 if (selected_format) {
397 [video_device setValue:selected_format forKey:@"activeFormat"];
398 }
399 if (selected_range) {
400 NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];
401 [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
402 [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
403 }
404 } else {
407 }
408 } @catch(NSException *e) {
410 }
411
412 return 0;
413
414 unsupported_format:
415
417 for (
format in [video_device valueForKey:
@"formats"]) {
418 CMFormatDescriptionRef formatDescription;
419 CMVideoDimensions dimensions;
420
421 formatDescription = (CMFormatDescriptionRef) [
format performSelector:
@selector(formatDescription)];
422 dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
423
424 for (range in [
format valueForKey:
@"videoSupportedFrameRateRanges"]) {
425 double min_framerate;
426 double max_framerate;
427
428 [[range valueForKey:@"minFrameRate"] getValue:&min_framerate];
429 [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
431 dimensions.width, dimensions.height,
432 min_framerate, max_framerate);
433 }
434 }
436 }
437
439 {
442 NSError *
error = nil;
443 AVCaptureInput* capture_input = nil;
445 NSNumber *pixel_format;
446 NSDictionary *capture_dict;
447 dispatch_queue_t queue;
448
449 if (
ctx->video_device_index <
ctx->num_video_devices) {
450 capture_input = (AVCaptureInput*) [[[AVCaptureDeviceInput alloc] initWithDevice:video_device
error:&
error] autorelease];
451 } else {
452 capture_input = (AVCaptureInput*) video_device;
453 }
454
455 if (!capture_input) {
457 [[
error localizedDescription] UTF8String]);
458 return 1;
459 }
460
461 if ([
ctx->capture_session canAddInput:capture_input]) {
462 [ctx->capture_session addInput:capture_input];
463 } else {
465 return 1;
466 }
467
468 // Attaching output
469 ctx->video_output = [[AVCaptureVideoDataOutput alloc] init];
470
471 if (!
ctx->video_output) {
473 return 1;
474 }
475
476 // Configure device framerate and video size
477 @try {
480 }
481 } @catch (NSException *exception) {
482 if (![[exception
name] isEqualToString:NSUndefinedKeyException]) {
485 }
486 }
487
488 // select pixel format
490
494 break;
495 }
496 }
497
498 // check if selected pixel format is supported by AVFoundation
502 return 1;
503 }
504
505 // check if the pixel format is available for this device
506 if ([[
ctx->video_output availableVideoCVPixelFormatTypes] indexOfObject:[NSNumber numberWithInt:pxl_fmt_spec.avf_id]] == NSNotFound) {
507 av_log(
s,
AV_LOG_ERROR,
"Selected pixel format (%s) is not supported by the input device.\n",
509
511
513 for (NSNumber *pxl_fmt in [
ctx->video_output availableVideoCVPixelFormatTypes]) {
519 break;
520 }
521 }
522
525
526 // select first supported pixel format instead of user selected (or default) pixel format
528 pxl_fmt_spec = pxl_fmt_dummy;
529 }
530 }
531 }
532
533 // fail if there is no appropriate pixel format or print a warning about overriding the pixel format
535 return 1;
536 } else {
539 }
540 }
541
542 // set videoSettings to an empty dict for receiving raw data of muxed devices
543 if (
ctx->capture_raw_data) {
544 ctx->pixel_format = pxl_fmt_spec.ff_id;
545 ctx->video_output.videoSettings = @{ };
546 } else {
547 ctx->pixel_format = pxl_fmt_spec.ff_id;
548 pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
549 capture_dict = [NSDictionary dictionaryWithObject:pixel_format
550 forKey:(id)kCVPixelBufferPixelFormatTypeKey];
551
552 [ctx->video_output setVideoSettings:capture_dict];
553 }
554 [ctx->video_output setAlwaysDiscardsLateVideoFrames:ctx->drop_late_frames];
555
556 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
557 // check for transport control support and set observer device if supported
558 if (!
ctx->video_is_screen) {
559 int trans_ctrl = [video_device transportControlsSupported];
560 AVCaptureDeviceTransportControlsPlaybackMode trans_mode = [video_device transportControlsPlaybackMode];
561
562 if (trans_ctrl) {
563 ctx->observed_mode = trans_mode;
564 ctx->observed_device = video_device;
565 }
566 }
567 #endif
568
570
571 queue = dispatch_queue_create(
"avf_queue",
NULL);
572 [ctx->video_output setSampleBufferDelegate:ctx->avf_delegate queue:queue];
573 dispatch_release(queue);
574
575 if ([
ctx->capture_session canAddOutput:
ctx->video_output]) {
576 [ctx->capture_session addOutput:ctx->video_output];
577 } else {
579 return 1;
580 }
581
582 return 0;
583 }
584
586 {
588 NSError *
error = nil;
589 AVCaptureDeviceInput* audio_dev_input = [[[AVCaptureDeviceInput alloc] initWithDevice:audio_device
error:&
error] autorelease];
590 dispatch_queue_t queue;
591
592 if (!audio_dev_input) {
594 [[
error localizedDescription] UTF8String]);
595 return 1;
596 }
597
598 if ([
ctx->capture_session canAddInput:audio_dev_input]) {
599 [ctx->capture_session addInput:audio_dev_input];
600 } else {
602 return 1;
603 }
604
605 // Attaching output
606 ctx->audio_output = [[AVCaptureAudioDataOutput alloc] init];
607
608 if (!
ctx->audio_output) {
610 return 1;
611 }
612
614
615 queue = dispatch_queue_create(
"avf_audio_queue",
NULL);
616 [ctx->audio_output setSampleBufferDelegate:ctx->avf_audio_delegate queue:queue];
617 dispatch_release(queue);
618
619 if ([
ctx->capture_session canAddOutput:
ctx->audio_output]) {
620 [ctx->capture_session addOutput:ctx->audio_output];
621 } else {
623 return 1;
624 }
625
626 return 0;
627 }
628
630 {
632 CVImageBufferRef image_buffer;
633 CMBlockBufferRef block_buffer;
634 CGSize image_buffer_size;
636
637 if (!stream) {
638 return 1;
639 }
640
641 // Take stream info from the first frame.
642 while (
ctx->frames_captured < 1) {
643 CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
644 }
645
647
648 ctx->video_stream_index = stream->index;
649
651
652 image_buffer = CMSampleBufferGetImageBuffer(
ctx->current_frame);
653 block_buffer = CMSampleBufferGetDataBuffer(
ctx->current_frame);
654
655 if (image_buffer) {
656 image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);
657
660 stream->codecpar->width = (
int)image_buffer_size.width;
661 stream->codecpar->height = (int)image_buffer_size.height;
662 stream->codecpar->format =
ctx->pixel_format;
663 } else {
666 stream->codecpar->format =
ctx->pixel_format;
667 }
668
669 CFRelease(
ctx->current_frame);
670 ctx->current_frame = nil;
671
673
674 return 0;
675 }
676
678 {
680 CMFormatDescriptionRef format_desc;
682
683 if (!stream) {
684 return 1;
685 }
686
687 // Take stream info from the first frame.
688 while (
ctx->audio_frames_captured < 1) {
689 CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
690 }
691
693
694 ctx->audio_stream_index = stream->index;
695
697
698 format_desc = CMSampleBufferGetFormatDescription(
ctx->current_audio_frame);
699 const AudioStreamBasicDescription *basic_desc = CMAudioFormatDescriptionGetStreamBasicDescription(format_desc);
700
701 if (!basic_desc) {
704 return 1;
705 }
706
708 stream->codecpar->sample_rate = basic_desc->mSampleRate;
709 stream->codecpar->channels = basic_desc->mChannelsPerFrame;
711
712 ctx->audio_channels = basic_desc->mChannelsPerFrame;
713 ctx->audio_bits_per_sample = basic_desc->mBitsPerChannel;
714 ctx->audio_float = basic_desc->mFormatFlags & kAudioFormatFlagIsFloat;
715 ctx->audio_be = basic_desc->mFormatFlags & kAudioFormatFlagIsBigEndian;
716 ctx->audio_signed_integer = basic_desc->mFormatFlags & kAudioFormatFlagIsSignedInteger;
717 ctx->audio_packed = basic_desc->mFormatFlags & kAudioFormatFlagIsPacked;
718 ctx->audio_non_interleaved = basic_desc->mFormatFlags & kAudioFormatFlagIsNonInterleaved;
719
720 if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
722 ctx->audio_bits_per_sample == 32 &&
725 } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
726 ctx->audio_signed_integer &&
727 ctx->audio_bits_per_sample == 16 &&
730 } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
731 ctx->audio_signed_integer &&
732 ctx->audio_bits_per_sample == 24 &&
735 } else if (basic_desc->mFormatID == kAudioFormatLinearPCM &&
736 ctx->audio_signed_integer &&
737 ctx->audio_bits_per_sample == 32 &&
740 } else {
743 return 1;
744 }
745
746 if (
ctx->audio_non_interleaved) {
747 CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(
ctx->current_audio_frame);
748 ctx->audio_buffer_size = CMBlockBufferGetDataLength(block_buffer);
750 if (!
ctx->audio_buffer) {
753 return 1;
754 }
755 }
756
757 CFRelease(
ctx->current_audio_frame);
758 ctx->current_audio_frame = nil;
759
761
762 return 0;
763 }
764
766 {
768 NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
769 uint32_t num_screens = 0;
771 AVCaptureDevice *video_device = nil;
772 AVCaptureDevice *audio_device = nil;
773 // Find capture device
774 NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
775 NSArray *devices_muxed = [AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed];
776
777 ctx->num_video_devices = [devices count] + [devices_muxed count];
778
780
781 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
782 CGGetActiveDisplayList(0,
NULL, &num_screens);
783 #endif
784
785 // List devices if requested
786 if (
ctx->list_devices) {
789 for (AVCaptureDevice *device in devices) {
790 const char *
name = [[device localizedName] UTF8String];
791 index = [devices indexOfObject:device];
793 }
794 for (AVCaptureDevice *device in devices_muxed) {
795 const char *
name = [[device localizedName] UTF8String];
796 index = [devices count] + [devices_muxed indexOfObject:device];
798 }
799 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
800 if (num_screens > 0) {
801 CGDirectDisplayID screens[num_screens];
802 CGGetActiveDisplayList(num_screens, screens, &num_screens);
803 for (
int i = 0;
i < num_screens;
i++) {
805 }
806 }
807 #endif
808
810 devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
811 for (AVCaptureDevice *device in devices) {
812 const char *
name = [[device localizedName] UTF8String];
813 int index = [devices indexOfObject:device];
815 }
817 }
818
819 // parse input filename for video and audio device
823
824 // check for device index given in filename
825 if (
ctx->video_device_index == -1 &&
ctx->video_filename) {
826 sscanf(
ctx->video_filename,
"%d", &
ctx->video_device_index);
827 }
828 if (
ctx->audio_device_index == -1 &&
ctx->audio_filename) {
829 sscanf(
ctx->audio_filename,
"%d", &
ctx->audio_device_index);
830 }
831
832 if (
ctx->video_device_index >= 0) {
833 if (
ctx->video_device_index <
ctx->num_video_devices) {
834 if (
ctx->video_device_index < [devices count]) {
835 video_device = [devices objectAtIndex:ctx->video_device_index];
836 } else {
837 video_device = [devices_muxed objectAtIndex:(ctx->video_device_index - [devices count])];
838 ctx->video_is_muxed = 1;
839 }
840 }
else if (
ctx->video_device_index <
ctx->num_video_devices + num_screens) {
841 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
842 CGDirectDisplayID screens[num_screens];
843 CGGetActiveDisplayList(num_screens, screens, &num_screens);
844 AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[ctx->video_device_index - ctx->
num_video_devices]] autorelease];
845
846 if (
ctx->framerate.num > 0) {
847 capture_screen_input.minFrameDuration = CMTimeMake(
ctx->framerate.den,
ctx->framerate.num);
848 }
849
850 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
851 if (
ctx->capture_cursor) {
852 capture_screen_input.capturesCursor = YES;
853 } else {
854 capture_screen_input.capturesCursor = NO;
855 }
856 #endif
857
858 if (
ctx->capture_mouse_clicks) {
859 capture_screen_input.capturesMouseClicks = YES;
860 } else {
861 capture_screen_input.capturesMouseClicks = NO;
862 }
863
864 video_device = (AVCaptureDevice*) capture_screen_input;
865 ctx->video_is_screen = 1;
866 #endif
867 } else {
870 }
871 }
else if (
ctx->video_filename &&
872 strncmp(
ctx->video_filename,
"none", 4)) {
873 if (!strncmp(
ctx->video_filename,
"default", 7)) {
874 video_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
875 } else {
876 // looking for video inputs
877 for (AVCaptureDevice *device in devices) {
878 if (!strncmp(
ctx->video_filename, [[device localizedName] UTF8String], strlen(
ctx->video_filename))) {
879 video_device = device;
880 break;
881 }
882 }
883 // looking for muxed inputs
884 for (AVCaptureDevice *device in devices_muxed) {
885 if (!strncmp(
ctx->video_filename, [[device localizedName] UTF8String], strlen(
ctx->video_filename))) {
886 video_device = device;
887 ctx->video_is_muxed = 1;
888 break;
889 }
890 }
891
892 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
893 // looking for screen inputs
894 if (!video_device) {
895 int idx;
896 if(sscanf(
ctx->video_filename,
"Capture screen %d", &idx) && idx < num_screens) {
897 CGDirectDisplayID screens[num_screens];
898 CGGetActiveDisplayList(num_screens, screens, &num_screens);
899 AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
900 video_device = (AVCaptureDevice*) capture_screen_input;
901 ctx->video_device_index =
ctx->num_video_devices + idx;
902 ctx->video_is_screen = 1;
903
904 if (
ctx->framerate.num > 0) {
905 capture_screen_input.minFrameDuration = CMTimeMake(
ctx->framerate.den,
ctx->framerate.num);
906 }
907
908 #if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
909 if (
ctx->capture_cursor) {
910 capture_screen_input.capturesCursor = YES;
911 } else {
912 capture_screen_input.capturesCursor = NO;
913 }
914 #endif
915
916 if (
ctx->capture_mouse_clicks) {
917 capture_screen_input.capturesMouseClicks = YES;
918 } else {
919 capture_screen_input.capturesMouseClicks = NO;
920 }
921 }
922 }
923 #endif
924 }
925
926 if (!video_device) {
929 }
930 }
931
932 // get audio device
933 if (
ctx->audio_device_index >= 0) {
934 NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
935
936 if (
ctx->audio_device_index >= [devices count]) {
939 }
940
941 audio_device = [devices objectAtIndex:ctx->audio_device_index];
942 }
else if (
ctx->audio_filename &&
943 strncmp(
ctx->audio_filename,
"none", 4)) {
944 if (!strncmp(
ctx->audio_filename,
"default", 7)) {
945 audio_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
946 } else {
947 NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
948
949 for (AVCaptureDevice *device in devices) {
950 if (!strncmp(
ctx->audio_filename, [[device localizedName] UTF8String], strlen(
ctx->audio_filename))) {
951 audio_device = device;
952 break;
953 }
954 }
955 }
956
957 if (!audio_device) {
960 }
961 }
962
963 // Neither a video nor an audio capture device was found for AVMediaTypeVideo/Audio
964 if (!video_device && !audio_device) {
967 }
968
969 if (video_device) {
970 if (
ctx->video_device_index <
ctx->num_video_devices) {
972 } else {
974 }
975 }
976 if (audio_device) {
977 av_log(
s,
AV_LOG_DEBUG,
"audio device '%s' opened\n", [[audio_device localizedName] UTF8String]);
978 }
979
980 // Initialize capture session
981 ctx->capture_session = [[AVCaptureSession alloc] init];
982
985 }
987 }
988
989 [ctx->capture_session startRunning];
990
991 /* Unlock device configuration only after the session is started so it
992 * does not reset the capture formats */
993 if (!
ctx->video_is_screen) {
994 [video_device unlockForConfiguration];
995 }
996
999 }
1000
1001 // set audio stream
1004 }
1005
1006 [pool release];
1007 return 0;
1008
1010 [pool release];
1015 }
1016
1018 CVPixelBufferRef image_buffer,
1020 {
1022 int src_linesize[4];
1023 const uint8_t *src_data[4];
1024 int width = CVPixelBufferGetWidth(image_buffer);
1025 int height = CVPixelBufferGetHeight(image_buffer);
1027
1028 memset(src_linesize, 0, sizeof(src_linesize));
1029 memset(src_data, 0, sizeof(src_data));
1030
1031 status = CVPixelBufferLockBaseAddress(image_buffer, 0);
1032 if (
status != kCVReturnSuccess) {
1035 }
1036
1037 if (CVPixelBufferIsPlanar(image_buffer)) {
1038 size_t plane_count = CVPixelBufferGetPlaneCount(image_buffer);
1040 for(
i = 0;
i < plane_count;
i++){
1041 src_linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(image_buffer,
i);
1042 src_data[i] = CVPixelBufferGetBaseAddressOfPlane(image_buffer,
i);
1043 }
1044 } else {
1045 src_linesize[0] = CVPixelBufferGetBytesPerRow(image_buffer);
1046 src_data[0] = CVPixelBufferGetBaseAddress(image_buffer);
1047 }
1048
1050 src_data, src_linesize,
1052
1053
1054
1055 CVPixelBufferUnlockBaseAddress(image_buffer, 0);
1056
1058 }
1059
1061 {
1063
1064 do {
1065 CVImageBufferRef image_buffer;
1066 CMBlockBufferRef block_buffer;
1068
1069 if (
ctx->current_frame != nil) {
1071 int length = 0;
1072
1073 image_buffer = CMSampleBufferGetImageBuffer(
ctx->current_frame);
1074 block_buffer = CMSampleBufferGetDataBuffer(
ctx->current_frame);
1075
1076 if (image_buffer != nil) {
1077 length = (
int)CVPixelBufferGetDataSize(image_buffer);
1078 } else if (block_buffer != nil) {
1079 length = (
int)CMBlockBufferGetDataLength(block_buffer);
1080 } else {
1083 }
1084
1088 }
1089
1090 CMItemCount count;
1092
1093 if (CMSampleBufferGetOutputSampleTimingInfoArray(
ctx->current_frame, 1, &
timing_info, &count) == noErr) {
1096 }
1097
1100
1101 if (image_buffer) {
1103 } else {
1105 OSStatus
ret = CMBlockBufferCopyDataBytes(block_buffer, 0,
pkt->
size,
pkt->
data);
1106 if (
ret != kCMBlockBufferNoErr) {
1108 }
1109 }
1110 CFRelease(
ctx->current_frame);
1111 ctx->current_frame = nil;
1112
1116 }
1117 }
else if (
ctx->current_audio_frame != nil) {
1118 CMBlockBufferRef block_buffer = CMSampleBufferGetDataBuffer(
ctx->current_audio_frame);
1119 int block_buffer_size = CMBlockBufferGetDataLength(block_buffer);
1120
1121 if (!block_buffer || !block_buffer_size) {
1124 }
1125
1126 if (
ctx->audio_non_interleaved && block_buffer_size >
ctx->audio_buffer_size) {
1129 }
1130
1134 }
1135
1136 CMItemCount count;
1138
1139 if (CMSampleBufferGetOutputSampleTimingInfoArray(
ctx->current_audio_frame, 1, &
timing_info, &count) == noErr) {
1142 }
1143
1146
1147 if (
ctx->audio_non_interleaved) {
1149
1150 OSStatus
ret = CMBlockBufferCopyDataBytes(block_buffer, 0,
pkt->
size,
ctx->audio_buffer);
1151 if (
ret != kCMBlockBufferNoErr) {
1154 }
1155
1156 num_samples =
pkt->
size / (
ctx->audio_channels * (
ctx->audio_bits_per_sample >> 3));
1157
1158 // transform decoded frame into output format
1159 #define INTERLEAVE_OUTPUT(bps) \
1160 { \
1161 int##bps##_t **src; \
1162 int##bps##_t *dest; \
1163 src = av_malloc(ctx->audio_channels * sizeof(int##bps##_t*)); \
1164 if (!src) { \
1165 unlock_frames(ctx); \
1166 return AVERROR(EIO); \
1167 } \
1168 \
1169 for (c = 0; c < ctx->audio_channels; c++) { \
1170 src[c] = ((int##bps##_t*)ctx->audio_buffer) + c * num_samples; \
1171 } \
1172 dest = (int##bps##_t*)pkt->data; \
1173 shift = bps - ctx->audio_bits_per_sample; \
1174 for (sample = 0; sample < num_samples; sample++) \
1175 for (c = 0; c < ctx->audio_channels; c++) \
1176 *dest++ = src[c][sample] << shift; \
1177 av_freep(&src); \
1178 }
1179
1180 if (
ctx->audio_bits_per_sample <= 16) {
1182 } else {
1184 }
1185 } else {
1186 OSStatus
ret = CMBlockBufferCopyDataBytes(block_buffer, 0,
pkt->
size,
pkt->
data);
1187 if (
ret != kCMBlockBufferNoErr) {
1190 }
1191 }
1192
1193 CFRelease(
ctx->current_audio_frame);
1194 ctx->current_audio_frame = nil;
1195 } else {
1198 if (
ctx->observed_quit) {
1200 } else {
1202 }
1203 }
1204
1207
1208 return 0;
1209 }
1210
1212 {
1215 return 0;
1216 }
1217
1229
1231 };
1232
1239 };
1240
1242 .
name =
"avfoundation",
1250 };