/*
 * AVFoundation input device
 * Copyright (c) 2015 Luca Barbato
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#import <AVFoundation/AVFoundation.h>

#include <pthread.h>

#include "libavformat/avformat.h"
#include "libavformat/internal.h"

#include "libavutil/log.h"
#include "libavutil/mathematics.h"
#include "libavutil/opt.h"
#include "libavutil/parseutils.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"
/* One entry of the lavu pixel format <-> Core Video pixel format table. */
struct AVPixelFormatMap {
    enum AVPixelFormat pix_fmt;
    OSType core_video_fmt;
};
/*
 * Mapping between lavu pixel formats and the Core Video pixel format
 * constants, sorted by pixel format name. Terminated by AV_PIX_FMT_NONE.
 */
static const struct AVPixelFormatMap pixel_format_map[] = {
    { AV_PIX_FMT_ABGR,         kCVPixelFormatType_32ABGR                   },
    { AV_PIX_FMT_ARGB,         kCVPixelFormatType_32ARGB                   },
    { AV_PIX_FMT_BGR24,        kCVPixelFormatType_24BGR                    },
    { AV_PIX_FMT_BGRA,         kCVPixelFormatType_32BGRA                   },
    { AV_PIX_FMT_MONOBLACK,    kCVPixelFormatType_1Monochrome              },
    { AV_PIX_FMT_NV12,         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange },
    { AV_PIX_FMT_RGB24,        kCVPixelFormatType_24RGB                    },
    /* kCVPixelFormatType_48RGB ('b48r') is 48-bit big-endian R-G-B,
     * which is AV_PIX_FMT_RGB48BE (was previously mismapped to BGR48BE). */
    { AV_PIX_FMT_RGB48BE,      kCVPixelFormatType_48RGB                    },
    { AV_PIX_FMT_RGB555BE,     kCVPixelFormatType_16BE555                  },
    { AV_PIX_FMT_RGB555LE,     kCVPixelFormatType_16LE555                  },
    { AV_PIX_FMT_RGB565BE,     kCVPixelFormatType_16BE565                  },
    { AV_PIX_FMT_RGB565LE,     kCVPixelFormatType_16LE565                  },
    { AV_PIX_FMT_RGBA,         kCVPixelFormatType_32RGBA                   },
    { AV_PIX_FMT_UYVY422,      kCVPixelFormatType_422YpCbCr8               },
    { AV_PIX_FMT_YUV420P,      kCVPixelFormatType_420YpCbCr8Planar         },
    { AV_PIX_FMT_YUV422P10,    kCVPixelFormatType_422YpCbCr10              },
    { AV_PIX_FMT_YUV422P16,    kCVPixelFormatType_422YpCbCr16              },
    { AV_PIX_FMT_YUV444P,      kCVPixelFormatType_444YpCbCr8               },
    { AV_PIX_FMT_YUV444P10,    kCVPixelFormatType_444YpCbCr10              },
    { AV_PIX_FMT_YUVA444P,     kCVPixelFormatType_4444YpCbCrA8R            },
    { AV_PIX_FMT_YUVA444P16LE, kCVPixelFormatType_4444AYpCbCr16            },
    { AV_PIX_FMT_YUYV422,      kCVPixelFormatType_422YpCbCr8_yuvs          },
#if __MAC_OS_X_VERSION_MIN_REQUIRED >= 1080
    { AV_PIX_FMT_GRAY8,        kCVPixelFormatType_OneComponent8            },
#endif
    { AV_PIX_FMT_NONE,         0                                           },
};
/**
 * Map a Core Video pixel format to the corresponding lavu pixel format.
 *
 * @param core_video_fmt Core Video four-character format code
 * @return the matching AVPixelFormat, or AV_PIX_FMT_NONE if unsupported
 */
static enum AVPixelFormat core_video_to_pix_fmt(OSType core_video_fmt)
{
    int i;
    for (i = 0; pixel_format_map[i].pix_fmt != AV_PIX_FMT_NONE; i++)
        if (core_video_fmt == pixel_format_map[i].core_video_fmt)
            return pixel_format_map[i].pix_fmt;
    return AV_PIX_FMT_NONE;
}
/**
 * Map a lavu pixel format to the corresponding Core Video pixel format.
 *
 * @param pix_fmt lavu pixel format
 * @return the matching Core Video format code, or 0 if unsupported
 */
static OSType pix_fmt_to_core_video(enum AVPixelFormat pix_fmt)
{
    int i;
    for (i = 0; pixel_format_map[i].pix_fmt != AV_PIX_FMT_NONE; i++)
        if (pix_fmt == pixel_format_map[i].pix_fmt)
            return pixel_format_map[i].core_video_fmt;
    return 0;
}
typedef struct AVFoundationCaptureContext {
    const AVClass *class;          /* Required first member for AVOption handling */

    /* User options (see the options[] table below) */
    int list_devices;              /* Device-listing request bitmask */
    int list_formats;              /* Format-listing request flag */
    char *pixel_format;            /* Preferred pixel format name */
    char *video_size;              /* String describing video size */
    char *framerate;               /* String describing the framerate */

    int video_stream_index;
    int width, height;             /* Parsed from video_size / negotiated */

    AVRational internal_framerate; /* Parsed from framerate */

    int64_t first_pts;             /* Wallclock time of read_header, pts origin */
    int frames_captured;           /* Number of frames delivered by the callback */

    pthread_mutex_t frame_lock;    /* Protects current_frame / frames_captured */
    pthread_cond_t frame_wait_cond;

    /* ARC-compatible pointers to ObjC objects */
    CFTypeRef session;             /* AVCaptureSession */
    CFTypeRef video_output;
    CFTypeRef video_delegate;
    CVImageBufferRef current_frame;
} AVFoundationCaptureContext;
#define AUDIO_DEVICES 1
#define VIDEO_DEVICES 2
/* Parenthesized so the macro expands safely inside larger expressions. */
#define ALL_DEVICES   (AUDIO_DEVICES | VIDEO_DEVICES)

#define OFFSET(x) offsetof(AVFoundationCaptureContext, x)
#define DEC AV_OPT_FLAG_DECODING_PARAM
/* Private options of the avfoundation input device. */
static const AVOption options[] = {
    { "list_devices", "List available devices and exit", OFFSET(list_devices), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, DEC, "list_devices" },
    { "all", "Show all the supported devices", OFFSET(list_devices), AV_OPT_TYPE_CONST, { .i64 = ALL_DEVICES }, 0, INT_MAX, DEC, "list_devices" },
    { "audio", "Show only the audio devices", OFFSET(list_devices), AV_OPT_TYPE_CONST, { .i64 = AUDIO_DEVICES }, 0, INT_MAX, DEC, "list_devices" },
    { "video", "Show only the video devices", OFFSET(list_devices), AV_OPT_TYPE_CONST, { .i64 = VIDEO_DEVICES }, 0, INT_MAX, DEC, "list_devices" },
    { "list_formats", "List available formats and exit", OFFSET(list_formats), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, DEC, "list_formats" },
    { "pixel_format", "Preferred pixel format", OFFSET(pixel_format), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, DEC },
    { "video_size", "A string describing frame size, such as 640x480 or hd720.", OFFSET(video_size), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, DEC },
    { "framerate", "A string representing desired framerate", OFFSET(framerate), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, DEC },
    { NULL },
};
/**
 * Log every capture device of the given media type, with its unique ID,
 * localized name and supported formats.
 */
static void list_capture_devices_by_type(AVFormatContext *s, NSString *type)
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:type];

    av_log(s, AV_LOG_INFO, "Type: %s\n", [type UTF8String]);
    for (AVCaptureDevice *device in devices) {
        av_log(s, AV_LOG_INFO, "uniqueID: %s\nname: %s\nformat:\n",
               [[device uniqueID] UTF8String],
               [[device localizedName] UTF8String]);

        for (AVCaptureDeviceFormat *format in device.formats)
            av_log(s, AV_LOG_INFO, "\t%s\n",
                   [[NSString stringWithFormat: @ "%@", format] UTF8String]);
    }
}
/**
 * Handle the "list_devices" option: print the requested device classes
 * and abort demuxer setup.
 *
 * @return AVERROR_EXIT so the caller stops after listing
 */
static int avfoundation_list_capture_devices(AVFormatContext *s)
{
    AVFoundationCaptureContext *ctx = s->priv_data;

    if (ctx->list_devices & AUDIO_DEVICES)
        list_capture_devices_by_type(s, AVMediaTypeAudio);

    if (ctx->list_devices & VIDEO_DEVICES)
        list_capture_devices_by_type(s, AVMediaTypeVideo);

    return AVERROR_EXIT;
}
/**
 * Handle the "list_formats" option: print every Core Video pixel format
 * the video output supports that has a lavu equivalent.
 *
 * @return AVERROR_EXIT so the caller stops after listing
 */
static int list_formats(AVFormatContext *s)
{
    av_log(s, AV_LOG_VERBOSE, "Supported pixel formats (first is more efficient):\n");
    AVCaptureVideoDataOutput *out = [[AVCaptureVideoDataOutput alloc] init];

    for (NSNumber *cv_pixel_format in [out availableVideoCVPixelFormatTypes]) {
        OSType cv_fmt = [cv_pixel_format intValue];
        enum AVPixelFormat pix_fmt = core_video_to_pix_fmt(cv_fmt);
        if (pix_fmt != AV_PIX_FMT_NONE) {
            av_log(s, AV_LOG_VERBOSE, " %s: %d\n",
                   av_get_pix_fmt_name(pix_fmt),
                   (int)cv_fmt);
        }
    }
    return AVERROR_EXIT;
}
/* Acquire the lock protecting current_frame/frames_captured. */
static void lock_frames(AVFoundationCaptureContext *ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}
/* Release the lock protecting current_frame/frames_captured. */
static void unlock_frames(AVFoundationCaptureContext *ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}
/* Sample-buffer delegate that hands captured video frames to the context. */
@interface VideoCapture : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVFoundationCaptureContext *_context;
}

- (id)initWithContext:(AVFoundationCaptureContext *)context;

- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
           fromConnection:(AVCaptureConnection *)connection;

@end
@implementation VideoCapture

- (id)initWithContext:(AVFoundationCaptureContext *)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

/*
 * Called by AVFoundation on the capture queue for every frame: retain the
 * new image buffer as current_frame (dropping the previous one) and wake
 * up any reader blocked in avfoundation_read_packet().
 */
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
           fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef buf;
    lock_frames(_context);

    if (_context->current_frame != nil) {
        CFRelease(_context->current_frame);
        _context->current_frame = nil;
    }

    buf = CMSampleBufferGetImageBuffer(videoFrame);
    if (!buf) {
        /* Must not leave the mutex held on the early-exit path. */
        unlock_frames(_context);
        return;
    }
    /* Keep the buffer alive past this callback; released by the reader. */
    CFRetain(buf);

    _context->current_frame = buf;

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);

    ++_context->frames_captured;
}

@end
/**
 * Configure the video device.
 *
 * Select a capture format matching the requested size (or the first format
 * if no size was requested) and a frame-rate range containing the requested
 * rate (or the format's first range if none was requested), then lock the
 * device and apply them. The device is left locked on success; the caller
 * unlocks it once the session is running.
 *
 * @return true on success, false if size/rate are unsupported or the device
 *         could not be locked
 */
static bool configure_video_device(AVFormatContext *s, AVCaptureDevice *video_device)
{
    AVFoundationCaptureContext *ctx = s->priv_data;
    AVCaptureDeviceFormat *selected_format = nil;
    AVFrameRateRange *selected_range = nil;
    double framerate = av_q2d(ctx->internal_framerate);
    double epsilon = 0.00000001;

    for (AVCaptureDeviceFormat *format in [video_device formats]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef)format.formatDescription;
        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

        if ((ctx->width == 0 && ctx->height == 0) ||
            (dimensions.width == ctx->width && dimensions.height == ctx->height)) {
            av_log(s, AV_LOG_VERBOSE, "Trying video size %dx%d\n",
                   dimensions.width, dimensions.height);
            ctx->width  = dimensions.width;
            ctx->height = dimensions.height;
            selected_format = format;

            av_log(s, AV_LOG_VERBOSE, "Checking support for framerate %f\n",
                   framerate);
            for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges)
                if (range.minFrameRate <= (framerate + epsilon) &&
                    range.maxFrameRate >= (framerate - epsilon)) {
                    selected_range = range;
                    break;
                }
            if (!selected_range) {
                /* No range matches: fall back to the format's first range. */
                selected_range = format.videoSupportedFrameRateRanges[0];
                framerate = selected_range.maxFrameRate;
            }
        }

        if (selected_format && selected_range)
            break;
    }

    if (!selected_format) {
        av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device\n",
               ctx->width, ctx->height);
        return false;
    }
    av_log(s, AV_LOG_VERBOSE, "Setting video size to %dx%d\n",
           ctx->width, ctx->height);

    if (framerate && !selected_range) {
        av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device\n",
               framerate);
        return false;
    }
    av_log(s, AV_LOG_VERBOSE, "Setting framerate to %f\n",
           framerate);

    if ([video_device lockForConfiguration:NULL] == YES) {
        [video_device setActiveFormat:selected_format];
        [video_device setActiveVideoMinFrameDuration:CMTimeMake(1, framerate)];
        [video_device setActiveVideoMaxFrameDuration:CMTimeMake(1, framerate)];
    } else {
        av_log(s, AV_LOG_ERROR, "Could not lock device for configuration\n");
        return false;
    }

    return true;
}
/* Log every size/frame-rate combination the device supports (used when
 * configuration fails, to help the user pick valid options). */
static void print_supported_formats(AVFormatContext *s, AVCaptureDevice *device)
{
    av_log(s, AV_LOG_WARNING, "Supported modes:\n");
    for (AVCaptureDeviceFormat *format in [device formats]) {
        CMFormatDescriptionRef formatDescription;
        CMVideoDimensions dimensions;

        formatDescription = (CMFormatDescriptionRef)format.formatDescription;
        dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);

        for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges)
            av_log(s, AV_LOG_WARNING, " %dx%d@[%f %f]fps\n",
                   dimensions.width, dimensions.height,
                   range.minFrameRate, range.maxFrameRate);
    }
}
/**
 * Configure the given device, attach it as an input of the capture session,
 * and (for video devices) create the video data output with a negotiated
 * pixel format plus the delegate that receives frames.
 *
 * @return 0 on success, a negative AVERROR code on failure
 */
static int setup_stream(AVFormatContext *s, AVCaptureDevice *device)
{
    AVFoundationCaptureContext *ctx = s->priv_data;
    NSError *__autoreleasing error = nil;
    AVCaptureDeviceInput *input;
    AVCaptureSession *session = (__bridge AVCaptureSession *)ctx->session;

    av_log(s, AV_LOG_VERBOSE, "Setting up stream for device %s\n", [[device uniqueID] UTF8String]);

    if (!configure_video_device(s, device)) {
        av_log(s, AV_LOG_ERROR, "device configuration failed\n");
        print_supported_formats(s, device);
        return AVERROR(EINVAL);
    }

    // add the input devices
    input = [AVCaptureDeviceInput deviceInputWithDevice:device
                                                  error:&error];
    if (!input) {
        av_log(s, AV_LOG_ERROR, "%s\n",
               [[error localizedDescription] UTF8String]);
        return AVERROR_UNKNOWN;
    }

    if ([session canAddInput:input]) {
        [session addInput:input];
    } else {
        av_log(s, AV_LOG_ERROR, "Cannot add video input to capture session\n");
        return AVERROR(EINVAL);
    }

    // add the output devices
    if ([device hasMediaType:AVMediaTypeVideo]) {
        AVCaptureVideoDataOutput *out = [[AVCaptureVideoDataOutput alloc] init];
        NSNumber *core_video_fmt = nil;
        if (!out) {
            av_log(s, AV_LOG_ERROR, "Failed to init AV video output\n");
            return AVERROR(EINVAL);
        }

        [out setAlwaysDiscardsLateVideoFrames:YES];

        if (ctx->pixel_format) {
            // Try to use specified pixel format
            core_video_fmt = [NSNumber numberWithInt:pix_fmt_to_core_video(av_get_pix_fmt(ctx->pixel_format))];
            if ([[out availableVideoCVPixelFormatTypes] indexOfObject:core_video_fmt] != NSNotFound) {
                av_log(s, AV_LOG_VERBOSE, "Pixel format %s supported!\n", ctx->pixel_format);
            } else {
                core_video_fmt = nil;
            }
        }

        if (!ctx->pixel_format || !core_video_fmt) {
            av_log(s, AV_LOG_VERBOSE, "Pixel format not supported or not provided, overriding...\n");
            for (NSNumber *cv_pixel_format in [out availableVideoCVPixelFormatTypes]) {
                OSType cv_fmt = [cv_pixel_format intValue];
                enum AVPixelFormat pix_fmt = core_video_to_pix_fmt(cv_fmt);
                // Use the first one in the list, it will be the most effective
                if (pix_fmt != AV_PIX_FMT_NONE) {
                    core_video_fmt = cv_pixel_format;
                    ctx->pixel_format = av_strdup(av_get_pix_fmt_name(pix_fmt));
                    break;
                }
            }
        }

        // fail if there is no appropriate pixel format
        if (!core_video_fmt) {
            return AVERROR(EINVAL);
        } else {
            av_log(s, AV_LOG_VERBOSE, "Using %s.\n",
                   ctx->pixel_format);
        }

        NSDictionary *capture_dict = [NSDictionary dictionaryWithObject:core_video_fmt
                                                                 forKey:(const NSString *)kCVPixelBufferPixelFormatTypeKey];
        [out setVideoSettings:capture_dict];

        VideoCapture *delegate = [[VideoCapture alloc] initWithContext:ctx];

        dispatch_queue_t queue = dispatch_queue_create("avf_queue", NULL);
        [out setSampleBufferDelegate:delegate queue:queue];

        if ([session canAddOutput:out]) {
            [session addOutput:out];
            /* Retained here; balanced by CFRelease in destroy_context(). */
            ctx->video_output   = (__bridge_retained CFTypeRef)out;
            ctx->video_delegate = (__bridge_retained CFTypeRef)delegate;
        } else {
            av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
            return AVERROR(EINVAL);
        }
    }

    return 0;
}
/**
 * Create the video AVStream and fill its codec parameters from the first
 * captured frame (blocks on the run loop until one arrives).
 *
 * @return 0 on success, a negative AVERROR code on failure
 */
static int get_video_config(AVFormatContext *s)
{
    AVFoundationCaptureContext *ctx = (AVFoundationCaptureContext *)s->priv_data;
    CVImageBufferRef image_buffer;
    CGSize image_buffer_size;
    AVStream *stream = avformat_new_stream(s, NULL);

    if (!stream) {
        av_log(s, AV_LOG_ERROR, "Failed to create AVStream\n");
        return AVERROR(EINVAL);
    }

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1)
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);

    lock_frames(ctx);

    ctx->video_stream_index = stream->index;

    /* Microsecond timestamps, matching the av_gettime()-based pts. */
    avpriv_set_pts_info(stream, 64, 1, 1000000);

    image_buffer      = ctx->current_frame;
    image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);

    stream->codec->codec_id   = AV_CODEC_ID_RAWVIDEO;
    stream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    stream->codec->width      = (int)image_buffer_size.width;
    stream->codec->height     = (int)image_buffer_size.height;
    stream->codec->pix_fmt    = av_get_pix_fmt(ctx->pixel_format);

    CFRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    return 0;
}
/**
 * Stop the capture session and release every resource owned by the context.
 * Balances the __bridge_retained casts made during setup so the session,
 * output and delegate are not leaked.
 */
static void destroy_context(AVFoundationCaptureContext *ctx)
{
    AVCaptureSession *session = (__bridge AVCaptureSession *)ctx->session;
    [session stopRunning];

    if (ctx->session) {
        CFRelease(ctx->session);
        ctx->session = NULL;
    }
    if (ctx->video_output) {
        CFRelease(ctx->video_output);
        ctx->video_output = NULL;
    }
    if (ctx->video_delegate) {
        CFRelease(ctx->video_delegate);
        ctx->video_delegate = NULL;
    }

    pthread_mutex_destroy(&ctx->frame_lock);
    pthread_cond_destroy(&ctx->frame_wait_cond);

    if (ctx->current_frame) {
        CFRelease(ctx->current_frame);
        ctx->current_frame = NULL;
    }
}
/**
 * Open the system default video capture device.
 *
 * @return 0 on success, AVERROR(EINVAL) if no default device exists
 */
static int setup_default_stream(AVFormatContext *s)
{
    AVCaptureDevice *device;

    for (NSString *type in @[AVMediaTypeVideo]) {
        device = [AVCaptureDevice defaultDeviceWithMediaType:type];
        if (device) {
            av_log(s, AV_LOG_VERBOSE, "Using default device %s\n",
                   [[device uniqueID] UTF8String]);
            return setup_stream(s, device);
        }
    }

    return AVERROR(EINVAL);
}
/**
 * Try to open device given in filename
 * Two supported formats: "device_unique_id" or "[device_unique_id]"
 *
 * @return the matching device, or NULL (nil) if none was found
 */
static AVCaptureDevice *create_device(AVFormatContext *s)
{
    NSString *filename;
    NSError *__autoreleasing error = nil;
    NSRegularExpression *exp;
    NSArray *matches;
    AVCaptureDevice *device;

    filename = [NSString stringWithFormat:@ "%s", s->filename];

    if ((device = [AVCaptureDevice deviceWithUniqueID:filename])) {
        av_log(s, AV_LOG_VERBOSE, "Device with name %s found\n", [filename UTF8String]);
        return device;
    }

    // Remove '[]' from the device name
    NSString *pat = @"(?<=\\[).*?(?=\\])";
    exp = [NSRegularExpression regularExpressionWithPattern:pat
                                                    options:0
                                                      error:&error];
    if (!exp) {
        av_log(s, AV_LOG_ERROR, "%s\n",
               [[error localizedDescription] UTF8String]);
        return NULL;
    }

    matches = [exp matchesInString:filename options:0
                             range:NSMakeRange(0, [filename length])];

    if (matches.count > 0) {
        for (NSTextCheckingResult *match in matches) {
            NSRange range = [match rangeAtIndex:0];
            NSString *uniqueID = [filename substringWithRange:NSMakeRange(range.location, range.length)];
            av_log(s, AV_LOG_VERBOSE, "opening device with ID: %s\n", [uniqueID UTF8String]);
            if (!(device = [AVCaptureDevice deviceWithUniqueID:uniqueID])) {
                av_log(s, AV_LOG_ERROR, "Device with name %s not found", [filename UTF8String]);
                return NULL;
            }
            break;
        }
    }

    return device;
}
/**
 * Initialize synchronization primitives, create the capture session, open
 * the requested (or default) device, start capturing and probe the stream
 * parameters from the first frame.
 *
 * @return 0 on success, a negative AVERROR code on failure
 */
static int setup_streams(AVFormatContext *s)
{
    AVFoundationCaptureContext *ctx = s->priv_data;
    int ret;
    /* Initialized to nil: the "default" path never assigns it, and
     * messaging nil below is a harmless no-op (it previously read an
     * uninitialized pointer). */
    AVCaptureDevice *device = nil;

    pthread_mutex_init(&ctx->frame_lock, NULL);
    pthread_cond_init(&ctx->frame_wait_cond, NULL);

    /* Retained here; balanced by CFRelease in destroy_context(). */
    ctx->session = (__bridge_retained CFTypeRef)[[AVCaptureSession alloc] init];

    if (!strncmp(s->filename, "default", 7)) {
        ret = setup_default_stream(s);
    } else {
        device = create_device(s);
        if (device) {
            ret = setup_stream(s, device);
        } else {
            av_log(s, AV_LOG_ERROR, "No matches for %s\n", s->filename);
            ret = setup_default_stream(s);
        }
    }

    if (ret < 0) {
        av_log(s, AV_LOG_ERROR, "No device could be added\n");
        return ret;
    }

    av_log(s, AV_LOG_VERBOSE, "Starting session!\n");
    [(__bridge AVCaptureSession *)ctx->session startRunning];

    // Session is started, unlock device
    [device unlockForConfiguration];

    av_log(s, AV_LOG_VERBOSE, "Checking video config\n");
    if (get_video_config(s)) {
        destroy_context(ctx);
        return AVERROR(EIO);
    }

    return 0;
}
/**
 * Demuxer read_header callback: handle the listing options, validate the
 * user-supplied pixel format / size / framerate strings, then set up the
 * capture session.
 *
 * @return 0 on success, a negative AVERROR code on failure
 */
static int avfoundation_read_header(AVFormatContext *s)
{
    AVFoundationCaptureContext *ctx = s->priv_data;
    ctx->first_pts = av_gettime();

    AVRational framerate_q = { 0, 1 };
    ctx->internal_framerate = framerate_q;

    if (ctx->list_devices)
        return avfoundation_list_capture_devices(s);
    if (ctx->list_formats) {
        return list_formats(s);
    }

    if (ctx->pixel_format) {
        if (av_get_pix_fmt(ctx->pixel_format) == AV_PIX_FMT_NONE) {
            av_log(s, AV_LOG_ERROR, "No such input format: %s.\n",
                   ctx->pixel_format);
            return AVERROR(EINVAL);
        }
    }

    if (ctx->video_size &&
        (av_parse_video_size(&ctx->width, &ctx->height, ctx->video_size)) < 0) {
        av_log(s, AV_LOG_ERROR, "Could not parse video size '%s'.\n",
               ctx->video_size);
        return AVERROR(EINVAL);
    }

    if (ctx->framerate &&
        (av_parse_video_rate(&ctx->internal_framerate, ctx->framerate)) < 0) {
        av_log(s, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
               ctx->framerate);
        return AVERROR(EINVAL);
    }

    return setup_streams(s);
}
/**
 * Demuxer read_packet callback: block until a frame is available, then copy
 * it into a new packet stamped with a wallclock-relative pts.
 *
 * @return 0 on success, AVERROR(ENOMEM) if packet allocation fails
 */
static int avfoundation_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    AVFoundationCaptureContext *ctx = (AVFoundationCaptureContext *)s->priv_data;

    do {
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(ctx->current_frame)) < 0) {
                unlock_frames(ctx);
                return AVERROR(ENOMEM);
            }

            /* av_gettime() is in microseconds, matching the stream
             * timebase set in get_video_config(). */
            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts,
                                               AV_TIME_BASE_Q,
                                               (AVRational) {1, 1000000 });
            pkt->stream_index = ctx->video_stream_index;
            pkt->flags       |= AV_PKT_FLAG_KEY;

            CVPixelBufferLockBaseAddress(ctx->current_frame, 0);

            void *data = CVPixelBufferGetBaseAddress(ctx->current_frame);
            memcpy(pkt->data, data, pkt->size);

            CVPixelBufferUnlockBaseAddress(ctx->current_frame, 0);
            CFRelease(ctx->current_frame);
            ctx->current_frame = nil;
        } else {
            /* Wait for the capture callback to deliver a frame. */
            pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}
/* Demuxer read_close callback: tear down the capture session. */
static int avfoundation_read_close(AVFormatContext *s)
{
    av_log(s, AV_LOG_VERBOSE, "Closing session...\n");
    AVFoundationCaptureContext *ctx = s->priv_data;
    destroy_context(ctx);
    return 0;
}
static const AVClass avfoundation_class = {
    .class_name = "AVFoundation AVCaptureDevice indev",
    .item_name  = av_default_item_name,
    /* Without .option the options[] table above would be unreachable. */
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};
AVInputFormat ff_avfoundation_demuxer = {
    .name           = "avfoundation",
    .long_name      = NULL_IF_CONFIG_SMALL("AVFoundation AVCaptureDevice grab"),
    .priv_data_size = sizeof(AVFoundationCaptureContext),
    .read_header    = avfoundation_read_header,
    .read_packet    = avfoundation_read_packet,
    .read_close     = avfoundation_read_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &avfoundation_class,
};