int64_t first_pts;
int64_t first_audio_pts;
pthread_mutex_t frame_lock;
- pthread_cond_t frame_wait_cond;
id avf_delegate;
id avf_audio_delegate;
int capture_cursor;
int capture_mouse_clicks;
+ int capture_raw_data;
+ int drop_late_frames;
+ int video_is_muxed;
+ int video_is_screen;
int list_devices;
int video_device_index;
AVCaptureAudioDataOutput *audio_output;
CMSampleBufferRef current_frame;
CMSampleBufferRef current_audio_frame;
+
+ AVCaptureDevice *observed_device;
+#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
+ AVCaptureDeviceTransportControlsPlaybackMode observed_mode;
+#endif
+ int observed_quit;
} AVFContext;
static void lock_frames(AVFContext* ctx)
{
if (self = [super init]) {
_context = context;
+
+ // start observing if a device is set for it
+#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
+ if (_context->observed_device) {
+ NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
+ NSKeyValueObservingOptions options = NSKeyValueObservingOptionNew;
+
+ [_context->observed_device addObserver: self
+ forKeyPath: keyPath
+ options: options
+ context: _context];
+ }
+#endif
}
return self;
}
+- (void)dealloc {
+ // stop observing if a device is set for it
+#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
+ if (_context->observed_device) {
+ NSString *keyPath = NSStringFromSelector(@selector(transportControlsPlaybackMode));
+ [_context->observed_device removeObserver: self forKeyPath: keyPath];
+ }
+#endif
+ [super dealloc];
+}
+
+- (void)observeValueForKeyPath:(NSString *)keyPath
+ ofObject:(id)object
+ change:(NSDictionary *)change
+ context:(void *)context {
+ if (context == _context) {
+#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
+ AVCaptureDeviceTransportControlsPlaybackMode mode =
+ [change[NSKeyValueChangeNewKey] integerValue];
+
+ if (mode != _context->observed_mode) {
+ if (mode == AVCaptureDeviceTransportControlsNotPlayingMode) {
+ _context->observed_quit = 1;
+ }
+ _context->observed_mode = mode;
+ }
+#endif
+ } else {
+ [super observeValueForKeyPath: keyPath
+ ofObject: object
+ change: change
+ context: context];
+ }
+}
+
- (void) captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)videoFrame
fromConnection:(AVCaptureConnection *)connection
_context->current_frame = (CMSampleBufferRef)CFRetain(videoFrame);
- pthread_cond_signal(&_context->frame_wait_cond);
-
unlock_frames(_context);
++_context->frames_captured;
_context->current_audio_frame = (CMSampleBufferRef)CFRetain(audioFrame);
- pthread_cond_signal(&_context->frame_wait_cond);
-
unlock_frames(_context);
++_context->audio_frames_captured;
av_freep(&ctx->audio_buffer);
pthread_mutex_destroy(&ctx->frame_lock);
- pthread_cond_destroy(&ctx->frame_wait_cond);
if (ctx->current_frame) {
CFRelease(ctx->current_frame);
NSObject *selected_range = nil;
NSObject *selected_format = nil;
- for (format in [video_device valueForKey:@"formats"]) {
- CMFormatDescriptionRef formatDescription;
- CMVideoDimensions dimensions;
+ // try to configure format by formats list
+ // might raise an exception if no format list is given
+ // (then fallback to default, no configuration)
+ @try {
+ for (format in [video_device valueForKey:@"formats"]) {
+ CMFormatDescriptionRef formatDescription;
+ CMVideoDimensions dimensions;
- formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
- dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
+ formatDescription = (CMFormatDescriptionRef) [format performSelector:@selector(formatDescription)];
+ dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
- if ((ctx->width == 0 && ctx->height == 0) ||
- (dimensions.width == ctx->width && dimensions.height == ctx->height)) {
+ if ((ctx->width == 0 && ctx->height == 0) ||
+ (dimensions.width == ctx->width && dimensions.height == ctx->height)) {
- selected_format = format;
+ selected_format = format;
- for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
- double max_framerate;
+ for (range in [format valueForKey:@"videoSupportedFrameRateRanges"]) {
+ double max_framerate;
- [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
- if (fabs (framerate - max_framerate) < 0.01) {
- selected_range = range;
- break;
+ [[range valueForKey:@"maxFrameRate"] getValue:&max_framerate];
+ if (fabs (framerate - max_framerate) < 0.01) {
+ selected_range = range;
+ break;
+ }
}
}
}
- }
-
- if (!selected_format) {
- av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device\n",
- ctx->width, ctx->height);
- goto unsupported_format;
- }
- if (!selected_range) {
- av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device\n",
- framerate);
- goto unsupported_format;
- }
+ if (!selected_format) {
+ av_log(s, AV_LOG_ERROR, "Selected video size (%dx%d) is not supported by the device.\n",
+ ctx->width, ctx->height);
+ goto unsupported_format;
+ }
- if ([video_device lockForConfiguration:NULL] == YES) {
- NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];
+ if (!selected_range) {
+ av_log(s, AV_LOG_ERROR, "Selected framerate (%f) is not supported by the device.\n",
+ framerate);
+ if (ctx->video_is_muxed) {
+ av_log(s, AV_LOG_ERROR, "Falling back to default.\n");
+ } else {
+ goto unsupported_format;
+ }
+ }
- [video_device setValue:selected_format forKey:@"activeFormat"];
- [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
- [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
- } else {
- av_log(s, AV_LOG_ERROR, "Could not lock device for configuration");
- return AVERROR(EINVAL);
+ if ([video_device lockForConfiguration:NULL] == YES) {
+ if (selected_format) {
+ [video_device setValue:selected_format forKey:@"activeFormat"];
+ }
+ if (selected_range) {
+ NSValue *min_frame_duration = [selected_range valueForKey:@"minFrameDuration"];
+ [video_device setValue:min_frame_duration forKey:@"activeVideoMinFrameDuration"];
+ [video_device setValue:min_frame_duration forKey:@"activeVideoMaxFrameDuration"];
+ }
+ } else {
+ av_log(s, AV_LOG_ERROR, "Could not lock device for configuration.\n");
+ return AVERROR(EINVAL);
+ }
+ } @catch(NSException *e) {
+ av_log(ctx, AV_LOG_WARNING, "Configuration of video device failed, falling back to default.\n");
}
return 0;
}
}
- ctx->pixel_format = pxl_fmt_spec.ff_id;
- pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
- capture_dict = [NSDictionary dictionaryWithObject:pixel_format
- forKey:(id)kCVPixelBufferPixelFormatTypeKey];
+ // set videoSettings to an empty dict for receiving raw data of muxed devices
+ if (ctx->capture_raw_data) {
+ ctx->pixel_format = pxl_fmt_spec.ff_id;
+ ctx->video_output.videoSettings = @{ };
+ } else {
+ ctx->pixel_format = pxl_fmt_spec.ff_id;
+ pixel_format = [NSNumber numberWithUnsignedInt:pxl_fmt_spec.avf_id];
+ capture_dict = [NSDictionary dictionaryWithObject:pixel_format
+ forKey:(id)kCVPixelBufferPixelFormatTypeKey];
- [ctx->video_output setVideoSettings:capture_dict];
- [ctx->video_output setAlwaysDiscardsLateVideoFrames:YES];
+ [ctx->video_output setVideoSettings:capture_dict];
+ }
+ [ctx->video_output setAlwaysDiscardsLateVideoFrames:ctx->drop_late_frames];
+
+#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
+ // check for transport control support and set observer device if supported
+ if (!ctx->video_is_screen) {
+ int trans_ctrl = [video_device transportControlsSupported];
+ AVCaptureDeviceTransportControlsPlaybackMode trans_mode = [video_device transportControlsPlaybackMode];
+
+ if (trans_ctrl) {
+ ctx->observed_mode = trans_mode;
+ ctx->observed_device = video_device;
+ }
+ }
+#endif
ctx->avf_delegate = [[AVFFrameReceiver alloc] initWithContext:ctx];
{
AVFContext *ctx = (AVFContext*)s->priv_data;
CVImageBufferRef image_buffer;
+ CMBlockBufferRef block_buffer;
CGSize image_buffer_size;
AVStream* stream = avformat_new_stream(s, NULL);
avpriv_set_pts_info(stream, 64, 1, avf_time_base);
- image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
- image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);
+ image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
+ block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);
- stream->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
- stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
- stream->codecpar->width = (int)image_buffer_size.width;
- stream->codecpar->height = (int)image_buffer_size.height;
- stream->codecpar->format = ctx->pixel_format;
+ if (image_buffer) {
+ image_buffer_size = CVImageBufferGetEncodedSize(image_buffer);
+
+ stream->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
+ stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
+ stream->codecpar->width = (int)image_buffer_size.width;
+ stream->codecpar->height = (int)image_buffer_size.height;
+ stream->codecpar->format = ctx->pixel_format;
+ } else {
+ stream->codecpar->codec_id = AV_CODEC_ID_DVVIDEO;
+ stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
+ stream->codecpar->format = ctx->pixel_format;
+ }
CFRelease(ctx->current_frame);
ctx->current_frame = nil;
static int avf_read_header(AVFormatContext *s)
{
NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
- int capture_screen = 0;
uint32_t num_screens = 0;
AVFContext *ctx = (AVFContext*)s->priv_data;
AVCaptureDevice *video_device = nil;
AVCaptureDevice *audio_device = nil;
// Find capture device
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
- ctx->num_video_devices = [devices count];
+ NSArray *devices_muxed = [AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed];
+ ctx->num_video_devices = [devices count] + [devices_muxed count];
ctx->first_pts = av_gettime();
ctx->first_audio_pts = av_gettime();
pthread_mutex_init(&ctx->frame_lock, NULL);
- pthread_cond_init(&ctx->frame_wait_cond, NULL);
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
CGGetActiveDisplayList(0, NULL, &num_screens);
const char *name = [[device localizedName] UTF8String];
index = [devices indexOfObject:device];
av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
- index++;
+ }
+ for (AVCaptureDevice *device in devices_muxed) {
+ const char *name = [[device localizedName] UTF8String];
+ index = [devices count] + [devices_muxed indexOfObject:device];
+ av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
}
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
if (num_screens > 0) {
CGDirectDisplayID screens[num_screens];
CGGetActiveDisplayList(num_screens, screens, &num_screens);
for (int i = 0; i < num_screens; i++) {
- av_log(ctx, AV_LOG_INFO, "[%d] Capture screen %d\n", index + i, i);
+ av_log(ctx, AV_LOG_INFO, "[%d] Capture screen %d\n", ctx->num_video_devices + i, i);
}
}
#endif
if (ctx->video_device_index >= 0) {
if (ctx->video_device_index < ctx->num_video_devices) {
- video_device = [devices objectAtIndex:ctx->video_device_index];
+ if (ctx->video_device_index < [devices count]) {
+ video_device = [devices objectAtIndex:ctx->video_device_index];
+ } else {
+ video_device = [devices_muxed objectAtIndex:(ctx->video_device_index - [devices count])];
+ ctx->video_is_muxed = 1;
+ }
} else if (ctx->video_device_index < ctx->num_video_devices + num_screens) {
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
CGDirectDisplayID screens[num_screens];
}
video_device = (AVCaptureDevice*) capture_screen_input;
- capture_screen = 1;
+ ctx->video_is_screen = 1;
#endif
} else {
av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
break;
}
}
+ // looking for muxed inputs
+ for (AVCaptureDevice *device in devices_muxed) {
+ if (!strncmp(ctx->video_filename, [[device localizedName] UTF8String], strlen(ctx->video_filename))) {
+ video_device = device;
+ ctx->video_is_muxed = 1;
+ break;
+ }
+ }
#if !TARGET_OS_IPHONE && __MAC_OS_X_VERSION_MIN_REQUIRED >= 1070
// looking for screen inputs
AVCaptureScreenInput* capture_screen_input = [[[AVCaptureScreenInput alloc] initWithDisplayID:screens[idx]] autorelease];
video_device = (AVCaptureDevice*) capture_screen_input;
ctx->video_device_index = ctx->num_video_devices + idx;
- capture_screen = 1;
+ ctx->video_is_screen = 1;
if (ctx->framerate.num > 0) {
capture_screen_input.minFrameDuration = CMTimeMake(ctx->framerate.den, ctx->framerate.num);
/* Unlock device configuration only after the session is started so it
* does not reset the capture formats */
- if (!capture_screen) {
+ if (!ctx->video_is_screen) {
[video_device unlockForConfiguration];
}
status = CVPixelBufferLockBaseAddress(image_buffer, 0);
if (status != kCVReturnSuccess) {
- av_log(s, AV_LOG_ERROR, "Could not lock base address: %d\n", status);
+ av_log(s, AV_LOG_ERROR, "Could not lock base address: %d (%dx%d)\n", status, width, height);
return AVERROR_EXTERNAL;
}
do {
CVImageBufferRef image_buffer;
+ CMBlockBufferRef block_buffer;
lock_frames(ctx);
- image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
-
if (ctx->current_frame != nil) {
int status;
- if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(image_buffer)) < 0) {
+ int length = 0;
+
+ image_buffer = CMSampleBufferGetImageBuffer(ctx->current_frame);
+ block_buffer = CMSampleBufferGetDataBuffer(ctx->current_frame);
+
+ if (image_buffer != nil) {
+ length = (int)CVPixelBufferGetDataSize(image_buffer);
+ } else if (block_buffer != nil) {
+ length = (int)CMBlockBufferGetDataLength(block_buffer);
+        } else {
+            unlock_frames(ctx);
+            return AVERROR(EINVAL);
+        }
+
+ if (av_new_packet(pkt, length) < 0) {
return AVERROR(EIO);
}
pkt->stream_index = ctx->video_stream_index;
pkt->flags |= AV_PKT_FLAG_KEY;
- status = copy_cvpixelbuffer(s, image_buffer, pkt);
+ if (image_buffer) {
+ status = copy_cvpixelbuffer(s, image_buffer, pkt);
+ } else {
+ status = 0;
+ OSStatus ret = CMBlockBufferCopyDataBytes(block_buffer, 0, pkt->size, pkt->data);
+ if (ret != kCMBlockBufferNoErr) {
+ status = AVERROR(EIO);
+ }
+ }
CFRelease(ctx->current_frame);
ctx->current_frame = nil;
ctx->current_audio_frame = nil;
} else {
pkt->data = NULL;
- pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
+ unlock_frames(ctx);
+ if (ctx->observed_quit) {
+ return AVERROR_EOF;
+ } else {
+ return AVERROR(EAGAIN);
+ }
}
unlock_frames(ctx);
}
static const AVOption options[] = {
- { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
- { "true", "", 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
- { "false", "", 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, AV_OPT_FLAG_DECODING_PARAM, "list_devices" },
+ { "list_devices", "list available devices", offsetof(AVFContext, list_devices), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
{ "video_device_index", "select video device by index for devices with same name (starts at 0)", offsetof(AVFContext, video_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
{ "audio_device_index", "select audio device by index for devices with same name (starts at 0)", offsetof(AVFContext, audio_device_index), AV_OPT_TYPE_INT, {.i64 = -1}, -1, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
{ "pixel_format", "set pixel format", offsetof(AVFContext, pixel_format), AV_OPT_TYPE_PIXEL_FMT, {.i64 = AV_PIX_FMT_YUV420P}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM},
{ "framerate", "set frame rate", offsetof(AVFContext, framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "ntsc"}, 0, INT_MAX, AV_OPT_FLAG_DECODING_PARAM },
{ "video_size", "set video size", offsetof(AVFContext, width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, AV_OPT_FLAG_DECODING_PARAM },
- { "capture_cursor", "capture the screen cursor", offsetof(AVFContext, capture_cursor), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
- { "capture_mouse_clicks", "capture the screen mouse clicks", offsetof(AVFContext, capture_mouse_clicks), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
+ { "capture_cursor", "capture the screen cursor", offsetof(AVFContext, capture_cursor), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
+ { "capture_mouse_clicks", "capture the screen mouse clicks", offsetof(AVFContext, capture_mouse_clicks), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
+ { "capture_raw_data", "capture the raw data from device connection", offsetof(AVFContext, capture_raw_data), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
+ { "drop_late_frames", "drop frames that are available later than expected", offsetof(AVFContext, drop_late_frames), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
{ NULL },
};
static const AVClass avf_class = {
- .class_name = "AVFoundation input device",
+ .class_name = "AVFoundation indev",
.item_name = av_default_item_name,
.option = options,
.version = LIBAVUTIL_VERSION_INT,