X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=libavcodec%2Fvdpau.c;h=f638d201159dd8168242e01acce3fdb211c37e41;hb=f450cc7bc595155bacdb9f5d2414a076ccf81b4a;hp=685309f1ff93cbeea88c7938d9a3de1a1eba2bc1;hpb=e3e158e81f0666b8fe66be9ce1cad63a535920e0;p=ffmpeg

diff --git a/libavcodec/vdpau.c b/libavcodec/vdpau.c
index 685309f1ff9..f638d201159 100644
--- a/libavcodec/vdpau.c
+++ b/libavcodec/vdpau.c
@@ -22,15 +22,11 @@
  */
 
 #include <limits.h>
-#include "libavutil/avassert.h"
+
 #include "avcodec.h"
 #include "internal.h"
-#include "h264.h"
+#include "h264dec.h"
 #include "vc1.h"
-
-#undef NDEBUG
-#include <assert.h>
-
 #include "vdpau.h"
 #include "vdpau_internal.h"
 
@@ -64,17 +60,61 @@ static int vdpau_error(VdpStatus status)
     }
 }
 
+int av_vdpau_get_surface_parameters(AVCodecContext *avctx,
+                                    VdpChromaType *type,
+                                    uint32_t *width, uint32_t *height)
+{
+    VdpChromaType t;
+    uint32_t w = avctx->coded_width;
+    uint32_t h = avctx->coded_height;
+
+    /* See <vdpau/vdpau.h> for per-type alignment constraints. */
+    switch (avctx->sw_pix_fmt) {
+    case AV_PIX_FMT_YUV420P:
+    case AV_PIX_FMT_YUVJ420P:
+        t = VDP_CHROMA_TYPE_420;
+        w = (w + 1) & ~1;
+        h = (h + 3) & ~3;
+        break;
+    case AV_PIX_FMT_YUV422P:
+    case AV_PIX_FMT_YUVJ422P:
+        t = VDP_CHROMA_TYPE_422;
+        w = (w + 1) & ~1;
+        h = (h + 1) & ~1;
+        break;
+    case AV_PIX_FMT_YUV444P:
+    case AV_PIX_FMT_YUVJ444P:
+        t = VDP_CHROMA_TYPE_444;
+        h = (h + 1) & ~1;
+        break;
+    default:
+        return AVERROR(ENOSYS);
+    }
+
+    if (type)
+        *type = t;
+    if (width)
+        *width = w;
+    if (height)
+        *height = h;
+    return 0;
+}
+
 int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
                          int level)
 {
     VDPAUHWContext *hwctx = avctx->hwaccel_context;
     VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
+    VdpVideoSurfaceQueryCapabilities *surface_query_caps;
+    VdpDecoderQueryCapabilities *decoder_query_caps;
     VdpDecoderCreate *create;
     void *func;
     VdpStatus status;
-    /* See vdpau/vdpau.h for alignment constraints. */
-    uint32_t width  = (avctx->coded_width + 1) & ~1;
-    uint32_t height = (avctx->coded_height + 3) & ~3;
+    VdpBool supported;
+    uint32_t max_level, max_mb, max_width, max_height;
+    VdpChromaType type;
+    uint32_t width;
+    uint32_t height;
 
     vdctx->width            = UINT32_MAX;
     vdctx->height           = UINT32_MAX;
@@ -90,6 +130,59 @@ int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
     vdctx->device           = hwctx->device;
     vdctx->get_proc_address = hwctx->get_proc_address;
 
+    if (hwctx->flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
+        level = 0;
+    else if (level < 0)
+        return AVERROR(ENOTSUP);
+
+    if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
+        return AVERROR(ENOSYS);
+
+    if (!(hwctx->flags & AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH) &&
+        type != VDP_CHROMA_TYPE_420)
+        return AVERROR(ENOSYS);
+
+    status = vdctx->get_proc_address(vdctx->device,
+                                     VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
+                                     &func);
+    if (status != VDP_STATUS_OK)
+        return vdpau_error(status);
+    else
+        surface_query_caps = func;
+
+    status = surface_query_caps(vdctx->device, type, &supported,
+                                &max_width, &max_height);
+    if (status != VDP_STATUS_OK)
+        return vdpau_error(status);
+    if (supported != VDP_TRUE ||
+        max_width < width || max_height < height)
+        return AVERROR(ENOTSUP);
+
+    status = vdctx->get_proc_address(vdctx->device,
+                                     VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
+                                     &func);
+    if (status != VDP_STATUS_OK)
+        return vdpau_error(status);
+    else
+        decoder_query_caps = func;
+
+    status = decoder_query_caps(vdctx->device, profile, &supported, &max_level,
+                                &max_mb, &max_width, &max_height);
+#ifdef VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE
+    if ((status != VDP_STATUS_OK || supported != VDP_TRUE) && profile == VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE) {
+        profile = VDP_DECODER_PROFILE_H264_MAIN;
+        status  = decoder_query_caps(vdctx->device, profile, &supported,
+                                     &max_level, &max_mb,
+                                     &max_width, &max_height);
+    }
+#endif
+    if (status != VDP_STATUS_OK)
+        return vdpau_error(status);
+
+    if (supported != VDP_TRUE || max_level < level ||
+        max_width < width || max_height < height)
+        return AVERROR(ENOTSUP);
+
     status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_CREATE,
                                      &func);
     if (status != VDP_STATUS_OK)
@@ -182,7 +275,7 @@ int ff_vdpau_common_end_frame(AVCodecContext *avctx, AVFrame *frame,
     return vdpau_error(status);
 }
 
-#if CONFIG_H263_VDPAU_HWACCEL || CONFIG_MPEG1_VDPAU_HWACCEL || \
+#if CONFIG_MPEG1_VDPAU_HWACCEL || \
     CONFIG_MPEG2_VDPAU_HWACCEL || CONFIG_MPEG4_VDPAU_HWACCEL || \
     CONFIG_VC1_VDPAU_HWACCEL   || CONFIG_WMV3_VDPAU_HWACCEL
 int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx)
@@ -220,48 +313,54 @@ int ff_vdpau_add_buffer(struct vdpau_picture_context *pic_ctx,
     return 0;
 }
 
+#if FF_API_VDPAU_PROFILE
 int av_vdpau_get_profile(AVCodecContext *avctx, VdpDecoderProfile *profile)
 {
-#define PROFILE(prof)       \
-do {                        \
-    *profile = prof;        \
-    return 0;               \
+#define PROFILE(prof)                      \
+do {                                       \
+    *profile = VDP_DECODER_PROFILE_##prof; \
+    return 0;                              \
 } while (0)
 
     switch (avctx->codec_id) {
-    case AV_CODEC_ID_MPEG1VIDEO:               PROFILE(VDP_DECODER_PROFILE_MPEG1);
+    case AV_CODEC_ID_MPEG1VIDEO:               PROFILE(MPEG1);
    case AV_CODEC_ID_MPEG2VIDEO:
         switch (avctx->profile) {
-        case FF_PROFILE_MPEG2_MAIN:            PROFILE(VDP_DECODER_PROFILE_MPEG2_MAIN);
-        case FF_PROFILE_MPEG2_SIMPLE:          PROFILE(VDP_DECODER_PROFILE_MPEG2_SIMPLE);
+        case FF_PROFILE_MPEG2_MAIN:            PROFILE(MPEG2_MAIN);
+        case FF_PROFILE_MPEG2_SIMPLE:          PROFILE(MPEG2_SIMPLE);
         default:                               return AVERROR(EINVAL);
         }
-    case AV_CODEC_ID_H263:                     PROFILE(VDP_DECODER_PROFILE_MPEG4_PART2_ASP);
+    case AV_CODEC_ID_H263:                     PROFILE(MPEG4_PART2_ASP);
     case AV_CODEC_ID_MPEG4:
         switch (avctx->profile) {
-        case FF_PROFILE_MPEG4_SIMPLE:          PROFILE(VDP_DECODER_PROFILE_MPEG4_PART2_SP);
-        case FF_PROFILE_MPEG4_ADVANCED_SIMPLE: PROFILE(VDP_DECODER_PROFILE_MPEG4_PART2_ASP);
+        case FF_PROFILE_MPEG4_SIMPLE:          PROFILE(MPEG4_PART2_SP);
+        case FF_PROFILE_MPEG4_ADVANCED_SIMPLE: PROFILE(MPEG4_PART2_ASP);
         default:                               return AVERROR(EINVAL);
         }
     case AV_CODEC_ID_H264:
         switch (avctx->profile & ~FF_PROFILE_H264_INTRA) {
+        case FF_PROFILE_H264_BASELINE:         PROFILE(H264_BASELINE);
         case FF_PROFILE_H264_CONSTRAINED_BASELINE:
-        case FF_PROFILE_H264_BASELINE:         PROFILE(VDP_DECODER_PROFILE_H264_BASELINE);
-        case FF_PROFILE_H264_MAIN:             PROFILE(VDP_DECODER_PROFILE_H264_MAIN);
-        case FF_PROFILE_H264_HIGH:             PROFILE(VDP_DECODER_PROFILE_H264_HIGH);
+        case FF_PROFILE_H264_MAIN:             PROFILE(H264_MAIN);
+        case FF_PROFILE_H264_HIGH:             PROFILE(H264_HIGH);
+#ifdef VDP_DECODER_PROFILE_H264_EXTENDED
+        case FF_PROFILE_H264_EXTENDED:         PROFILE(H264_EXTENDED);
+#endif
         default:                               return AVERROR(EINVAL);
         }
     case AV_CODEC_ID_WMV3:
    case AV_CODEC_ID_VC1:
         switch (avctx->profile) {
-        case FF_PROFILE_VC1_SIMPLE:            PROFILE(VDP_DECODER_PROFILE_VC1_SIMPLE);
-        case FF_PROFILE_VC1_MAIN:              PROFILE(VDP_DECODER_PROFILE_VC1_MAIN);
-        case FF_PROFILE_VC1_ADVANCED:          PROFILE(VDP_DECODER_PROFILE_VC1_ADVANCED);
+        case FF_PROFILE_VC1_SIMPLE:            PROFILE(VC1_SIMPLE);
+        case FF_PROFILE_VC1_MAIN:              PROFILE(VC1_MAIN);
+        case FF_PROFILE_VC1_ADVANCED:          PROFILE(VC1_ADVANCED);
         default:                               return AVERROR(EINVAL);
         }
     }
     return AVERROR(EINVAL);
+#undef PROFILE
 }
+#endif /* FF_API_VDPAU_PROFILE */
 
 AVVDPAUContext *av_vdpau_alloc_context(void)
 {
@@ -273,6 +372,9 @@ int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device,
 {
     VDPAUHWContext *hwctx;
 
+    if (flags & ~(AV_HWACCEL_FLAG_IGNORE_LEVEL|AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH))
+        return AVERROR(EINVAL);
+
     if (av_reallocp(&avctx->hwaccel_context, sizeof(*hwctx)))
         return AVERROR(ENOMEM);
 
@@ -282,6 +384,7 @@ int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device,
     hwctx->context.decoder  = VDP_INVALID_HANDLE;
     hwctx->device           = device;
     hwctx->get_proc_address = get_proc;
+    hwctx->flags            = flags;
     hwctx->reset            = 1;
     return 0;
 }
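
For illustration only (not part of the patch): a minimal sketch of how an application might use the two public entry points this change touches, av_vdpau_bind_context() with its now-validated flags argument and the new av_vdpau_get_surface_parameters() for sizing video surfaces. The names vdp_device, vdp_get_proc_address, vdp_video_surface_create, get_format and alloc_video_surface are assumptions made for the sketch; the VDPAU device and function pointers are presumed to have been obtained elsewhere (for example via vdp_device_create_x11() and VdpGetProcAddress lookups).

/* Illustrative caller of the API changed above; see the note preceding this
 * block for which names are assumptions. */
#include <stdint.h>
#include <vdpau/vdpau.h>
#include <libavcodec/avcodec.h>
#include <libavcodec/vdpau.h>

static VdpDevice              vdp_device;               /* assumed initialized */
static VdpGetProcAddress     *vdp_get_proc_address;     /* assumed initialized */
static VdpVideoSurfaceCreate *vdp_video_surface_create; /* assumed initialized */

/* get_format callback: accept VDPAU and bind the device to the codec context.
 * AV_HWACCEL_FLAG_IGNORE_LEVEL takes the "level = 0" path added to
 * ff_vdpau_common_init() above. */
static enum AVPixelFormat get_format(AVCodecContext *avctx,
                                     const enum AVPixelFormat *fmts)
{
    const enum AVPixelFormat *p;

    for (p = fmts; *p != AV_PIX_FMT_NONE; p++) {
        if (*p != AV_PIX_FMT_VDPAU)
            continue;
        if (av_vdpau_bind_context(avctx, vdp_device, vdp_get_proc_address,
                                  AV_HWACCEL_FLAG_IGNORE_LEVEL) == 0)
            return AV_PIX_FMT_VDPAU;
        break; /* binding failed; fall back below */
    }
    /* Fall back to a software format if VDPAU is unavailable. */
    return avcodec_default_get_format(avctx, fmts);
}

/* Allocate a video surface with the chroma type and padded dimensions the
 * decoder expects, as reported by av_vdpau_get_surface_parameters(). */
static VdpVideoSurface alloc_video_surface(AVCodecContext *avctx)
{
    VdpChromaType type;
    uint32_t width, height;
    VdpVideoSurface surface = VDP_INVALID_HANDLE;

    if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height) < 0)
        return VDP_INVALID_HANDLE; /* pixel format not usable with VDPAU */
    if (vdp_video_surface_create(vdp_device, type, width, height,
                                 &surface) != VDP_STATUS_OK)
        return VDP_INVALID_HANDLE;
    return surface;
}

An application would install get_format() via avctx->get_format before avcodec_open2(); passing 0 for flags instead of AV_HWACCEL_FLAG_IGNORE_LEVEL keeps the stricter level check introduced above.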