diff --git a/libavcodec/vdpau.c b/libavcodec/vdpau.c
index 44eef20ab2fe06b8b88b990727198835b081832a..68d0813f658d91be864d8c87249c0328c9a6f5cc 100644
--- a/libavcodec/vdpau.c
+++ b/libavcodec/vdpau.c
  */
 
 #include <limits.h>
-#include "libavutil/avassert.h"
+
 #include "avcodec.h"
 #include "internal.h"
-#include "h264.h"
+#include "h264dec.h"
 #include "vc1.h"
-
-#undef NDEBUG
-#include <assert.h>
-
 #include "vdpau.h"
 #include "vdpau_internal.h"
 
@@ -64,6 +60,46 @@ static int vdpau_error(VdpStatus status)
     }
 }
 
+int av_vdpau_get_surface_parameters(AVCodecContext *avctx,
+                                    VdpChromaType *type,
+                                    uint32_t *width, uint32_t *height)
+{
+    VdpChromaType t;
+    uint32_t w = avctx->coded_width;
+    uint32_t h = avctx->coded_height;
+
+    /* See <vdpau/vdpau.h> for per-type alignment constraints. */
+    switch (avctx->sw_pix_fmt) {
+    case AV_PIX_FMT_YUV420P:
+    case AV_PIX_FMT_YUVJ420P:
+        t = VDP_CHROMA_TYPE_420;
+        w = (w + 1) & ~1;
+        h = (h + 3) & ~3;
+        break;
+    case AV_PIX_FMT_YUV422P:
+    case AV_PIX_FMT_YUVJ422P:
+        t = VDP_CHROMA_TYPE_422;
+        w = (w + 1) & ~1;
+        h = (h + 1) & ~1;
+        break;
+    case AV_PIX_FMT_YUV444P:
+    case AV_PIX_FMT_YUVJ444P:
+        t = VDP_CHROMA_TYPE_444;
+        h = (h + 1) & ~1;
+        break;
+    default:
+        return AVERROR(ENOSYS);
+    }
+
+    if (type)
+        *type = t;
+    if (width)
+        *width = w;
+    if (height)
+        *height = h;
+    return 0;
+}
+
 int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
                          int level)
 {
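
[Editor's note, not part of this patch: a minimal usage sketch of the new
av_vdpau_get_surface_parameters() entry point added above, assuming an
already-configured AVCodecContext whose sw_pix_fmt is one of the supported
formats; the helper function name is hypothetical.]

#include <libavcodec/avcodec.h>
#include <libavcodec/vdpau.h>

/* Illustration only: query the chroma type and the padded surface size
 * the VDPAU decoder will need for this stream. */
static int query_surface_geometry(AVCodecContext *avctx)
{
    VdpChromaType chroma;
    uint32_t width, height;
    int ret = av_vdpau_get_surface_parameters(avctx, &chroma, &width, &height);

    if (ret < 0)
        return ret; /* AVERROR(ENOSYS) for unsupported pixel formats */

    /* For 4:2:0 content the width is rounded up to a multiple of 2 and the
     * height to a multiple of 4, per the constraints in <vdpau/vdpau.h>. */
    av_log(avctx, AV_LOG_INFO, "VDPAU surface: chroma type %u, %ux%u\n",
           (unsigned)chroma, (unsigned)width, (unsigned)height);
    return 0;
}
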
@@ -76,23 +112,79 @@ int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
     VdpStatus status;
     VdpBool supported;
     uint32_t max_level, max_mb, max_width, max_height;
-    /* See vdpau/vdpau.h for alignment constraints. */
-    uint32_t width  = (avctx->coded_width + 1) & ~1;
-    uint32_t height = (avctx->coded_height + 3) & ~3;
+    VdpChromaType type;
+    uint32_t width;
+    uint32_t height;
 
     vdctx->width            = UINT32_MAX;
     vdctx->height           = UINT32_MAX;
-    hwctx->reset            = 0;
 
-    if (hwctx->context.decoder != VDP_INVALID_HANDLE) {
-        vdctx->decoder = hwctx->context.decoder;
-        vdctx->render  = hwctx->context.render;
-        vdctx->device  = VDP_INVALID_HANDLE;
-        return 0; /* Decoder created by user */
-    }
+    if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
+        return AVERROR(ENOSYS);
 
-    vdctx->device           = hwctx->device;
-    vdctx->get_proc_address = hwctx->get_proc_address;
+    if (hwctx) {
+        hwctx->reset            = 0;
+
+        if (hwctx->context.decoder != VDP_INVALID_HANDLE) {
+            vdctx->decoder = hwctx->context.decoder;
+            vdctx->render  = hwctx->context.render;
+            vdctx->device  = VDP_INVALID_HANDLE;
+            return 0; /* Decoder created by user */
+        }
+
+        vdctx->device           = hwctx->device;
+        vdctx->get_proc_address = hwctx->get_proc_address;
+
+        if (hwctx->flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
+            level = 0;
+
+        if (!(hwctx->flags & AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH) &&
+            type != VDP_CHROMA_TYPE_420)
+            return AVERROR(ENOSYS);
+    } else {
+        AVHWFramesContext *frames_ctx = NULL;
+        AVVDPAUDeviceContext *dev_ctx;
+
+        // We assume the hw_frames_ctx always survives until ff_vdpau_common_uninit
+        // is called. This holds true as the user is not allowed to touch
+        // hw_device_ctx or hw_frames_ctx after get_format (and ff_get_format
+        // itself also uninits before unreffing hw_frames_ctx).
+        if (avctx->hw_frames_ctx) {
+            frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
+        } else if (avctx->hw_device_ctx) {
+            int ret;
+
+            avctx->hw_frames_ctx = av_hwframe_ctx_alloc(avctx->hw_device_ctx);
+            if (!avctx->hw_frames_ctx)
+                return AVERROR(ENOMEM);
+
+            frames_ctx            = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
+            frames_ctx->format    = AV_PIX_FMT_VDPAU;
+            frames_ctx->sw_format = avctx->sw_pix_fmt;
+            frames_ctx->width     = avctx->coded_width;
+            frames_ctx->height    = avctx->coded_height;
+
+            ret = av_hwframe_ctx_init(avctx->hw_frames_ctx);
+            if (ret < 0) {
+                av_buffer_unref(&avctx->hw_frames_ctx);
+                return ret;
+            }
+        }
+
+        if (!frames_ctx) {
+            av_log(avctx, AV_LOG_ERROR, "A hardware frames context is "
+                   "required for VDPAU decoding.\n");
+            return AVERROR(EINVAL);
+        }
+
+        dev_ctx = frames_ctx->device_ctx->hwctx;
+
+        vdctx->device           = dev_ctx->device;
+        vdctx->get_proc_address = dev_ctx->get_proc_address;
+
+        if (avctx->hwaccel_flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
+            level = 0;
+    }
 
     if (level < 0)
         return AVERROR(ENOTSUP);
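
[Editor's note, illustrative only: with the branch added above, an application
no longer has to populate avctx->hwaccel_context itself; attaching a VDPAU
device reference before avcodec_open2() is enough for lavc to derive
hw_frames_ctx on its own. A minimal sketch, with get_format selection and
error handling elided; the helper name is hypothetical.]

#include <libavcodec/avcodec.h>
#include <libavutil/hwcontext.h>

/* Illustration only: supply just a device reference and let the code path
 * above allocate and initialize the hardware frames context. */
static int attach_vdpau_device(AVCodecContext *avctx)
{
    AVBufferRef *device_ref = NULL;
    int ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VDPAU,
                                     NULL /* default display */, NULL, 0);
    if (ret < 0)
        return ret;

    avctx->hw_device_ctx = device_ref; /* ownership passes to avctx */
    return 0;
}
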
@@ -105,7 +197,7 @@ int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
     else
         surface_query_caps = func;
 
-    status = surface_query_caps(vdctx->device, VDP_CHROMA_TYPE_420, &supported,
+    status = surface_query_caps(vdctx->device, type, &supported,
                                 &max_width, &max_height);
     if (status != VDP_STATUS_OK)
         return vdpau_error(status);
@@ -123,6 +215,14 @@ int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
 
     status = decoder_query_caps(vdctx->device, profile, &supported, &max_level,
                                 &max_mb, &max_width, &max_height);
+#ifdef VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE
+    if ((status != VDP_STATUS_OK || supported != VDP_TRUE) && profile == VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE) {
+        profile = VDP_DECODER_PROFILE_H264_MAIN;
+        status = decoder_query_caps(vdctx->device, profile, &supported,
+                                    &max_level, &max_mb,
+                                    &max_width, &max_height);
+    }
+#endif
     if (status != VDP_STATUS_OK)
         return vdpau_error(status);
 
@@ -185,7 +285,7 @@ static int ff_vdpau_common_reinit(AVCodecContext *avctx)
     if (vdctx->device == VDP_INVALID_HANDLE)
         return 0; /* Decoder created by user */
     if (avctx->coded_width == vdctx->width &&
-        avctx->coded_height == vdctx->height && !hwctx->reset)
+        avctx->coded_height == vdctx->height && (!hwctx || !hwctx->reset))
         return 0;
 
     avctx->hwaccel->uninit(avctx);
@@ -214,7 +314,7 @@ int ff_vdpau_common_end_frame(AVCodecContext *avctx, AVFrame *frame,
     if (val < 0)
         return val;
 
-    status = vdctx->render(vdctx->decoder, surf, (void *)&pic_ctx->info,
+    status = vdctx->render(vdctx->decoder, surf, &pic_ctx->info,
                            pic_ctx->bitstream_buffers_used,
                            pic_ctx->bitstream_buffers);
 
@@ -222,7 +322,7 @@ int ff_vdpau_common_end_frame(AVCodecContext *avctx, AVFrame *frame,
     return vdpau_error(status);
 }
 
-#if CONFIG_H263_VDPAU_HWACCEL  || CONFIG_MPEG1_VDPAU_HWACCEL || \
+#if CONFIG_MPEG1_VDPAU_HWACCEL || \
     CONFIG_MPEG2_VDPAU_HWACCEL || CONFIG_MPEG4_VDPAU_HWACCEL || \
     CONFIG_VC1_VDPAU_HWACCEL   || CONFIG_WMV3_VDPAU_HWACCEL
 int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx)
@@ -260,48 +360,54 @@ int ff_vdpau_add_buffer(struct vdpau_picture_context *pic_ctx,
     return 0;
 }
 
+#if FF_API_VDPAU_PROFILE
 int av_vdpau_get_profile(AVCodecContext *avctx, VdpDecoderProfile *profile)
 {
-#define PROFILE(prof)       \
-do {                        \
-    *profile = prof;        \
-    return 0;               \
+#define PROFILE(prof)                      \
+do {                                       \
+    *profile = VDP_DECODER_PROFILE_##prof; \
+    return 0;                              \
 } while (0)
 
     switch (avctx->codec_id) {
-    case AV_CODEC_ID_MPEG1VIDEO:               PROFILE(VDP_DECODER_PROFILE_MPEG1);
+    case AV_CODEC_ID_MPEG1VIDEO:               PROFILE(MPEG1);
     case AV_CODEC_ID_MPEG2VIDEO:
         switch (avctx->profile) {
-        case FF_PROFILE_MPEG2_MAIN:            PROFILE(VDP_DECODER_PROFILE_MPEG2_MAIN);
-        case FF_PROFILE_MPEG2_SIMPLE:          PROFILE(VDP_DECODER_PROFILE_MPEG2_SIMPLE);
+        case FF_PROFILE_MPEG2_MAIN:            PROFILE(MPEG2_MAIN);
+        case FF_PROFILE_MPEG2_SIMPLE:          PROFILE(MPEG2_SIMPLE);
         default:                               return AVERROR(EINVAL);
         }
-    case AV_CODEC_ID_H263:                     PROFILE(VDP_DECODER_PROFILE_MPEG4_PART2_ASP);
+    case AV_CODEC_ID_H263:                     PROFILE(MPEG4_PART2_ASP);
     case AV_CODEC_ID_MPEG4:
         switch (avctx->profile) {
-        case FF_PROFILE_MPEG4_SIMPLE:          PROFILE(VDP_DECODER_PROFILE_MPEG4_PART2_SP);
-        case FF_PROFILE_MPEG4_ADVANCED_SIMPLE: PROFILE(VDP_DECODER_PROFILE_MPEG4_PART2_ASP);
+        case FF_PROFILE_MPEG4_SIMPLE:          PROFILE(MPEG4_PART2_SP);
+        case FF_PROFILE_MPEG4_ADVANCED_SIMPLE: PROFILE(MPEG4_PART2_ASP);
         default:                               return AVERROR(EINVAL);
         }
     case AV_CODEC_ID_H264:
         switch (avctx->profile & ~FF_PROFILE_H264_INTRA) {
+        case FF_PROFILE_H264_BASELINE:         PROFILE(H264_BASELINE);
         case FF_PROFILE_H264_CONSTRAINED_BASELINE:
-        case FF_PROFILE_H264_BASELINE:         PROFILE(VDP_DECODER_PROFILE_H264_BASELINE);
-        case FF_PROFILE_H264_MAIN:             PROFILE(VDP_DECODER_PROFILE_H264_MAIN);
-        case FF_PROFILE_H264_HIGH:             PROFILE(VDP_DECODER_PROFILE_H264_HIGH);
+        case FF_PROFILE_H264_MAIN:             PROFILE(H264_MAIN);
+        case FF_PROFILE_H264_HIGH:             PROFILE(H264_HIGH);
+#ifdef VDP_DECODER_PROFILE_H264_EXTENDED
+        case FF_PROFILE_H264_EXTENDED:         PROFILE(H264_EXTENDED);
+#endif
         default:                               return AVERROR(EINVAL);
         }
     case AV_CODEC_ID_WMV3:
     case AV_CODEC_ID_VC1:
         switch (avctx->profile) {
-        case FF_PROFILE_VC1_SIMPLE:            PROFILE(VDP_DECODER_PROFILE_VC1_SIMPLE);
-        case FF_PROFILE_VC1_MAIN:              PROFILE(VDP_DECODER_PROFILE_VC1_MAIN);
-        case FF_PROFILE_VC1_ADVANCED:          PROFILE(VDP_DECODER_PROFILE_VC1_ADVANCED);
+        case FF_PROFILE_VC1_SIMPLE:            PROFILE(VC1_SIMPLE);
+        case FF_PROFILE_VC1_MAIN:              PROFILE(VC1_MAIN);
+        case FF_PROFILE_VC1_ADVANCED:          PROFILE(VC1_ADVANCED);
         default:                               return AVERROR(EINVAL);
         }
     }
     return AVERROR(EINVAL);
+#undef PROFILE
 }
+#endif /* FF_API_VDPAU_PROFILE */
 
 AVVDPAUContext *av_vdpau_alloc_context(void)
 {
@@ -313,6 +419,9 @@ int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device,
 {
     VDPAUHWContext *hwctx;
 
+    if (flags & ~(AV_HWACCEL_FLAG_IGNORE_LEVEL|AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH))
+        return AVERROR(EINVAL);
+
     if (av_reallocp(&avctx->hwaccel_context, sizeof(*hwctx)))
         return AVERROR(ENOMEM);
 
@@ -322,6 +431,7 @@ int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device,
     hwctx->context.decoder  = VDP_INVALID_HANDLE;
     hwctx->device           = device;
     hwctx->get_proc_address = get_proc;
+    hwctx->flags            = flags;
     hwctx->reset            = 1;
     return 0;
 }
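
[Editor's note, not part of this patch: with the validation added above,
av_vdpau_bind_context() now rejects any flag other than
AV_HWACCEL_FLAG_IGNORE_LEVEL and AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH with
AVERROR(EINVAL). A usage sketch under that assumption; the wrapper name is
hypothetical.]

#include <libavcodec/vdpau.h>

/* Illustration only: bind an application-created VDPAU device to the decoder
 * context, requesting the two flags this patch recognises. */
static int bind_vdpau_device(AVCodecContext *avctx, VdpDevice device,
                             VdpGetProcAddress *get_proc_address)
{
    unsigned flags = AV_HWACCEL_FLAG_IGNORE_LEVEL |
                     AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH;

    /* Any bit outside these two would make the call fail with EINVAL. */
    return av_vdpau_bind_context(avctx, device, get_proc_address, flags);
}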