X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=libavcodec%2Fvp9.c;h=32776ebae764ab510fc95ae25ae7d8f4ad2eafae;hb=a247ac640df3da573cd661065bf53f37863e2b46;hp=2a3a4555b94b16fc05004d6928cf79ba0f3d7899;hpb=1f4cf92cfbd3accbae582ac63126ed5570ddfd37;p=ffmpeg

diff --git a/libavcodec/vp9.c b/libavcodec/vp9.c
index 2a3a4555b94..32776ebae76 100644
--- a/libavcodec/vp9.c
+++ b/libavcodec/vp9.c
@@ -23,7 +23,7 @@
 
 #include "avcodec.h"
 #include "get_bits.h"
-#include "hwaccel.h"
+#include "hwconfig.h"
 #include "internal.h"
 #include "profiles.h"
 #include "thread.h"
@@ -34,6 +34,7 @@
 #include "vp9dec.h"
 #include "libavutil/avassert.h"
 #include "libavutil/pixdesc.h"
+#include "libavutil/video_enc_params.h"
 
 #define VP9_SYNCCODE 0x498342
 
@@ -93,6 +94,13 @@ static void vp9_free_entries(AVCodecContext *avctx) {}
 static int vp9_alloc_entries(AVCodecContext *avctx, int n) { return 0; }
 #endif
 
+static void vp9_tile_data_free(VP9TileData *td)
+{
+    av_freep(&td->b_base);
+    av_freep(&td->block_base);
+    av_freep(&td->block_structure);
+}
+
 static void vp9_frame_unref(AVCodecContext *avctx, VP9Frame *f)
 {
     ff_thread_release_buffer(avctx, &f->tf);
@@ -199,9 +207,6 @@ static int update_size(AVCodecContext *avctx, int w, int h)
 
         switch (s->pix_fmt) {
         case AV_PIX_FMT_YUV420P:
-#if CONFIG_VP9_VDPAU_HWACCEL
-            *fmtp++ = AV_PIX_FMT_VDPAU;
-#endif
         case AV_PIX_FMT_YUV420P10:
 #if CONFIG_VP9_DXVA2_HWACCEL
             *fmtp++ = AV_PIX_FMT_DXVA2_VLD;
@@ -215,6 +220,9 @@ static int update_size(AVCodecContext *avctx, int w, int h)
 #endif
 #if CONFIG_VP9_VAAPI_HWACCEL
             *fmtp++ = AV_PIX_FMT_VAAPI;
+#endif
+#if CONFIG_VP9_VDPAU_HWACCEL
+            *fmtp++ = AV_PIX_FMT_VDPAU;
 #endif
             break;
         case AV_PIX_FMT_YUV420P12:
@@ -223,6 +231,9 @@ static int update_size(AVCodecContext *avctx, int w, int h)
 #endif
 #if CONFIG_VP9_VAAPI_HWACCEL
             *fmtp++ = AV_PIX_FMT_VAAPI;
+#endif
+#if CONFIG_VP9_VDPAU_HWACCEL
+            *fmtp++ = AV_PIX_FMT_VDPAU;
 #endif
             break;
         }
@@ -281,10 +292,8 @@ static int update_size(AVCodecContext *avctx, int w, int h)
 #undef assign
 
     if (s->td) {
-        for (i = 0; i < s->active_tile_cols; i++) {
-            av_freep(&s->td[i].b_base);
-            av_freep(&s->td[i].block_base);
-        }
+        for (i = 0; i < s->active_tile_cols; i++)
+            vp9_tile_data_free(&s->td[i]);
     }
 
     if (s->s.h.bpp != s->last_bpp) {
@@ -306,8 +315,7 @@ static int update_block_buffers(AVCodecContext *avctx)
     if (td->b_base && td->block_base && s->block_alloc_using_2pass == s->s.frames[CUR_FRAME].uses_2pass)
         return 0;
 
-    av_free(td->b_base);
-    av_free(td->block_base);
+    vp9_tile_data_free(td);
     chroma_blocks = 64 * 64 >> (s->ss_h + s->ss_v);
     chroma_eobs   = 16 * 16 >> (s->ss_h + s->ss_v);
     if (s->s.frames[CUR_FRAME].uses_2pass) {
@@ -323,13 +331,16 @@ static int update_block_buffers(AVCodecContext *avctx)
         td->eob_base = (uint8_t *) (td->uvblock_base[1] + sbs * chroma_blocks * bytesperpixel);
         td->uveob_base[0] = td->eob_base + 16 * 16 * sbs;
         td->uveob_base[1] = td->uveob_base[0] + chroma_eobs * sbs;
-    } else {
-        for (i = 1; i < s->active_tile_cols; i++) {
-            if (s->td[i].b_base && s->td[i].block_base) {
-                av_free(s->td[i].b_base);
-                av_free(s->td[i].block_base);
-            }
+
+        if (avctx->export_side_data & AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS) {
+            td->block_structure = av_malloc_array(s->cols * s->rows, sizeof(*td->block_structure));
+            if (!td->block_structure)
+                return AVERROR(ENOMEM);
         }
+    } else {
+        for (i = 1; i < s->active_tile_cols; i++)
+            vp9_tile_data_free(&s->td[i]);
+
         for (i = 0; i < s->active_tile_cols; i++) {
             s->td[i].b_base = av_malloc(sizeof(VP9Block));
             s->td[i].block_base = av_mallocz((64 * 64 + 2 * chroma_blocks) * bytesperpixel * sizeof(int16_t) +
@@ -341,6 +352,12 @@ static int update_block_buffers(AVCodecContext *avctx)
             s->td[i].eob_base = (uint8_t *) (s->td[i].uvblock_base[1] + chroma_blocks * bytesperpixel);
             s->td[i].uveob_base[0] = s->td[i].eob_base + 16 * 16;
             s->td[i].uveob_base[1] = s->td[i].uveob_base[0] + chroma_eobs;
+
+            if (avctx->export_side_data & AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS) {
+                s->td[i].block_structure = av_malloc_array(s->cols * s->rows, sizeof(*td->block_structure));
+                if (!s->td[i].block_structure)
+                    return AVERROR(ENOMEM);
+            }
         }
     }
     s->block_alloc_using_2pass = s->s.frames[CUR_FRAME].uses_2pass;
@@ -773,10 +790,8 @@ static int decode_frame_header(AVCodecContext *avctx,
         VP56RangeCoder *rc;
 
         if (s->td) {
-            for (i = 0; i < s->active_tile_cols; i++) {
-                av_free(s->td[i].b_base);
-                av_free(s->td[i].block_base);
-            }
+            for (i = 0; i < s->active_tile_cols; i++)
+                vp9_tile_data_free(&s->td[i]);
             av_free(s->td);
         }
 
@@ -804,6 +819,7 @@ static int decode_frame_header(AVCodecContext *avctx,
 
     /* check reference frames */
     if (!s->s.h.keyframe && !s->s.h.intraonly) {
+        int valid_ref_frame = 0;
         for (i = 0; i < 3; i++) {
             AVFrame *ref = s->s.refs[s->s.h.refidx[i]].f;
             int refw = ref->width, refh = ref->height;
@@ -817,17 +833,25 @@ static int decode_frame_header(AVCodecContext *avctx,
             } else if (refw == w && refh == h) {
                 s->mvscale[i][0] = s->mvscale[i][1] = 0;
             } else {
+                /* Check to make sure at least one of frames that */
+                /* this frame references has valid dimensions     */
                 if (w * 2 < refw || h * 2 < refh || w > 16 * refw || h > 16 * refh) {
-                    av_log(avctx, AV_LOG_ERROR,
+                    av_log(avctx, AV_LOG_WARNING,
                            "Invalid ref frame dimensions %dx%d for frame size %dx%d\n",
                            refw, refh, w, h);
-                    return AVERROR_INVALIDDATA;
+                    s->mvscale[i][0] = s->mvscale[i][1] = REF_INVALID_SCALE;
+                    continue;
                 }
                 s->mvscale[i][0] = (refw << 14) / w;
                 s->mvscale[i][1] = (refh << 14) / h;
                 s->mvstep[i][0] = 16 * s->mvscale[i][0] >> 14;
                 s->mvstep[i][1] = 16 * s->mvscale[i][1] >> 14;
             }
+            valid_ref_frame++;
+        }
+        if (!valid_ref_frame) {
+            av_log(avctx, AV_LOG_ERROR, "No valid reference frame is found, bitstream not supported\n");
+            return AVERROR_INVALIDDATA;
         }
     }
 
@@ -873,6 +897,7 @@ static int decode_frame_header(AVCodecContext *avctx,
         } else {
             memset(&s->td[i].counts, 0, sizeof(s->td[0].counts));
         }
+        s->td[i].nb_block_structure = 0;
     }
 
     /* FIXME is it faster to not copy here, but do it down in the fw updates
@@ -1204,10 +1229,8 @@ static void free_buffers(VP9Context *s)
     int i;
 
     av_freep(&s->intra_pred_data[0]);
-    for (i = 0; i < s->active_tile_cols; i++) {
-        av_freep(&s->td[i].b_base);
-        av_freep(&s->td[i].block_base);
-    }
+    for (i = 0; i < s->active_tile_cols; i++)
+        vp9_tile_data_free(&s->td[i]);
 }
 
 static av_cold int vp9_decode_free(AVCodecContext *avctx)
@@ -1216,17 +1239,14 @@ static av_cold int vp9_decode_free(AVCodecContext *avctx)
     int i;
 
     for (i = 0; i < 3; i++) {
-        if (s->s.frames[i].tf.f->buf[0])
-            vp9_frame_unref(avctx, &s->s.frames[i]);
+        vp9_frame_unref(avctx, &s->s.frames[i]);
         av_frame_free(&s->s.frames[i].tf.f);
     }
     av_buffer_pool_uninit(&s->frame_extradata_pool);
     for (i = 0; i < 8; i++) {
-        if (s->s.refs[i].f->buf[0])
-            ff_thread_release_buffer(avctx, &s->s.refs[i]);
+        ff_thread_release_buffer(avctx, &s->s.refs[i]);
        av_frame_free(&s->s.refs[i].f);
-        if (s->next_refs[i].f->buf[0])
-            ff_thread_release_buffer(avctx, &s->next_refs[i]);
+        ff_thread_release_buffer(avctx, &s->next_refs[i]);
         av_frame_free(&s->next_refs[i].f);
     }
 
@@ -1479,6 +1499,58 @@ int loopfilter_proc(AVCodecContext *avctx)
 }
 #endif
 
+static int vp9_export_enc_params(VP9Context *s, VP9Frame *frame)
+{
+    AVVideoEncParams *par;
+    unsigned int tile, nb_blocks = 0;
+
+    if (s->s.h.segmentation.enabled) {
+        for (tile = 0; tile < s->active_tile_cols; tile++)
+            nb_blocks += s->td[tile].nb_block_structure;
+    }
+
+    par = av_video_enc_params_create_side_data(frame->tf.f,
+        AV_VIDEO_ENC_PARAMS_VP9, nb_blocks);
+    if (!par)
+        return AVERROR(ENOMEM);
+
+    par->qp             = s->s.h.yac_qi;
+    par->delta_qp[0][0] = s->s.h.ydc_qdelta;
+    par->delta_qp[1][0] = s->s.h.uvdc_qdelta;
+    par->delta_qp[2][0] = s->s.h.uvdc_qdelta;
+    par->delta_qp[1][1] = s->s.h.uvac_qdelta;
+    par->delta_qp[2][1] = s->s.h.uvac_qdelta;
+
+    if (nb_blocks) {
+        unsigned int block = 0;
+        unsigned int tile, block_tile;
+
+        for (tile = 0; tile < s->active_tile_cols; tile++) {
+            VP9TileData *td = &s->td[tile];
+
+            for (block_tile = 0; block_tile < td->nb_block_structure; block_tile++) {
+                AVVideoBlockParams *b = av_video_enc_params_block(par, block++);
+                unsigned int row = td->block_structure[block_tile].row;
+                unsigned int col = td->block_structure[block_tile].col;
+                uint8_t seg_id = frame->segmentation_map[row * 8 * s->sb_cols + col];
+
+                b->src_x = col * 8;
+                b->src_y = row * 8;
+                b->w = 1 << (3 + td->block_structure[block_tile].block_size_idx_x);
+                b->h = 1 << (3 + td->block_structure[block_tile].block_size_idx_y);
+
+                if (s->s.h.segmentation.feat[seg_id].q_enabled) {
+                    b->delta_qp = s->s.h.segmentation.feat[seg_id].q_val;
+                    if (s->s.h.segmentation.absolute_vals)
+                        b->delta_qp -= par->qp;
+                }
+            }
+        }
+    }
+
+    return 0;
+}
+
 static int vp9_decode_frame(AVCodecContext *avctx, void *frame,
                             int *got_frame, AVPacket *pkt)
 {
@@ -1500,11 +1572,6 @@ static int vp9_decode_frame(AVCodecContext *avctx, void *frame,
         if ((ret = av_frame_ref(frame, s->s.refs[ref].f)) < 0)
             return ret;
         ((AVFrame *)frame)->pts = pkt->pts;
-#if FF_API_PKT_PTS
-FF_DISABLE_DEPRECATION_WARNINGS
-        ((AVFrame *)frame)->pkt_pts = pkt->pts;
-FF_ENABLE_DEPRECATION_WARNINGS
-#endif
         ((AVFrame *)frame)->pkt_dts = pkt->dts;
         for (i = 0; i < 8; i++) {
             if (s->next_refs[i].f->buf[0])
@@ -1625,6 +1692,7 @@ FF_ENABLE_DEPRECATION_WARNINGS
             s->td[i].eob = s->td[i].eob_base;
             s->td[i].uveob[0] = s->td[i].uveob_base[0];
             s->td[i].uveob[1] = s->td[i].uveob_base[1];
+            s->td[i].error_info = 0;
         }
 
 #if HAVE_THREADS
@@ -1681,6 +1749,17 @@ FF_ENABLE_DEPRECATION_WARNINGS
     } while (s->pass++ == 1);
     ff_thread_report_progress(&s->s.frames[CUR_FRAME].tf, INT_MAX, 0);
 
+    if (s->td->error_info < 0) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to decode tile data\n");
+        s->td->error_info = 0;
+        return AVERROR_INVALIDDATA;
+    }
+    if (avctx->export_side_data & AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS) {
+        ret = vp9_export_enc_params(s, &s->s.frames[CUR_FRAME]);
+        if (ret < 0)
+            return ret;
+    }
+
 finish:
     // ref frame setup
     for (i = 0; i < 8; i++) {
@@ -1794,7 +1873,7 @@ static int vp9_decode_update_thread_context(AVCodecContext *dst, const AVCodecCo
 }
 #endif
 
-AVCodec ff_vp9_decoder = {
+const AVCodec ff_vp9_decoder = {
     .name                  = "vp9",
     .long_name             = NULL_IF_CONFIG_SMALL("Google VP9"),
     .type                  = AVMEDIA_TYPE_VIDEO,
@@ -1810,7 +1889,7 @@ AVCodec ff_vp9_decoder = {
     .update_thread_context = ONLY_IF_THREADS_ENABLED(vp9_decode_update_thread_context),
     .profiles              = NULL_IF_CONFIG_SMALL(ff_vp9_profiles),
     .bsfs                  = "vp9_superframe_split",
-    .hw_configs            = (const AVCodecHWConfigInternal*[]) {
+    .hw_configs            = (const AVCodecHWConfigInternal *const []) {
 #if CONFIG_VP9_DXVA2_HWACCEL
         HWACCEL_DXVA2(vp9),
 #endif
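Note (not part of the patch above): the AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS path added here attaches per-block quantizer information to each decoded frame as AV_FRAME_DATA_VIDEO_ENC_PARAMS side data. Below is a minimal consumer sketch; the helper name dump_vp9_block_qp is made up for illustration, and it assumes the caller set avctx->export_side_data |= AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS before avcodec_open2() and obtained the frame from avcodec_receive_frame().

/* Illustrative sketch only: read back the side data that the VP9 decoder
 * exports when AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS is enabled. */
#include <libavutil/frame.h>
#include <libavutil/log.h>
#include <libavutil/video_enc_params.h>

static void dump_vp9_block_qp(const AVFrame *frame)
{
    AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_VIDEO_ENC_PARAMS);
    AVVideoEncParams *par;
    unsigned int i;

    if (!sd)
        return; /* export not enabled, or no parameters attached to this frame */
    par = (AVVideoEncParams *)sd->data;

    /* Frame-level values come from the uncompressed header (yac_qi plus the
     * dc/ac delta fields); per-block entries are present only when the
     * bitstream uses segmentation. */
    av_log(NULL, AV_LOG_INFO, "base QP %d, %u blocks\n", (int)par->qp, par->nb_blocks);

    for (i = 0; i < par->nb_blocks; i++) {
        const AVVideoBlockParams *b = av_video_enc_params_block(par, i);
        av_log(NULL, AV_LOG_INFO, "  %dx%d block at (%d,%d), delta_qp %d\n",
               b->w, b->h, b->src_x, b->src_y, (int)b->delta_qp);
    }
}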