#include "internal.h"
#include "cabac.h"
#include "cabac_functions.h"
-#include "dsputil.h"
#include "error_resilience.h"
#include "avcodec.h"
#include "h264.h"
{ 36, 32, 58, 34, 46, 43 },
};
-static const enum AVPixelFormat h264_hwaccel_pixfmt_list_420[] = {
-#if CONFIG_H264_DXVA2_HWACCEL
- AV_PIX_FMT_DXVA2_VLD,
-#endif
-#if CONFIG_H264_VAAPI_HWACCEL
- AV_PIX_FMT_VAAPI_VLD,
-#endif
-#if CONFIG_H264_VDA_HWACCEL
- AV_PIX_FMT_VDA_VLD,
-#endif
-#if CONFIG_H264_VDPAU_HWACCEL
- AV_PIX_FMT_VDPAU,
-#endif
- AV_PIX_FMT_YUV420P,
- AV_PIX_FMT_NONE
-};
-
-static const enum AVPixelFormat h264_hwaccel_pixfmt_list_jpeg_420[] = {
-#if CONFIG_H264_DXVA2_HWACCEL
- AV_PIX_FMT_DXVA2_VLD,
-#endif
-#if CONFIG_H264_VAAPI_HWACCEL
- AV_PIX_FMT_VAAPI_VLD,
-#endif
-#if CONFIG_H264_VDA_HWACCEL
- AV_PIX_FMT_VDA_VLD,
-#endif
-#if CONFIG_H264_VDPAU_HWACCEL
- AV_PIX_FMT_VDPAU,
-#endif
- AV_PIX_FMT_YUVJ420P,
- AV_PIX_FMT_NONE
-};
-
static void release_unused_pictures(H264Context *h, int remove_current)
{
av_buffer_allocz);
h->mb_type_pool = av_buffer_pool_init((big_mb_num + h->mb_stride) *
sizeof(uint32_t), av_buffer_allocz);
- h->motion_val_pool = av_buffer_pool_init(2 * (b4_array_size + 4) *
- sizeof(int16_t), av_buffer_allocz);
- h->ref_index_pool = av_buffer_pool_init(4 * mb_array_size, av_buffer_allocz);
+ h->motion_val_pool = av_buffer_pool_init(2 * (b4_array_size + 4) *
+ sizeof(int16_t), av_buffer_allocz);
+ h->ref_index_pool = av_buffer_pool_init(4 * mb_array_size, av_buffer_allocz);
if (!h->qscale_table_pool || !h->mb_type_pool || !h->motion_val_pool ||
!h->ref_index_pool) {
if (h->avctx->hwaccel) {
const AVHWAccel *hwaccel = h->avctx->hwaccel;
av_assert0(!pic->hwaccel_picture_private);
- if (hwaccel->priv_data_size) {
- pic->hwaccel_priv_buf = av_buffer_allocz(hwaccel->priv_data_size);
+ if (hwaccel->frame_priv_data_size) {
+ pic->hwaccel_priv_buf = av_buffer_allocz(hwaccel->frame_priv_data_size);
if (!pic->hwaccel_priv_buf)
return AVERROR(ENOMEM);
pic->hwaccel_picture_private = pic->hwaccel_priv_buf->data;
}
#define IN_RANGE(a, b, size) (((a) >= (b)) && ((a) < ((b) + (size))))
-#undef REBASE_PICTURE
+
#define REBASE_PICTURE(pic, new_ctx, old_ctx) \
((pic && pic >= old_ctx->DPB && \
pic < old_ctx->DPB + H264_MAX_PICTURE_COUNT) ? \
/* We mark the current picture as non-reference after allocating it, so
* that if we break out due to an error it can be released automatically
- * in the next ff_MPV_frame_start().
+ * in the next ff_mpv_frame_start().
*/
h->cur_pic_ptr->reference = 0;
static enum AVPixelFormat get_pixel_format(H264Context *h)
{
+/* Build an AV_PIX_FMT_NONE-terminated candidate list on the stack and let
+ * ff_get_format() negotiate with the caller, instead of returning a single
+ * pix_fmt directly as the previous code did.  HWACCEL_MAX is the worst-case
+ * number of hwaccel entries; VDA contributes two (VDA_VLD and VDA). */
+#define HWACCEL_MAX (CONFIG_H264_DXVA2_HWACCEL + \
+ CONFIG_H264_VAAPI_HWACCEL + \
+ (CONFIG_H264_VDA_HWACCEL * 2) + \
+ CONFIG_H264_VDPAU_HWACCEL)
+ /* +2: one software format appended after the hwaccel entries, plus the
+ * AV_PIX_FMT_NONE terminator written before the ff_get_format() call. */
+ enum AVPixelFormat pix_fmts[HWACCEL_MAX + 2], *fmt = pix_fmts;
+ const enum AVPixelFormat *choices = pix_fmts;
+
switch (h->sps.bit_depth_luma) {
case 9:
if (CHROMA444(h)) {
if (h->avctx->colorspace == AVCOL_SPC_RGB) {
- return AV_PIX_FMT_GBRP9;
+ *fmt++ = AV_PIX_FMT_GBRP9;
} else
- return AV_PIX_FMT_YUV444P9;
+ *fmt++ = AV_PIX_FMT_YUV444P9;
} else if (CHROMA422(h))
- return AV_PIX_FMT_YUV422P9;
+ *fmt++ = AV_PIX_FMT_YUV422P9;
else
- return AV_PIX_FMT_YUV420P9;
+ *fmt++ = AV_PIX_FMT_YUV420P9;
break;
case 10:
if (CHROMA444(h)) {
if (h->avctx->colorspace == AVCOL_SPC_RGB) {
- return AV_PIX_FMT_GBRP10;
+ *fmt++ = AV_PIX_FMT_GBRP10;
} else
- return AV_PIX_FMT_YUV444P10;
+ *fmt++ = AV_PIX_FMT_YUV444P10;
} else if (CHROMA422(h))
- return AV_PIX_FMT_YUV422P10;
+ *fmt++ = AV_PIX_FMT_YUV422P10;
else
- return AV_PIX_FMT_YUV420P10;
+ *fmt++ = AV_PIX_FMT_YUV420P10;
break;
case 8:
+ /* VDPAU is offered ahead of the per-chroma-sampling branches, i.e. for
+ * every 8-bit chroma format handled below. */
+#if CONFIG_H264_VDPAU_HWACCEL
+ *fmt++ = AV_PIX_FMT_VDPAU;
+#endif
if (CHROMA444(h)) {
- if (h->avctx->colorspace == AVCOL_SPC_RGB) {
- return AV_PIX_FMT_GBRP;
- } else
- return h->avctx->color_range == AVCOL_RANGE_JPEG ? AV_PIX_FMT_YUVJ444P
- : AV_PIX_FMT_YUV444P;
+ if (h->avctx->colorspace == AVCOL_SPC_RGB)
+ *fmt++ = AV_PIX_FMT_GBRP;
+ else if (h->avctx->color_range == AVCOL_RANGE_JPEG)
+ *fmt++ = AV_PIX_FMT_YUVJ444P;
+ else
+ *fmt++ = AV_PIX_FMT_YUV444P;
} else if (CHROMA422(h)) {
- return h->avctx->color_range == AVCOL_RANGE_JPEG ? AV_PIX_FMT_YUVJ422P
- : AV_PIX_FMT_YUV422P;
+ if (h->avctx->color_range == AVCOL_RANGE_JPEG)
+ *fmt++ = AV_PIX_FMT_YUVJ422P;
+ else
+ *fmt++ = AV_PIX_FMT_YUV422P;
} else {
- return ff_get_format(h->avctx, h->avctx->codec->pix_fmts ?
- h->avctx->codec->pix_fmts :
- h->avctx->color_range == AVCOL_RANGE_JPEG ?
- h264_hwaccel_pixfmt_list_jpeg_420 :
- h264_hwaccel_pixfmt_list_420);
+ /* 4:2:0, 8-bit: the only case that previously consulted the static
+ * hwaccel lists; those lists are now built inline here. */
+#if CONFIG_H264_DXVA2_HWACCEL
+ *fmt++ = AV_PIX_FMT_DXVA2_VLD;
+#endif
+#if CONFIG_H264_VAAPI_HWACCEL
+ *fmt++ = AV_PIX_FMT_VAAPI_VLD;
+#endif
+#if CONFIG_H264_VDA_HWACCEL
+ *fmt++ = AV_PIX_FMT_VDA_VLD;
+ *fmt++ = AV_PIX_FMT_VDA;
+#endif
+ /* A codec-provided pix_fmts list overrides the locally built one,
+ * preserving the old behaviour of the removed ff_get_format() call. */
+ if (h->avctx->codec->pix_fmts)
+ choices = h->avctx->codec->pix_fmts;
+ else if (h->avctx->color_range == AVCOL_RANGE_JPEG)
+ *fmt++ = AV_PIX_FMT_YUVJ420P;
+ else
+ *fmt++ = AV_PIX_FMT_YUV420P;
}
break;
default:
"Unsupported bit depth %d\n", h->sps.bit_depth_luma);
return AVERROR_INVALIDDATA;
}
+
+ /* Terminate the list; 'choices' still points at pix_fmts unless the
+ * codec-level pix_fmts override above was taken. */
+ *fmt = AV_PIX_FMT_NONE;
+
+ return ff_get_format(h->avctx, choices);
}
/* export coded and cropped frame dimensions to AVCodecContext */
{
int width = h->width - (h->sps.crop_right + h->sps.crop_left);
int height = h->height - (h->sps.crop_top + h->sps.crop_bottom);
+ int crop_present = h->sps.crop_left || h->sps.crop_top ||
+ h->sps.crop_right || h->sps.crop_bottom;
/* handle container cropping */
- if (!h->sps.crop &&
+ if (!crop_present &&
FFALIGN(h->avctx->width, 16) == h->width &&
FFALIGN(h->avctx->height, 16) == h->height) {
width = h->avctx->width;
return AVERROR_INVALIDDATA;
av_log(h->avctx, AV_LOG_WARNING, "Ignoring cropping information.\n");
- h->sps.crop_bottom = h->sps.crop_top = h->sps.crop_right = h->sps.crop_left = 0;
+ h->sps.crop_bottom =
+ h->sps.crop_top =
+ h->sps.crop_right =
+ h->sps.crop_left =
h->sps.crop = 0;
width = h->width;
h->avctx->thread_count : 1;
int i, ret;
- h->avctx->sample_aspect_ratio = h->sps.sar;
- av_assert0(h->avctx->sample_aspect_ratio.den);
+ ff_set_sar(h->avctx, h->sps.sar);
av_pix_fmt_get_chroma_sub_sample(h->avctx->pix_fmt,
&h->chroma_x_shift, &h->chroma_y_shift);
int64_t den = h->sps.time_scale;
if (h->x264_build < 44U)
den *= 2;
- av_reduce(&h->avctx->time_base.num, &h->avctx->time_base.den,
+ av_reduce(&h->avctx->framerate.den, &h->avctx->framerate.num,
h->sps.num_units_in_tick, den, 1 << 30);
}
if (!c)
return AVERROR(ENOMEM);
c->avctx = h->avctx;
- c->dsp = h->dsp;
c->vdsp = h->vdsp;
c->h264dsp = h->h264dsp;
c->h264qpel = h->h264qpel;
c->chroma_y_shift = h->chroma_y_shift;
c->qscale = h->qscale;
c->droppable = h->droppable;
- c->data_partitioning = h->data_partitioning;
c->low_delay = h->low_delay;
c->mb_width = h->mb_width;
c->mb_height = h->mb_height;
if (get_bits_left(&h->gb) == 0) {
er_add_slice(h, h->resync_mb_x, h->resync_mb_y,
- h->mb_x - 1, h->mb_y,
- ER_MB_END);
+ h->mb_x - 1, h->mb_y, ER_MB_END);
return 0;
} else {
er_add_slice(h, h->resync_mb_x, h->resync_mb_y,
- h->mb_x - 1, h->mb_y,
- ER_MB_END);
+ h->mb_x - 1, h->mb_y, ER_MB_END);
return AVERROR_INVALIDDATA;
}
if (get_bits_left(&h->gb) == 0) {
er_add_slice(h, h->resync_mb_x, h->resync_mb_y,
- h->mb_x - 1, h->mb_y,
- ER_MB_END);
+ h->mb_x - 1, h->mb_y, ER_MB_END);
if (h->mb_x > lf_x_start)
loop_filter(h, lf_x_start, h->mb_x);