if (ret < 0)
goto fail;
- pic->crop = h->ps.sps->crop;
- pic->crop_top = h->ps.sps->crop_top;
- pic->crop_left= h->ps.sps->crop_left;
-
if (h->avctx->hwaccel) {
const AVHWAccel *hwaccel = h->avctx->hwaccel;
av_assert0(!pic->hwaccel_picture_private);
h->avctx->coded_width = h1->avctx->coded_width;
h->avctx->width = h1->avctx->width;
h->avctx->height = h1->avctx->height;
+ h->width_from_caller = h1->width_from_caller;
+ h->height_from_caller = h1->height_from_caller;
h->coded_picture_number = h1->coded_picture_number;
h->first_field = h1->first_field;
h->picture_structure = h1->picture_structure;
memcpy(&h->poc, &h1->poc, sizeof(h->poc));
- memcpy(h->default_ref, h1->default_ref, sizeof(h->default_ref));
memcpy(h->short_ref, h1->short_ref, sizeof(h->short_ref));
memcpy(h->long_ref, h1->long_ref, sizeof(h->long_ref));
memcpy(h->delayed_pic, h1->delayed_pic, sizeof(h->delayed_pic));
pic->f->pict_type = h->slice_ctx[0].slice_type;
+ pic->f->crop_left = h->crop_left;
+ pic->f->crop_right = h->crop_right;
+ pic->f->crop_top = h->crop_top;
+ pic->f->crop_bottom = h->crop_bottom;
+
if ((ret = alloc_picture(h, pic)) < 0)
return ret;
if(!h->frame_recovered && !h->avctx->hwaccel
static enum AVPixelFormat get_pixel_format(H264Context *h, int force_callback)
{
#define HWACCEL_MAX (CONFIG_H264_DXVA2_HWACCEL + \
- CONFIG_H264_D3D11VA_HWACCEL + \
+ (CONFIG_H264_D3D11VA_HWACCEL * 2) + \
CONFIG_H264_VAAPI_HWACCEL + \
(CONFIG_H264_VDA_HWACCEL * 2) + \
CONFIG_H264_VIDEOTOOLBOX_HWACCEL + \
#endif
#if CONFIG_H264_D3D11VA_HWACCEL
*fmt++ = AV_PIX_FMT_D3D11VA_VLD;
+ *fmt++ = AV_PIX_FMT_D3D11;
#endif
#if CONFIG_H264_VAAPI_HWACCEL
*fmt++ = AV_PIX_FMT_VAAPI;
static int init_dimensions(H264Context *h)
{
    const SPS *sps = (const SPS*)h->ps.sps;
    /* Start from the SPS frame-cropping rectangle; these may be overridden
     * below if the container-supplied dimensions take precedence. */
    int cr = sps->crop_right;
    int cl = sps->crop_left;
    int ct = sps->crop_top;
    int cb = sps->crop_bottom;
    int width  = h->width  - (cr + cl);
    int height = h->height - (ct + cb);

    /* Cropping must leave at least one pixel in each dimension. */
    av_assert0(sps->crop_right + sps->crop_left < (unsigned)h->width);
    av_assert0(sps->crop_top + sps->crop_bottom < (unsigned)h->height);

    /* Handle container cropping: honour the caller-provided dimensions only
     * when they are compatible with the bitstream — same macroblock-aligned
     * size, not larger than the SPS-cropped size, and the SPS does not crop
     * from the top/left (the caller's crop is anchored at the origin). */
    if (h->width_from_caller > 0 && h->height_from_caller > 0 &&
        !sps->crop_top && !sps->crop_left &&
        FFALIGN(h->width_from_caller,  16) == FFALIGN(width,  16) &&
        FFALIGN(h->height_from_caller, 16) == FFALIGN(height, 16) &&
        h->width_from_caller  <= width &&
        h->height_from_caller <= height) {
        width  = h->width_from_caller;
        height = h->height_from_caller;
        cl = 0;
        ct = 0;
        cr = h->width  - width;
        cb = h->height - height;
    } else {
        /* Caller dimensions are unusable; forget them so later reconfigs
         * fall back to the bitstream cropping. */
        h->width_from_caller  = 0;
        h->height_from_caller = 0;
    }

    /* coded_* reflect the full decoded surface; width/height the visible
     * (cropped) picture. The crop offsets are kept for export on frames. */
    h->avctx->coded_width  = h->width;
    h->avctx->coded_height = h->height;
    h->avctx->width        = width;
    h->avctx->height       = height;
    h->crop_right          = cr;
    h->crop_left           = cl;
    h->crop_top            = ct;
    h->crop_bottom         = cb;

    return 0;
}
if (sps->pic_struct_present_flag && h->sei.picture_timing.present) {
H264SEIPictureTiming *pt = &h->sei.picture_timing;
switch (pt->pic_struct) {
- case SEI_PIC_STRUCT_FRAME:
+ case H264_SEI_PIC_STRUCT_FRAME:
break;
- case SEI_PIC_STRUCT_TOP_FIELD:
- case SEI_PIC_STRUCT_BOTTOM_FIELD:
+ case H264_SEI_PIC_STRUCT_TOP_FIELD:
+ case H264_SEI_PIC_STRUCT_BOTTOM_FIELD:
cur->f->interlaced_frame = 1;
break;
- case SEI_PIC_STRUCT_TOP_BOTTOM:
- case SEI_PIC_STRUCT_BOTTOM_TOP:
+ case H264_SEI_PIC_STRUCT_TOP_BOTTOM:
+ case H264_SEI_PIC_STRUCT_BOTTOM_TOP:
if (FIELD_OR_MBAFF_PICTURE(h))
cur->f->interlaced_frame = 1;
else
// try to flag soft telecine progressive
cur->f->interlaced_frame = h->prev_interlaced_frame;
break;
- case SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
- case SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
+ case H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
+ case H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
/* Signal the possibility of telecined film externally
* (pic_struct 5,6). From these hints, let the applications
* decide if they apply deinterlacing. */
cur->f->repeat_pict = 1;
break;
- case SEI_PIC_STRUCT_FRAME_DOUBLING:
+ case H264_SEI_PIC_STRUCT_FRAME_DOUBLING:
cur->f->repeat_pict = 2;
break;
- case SEI_PIC_STRUCT_FRAME_TRIPLING:
+ case H264_SEI_PIC_STRUCT_FRAME_TRIPLING:
cur->f->repeat_pict = 4;
break;
}
if ((pt->ct_type & 3) &&
- pt->pic_struct <= SEI_PIC_STRUCT_BOTTOM_TOP)
+ pt->pic_struct <= H264_SEI_PIC_STRUCT_BOTTOM_TOP)
cur->f->interlaced_frame = (pt->ct_type & (1 << 1)) != 0;
} else {
/* Derive interlacing flag from used decoding process. */
if (sps->pic_struct_present_flag && h->sei.picture_timing.present) {
/* Use picture timing SEI information. Even if it is a
* information of a past frame, better than nothing. */
- if (h->sei.picture_timing.pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM ||
- h->sei.picture_timing.pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM_TOP)
+ if (h->sei.picture_timing.pic_struct == H264_SEI_PIC_STRUCT_TOP_BOTTOM ||
+ h->sei.picture_timing.pic_struct == H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP)
cur->f->top_field_first = 1;
else
cur->f->top_field_first = 0;
h->avctx->properties |= FF_CODEC_PROPERTY_CLOSED_CAPTIONS;
}
+ if (h->sei.alternative_transfer.present &&
+ av_color_transfer_name(h->sei.alternative_transfer.preferred_transfer_characteristics) &&
+ h->sei.alternative_transfer.preferred_transfer_characteristics != AVCOL_TRC_UNSPECIFIED) {
+ h->avctx->color_trc = cur->f->color_trc = h->sei.alternative_transfer.preferred_transfer_characteristics;
+ }
+
return 0;
}
(pps->weighted_bipred_idc == 1 &&
sl->slice_type_nos == AV_PICTURE_TYPE_B)) {
ret = ff_h264_pred_weight_table(&sl->gb, sps, sl->ref_count,
- sl->slice_type_nos, &sl->pwt, h->avctx);
+ sl->slice_type_nos, &sl->pwt,
+ picture_structure, h->avctx);
if (ret < 0)
return ret;
}
}
sl->last_qscale_diff = 0;
- tmp = pps->init_qp + get_se_golomb(&sl->gb);
+ tmp = pps->init_qp + (unsigned)get_se_golomb(&sl->gb);
if (tmp > 51 + 6 * (sps->bit_depth_luma - 8)) {
av_log(h->avctx, AV_LOG_ERROR, "QP %u out of range\n", tmp);
return AVERROR_INVALIDDATA;
sl->deblocking_filter ^= 1; // 1<->0
if (sl->deblocking_filter) {
- sl->slice_alpha_c0_offset = get_se_golomb(&sl->gb) * 2;
- sl->slice_beta_offset = get_se_golomb(&sl->gb) * 2;
- if (sl->slice_alpha_c0_offset > 12 ||
- sl->slice_alpha_c0_offset < -12 ||
- sl->slice_beta_offset > 12 ||
- sl->slice_beta_offset < -12) {
+ int slice_alpha_c0_offset_div2 = get_se_golomb(&sl->gb);
+ int slice_beta_offset_div2 = get_se_golomb(&sl->gb);
+ if (slice_alpha_c0_offset_div2 > 6 ||
+ slice_alpha_c0_offset_div2 < -6 ||
+ slice_beta_offset_div2 > 6 ||
+ slice_beta_offset_div2 < -6) {
av_log(h->avctx, AV_LOG_ERROR,
"deblocking filter parameters %d %d out of range\n",
- sl->slice_alpha_c0_offset, sl->slice_beta_offset);
+ slice_alpha_c0_offset_div2, slice_beta_offset_div2);
return AVERROR_INVALIDDATA;
}
+ sl->slice_alpha_c0_offset = slice_alpha_c0_offset_div2 * 2;
+ sl->slice_beta_offset = slice_beta_offset_div2 * 2;
}
}