#include "libavutil/avassert.h"
#include "libavutil/common.h"
+#include "libavutil/pixdesc.h"
#include "libavutil/opt.h"
#include "libavutil/mastering_display_metadata.h"
#include "avcodec.h"
#include "cbs.h"
#include "cbs_h265.h"
+#include "h265_profile_level.h"
#include "hevc.h"
#include "hevc_sei.h"
#include "internal.h"
SEI_CONTENT_LIGHT_LEVEL = 0x10,
};
+// Per-picture codec state, attached to each VAAPIEncodePicture via its
+// priv_data (allocated by the common layer; size given by
+// picture_priv_data_size in the VAAPIEncodeType).  Moving this state out
+// of the context struct allows B-frames to be tracked per picture.
+typedef struct VAAPIEncodeH265Picture {
+    // POC of this picture, counted from the last IDR frame
+    // (display_order - last_idr_frame).
+    int pic_order_cnt;
+
+    // Display order of the most recent IDR frame; copied forward from the
+    // previous picture for non-IDR pictures.
+    int64_t last_idr_frame;
+
+    // HEVC_NAL_* unit type used for this picture's slices
+    // (IDR_W_RADL / CRA_NUT / TRAIL_R / RASL_* / ...).
+    int slice_nal_unit;
+    // HEVC_SLICE_I / HEVC_SLICE_P / HEVC_SLICE_B.
+    int slice_type;
+    // pic_type field written into the access unit delimiter (AUD) NAL:
+    // 0 = I only, 1 = I/P, 2 = I/P/B.
+    int pic_type;
+} VAAPIEncodeH265Picture;
+
typedef struct VAAPIEncodeH265Context {
VAAPIEncodeContext common;
int qp;
int aud;
int profile;
+ int tier;
int level;
int sei;
// Derived settings.
- unsigned int ctu_width;
- unsigned int ctu_height;
-
int fixed_qp_idr;
int fixed_qp_p;
int fixed_qp_b;
- // Stream state.
- int64_t last_idr_frame;
- int pic_order_cnt;
-
- int slice_nal_unit;
- int slice_type;
- int pic_type;
-
// Writer structures.
H265RawAUD raw_aud;
H265RawVPS raw_vps;
err = vaapi_encode_h265_write_access_unit(avctx, data, data_len, au);
fail:
- ff_cbs_fragment_uninit(priv->cbc, au);
+ ff_cbs_fragment_reset(priv->cbc, au);
return err;
}
err = vaapi_encode_h265_write_access_unit(avctx, data, data_len, au);
fail:
- ff_cbs_fragment_uninit(priv->cbc, au);
+ ff_cbs_fragment_reset(priv->cbc, au);
return err;
}
if (err < 0)
goto fail;
- ff_cbs_fragment_uninit(priv->cbc, au);
+ ff_cbs_fragment_reset(priv->cbc, au);
*type = VAEncPackedHeaderRawData;
return 0;
}
fail:
- ff_cbs_fragment_uninit(priv->cbc, au);
+ ff_cbs_fragment_reset(priv->cbc, au);
return err;
}
H265RawVPS *vps = &priv->raw_vps;
H265RawSPS *sps = &priv->raw_sps;
H265RawPPS *pps = &priv->raw_pps;
+ H265RawProfileTierLevel *ptl = &vps->profile_tier_level;
H265RawVUI *vui = &sps->vui;
VAEncSequenceParameterBufferHEVC *vseq = ctx->codec_sequence_params;
VAEncPictureParameterBufferHEVC *vpic = ctx->codec_picture_params;
+ const AVPixFmtDescriptor *desc;
+ int chroma_format, bit_depth;
int i;
- memset(&priv->current_access_unit, 0,
- sizeof(priv->current_access_unit));
-
memset(vps, 0, sizeof(*vps));
memset(sps, 0, sizeof(*sps));
memset(pps, 0, sizeof(*pps));
+ desc = av_pix_fmt_desc_get(priv->common.input_frames->sw_format);
+ av_assert0(desc);
+ if (desc->nb_components == 1) {
+ chroma_format = 0;
+ } else {
+ if (desc->log2_chroma_w == 1 && desc->log2_chroma_h == 1) {
+ chroma_format = 1;
+ } else if (desc->log2_chroma_w == 1 && desc->log2_chroma_h == 0) {
+ chroma_format = 2;
+ } else if (desc->log2_chroma_w == 0 && desc->log2_chroma_h == 0) {
+ chroma_format = 3;
+ } else {
+ av_log(avctx, AV_LOG_ERROR, "Chroma format of input pixel format "
+ "%s is not supported.\n", desc->name);
+ return AVERROR(EINVAL);
+ }
+ }
+ bit_depth = desc->comp[0].depth;
+
+
// VPS
vps->nal_unit_header = (H265RawNALUnitHeader) {
vps->vps_max_sub_layers_minus1 = 0;
vps->vps_temporal_id_nesting_flag = 1;
- vps->profile_tier_level = (H265RawProfileTierLevel) {
- .general_profile_space = 0,
- .general_profile_idc = avctx->profile,
- .general_tier_flag = 0,
+ ptl->general_profile_space = 0;
+ ptl->general_profile_idc = avctx->profile;
+ ptl->general_tier_flag = priv->tier;
- .general_progressive_source_flag = 1,
- .general_interlaced_source_flag = 0,
- .general_non_packed_constraint_flag = 1,
- .general_frame_only_constraint_flag = 1,
+ if (chroma_format == 1) {
+ ptl->general_profile_compatibility_flag[1] = bit_depth == 8;
+ ptl->general_profile_compatibility_flag[2] = bit_depth <= 10;
+ }
+ ptl->general_profile_compatibility_flag[4] = 1;
- .general_level_idc = avctx->level,
- };
- vps->profile_tier_level.general_profile_compatibility_flag[avctx->profile & 31] = 1;
+ ptl->general_progressive_source_flag = 1;
+ ptl->general_interlaced_source_flag = 0;
+ ptl->general_non_packed_constraint_flag = 1;
+ ptl->general_frame_only_constraint_flag = 1;
+
+ ptl->general_max_12bit_constraint_flag = bit_depth <= 12;
+ ptl->general_max_10bit_constraint_flag = bit_depth <= 10;
+ ptl->general_max_8bit_constraint_flag = bit_depth == 8;
+
+ ptl->general_max_422chroma_constraint_flag = chroma_format <= 2;
+ ptl->general_max_420chroma_constraint_flag = chroma_format <= 1;
+ ptl->general_max_monochrome_constraint_flag = chroma_format == 0;
+
+ ptl->general_intra_constraint_flag = ctx->gop_size == 1;
+
+ ptl->general_lower_bit_rate_constraint_flag = 1;
+
+ if (avctx->level != FF_LEVEL_UNKNOWN) {
+ ptl->general_level_idc = avctx->level;
+ } else {
+ const H265LevelDescriptor *level;
+
+ level = ff_h265_guess_level(ptl, avctx->bit_rate,
+ ctx->surface_width, ctx->surface_height,
+ ctx->nb_slices, 1, 1,
+ (ctx->b_per_p > 0) + 1);
+ if (level) {
+ av_log(avctx, AV_LOG_VERBOSE, "Using level %s.\n", level->name);
+ ptl->general_level_idc = level->level_idc;
+ } else {
+ av_log(avctx, AV_LOG_VERBOSE, "Stream will not conform to "
+ "any normal level; using level 8.5.\n");
+ ptl->general_level_idc = 255;
+ // The tier flag must be set in level 8.5.
+ ptl->general_tier_flag = 1;
+ }
+ }
vps->vps_sub_layer_ordering_info_present_flag = 0;
- vps->vps_max_dec_pic_buffering_minus1[0] = (ctx->b_per_p > 0) + 1;
- vps->vps_max_num_reorder_pics[0] = (ctx->b_per_p > 0);
+ vps->vps_max_dec_pic_buffering_minus1[0] = ctx->max_b_depth + 1;
+ vps->vps_max_num_reorder_pics[0] = ctx->max_b_depth;
vps->vps_max_latency_increase_plus1[0] = 0;
vps->vps_max_layer_id = 0;
sps->sps_seq_parameter_set_id = 0;
- sps->chroma_format_idc = 1; // YUV 4:2:0.
+ sps->chroma_format_idc = chroma_format;
sps->separate_colour_plane_flag = 0;
sps->pic_width_in_luma_samples = ctx->surface_width;
sps->conformance_window_flag = 0;
}
- sps->bit_depth_luma_minus8 =
- avctx->profile == FF_PROFILE_HEVC_MAIN_10 ? 2 : 0;
- sps->bit_depth_chroma_minus8 = sps->bit_depth_luma_minus8;
+ sps->bit_depth_luma_minus8 = bit_depth - 8;
+ sps->bit_depth_chroma_minus8 = bit_depth - 8;
sps->log2_max_pic_order_cnt_lsb_minus4 = 8;
{ 80, 33 }, { 18, 11 }, { 15, 11 }, { 64, 33 },
{ 160, 99 }, { 4, 3 }, { 3, 2 }, { 2, 1 },
};
- int i;
+ int num, den, i;
+ av_reduce(&num, &den, avctx->sample_aspect_ratio.num,
+ avctx->sample_aspect_ratio.den, 65535);
for (i = 0; i < FF_ARRAY_ELEMS(sar_idc); i++) {
- if (avctx->sample_aspect_ratio.num == sar_idc[i].num &&
- avctx->sample_aspect_ratio.den == sar_idc[i].den) {
+ if (num == sar_idc[i].num &&
+ den == sar_idc[i].den) {
vui->aspect_ratio_idc = i;
break;
}
}
if (i >= FF_ARRAY_ELEMS(sar_idc)) {
vui->aspect_ratio_idc = 255;
- vui->sar_width = avctx->sample_aspect_ratio.num;
- vui->sar_height = avctx->sample_aspect_ratio.den;
+ vui->sar_width = num;
+ vui->sar_height = den;
}
vui->aspect_ratio_info_present_flag = 1;
}
.general_level_idc = vps->profile_tier_level.general_level_idc,
.general_tier_flag = vps->profile_tier_level.general_tier_flag,
- .intra_period = avctx->gop_size,
- .intra_idr_period = avctx->gop_size,
+ .intra_period = ctx->gop_size,
+ .intra_idr_period = ctx->gop_size,
.ip_period = ctx->b_per_p + 1,
- .bits_per_second = avctx->bit_rate,
+ .bits_per_second = ctx->va_bit_rate,
.pic_width_in_luma_samples = sps->pic_width_in_luma_samples,
.pic_height_in_luma_samples = sps->pic_height_in_luma_samples,
static int vaapi_encode_h265_init_picture_params(AVCodecContext *avctx,
VAAPIEncodePicture *pic)
{
+ VAAPIEncodeContext *ctx = avctx->priv_data;
VAAPIEncodeH265Context *priv = avctx->priv_data;
+ VAAPIEncodeH265Picture *hpic = pic->priv_data;
+ VAAPIEncodePicture *prev = pic->prev;
+ VAAPIEncodeH265Picture *hprev = prev ? prev->priv_data : NULL;
VAEncPictureParameterBufferHEVC *vpic = pic->codec_picture_params;
int i;
if (pic->type == PICTURE_TYPE_IDR) {
av_assert0(pic->display_order == pic->encode_order);
- priv->last_idr_frame = pic->display_order;
+ hpic->last_idr_frame = pic->display_order;
- priv->slice_nal_unit = HEVC_NAL_IDR_W_RADL;
- priv->slice_type = HEVC_SLICE_I;
- priv->pic_type = 0;
+ hpic->slice_nal_unit = HEVC_NAL_IDR_W_RADL;
+ hpic->slice_type = HEVC_SLICE_I;
+ hpic->pic_type = 0;
} else {
- av_assert0(pic->encode_order > priv->last_idr_frame);
+ av_assert0(prev);
+ hpic->last_idr_frame = hprev->last_idr_frame;
if (pic->type == PICTURE_TYPE_I) {
- priv->slice_nal_unit = HEVC_NAL_CRA_NUT;
- priv->slice_type = HEVC_SLICE_I;
- priv->pic_type = 0;
+ hpic->slice_nal_unit = HEVC_NAL_CRA_NUT;
+ hpic->slice_type = HEVC_SLICE_I;
+ hpic->pic_type = 0;
} else if (pic->type == PICTURE_TYPE_P) {
av_assert0(pic->refs[0]);
- priv->slice_nal_unit = HEVC_NAL_TRAIL_R;
- priv->slice_type = HEVC_SLICE_P;
- priv->pic_type = 1;
+ hpic->slice_nal_unit = HEVC_NAL_TRAIL_R;
+ hpic->slice_type = HEVC_SLICE_P;
+ hpic->pic_type = 1;
} else {
+ VAAPIEncodePicture *irap_ref;
av_assert0(pic->refs[0] && pic->refs[1]);
- if (pic->refs[1]->type == PICTURE_TYPE_I)
- priv->slice_nal_unit = HEVC_NAL_RASL_N;
- else
- priv->slice_nal_unit = HEVC_NAL_TRAIL_N;
- priv->slice_type = HEVC_SLICE_B;
- priv->pic_type = 2;
+ for (irap_ref = pic; irap_ref; irap_ref = irap_ref->refs[1]) {
+ if (irap_ref->type == PICTURE_TYPE_I)
+ break;
+ }
+ if (pic->b_depth == ctx->max_b_depth) {
+ hpic->slice_nal_unit = irap_ref ? HEVC_NAL_RASL_N
+ : HEVC_NAL_TRAIL_N;
+ } else {
+ hpic->slice_nal_unit = irap_ref ? HEVC_NAL_RASL_R
+ : HEVC_NAL_TRAIL_R;
+ }
+ hpic->slice_type = HEVC_SLICE_B;
+ hpic->pic_type = 2;
}
}
- priv->pic_order_cnt = pic->display_order - priv->last_idr_frame;
+ hpic->pic_order_cnt = pic->display_order - hpic->last_idr_frame;
if (priv->aud) {
priv->aud_needed = 1;
.nuh_layer_id = 0,
.nuh_temporal_id_plus1 = 1,
},
- .pic_type = priv->pic_type,
+ .pic_type = hpic->pic_type,
};
} else {
priv->aud_needed = 0;
vpic->decoded_curr_pic = (VAPictureHEVC) {
.picture_id = pic->recon_surface,
- .pic_order_cnt = priv->pic_order_cnt,
+ .pic_order_cnt = hpic->pic_order_cnt,
.flags = 0,
};
for (i = 0; i < pic->nb_refs; i++) {
- VAAPIEncodePicture *ref = pic->refs[i];
+ VAAPIEncodePicture *ref = pic->refs[i];
+ VAAPIEncodeH265Picture *href;
+
av_assert0(ref && ref->encode_order < pic->encode_order);
+ href = ref->priv_data;
vpic->reference_frames[i] = (VAPictureHEVC) {
.picture_id = ref->recon_surface,
- .pic_order_cnt = ref->display_order - priv->last_idr_frame,
+ .pic_order_cnt = href->pic_order_cnt,
.flags = (ref->display_order < pic->display_order ?
VA_PICTURE_HEVC_RPS_ST_CURR_BEFORE : 0) |
(ref->display_order > pic->display_order ?
vpic->coded_buf = pic->output_buffer;
- vpic->nal_unit_type = priv->slice_nal_unit;
+ vpic->nal_unit_type = hpic->slice_nal_unit;
switch (pic->type) {
case PICTURE_TYPE_IDR:
av_assert0(0 && "invalid picture type");
}
- pic->nb_slices = 1;
-
return 0;
}
VAAPIEncodePicture *pic,
VAAPIEncodeSlice *slice)
{
- VAAPIEncodeContext *ctx = avctx->priv_data;
VAAPIEncodeH265Context *priv = avctx->priv_data;
+ VAAPIEncodeH265Picture *hpic = pic->priv_data;
const H265RawSPS *sps = &priv->raw_sps;
const H265RawPPS *pps = &priv->raw_pps;
H265RawSliceHeader *sh = &priv->raw_slice.header;
int i;
sh->nal_unit_header = (H265RawNALUnitHeader) {
- .nal_unit_type = priv->slice_nal_unit,
+ .nal_unit_type = hpic->slice_nal_unit,
.nuh_layer_id = 0,
.nuh_temporal_id_plus1 = 1,
};
sh->slice_pic_parameter_set_id = pps->pps_pic_parameter_set_id;
- // Currently we only support one slice per frame.
- sh->first_slice_segment_in_pic_flag = 1;
- sh->slice_segment_address = 0;
+ sh->first_slice_segment_in_pic_flag = slice->index == 0;
+ sh->slice_segment_address = slice->block_start;
- sh->slice_type = priv->slice_type;
+ sh->slice_type = hpic->slice_type;
- sh->slice_pic_order_cnt_lsb = priv->pic_order_cnt &
+ sh->slice_pic_order_cnt_lsb = hpic->pic_order_cnt &
(1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4)) - 1;
if (pic->type != PICTURE_TYPE_IDR) {
H265RawSTRefPicSet *rps;
- VAAPIEncodePicture *st;
- int used;
+ const VAAPIEncodeH265Picture *strp;
+ int rps_poc[MAX_DPB_SIZE];
+ int rps_used[MAX_DPB_SIZE];
+ int i, j, poc, rps_pics;
sh->short_term_ref_pic_set_sps_flag = 0;
rps = &sh->short_term_ref_pic_set;
memset(rps, 0, sizeof(*rps));
- for (st = ctx->pic_start; st; st = st->next) {
- if (st->encode_order >= pic->encode_order) {
- // Not yet in DPB.
+ rps_pics = 0;
+ for (i = 0; i < pic->nb_refs; i++) {
+ strp = pic->refs[i]->priv_data;
+ rps_poc[rps_pics] = strp->pic_order_cnt;
+ rps_used[rps_pics] = 1;
+ ++rps_pics;
+ }
+ for (i = 0; i < pic->nb_dpb_pics; i++) {
+ if (pic->dpb[i] == pic)
continue;
+ for (j = 0; j < pic->nb_refs; j++) {
+ if (pic->dpb[i] == pic->refs[j])
+ break;
}
- used = 0;
- for (i = 0; i < pic->nb_refs; i++) {
- if (pic->refs[i] == st)
- used = 1;
- }
- if (!used) {
- // Usually each picture always uses all of the others in the
- // DPB as references. The one case we have to treat here is
- // a non-IDR IRAP picture, which may need to hold unused
- // references across itself to be used for the decoding of
- // following RASL pictures. This looks for such an RASL
- // picture, and keeps the reference if there is one.
- VAAPIEncodePicture *rp;
- for (rp = ctx->pic_start; rp; rp = rp->next) {
- if (rp->encode_order < pic->encode_order)
- continue;
- if (rp->type != PICTURE_TYPE_B)
- continue;
- if (rp->refs[0] == st && rp->refs[1] == pic)
- break;
- }
- if (!rp)
- continue;
- }
- // This only works for one instance of each (delta_poc_sN_minus1
- // is relative to the previous frame in the list, not relative to
- // the current frame directly).
- if (st->display_order < pic->display_order) {
- rps->delta_poc_s0_minus1[rps->num_negative_pics] =
- pic->display_order - st->display_order - 1;
- rps->used_by_curr_pic_s0_flag[rps->num_negative_pics] = used;
- ++rps->num_negative_pics;
- } else {
- rps->delta_poc_s1_minus1[rps->num_positive_pics] =
- st->display_order - pic->display_order - 1;
- rps->used_by_curr_pic_s1_flag[rps->num_positive_pics] = used;
- ++rps->num_positive_pics;
+ if (j < pic->nb_refs)
+ continue;
+ strp = pic->dpb[i]->priv_data;
+ rps_poc[rps_pics] = strp->pic_order_cnt;
+ rps_used[rps_pics] = 0;
+ ++rps_pics;
+ }
+
+ for (i = 1; i < rps_pics; i++) {
+ for (j = i; j > 0; j--) {
+ if (rps_poc[j] > rps_poc[j - 1])
+ break;
+ av_assert0(rps_poc[j] != rps_poc[j - 1]);
+ FFSWAP(int, rps_poc[j], rps_poc[j - 1]);
+ FFSWAP(int, rps_used[j], rps_used[j - 1]);
}
}
+ av_log(avctx, AV_LOG_DEBUG, "RPS for POC %d:",
+ hpic->pic_order_cnt);
+ for (i = 0; i < rps_pics; i++) {
+ av_log(avctx, AV_LOG_DEBUG, " (%d,%d)",
+ rps_poc[i], rps_used[i]);
+ }
+ av_log(avctx, AV_LOG_DEBUG, "\n");
+
+ for (i = 0; i < rps_pics; i++) {
+ av_assert0(rps_poc[i] != hpic->pic_order_cnt);
+ if (rps_poc[i] > hpic->pic_order_cnt)
+ break;
+ }
+
+ rps->num_negative_pics = i;
+ poc = hpic->pic_order_cnt;
+ for (j = i - 1; j >= 0; j--) {
+ rps->delta_poc_s0_minus1[i - 1 - j] = poc - rps_poc[j] - 1;
+ rps->used_by_curr_pic_s0_flag[i - 1 - j] = rps_used[j];
+ poc = rps_poc[j];
+ }
+
+ rps->num_positive_pics = rps_pics - i;
+ poc = hpic->pic_order_cnt;
+ for (j = i; j < rps_pics; j++) {
+ rps->delta_poc_s1_minus1[j - i] = rps_poc[j] - poc - 1;
+ rps->used_by_curr_pic_s1_flag[j - i] = rps_used[j];
+ poc = rps_poc[j];
+ }
+
sh->num_long_term_sps = 0;
sh->num_long_term_pics = 0;
*vslice = (VAEncSliceParameterBufferHEVC) {
.slice_segment_address = sh->slice_segment_address,
- .num_ctu_in_slice = priv->ctu_width * priv->ctu_height,
+ .num_ctu_in_slice = slice->block_size,
.slice_type = sh->slice_type,
.slice_pic_parameter_set_id = sh->slice_pic_parameter_set_id,
.slice_tc_offset_div2 = sh->slice_tc_offset_div2,
.slice_fields.bits = {
- .last_slice_of_pic_flag = 1,
+ .last_slice_of_pic_flag = slice->index == pic->nb_slices - 1,
.dependent_slice_segment_flag = sh->dependent_slice_segment_flag,
.colour_plane_id = sh->colour_plane_id,
.slice_temporal_mvp_enabled_flag =
if (err < 0)
return err;
- priv->ctu_width = FFALIGN(ctx->surface_width, 32) / 32;
- priv->ctu_height = FFALIGN(ctx->surface_height, 32) / 32;
-
- av_log(avctx, AV_LOG_VERBOSE, "Input %ux%u -> Surface %ux%u -> CTU %ux%u.\n",
- avctx->width, avctx->height, ctx->surface_width,
- ctx->surface_height, priv->ctu_width, priv->ctu_height);
-
if (ctx->va_rc_mode == VA_RC_CQP) {
- priv->fixed_qp_p = priv->qp;
+ // Note that VAAPI only supports positive QP values - the range is
+ // therefore always bounded below by 1, even in 10-bit mode where
+ // it should go down to -12.
+
+ priv->fixed_qp_p = av_clip(ctx->rc_quality, 1, 51);
if (avctx->i_quant_factor > 0.0)
- priv->fixed_qp_idr = (int)((priv->fixed_qp_p * avctx->i_quant_factor +
- avctx->i_quant_offset) + 0.5);
+ priv->fixed_qp_idr =
+ av_clip((avctx->i_quant_factor * priv->fixed_qp_p +
+ avctx->i_quant_offset) + 0.5, 1, 51);
else
priv->fixed_qp_idr = priv->fixed_qp_p;
if (avctx->b_quant_factor > 0.0)
- priv->fixed_qp_b = (int)((priv->fixed_qp_p * avctx->b_quant_factor +
- avctx->b_quant_offset) + 0.5);
+ priv->fixed_qp_b =
+ av_clip((avctx->b_quant_factor * priv->fixed_qp_p +
+ avctx->b_quant_offset) + 0.5, 1, 51);
else
priv->fixed_qp_b = priv->fixed_qp_p;
"%d / %d / %d for IDR- / P- / B-frames.\n",
priv->fixed_qp_idr, priv->fixed_qp_p, priv->fixed_qp_b);
- } else if (ctx->va_rc_mode == VA_RC_CBR ||
- ctx->va_rc_mode == VA_RC_VBR) {
- // These still need to be set for pic_init_qp/slice_qp_delta.
+ } else {
+ // These still need to be set for init_qp/slice_qp_delta.
priv->fixed_qp_idr = 30;
priv->fixed_qp_p = 30;
priv->fixed_qp_b = 30;
-
- av_log(avctx, AV_LOG_DEBUG, "Using %s-bitrate = %"PRId64" bps.\n",
- ctx->va_rc_mode == VA_RC_CBR ? "constant" : "variable",
- avctx->bit_rate);
-
- } else {
- av_assert0(0 && "Invalid RC mode.");
}
return 0;
}
+// Profiles this encoder can negotiate, terminated by FF_PROFILE_UNKNOWN.
+// NOTE(review): field order presumed to be { av_profile, bit depth,
+// nb_components, log2 chroma subsampling w/h, VAProfile } — confirm
+// against the VAAPIEncodeProfile declaration in vaapi_encode.h.
+// The REXT entries map 8-/10-bit 4:2:0 REXT streams onto the plain
+// Main/Main10 VAAPI profiles; Main10 requires libva >= 0.37.
+static const VAAPIEncodeProfile vaapi_encode_h265_profiles[] = {
+    { FF_PROFILE_HEVC_MAIN,     8, 3, 1, 1, VAProfileHEVCMain       },
+    { FF_PROFILE_HEVC_REXT,     8, 3, 1, 1, VAProfileHEVCMain       },
+#if VA_CHECK_VERSION(0, 37, 0)
+    { FF_PROFILE_HEVC_MAIN_10, 10, 3, 1, 1, VAProfileHEVCMain10     },
+    { FF_PROFILE_HEVC_REXT,    10, 3, 1, 1, VAProfileHEVCMain10     },
+#endif
+    { FF_PROFILE_UNKNOWN }
+};
+
static const VAAPIEncodeType vaapi_encode_type_h265 = {
+ .profiles = vaapi_encode_h265_profiles,
+
+ .flags = FLAG_SLICE_CONTROL |
+ FLAG_B_PICTURES |
+ FLAG_B_PICTURE_REFERENCES |
+ FLAG_NON_IDR_KEY_PICTURES,
+
+ .default_quality = 25,
+
.configure = &vaapi_encode_h265_configure,
+ .picture_priv_data_size = sizeof(VAAPIEncodeH265Picture),
+
.sequence_params_size = sizeof(VAEncSequenceParameterBufferHEVC),
.init_sequence_params = &vaapi_encode_h265_init_sequence_params,
if (avctx->level == FF_LEVEL_UNKNOWN)
avctx->level = priv->level;
- switch (avctx->profile) {
- case FF_PROFILE_HEVC_MAIN:
- case FF_PROFILE_UNKNOWN:
- ctx->va_profile = VAProfileHEVCMain;
- ctx->va_rt_format = VA_RT_FORMAT_YUV420;
- break;
- case FF_PROFILE_HEVC_MAIN_10:
-#ifdef VA_RT_FORMAT_YUV420_10BPP
- ctx->va_profile = VAProfileHEVCMain10;
- ctx->va_rt_format = VA_RT_FORMAT_YUV420_10BPP;
- break;
-#else
- av_log(avctx, AV_LOG_ERROR, "10-bit encoding is not "
- "supported with this VAAPI version.\n");
- return AVERROR(ENOSYS);
-#endif
- default:
- av_log(avctx, AV_LOG_ERROR, "Unknown H.265 profile %d.\n",
- avctx->profile);
+ if (avctx->level != FF_LEVEL_UNKNOWN && avctx->level & ~0xff) {
+ av_log(avctx, AV_LOG_ERROR, "Invalid level %d: must fit "
+ "in 8-bit unsigned integer.\n", avctx->level);
return AVERROR(EINVAL);
}
- ctx->va_entrypoint = VAEntrypointEncSlice;
-
- if (avctx->bit_rate > 0) {
- if (avctx->rc_max_rate == avctx->bit_rate)
- ctx->va_rc_mode = VA_RC_CBR;
- else
- ctx->va_rc_mode = VA_RC_VBR;
- } else
- ctx->va_rc_mode = VA_RC_CQP;
- ctx->va_packed_headers =
+ ctx->desired_packed_headers =
VA_ENC_PACKED_HEADER_SEQUENCE | // VPS, SPS and PPS.
VA_ENC_PACKED_HEADER_SLICE | // Slice headers.
VA_ENC_PACKED_HEADER_MISC; // SEI
ctx->surface_width = FFALIGN(avctx->width, 16);
ctx->surface_height = FFALIGN(avctx->height, 16);
+ // CTU size is currently hard-coded to 32.
+ ctx->slice_block_width = ctx->slice_block_height = 32;
+
+ if (priv->qp > 0)
+ ctx->explicit_qp = priv->qp;
+
return ff_vaapi_encode_init(avctx);
}
{
VAAPIEncodeH265Context *priv = avctx->priv_data;
+ ff_cbs_fragment_free(priv->cbc, &priv->current_access_unit);
ff_cbs_close(&priv->cbc);
return ff_vaapi_encode_close(avctx);
#define OFFSET(x) offsetof(VAAPIEncodeH265Context, x)
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM)
static const AVOption vaapi_encode_h265_options[] = {
+ VAAPI_ENCODE_COMMON_OPTIONS,
+ VAAPI_ENCODE_RC_OPTIONS,
+
{ "qp", "Constant QP (for P-frames; scaled by qfactor/qoffset for I/B)",
- OFFSET(qp), AV_OPT_TYPE_INT, { .i64 = 25 }, 0, 52, FLAGS },
+ OFFSET(qp), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 52, FLAGS },
{ "aud", "Include AUD",
- OFFSET(aud), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },
+ OFFSET(aud), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
{ "profile", "Set profile (general_profile_idc)",
OFFSET(profile), AV_OPT_TYPE_INT,
- { .i64 = FF_PROFILE_HEVC_MAIN }, 0x00, 0xff, FLAGS, "profile" },
+ { .i64 = FF_PROFILE_UNKNOWN }, FF_PROFILE_UNKNOWN, 0xff, FLAGS, "profile" },
#define PROFILE(name, value) name, NULL, 0, AV_OPT_TYPE_CONST, \
{ .i64 = value }, 0, 0, FLAGS, "profile"
{ PROFILE("main", FF_PROFILE_HEVC_MAIN) },
{ PROFILE("main10", FF_PROFILE_HEVC_MAIN_10) },
+ { PROFILE("rext", FF_PROFILE_HEVC_REXT) },
#undef PROFILE
+ { "tier", "Set tier (general_tier_flag)",
+ OFFSET(tier), AV_OPT_TYPE_INT,
+ { .i64 = 0 }, 0, 1, FLAGS, "tier" },
+ { "main", NULL, 0, AV_OPT_TYPE_CONST,
+ { .i64 = 0 }, 0, 0, FLAGS, "tier" },
+ { "high", NULL, 0, AV_OPT_TYPE_CONST,
+ { .i64 = 1 }, 0, 0, FLAGS, "tier" },
+
{ "level", "Set level (general_level_idc)",
OFFSET(level), AV_OPT_TYPE_INT,
- { .i64 = 153 }, 0x00, 0xff, FLAGS, "level" },
+ { .i64 = FF_LEVEL_UNKNOWN }, FF_LEVEL_UNKNOWN, 0xff, FLAGS, "level" },
#define LEVEL(name, value) name, NULL, 0, AV_OPT_TYPE_CONST, \
{ .i64 = value }, 0, 0, FLAGS, "level"
{ "i_qoffset", "0" },
{ "b_qfactor", "6/5" },
{ "b_qoffset", "0" },
+ { "qmin", "-1" },
+ { "qmax", "-1" },
{ NULL },
};
.id = AV_CODEC_ID_HEVC,
.priv_data_size = sizeof(VAAPIEncodeH265Context),
.init = &vaapi_encode_h265_init,
- .encode2 = &ff_vaapi_encode2,
+ .send_frame = &ff_vaapi_encode_send_frame,
+ .receive_packet = &ff_vaapi_encode_receive_packet,
.close = &vaapi_encode_h265_close,
.priv_class = &vaapi_encode_h265_class,
.capabilities = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_HARDWARE,