/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
static int FUNC(rbsp_trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw)
{
    int err;

    fixed(1, rbsp_stop_one_bit, 1);
    while (byte_alignment(rw) != 0)
        fixed(1, rbsp_alignment_zero_bit, 0);

    return 0;
}
static int FUNC(nal_unit_header)(CodedBitstreamContext *ctx, RWContext *rw,
                                 H265RawNALUnitHeader *current,
                                 int expected_nal_unit_type)
{
    int err;

    u(1, forbidden_zero_bit, 0, 0);

    if (expected_nal_unit_type >= 0)
        u(6, nal_unit_type, expected_nal_unit_type,
                            expected_nal_unit_type);
    else
        u(6, nal_unit_type, 0, 63);

    u(6, nuh_layer_id,          0, 62);
    u(3, nuh_temporal_id_plus1, 1,  7);

    return 0;
}
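/*
 * Worked example for the header parsed above (illustrative values only):
 * the two bytes 0x40 0x01 decode as forbidden_zero_bit = 0,
 * nal_unit_type = 32 (HEVC_NAL_VPS), nuh_layer_id = 0 and
 * nuh_temporal_id_plus1 = 1, i.e. a base-layer VPS in the lowest
 * temporal sub-layer.
 */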
static int FUNC(byte_alignment)(CodedBitstreamContext *ctx, RWContext *rw)
{
    int err;

    fixed(1, alignment_bit_equal_to_one, 1);
    while (byte_alignment(rw) != 0)
        fixed(1, alignment_bit_equal_to_zero, 0);

    return 0;
}
static int FUNC(extension_data)(CodedBitstreamContext *ctx, RWContext *rw,
                                H265RawPSExtensionData *current)
{
    int err;
    size_t k;
#ifdef READ
    GetBitContext start;
    uint8_t bit;

    start = *rw;
    for (k = 0; cbs_h2645_read_more_rbsp_data(rw); k++)
        skip_bits(rw, 1);
    current->bit_length = k;
    if (k > 0) {
        *rw = start;
        allocate(current->data, (current->bit_length + 7) / 8);
        for (k = 0; k < current->bit_length; k++) {
            xu(1, extension_data, bit, 0, 1, 0);
            current->data[k / 8] |= bit << (7 - k % 8);
        }
    }
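    /*
     * The loop above packs extension bits MSB-first: bit k is stored in
     * byte k / 8 at bit position 7 - k % 8, so the first extension bit
     * lands in the top bit of data[0].  The write path below mirrors the
     * same layout.
     */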
#else
    for (k = 0; k < current->bit_length; k++)
        xu(1, extension_data, current->data[k / 8] >> (7 - k % 8), 0, 1, 0);
#endif

    return 0;
}
static int FUNC(profile_tier_level)(CodedBitstreamContext *ctx, RWContext *rw,
                                    H265RawProfileTierLevel *current,
                                    int profile_present_flag,
                                    int max_num_sub_layers_minus1)
{
    int err, i, j;

    if (profile_present_flag) {
        u(2, general_profile_space, 0, 0);
        flag(general_tier_flag);
        u(5, general_profile_idc, 0, 31);

        for (j = 0; j < 32; j++)
            flags(general_profile_compatibility_flag[j], 1, j);

        flag(general_progressive_source_flag);
        flag(general_interlaced_source_flag);
        flag(general_non_packed_constraint_flag);
        flag(general_frame_only_constraint_flag);

#define profile_compatible(x) (current->general_profile_idc == (x) || \
                               current->general_profile_compatibility_flag[x])
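        /*
         * profile_compatible(x) is true if the stream either declares
         * profile x directly or advertises compatibility with it.  For
         * example, a format range extensions stream with
         * general_profile_idc == 4 takes the constraint-flag branch below
         * even when none of the compatibility flags are set.
         */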
        if (profile_compatible(4) || profile_compatible(5) ||
            profile_compatible(6) || profile_compatible(7) ||
            profile_compatible(8) || profile_compatible(9) ||
            profile_compatible(10)) {
            flag(general_max_12bit_constraint_flag);
            flag(general_max_10bit_constraint_flag);
            flag(general_max_8bit_constraint_flag);
            flag(general_max_422chroma_constraint_flag);
            flag(general_max_420chroma_constraint_flag);
            flag(general_max_monochrome_constraint_flag);
            flag(general_intra_constraint_flag);
            flag(general_one_picture_only_constraint_flag);
            flag(general_lower_bit_rate_constraint_flag);

            if (profile_compatible(5) || profile_compatible(9) ||
                profile_compatible(10)) {
                flag(general_max_14bit_constraint_flag);
                fixed(24, general_reserved_zero_33bits, 0);
                fixed( 9, general_reserved_zero_33bits, 0);
            } else {
                fixed(24, general_reserved_zero_34bits, 0);
                fixed(10, general_reserved_zero_34bits, 0);
            }
        } else {
            fixed(24, general_reserved_zero_43bits, 0);
            fixed(19, general_reserved_zero_43bits, 0);
        }

        if (profile_compatible(1) || profile_compatible(2) ||
            profile_compatible(3) || profile_compatible(4) ||
            profile_compatible(5) || profile_compatible(9)) {
            flag(general_inbld_flag);
        } else {
            fixed(1, general_reserved_zero_bit, 0);
        }
#undef profile_compatible
    }

    u(8, general_level_idc, 0, 255);

    for (i = 0; i < max_num_sub_layers_minus1; i++) {
        flags(sub_layer_profile_present_flag[i], 1, i);
        flags(sub_layer_level_present_flag[i], 1, i);
    }

    if (max_num_sub_layers_minus1 > 0) {
        for (i = max_num_sub_layers_minus1; i < 8; i++)
            fixed(2, reserved_zero_2bits, 0);
    }

    for (i = 0; i < max_num_sub_layers_minus1; i++) {
        if (current->sub_layer_profile_present_flag[i])
            return AVERROR_PATCHWELCOME;
        if (current->sub_layer_level_present_flag[i])
            return AVERROR_PATCHWELCOME;
    }

    return 0;
}
static int FUNC(sub_layer_hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
                                          H265RawHRDParameters *hrd,
                                          int nal, int sub_layer_id)
{
    H265RawSubLayerHRDParameters *current;
    int err, i;

    if (nal)
        current = &hrd->nal_sub_layer_hrd_parameters[sub_layer_id];
    else
        current = &hrd->vcl_sub_layer_hrd_parameters[sub_layer_id];

    for (i = 0; i <= hrd->cpb_cnt_minus1[sub_layer_id]; i++) {
        ues(bit_rate_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
        ues(cpb_size_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
        if (hrd->sub_pic_hrd_params_present_flag) {
            ues(cpb_size_du_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
            ues(bit_rate_du_value_minus1[i], 0, UINT32_MAX - 1, 1, i);
        }
        flags(cbr_flag[i], 1, i);
    }

    return 0;
}
static int FUNC(hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
                                H265RawHRDParameters *current, int common_inf_present_flag,
                                int max_num_sub_layers_minus1)
{
    int err, i;

    if (common_inf_present_flag) {
        flag(nal_hrd_parameters_present_flag);
        flag(vcl_hrd_parameters_present_flag);

        if (current->nal_hrd_parameters_present_flag ||
            current->vcl_hrd_parameters_present_flag) {
            flag(sub_pic_hrd_params_present_flag);
            if (current->sub_pic_hrd_params_present_flag) {
                u(8, tick_divisor_minus2, 0, 255);
                u(5, du_cpb_removal_delay_increment_length_minus1, 0, 31);
                flag(sub_pic_cpb_params_in_pic_timing_sei_flag);
                u(5, dpb_output_delay_du_length_minus1, 0, 31);
            }

            u(4, bit_rate_scale, 0, 15);
            u(4, cpb_size_scale, 0, 15);
            if (current->sub_pic_hrd_params_present_flag)
                u(4, cpb_size_du_scale, 0, 15);

            u(5, initial_cpb_removal_delay_length_minus1, 0, 31);
            u(5, au_cpb_removal_delay_length_minus1,      0, 31);
            u(5, dpb_output_delay_length_minus1,          0, 31);
        } else {
            infer(sub_pic_hrd_params_present_flag, 0);

            infer(initial_cpb_removal_delay_length_minus1, 23);
            infer(au_cpb_removal_delay_length_minus1,      23);
            infer(dpb_output_delay_length_minus1,          23);
        }
    }

    for (i = 0; i <= max_num_sub_layers_minus1; i++) {
        flags(fixed_pic_rate_general_flag[i], 1, i);

        if (!current->fixed_pic_rate_general_flag[i])
            flags(fixed_pic_rate_within_cvs_flag[i], 1, i);
        else
            infer(fixed_pic_rate_within_cvs_flag[i], 1);

        if (current->fixed_pic_rate_within_cvs_flag[i]) {
            ues(elemental_duration_in_tc_minus1[i], 0, 2047, 1, i);
            infer(low_delay_hrd_flag[i], 0);
        } else
            flags(low_delay_hrd_flag[i], 1, i);

        if (!current->low_delay_hrd_flag[i])
            ues(cpb_cnt_minus1[i], 0, 31, 1, i);
        else
            infer(cpb_cnt_minus1[i], 0);

        if (current->nal_hrd_parameters_present_flag)
            CHECK(FUNC(sub_layer_hrd_parameters)(ctx, rw, current, 0, i));
        if (current->vcl_hrd_parameters_present_flag)
            CHECK(FUNC(sub_layer_hrd_parameters)(ctx, rw, current, 1, i));
    }

    return 0;
}
static int FUNC(vui_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
                                H265RawVUI *current, const H265RawSPS *sps)
{
    int err;

    flag(aspect_ratio_info_present_flag);
    if (current->aspect_ratio_info_present_flag) {
        u(8, aspect_ratio_idc, 0, 255);
        if (current->aspect_ratio_idc == 255) {
            u(16, sar_width,  0, 65535);
            u(16, sar_height, 0, 65535);
        }
    } else {
        infer(aspect_ratio_idc, 0);
    }

    flag(overscan_info_present_flag);
    if (current->overscan_info_present_flag)
        flag(overscan_appropriate_flag);

    flag(video_signal_type_present_flag);
    if (current->video_signal_type_present_flag) {
        u(3, video_format, 0, 7);
        flag(video_full_range_flag);
        flag(colour_description_present_flag);
        if (current->colour_description_present_flag) {
            u(8, colour_primaries,         0, 255);
            u(8, transfer_characteristics, 0, 255);
            u(8, matrix_coefficients,      0, 255);
        } else {
            infer(colour_primaries,         2);
            infer(transfer_characteristics, 2);
            infer(matrix_coefficients,      2);
        }
    } else {
        infer(video_format,             5);
        infer(video_full_range_flag,    0);
        infer(colour_primaries,         2);
        infer(transfer_characteristics, 2);
        infer(matrix_coefficients,      2);
    }

    flag(chroma_loc_info_present_flag);
    if (current->chroma_loc_info_present_flag) {
        ue(chroma_sample_loc_type_top_field,    0, 5);
        ue(chroma_sample_loc_type_bottom_field, 0, 5);
    } else {
        infer(chroma_sample_loc_type_top_field,    0);
        infer(chroma_sample_loc_type_bottom_field, 0);
    }

    flag(neutral_chroma_indication_flag);
    flag(field_seq_flag);
    flag(frame_field_info_present_flag);

    flag(default_display_window_flag);
    if (current->default_display_window_flag) {
        ue(def_disp_win_left_offset,   0, 16384);
        ue(def_disp_win_right_offset,  0, 16384);
        ue(def_disp_win_top_offset,    0, 16384);
        ue(def_disp_win_bottom_offset, 0, 16384);
    }

    flag(vui_timing_info_present_flag);
    if (current->vui_timing_info_present_flag) {
        u(32, vui_num_units_in_tick, 1, UINT32_MAX);
        u(32, vui_time_scale,        1, UINT32_MAX);
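        /*
         * For the common case of one clock tick per picture the nominal
         * frame rate is vui_time_scale / vui_num_units_in_tick; e.g.
         * time_scale = 60000 with num_units_in_tick = 1001 signals
         * 59.94 Hz.
         */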
        flag(vui_poc_proportional_to_timing_flag);
        if (current->vui_poc_proportional_to_timing_flag)
            ue(vui_num_ticks_poc_diff_one_minus1, 0, UINT32_MAX - 1);

        flag(vui_hrd_parameters_present_flag);
        if (current->vui_hrd_parameters_present_flag) {
            CHECK(FUNC(hrd_parameters)(ctx, rw, &current->hrd_parameters,
                                       1, sps->sps_max_sub_layers_minus1));
        }
    }

    flag(bitstream_restriction_flag);
    if (current->bitstream_restriction_flag) {
        flag(tiles_fixed_structure_flag);
        flag(motion_vectors_over_pic_boundaries_flag);
        flag(restricted_ref_pic_lists_flag);
        ue(min_spatial_segmentation_idc,  0, 4095);
        ue(max_bytes_per_pic_denom,       0, 16);
        ue(max_bits_per_min_cu_denom,     0, 16);
        ue(log2_max_mv_length_horizontal, 0, 16);
        ue(log2_max_mv_length_vertical,   0, 16);
    } else {
        infer(tiles_fixed_structure_flag,    0);
        infer(motion_vectors_over_pic_boundaries_flag, 1);
        infer(min_spatial_segmentation_idc,  0);
        infer(max_bytes_per_pic_denom,       2);
        infer(max_bits_per_min_cu_denom,     1);
        infer(log2_max_mv_length_horizontal, 15);
        infer(log2_max_mv_length_vertical,   15);
    }

    return 0;
}
static int FUNC(vps)(CodedBitstreamContext *ctx, RWContext *rw,
                     H265RawVPS *current)
{
    int err, i, j;

    HEADER("Video Parameter Set");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header, HEVC_NAL_VPS));

    u(4, vps_video_parameter_set_id, 0, 15);

    flag(vps_base_layer_internal_flag);
    flag(vps_base_layer_available_flag);
    u(6, vps_max_layers_minus1,     0, HEVC_MAX_LAYERS - 1);
    u(3, vps_max_sub_layers_minus1, 0, HEVC_MAX_SUB_LAYERS - 1);
    flag(vps_temporal_id_nesting_flag);

    if (current->vps_max_sub_layers_minus1 == 0 &&
        current->vps_temporal_id_nesting_flag != 1) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: "
               "vps_temporal_id_nesting_flag must be 1 if "
               "vps_max_sub_layers_minus1 is 0.\n");
        return AVERROR_INVALIDDATA;
    }

    fixed(16, vps_reserved_0xffff_16bits, 0xffff);

    CHECK(FUNC(profile_tier_level)(ctx, rw, &current->profile_tier_level,
                                   1, current->vps_max_sub_layers_minus1));

    flag(vps_sub_layer_ordering_info_present_flag);
    for (i = (current->vps_sub_layer_ordering_info_present_flag ?
              0 : current->vps_max_sub_layers_minus1);
         i <= current->vps_max_sub_layers_minus1; i++) {
        ues(vps_max_dec_pic_buffering_minus1[i],
            0, HEVC_MAX_DPB_SIZE - 1, 1, i);
        ues(vps_max_num_reorder_pics[i],
            0, current->vps_max_dec_pic_buffering_minus1[i], 1, i);
        ues(vps_max_latency_increase_plus1[i],
            0, UINT32_MAX - 1, 1, i);
    }
    if (!current->vps_sub_layer_ordering_info_present_flag) {
        for (i = 0; i < current->vps_max_sub_layers_minus1; i++) {
            infer(vps_max_dec_pic_buffering_minus1[i],
                  current->vps_max_dec_pic_buffering_minus1[current->vps_max_sub_layers_minus1]);
            infer(vps_max_num_reorder_pics[i],
                  current->vps_max_num_reorder_pics[current->vps_max_sub_layers_minus1]);
            infer(vps_max_latency_increase_plus1[i],
                  current->vps_max_latency_increase_plus1[current->vps_max_sub_layers_minus1]);
        }
    }

    u(6, vps_max_layer_id,        0, HEVC_MAX_LAYERS - 1);
    ue(vps_num_layer_sets_minus1, 0, HEVC_MAX_LAYER_SETS - 1);
    for (i = 1; i <= current->vps_num_layer_sets_minus1; i++) {
        for (j = 0; j <= current->vps_max_layer_id; j++)
            flags(layer_id_included_flag[i][j], 2, i, j);
    }
    for (j = 0; j <= current->vps_max_layer_id; j++)
        infer(layer_id_included_flag[0][j], j == 0);

    flag(vps_timing_info_present_flag);
    if (current->vps_timing_info_present_flag) {
        u(32, vps_num_units_in_tick, 1, UINT32_MAX);
        u(32, vps_time_scale,        1, UINT32_MAX);
        flag(vps_poc_proportional_to_timing_flag);
        if (current->vps_poc_proportional_to_timing_flag)
            ue(vps_num_ticks_poc_diff_one_minus1, 0, UINT32_MAX - 1);
        ue(vps_num_hrd_parameters, 0, current->vps_num_layer_sets_minus1 + 1);
        for (i = 0; i < current->vps_num_hrd_parameters; i++) {
            ues(hrd_layer_set_idx[i],
                current->vps_base_layer_internal_flag ? 0 : 1,
                current->vps_num_layer_sets_minus1, 1, i);
            if (i > 0)
                flags(cprms_present_flag[i], 1, i);
            else
                infer(cprms_present_flag[0], 1);

            CHECK(FUNC(hrd_parameters)(ctx, rw, &current->hrd_parameters[i],
                                       current->cprms_present_flag[i],
                                       current->vps_max_sub_layers_minus1));
        }
    }

    flag(vps_extension_flag);
    if (current->vps_extension_flag)
        CHECK(FUNC(extension_data)(ctx, rw, &current->extension_data));
    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
static int FUNC(st_ref_pic_set)(CodedBitstreamContext *ctx, RWContext *rw,
                                H265RawSTRefPicSet *current, int st_rps_idx,
                                const H265RawSPS *sps)
{
    int err, i, j;

    if (st_rps_idx > 0)
        flag(inter_ref_pic_set_prediction_flag);
    else
        infer(inter_ref_pic_set_prediction_flag, 0);

    if (current->inter_ref_pic_set_prediction_flag) {
        unsigned int ref_rps_idx, num_delta_pocs;
        const H265RawSTRefPicSet *ref;
        int delta_rps, d_poc;
        int ref_delta_poc_s0[HEVC_MAX_REFS], ref_delta_poc_s1[HEVC_MAX_REFS];
        int delta_poc_s0[HEVC_MAX_REFS], delta_poc_s1[HEVC_MAX_REFS];
        uint8_t used_by_curr_pic_s0[HEVC_MAX_REFS],
                used_by_curr_pic_s1[HEVC_MAX_REFS];

        if (st_rps_idx == sps->num_short_term_ref_pic_sets)
            ue(delta_idx_minus1, 0, st_rps_idx - 1);
        else
            infer(delta_idx_minus1, 0);

        ref_rps_idx = st_rps_idx - (current->delta_idx_minus1 + 1);
        ref = &sps->st_ref_pic_set[ref_rps_idx];
        num_delta_pocs = ref->num_negative_pics + ref->num_positive_pics;

        flag(delta_rps_sign);
        ue(abs_delta_rps_minus1, 0, INT16_MAX);
        delta_rps = (1 - 2 * current->delta_rps_sign) *
            (current->abs_delta_rps_minus1 + 1);
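        /*
         * e.g. delta_rps_sign == 1 with abs_delta_rps_minus1 == 2 gives
         * delta_rps = -3: every POC delta of the reference set is shifted
         * down by three when predicting this set.
         */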
        for (j = 0; j <= num_delta_pocs; j++) {
            flags(used_by_curr_pic_flag[j], 1, j);
            if (!current->used_by_curr_pic_flag[j])
                flags(use_delta_flag[j], 1, j);
            else
                infer(use_delta_flag[j], 1);
        }

        // Since the stored form of an RPS here is actually the delta-step
        // form used when inter_ref_pic_set_prediction_flag is not set, we
        // need to reconstruct that here in order to be able to refer to
        // the RPS later (which is required for parsing, because we don't
        // even know what syntax elements appear without it).  Therefore,
        // this code takes the delta-step form of the reference set, turns
        // it into the delta-array form, applies the prediction process of
        // 7.4.8, converts the result back to the delta-step form, and
        // stores that as the current set for future use.  Note that the
        // inferences here mean that writers using prediction will need
        // to fill in the delta-step values correctly as well - since the
        // whole RPS prediction process is somewhat overly sophisticated,
        // this hopefully forms a useful check for them to ensure their
        // predicted form actually matches what was intended rather than
        // an onerous additional requirement.
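        // Worked example (illustrative values only): assume the reference
        // set has delta_poc_s0_minus1 = { 0, 1 } (delta-array form
        // { -1, -3 }), no positive pictures, all use_delta_flag values set,
        // and delta_rps = -1.  The prediction below produces the S0 delta
        // array { -1, -2, -4 } (the -1 coming from delta_rps itself), which
        // converts back to the delta-step form delta_poc_s0_minus1 =
        // { 0, 0, 1 }.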
        d_poc = 0;
        for (i = 0; i < ref->num_negative_pics; i++) {
            d_poc -= ref->delta_poc_s0_minus1[i] + 1;
            ref_delta_poc_s0[i] = d_poc;
        }
        d_poc = 0;
        for (i = 0; i < ref->num_positive_pics; i++) {
            d_poc += ref->delta_poc_s1_minus1[i] + 1;
            ref_delta_poc_s1[i] = d_poc;
        }

        i = 0;
        for (j = ref->num_positive_pics - 1; j >= 0; j--) {
            d_poc = ref_delta_poc_s1[j] + delta_rps;
            if (d_poc < 0 && current->use_delta_flag[ref->num_negative_pics + j]) {
                delta_poc_s0[i] = d_poc;
                used_by_curr_pic_s0[i++] =
                    current->used_by_curr_pic_flag[ref->num_negative_pics + j];
            }
        }
        if (delta_rps < 0 && current->use_delta_flag[num_delta_pocs]) {
            delta_poc_s0[i] = delta_rps;
            used_by_curr_pic_s0[i++] =
                current->used_by_curr_pic_flag[num_delta_pocs];
        }
        for (j = 0; j < ref->num_negative_pics; j++) {
            d_poc = ref_delta_poc_s0[j] + delta_rps;
            if (d_poc < 0 && current->use_delta_flag[j]) {
                delta_poc_s0[i] = d_poc;
                used_by_curr_pic_s0[i++] = current->used_by_curr_pic_flag[j];
            }
        }

        infer(num_negative_pics, i);
        for (i = 0; i < current->num_negative_pics; i++) {
            infer(delta_poc_s0_minus1[i],
                  -(delta_poc_s0[i] - (i == 0 ? 0 : delta_poc_s0[i - 1])) - 1);
            infer(used_by_curr_pic_s0_flag[i], used_by_curr_pic_s0[i]);
        }

        i = 0;
        for (j = ref->num_negative_pics - 1; j >= 0; j--) {
            d_poc = ref_delta_poc_s0[j] + delta_rps;
            if (d_poc > 0 && current->use_delta_flag[j]) {
                delta_poc_s1[i] = d_poc;
                used_by_curr_pic_s1[i++] = current->used_by_curr_pic_flag[j];
            }
        }
        if (delta_rps > 0 && current->use_delta_flag[num_delta_pocs]) {
            delta_poc_s1[i] = delta_rps;
            used_by_curr_pic_s1[i++] =
                current->used_by_curr_pic_flag[num_delta_pocs];
        }
        for (j = 0; j < ref->num_positive_pics; j++) {
            d_poc = ref_delta_poc_s1[j] + delta_rps;
            if (d_poc > 0 && current->use_delta_flag[ref->num_negative_pics + j]) {
                delta_poc_s1[i] = d_poc;
                used_by_curr_pic_s1[i++] =
                    current->used_by_curr_pic_flag[ref->num_negative_pics + j];
            }
        }

        infer(num_positive_pics, i);
        for (i = 0; i < current->num_positive_pics; i++) {
            infer(delta_poc_s1_minus1[i],
                  delta_poc_s1[i] - (i == 0 ? 0 : delta_poc_s1[i - 1]) - 1);
            infer(used_by_curr_pic_s1_flag[i], used_by_curr_pic_s1[i]);
        }
    } else {
        ue(num_negative_pics, 0, 15);
        ue(num_positive_pics, 0, 15 - current->num_negative_pics);

        for (i = 0; i < current->num_negative_pics; i++) {
            ues(delta_poc_s0_minus1[i], 0, INT16_MAX, 1, i);
            flags(used_by_curr_pic_s0_flag[i], 1, i);
        }
        for (i = 0; i < current->num_positive_pics; i++) {
            ues(delta_poc_s1_minus1[i], 0, INT16_MAX, 1, i);
            flags(used_by_curr_pic_s1_flag[i], 1, i);
        }
    }

    return 0;
}
static int FUNC(scaling_list_data)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H265RawScalingList *current)
{
    int sizeId, matrixId;
    int err, n, i;

    for (sizeId = 0; sizeId < 4; sizeId++) {
        for (matrixId = 0; matrixId < 6; matrixId += (sizeId == 3 ? 3 : 1)) {
            flags(scaling_list_pred_mode_flag[sizeId][matrixId],
                  2, sizeId, matrixId);
            if (!current->scaling_list_pred_mode_flag[sizeId][matrixId]) {
                ues(scaling_list_pred_matrix_id_delta[sizeId][matrixId],
                    0, sizeId == 3 ? matrixId / 3 : matrixId,
                    2, sizeId, matrixId);
            } else {
                n = FFMIN(64, 1 << (4 + (sizeId << 1)));
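                /*
                 * n is the number of coefficients actually coded: 16 for
                 * the 4x4 lists (sizeId 0) and 64 for everything larger,
                 * since 16x16 and 32x32 lists are sent as 8x8 matrices
                 * plus the DC term below.
                 */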
                if (sizeId > 1)
                    ses(scaling_list_dc_coef_minus8[sizeId - 2][matrixId], -7, +247,
                        2, sizeId - 2, matrixId);

                for (i = 0; i < n; i++) {
                    ses(scaling_list_delta_coeff[sizeId][matrixId][i],
                        -128, +127, 3, sizeId, matrixId, i);
                }
            }
        }
    }

    return 0;
}
static int FUNC(sps_range_extension)(CodedBitstreamContext *ctx, RWContext *rw,
                                     H265RawSPS *current)
{
    int err;

    flag(transform_skip_rotation_enabled_flag);
    flag(transform_skip_context_enabled_flag);
    flag(implicit_rdpcm_enabled_flag);
    flag(explicit_rdpcm_enabled_flag);
    flag(extended_precision_processing_flag);
    flag(intra_smoothing_disabled_flag);
    flag(high_precision_offsets_enabled_flag);
    flag(persistent_rice_adaptation_enabled_flag);
    flag(cabac_bypass_alignment_enabled_flag);

    return 0;
}
static int FUNC(sps_scc_extension)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H265RawSPS *current)
{
    int err, comp, i;

    flag(sps_curr_pic_ref_enabled_flag);

    flag(palette_mode_enabled_flag);
    if (current->palette_mode_enabled_flag) {
        ue(palette_max_size, 0, 64);
        ue(delta_palette_max_predictor_size, 0, 128);

        flag(sps_palette_predictor_initializer_present_flag);
        if (current->sps_palette_predictor_initializer_present_flag) {
            ue(sps_num_palette_predictor_initializer_minus1, 0, 128);
            for (comp = 0; comp < (current->chroma_format_idc ? 3 : 1); comp++) {
                int bit_depth = comp == 0 ? current->bit_depth_luma_minus8 + 8
                                          : current->bit_depth_chroma_minus8 + 8;
                for (i = 0; i <= current->sps_num_palette_predictor_initializer_minus1; i++)
                    us(bit_depth, sps_palette_predictor_initializers[comp][i],
                       0, MAX_UINT_BITS(bit_depth), 2, comp, i);
            }
        }
    }

    u(2, motion_vector_resolution_control_idc, 0, 2);
    flag(intra_boundary_filtering_disable_flag);

    return 0;
}
static int FUNC(sps)(CodedBitstreamContext *ctx, RWContext *rw,
                     H265RawSPS *current)
{
    CodedBitstreamH265Context *h265 = ctx->priv_data;
    const H265RawVPS *vps;
    int err, i;
    unsigned int min_cb_log2_size_y, ctb_log2_size_y,
                 min_cb_size_y, min_tb_log2_size_y;

    HEADER("Sequence Parameter Set");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header, HEVC_NAL_SPS));

    u(4, sps_video_parameter_set_id, 0, 15);
    h265->active_vps = vps = h265->vps[current->sps_video_parameter_set_id];

    u(3, sps_max_sub_layers_minus1, 0, HEVC_MAX_SUB_LAYERS - 1);
    flag(sps_temporal_id_nesting_flag);

    if (vps) {
        if (current->sps_max_sub_layers_minus1 > vps->vps_max_sub_layers_minus1) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: "
                   "sps_max_sub_layers_minus1 (%d) must be less than or equal to "
                   "vps_max_sub_layers_minus1 (%d).\n",
                   current->sps_max_sub_layers_minus1,
                   vps->vps_max_sub_layers_minus1);
            return AVERROR_INVALIDDATA;
        }
        if (vps->vps_temporal_id_nesting_flag &&
            !current->sps_temporal_id_nesting_flag) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid stream: "
                   "sps_temporal_id_nesting_flag must be 1 if "
                   "vps_temporal_id_nesting_flag is 1.\n");
            return AVERROR_INVALIDDATA;
        }
    }
    CHECK(FUNC(profile_tier_level)(ctx, rw, &current->profile_tier_level,
                                   1, current->sps_max_sub_layers_minus1));

    ue(sps_seq_parameter_set_id, 0, 15);

    ue(chroma_format_idc, 0, 3);
    if (current->chroma_format_idc == 3)
        flag(separate_colour_plane_flag);
    else
        infer(separate_colour_plane_flag, 0);

    ue(pic_width_in_luma_samples,  1, HEVC_MAX_WIDTH);
    ue(pic_height_in_luma_samples, 1, HEVC_MAX_HEIGHT);

    flag(conformance_window_flag);
    if (current->conformance_window_flag) {
        ue(conf_win_left_offset,   0, current->pic_width_in_luma_samples);
        ue(conf_win_right_offset,  0, current->pic_width_in_luma_samples);
        ue(conf_win_top_offset,    0, current->pic_height_in_luma_samples);
        ue(conf_win_bottom_offset, 0, current->pic_height_in_luma_samples);
    } else {
        infer(conf_win_left_offset,   0);
        infer(conf_win_right_offset,  0);
        infer(conf_win_top_offset,    0);
        infer(conf_win_bottom_offset, 0);
    }

    ue(bit_depth_luma_minus8,   0, 8);
    ue(bit_depth_chroma_minus8, 0, 8);

    ue(log2_max_pic_order_cnt_lsb_minus4, 0, 12);
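    /*
     * A value in 0..12 means slice headers carry between 4 and 16 bits of
     * POC LSB; e.g. the value 4 gives an 8-bit slice_pic_order_cnt_lsb
     * wrapping at 256.
     */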
    flag(sps_sub_layer_ordering_info_present_flag);
    for (i = (current->sps_sub_layer_ordering_info_present_flag ?
              0 : current->sps_max_sub_layers_minus1);
         i <= current->sps_max_sub_layers_minus1; i++) {
        ues(sps_max_dec_pic_buffering_minus1[i],
            0, HEVC_MAX_DPB_SIZE - 1, 1, i);
        ues(sps_max_num_reorder_pics[i],
            0, current->sps_max_dec_pic_buffering_minus1[i], 1, i);
        ues(sps_max_latency_increase_plus1[i],
            0, UINT32_MAX - 1, 1, i);
    }
    if (!current->sps_sub_layer_ordering_info_present_flag) {
        for (i = 0; i < current->sps_max_sub_layers_minus1; i++) {
            infer(sps_max_dec_pic_buffering_minus1[i],
                  current->sps_max_dec_pic_buffering_minus1[current->sps_max_sub_layers_minus1]);
            infer(sps_max_num_reorder_pics[i],
                  current->sps_max_num_reorder_pics[current->sps_max_sub_layers_minus1]);
            infer(sps_max_latency_increase_plus1[i],
                  current->sps_max_latency_increase_plus1[current->sps_max_sub_layers_minus1]);
        }
    }

    ue(log2_min_luma_coding_block_size_minus3,   0, 3);
    min_cb_log2_size_y = current->log2_min_luma_coding_block_size_minus3 + 3;

    ue(log2_diff_max_min_luma_coding_block_size, 0, 3);
    ctb_log2_size_y = min_cb_log2_size_y +
        current->log2_diff_max_min_luma_coding_block_size;

    min_cb_size_y = 1 << min_cb_log2_size_y;
    if (current->pic_width_in_luma_samples  % min_cb_size_y ||
        current->pic_height_in_luma_samples % min_cb_size_y) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid dimensions: %ux%u not divisible "
               "by MinCbSizeY = %u.\n", current->pic_width_in_luma_samples,
               current->pic_height_in_luma_samples, min_cb_size_y);
        return AVERROR_INVALIDDATA;
    }

    ue(log2_min_luma_transform_block_size_minus2, 0, min_cb_log2_size_y - 3);
    min_tb_log2_size_y = current->log2_min_luma_transform_block_size_minus2 + 2;

    ue(log2_diff_max_min_luma_transform_block_size,
       0, FFMIN(ctb_log2_size_y, 5) - min_tb_log2_size_y);

    ue(max_transform_hierarchy_depth_inter,
       0, ctb_log2_size_y - min_tb_log2_size_y);
    ue(max_transform_hierarchy_depth_intra,
       0, ctb_log2_size_y - min_tb_log2_size_y);

    flag(scaling_list_enabled_flag);
    if (current->scaling_list_enabled_flag) {
        flag(sps_scaling_list_data_present_flag);
        if (current->sps_scaling_list_data_present_flag)
            CHECK(FUNC(scaling_list_data)(ctx, rw, &current->scaling_list));
    } else {
        infer(sps_scaling_list_data_present_flag, 0);
    }

    flag(amp_enabled_flag);
    flag(sample_adaptive_offset_enabled_flag);

    flag(pcm_enabled_flag);
    if (current->pcm_enabled_flag) {
        u(4, pcm_sample_bit_depth_luma_minus1,
          0, current->bit_depth_luma_minus8 + 8 - 1);
        u(4, pcm_sample_bit_depth_chroma_minus1,
          0, current->bit_depth_chroma_minus8 + 8 - 1);

        ue(log2_min_pcm_luma_coding_block_size_minus3,
           FFMIN(min_cb_log2_size_y, 5) - 3, FFMIN(ctb_log2_size_y, 5) - 3);
        ue(log2_diff_max_min_pcm_luma_coding_block_size,
           0, FFMIN(ctb_log2_size_y, 5) - (current->log2_min_pcm_luma_coding_block_size_minus3 + 3));

        flag(pcm_loop_filter_disabled_flag);
    }

    ue(num_short_term_ref_pic_sets, 0, HEVC_MAX_SHORT_TERM_REF_PIC_SETS);
    for (i = 0; i < current->num_short_term_ref_pic_sets; i++)
        CHECK(FUNC(st_ref_pic_set)(ctx, rw, &current->st_ref_pic_set[i], i, current));

    flag(long_term_ref_pics_present_flag);
    if (current->long_term_ref_pics_present_flag) {
        ue(num_long_term_ref_pics_sps, 0, HEVC_MAX_LONG_TERM_REF_PICS);
        for (i = 0; i < current->num_long_term_ref_pics_sps; i++) {
            us(current->log2_max_pic_order_cnt_lsb_minus4 + 4,
               lt_ref_pic_poc_lsb_sps[i],
               0, MAX_UINT_BITS(current->log2_max_pic_order_cnt_lsb_minus4 + 4), 1, i);
            flags(used_by_curr_pic_lt_sps_flag[i], 1, i);
        }
    }

    flag(sps_temporal_mvp_enabled_flag);
    flag(strong_intra_smoothing_enabled_flag);

    flag(vui_parameters_present_flag);
    if (current->vui_parameters_present_flag)
        CHECK(FUNC(vui_parameters)(ctx, rw, &current->vui, current));

    flag(sps_extension_present_flag);
    if (current->sps_extension_present_flag) {
        flag(sps_range_extension_flag);
        flag(sps_multilayer_extension_flag);
        flag(sps_3d_extension_flag);
        flag(sps_scc_extension_flag);
        u(4, sps_extension_4bits, 0, MAX_UINT_BITS(4));
    }

    if (current->sps_range_extension_flag)
        CHECK(FUNC(sps_range_extension)(ctx, rw, current));
    if (current->sps_multilayer_extension_flag)
        return AVERROR_PATCHWELCOME;
    if (current->sps_3d_extension_flag)
        return AVERROR_PATCHWELCOME;
    if (current->sps_scc_extension_flag)
        CHECK(FUNC(sps_scc_extension)(ctx, rw, current));
    if (current->sps_extension_4bits)
        CHECK(FUNC(extension_data)(ctx, rw, &current->extension_data));

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
static int FUNC(pps_range_extension)(CodedBitstreamContext *ctx, RWContext *rw,
                                     H265RawPPS *current)
{
    CodedBitstreamH265Context *h265 = ctx->priv_data;
    const H265RawSPS *sps = h265->active_sps;
    int err, i;

    if (current->transform_skip_enabled_flag)
        ue(log2_max_transform_skip_block_size_minus2, 0, 3);
    flag(cross_component_prediction_enabled_flag);

    flag(chroma_qp_offset_list_enabled_flag);
    if (current->chroma_qp_offset_list_enabled_flag) {
        ue(diff_cu_chroma_qp_offset_depth,
           0, sps->log2_diff_max_min_luma_coding_block_size);
        ue(chroma_qp_offset_list_len_minus1, 0, 5);
        for (i = 0; i <= current->chroma_qp_offset_list_len_minus1; i++) {
            ses(cb_qp_offset_list[i], -12, +12, 1, i);
            ses(cr_qp_offset_list[i], -12, +12, 1, i);
        }
    }

    ue(log2_sao_offset_scale_luma,   0, FFMAX(0, sps->bit_depth_luma_minus8   - 2));
    ue(log2_sao_offset_scale_chroma, 0, FFMAX(0, sps->bit_depth_chroma_minus8 - 2));

    return 0;
}
static int FUNC(pps_scc_extension)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H265RawPPS *current)
{
    int err, comp, i;

    flag(pps_curr_pic_ref_enabled_flag);

    flag(residual_adaptive_colour_transform_enabled_flag);
    if (current->residual_adaptive_colour_transform_enabled_flag) {
        flag(pps_slice_act_qp_offsets_present_flag);
        se(pps_act_y_qp_offset_plus5,  -7, +17);
        se(pps_act_cb_qp_offset_plus5, -7, +17);
        se(pps_act_cr_qp_offset_plus3, -9, +15);
    } else {
        infer(pps_slice_act_qp_offsets_present_flag, 0);
        infer(pps_act_y_qp_offset_plus5,  0);
        infer(pps_act_cb_qp_offset_plus5, 0);
        infer(pps_act_cr_qp_offset_plus3, 0);
    }

    flag(pps_palette_predictor_initializer_present_flag);
    if (current->pps_palette_predictor_initializer_present_flag) {
        ue(pps_num_palette_predictor_initializer, 0, 128);
        if (current->pps_num_palette_predictor_initializer > 0) {
            flag(monochrome_palette_flag);
            ue(luma_bit_depth_entry_minus8, 0, 8);
            if (!current->monochrome_palette_flag)
                ue(chroma_bit_depth_entry_minus8, 0, 8);
            for (comp = 0; comp < (current->monochrome_palette_flag ? 1 : 3); comp++) {
                int bit_depth = comp == 0 ? current->luma_bit_depth_entry_minus8 + 8
                                          : current->chroma_bit_depth_entry_minus8 + 8;
                for (i = 0; i < current->pps_num_palette_predictor_initializer; i++)
                    us(bit_depth, pps_palette_predictor_initializers[comp][i],
                       0, MAX_UINT_BITS(bit_depth), 2, comp, i);
            }
        }
    }

    return 0;
}
static int FUNC(pps)(CodedBitstreamContext *ctx, RWContext *rw,
                     H265RawPPS *current)
{
    CodedBitstreamH265Context *h265 = ctx->priv_data;
    const H265RawSPS *sps;
    int err, i;

    HEADER("Picture Parameter Set");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header, HEVC_NAL_PPS));

    ue(pps_pic_parameter_set_id, 0, 63);
    ue(pps_seq_parameter_set_id, 0, 15);
    sps = h265->sps[current->pps_seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               current->pps_seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h265->active_sps = sps;

    flag(dependent_slice_segments_enabled_flag);
    flag(output_flag_present_flag);
    u(3, num_extra_slice_header_bits, 0, 7);
    flag(sign_data_hiding_enabled_flag);
    flag(cabac_init_present_flag);

    ue(num_ref_idx_l0_default_active_minus1, 0, 14);
    ue(num_ref_idx_l1_default_active_minus1, 0, 14);

    se(init_qp_minus26, -(26 + 6 * sps->bit_depth_luma_minus8), +25);

    flag(constrained_intra_pred_flag);
    flag(transform_skip_enabled_flag);
    flag(cu_qp_delta_enabled_flag);
    if (current->cu_qp_delta_enabled_flag)
        ue(diff_cu_qp_delta_depth,
           0, sps->log2_diff_max_min_luma_coding_block_size);
    else
        infer(diff_cu_qp_delta_depth, 0);

    se(pps_cb_qp_offset, -12, +12);
    se(pps_cr_qp_offset, -12, +12);
    flag(pps_slice_chroma_qp_offsets_present_flag);

    flag(weighted_pred_flag);
    flag(weighted_bipred_flag);

    flag(transquant_bypass_enabled_flag);
    flag(tiles_enabled_flag);
    flag(entropy_coding_sync_enabled_flag);

    if (current->tiles_enabled_flag) {
        ue(num_tile_columns_minus1, 0, HEVC_MAX_TILE_COLUMNS);
        ue(num_tile_rows_minus1,    0, HEVC_MAX_TILE_ROWS);
        flag(uniform_spacing_flag);
        if (!current->uniform_spacing_flag) {
            for (i = 0; i < current->num_tile_columns_minus1; i++)
                ues(column_width_minus1[i], 0, sps->pic_width_in_luma_samples, 1, i);
            for (i = 0; i < current->num_tile_rows_minus1; i++)
                ues(row_height_minus1[i], 0, sps->pic_height_in_luma_samples, 1, i);
        }
        flag(loop_filter_across_tiles_enabled_flag);
    } else {
        infer(num_tile_columns_minus1, 0);
        infer(num_tile_rows_minus1,    0);
    }

    flag(pps_loop_filter_across_slices_enabled_flag);
    flag(deblocking_filter_control_present_flag);
    if (current->deblocking_filter_control_present_flag) {
        flag(deblocking_filter_override_enabled_flag);
        flag(pps_deblocking_filter_disabled_flag);
        if (!current->pps_deblocking_filter_disabled_flag) {
            se(pps_beta_offset_div2, -6, +6);
            se(pps_tc_offset_div2,   -6, +6);
        } else {
            infer(pps_beta_offset_div2, 0);
            infer(pps_tc_offset_div2,   0);
        }
    } else {
        infer(deblocking_filter_override_enabled_flag, 0);
        infer(pps_deblocking_filter_disabled_flag,     0);
        infer(pps_beta_offset_div2, 0);
        infer(pps_tc_offset_div2,   0);
    }

    flag(pps_scaling_list_data_present_flag);
    if (current->pps_scaling_list_data_present_flag)
        CHECK(FUNC(scaling_list_data)(ctx, rw, &current->scaling_list));

    flag(lists_modification_present_flag);

    ue(log2_parallel_merge_level_minus2,
       0, (sps->log2_min_luma_coding_block_size_minus3 + 3 +
           sps->log2_diff_max_min_luma_coding_block_size - 2));
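    /*
     * The upper bound above is CtbLog2SizeY - 2; e.g. with 64x64 CTBs
     * (CtbLog2SizeY == 6) log2_parallel_merge_level_minus2 can be at most
     * 4, i.e. a parallel merge level up to the full CTB.
     */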
    flag(slice_segment_header_extension_present_flag);

    flag(pps_extension_present_flag);
    if (current->pps_extension_present_flag) {
        flag(pps_range_extension_flag);
        flag(pps_multilayer_extension_flag);
        flag(pps_3d_extension_flag);
        flag(pps_scc_extension_flag);
        u(4, pps_extension_4bits, 0, MAX_UINT_BITS(4));
    }
    if (current->pps_range_extension_flag)
        CHECK(FUNC(pps_range_extension)(ctx, rw, current));
    if (current->pps_multilayer_extension_flag)
        return AVERROR_PATCHWELCOME;
    if (current->pps_3d_extension_flag)
        return AVERROR_PATCHWELCOME;
    if (current->pps_scc_extension_flag)
        CHECK(FUNC(pps_scc_extension)(ctx, rw, current));
    if (current->pps_extension_4bits)
        CHECK(FUNC(extension_data)(ctx, rw, &current->extension_data));

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
static int FUNC(aud)(CodedBitstreamContext *ctx, RWContext *rw,
                     H265RawAUD *current)
{
    int err;

    HEADER("Access Unit Delimiter");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header, HEVC_NAL_AUD));

    u(3, pic_type, 0, 2);

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}
static int FUNC(ref_pic_lists_modification)(CodedBitstreamContext *ctx, RWContext *rw,
                                            H265RawSliceHeader *current,
                                            unsigned int num_pic_total_curr)
{
    unsigned int entry_size;
    int err, i;

    entry_size = av_log2(num_pic_total_curr - 1) + 1;
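    /*
     * Each list_entry_lX index is coded with ceil(log2(num_pic_total_curr))
     * bits; e.g. five candidate pictures give av_log2(4) + 1 = 3-bit
     * entries.
     */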
    flag(ref_pic_list_modification_flag_l0);
    if (current->ref_pic_list_modification_flag_l0) {
        for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++)
            us(entry_size, list_entry_l0[i], 0, num_pic_total_curr - 1, 1, i);
    }

    if (current->slice_type == HEVC_SLICE_B) {
        flag(ref_pic_list_modification_flag_l1);
        if (current->ref_pic_list_modification_flag_l1) {
            for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++)
                us(entry_size, list_entry_l1[i], 0, num_pic_total_curr - 1, 1, i);
        }
    }

    return 0;
}
static int FUNC(pred_weight_table)(CodedBitstreamContext *ctx, RWContext *rw,
                                   H265RawSliceHeader *current)
{
    CodedBitstreamH265Context *h265 = ctx->priv_data;
    const H265RawSPS *sps = h265->active_sps;
    int err, i, j;
    int chroma = !sps->separate_colour_plane_flag &&
                  sps->chroma_format_idc != 0;

    ue(luma_log2_weight_denom, 0, 7);
    if (chroma)
        se(delta_chroma_log2_weight_denom, -7, 7);
    else
        infer(delta_chroma_log2_weight_denom, 0);

    for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) {
        if (1 /* is not same POC and same layer_id */)
            flags(luma_weight_l0_flag[i], 1, i);
        else
            infer(luma_weight_l0_flag[i], 0);
    }
    if (chroma) {
        for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) {
            if (1 /* is not same POC and same layer_id */)
                flags(chroma_weight_l0_flag[i], 1, i);
            else
                infer(chroma_weight_l0_flag[i], 0);
        }
    }

    for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) {
        if (current->luma_weight_l0_flag[i]) {
            ses(delta_luma_weight_l0[i], -128, +127, 1, i);
            ses(luma_offset_l0[i],
                -(1 << (sps->bit_depth_luma_minus8 + 8 - 1)),
                ((1 << (sps->bit_depth_luma_minus8 + 8 - 1)) - 1), 1, i);
        } else {
            infer(delta_luma_weight_l0[i], 0);
            infer(luma_offset_l0[i], 0);
        }
        if (current->chroma_weight_l0_flag[i]) {
            for (j = 0; j < 2; j++) {
                ses(delta_chroma_weight_l0[i][j], -128, +127, 2, i, j);
                ses(chroma_offset_l0[i][j],
                    -(4 << (sps->bit_depth_chroma_minus8 + 8 - 1)),
                    ((4 << (sps->bit_depth_chroma_minus8 + 8 - 1)) - 1), 2, i, j);
            }
        } else {
            for (j = 0; j < 2; j++) {
                infer(delta_chroma_weight_l0[i][j], 0);
                infer(chroma_offset_l0[i][j], 0);
            }
        }
    }

    if (current->slice_type == HEVC_SLICE_B) {
        for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) {
            if (1 /* RefPicList1[i] is not CurrPic, nor is it in a different layer */)
                flags(luma_weight_l1_flag[i], 1, i);
            else
                infer(luma_weight_l1_flag[i], 0);
        }
        if (chroma) {
            for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) {
                if (1 /* RefPicList1[i] is not CurrPic, nor is it in a different layer */)
                    flags(chroma_weight_l1_flag[i], 1, i);
                else
                    infer(chroma_weight_l1_flag[i], 0);
            }
        }

        for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) {
            if (current->luma_weight_l1_flag[i]) {
                ses(delta_luma_weight_l1[i], -128, +127, 1, i);
                ses(luma_offset_l1[i],
                    -(1 << (sps->bit_depth_luma_minus8 + 8 - 1)),
                    ((1 << (sps->bit_depth_luma_minus8 + 8 - 1)) - 1), 1, i);
            } else {
                infer(delta_luma_weight_l1[i], 0);
                infer(luma_offset_l1[i], 0);
            }
            if (current->chroma_weight_l1_flag[i]) {
                for (j = 0; j < 2; j++) {
                    ses(delta_chroma_weight_l1[i][j], -128, +127, 2, i, j);
                    ses(chroma_offset_l1[i][j],
                        -(4 << (sps->bit_depth_chroma_minus8 + 8 - 1)),
                        ((4 << (sps->bit_depth_chroma_minus8 + 8 - 1)) - 1), 2, i, j);
                }
            } else {
                for (j = 0; j < 2; j++) {
                    infer(delta_chroma_weight_l1[i][j], 0);
                    infer(chroma_offset_l1[i][j], 0);
                }
            }
        }
    }

    return 0;
}
static int FUNC(slice_segment_header)(CodedBitstreamContext *ctx, RWContext *rw,
                                      H265RawSliceHeader *current)
{
    CodedBitstreamH265Context *h265 = ctx->priv_data;
    const H265RawSPS *sps;
    const H265RawPPS *pps;
    unsigned int min_cb_log2_size_y, ctb_log2_size_y, ctb_size_y;
    unsigned int pic_width_in_ctbs_y, pic_height_in_ctbs_y, pic_size_in_ctbs_y;
    unsigned int num_pic_total_curr = 0;
    int err, i;

    HEADER("Slice Segment Header");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header, -1));

    flag(first_slice_segment_in_pic_flag);

    if (current->nal_unit_header.nal_unit_type >= HEVC_NAL_BLA_W_LP &&
        current->nal_unit_header.nal_unit_type <= HEVC_NAL_IRAP_VCL23)
        flag(no_output_of_prior_pics_flag);

    ue(slice_pic_parameter_set_id, 0, 63);

    pps = h265->pps[current->slice_pic_parameter_set_id];
    if (!pps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "PPS id %d not available.\n",
               current->slice_pic_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h265->active_pps = pps;

    sps = h265->sps[pps->pps_seq_parameter_set_id];
    if (!sps) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
               pps->pps_seq_parameter_set_id);
        return AVERROR_INVALIDDATA;
    }
    h265->active_sps = sps;

    min_cb_log2_size_y = sps->log2_min_luma_coding_block_size_minus3 + 3;
    ctb_log2_size_y = min_cb_log2_size_y + sps->log2_diff_max_min_luma_coding_block_size;
    ctb_size_y = 1 << ctb_log2_size_y;
    pic_width_in_ctbs_y =
        (sps->pic_width_in_luma_samples + ctb_size_y - 1) / ctb_size_y;
    pic_height_in_ctbs_y =
        (sps->pic_height_in_luma_samples + ctb_size_y - 1) / ctb_size_y;
    pic_size_in_ctbs_y = pic_width_in_ctbs_y * pic_height_in_ctbs_y;
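    /*
     * e.g. a 1920x1080 stream with 64x64 CTBs has 30x17 = 510 CTUs, so a
     * non-first slice segment codes its slice_segment_address in
     * av_log2(509) + 1 = 9 bits.
     */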
    if (!current->first_slice_segment_in_pic_flag) {
        unsigned int address_size = av_log2(pic_size_in_ctbs_y - 1) + 1;
        if (pps->dependent_slice_segments_enabled_flag)
            flag(dependent_slice_segment_flag);
        else
            infer(dependent_slice_segment_flag, 0);
        u(address_size, slice_segment_address, 0, pic_size_in_ctbs_y - 1);
    } else {
        infer(dependent_slice_segment_flag, 0);
    }

    if (!current->dependent_slice_segment_flag) {
        for (i = 0; i < pps->num_extra_slice_header_bits; i++)
            flags(slice_reserved_flag[i], 1, i);

        ue(slice_type, 0, 2);

        if (pps->output_flag_present_flag)
            flag(pic_output_flag);

        if (sps->separate_colour_plane_flag)
            u(2, colour_plane_id, 0, 2);

        if (current->nal_unit_header.nal_unit_type != HEVC_NAL_IDR_W_RADL &&
            current->nal_unit_header.nal_unit_type != HEVC_NAL_IDR_N_LP) {
            const H265RawSTRefPicSet *rps;

            u(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, slice_pic_order_cnt_lsb,
              0, MAX_UINT_BITS(sps->log2_max_pic_order_cnt_lsb_minus4 + 4));

            flag(short_term_ref_pic_set_sps_flag);
            if (!current->short_term_ref_pic_set_sps_flag) {
                CHECK(FUNC(st_ref_pic_set)(ctx, rw, &current->short_term_ref_pic_set,
                                           sps->num_short_term_ref_pic_sets, sps));
                rps = &current->short_term_ref_pic_set;
            } else if (sps->num_short_term_ref_pic_sets > 1) {
                unsigned int idx_size = av_log2(sps->num_short_term_ref_pic_sets - 1) + 1;
                u(idx_size, short_term_ref_pic_set_idx,
                  0, sps->num_short_term_ref_pic_sets - 1);
                rps = &sps->st_ref_pic_set[current->short_term_ref_pic_set_idx];
            } else {
                infer(short_term_ref_pic_set_idx, 0);
                rps = &sps->st_ref_pic_set[0];
            }

            num_pic_total_curr = 0;
            for (i = 0; i < rps->num_negative_pics; i++)
                if (rps->used_by_curr_pic_s0_flag[i])
                    ++num_pic_total_curr;
            for (i = 0; i < rps->num_positive_pics; i++)
                if (rps->used_by_curr_pic_s1_flag[i])
                    ++num_pic_total_curr;

            if (sps->long_term_ref_pics_present_flag) {
                unsigned int idx_size;

                if (sps->num_long_term_ref_pics_sps > 0) {
                    ue(num_long_term_sps, 0, sps->num_long_term_ref_pics_sps);
                    idx_size = av_log2(sps->num_long_term_ref_pics_sps - 1) + 1;
                } else {
                    infer(num_long_term_sps, 0);
                    idx_size = 0;
                }
                ue(num_long_term_pics, 0, HEVC_MAX_LONG_TERM_REF_PICS);

                for (i = 0; i < current->num_long_term_sps +
                                current->num_long_term_pics; i++) {
                    if (i < current->num_long_term_sps) {
                        if (sps->num_long_term_ref_pics_sps > 1)
                            us(idx_size, lt_idx_sps[i],
                               0, sps->num_long_term_ref_pics_sps - 1, 1, i);
                        if (sps->used_by_curr_pic_lt_sps_flag[current->lt_idx_sps[i]])
                            ++num_pic_total_curr;
                    } else {
                        us(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, poc_lsb_lt[i],
                           0, MAX_UINT_BITS(sps->log2_max_pic_order_cnt_lsb_minus4 + 4), 1, i);
                        flags(used_by_curr_pic_lt_flag[i], 1, i);
                        if (current->used_by_curr_pic_lt_flag[i])
                            ++num_pic_total_curr;
                    }
                    flags(delta_poc_msb_present_flag[i], 1, i);
                    if (current->delta_poc_msb_present_flag[i])
                        ues(delta_poc_msb_cycle_lt[i], 0, UINT32_MAX - 1, 1, i);
                    else
                        infer(delta_poc_msb_cycle_lt[i], 0);
                }
            }

            if (sps->sps_temporal_mvp_enabled_flag)
                flag(slice_temporal_mvp_enabled_flag);
            else
                infer(slice_temporal_mvp_enabled_flag, 0);

            if (pps->pps_curr_pic_ref_enabled_flag)
                ++num_pic_total_curr;
        }

        if (sps->sample_adaptive_offset_enabled_flag) {
            flag(slice_sao_luma_flag);
            if (!sps->separate_colour_plane_flag && sps->chroma_format_idc != 0)
                flag(slice_sao_chroma_flag);
            else
                infer(slice_sao_chroma_flag, 0);
        } else {
            infer(slice_sao_luma_flag,   0);
            infer(slice_sao_chroma_flag, 0);
        }

        if (current->slice_type == HEVC_SLICE_P ||
            current->slice_type == HEVC_SLICE_B) {
            flag(num_ref_idx_active_override_flag);
            if (current->num_ref_idx_active_override_flag) {
                ue(num_ref_idx_l0_active_minus1, 0, 14);
                if (current->slice_type == HEVC_SLICE_B)
                    ue(num_ref_idx_l1_active_minus1, 0, 14);
                else
                    infer(num_ref_idx_l1_active_minus1, pps->num_ref_idx_l1_default_active_minus1);
            } else {
                infer(num_ref_idx_l0_active_minus1, pps->num_ref_idx_l0_default_active_minus1);
                infer(num_ref_idx_l1_active_minus1, pps->num_ref_idx_l1_default_active_minus1);
            }

            if (pps->lists_modification_present_flag && num_pic_total_curr > 1)
                CHECK(FUNC(ref_pic_lists_modification)(ctx, rw, current,
                                                       num_pic_total_curr));

            if (current->slice_type == HEVC_SLICE_B)
                flag(mvd_l1_zero_flag);
            if (pps->cabac_init_present_flag)
                flag(cabac_init_flag);
            else
                infer(cabac_init_flag, 0);
            if (current->slice_temporal_mvp_enabled_flag) {
                if (current->slice_type == HEVC_SLICE_B)
                    flag(collocated_from_l0_flag);
                else
                    infer(collocated_from_l0_flag, 1);
                if (current->collocated_from_l0_flag) {
                    if (current->num_ref_idx_l0_active_minus1 > 0)
                        ue(collocated_ref_idx, 0, current->num_ref_idx_l0_active_minus1);
                    else
                        infer(collocated_ref_idx, 0);
                } else {
                    if (current->num_ref_idx_l1_active_minus1 > 0)
                        ue(collocated_ref_idx, 0, current->num_ref_idx_l1_active_minus1);
                    else
                        infer(collocated_ref_idx, 0);
                }
            }

            if ((pps->weighted_pred_flag   && current->slice_type == HEVC_SLICE_P) ||
                (pps->weighted_bipred_flag && current->slice_type == HEVC_SLICE_B))
                CHECK(FUNC(pred_weight_table)(ctx, rw, current));

            ue(five_minus_max_num_merge_cand, 0, 4);
            if (sps->motion_vector_resolution_control_idc == 2)
                flag(use_integer_mv_flag);
            else
                infer(use_integer_mv_flag, sps->motion_vector_resolution_control_idc);
        }

        se(slice_qp_delta,
           - 6 * sps->bit_depth_luma_minus8 - (pps->init_qp_minus26 + 26),
           + 51 - (pps->init_qp_minus26 + 26));
        if (pps->pps_slice_chroma_qp_offsets_present_flag) {
            se(slice_cb_qp_offset, -12, +12);
            se(slice_cr_qp_offset, -12, +12);
        } else {
            infer(slice_cb_qp_offset, 0);
            infer(slice_cr_qp_offset, 0);
        }
        if (pps->pps_slice_act_qp_offsets_present_flag) {
            se(slice_act_y_qp_offset,
               -12 - (pps->pps_act_y_qp_offset_plus5 - 5),
               +12 - (pps->pps_act_y_qp_offset_plus5 - 5));
            se(slice_act_cb_qp_offset,
               -12 - (pps->pps_act_cb_qp_offset_plus5 - 5),
               +12 - (pps->pps_act_cb_qp_offset_plus5 - 5));
            se(slice_act_cr_qp_offset,
               -12 - (pps->pps_act_cr_qp_offset_plus3 - 3),
               +12 - (pps->pps_act_cr_qp_offset_plus3 - 3));
        } else {
            infer(slice_act_y_qp_offset,  0);
            infer(slice_act_cb_qp_offset, 0);
            infer(slice_act_cr_qp_offset, 0);
        }
        if (pps->chroma_qp_offset_list_enabled_flag)
            flag(cu_chroma_qp_offset_enabled_flag);
        else
            infer(cu_chroma_qp_offset_enabled_flag, 0);

        if (pps->deblocking_filter_override_enabled_flag)
            flag(deblocking_filter_override_flag);
        else
            infer(deblocking_filter_override_flag, 0);
        if (current->deblocking_filter_override_flag) {
            flag(slice_deblocking_filter_disabled_flag);
            if (!current->slice_deblocking_filter_disabled_flag) {
                se(slice_beta_offset_div2, -6, +6);
                se(slice_tc_offset_div2,   -6, +6);
            } else {
                infer(slice_beta_offset_div2, pps->pps_beta_offset_div2);
                infer(slice_tc_offset_div2,   pps->pps_tc_offset_div2);
            }
        } else {
            infer(slice_deblocking_filter_disabled_flag,
                  pps->pps_deblocking_filter_disabled_flag);
            infer(slice_beta_offset_div2, pps->pps_beta_offset_div2);
            infer(slice_tc_offset_div2,   pps->pps_tc_offset_div2);
        }
        if (pps->pps_loop_filter_across_slices_enabled_flag &&
            (current->slice_sao_luma_flag || current->slice_sao_chroma_flag ||
             !current->slice_deblocking_filter_disabled_flag))
            flag(slice_loop_filter_across_slices_enabled_flag);
        else
            infer(slice_loop_filter_across_slices_enabled_flag,
                  pps->pps_loop_filter_across_slices_enabled_flag);
    }

    if (pps->tiles_enabled_flag || pps->entropy_coding_sync_enabled_flag) {
        unsigned int num_entry_point_offsets_limit;
        if (!pps->tiles_enabled_flag && pps->entropy_coding_sync_enabled_flag)
            num_entry_point_offsets_limit = pic_height_in_ctbs_y - 1;
        else if (pps->tiles_enabled_flag && !pps->entropy_coding_sync_enabled_flag)
            num_entry_point_offsets_limit =
                (pps->num_tile_columns_minus1 + 1) * (pps->num_tile_rows_minus1 + 1);
        else
            num_entry_point_offsets_limit =
                (pps->num_tile_columns_minus1 + 1) * pic_height_in_ctbs_y - 1;
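        /*
         * With WPP only this allows one entry point per CTU row after the
         * first; with tiles the bound scales with the tile grid dimensions.
         */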
        ue(num_entry_point_offsets, 0, num_entry_point_offsets_limit);

        if (current->num_entry_point_offsets > HEVC_MAX_ENTRY_POINT_OFFSETS) {
            av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many entry points: "
                   "%"PRIu16".\n", current->num_entry_point_offsets);
            return AVERROR_PATCHWELCOME;
        }

        if (current->num_entry_point_offsets > 0) {
            ue(offset_len_minus1, 0, 31);
            for (i = 0; i < current->num_entry_point_offsets; i++)
                us(current->offset_len_minus1 + 1, entry_point_offset_minus1[i],
                   0, MAX_UINT_BITS(current->offset_len_minus1 + 1), 1, i);
        }
    }

    if (pps->slice_segment_header_extension_present_flag) {
        ue(slice_segment_header_extension_length, 0, 256);
        for (i = 0; i < current->slice_segment_header_extension_length; i++)
            us(8, slice_segment_header_extension_data_byte[i], 0x00, 0xff, 1, i);
    }

    CHECK(FUNC(byte_alignment)(ctx, rw));

    return 0;
}
static int FUNC(sei_mastering_display)(CodedBitstreamContext *ctx, RWContext *rw,
                                       H265RawSEIMasteringDisplayColourVolume *current)
{
    int err, c;

    for (c = 0; c < 3; c++) {
        us(16, display_primaries_x[c], 0, 50000, 1, c);
        us(16, display_primaries_y[c], 0, 50000, 1, c);
    }

    u(16, white_point_x, 0, 50000);
    u(16, white_point_y, 0, 50000);
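    /*
     * Primaries and white point are in increments of 0.00002 on the CIE
     * 1931 x/y scale, so e.g. a D65 white point is coded as roughly
     * (15635, 16450); the luminance values below are in units of
     * 0.0001 cd/m2.
     */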
    u(32, max_display_mastering_luminance,
      1, MAX_UINT_BITS(32));
    u(32, min_display_mastering_luminance,
      0, current->max_display_mastering_luminance - 1);

    return 0;
}
static int FUNC(sei_content_light_level)(CodedBitstreamContext *ctx, RWContext *rw,
                                         H265RawSEIContentLightLevelInfo *current)
{
    int err;

    u(16, max_content_light_level,     0, MAX_UINT_BITS(16));
    u(16, max_pic_average_light_level, 0, MAX_UINT_BITS(16));

    return 0;
}
static int FUNC(sei_payload)(CodedBitstreamContext *ctx, RWContext *rw,
                             H265RawSEIPayload *current)
{
    int err, i;
    int start_position, end_position;

#ifdef READ
    start_position = get_bits_count(rw);
#else
    start_position = put_bits_count(rw);
#endif

    switch (current->payload_type) {
    case HEVC_SEI_TYPE_MASTERING_DISPLAY_INFO:
        CHECK(FUNC(sei_mastering_display)
              (ctx, rw, &current->payload.mastering_display));
        break;
    case HEVC_SEI_TYPE_CONTENT_LIGHT_LEVEL_INFO:
        CHECK(FUNC(sei_content_light_level)
              (ctx, rw, &current->payload.content_light_level));
        break;
    default:
        {
#ifdef READ
            current->payload.other.data_length = current->payload_size;
#endif
            allocate(current->payload.other.data, current->payload.other.data_length);

            for (i = 0; i < current->payload_size; i++)
                xu(8, payload_byte[i], current->payload.other.data[i], 0, 255,
                   1, i);
        }
    }

    if (byte_alignment(rw)) {
        fixed(1, bit_equal_to_one, 1);
        while (byte_alignment(rw))
            fixed(1, bit_equal_to_zero, 0);
    }

#ifdef READ
    end_position = get_bits_count(rw);
    if (end_position < start_position + 8 * current->payload_size) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Incorrect SEI payload length: "
               "header %"PRIu32" bits, actually %d bits.\n",
               8 * current->payload_size,
               end_position - start_position);
        return AVERROR_INVALIDDATA;
    }
#else
    end_position = put_bits_count(rw);
    current->payload_size = (end_position - start_position) >> 3;
#endif

    return 0;
}
static int FUNC(sei)(CodedBitstreamContext *ctx, RWContext *rw,
                     H265RawSEI *current)
{
    int err, k;

    HEADER("Supplemental Enhancement Information");

    CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
                                HEVC_NAL_SEI_PREFIX));

#ifdef READ
    for (k = 0; k < H265_MAX_SEI_PAYLOADS; k++) {
        uint32_t payload_type = 0;
        uint32_t payload_size = 0;
        uint32_t tmp;

        while (show_bits(rw, 8) == 0xff) {
            fixed(8, ff_byte, 0xff);
            payload_type += 255;
        }
        xu(8, last_payload_type_byte, tmp, 0, 254, 0);
        payload_type += tmp;

        while (show_bits(rw, 8) == 0xff) {
            fixed(8, ff_byte, 0xff);
            payload_size += 255;
        }
        xu(8, last_payload_size_byte, tmp, 0, 254, 0);
        payload_size += tmp;
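        /*
         * Both fields use the usual SEI variable-length coding: e.g. a
         * 300-byte payload size is sent as 0xFF 0x2D (255 + 45).
         */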
        current->payload[k].payload_type = payload_type;
        current->payload[k].payload_size = payload_size;

        CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k]));

        if (!cbs_h2645_read_more_rbsp_data(rw))
            break;
    }
    if (k >= H265_MAX_SEI_PAYLOADS) {
        av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many payloads in "
               "SEI message: found %d.\n", k);
        return AVERROR_INVALIDDATA;
    }
    current->payload_count = k + 1;
#else
    for (k = 0; k < current->payload_count; k++) {
        PutBitContext start_state;
        uint32_t tmp;
        int need_size, i;

        // Somewhat clumsy: we write the payload twice when
        // we don't know the size in advance.  This will mess
        // with trace output, but is otherwise harmless.
        start_state = *rw;
        need_size = !current->payload[k].payload_size;
        for (i = 0; i < 1 + need_size; i++) {
            *rw = start_state;

            tmp = current->payload[k].payload_type;
            while (tmp >= 255) {
                fixed(8, ff_byte, 0xff);
                tmp -= 255;
            }
            xu(8, last_payload_type_byte, tmp, 0, 254, 0);

            tmp = current->payload[k].payload_size;
            while (tmp >= 255) {
                fixed(8, ff_byte, 0xff);
                tmp -= 255;
            }
            xu(8, last_payload_size_byte, tmp, 0, 254, 0);

            CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k]));
        }
    }
#endif

    CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));

    return 0;
}