hevc: remove HEVCContext usage from hevc_ps

diff --git a/libavcodec/h264.c b/libavcodec/h264.c
index 20de3254a9479801a2e17a39da7e6a76d7550ad6..6925a8ac318a988a9c36639c40568f8d9249719b 100644
--- a/libavcodec/h264.c
+++ b/libavcodec/h264.c
@@ -83,7 +83,7 @@ void ff_h264_draw_horiz_band(const H264Context *h, H264SliceContext *sl,
                              int y, int height)
 {
     AVCodecContext *avctx = h->avctx;
-    const AVFrame   *src  = &h->cur_pic.f;
+    const AVFrame   *src  = h->cur_pic.f;
     const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
     int vshift = desc->log2_chroma_h;
     const int field_pic = h->picture_structure != PICT_FRAME;
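
Throughout this diff, H264Picture.f changes from an embedded AVFrame into a
heap-allocated AVFrame pointer, so address-of expressions like &h->cur_pic.f
become h->cur_pic.f and member accesses switch from .f. to ->f-> (or f->).
A minimal sketch of that field change, not the real h264.h declaration,
which carries many more members:

#include <libavutil/frame.h>

/* before: AVFrame f;  -- reset in place with av_frame_unref(&pic->f)
 * after:  AVFrame *f; -- allocated once with av_frame_alloc() and freed
 *                        once with av_frame_free(), as the later hunks show */
typedef struct H264Picture {
    AVFrame *f;
    /* ... reference, POC and motion-vector fields omitted ... */
} H264Picture;
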
@@ -590,12 +590,15 @@ static int h264_init_context(AVCodecContext *avctx, H264Context *h)
         return AVERROR(ENOMEM);
     }
 
-    h->DPB = av_mallocz_array(H264_MAX_PICTURE_COUNT, sizeof(*h->DPB));
-    if (!h->DPB)
+    for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
+        h->DPB[i].f = av_frame_alloc();
+        if (!h->DPB[i].f)
+            return AVERROR(ENOMEM);
+    }
+
+    h->cur_pic.f = av_frame_alloc();
+    if (!h->cur_pic.f)
         return AVERROR(ENOMEM);
-    for (i = 0; i < H264_MAX_PICTURE_COUNT; i++)
-        av_frame_unref(&h->DPB[i].f);
-    av_frame_unref(&h->cur_pic.f);
 
     for (i = 0; i < h->nb_slice_ctx; i++)
         h->slice_ctx[i].h264 = h;
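
The DPB changes from a separately allocated array to a fixed-size member
array (hence the dropped NULL checks in the flush and free hunks below), and
each picture's AVFrame is now allocated up front and lives for the whole
decoder lifetime while its buffers are still attached and released per
picture. A hedged sketch of that ownership model (the function name is
illustrative; the matching teardown is in the ff_h264_free_context and
h264_decode_end hunks further down):

#include <errno.h>
#include <libavutil/error.h>
#include <libavutil/frame.h>

static int frame_lifecycle_sketch(void)
{
    AVFrame *f = av_frame_alloc();      /* decoder init: allocate the struct */
    if (!f)
        return AVERROR(ENOMEM);

    /* per decoded picture: buffers are attached (get_buffer/av_frame_ref)
     * and dropped again, leaving the allocation itself alone */
    av_frame_unref(f);

    av_frame_free(&f);                  /* decoder teardown: free the struct */
    return 0;
}
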
@@ -685,7 +688,7 @@ static void decode_postinit(H264Context *h, int setup_finished)
     int i, pics, out_of_order, out_idx;
     int invalid = 0, cnt = 0;
 
-    h->cur_pic_ptr->f.pict_type = h->pict_type;
+    h->cur_pic_ptr->f->pict_type = h->pict_type;
 
     if (h->next_output_pic)
         return;
@@ -700,8 +703,8 @@ static void decode_postinit(H264Context *h, int setup_finished)
         return;
     }
 
-    cur->f.interlaced_frame = 0;
-    cur->f.repeat_pict      = 0;
+    cur->f->interlaced_frame = 0;
+    cur->f->repeat_pict      = 0;
 
     /* Signal interlacing information externally. */
     /* Prioritize picture timing SEI information over used
@@ -713,55 +716,55 @@ static void decode_postinit(H264Context *h, int setup_finished)
             break;
         case SEI_PIC_STRUCT_TOP_FIELD:
         case SEI_PIC_STRUCT_BOTTOM_FIELD:
-            cur->f.interlaced_frame = 1;
+            cur->f->interlaced_frame = 1;
             break;
         case SEI_PIC_STRUCT_TOP_BOTTOM:
         case SEI_PIC_STRUCT_BOTTOM_TOP:
             if (FIELD_OR_MBAFF_PICTURE(h))
-                cur->f.interlaced_frame = 1;
+                cur->f->interlaced_frame = 1;
             else
                 // try to flag soft telecine progressive
-                cur->f.interlaced_frame = h->prev_interlaced_frame;
+                cur->f->interlaced_frame = h->prev_interlaced_frame;
             break;
         case SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
         case SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
             /* Signal the possibility of telecined film externally
              * (pic_struct 5,6). From these hints, let the applications
              * decide if they apply deinterlacing. */
-            cur->f.repeat_pict = 1;
+            cur->f->repeat_pict = 1;
             break;
         case SEI_PIC_STRUCT_FRAME_DOUBLING:
-            cur->f.repeat_pict = 2;
+            cur->f->repeat_pict = 2;
             break;
         case SEI_PIC_STRUCT_FRAME_TRIPLING:
-            cur->f.repeat_pict = 4;
+            cur->f->repeat_pict = 4;
             break;
         }
 
         if ((h->sei_ct_type & 3) &&
             h->sei_pic_struct <= SEI_PIC_STRUCT_BOTTOM_TOP)
-            cur->f.interlaced_frame = (h->sei_ct_type & (1 << 1)) != 0;
+            cur->f->interlaced_frame = (h->sei_ct_type & (1 << 1)) != 0;
     } else {
         /* Derive interlacing flag from used decoding process. */
-        cur->f.interlaced_frame = FIELD_OR_MBAFF_PICTURE(h);
+        cur->f->interlaced_frame = FIELD_OR_MBAFF_PICTURE(h);
     }
-    h->prev_interlaced_frame = cur->f.interlaced_frame;
+    h->prev_interlaced_frame = cur->f->interlaced_frame;
 
     if (cur->field_poc[0] != cur->field_poc[1]) {
         /* Derive top_field_first from field pocs. */
-        cur->f.top_field_first = cur->field_poc[0] < cur->field_poc[1];
+        cur->f->top_field_first = cur->field_poc[0] < cur->field_poc[1];
     } else {
-        if (cur->f.interlaced_frame || h->sps.pic_struct_present_flag) {
+        if (cur->f->interlaced_frame || h->sps.pic_struct_present_flag) {
             /* Use picture timing SEI information. Even if it is
              * information from a past frame, it is better than nothing. */
             if (h->sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM ||
                 h->sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM_TOP)
-                cur->f.top_field_first = 1;
+                cur->f->top_field_first = 1;
             else
-                cur->f.top_field_first = 0;
+                cur->f->top_field_first = 0;
         } else {
             /* Most likely progressive */
-            cur->f.top_field_first = 0;
+            cur->f->top_field_first = 0;
         }
     }
 
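
Only where the flags live changes here (cur->f-> instead of cur->f.). For
context, a hedged sketch of how an application reads what this block fills
in on the output frame; report_field_info and the bare printf reporting are
illustrative, not part of this change:

#include <stdio.h>
#include <libavutil/frame.h>

static void report_field_info(const AVFrame *frame)
{
    if (frame->interlaced_frame)
        printf("interlaced, %s field first\n",
               frame->top_field_first ? "top" : "bottom");
    if (frame->repeat_pict)
        /* soft telecine / frame doubling / tripling: the picture should be
         * shown for an extra repeat_pict / (2 * fps) on top of its nominal
         * duration */
        printf("repeat_pict = %d\n", frame->repeat_pict);
}
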
@@ -770,7 +773,7 @@ static void decode_postinit(H264Context *h, int setup_finished)
         h->frame_packing_arrangement_type <= 6 &&
         h->content_interpretation_type > 0 &&
         h->content_interpretation_type < 3) {
-        AVStereo3D *stereo = av_stereo3d_create_side_data(&cur->f);
+        AVStereo3D *stereo = av_stereo3d_create_side_data(cur->f);
         if (!stereo)
             return;
 
@@ -808,7 +811,7 @@ static void decode_postinit(H264Context *h, int setup_finished)
     if (h->sei_display_orientation_present &&
         (h->sei_anticlockwise_rotation || h->sei_hflip || h->sei_vflip)) {
         double angle = h->sei_anticlockwise_rotation * 360 / (double) (1 << 16);
-        AVFrameSideData *rotation = av_frame_new_side_data(&cur->f,
+        AVFrameSideData *rotation = av_frame_new_side_data(cur->f,
                                                            AV_FRAME_DATA_DISPLAYMATRIX,
                                                            sizeof(int32_t) * 9);
         if (!rotation)
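
Same pointer adjustment for the display-matrix side data. On the receiving
side the exported 3x3 matrix can be turned back into an angle with
av_display_rotation_get(), roughly as in this hedged sketch (report_rotation
is illustrative):

#include <stdint.h>
#include <stdio.h>
#include <libavutil/display.h>
#include <libavutil/frame.h>

static void report_rotation(const AVFrame *frame)
{
    AVFrameSideData *sd = av_frame_get_side_data(frame,
                                                 AV_FRAME_DATA_DISPLAYMATRIX);

    if (sd)
        printf("display rotation: %.2f degrees counterclockwise\n",
               av_display_rotation_get((const int32_t *)sd->data));
}
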
@@ -819,6 +822,16 @@ static void decode_postinit(H264Context *h, int setup_finished)
                                h->sei_hflip, h->sei_vflip);
     }
 
+    if (h->sei_reguserdata_afd_present) {
+        AVFrameSideData *sd = av_frame_new_side_data(cur->f, AV_FRAME_DATA_AFD,
+                                                     sizeof(uint8_t));
+        if (!sd)
+            return;
+
+        *sd->data = h->active_format_description;
+        h->sei_reguserdata_afd_present = 0;
+    }
+
     // FIXME do something with unavailable reference frames
 
     /* Sort B-frames into display order */
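
The AFD value newly exported in this hunk travels as a one-byte
AV_FRAME_DATA_AFD payload on the output frame; a hedged sketch of how a
caller picks it up (report_afd is illustrative):

#include <stdio.h>
#include <libavutil/frame.h>

static void report_afd(const AVFrame *frame)
{
    AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_AFD);

    if (sd && sd->size >= 1)
        printf("active format description: 0x%02x\n", sd->data[0]);
}
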
@@ -862,13 +875,13 @@ static void decode_postinit(H264Context *h, int setup_finished)
         cnt     += out->poc < h->last_pocs[i];
         invalid += out->poc == INT_MIN;
     }
-    if (!h->mmco_reset && !cur->f.key_frame &&
+    if (!h->mmco_reset && !cur->f->key_frame &&
         cnt + invalid == MAX_DELAYED_PIC_COUNT && cnt > 0) {
         h->mmco_reset = 2;
         if (pics > 1)
             h->delayed_pic[pics - 2]->mmco_reset = 2;
     }
-    if (h->mmco_reset || cur->f.key_frame) {
+    if (h->mmco_reset || cur->f->key_frame) {
         for (i = 0; i < MAX_DELAYED_PIC_COUNT; i++)
             h->last_pocs[i] = INT_MIN;
         cnt     = 0;
@@ -879,16 +892,16 @@ static void decode_postinit(H264Context *h, int setup_finished)
     for (i = 1; i < MAX_DELAYED_PIC_COUNT &&
                 h->delayed_pic[i] &&
                 !h->delayed_pic[i - 1]->mmco_reset &&
-                !h->delayed_pic[i]->f.key_frame;
+                !h->delayed_pic[i]->f->key_frame;
          i++)
         if (h->delayed_pic[i]->poc < out->poc) {
             out     = h->delayed_pic[i];
             out_idx = i;
         }
     if (h->avctx->has_b_frames == 0 &&
-        (h->delayed_pic[0]->f.key_frame || h->mmco_reset))
+        (h->delayed_pic[0]->f->key_frame || h->mmco_reset))
         h->next_outputed_poc = INT_MIN;
-    out_of_order = !out->f.key_frame && !h->mmco_reset &&
+    out_of_order = !out->f->key_frame && !h->mmco_reset &&
                    (out->poc < h->next_outputed_poc);
 
     if (h->sps.bitstream_restriction_flag &&
@@ -902,7 +915,7 @@ static void decode_postinit(H264Context *h, int setup_finished)
     } else if (h->low_delay &&
                ((h->next_outputed_poc != INT_MIN &&
                  out->poc > h->next_outputed_poc + 2) ||
-                cur->f.pict_type == AV_PICTURE_TYPE_B)) {
+                cur->f->pict_type == AV_PICTURE_TYPE_B)) {
         h->low_delay = 0;
         h->avctx->has_b_frames++;
     }
@@ -927,7 +940,7 @@ static void decode_postinit(H264Context *h, int setup_finished)
                 h->next_outputed_poc = INT_MIN;
             }
         } else {
-            if (out_idx == 0 && pics > 1 && h->delayed_pic[0]->f.key_frame) {
+            if (out_idx == 0 && pics > 1 && h->delayed_pic[0]->f->key_frame) {
                 h->next_outputed_poc = INT_MIN;
             } else {
                 h->next_outputed_poc = out->poc;
@@ -947,8 +960,12 @@ static void decode_postinit(H264Context *h, int setup_finished)
         h->next_output_pic->recovered |= !!(h->frame_recovered & FRAME_RECOVERED_SEI);
     }
 
-    if (setup_finished && !h->avctx->hwaccel)
+    if (setup_finished && !h->avctx->hwaccel) {
         ff_thread_finish_setup(h->avctx);
+
+        if (h->avctx->active_thread_type & FF_THREAD_FRAME)
+            h->setup_finished = 1;
+    }
 }
 
 int ff_pred_weight_table(H264Context *h, H264SliceContext *sl)
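
With frame threading, ff_thread_finish_setup() hands the per-frame setup
over to the next frame thread, so later NAL units of the same frame must not
rewrite state that thread has already copied; the new setup_finished flag
records that point. A hedged sketch of how the flag is meant to be consulted
(the actual guard sites are outside this diff, and the aspect-ratio update
below is only an example):

#include "h264.h"    /* decoder-internal header, as included by h264.c */

static void publish_setup_sketch(H264Context *h, AVRational sar)
{
    if (h->setup_finished)      /* ff_thread_finish_setup() already ran  */
        return;                 /* the next frame thread owns this state */

    /* still safe to publish per-frame setup such as the aspect ratio */
    h->avctx->sample_aspect_ratio = sar;
}
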
@@ -1052,9 +1069,8 @@ static void flush_dpb(AVCodecContext *avctx)
 
     ff_h264_flush_change(h);
 
-    if (h->DPB)
-        for (i = 0; i < H264_MAX_PICTURE_COUNT; i++)
-            ff_h264_unref_picture(h, &h->DPB[i]);
+    for (i = 0; i < H264_MAX_PICTURE_COUNT; i++)
+        ff_h264_unref_picture(h, &h->DPB[i]);
     h->cur_pic_ptr = NULL;
     ff_h264_unref_picture(h, &h->cur_pic);
 
@@ -1449,7 +1465,7 @@ again:
                                         ((1 << h->sps.log2_max_frame_num) - 1);
                 }
 
-                h->cur_pic_ptr->f.key_frame |=
+                h->cur_pic_ptr->f->key_frame |=
                     (h->nal_unit_type == NAL_IDR_SLICE) ||
                     (h->sei_recovery_frame_cnt >= 0);
 
@@ -1616,6 +1632,7 @@ static int h264_decode_frame(AVCodecContext *avctx, void *data,
     int ret;
 
     h->flags = avctx->flags;
+    h->setup_finished = 0;
 
     /* end of stream, output what is still in the buffers */
 out:
@@ -1630,7 +1647,7 @@ out:
         out_idx = 0;
         for (i = 1;
              h->delayed_pic[i] &&
-             !h->delayed_pic[i]->f.key_frame &&
+             !h->delayed_pic[i]->f->key_frame &&
              !h->delayed_pic[i]->mmco_reset;
              i++)
             if (h->delayed_pic[i]->poc < out->poc) {
@@ -1642,7 +1659,7 @@ out:
             h->delayed_pic[i] = h->delayed_pic[i + 1];
 
         if (out) {
-            ret = output_frame(h, pict, &out->f);
+            ret = output_frame(h, pict, out->f);
             if (ret < 0)
                 return ret;
             *got_frame = 1;
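
This is the end-of-stream drain path: once the caller starts feeding empty
packets, the pictures still sitting in delayed_pic[] are returned one per
call. A hedged sketch of the application-side loop that exercises it, using
the decode API of this libavcodec version (drain_sketch is illustrative):

#include <libavcodec/avcodec.h>

static int drain_sketch(AVCodecContext *avctx, AVFrame *frame)
{
    AVPacket pkt;
    int got_frame, ret;

    av_init_packet(&pkt);
    pkt.data = NULL;            /* an empty packet signals end of stream */
    pkt.size = 0;

    do {
        ret = avcodec_decode_video2(avctx, frame, &got_frame, &pkt);
        if (ret < 0)
            return ret;
        /* ... hand the frame to the caller whenever got_frame is set ... */
    } while (got_frame);

    return 0;
}
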
@@ -1678,9 +1695,9 @@ out:
         if (h->next_output_pic && ((avctx->flags & CODEC_FLAG_OUTPUT_CORRUPT) ||
                                    h->next_output_pic->recovered)) {
             if (!h->next_output_pic->recovered)
-                h->next_output_pic->f.flags |= AV_FRAME_FLAG_CORRUPT;
+                h->next_output_pic->f->flags |= AV_FRAME_FLAG_CORRUPT;
 
-            ret = output_frame(h, pict, &h->next_output_pic->f);
+            ret = output_frame(h, pict, h->next_output_pic->f);
             if (ret < 0)
                 return ret;
             *got_frame = 1;
@@ -1698,10 +1715,9 @@ av_cold void ff_h264_free_context(H264Context *h)
 
     ff_h264_free_tables(h);
 
-    if (h->DPB) {
-        for (i = 0; i < H264_MAX_PICTURE_COUNT; i++)
-            ff_h264_unref_picture(h, &h->DPB[i]);
-        av_freep(&h->DPB);
+    for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
+        ff_h264_unref_picture(h, &h->DPB[i]);
+        av_frame_free(&h->DPB[i].f);
     }
 
     h->cur_pic_ptr = NULL;
@@ -1725,6 +1741,7 @@ static av_cold int h264_decode_end(AVCodecContext *avctx)
     ff_h264_free_context(h);
 
     ff_h264_unref_picture(h, &h->cur_pic);
+    av_frame_free(&h->cur_pic.f);
 
     return 0;
 }