diff --git a/libavcodec/h264_slice.c b/libavcodec/h264_slice.c
index 5a8a4a7f867c8928088e1dc356b9251547bd1bb2..c7b2764270599857a4d022825cb03b5195cc29b1 100644
--- a/libavcodec/h264_slice.c
+++ b/libavcodec/h264_slice.c
@@ -243,6 +243,15 @@ static int alloc_picture(H264Context *h, H264Picture *pic)
         pic->ref_index[i]  = pic->ref_index_buf[i]->data;
     }
 
+    pic->pps_buf = av_buffer_ref(h->ps.pps_ref);
+    if (!pic->pps_buf)
+        goto fail;
+    pic->pps = (const PPS*)pic->pps_buf->data;
+
+    pic->mb_width  = h->mb_width;
+    pic->mb_height = h->mb_height;
+    pic->mb_stride = h->mb_stride;
+
     return 0;
 fail:
     ff_h264_unref_picture(h, pic);
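The members used above (pps_buf, pps, mb_width, mb_height, mb_stride) are new H264Picture fields; their declaration lives in h264dec.h and is not part of this blobdiff. As a rough sketch of what the struct gains (ordering and comments are assumed, not taken from the patch):

    typedef struct H264Picture {
        /* ... existing members ... */

        /* Per-picture reference to the active PPS plus the macroblock
         * geometry it was decoded with, so the picture stays self-contained
         * if the active parameter sets or mb dimensions change before it
         * is fully reconstructed or output. */
        AVBufferRef *pps_buf;
        const PPS   *pps;

        int mb_width, mb_height, mb_stride;
    } H264Picture;

Taking an av_buffer_ref() on h->ps.pps_ref rather than copying the PPS keeps the parameter set alive for as long as any picture references it, at the cost of one extra refcount per allocated picture.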
@@ -1116,9 +1125,10 @@ static int h264_export_frame_props(H264Context *h)
 {
     const SPS *sps = h->ps.sps;
     H264Picture *cur = h->cur_pic_ptr;
+    AVFrame *out = cur->f;
 
-    cur->f->interlaced_frame = 0;
-    cur->f->repeat_pict      = 0;
+    out->interlaced_frame = 0;
+    out->repeat_pict      = 0;
 
     /* Signal interlacing information externally. */
     /* Prioritize picture timing SEI information over used
@@ -1141,59 +1151,59 @@ static int h264_export_frame_props(H264Context *h)
             break;
         case H264_SEI_PIC_STRUCT_TOP_FIELD:
         case H264_SEI_PIC_STRUCT_BOTTOM_FIELD:
-            cur->f->interlaced_frame = 1;
+            out->interlaced_frame = 1;
             break;
         case H264_SEI_PIC_STRUCT_TOP_BOTTOM:
         case H264_SEI_PIC_STRUCT_BOTTOM_TOP:
             if (FIELD_OR_MBAFF_PICTURE(h))
-                cur->f->interlaced_frame = 1;
+                out->interlaced_frame = 1;
             else
                 // try to flag soft telecine progressive
-                cur->f->interlaced_frame = h->prev_interlaced_frame;
+                out->interlaced_frame = h->prev_interlaced_frame;
             break;
         case H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
         case H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
             /* Signal the possibility of telecined film externally
              * (pic_struct 5,6). From these hints, let the applications
              * decide if they apply deinterlacing. */
-            cur->f->repeat_pict = 1;
+            out->repeat_pict = 1;
             break;
         case H264_SEI_PIC_STRUCT_FRAME_DOUBLING:
-            cur->f->repeat_pict = 2;
+            out->repeat_pict = 2;
             break;
         case H264_SEI_PIC_STRUCT_FRAME_TRIPLING:
-            cur->f->repeat_pict = 4;
+            out->repeat_pict = 4;
             break;
         }
 
         if ((pt->ct_type & 3) &&
             pt->pic_struct <= H264_SEI_PIC_STRUCT_BOTTOM_TOP)
-            cur->f->interlaced_frame = (pt->ct_type & (1 << 1)) != 0;
+            out->interlaced_frame = (pt->ct_type & (1 << 1)) != 0;
     } else {
         /* Derive interlacing flag from used decoding process. */
-        cur->f->interlaced_frame = FIELD_OR_MBAFF_PICTURE(h);
+        out->interlaced_frame = FIELD_OR_MBAFF_PICTURE(h);
     }
-    h->prev_interlaced_frame = cur->f->interlaced_frame;
+    h->prev_interlaced_frame = out->interlaced_frame;
 
     if (cur->field_poc[0] != cur->field_poc[1]) {
         /* Derive top_field_first from field pocs. */
-        cur->f->top_field_first = cur->field_poc[0] < cur->field_poc[1];
+        out->top_field_first = cur->field_poc[0] < cur->field_poc[1];
     } else {
         if (sps->pic_struct_present_flag && h->sei.picture_timing.present) {
             /* Use picture timing SEI information. Even if it is
              * information of a past frame, better than nothing. */
             if (h->sei.picture_timing.pic_struct == H264_SEI_PIC_STRUCT_TOP_BOTTOM ||
                 h->sei.picture_timing.pic_struct == H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP)
-                cur->f->top_field_first = 1;
+                out->top_field_first = 1;
             else
-                cur->f->top_field_first = 0;
-        } else if (cur->f->interlaced_frame) {
+                out->top_field_first = 0;
+        } else if (out->interlaced_frame) {
             /* Default to top field first when pic_struct_present_flag
              * is not set but interlaced frame detected */
-            cur->f->top_field_first = 1;
+            out->top_field_first = 1;
         } else {
             /* Most likely progressive */
-            cur->f->top_field_first = 0;
+            out->top_field_first = 0;
         }
     }
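Everything this function exports lands on public AVFrame fields, so API users can react to the interlacing hints without touching decoder internals. A minimal consumer-side sketch, illustrative only:

    #include <stdio.h>
    #include <libavutil/frame.h>

    /* Log the interlacing hints set by h264_export_frame_props(); a player
     * would use them to pick the field order for deinterlacing and to
     * stretch the display duration of soft-telecined or doubled frames. */
    static void report_frame_props(const AVFrame *frame)
    {
        if (frame->interlaced_frame)
            printf("interlaced, %s field first\n",
                   frame->top_field_first ? "top" : "bottom");
        if (frame->repeat_pict)
            printf("extend display duration by %d half frame period(s)\n",
                   frame->repeat_pict);
    }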
 
@@ -1202,7 +1212,7 @@ static int h264_export_frame_props(H264Context *h)
         h->sei.frame_packing.content_interpretation_type > 0 &&
         h->sei.frame_packing.content_interpretation_type < 3) {
         H264SEIFramePacking *fp = &h->sei.frame_packing;
-        AVStereo3D *stereo = av_stereo3d_create_side_data(cur->f);
+        AVStereo3D *stereo = av_stereo3d_create_side_data(out);
         if (stereo) {
         switch (fp->arrangement_type) {
         case H264_SEI_FPA_TYPE_CHECKERBOARD:
@@ -1249,7 +1259,7 @@ static int h264_export_frame_props(H264Context *h)
          h->sei.display_orientation.vflip)) {
         H264SEIDisplayOrientation *o = &h->sei.display_orientation;
         double angle = o->anticlockwise_rotation * 360 / (double) (1 << 16);
-        AVFrameSideData *rotation = av_frame_new_side_data(cur->f,
+        AVFrameSideData *rotation = av_frame_new_side_data(out,
                                                            AV_FRAME_DATA_DISPLAYMATRIX,
                                                            sizeof(int32_t) * 9);
         if (rotation) {
@@ -1260,7 +1270,7 @@ static int h264_export_frame_props(H264Context *h)
     }
 
     if (h->sei.afd.present) {
-        AVFrameSideData *sd = av_frame_new_side_data(cur->f, AV_FRAME_DATA_AFD,
+        AVFrameSideData *sd = av_frame_new_side_data(out, AV_FRAME_DATA_AFD,
                                                      sizeof(uint8_t));
 
         if (sd) {
@@ -1272,7 +1282,7 @@ static int h264_export_frame_props(H264Context *h)
     if (h->sei.a53_caption.buf_ref) {
         H264SEIA53Caption *a53 = &h->sei.a53_caption;
 
-        AVFrameSideData *sd = av_frame_new_side_data_from_buf(cur->f, AV_FRAME_DATA_A53_CC, a53->buf_ref);
+        AVFrameSideData *sd = av_frame_new_side_data_from_buf(out, AV_FRAME_DATA_A53_CC, a53->buf_ref);
         if (!sd)
             av_buffer_unref(&a53->buf_ref);
         a53->buf_ref = NULL;
@@ -1280,11 +1290,25 @@ static int h264_export_frame_props(H264Context *h)
         h->avctx->properties |= FF_CODEC_PROPERTY_CLOSED_CAPTIONS;
     }
 
+    for (int i = 0; i < h->sei.unregistered.nb_buf_ref; i++) {
+        H264SEIUnregistered *unreg = &h->sei.unregistered;
+
+        if (unreg->buf_ref[i]) {
+            AVFrameSideData *sd = av_frame_new_side_data_from_buf(out,
+                    AV_FRAME_DATA_SEI_UNREGISTERED,
+                    unreg->buf_ref[i]);
+            if (!sd)
+                av_buffer_unref(&unreg->buf_ref[i]);
+            unreg->buf_ref[i] = NULL;
+        }
+    }
+    h->sei.unregistered.nb_buf_ref = 0;
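Each exported buffer becomes one AV_FRAME_DATA_SEI_UNREGISTERED side-data entry on the output frame; per the H.264 spec the payload starts with a 16-byte uuid_iso_iec_11578 identifier followed by opaque user data (this is, for example, how x264's encoder settings string reaches the application). A minimal consumer-side sketch, illustrative only:

    #include <stdio.h>
    #include <libavutil/frame.h>

    /* Walk a decoded frame's side data and report every unregistered SEI
     * payload; multiple entries per frame are possible, so the side_data
     * array is scanned instead of using av_frame_get_side_data(). */
    static void dump_unregistered_sei(const AVFrame *frame)
    {
        for (int i = 0; i < frame->nb_side_data; i++) {
            const AVFrameSideData *sd = frame->side_data[i];
            if (sd->type != AV_FRAME_DATA_SEI_UNREGISTERED || sd->size < 16)
                continue;
            printf("SEI user data unregistered: %d payload bytes after the UUID\n",
                   (int)(sd->size - 16));
        }
    }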
+
     if (h->sei.picture_timing.timecode_cnt > 0) {
-        uint32_t tc = 0;
         uint32_t *tc_sd;
+        char tcbuf[AV_TIMECODE_STR_SIZE];
 
-        AVFrameSideData *tcside = av_frame_new_side_data(cur->f,
+        AVFrameSideData *tcside = av_frame_new_side_data(out,
                                                          AV_FRAME_DATA_S12M_TIMECODE,
                                                          sizeof(uint32_t)*4);
         if (!tcside)
@@ -1294,33 +1318,15 @@ static int h264_export_frame_props(H264Context *h)
         tc_sd[0] = h->sei.picture_timing.timecode_cnt;
 
         for (int i = 0; i < tc_sd[0]; i++) {
-            uint32_t frames;
-
-            /* For SMPTE 12-M timecodes, frame count is a special case if > 30 FPS.
-               See SMPTE ST 12-1:2014 Sec 12.1 for more info. */
-            if (av_cmp_q(h->avctx->framerate, (AVRational) {30, 1}) == 1) {
-                frames = h->sei.picture_timing.timecode[i].frame / 2;
-                if (h->sei.picture_timing.timecode[i].frame % 2 == 1) {
-                    if (av_cmp_q(h->avctx->framerate, (AVRational) {50, 1}) == 0)
-                        tc |= (1 << 7);
-                    else
-                        tc |= (1 << 23);
-                }
-            } else {
-                frames = h->sei.picture_timing.timecode[i].frame;
-            }
-
-            tc |= h->sei.picture_timing.timecode[i].dropframe << 30;
-            tc |= (frames / 10) << 28;
-            tc |= (frames % 10) << 24;
-            tc |= (h->sei.picture_timing.timecode[i].seconds / 10) << 20;
-            tc |= (h->sei.picture_timing.timecode[i].seconds % 10) << 16;
-            tc |= (h->sei.picture_timing.timecode[i].minutes / 10) << 12;
-            tc |= (h->sei.picture_timing.timecode[i].minutes % 10) << 8;
-            tc |= (h->sei.picture_timing.timecode[i].hours / 10) << 4;
-            tc |= (h->sei.picture_timing.timecode[i].hours % 10);
-
-            tc_sd[i + 1] = tc;
+            int drop = h->sei.picture_timing.timecode[i].dropframe;
+            int   hh = h->sei.picture_timing.timecode[i].hours;
+            int   mm = h->sei.picture_timing.timecode[i].minutes;
+            int   ss = h->sei.picture_timing.timecode[i].seconds;
+            int   ff = h->sei.picture_timing.timecode[i].frame;
+
+            tc_sd[i + 1] = av_timecode_get_smpte(h->avctx->framerate, drop, hh, mm, ss, ff);
+            av_timecode_make_smpte_tc_string(tcbuf, tc_sd[i + 1], 0);
+            av_dict_set(&out->metadata, "timecode", tcbuf, 0);
         }
         h->sei.picture_timing.timecode_cnt = 0;
     }
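The hand-rolled bit packing removed above is, in effect, what av_timecode_get_smpte() (libavutil/timecode.h) does internally, including the SMPTE ST 12-1 field-flag handling for frame rates above 30 fps; the slice code now additionally renders the packed value into the frame's "timecode" metadata entry. A standalone sketch of the same two helper calls, with made-up 29.97 fps drop-frame values:

    #include <stdio.h>
    #include <libavutil/rational.h>
    #include <libavutil/timecode.h>

    int main(void)
    {
        /* Pack 01:02:03;04 at 30000/1001 fps with the drop-frame flag set,
         * as the loop above does for each SEI timecode entry. */
        uint32_t tc = av_timecode_get_smpte((AVRational){ 30000, 1001 },
                                            1, 1, 2, 3, 4);
        char buf[AV_TIMECODE_STR_SIZE];

        av_timecode_make_smpte_tc_string(buf, tc, 0);
        printf("%s\n", buf); /* prints 01:02:03;04 */
        return 0;
    }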