git.sesse.net Git - ffmpeg/commitdiff
Merge commit 'f651c6a259d4bc78f25db11d25df9256d5110bd3'
author    Clément Bœsch <u@pkh.me>
          Fri, 29 Jul 2016 08:39:57 +0000 (10:39 +0200)
committer Clément Bœsch <u@pkh.me>
          Fri, 29 Jul 2016 08:39:57 +0000 (10:39 +0200)
* commit 'f651c6a259d4bc78f25db11d25df9256d5110bd3':
  h264: factor out setting frame properties / side data

Merged-by: Clément Bœsch <u@pkh.me>
libavcodec/h264.c
libavcodec/h264_slice.c

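The merged commit moves the export of frame properties and side data (interlacing flags, repeat_pict, stereo 3D packing, display orientation, AFD, A53 closed captions) out of decode_postinit() in h264.c and into a new h264_export_frame_props() helper in h264_slice.c, as the two diffs below show. As a hedged illustration only (the helper name inspect_h264_frame_props and the printf reporting are mine, not part of FFmpeg), this is roughly how an application would read back the side data that this code attaches to each decoded AVFrame:

    #include <stdio.h>
    #include <libavutil/frame.h>
    #include <libavutil/stereo3d.h>
    #include <libavutil/display.h>

    /* Illustrative consumer of the side data exported by the decoder. */
    static void inspect_h264_frame_props(const AVFrame *frame)
    {
        AVFrameSideData *sd;

        /* Interlacing hints derived from picture timing SEI / decoding process. */
        printf("interlaced=%d tff=%d repeat_pict=%d\n",
               frame->interlaced_frame, frame->top_field_first, frame->repeat_pict);

        /* Stereo packing from the frame packing arrangement SEI. */
        if ((sd = av_frame_get_side_data(frame, AV_FRAME_DATA_STEREO3D))) {
            const AVStereo3D *s = (const AVStereo3D *)sd->data;
            printf("stereo3d type=%d inverted=%d\n",
                   s->type, !!(s->flags & AV_STEREO3D_FLAG_INVERT));
        }

        /* Rotation/flip matrix from the display orientation SEI. */
        if ((sd = av_frame_get_side_data(frame, AV_FRAME_DATA_DISPLAYMATRIX)))
            printf("rotation=%.1f degrees\n",
                   av_display_rotation_get((const int32_t *)sd->data));

        /* Active format description and CEA-708 closed captions. */
        if ((sd = av_frame_get_side_data(frame, AV_FRAME_DATA_AFD)))
            printf("afd=0x%02x\n", sd->data[0]);
        if ((sd = av_frame_get_side_data(frame, AV_FRAME_DATA_A53_CC)))
            printf("a53 captions: %d bytes\n", (int)sd->size);
    }
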
diff --combined libavcodec/h264.c
index c2c8727bf668ad51b1466d9f257b58e7005491fe,7949ee66ac5195c2996ff0bc1daf1f7e403cd344..6f71d78f737ad552fd8826ef5ffa49fda033a340
@@@ -491,162 -446,9 +491,15 @@@ static void decode_postinit(H264Contex
           * yet, so we assume the worst for now. */
          // if (setup_finished)
          //    ff_thread_finish_setup(h->avctx);
 -        return;
 +        if (cur->field_poc[0] == INT_MAX && cur->field_poc[1] == INT_MAX)
 +            return;
 +        if (h->avctx->hwaccel || h->missing_fields <=1)
 +            return;
      }
  
-     cur->f->interlaced_frame = 0;
-     cur->f->repeat_pict      = 0;
-     /* Signal interlacing information externally. */
-     /* Prioritize picture timing SEI information over used
-      * decoding process if it exists. */
-     if (sps->pic_struct_present_flag) {
-         H264SEIPictureTiming *pt = &h->sei.picture_timing;
-         switch (pt->pic_struct) {
-         case SEI_PIC_STRUCT_FRAME:
-             break;
-         case SEI_PIC_STRUCT_TOP_FIELD:
-         case SEI_PIC_STRUCT_BOTTOM_FIELD:
-             cur->f->interlaced_frame = 1;
-             break;
-         case SEI_PIC_STRUCT_TOP_BOTTOM:
-         case SEI_PIC_STRUCT_BOTTOM_TOP:
-             if (FIELD_OR_MBAFF_PICTURE(h))
-                 cur->f->interlaced_frame = 1;
-             else
-                 // try to flag soft telecine progressive
-                 cur->f->interlaced_frame = h->prev_interlaced_frame;
-             break;
-         case SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
-         case SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
-             /* Signal the possibility of telecined film externally
-              * (pic_struct 5,6). From these hints, let the applications
-              * decide if they apply deinterlacing. */
-             cur->f->repeat_pict = 1;
-             break;
-         case SEI_PIC_STRUCT_FRAME_DOUBLING:
-             cur->f->repeat_pict = 2;
-             break;
-         case SEI_PIC_STRUCT_FRAME_TRIPLING:
-             cur->f->repeat_pict = 4;
-             break;
-         }
-         if ((pt->ct_type & 3) &&
-             pt->pic_struct <= SEI_PIC_STRUCT_BOTTOM_TOP)
-             cur->f->interlaced_frame = (pt->ct_type & (1 << 1)) != 0;
-     } else {
-         /* Derive interlacing flag from used decoding process. */
-         cur->f->interlaced_frame = FIELD_OR_MBAFF_PICTURE(h);
-     }
-     h->prev_interlaced_frame = cur->f->interlaced_frame;
-     if (cur->field_poc[0] != cur->field_poc[1]) {
-         /* Derive top_field_first from field pocs. */
-         cur->f->top_field_first = cur->field_poc[0] < cur->field_poc[1];
-     } else {
-         if (sps->pic_struct_present_flag) {
-             /* Use picture timing SEI information. Even if it is a
-              * information of a past frame, better than nothing. */
-             if (h->sei.picture_timing.pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM ||
-                 h->sei.picture_timing.pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM_TOP)
-                 cur->f->top_field_first = 1;
-             else
-                 cur->f->top_field_first = 0;
-         } else if (cur->f->interlaced_frame) {
-             /* Default to top field first when pic_struct_present_flag
-              * is not set but interlaced frame detected */
-             cur->f->top_field_first = 1;
-         } else {
-             /* Most likely progressive */
-             cur->f->top_field_first = 0;
-         }
-     }
-     if (h->sei.frame_packing.present &&
-         h->sei.frame_packing.frame_packing_arrangement_type <= 6 &&
-         h->sei.frame_packing.content_interpretation_type > 0 &&
-         h->sei.frame_packing.content_interpretation_type < 3) {
-         H264SEIFramePacking *fp = &h->sei.frame_packing;
-         AVStereo3D *stereo = av_stereo3d_create_side_data(cur->f);
-         if (stereo) {
-         switch (fp->frame_packing_arrangement_type) {
-         case 0:
-             stereo->type = AV_STEREO3D_CHECKERBOARD;
-             break;
-         case 1:
-             stereo->type = AV_STEREO3D_COLUMNS;
-             break;
-         case 2:
-             stereo->type = AV_STEREO3D_LINES;
-             break;
-         case 3:
-             if (fp->quincunx_sampling_flag)
-                 stereo->type = AV_STEREO3D_SIDEBYSIDE_QUINCUNX;
-             else
-                 stereo->type = AV_STEREO3D_SIDEBYSIDE;
-             break;
-         case 4:
-             stereo->type = AV_STEREO3D_TOPBOTTOM;
-             break;
-         case 5:
-             stereo->type = AV_STEREO3D_FRAMESEQUENCE;
-             break;
-         case 6:
-             stereo->type = AV_STEREO3D_2D;
-             break;
-         }
-         if (fp->content_interpretation_type == 2)
-             stereo->flags = AV_STEREO3D_FLAG_INVERT;
-         }
-     }
-     if (h->sei.display_orientation.present &&
-         (h->sei.display_orientation.anticlockwise_rotation ||
-          h->sei.display_orientation.hflip ||
-          h->sei.display_orientation.vflip)) {
-         H264SEIDisplayOrientation *o = &h->sei.display_orientation;
-         double angle = o->anticlockwise_rotation * 360 / (double) (1 << 16);
-         AVFrameSideData *rotation = av_frame_new_side_data(cur->f,
-                                                            AV_FRAME_DATA_DISPLAYMATRIX,
-                                                            sizeof(int32_t) * 9);
-         if (rotation) {
-             av_display_rotation_set((int32_t *)rotation->data, angle);
-             av_display_matrix_flip((int32_t *)rotation->data,
-                                    o->hflip, o->vflip);
-         }
-     }
-     if (h->sei.afd.present) {
-         AVFrameSideData *sd = av_frame_new_side_data(cur->f, AV_FRAME_DATA_AFD,
-                                                      sizeof(uint8_t));
-         if (sd) {
-             *sd->data = h->sei.afd.active_format_description;
-             h->sei.afd.present = 0;
-         }
-     }
-     if (h->sei.a53_caption.a53_caption) {
-         H264SEIA53Caption *a53 = &h->sei.a53_caption;
-         AVFrameSideData *sd = av_frame_new_side_data(cur->f,
-                                                      AV_FRAME_DATA_A53_CC,
-                                                      a53->a53_caption_size);
-         if (sd)
-             memcpy(sd->data, a53->a53_caption, a53->a53_caption_size);
-         av_freep(&a53->a53_caption);
-         a53->a53_caption_size = 0;
-         h->avctx->properties |= FF_CODEC_PROPERTY_CLOSED_CAPTIONS;
-     }
 +    cur->mmco_reset = h->mmco_reset;
 +    h->mmco_reset = 0;
 +
      // FIXME do something with unavailable reference frames
  
      /* Sort B-frames into display order */
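The removed code in the hunk above mapped pic_struct from the picture timing SEI onto AVFrame.repeat_pict: 1 for the soft-telecine structures (pic_struct 5/6), 2 for frame doubling, 4 for frame tripling. Those values follow the AVFrame convention that repeat_pict counts extra field durations (extra_delay = repeat_pict / (2*fps)). A minimal sketch of what that means for display timing; frame_display_duration() is a hypothetical helper, not FFmpeg API:

    /* Hypothetical helper: seconds a decoded picture should stay on screen,
     * given the repeat_pict value set by the decoder. A frame normally
     * covers two field durations; repeat_pict adds extra fields:
     *   1 -> soft telecine (3 fields), 2 -> frame doubling (2 frames),
     *   4 -> frame tripling (3 frames). */
    static double frame_display_duration(int repeat_pict, double fps)
    {
        double field_duration = 1.0 / (2.0 * fps);
        return (2 + repeat_pict) * field_duration;
    }
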
diff --combined libavcodec/h264_slice.c
index 7dcfd843d2d70cef96b6e95bbe3975c024dd97d5,6a89a416ac31a94e9d8e5410b4f73d41bc27c56e..66e3c885f9e166c41d6c9b980ee88c6e394c7b45
@@@ -1124,6 -986,161 +1126,161 @@@ static int h264_init_ps(H264Context *h
      return 0;
  }
  
 -        if (cur->f->interlaced_frame || sps->pic_struct_present_flag) {
+ static int h264_export_frame_props(H264Context *h)
+ {
+     const SPS *sps = h->ps.sps;
+     H264Picture *cur = h->cur_pic_ptr;
+     cur->f->interlaced_frame = 0;
+     cur->f->repeat_pict      = 0;
+     /* Signal interlacing information externally. */
+     /* Prioritize picture timing SEI information over used
+      * decoding process if it exists. */
+     if (sps->pic_struct_present_flag) {
+         H264SEIPictureTiming *pt = &h->sei.picture_timing;
+         switch (pt->pic_struct) {
+         case SEI_PIC_STRUCT_FRAME:
+             break;
+         case SEI_PIC_STRUCT_TOP_FIELD:
+         case SEI_PIC_STRUCT_BOTTOM_FIELD:
+             cur->f->interlaced_frame = 1;
+             break;
+         case SEI_PIC_STRUCT_TOP_BOTTOM:
+         case SEI_PIC_STRUCT_BOTTOM_TOP:
+             if (FIELD_OR_MBAFF_PICTURE(h))
+                 cur->f->interlaced_frame = 1;
+             else
+                 // try to flag soft telecine progressive
+                 cur->f->interlaced_frame = h->prev_interlaced_frame;
+             break;
+         case SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
+         case SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
+             /* Signal the possibility of telecined film externally
+              * (pic_struct 5,6). From these hints, let the applications
+              * decide if they apply deinterlacing. */
+             cur->f->repeat_pict = 1;
+             break;
+         case SEI_PIC_STRUCT_FRAME_DOUBLING:
+             cur->f->repeat_pict = 2;
+             break;
+         case SEI_PIC_STRUCT_FRAME_TRIPLING:
+             cur->f->repeat_pict = 4;
+             break;
+         }
+         if ((pt->ct_type & 3) &&
+             pt->pic_struct <= SEI_PIC_STRUCT_BOTTOM_TOP)
+             cur->f->interlaced_frame = (pt->ct_type & (1 << 1)) != 0;
+     } else {
+         /* Derive interlacing flag from used decoding process. */
+         cur->f->interlaced_frame = FIELD_OR_MBAFF_PICTURE(h);
+     }
+     h->prev_interlaced_frame = cur->f->interlaced_frame;
+     if (cur->field_poc[0] != cur->field_poc[1]) {
+         /* Derive top_field_first from field pocs. */
+         cur->f->top_field_first = cur->field_poc[0] < cur->field_poc[1];
+     } else {
 -        h->sei.frame_packing.arrangement_type >= 0 &&
 -        h->sei.frame_packing.arrangement_type <= 6 &&
++        if (sps->pic_struct_present_flag) {
+             /* Use picture timing SEI information. Even if it is a
+              * information of a past frame, better than nothing. */
+             if (h->sei.picture_timing.pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM ||
+                 h->sei.picture_timing.pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM_TOP)
+                 cur->f->top_field_first = 1;
+             else
+                 cur->f->top_field_first = 0;
++        } else if (cur->f->interlaced_frame) {
++            /* Default to top field first when pic_struct_present_flag
++             * is not set but interlaced frame detected */
++            cur->f->top_field_first = 1;
+         } else {
+             /* Most likely progressive */
+             cur->f->top_field_first = 0;
+         }
+     }
+     if (h->sei.frame_packing.present &&
 -        if (!stereo)
 -            return AVERROR(ENOMEM);
 -
 -        switch (fp->arrangement_type) {
++        h->sei.frame_packing.frame_packing_arrangement_type <= 6 &&
+         h->sei.frame_packing.content_interpretation_type > 0 &&
+         h->sei.frame_packing.content_interpretation_type < 3) {
+         H264SEIFramePacking *fp = &h->sei.frame_packing;
+         AVStereo3D *stereo = av_stereo3d_create_side_data(cur->f);
 -            if (fp->quincunx_subsampling)
++        if (stereo) {
++        switch (fp->frame_packing_arrangement_type) {
+         case 0:
+             stereo->type = AV_STEREO3D_CHECKERBOARD;
+             break;
+         case 1:
+             stereo->type = AV_STEREO3D_COLUMNS;
+             break;
+         case 2:
+             stereo->type = AV_STEREO3D_LINES;
+             break;
+         case 3:
 -        if (!rotation)
 -            return AVERROR(ENOMEM);
 -
 -        av_display_rotation_set((int32_t *)rotation->data, angle);
 -        av_display_matrix_flip((int32_t *)rotation->data,
 -                               o->hflip, o->vflip);
++            if (fp->quincunx_sampling_flag)
+                 stereo->type = AV_STEREO3D_SIDEBYSIDE_QUINCUNX;
+             else
+                 stereo->type = AV_STEREO3D_SIDEBYSIDE;
+             break;
+         case 4:
+             stereo->type = AV_STEREO3D_TOPBOTTOM;
+             break;
+         case 5:
+             stereo->type = AV_STEREO3D_FRAMESEQUENCE;
+             break;
+         case 6:
+             stereo->type = AV_STEREO3D_2D;
+             break;
+         }
+         if (fp->content_interpretation_type == 2)
+             stereo->flags = AV_STEREO3D_FLAG_INVERT;
++        }
+     }
+     if (h->sei.display_orientation.present &&
+         (h->sei.display_orientation.anticlockwise_rotation ||
+          h->sei.display_orientation.hflip ||
+          h->sei.display_orientation.vflip)) {
+         H264SEIDisplayOrientation *o = &h->sei.display_orientation;
+         double angle = o->anticlockwise_rotation * 360 / (double) (1 << 16);
+         AVFrameSideData *rotation = av_frame_new_side_data(cur->f,
+                                                            AV_FRAME_DATA_DISPLAYMATRIX,
+                                                            sizeof(int32_t) * 9);
 -        if (!sd)
 -            return AVERROR(ENOMEM);
++        if (rotation) {
++            av_display_rotation_set((int32_t *)rotation->data, angle);
++            av_display_matrix_flip((int32_t *)rotation->data,
++                                   o->hflip, o->vflip);
++        }
+     }
+     if (h->sei.afd.present) {
+         AVFrameSideData *sd = av_frame_new_side_data(cur->f, AV_FRAME_DATA_AFD,
+                                                      sizeof(uint8_t));
 -        *sd->data = h->sei.afd.active_format_description;
 -        h->sei.afd.present = 0;
 -        if (!sd)
 -            return AVERROR(ENOMEM);
 -
 -        memcpy(sd->data, a53->a53_caption, a53->a53_caption_size);
++        if (sd) {
++            *sd->data = h->sei.afd.active_format_description;
++            h->sei.afd.present = 0;
++        }
+     }
+     if (h->sei.a53_caption.a53_caption) {
+         H264SEIA53Caption *a53 = &h->sei.a53_caption;
+         AVFrameSideData *sd = av_frame_new_side_data(cur->f,
+                                                      AV_FRAME_DATA_A53_CC,
+                                                      a53->a53_caption_size);
++        if (sd)
++            memcpy(sd->data, a53->a53_caption, a53->a53_caption_size);
+         av_freep(&a53->a53_caption);
+         a53->a53_caption_size = 0;
++        h->avctx->properties |= FF_CODEC_PROPERTY_CLOSED_CAPTIONS;
+     }
+     return 0;
+ }
  /* This function is called right after decoding the slice header for a first
   * slice in a field (or a frame). It decides whether we are decoding a new frame
   * or a second field in a pair and does the necessary setup.
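
The display orientation handling in h264_export_frame_props() above converts the SEI's anticlockwise_rotation, a 16.16 fixed-point fraction of a full turn, into degrees (value * 360 / 65536) before building the 3x3 display matrix. A small sketch of that round trip using the public libavutil/display.h helpers; the values and the function name are made up for illustration:

    #include <libavutil/display.h>

    /* Illustrative only: build and read back a display matrix the same way
     * h264_export_frame_props() does. */
    static void display_matrix_example(void)
    {
        int32_t matrix[9];
        int anticlockwise_rotation = 0x4000;                    /* hypothetical SEI value */
        double angle = anticlockwise_rotation * 360 / (double)(1 << 16); /* 90.0 degrees */

        av_display_rotation_set(matrix, angle);                 /* counter-clockwise degrees */
        av_display_matrix_flip(matrix, 0, 1);                   /* hflip=0, vflip=1 */
        /* av_display_rotation_get(matrix) now reports the effective rotation. */
    }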