git.sesse.net Git - ffmpeg/commitdiff
Merge commit 'e481458bc308ee838deaeacac51929514762e7a7'
author    Derek Buitenhuis <derek.buitenhuis@gmail.com>
          Tue, 26 Apr 2016 14:04:29 +0000 (15:04 +0100)
committer Derek Buitenhuis <derek.buitenhuis@gmail.com>
          Tue, 26 Apr 2016 14:04:29 +0000 (15:04 +0100)
* commit 'e481458bc308ee838deaeacac51929514762e7a7':
  h264: factor out pred weight table parsing into a separate file

Merged-by: Derek Buitenhuis <derek.buitenhuis@gmail.com>
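
For context, the merged change moves parsing of the H.264 pred_weight_table() syntax element (explicit weighted-prediction weights and offsets per reference picture) out of the slice code and into the new libavcodec/h264_parse.c / h264_parse.h, which the file list below reflects. A minimal illustrative sketch of that syntax, per H.264 spec section 7.3.3.2, is shown here; BitReader, get_bit(), get_ue() and get_se() are hypothetical stand-ins, not FFmpeg's GetBitContext API, and the real factored-out function also handles defaults, field/MBAFF cases and error checking.

    /* Illustrative sketch of pred_weight_table() parsing (H.264 spec 7.3.3.2).
     * Hypothetical bit-reader helpers; defaults (weight = 1 << denom, offset = 0
     * when a flag is absent) and error handling are omitted for brevity. */
    typedef struct PredWeights {
        int luma_log2_weight_denom;
        int chroma_log2_weight_denom;
        int luma_weight[2][32], luma_offset[2][32];
        int chroma_weight[2][32][2], chroma_offset[2][32][2];
    } PredWeights;

    static void parse_pred_weight_table(BitReader *br, int chroma_format_idc,
                                        int is_b_slice, const int ref_count[2],
                                        PredWeights *w)
    {
        w->luma_log2_weight_denom = get_ue(br);
        if (chroma_format_idc)
            w->chroma_log2_weight_denom = get_ue(br);

        for (int list = 0; list < (is_b_slice ? 2 : 1); list++) {
            for (int i = 0; i < ref_count[list]; i++) {
                if (get_bit(br)) {                      /* luma_weight_lX_flag */
                    w->luma_weight[list][i] = get_se(br);
                    w->luma_offset[list][i] = get_se(br);
                }
                if (chroma_format_idc && get_bit(br)) { /* chroma_weight_lX_flag */
                    for (int j = 0; j < 2; j++) {
                        w->chroma_weight[list][i][j] = get_se(br);
                        w->chroma_offset[list][i][j] = get_se(br);
                    }
                }
            }
        }
    }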
12 files changed:
libavcodec/Makefile
libavcodec/dxva2_h264.c
libavcodec/h264.c
libavcodec/h264.h
libavcodec/h264_mb.c
libavcodec/h264_mc_template.c
libavcodec/h264_parse.c
libavcodec/h264_parse.h
libavcodec/h264_parser.c
libavcodec/h264_refs.c
libavcodec/h264_slice.c
libavcodec/vaapi_h264.c

diff --combined libavcodec/Makefile
index 36b65f758e6c12e84f02f0d581be1712432be552,54b12d35d359e2091b9d2ab55a47f89dcb10912b..4cda4dcb8d74631266c9be96fea47971a70a0953
@@@ -1,27 -1,20 +1,27 @@@
 +include $(SUBDIR)../config.mak
 +
  NAME = avcodec
  
  HEADERS = avcodec.h                                                     \
 +          avdct.h                                                       \
            avfft.h                                                       \
            dv_profile.h                                                  \
            d3d11va.h                                                     \
            dirac.h                                                       \
            dxva2.h                                                       \
 +          jni.h                                                         \
            qsv.h                                                         \
            vaapi.h                                                       \
            vda.h                                                         \
            vdpau.h                                                       \
            version.h                                                     \
 +          videotoolbox.h                                                \
            vorbis_parser.h                                               \
            xvmc.h                                                        \
  
  OBJS = allcodecs.o                                                      \
 +       audioconvert.o                                                   \
 +       avdct.o                                                          \
         avpacket.o                                                       \
         avpicture.o                                                      \
         bitstream.o                                                      \
         dirac.o                                                          \
         dv_profile.o                                                     \
         imgconvert.o                                                     \
 -       log2_tab.o                                                       \
 +       jni.o                                                            \
         mathtables.o                                                     \
         options.o                                                        \
         parser.o                                                         \
         profiles.o                                                       \
         qsv_api.o                                                        \
         raw.o                                                            \
 +       resample.o                                                       \
 +       resample2.o                                                      \
         utils.o                                                          \
         vorbis_parser.o                                                  \
         xiph.o                                                           \
@@@ -54,16 -45,13 +54,16 @@@ OBJS-$(CONFIG_AUDIODSP)                
  OBJS-$(CONFIG_BLOCKDSP)                += blockdsp.o
  OBJS-$(CONFIG_BSWAPDSP)                += bswapdsp.o
  OBJS-$(CONFIG_CABAC)                   += cabac.o
 +OBJS-$(CONFIG_CRYSTALHD)               += crystalhd.o
  OBJS-$(CONFIG_DCT)                     += dct.o dct32_fixed.o dct32_float.o
  OBJS-$(CONFIG_ERROR_RESILIENCE)        += error_resilience.o
 +OBJS-$(CONFIG_EXIF)                    += exif.o tiff_common.o
  OBJS-$(CONFIG_FAANDCT)                 += faandct.o
  OBJS-$(CONFIG_FAANIDCT)                += faanidct.o
  OBJS-$(CONFIG_FDCTDSP)                 += fdctdsp.o jfdctfst.o jfdctint.o
  FFT-OBJS-$(CONFIG_HARDCODED_TABLES)    += cos_tables.o cos_fixed_tables.o
  OBJS-$(CONFIG_FFT)                     += avfft.o fft_fixed.o fft_float.o \
 +                                          fft_fixed_32.o fft_init_table.o \
                                            $(FFT-OBJS-yes)
  OBJS-$(CONFIG_FLACDSP)                 += flacdsp.o
  OBJS-$(CONFIG_FMTCONVERT)              += fmtconvert.o
@@@ -73,7 -61,6 +73,7 @@@ OBJS-$(CONFIG_H264CHROMA)              
  OBJS-$(CONFIG_H264DSP)                 += h264dsp.o h264idct.o
  OBJS-$(CONFIG_H264PRED)                += h264pred.o
  OBJS-$(CONFIG_H264QPEL)                += h264qpel.o
 +OBJS-$(CONFIG_H264_VIDEOTOOLBOX_ENCODER) += videotoolboxenc.o
  OBJS-$(CONFIG_HPELDSP)                 += hpeldsp.o
  OBJS-$(CONFIG_HUFFMAN)                 += huffman.o
  OBJS-$(CONFIG_HUFFYUVDSP)              += huffyuvdsp.o
@@@ -83,17 -70,13 +83,17 @@@ OBJS-$(CONFIG_IIRFILTER)               
  OBJS-$(CONFIG_IMDCT15)                 += imdct15.o
  OBJS-$(CONFIG_INTRAX8)                 += intrax8.o intrax8dsp.o
  OBJS-$(CONFIG_IVIDSP)                  += ivi_dsp.o
 +OBJS-$(CONFIG_JNI)                     += ffjni.o jni.o
  OBJS-$(CONFIG_JPEGTABLES)              += jpegtables.o
  OBJS-$(CONFIG_LIBXVID)                 += libxvid_rc.o
 +OBJS-$(CONFIG_LLAUDDSP)                += lossless_audiodsp.o
 +OBJS-$(CONFIG_LLVIDDSP)                += lossless_videodsp.o
  OBJS-$(CONFIG_LPC)                     += lpc.o
  OBJS-$(CONFIG_LSP)                     += lsp.o
  OBJS-$(CONFIG_LZF)                     += lzf.o
 -OBJS-$(CONFIG_MDCT)                    += mdct_fixed.o mdct_float.o
 +OBJS-$(CONFIG_MDCT)                    += mdct_fixed.o mdct_float.o mdct_fixed_32.o
  OBJS-$(CONFIG_ME_CMP)                  += me_cmp.o
 +OBJS-$(CONFIG_MEDIACODEC)              += mediacodecdec.o mediacodec_wrapper.o mediacodec_sw_buffer.o
  OBJS-$(CONFIG_MPEG_ER)                 += mpeg_er.o
  OBJS-$(CONFIG_MPEGAUDIO)               += mpegaudio.o mpegaudiodata.o   \
                                            mpegaudiodecheader.o
@@@ -108,8 -91,8 +108,8 @@@ OBJS-$(CONFIG_MPEGVIDEOENC)            
                                            motion_est.o ratecontrol.o    \
                                            mpegvideoencdsp.o
  OBJS-$(CONFIG_MSS34DSP)                += mss34dsp.o
 +OBJS-$(CONFIG_NVENC)                   += nvenc.o
  OBJS-$(CONFIG_PIXBLOCKDSP)             += pixblockdsp.o
 -OBJS-${CONFIG_NVENC}                   += nvenc.o
  OBJS-$(CONFIG_QPELDSP)                 += qpeldsp.o
  OBJS-$(CONFIG_QSV)                     += qsv.o
  OBJS-$(CONFIG_QSVDEC)                  += qsvdec.o
@@@ -118,8 -101,7 +118,8 @@@ OBJS-$(CONFIG_RANGECODER)              
  RDFT-OBJS-$(CONFIG_HARDCODED_TABLES)   += sin_tables.o
  OBJS-$(CONFIG_RDFT)                    += rdft.o $(RDFT-OBJS-yes)
  OBJS-$(CONFIG_RV34DSP)                 += rv34dsp.o
 -OBJS-$(CONFIG_SINEWIN)                 += sinewin.o
 +OBJS-$(CONFIG_SHARED)                  += log2_tab.o reverse.o
 +OBJS-$(CONFIG_SINEWIN)                 += sinewin.o sinewin_fixed.o
  OBJS-$(CONFIG_SNAPPY)                  += snappy.o
  OBJS-$(CONFIG_STARTCODE)               += startcode.o
  OBJS-$(CONFIG_TEXTUREDSP)              += texturedsp.o
@@@ -134,30 -116,21 +134,30 @@@ OBJS-$(CONFIG_WMA_FREQS)               
  OBJS-$(CONFIG_WMV2DSP)                 += wmv2dsp.o
  
  # decoders/encoders
 +OBJS-$(CONFIG_ZERO12V_DECODER)         += 012v.o
  OBJS-$(CONFIG_A64MULTI_ENCODER)        += a64multienc.o elbg.o
  OBJS-$(CONFIG_A64MULTI5_ENCODER)       += a64multienc.o elbg.o
 -OBJS-$(CONFIG_AAC_DECODER)             += aacdec.o aactab.o aacsbr.o aacps.o \
 +OBJS-$(CONFIG_AAC_DECODER)             += aacdec.o aactab.o aacsbr.o aacps_float.o \
 +                                          aacadtsdec.o mpeg4audio.o kbdwin.o \
 +                                          sbrdsp.o aacpsdsp_float.o cbrt_data.o
 +OBJS-$(CONFIG_AAC_FIXED_DECODER)       += aacdec_fixed.o aactab.o aacsbr_fixed.o aacps_fixed.o \
                                            aacadtsdec.o mpeg4audio.o kbdwin.o \
 -                                          sbrdsp.o aacpsdsp.o
 -OBJS-$(CONFIG_AAC_ENCODER)             += aacenc.o aaccoder.o    \
 +                                          sbrdsp_fixed.o aacpsdsp_fixed.o cbrt_data_fixed.o
 +OBJS-$(CONFIG_AAC_ENCODER)             += aacenc.o aaccoder.o aacenctab.o    \
                                            aacpsy.o aactab.o      \
 -                                          psymodel.o mpeg4audio.o kbdwin.o
 +                                          aacenc_is.o \
 +                                          aacenc_tns.o \
 +                                          aacenc_ltp.o \
 +                                          aacenc_pred.o \
 +                                          psymodel.o mpeg4audio.o kbdwin.o cbrt_data.o
  OBJS-$(CONFIG_AASC_DECODER)            += aasc.o msrledec.o
 -OBJS-$(CONFIG_AC3_DECODER)             += ac3dec.o ac3dec_data.o ac3.o kbdwin.o
 +OBJS-$(CONFIG_AC3_DECODER)             += ac3dec_float.o ac3dec_data.o ac3.o kbdwin.o
 +OBJS-$(CONFIG_AC3_FIXED_DECODER)       += ac3dec_fixed.o ac3dec_data.o ac3.o kbdwin.o
  OBJS-$(CONFIG_AC3_ENCODER)             += ac3enc_float.o ac3enc.o ac3tab.o \
                                            ac3.o kbdwin.o
  OBJS-$(CONFIG_AC3_FIXED_ENCODER)       += ac3enc_fixed.o ac3enc.o ac3tab.o ac3.o
  OBJS-$(CONFIG_AIC_DECODER)             += aic.o
 -OBJS-$(CONFIG_ALAC_DECODER)            += alac.o alac_data.o
 +OBJS-$(CONFIG_ALAC_DECODER)            += alac.o alac_data.o alacdsp.o
  OBJS-$(CONFIG_ALAC_ENCODER)            += alacenc.o alac_data.o
  OBJS-$(CONFIG_ALIAS_PIX_DECODER)       += aliaspixdec.o
  OBJS-$(CONFIG_ALIAS_PIX_ENCODER)       += aliaspixenc.o
@@@ -170,17 -143,9 +170,17 @@@ OBJS-$(CONFIG_AMRWB_DECODER)           
                                            celp_math.o acelp_filters.o \
                                            acelp_vectors.o             \
                                            acelp_pitch_delay.o
 +OBJS-$(CONFIG_AMV_ENCODER)             += mjpegenc.o mjpegenc_common.o \
 +                                          mpegvideo_enc.o motion_est.o \
 +                                          ratecontrol.o mpeg12data.o   \
 +                                          mpegvideo.o
  OBJS-$(CONFIG_ANM_DECODER)             += anm.o
  OBJS-$(CONFIG_ANSI_DECODER)            += ansi.o cga_data.o
  OBJS-$(CONFIG_APE_DECODER)             += apedec.o
 +OBJS-$(CONFIG_APNG_DECODER)            += png.o pngdec.o pngdsp.o
 +OBJS-$(CONFIG_APNG_ENCODER)            += png.o pngenc.o
 +OBJS-$(CONFIG_SSA_DECODER)             += assdec.o ass.o
 +OBJS-$(CONFIG_SSA_ENCODER)             += assenc.o ass.o
  OBJS-$(CONFIG_ASS_DECODER)             += assdec.o ass.o
  OBJS-$(CONFIG_ASS_ENCODER)             += assenc.o ass.o
  OBJS-$(CONFIG_ASV1_DECODER)            += asvdec.o asv.o mpeg12data.o
@@@ -193,20 -158,12 +193,20 @@@ OBJS-$(CONFIG_ATRAC3P_DECODER)         
                                            atrac3plusdsp.o atrac.o
  OBJS-$(CONFIG_AURA_DECODER)            += cyuv.o
  OBJS-$(CONFIG_AURA2_DECODER)           += aura.o
 +OBJS-$(CONFIG_AVRN_DECODER)            += avrndec.o mjpegdec.o
 +OBJS-$(CONFIG_AVRP_DECODER)            += r210dec.o
 +OBJS-$(CONFIG_AVRP_ENCODER)            += r210enc.o
  OBJS-$(CONFIG_AVS_DECODER)             += avs.o
 +OBJS-$(CONFIG_AVUI_DECODER)            += avuidec.o
 +OBJS-$(CONFIG_AVUI_ENCODER)            += avuienc.o
 +OBJS-$(CONFIG_AYUV_DECODER)            += v408dec.o
 +OBJS-$(CONFIG_AYUV_ENCODER)            += v408enc.o
  OBJS-$(CONFIG_BETHSOFTVID_DECODER)     += bethsoftvideo.o
  OBJS-$(CONFIG_BFI_DECODER)             += bfi.o
  OBJS-$(CONFIG_BINK_DECODER)            += bink.o binkdsp.o
  OBJS-$(CONFIG_BINKAUDIO_DCT_DECODER)   += binkaudio.o
  OBJS-$(CONFIG_BINKAUDIO_RDFT_DECODER)  += binkaudio.o
 +OBJS-$(CONFIG_BINTEXT_DECODER)         += bintext.o cga_data.o
  OBJS-$(CONFIG_BMP_DECODER)             += bmp.o msrledec.o
  OBJS-$(CONFIG_BMP_ENCODER)             += bmpenc.o
  OBJS-$(CONFIG_BMV_AUDIO_DECODER)       += bmvaudio.o
@@@ -215,37 -172,26 +215,37 @@@ OBJS-$(CONFIG_BRENDER_PIX_DECODER)     
  OBJS-$(CONFIG_C93_DECODER)             += c93.o
  OBJS-$(CONFIG_CAVS_DECODER)            += cavs.o cavsdec.o cavsdsp.o \
                                            cavsdata.o mpeg12data.o
 +OBJS-$(CONFIG_CCAPTION_DECODER)        += ccaption_dec.o
  OBJS-$(CONFIG_CDGRAPHICS_DECODER)      += cdgraphics.o
  OBJS-$(CONFIG_CDXL_DECODER)            += cdxl.o
 +OBJS-$(CONFIG_CFHD_DECODER)            += cfhd.o cfhddata.o
  OBJS-$(CONFIG_CINEPAK_DECODER)         += cinepak.o
 +OBJS-$(CONFIG_CINEPAK_ENCODER)         += cinepakenc.o elbg.o
  OBJS-$(CONFIG_CLJR_DECODER)            += cljrdec.o
  OBJS-$(CONFIG_CLJR_ENCODER)            += cljrenc.o
  OBJS-$(CONFIG_CLLC_DECODER)            += cllc.o canopus.o
  OBJS-$(CONFIG_COOK_DECODER)            += cook.o
  OBJS-$(CONFIG_COMFORTNOISE_DECODER)    += cngdec.o celp_filters.o
  OBJS-$(CONFIG_COMFORTNOISE_ENCODER)    += cngenc.o
 +OBJS-$(CONFIG_CPIA_DECODER)            += cpia.o
  OBJS-$(CONFIG_CSCD_DECODER)            += cscd.o
  OBJS-$(CONFIG_CYUV_DECODER)            += cyuv.o
 -OBJS-$(CONFIG_DCA_DECODER)             += dcadec.o dca.o dcadsp.o      \
 -                                          dcadata.o dca_exss.o         \
 -                                          dca_xll.o synth_filter.o
 +OBJS-$(CONFIG_DCA_DECODER)             += dcadec.o dca.o dcadata.o        \
 +                                          dca_core.o dca_exss.o dca_xll.o \
 +                                          dcadsp.o dcadct.o synth_filter.o
 +OBJS-$(CONFIG_DCA_ENCODER)             += dcaenc.o dca.o dcadata.o
  OBJS-$(CONFIG_DDS_DECODER)             += dds.o
 +OBJS-$(CONFIG_DIRAC_DECODER)           += diracdec.o dirac.o diracdsp.o diractab.o \
 +                                          dirac_arith.o mpeg12data.o dirac_dwt.o
  OBJS-$(CONFIG_DFA_DECODER)             += dfa.o
  OBJS-$(CONFIG_DNXHD_DECODER)           += dnxhddec.o dnxhddata.o
  OBJS-$(CONFIG_DNXHD_ENCODER)           += dnxhdenc.o dnxhddata.o
  OBJS-$(CONFIG_DPX_DECODER)             += dpx.o
  OBJS-$(CONFIG_DPX_ENCODER)             += dpxenc.o
 +OBJS-$(CONFIG_DSD_LSBF_DECODER)        += dsddec.o
 +OBJS-$(CONFIG_DSD_MSBF_DECODER)        += dsddec.o
 +OBJS-$(CONFIG_DSD_LSBF_PLANAR_DECODER) += dsddec.o
 +OBJS-$(CONFIG_DSD_MSBF_PLANAR_DECODER) += dsddec.o
  OBJS-$(CONFIG_DSICINAUDIO_DECODER)     += dsicinaudio.o
  OBJS-$(CONFIG_DSICINVIDEO_DECODER)     += dsicinvideo.o
  OBJS-$(CONFIG_DSS_SP_DECODER)          += dss_sp.o
@@@ -253,13 -199,12 +253,13 @@@ OBJS-$(CONFIG_DVBSUB_DECODER)          
  OBJS-$(CONFIG_DVBSUB_ENCODER)          += dvbsub.o
  OBJS-$(CONFIG_DVDSUB_DECODER)          += dvdsubdec.o
  OBJS-$(CONFIG_DVDSUB_ENCODER)          += dvdsubenc.o
 +OBJS-$(CONFIG_DVAUDIO_DECODER)         += dvaudiodec.o
  OBJS-$(CONFIG_DVVIDEO_DECODER)         += dvdec.o dv.o dvdata.o
  OBJS-$(CONFIG_DVVIDEO_ENCODER)         += dvenc.o dv.o dvdata.o
  OBJS-$(CONFIG_DXA_DECODER)             += dxa.o
  OBJS-$(CONFIG_DXTORY_DECODER)          += dxtory.o
  OBJS-$(CONFIG_DXV_DECODER)             += dxv.o
 -OBJS-$(CONFIG_EAC3_DECODER)            += eac3dec.o eac3_data.o
 +OBJS-$(CONFIG_EAC3_DECODER)            += eac3_data.o
  OBJS-$(CONFIG_EAC3_ENCODER)            += eac3enc.o eac3_data.o
  OBJS-$(CONFIG_EACMV_DECODER)           += eacmv.o
  OBJS-$(CONFIG_EAMAD_DECODER)           += eamad.o eaidct.o mpeg12.o \
@@@ -272,17 -217,14 +272,17 @@@ OBJS-$(CONFIG_EIGHTSVX_EXP_DECODER)    
  OBJS-$(CONFIG_EIGHTSVX_FIB_DECODER)    += 8svx.o
  OBJS-$(CONFIG_ESCAPE124_DECODER)       += escape124.o
  OBJS-$(CONFIG_ESCAPE130_DECODER)       += escape130.o
 +OBJS-$(CONFIG_EVRC_DECODER)            += evrcdec.o acelp_vectors.o lsp.o
  OBJS-$(CONFIG_EXR_DECODER)             += exr.o
  OBJS-$(CONFIG_FFV1_DECODER)            += ffv1dec.o ffv1.o
  OBJS-$(CONFIG_FFV1_ENCODER)            += ffv1enc.o ffv1.o
 +OBJS-$(CONFIG_FFWAVESYNTH_DECODER)     += ffwavesynth.o
  OBJS-$(CONFIG_FIC_DECODER)             += fic.o
  OBJS-$(CONFIG_FLAC_DECODER)            += flacdec.o flacdata.o flac.o
 -OBJS-$(CONFIG_FLAC_ENCODER)            += flacenc.o flacdata.o flac.o
 +OBJS-$(CONFIG_FLAC_ENCODER)            += flacenc.o flacdata.o flac.o vorbis_data.o
  OBJS-$(CONFIG_FLASHSV_DECODER)         += flashsv.o
  OBJS-$(CONFIG_FLASHSV_ENCODER)         += flashsvenc.o
 +OBJS-$(CONFIG_FLASHSV2_ENCODER)        += flashsv2enc.o
  OBJS-$(CONFIG_FLASHSV2_DECODER)        += flashsv.o
  OBJS-$(CONFIG_FLIC_DECODER)            += flicvideo.o
  OBJS-$(CONFIG_FOURXM_DECODER)          += 4xm.o
@@@ -290,10 -232,11 +290,10 @@@ OBJS-$(CONFIG_FRAPS_DECODER)           
  OBJS-$(CONFIG_FRWU_DECODER)            += frwu.o
  OBJS-$(CONFIG_G2M_DECODER)             += g2meet.o elsdec.o
  OBJS-$(CONFIG_G723_1_DECODER)          += g723_1dec.o g723_1.o \
 -                                          acelp_vectors.o celp_filters.o \
 -                                          celp_math.o
 +                                          acelp_vectors.o celp_filters.o celp_math.o
  OBJS-$(CONFIG_G723_1_ENCODER)          += g723_1enc.o g723_1.o \
 -                                          acelp_vectors.o celp_filters.o \
 -                                          celp_math.o
 +                                          acelp_vectors.o celp_filters.o celp_math.o
 +OBJS-$(CONFIG_G729_DECODER)            += g729dec.o lsp.o celp_math.o acelp_filters.o acelp_pitch_delay.o acelp_vectors.o g729postfilter.o
  OBJS-$(CONFIG_GIF_DECODER)             += gifdec.o lzw.o
  OBJS-$(CONFIG_GIF_ENCODER)             += gif.o lzwenc.o
  OBJS-$(CONFIG_GSM_DECODER)             += gsmdec.o gsmdec_data.o msgsmdec.o
@@@ -308,10 -251,10 +308,10 @@@ OBJS-$(CONFIG_H263_ENCODER)            
  OBJS-$(CONFIG_H264_DECODER)            += h264.o h264_cabac.o h264_cavlc.o \
                                            h264_direct.o h264_loopfilter.o  \
                                            h264_mb.o h264_picture.o h264_ps.o \
-                                           h264_refs.o h264_sei.o h264_slice.o h264data.o
 -                                          h264_refs.o h264_sei.o \
 -                                          h264_slice.o h264data.o h264_parse.o
++                                          h264_refs.o h264_sei.o h264_slice.o h264data.o h264_parse.o
 +OBJS-$(CONFIG_H264_MEDIACODEC_DECODER) += mediacodecdec_h264.o
  OBJS-$(CONFIG_H264_MMAL_DECODER)       += mmaldec.o
 -OBJS-$(CONFIG_H264_NVENC_ENCODER)      += nvenc_h264.o
 +OBJS-$(CONFIG_H264_VDA_DECODER)        += vda_h264_dec.o
  OBJS-$(CONFIG_H264_QSV_DECODER)        += qsvdec_h2645.o
  OBJS-$(CONFIG_H264_QSV_ENCODER)        += qsvenc_h264.o
  OBJS-$(CONFIG_HAP_DECODER)             += hapdec.o hap.o
@@@ -319,6 -262,7 +319,6 @@@ OBJS-$(CONFIG_HAP_ENCODER)             
  OBJS-$(CONFIG_HEVC_DECODER)            += hevc.o hevc_mvs.o hevc_ps.o hevc_sei.o \
                                            hevc_cabac.o hevc_refs.o hevcpred.o    \
                                            hevcdsp.o hevc_filter.o h2645_parse.o hevc_data.o
 -OBJS-$(CONFIG_HEVC_NVENC_ENCODER)      += nvenc_hevc.o
  OBJS-$(CONFIG_HEVC_QSV_DECODER)        += qsvdec_h2645.o
  OBJS-$(CONFIG_HEVC_QSV_ENCODER)        += qsvenc_hevc.o hevc_ps_enc.o h2645_parse.o
  OBJS-$(CONFIG_HNM4_VIDEO_DECODER)      += hnm4video.o
@@@ -328,19 -272,15 +328,19 @@@ OBJS-$(CONFIG_HQX_DECODER)             
  OBJS-$(CONFIG_HUFFYUV_DECODER)         += huffyuv.o huffyuvdec.o
  OBJS-$(CONFIG_HUFFYUV_ENCODER)         += huffyuv.o huffyuvenc.o
  OBJS-$(CONFIG_IDCIN_DECODER)           += idcinvideo.o
 -OBJS-$(CONFIG_IFF_BYTERUN1_DECODER)    += iff.o
 +OBJS-$(CONFIG_IDF_DECODER)             += bintext.o cga_data.o
  OBJS-$(CONFIG_IFF_ILBM_DECODER)        += iff.o
  OBJS-$(CONFIG_IMC_DECODER)             += imc.o
  OBJS-$(CONFIG_INDEO2_DECODER)          += indeo2.o
  OBJS-$(CONFIG_INDEO3_DECODER)          += indeo3.o
  OBJS-$(CONFIG_INDEO4_DECODER)          += indeo4.o ivi.o
  OBJS-$(CONFIG_INDEO5_DECODER)          += indeo5.o ivi.o
 +OBJS-$(CONFIG_INTERPLAY_ACM_DECODER)   += interplayacm.o
  OBJS-$(CONFIG_INTERPLAY_DPCM_DECODER)  += dpcm.o
  OBJS-$(CONFIG_INTERPLAY_VIDEO_DECODER) += interplayvideo.o
 +OBJS-$(CONFIG_JACOSUB_DECODER)         += jacosubdec.o ass.o
 +OBJS-$(CONFIG_JPEG2000_ENCODER)        += j2kenc.o mqcenc.o mqc.o jpeg2000.o \
 +                                          jpeg2000dwt.o
  OBJS-$(CONFIG_JPEG2000_DECODER)        += jpeg2000dec.o jpeg2000.o jpeg2000dsp.o \
                                            jpeg2000dwt.o mqcdec.o mqc.o
  OBJS-$(CONFIG_JPEGLS_DECODER)          += jpeglsdec.o jpegls.o
@@@ -356,7 -296,6 +356,7 @@@ OBJS-$(CONFIG_MACE6_DECODER)           
  OBJS-$(CONFIG_MDEC_DECODER)            += mdec.o mpeg12.o mpeg12data.o
  OBJS-$(CONFIG_METASOUND_DECODER)       += metasound.o metasound_data.o \
                                            twinvq.o
 +OBJS-$(CONFIG_MICRODVD_DECODER)        += microdvddec.o ass.o
  OBJS-$(CONFIG_MIMIC_DECODER)           += mimic.o
  OBJS-$(CONFIG_MJPEG_DECODER)           += mjpegdec.o
  OBJS-$(CONFIG_MJPEG_ENCODER)           += mjpegenc.o mjpegenc_common.o
@@@ -364,14 -303,10 +364,14 @@@ OBJS-$(CONFIG_MJPEGB_DECODER)          
  OBJS-$(CONFIG_MLP_DECODER)             += mlpdec.o mlpdsp.o
  OBJS-$(CONFIG_MMVIDEO_DECODER)         += mmvideo.o
  OBJS-$(CONFIG_MOTIONPIXELS_DECODER)    += motionpixels.o
 +OBJS-$(CONFIG_MOVTEXT_DECODER)         += movtextdec.o ass.o
 +OBJS-$(CONFIG_MOVTEXT_ENCODER)         += movtextenc.o ass_split.o
  OBJS-$(CONFIG_MP1_DECODER)             += mpegaudiodec_fixed.o
  OBJS-$(CONFIG_MP1FLOAT_DECODER)        += mpegaudiodec_float.o
  OBJS-$(CONFIG_MP2_DECODER)             += mpegaudiodec_fixed.o
 -OBJS-$(CONFIG_MP2_ENCODER)             += mpegaudioenc.o mpegaudio.o \
 +OBJS-$(CONFIG_MP2_ENCODER)             += mpegaudioenc_float.o mpegaudio.o \
 +                                          mpegaudiodata.o mpegaudiodsp_data.o
 +OBJS-$(CONFIG_MP2FIXED_ENCODER)        += mpegaudioenc_fixed.o mpegaudio.o \
                                            mpegaudiodata.o mpegaudiodsp_data.o
  OBJS-$(CONFIG_MP2FLOAT_DECODER)        += mpegaudiodec_float.o
  OBJS-$(CONFIG_MP3_DECODER)             += mpegaudiodec_fixed.o
@@@ -382,16 -317,14 +382,16 @@@ OBJS-$(CONFIG_MP3ON4_DECODER)          
  OBJS-$(CONFIG_MP3ON4FLOAT_DECODER)     += mpegaudiodec_float.o mpeg4audio.o
  OBJS-$(CONFIG_MPC7_DECODER)            += mpc7.o mpc.o
  OBJS-$(CONFIG_MPC8_DECODER)            += mpc8.o mpc.o
 -OBJS-$(CONFIG_MPEG_XVMC_DECODER)       += mpegvideo_xvmc.o
 +OBJS-$(CONFIG_MPEGVIDEO_DECODER)       += mpeg12dec.o mpeg12.o mpeg12data.o
  OBJS-$(CONFIG_MPEG1VIDEO_DECODER)      += mpeg12dec.o mpeg12.o mpeg12data.o
  OBJS-$(CONFIG_MPEG1VIDEO_ENCODER)      += mpeg12enc.o mpeg12.o
  OBJS-$(CONFIG_MPEG2VIDEO_DECODER)      += mpeg12dec.o mpeg12.o mpeg12data.o
  OBJS-$(CONFIG_MPEG2VIDEO_ENCODER)      += mpeg12enc.o mpeg12.o
 +OBJS-$(CONFIG_MPEG2_MMAL_DECODER)      += mmaldec.o
  OBJS-$(CONFIG_MPEG2_QSV_DECODER)       += qsvdec_mpeg2.o
  OBJS-$(CONFIG_MPEG2_QSV_ENCODER)       += qsvenc_mpeg2.o
  OBJS-$(CONFIG_MPEG4_DECODER)           += xvididct.o
 +OBJS-$(CONFIG_MPL2_DECODER)            += mpl2dec.o ass.o
  OBJS-$(CONFIG_MSMPEG4V1_DECODER)       += msmpeg4dec.o msmpeg4.o msmpeg4data.o
  OBJS-$(CONFIG_MSMPEG4V2_DECODER)       += msmpeg4dec.o msmpeg4.o msmpeg4data.o
  OBJS-$(CONFIG_MSMPEG4V2_ENCODER)       += msmpeg4enc.o msmpeg4.o msmpeg4data.o
@@@ -402,7 -335,6 +402,7 @@@ OBJS-$(CONFIG_MSA1_DECODER)            
  OBJS-$(CONFIG_MSS1_DECODER)            += mss1.o mss12.o
  OBJS-$(CONFIG_MSS2_DECODER)            += mss2.o mss12.o mss2dsp.o wmv2data.o
  OBJS-$(CONFIG_MSVIDEO1_DECODER)        += msvideo1.o
 +OBJS-$(CONFIG_MSVIDEO1_ENCODER)        += msvideo1enc.o elbg.o
  OBJS-$(CONFIG_MSZH_DECODER)            += lcldec.o
  OBJS-$(CONFIG_MTS2_DECODER)            += mss4.o
  OBJS-$(CONFIG_MVC1_DECODER)            += mvcdec.o
@@@ -428,16 -360,12 +428,16 @@@ OBJS-$(CONFIG_PGMYUV_DECODER)          
  OBJS-$(CONFIG_PGMYUV_ENCODER)          += pnmenc.o
  OBJS-$(CONFIG_PGSSUB_DECODER)          += pgssubdec.o
  OBJS-$(CONFIG_PICTOR_DECODER)          += pictordec.o cga_data.o
 +OBJS-$(CONFIG_PJS_DECODER)             += textdec.o ass.o
  OBJS-$(CONFIG_PNG_DECODER)             += png.o pngdec.o pngdsp.o
  OBJS-$(CONFIG_PNG_ENCODER)             += png.o pngenc.o
  OBJS-$(CONFIG_PPM_DECODER)             += pnmdec.o pnm.o
  OBJS-$(CONFIG_PPM_ENCODER)             += pnmenc.o
 -OBJS-$(CONFIG_PRORES_DECODER)          += proresdec.o proresdata.o proresdsp.o
 -OBJS-$(CONFIG_PRORES_ENCODER)          += proresenc.o proresdata.o
 +OBJS-$(CONFIG_PRORES_DECODER)          += proresdec2.o proresdsp.o proresdata.o
 +OBJS-$(CONFIG_PRORES_LGPL_DECODER)     += proresdec_lgpl.o proresdsp.o proresdata.o
 +OBJS-$(CONFIG_PRORES_ENCODER)          += proresenc_anatoliy.o
 +OBJS-$(CONFIG_PRORES_AW_ENCODER)       += proresenc_anatoliy.o
 +OBJS-$(CONFIG_PRORES_KS_ENCODER)       += proresenc_kostya.o proresdata.o
  OBJS-$(CONFIG_PTX_DECODER)             += ptx.o
  OBJS-$(CONFIG_QCELP_DECODER)           += qcelpdec.o                     \
                                            celp_filters.o acelp_vectors.o \
@@@ -448,16 -376,13 +448,16 @@@ OBJS-$(CONFIG_QPEG_DECODER)            
  OBJS-$(CONFIG_QTRLE_DECODER)           += qtrle.o
  OBJS-$(CONFIG_QTRLE_ENCODER)           += qtrleenc.o
  OBJS-$(CONFIG_R10K_DECODER)            += r210dec.o
 +OBJS-$(CONFIG_R10K_ENCODER)            += r210enc.o
  OBJS-$(CONFIG_R210_DECODER)            += r210dec.o
 +OBJS-$(CONFIG_R210_ENCODER)            += r210enc.o
  OBJS-$(CONFIG_RA_144_DECODER)          += ra144dec.o ra144.o celp_filters.o
  OBJS-$(CONFIG_RA_144_ENCODER)          += ra144enc.o ra144.o celp_filters.o
  OBJS-$(CONFIG_RA_288_DECODER)          += ra288.o celp_filters.o
  OBJS-$(CONFIG_RALF_DECODER)            += ralf.o
  OBJS-$(CONFIG_RAWVIDEO_DECODER)        += rawdec.o
  OBJS-$(CONFIG_RAWVIDEO_ENCODER)        += rawenc.o
 +OBJS-$(CONFIG_REALTEXT_DECODER)        += realtextdec.o ass.o
  OBJS-$(CONFIG_RL2_DECODER)             += rl2.o
  OBJS-$(CONFIG_ROQ_DECODER)             += roqvideodec.o roqvideo.o
  OBJS-$(CONFIG_ROQ_ENCODER)             += roqvideoenc.o roqvideo.o elbg.o
@@@ -471,12 -396,9 +471,12 @@@ OBJS-$(CONFIG_RV20_DECODER)            
  OBJS-$(CONFIG_RV20_ENCODER)            += rv20enc.o
  OBJS-$(CONFIG_RV30_DECODER)            += rv30.o rv34.o rv30dsp.o
  OBJS-$(CONFIG_RV40_DECODER)            += rv40.o rv34.o rv40dsp.o
 +OBJS-$(CONFIG_SAMI_DECODER)            += samidec.o ass.o htmlsubtitles.o
  OBJS-$(CONFIG_S302M_DECODER)           += s302m.o
 +OBJS-$(CONFIG_S302M_ENCODER)           += s302menc.o
  OBJS-$(CONFIG_SANM_DECODER)            += sanm.o
  OBJS-$(CONFIG_SCREENPRESSO_DECODER)    += screenpresso.o
 +OBJS-$(CONFIG_SDX2_DPCM_DECODER)       += dpcm.o
  OBJS-$(CONFIG_SGI_DECODER)             += sgidec.o
  OBJS-$(CONFIG_SGI_ENCODER)             += sgienc.o rle.o
  OBJS-$(CONFIG_SGIRLE_DECODER)          += sgirledec.o
@@@ -488,48 -410,29 +488,48 @@@ OBJS-$(CONFIG_SIPR_DECODER)            
  OBJS-$(CONFIG_SMACKAUD_DECODER)        += smacker.o
  OBJS-$(CONFIG_SMACKER_DECODER)         += smacker.o
  OBJS-$(CONFIG_SMC_DECODER)             += smc.o
 +OBJS-$(CONFIG_SMVJPEG_DECODER)         += smvjpegdec.o
 +OBJS-$(CONFIG_SNOW_DECODER)            += snowdec.o snow.o snow_dwt.o
 +OBJS-$(CONFIG_SNOW_ENCODER)            += snowenc.o snow.o snow_dwt.o             \
 +                                          h263.o ituh263enc.o
  OBJS-$(CONFIG_SOL_DPCM_DECODER)        += dpcm.o
 +OBJS-$(CONFIG_SONIC_DECODER)           += sonic.o
 +OBJS-$(CONFIG_SONIC_ENCODER)           += sonic.o
 +OBJS-$(CONFIG_SONIC_LS_ENCODER)        += sonic.o
  OBJS-$(CONFIG_SP5X_DECODER)            += sp5xdec.o
 -OBJS-$(CONFIG_SRT_DECODER)             += srtdec.o ass.o
 +OBJS-$(CONFIG_SRT_DECODER)             += srtdec.o ass.o htmlsubtitles.o
 +OBJS-$(CONFIG_SRT_ENCODER)             += srtenc.o ass_split.o
 +OBJS-$(CONFIG_STL_DECODER)             += textdec.o ass.o
 +OBJS-$(CONFIG_SUBRIP_DECODER)          += srtdec.o ass.o htmlsubtitles.o
 +OBJS-$(CONFIG_SUBRIP_ENCODER)          += srtenc.o ass_split.o
 +OBJS-$(CONFIG_SUBVIEWER1_DECODER)      += textdec.o ass.o
 +OBJS-$(CONFIG_SUBVIEWER_DECODER)       += subviewerdec.o ass.o
  OBJS-$(CONFIG_SUNRAST_DECODER)         += sunrast.o
  OBJS-$(CONFIG_SUNRAST_ENCODER)         += sunrastenc.o
  OBJS-$(CONFIG_SVQ1_DECODER)            += svq1dec.o svq1.o svq13.o h263data.o
  OBJS-$(CONFIG_SVQ1_ENCODER)            += svq1enc.o svq1.o  h263data.o  \
                                            h263.o ituh263enc.o
  OBJS-$(CONFIG_SVQ3_DECODER)            += svq3.o svq13.o mpegutils.o
 -OBJS-$(CONFIG_TAK_DECODER)             += takdec.o tak.o
 +OBJS-$(CONFIG_TEXT_DECODER)            += textdec.o ass.o
 +OBJS-$(CONFIG_TEXT_ENCODER)            += srtenc.o ass_split.o
 +OBJS-$(CONFIG_TAK_DECODER)             += takdec.o tak.o takdsp.o
  OBJS-$(CONFIG_TARGA_DECODER)           += targa.o
  OBJS-$(CONFIG_TARGA_ENCODER)           += targaenc.o rle.o
 +OBJS-$(CONFIG_TARGA_Y216_DECODER)      += targa_y216dec.o
  OBJS-$(CONFIG_TDSC_DECODER)            += tdsc.o
  OBJS-$(CONFIG_TIERTEXSEQVIDEO_DECODER) += tiertexseqv.o
 -OBJS-$(CONFIG_TIFF_DECODER)            += tiff.o lzw.o faxcompr.o
 -OBJS-$(CONFIG_TIFF_ENCODER)            += tiffenc.o rle.o lzwenc.o
 +OBJS-$(CONFIG_TIFF_DECODER)            += tiff.o lzw.o faxcompr.o tiff_data.o tiff_common.o
 +OBJS-$(CONFIG_TIFF_ENCODER)            += tiffenc.o rle.o lzwenc.o tiff_data.o
  OBJS-$(CONFIG_TMV_DECODER)             += tmv.o cga_data.o
 +OBJS-$(CONFIG_TRUEHD_DECODER)          += mlpdec.o mlpdsp.o
  OBJS-$(CONFIG_TRUEMOTION1_DECODER)     += truemotion1.o
  OBJS-$(CONFIG_TRUEMOTION2_DECODER)     += truemotion2.o
 +OBJS-$(CONFIG_TRUEMOTION2RT_DECODER)   += truemotion2rt.o
  OBJS-$(CONFIG_TRUESPEECH_DECODER)      += truespeech.o
  OBJS-$(CONFIG_TSCC_DECODER)            += tscc.o msrledec.o
  OBJS-$(CONFIG_TSCC2_DECODER)           += tscc2.o
 -OBJS-$(CONFIG_TTA_DECODER)             += tta.o
 +OBJS-$(CONFIG_TTA_DECODER)             += tta.o ttadata.o ttadsp.o
 +OBJS-$(CONFIG_TTA_ENCODER)             += ttaenc.o ttadata.o
  OBJS-$(CONFIG_TWINVQ_DECODER)          += twinvqdec.o twinvq.o
  OBJS-$(CONFIG_TXD_DECODER)             += txd.o
  OBJS-$(CONFIG_ULTI_DECODER)            += ulti.o
@@@ -537,10 -440,6 +537,10 @@@ OBJS-$(CONFIG_UTVIDEO_DECODER)         
  OBJS-$(CONFIG_UTVIDEO_ENCODER)         += utvideoenc.o utvideo.o
  OBJS-$(CONFIG_V210_DECODER)            += v210dec.o
  OBJS-$(CONFIG_V210_ENCODER)            += v210enc.o
 +OBJS-$(CONFIG_V308_DECODER)            += v308dec.o
 +OBJS-$(CONFIG_V308_ENCODER)            += v308enc.o
 +OBJS-$(CONFIG_V408_DECODER)            += v408dec.o
 +OBJS-$(CONFIG_V408_ENCODER)            += v408enc.o
  OBJS-$(CONFIG_V410_DECODER)            += v410dec.o
  OBJS-$(CONFIG_V410_ENCODER)            += v410enc.o
  OBJS-$(CONFIG_V210X_DECODER)           += v210x.o
@@@ -549,10 -448,7 +549,10 @@@ OBJS-$(CONFIG_VBLE_DECODER)            
  OBJS-$(CONFIG_VC1_DECODER)             += vc1dec.o vc1_block.o vc1_loopfilter.o \
                                            vc1_mc.o vc1_pred.o vc1.o vc1data.o \
                                            msmpeg4dec.o msmpeg4.o msmpeg4data.o \
 -                                          wmv2data.o
 +                                          wmv2dsp.o wmv2data.o
 +OBJS-$(CONFIG_VC1_MMAL_DECODER)        += mmaldec.o
 +OBJS-$(CONFIG_VC1_QSV_DECODER)         += qsvdec_vc1.o
 +OBJS-$(CONFIG_VC2_ENCODER)             += vc2enc.o vc2enc_dwt.o diractab.o
  OBJS-$(CONFIG_VCR1_DECODER)            += vcr1.o
  OBJS-$(CONFIG_VMDAUDIO_DECODER)        += vmdaudio.o
  OBJS-$(CONFIG_VMDVIDEO_DECODER)        += vmdvideo.o
@@@ -567,15 -463,11 +567,15 @@@ OBJS-$(CONFIG_VP6_DECODER)             
                                            vp6dsp.o vp56rac.o
  OBJS-$(CONFIG_VP7_DECODER)             += vp8.o vp56rac.o
  OBJS-$(CONFIG_VP8_DECODER)             += vp8.o vp56rac.o
 -OBJS-$(CONFIG_VP9_DECODER)             += vp9.o vp9data.o vp9dsp.o \
 -                                          vp9block.o vp9prob.o vp9mvs.o vp56rac.o
 +OBJS-$(CONFIG_VP9_DECODER)             += vp9.o vp9dsp.o vp56rac.o vp9dsp_8bpp.o \
 +                                          vp9dsp_10bpp.o vp9dsp_12bpp.o
 +OBJS-$(CONFIG_VPLAYER_DECODER)         += textdec.o ass.o
  OBJS-$(CONFIG_VQA_DECODER)             += vqavideo.o
  OBJS-$(CONFIG_WAVPACK_DECODER)         += wavpack.o
 +OBJS-$(CONFIG_WAVPACK_ENCODER)         += wavpackenc.o
  OBJS-$(CONFIG_WEBP_DECODER)            += webp.o
 +OBJS-$(CONFIG_WEBVTT_DECODER)          += webvttdec.o ass.o
 +OBJS-$(CONFIG_WEBVTT_ENCODER)          += webvttenc.o ass_split.o
  OBJS-$(CONFIG_WMALOSSLESS_DECODER)     += wmalosslessdec.o wma_common.o
  OBJS-$(CONFIG_WMAPRO_DECODER)          += wmaprodec.o wma.o wma_common.o
  OBJS-$(CONFIG_WMAV1_DECODER)           += wmadec.o wma.o wma_common.o aactab.o
@@@ -586,7 -478,6 +586,7 @@@ OBJS-$(CONFIG_WMAVOICE_DECODER)        
                                            celp_filters.o \
                                            acelp_vectors.o acelp_filters.o
  OBJS-$(CONFIG_WMV1_DECODER)            += msmpeg4dec.o msmpeg4.o msmpeg4data.o
 +OBJS-$(CONFIG_WMV1_ENCODER)            += msmpeg4enc.o
  OBJS-$(CONFIG_WMV2_DECODER)            += wmv2dec.o wmv2.o wmv2data.o \
                                            msmpeg4dec.o msmpeg4.o msmpeg4data.o
  OBJS-$(CONFIG_WMV2_ENCODER)            += wmv2enc.o wmv2.o wmv2data.o \
@@@ -597,23 -488,14 +597,23 @@@ OBJS-$(CONFIG_WRAPPED_AVFRAME_ENCODER) 
  OBJS-$(CONFIG_XAN_DPCM_DECODER)        += dpcm.o
  OBJS-$(CONFIG_XAN_WC3_DECODER)         += xan.o
  OBJS-$(CONFIG_XAN_WC4_DECODER)         += xxan.o
 +OBJS-$(CONFIG_XBIN_DECODER)            += bintext.o cga_data.o
  OBJS-$(CONFIG_XBM_DECODER)             += xbmdec.o
  OBJS-$(CONFIG_XBM_ENCODER)             += xbmenc.o
 +OBJS-$(CONFIG_XFACE_DECODER)           += xfacedec.o xface.o
 +OBJS-$(CONFIG_XFACE_ENCODER)           += xfaceenc.o xface.o
  OBJS-$(CONFIG_XL_DECODER)              += xl.o
 +OBJS-$(CONFIG_XMA1_DECODER)            += wmaprodec.o wma.o wma_common.o
 +OBJS-$(CONFIG_XMA2_DECODER)            += wmaprodec.o wma.o wma_common.o
  OBJS-$(CONFIG_XSUB_DECODER)            += xsubdec.o
  OBJS-$(CONFIG_XSUB_ENCODER)            += xsubenc.o
  OBJS-$(CONFIG_XWD_DECODER)             += xwddec.o
  OBJS-$(CONFIG_XWD_ENCODER)             += xwdenc.o
 +OBJS-$(CONFIG_Y41P_DECODER)            += y41pdec.o
 +OBJS-$(CONFIG_Y41P_ENCODER)            += y41penc.o
  OBJS-$(CONFIG_YOP_DECODER)             += yop.o
 +OBJS-$(CONFIG_YUV4_DECODER)            += yuv4dec.o
 +OBJS-$(CONFIG_YUV4_ENCODER)            += yuv4enc.o
  OBJS-$(CONFIG_ZEROCODEC_DECODER)       += zerocodec.o
  OBJS-$(CONFIG_ZLIB_DECODER)            += lcldec.o
  OBJS-$(CONFIG_ZLIB_ENCODER)            += lclenc.o
@@@ -638,16 -520,13 +638,16 @@@ OBJS-$(CONFIG_PCM_MULAW_DECODER
  OBJS-$(CONFIG_PCM_MULAW_ENCODER)          += pcm.o
  OBJS-$(CONFIG_PCM_S8_DECODER)             += pcm.o
  OBJS-$(CONFIG_PCM_S8_ENCODER)             += pcm.o
 -OBJS-$(CONFIG_PCM_S8_PLANAR_DECODER)      += 8svx.o
 +OBJS-$(CONFIG_PCM_S8_PLANAR_DECODER)      += pcm.o
 +OBJS-$(CONFIG_PCM_S8_PLANAR_ENCODER)      += pcm.o
  OBJS-$(CONFIG_PCM_S16BE_DECODER)          += pcm.o
  OBJS-$(CONFIG_PCM_S16BE_ENCODER)          += pcm.o
  OBJS-$(CONFIG_PCM_S16BE_PLANAR_DECODER)   += pcm.o
 +OBJS-$(CONFIG_PCM_S16BE_PLANAR_ENCODER)   += pcm.o
  OBJS-$(CONFIG_PCM_S16LE_DECODER)          += pcm.o
  OBJS-$(CONFIG_PCM_S16LE_ENCODER)          += pcm.o
  OBJS-$(CONFIG_PCM_S16LE_PLANAR_DECODER)   += pcm.o
 +OBJS-$(CONFIG_PCM_S16LE_PLANAR_ENCODER)   += pcm.o
  OBJS-$(CONFIG_PCM_S24BE_DECODER)          += pcm.o
  OBJS-$(CONFIG_PCM_S24BE_ENCODER)          += pcm.o
  OBJS-$(CONFIG_PCM_S24DAUD_DECODER)        += pcm.o
@@@ -655,13 -534,11 +655,13 @@@ OBJS-$(CONFIG_PCM_S24DAUD_ENCODER
  OBJS-$(CONFIG_PCM_S24LE_DECODER)          += pcm.o
  OBJS-$(CONFIG_PCM_S24LE_ENCODER)          += pcm.o
  OBJS-$(CONFIG_PCM_S24LE_PLANAR_DECODER)   += pcm.o
 +OBJS-$(CONFIG_PCM_S24LE_PLANAR_ENCODER)   += pcm.o
  OBJS-$(CONFIG_PCM_S32BE_DECODER)          += pcm.o
  OBJS-$(CONFIG_PCM_S32BE_ENCODER)          += pcm.o
  OBJS-$(CONFIG_PCM_S32LE_DECODER)          += pcm.o
  OBJS-$(CONFIG_PCM_S32LE_ENCODER)          += pcm.o
  OBJS-$(CONFIG_PCM_S32LE_PLANAR_DECODER)   += pcm.o
 +OBJS-$(CONFIG_PCM_S32LE_PLANAR_ENCODER)   += pcm.o
  OBJS-$(CONFIG_PCM_U8_DECODER)             += pcm.o
  OBJS-$(CONFIG_PCM_U8_ENCODER)             += pcm.o
  OBJS-$(CONFIG_PCM_U16BE_DECODER)          += pcm.o
@@@ -681,10 -558,7 +681,10 @@@ OBJS-$(CONFIG_PCM_ZORK_DECODER
  OBJS-$(CONFIG_ADPCM_4XM_DECODER)          += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_ADX_DECODER)          += adxdec.o adx.o
  OBJS-$(CONFIG_ADPCM_ADX_ENCODER)          += adxenc.o adx.o
 +OBJS-$(CONFIG_ADPCM_AFC_DECODER)          += adpcm.o adpcm_data.o
 +OBJS-$(CONFIG_ADPCM_AICA_DECODER)         += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_CT_DECODER)           += adpcm.o adpcm_data.o
 +OBJS-$(CONFIG_ADPCM_DTK_DECODER)          += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_EA_DECODER)           += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_EA_MAXIS_XA_DECODER)  += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_EA_R1_DECODER)        += adpcm.o adpcm_data.o
@@@ -695,26 -569,21 +695,26 @@@ OBJS-$(CONFIG_ADPCM_G722_DECODER
  OBJS-$(CONFIG_ADPCM_G722_ENCODER)         += g722.o g722dsp.o g722enc.o
  OBJS-$(CONFIG_ADPCM_G726_DECODER)         += g726.o
  OBJS-$(CONFIG_ADPCM_G726_ENCODER)         += g726.o
 +OBJS-$(CONFIG_ADPCM_G726LE_DECODER)       += g726.o
  OBJS-$(CONFIG_ADPCM_IMA_AMV_DECODER)      += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_IMA_APC_DECODER)      += adpcm.o adpcm_data.o
 +OBJS-$(CONFIG_ADPCM_IMA_DAT4_DECODER)     += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_IMA_DK3_DECODER)      += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_IMA_DK4_DECODER)      += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_IMA_EA_EACS_DECODER)  += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_IMA_EA_SEAD_DECODER)  += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_IMA_ISS_DECODER)      += adpcm.o adpcm_data.o
 +OBJS-$(CONFIG_ADPCM_IMA_OKI_DECODER)      += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_IMA_QT_DECODER)       += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_IMA_QT_ENCODER)       += adpcmenc.o adpcm_data.o
 +OBJS-$(CONFIG_ADPCM_IMA_RAD_DECODER)      += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_IMA_SMJPEG_DECODER)   += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_IMA_WAV_DECODER)      += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_IMA_WAV_ENCODER)      += adpcmenc.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_IMA_WS_DECODER)       += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_MS_DECODER)           += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_MS_ENCODER)           += adpcmenc.o adpcm_data.o
 +OBJS-$(CONFIG_ADPCM_PSX_DECODER)          += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_SBPRO_2_DECODER)      += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_SBPRO_3_DECODER)      += adpcm.o adpcm_data.o
  OBJS-$(CONFIG_ADPCM_SBPRO_4_DECODER)      += adpcm.o adpcm_data.o
@@@ -730,102 -599,60 +730,102 @@@ OBJS-$(CONFIG_ADPCM_YAMAHA_ENCODER
  OBJS-$(CONFIG_D3D11VA)                    += dxva2.o
  OBJS-$(CONFIG_DXVA2)                      += dxva2.o
  OBJS-$(CONFIG_VAAPI)                      += vaapi.o
 -OBJS-$(CONFIG_VDA)                        += vda.o
 +OBJS-$(CONFIG_VDA)                        += vda.o videotoolbox.o
 +OBJS-$(CONFIG_VIDEOTOOLBOX)               += videotoolbox.o
  OBJS-$(CONFIG_VDPAU)                      += vdpau.o
  
  OBJS-$(CONFIG_H263_VAAPI_HWACCEL)         += vaapi_mpeg4.o
 +OBJS-$(CONFIG_H263_VIDEOTOOLBOX_HWACCEL)  += videotoolbox.o
  OBJS-$(CONFIG_H264_D3D11VA_HWACCEL)       += dxva2_h264.o
  OBJS-$(CONFIG_H264_DXVA2_HWACCEL)         += dxva2_h264.o
  OBJS-$(CONFIG_H264_VAAPI_HWACCEL)         += vaapi_h264.o
  OBJS-$(CONFIG_H264_VDA_HWACCEL)           += vda_h264.o
  OBJS-$(CONFIG_H264_VDPAU_HWACCEL)         += vdpau_h264.o
 +OBJS-$(CONFIG_H264_VIDEOTOOLBOX_HWACCEL)  += videotoolbox.o
  OBJS-$(CONFIG_HEVC_D3D11VA_HWACCEL)       += dxva2_hevc.o
  OBJS-$(CONFIG_HEVC_DXVA2_HWACCEL)         += dxva2_hevc.o
 +OBJS-$(CONFIG_HEVC_VAAPI_HWACCEL)         += vaapi_hevc.o
  OBJS-$(CONFIG_HEVC_VDPAU_HWACCEL)         += vdpau_hevc.o
  OBJS-$(CONFIG_MPEG1_VDPAU_HWACCEL)        += vdpau_mpeg12.o
 +OBJS-$(CONFIG_MPEG1_VIDEOTOOLBOX_HWACCEL) += videotoolbox.o
 +OBJS-$(CONFIG_MPEG1_XVMC_HWACCEL)         += mpegvideo_xvmc.o
  OBJS-$(CONFIG_MPEG2_D3D11VA_HWACCEL)      += dxva2_mpeg2.o
  OBJS-$(CONFIG_MPEG2_DXVA2_HWACCEL)        += dxva2_mpeg2.o
  OBJS-$(CONFIG_MPEG2_VAAPI_HWACCEL)        += vaapi_mpeg2.o
  OBJS-$(CONFIG_MPEG2_VDPAU_HWACCEL)        += vdpau_mpeg12.o
 +OBJS-$(CONFIG_MPEG2_VIDEOTOOLBOX_HWACCEL) += videotoolbox.o
 +OBJS-$(CONFIG_MPEG2_XVMC_HWACCEL)         += mpegvideo_xvmc.o
  OBJS-$(CONFIG_MPEG4_VAAPI_HWACCEL)        += vaapi_mpeg4.o
  OBJS-$(CONFIG_MPEG4_VDPAU_HWACCEL)        += vdpau_mpeg4.o
 +OBJS-$(CONFIG_MPEG4_VIDEOTOOLBOX_HWACCEL) += videotoolbox.o
  OBJS-$(CONFIG_VC1_D3D11VA_HWACCEL)        += dxva2_vc1.o
  OBJS-$(CONFIG_VC1_DXVA2_HWACCEL)          += dxva2_vc1.o
  OBJS-$(CONFIG_VC1_VAAPI_HWACCEL)          += vaapi_vc1.o
  OBJS-$(CONFIG_VC1_VDPAU_HWACCEL)          += vdpau_vc1.o
 +OBJS-$(CONFIG_VP9_D3D11VA_HWACCEL)        += dxva2_vp9.o
 +OBJS-$(CONFIG_VP9_DXVA2_HWACCEL)          += dxva2_vp9.o
 +OBJS-$(CONFIG_VP9_VAAPI_HWACCEL)          += vaapi_vp9.o
  
  # libavformat dependencies
  OBJS-$(CONFIG_ISO_MEDIA)               += mpeg4audio.o mpegaudiodata.o
  
  OBJS-$(CONFIG_ADTS_MUXER)              += mpeg4audio.o
  OBJS-$(CONFIG_CAF_DEMUXER)             += ac3tab.o
 -OBJS-$(CONFIG_FLAC_MUXER)              += flac.o flacdata.o
 +OBJS-$(CONFIG_DNXHD_DEMUXER)           += dnxhddata.o
 +OBJS-$(CONFIG_FLAC_DEMUXER)            += flac.o flacdata.o vorbis_data.o
 +OBJS-$(CONFIG_FLAC_MUXER)              += flac.o flacdata.o vorbis_data.o
  OBJS-$(CONFIG_FLV_DEMUXER)             += mpeg4audio.o
  OBJS-$(CONFIG_GXF_DEMUXER)             += mpeg12data.o
  OBJS-$(CONFIG_IFF_DEMUXER)             += iff.o
  OBJS-$(CONFIG_LATM_MUXER)              += mpeg4audio.o
 -OBJS-$(CONFIG_MATROSKA_AUDIO_MUXER)    += mpeg4audio.o                  \
 +OBJS-$(CONFIG_MATROSKA_AUDIO_MUXER)    += mpeg4audio.o vorbis_data.o    \
                                            flac.o flacdata.o
 -OBJS-$(CONFIG_MATROSKA_MUXER)          += flac.o flacdata.o
 +OBJS-$(CONFIG_MATROSKA_MUXER)          += flac.o flacdata.o vorbis_data.o
  OBJS-$(CONFIG_MP2_MUXER)               += mpegaudiodata.o mpegaudiodecheader.o
  OBJS-$(CONFIG_MP3_MUXER)               += mpegaudiodata.o mpegaudiodecheader.o
  OBJS-$(CONFIG_MOV_DEMUXER)             += ac3tab.o
  OBJS-$(CONFIG_MPEGTS_MUXER)            += mpeg4audio.o
 +OBJS-$(CONFIG_MXF_MUXER)               += dnxhddata.o
  OBJS-$(CONFIG_NUT_MUXER)               += mpegaudiodata.o
 +OBJS-$(CONFIG_NUT_DEMUXER)             += mpegaudiodata.o mpeg4audio.o
 +OBJS-$(CONFIG_OGA_MUXER)               += flac.o flacdata.o
  OBJS-$(CONFIG_OGG_DEMUXER)             += mpeg12data.o \
 -                                          dirac.o
 -OBJS-$(CONFIG_OGG_MUXER)               += flac.o flacdata.o
 +                                          dirac.o vorbis_data.o
 +OBJS-$(CONFIG_OGG_MUXER)               += flac.o flacdata.o \
 +                                          vorbis_data.o
  OBJS-$(CONFIG_RTP_MUXER)               += mpeg4audio.o
  OBJS-$(CONFIG_SPDIF_DEMUXER)           += aacadtsdec.o mpeg4audio.o
  OBJS-$(CONFIG_SPDIF_MUXER)             += dca.o
  OBJS-$(CONFIG_TAK_DEMUXER)             += tak.o
  OBJS-$(CONFIG_WEBM_MUXER)              += mpeg4audio.o mpegaudiodata.o  \
 -                                          flac.o flacdata.o
 +                                          flac.o flacdata.o \
 +                                          vorbis_data.o
 +
 +# libavfilter dependencies
 +OBJS-$(CONFIG_ELBG_FILTER)             += elbg.o
  
  # external codec libraries
 -OBJS-$(CONFIG_LIBDCADEC_DECODER)          += libdcadec.o dca.o
 +OBJS-$(CONFIG_AAC_AT_DECODER)             += audiotoolboxdec.o
 +OBJS-$(CONFIG_AC3_AT_DECODER)             += audiotoolboxdec.o
 +OBJS-$(CONFIG_ADPCM_IMA_QT_AT_DECODER)    += audiotoolboxdec.o
 +OBJS-$(CONFIG_ALAC_AT_DECODER)            += audiotoolboxdec.o
 +OBJS-$(CONFIG_AMR_NB_AT_DECODER)          += audiotoolboxdec.o
 +OBJS-$(CONFIG_EAC3_AT_DECODER)            += audiotoolboxdec.o
 +OBJS-$(CONFIG_GSM_MS_AT_DECODER)          += audiotoolboxdec.o
 +OBJS-$(CONFIG_ILBC_AT_DECODER)            += audiotoolboxdec.o
 +OBJS-$(CONFIG_MP1_AT_DECODER)             += audiotoolboxdec.o mpegaudiodecheader.o
 +OBJS-$(CONFIG_MP2_AT_DECODER)             += audiotoolboxdec.o mpegaudiodecheader.o
 +OBJS-$(CONFIG_MP3_AT_DECODER)             += audiotoolboxdec.o mpegaudiodecheader.o
 +OBJS-$(CONFIG_PCM_MULAW_AT_DECODER)       += audiotoolboxdec.o
 +OBJS-$(CONFIG_PCM_ALAW_AT_DECODER)        += audiotoolboxdec.o
 +OBJS-$(CONFIG_QDMC_AT_DECODER)            += audiotoolboxdec.o
 +OBJS-$(CONFIG_QDM2_AT_DECODER)            += audiotoolboxdec.o
 +OBJS-$(CONFIG_AAC_AT_ENCODER)             += audiotoolboxenc.o
 +OBJS-$(CONFIG_ALAC_AT_ENCODER)            += audiotoolboxenc.o
 +OBJS-$(CONFIG_ILBC_AT_ENCODER)            += audiotoolboxenc.o
 +OBJS-$(CONFIG_PCM_ALAW_AT_ENCODER)        += audiotoolboxenc.o
 +OBJS-$(CONFIG_PCM_MULAW_AT_ENCODER)       += audiotoolboxenc.o
 +OBJS-$(CONFIG_LIBCELT_DECODER)            += libcelt_dec.o
  OBJS-$(CONFIG_LIBFAAC_ENCODER)            += libfaac.o
  OBJS-$(CONFIG_LIBFDK_AAC_DECODER)         += libfdk-aacdec.o
  OBJS-$(CONFIG_LIBFDK_AAC_ENCODER)         += libfdk-aacenc.o
@@@ -851,30 -678,25 +851,30 @@@ OBJS-$(CONFIG_LIBSCHROEDINGER_DECODER
                                               libschroedinger.o
  OBJS-$(CONFIG_LIBSCHROEDINGER_ENCODER)    += libschroedingerenc.o \
                                               libschroedinger.o
 +OBJS-$(CONFIG_LIBSHINE_ENCODER)           += libshine.o
  OBJS-$(CONFIG_LIBSPEEX_DECODER)           += libspeexdec.o
  OBJS-$(CONFIG_LIBSPEEX_ENCODER)           += libspeexenc.o
  OBJS-$(CONFIG_LIBTHEORA_ENCODER)          += libtheoraenc.o
  OBJS-$(CONFIG_LIBTWOLAME_ENCODER)         += libtwolame.o
 -OBJS-$(CONFIG_LIBVO_AACENC_ENCODER)       += libvo-aacenc.o mpeg4audio.o
 +OBJS-$(CONFIG_LIBUTVIDEO_DECODER)         += libutvideodec.o
 +OBJS-$(CONFIG_LIBUTVIDEO_ENCODER)         += libutvideoenc.o
  OBJS-$(CONFIG_LIBVO_AMRWBENC_ENCODER)     += libvo-amrwbenc.o
 -OBJS-$(CONFIG_LIBVORBIS_ENCODER)          += libvorbis.o \
 +OBJS-$(CONFIG_LIBVORBIS_DECODER)          += libvorbisdec.o
 +OBJS-$(CONFIG_LIBVORBIS_ENCODER)          += libvorbisenc.o \
                                               vorbis_data.o
 -OBJS-$(CONFIG_LIBVPX_VP8_DECODER)         += libvpxdec.o libvpx.o
 -OBJS-$(CONFIG_LIBVPX_VP8_ENCODER)         += libvpxenc.o libvpx.o
 +OBJS-$(CONFIG_LIBVPX_VP8_DECODER)         += libvpxdec.o
 +OBJS-$(CONFIG_LIBVPX_VP8_ENCODER)         += libvpxenc.o
  OBJS-$(CONFIG_LIBVPX_VP9_DECODER)         += libvpxdec.o libvpx.o
  OBJS-$(CONFIG_LIBVPX_VP9_ENCODER)         += libvpxenc.o libvpx.o
  OBJS-$(CONFIG_LIBWAVPACK_ENCODER)         += libwavpackenc.o
 -OBJS-$(CONFIG_LIBWEBP_ENCODER)            += libwebpenc.o
 +OBJS-$(CONFIG_LIBWEBP_ENCODER)            += libwebpenc_common.o libwebpenc.o
 +OBJS-$(CONFIG_LIBWEBP_ANIM_ENCODER)       += libwebpenc_common.o libwebpenc_animencoder.o
  OBJS-$(CONFIG_LIBX262_ENCODER)            += libx264.o
  OBJS-$(CONFIG_LIBX264_ENCODER)            += libx264.o
  OBJS-$(CONFIG_LIBX265_ENCODER)            += libx265.o
  OBJS-$(CONFIG_LIBXAVS_ENCODER)            += libxavs.o
  OBJS-$(CONFIG_LIBXVID_ENCODER)            += libxvid.o
 +OBJS-$(CONFIG_LIBZVBI_TELETEXT_DECODER)   += libzvbi-teletextdec.o
  
  # parsers
  OBJS-$(CONFIG_AAC_PARSER)              += aac_parser.o aac_ac3_parser.o \
@@@ -890,24 -712,19 +890,24 @@@ OBJS-$(CONFIG_DCA_PARSER)              
  OBJS-$(CONFIG_DIRAC_PARSER)            += dirac_parser.o
  OBJS-$(CONFIG_DNXHD_PARSER)            += dnxhd_parser.o
  OBJS-$(CONFIG_DPX_PARSER)              += dpx_parser.o
 +OBJS-$(CONFIG_DVAUDIO_PARSER)          += dvaudio_parser.o
  OBJS-$(CONFIG_DVBSUB_PARSER)           += dvbsub_parser.o
 +OBJS-$(CONFIG_DVD_NAV_PARSER)          += dvd_nav_parser.o
  OBJS-$(CONFIG_DVDSUB_PARSER)           += dvdsub_parser.o
 -OBJS-$(CONFIG_FLAC_PARSER)             += flac_parser.o flacdata.o flac.o
 +OBJS-$(CONFIG_FLAC_PARSER)             += flac_parser.o flacdata.o flac.o \
 +                                          vorbis_data.o
 +OBJS-$(CONFIG_G729_PARSER)             += g729_parser.o
  OBJS-$(CONFIG_GSM_PARSER)              += gsm_parser.o
  OBJS-$(CONFIG_H261_PARSER)             += h261_parser.o
  OBJS-$(CONFIG_H263_PARSER)             += h263_parser.o
- OBJS-$(CONFIG_H264_PARSER)             += h264_parser.o
+ OBJS-$(CONFIG_H264_PARSER)             += h264_parser.o h264_parse.o
  OBJS-$(CONFIG_HEVC_PARSER)             += hevc_parser.o h2645_parse.o hevc_ps.o hevc_data.o
  OBJS-$(CONFIG_MJPEG_PARSER)            += mjpeg_parser.o
  OBJS-$(CONFIG_MLP_PARSER)              += mlp_parser.o mlp.o
  OBJS-$(CONFIG_MPEG4VIDEO_PARSER)       += mpeg4video_parser.o h263.o \
                                            mpeg4videodec.o mpeg4video.o \
                                            ituh263dec.o h263dec.o h263data.o
 +OBJS-$(CONFIG_PNG_PARSER)              += png_parser.o
  OBJS-$(CONFIG_MPEGAUDIO_PARSER)        += mpegaudio_parser.o \
                                            mpegaudiodecheader.o mpegaudiodata.o
  OBJS-$(CONFIG_MPEGVIDEO_PARSER)        += mpegvideo_parser.o    \
@@@ -922,89 -739,57 +922,89 @@@ OBJS-$(CONFIG_VC1_PARSER)              
                                            simple_idct.o wmv2data.o
  OBJS-$(CONFIG_VP3_PARSER)              += vp3_parser.o
  OBJS-$(CONFIG_VP8_PARSER)              += vp8_parser.o
 +OBJS-$(CONFIG_VP9_PARSER)              += vp9_parser.o
  
  # bitstream filters
  OBJS-$(CONFIG_AAC_ADTSTOASC_BSF)          += aac_adtstoasc_bsf.o aacadtsdec.o \
                                               mpeg4audio.o
  OBJS-$(CONFIG_CHOMP_BSF)                  += chomp_bsf.o
  OBJS-$(CONFIG_DUMP_EXTRADATA_BSF)         += dump_extradata_bsf.o
 +OBJS-$(CONFIG_DCA_CORE_BSF)               += dca_core_bsf.o
  OBJS-$(CONFIG_H264_MP4TOANNEXB_BSF)       += h264_mp4toannexb_bsf.o
  OBJS-$(CONFIG_HEVC_MP4TOANNEXB_BSF)       += hevc_mp4toannexb_bsf.o
  OBJS-$(CONFIG_IMX_DUMP_HEADER_BSF)        += imx_dump_header_bsf.o
  OBJS-$(CONFIG_MJPEG2JPEG_BSF)             += mjpeg2jpeg_bsf.o
  OBJS-$(CONFIG_MJPEGA_DUMP_HEADER_BSF)     += mjpega_dump_header_bsf.o
 +OBJS-$(CONFIG_MPEG4_UNPACK_BFRAMES_BSF)   += mpeg4_unpack_bframes_bsf.o
  OBJS-$(CONFIG_MOV2TEXTSUB_BSF)            += movsub_bsf.o
 +OBJS-$(CONFIG_MP3_HEADER_DECOMPRESS_BSF)  += mp3_header_decompress_bsf.o \
 +                                             mpegaudiodata.o
  OBJS-$(CONFIG_NOISE_BSF)                  += noise_bsf.o
  OBJS-$(CONFIG_REMOVE_EXTRADATA_BSF)       += remove_extradata_bsf.o
  OBJS-$(CONFIG_TEXT2MOVSUB_BSF)            += movsub_bsf.o
 +OBJS-$(CONFIG_VP9_SUPERFRAME_BSF)         += vp9_superframe_bsf.o
  
  # thread libraries
  OBJS-$(HAVE_LIBC_MSVCRT)               += file_open.o
  OBJS-$(HAVE_THREADS)                   += pthread.o pthread_slice.o pthread_frame.o
  
 +OBJS-$(CONFIG_FRAME_THREAD_ENCODER)    += frame_thread_encoder.o
 +
 +# Windows resource file
 +SLIBOBJS-$(HAVE_GNU_WINDRES)           += avcodecres.o
 +
  SKIPHEADERS                            += %_tablegen.h                  \
                                            %_tables.h                    \
 -                                          aac_tablegen_decl.h           \
                                            fft-internal.h                \
                                            tableprint.h                  \
 +                                          tableprint_vlc.h              \
 +                                          aaccoder_twoloop.h            \
 +                                          aaccoder_trellis.h            \
 +                                          aacenc_quantization.h         \
 +                                          aacenc_quantization_misc.h    \
                                            $(ARCH)/vp56_arith.h          \
  
  SKIPHEADERS-$(CONFIG_D3D11VA)          += d3d11va.h dxva2_internal.h
  SKIPHEADERS-$(CONFIG_DXVA2)            += dxva2.h dxva2_internal.h
 +SKIPHEADERS-$(CONFIG_JNI)              += ffjni.h
  SKIPHEADERS-$(CONFIG_LIBSCHROEDINGER)  += libschroedinger.h
 +SKIPHEADERS-$(CONFIG_LIBUTVIDEO)       += libutvideo.h
  SKIPHEADERS-$(CONFIG_LIBVPX)           += libvpx.h
 -SKIPHEADERS-$(CONFIG_MPEG_XVMC_DECODER) += xvmc.h
 -SKIPHEADERS-$(CONFIG_NVENC)            += nvenc.h
 +SKIPHEADERS-$(CONFIG_LIBWEBP_ENCODER)  += libwebpenc_common.h
 +SKIPHEADERS-$(CONFIG_MEDIACODEC)       += mediacodecdec.h mediacodec_wrapper.h mediacodec_sw_buffer.h
  SKIPHEADERS-$(CONFIG_QSV)              += qsv.h qsv_internal.h
  SKIPHEADERS-$(CONFIG_QSVDEC)           += qsvdec.h
  SKIPHEADERS-$(CONFIG_QSVENC)           += qsvenc.h
 +SKIPHEADERS-$(CONFIG_XVMC)             += xvmc.h
  SKIPHEADERS-$(CONFIG_VAAPI)            += vaapi_internal.h
 -SKIPHEADERS-$(CONFIG_VDA)              += vda.h vda_internal.h
 +SKIPHEADERS-$(CONFIG_VDA)              += vda.h vda_vt_internal.h
  SKIPHEADERS-$(CONFIG_VDPAU)            += vdpau.h vdpau_internal.h
 +SKIPHEADERS-$(CONFIG_VIDEOTOOLBOX)     += videotoolbox.h vda_vt_internal.h
 +
 +TESTPROGS = imgconvert                                                  \
 +            jpeg2000dwt                                                 \
 +            mathops                                                    \
 +            options                                                     \
 +            utils                                                       \
 +            avfft                                                       \
  
 -TESTPROGS-$(CONFIG_FFT)                   += fft fft-fixed
 +TESTPROGS-$(CONFIG_CABAC)                 += cabac
 +TESTPROGS-$(CONFIG_FFT)                   += fft fft-fixed fft-fixed32
  TESTPROGS-$(CONFIG_IDCTDSP)               += dct
  TESTPROGS-$(CONFIG_IIRFILTER)             += iirfilter
 +TESTPROGS-$(HAVE_MMX)                     += motion
  TESTPROGS-$(CONFIG_GOLOMB)                += golomb
  TESTPROGS-$(CONFIG_RANGECODER)            += rangecoder
 +TESTPROGS-$(CONFIG_SNOW_ENCODER)          += snowenc
  
  TESTOBJS = dctref.o
  
 -HOSTPROGS = aac_tablegen                                                \
 -            aacps_tablegen                                              \
 +TOOLS = fourcc2pixfmt
 +
 +HOSTPROGS = aacps_tablegen                                              \
 +            aacps_fixed_tablegen                                        \
              cbrt_tablegen                                               \
 +            cbrt_fixed_tablegen                                         \
              cos_tablegen                                                \
              dv_tablegen                                                 \
              motionpixels_tablegen                                       \
              pcm_tablegen                                                \
              qdm2_tablegen                                               \
              sinewin_tablegen                                            \
 +            sinewin_fixed_tablegen                                      \
  
  CLEANFILES = *_tables.c *_tables.h *_tablegen$(HOSTEXESUF)
  
@@@ -1031,9 -815,8 +1031,9 @@@ els
  $(SUBDIR)%_tablegen$(HOSTEXESUF): HOSTCFLAGS += -DCONFIG_SMALL=0
  endif
  
 -GEN_HEADERS = cbrt_tables.h aacps_tables.h aac_tables.h dv_tables.h     \
 -              sinewin_tables.h mpegaudio_tables.h motionpixels_tables.h \
 +GEN_HEADERS = cbrt_tables.h cbrt_fixed_tables.h aacps_tables.h aacps_fixed_tables.h \
 +              dv_tables.h     \
 +              sinewin_tables.h sinewin_fixed_tables.h mpegaudio_tables.h motionpixels_tables.h \
                pcm_tables.h qdm2_tables.h
  GEN_HEADERS := $(addprefix $(SUBDIR), $(GEN_HEADERS))
  
@@@ -1041,14 -824,11 +1041,14 @@@ $(GEN_HEADERS): $(SUBDIR)%_tables.h: $(
        $(M)./$< > $@
  
  ifdef CONFIG_HARDCODED_TABLES
 -$(SUBDIR)aacdec.o: $(SUBDIR)cbrt_tables.h
 -$(SUBDIR)aacps.o: $(SUBDIR)aacps_tables.h
 -$(SUBDIR)aactab.o: $(SUBDIR)aac_tables.h
 +$(SUBDIR)cbrt_data.o: $(SUBDIR)cbrt_tables.h
 +$(SUBDIR)cbrt_data_fixed.o: $(SUBDIR)cbrt_fixed_tables.h
 +$(SUBDIR)aacps_float.o: $(SUBDIR)aacps_tables.h
 +$(SUBDIR)aacps_fixed.o: $(SUBDIR)aacps_fixed_tables.h
 +$(SUBDIR)aactab_fixed.o: $(SUBDIR)aac_fixed_tables.h
  $(SUBDIR)dvenc.o: $(SUBDIR)dv_tables.h
  $(SUBDIR)sinewin.o: $(SUBDIR)sinewin_tables.h
 +$(SUBDIR)sinewin_fixed.o: $(SUBDIR)sinewin_fixed_tables.h
  $(SUBDIR)mpegaudiodec_fixed.o: $(SUBDIR)mpegaudio_tables.h
  $(SUBDIR)mpegaudiodec_float.o: $(SUBDIR)mpegaudio_tables.h
  $(SUBDIR)motionpixels.o: $(SUBDIR)motionpixels_tables.h
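
Note on the table-generation rules above: the GEN_HEADERS recipe ("$(M)./$< > $@") builds each *_tablegen host program, runs it, and redirects its stdout into the matching *_tables.h header, which the objects listed under CONFIG_HARDCODED_TABLES then depend on. The sketch below only illustrates that pattern; it is not one of the real tablegen sources, and the table name and contents are invented for the example.

    /* Hypothetical tablegen-style host program: computes a table at build time
     * and prints it as a C array on stdout, so a Makefile rule such as
     * "$(M)./$< > $@" can capture the output into a *_tables.h header. */
    #include <stdio.h>
    #include <math.h>

    int main(void)
    {
        printf("static const float example_cbrt_table[64] = {\n");
        for (int i = 0; i < 64; i++)
            printf("    %.10ff,%s", cbrt(i), (i & 3) == 3 ? "\n" : " ");
        printf("};\n");
        return 0;
    }
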
diff --combined libavcodec/dxva2_h264.c
index b1abb9a9af6de584f8044b06939bd66885919cf5,2d6fa79152dd49394255529fc26ae76b098b9b07..7048c4aada915e06cbd6934975cc233cd98d10d1
@@@ -3,25 -3,23 +3,25 @@@
   *
   * copyright (c) 2009 Laurent Aimar
   *
 - * This file is part of Libav.
 + * This file is part of FFmpeg.
   *
 - * Libav is free software; you can redistribute it and/or
 + * FFmpeg is free software; you can redistribute it and/or
   * modify it under the terms of the GNU Lesser General Public
   * License as published by the Free Software Foundation; either
   * version 2.1 of the License, or (at your option) any later version.
   *
 - * Libav is distributed in the hope that it will be useful,
 + * FFmpeg is distributed in the hope that it will be useful,
   * but WITHOUT ANY WARRANTY; without even the implied warranty of
   * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   * Lesser General Public License for more details.
   *
   * You should have received a copy of the GNU Lesser General Public
 - * License along with Libav; if not, write to the Free Software
 + * License along with FFmpeg; if not, write to the Free Software
   * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
   */
  
 +#include "libavutil/avassert.h"
 +
  #include "h264.h"
  #include "h264data.h"
  #include "mpegutils.h"
@@@ -102,7 -100,7 +102,7 @@@ static void fill_picture_parameters(con
                                          ((h->sps.mb_aff &&
                                          (h->picture_structure == PICT_FRAME)) <<  1) |
                                          (h->sps.residual_color_transform_flag <<  2) |
 -                                        /* sp_for_switch_flag (not implemented by Libav) */
 +                                        /* sp_for_switch_flag (not implemented by FFmpeg) */
                                          (0                                    <<  3) |
                                          (h->sps.chroma_format_idc             <<  4) |
                                          ((h->nal_ref_idc != 0)                <<  6) |
      pp->deblocking_filter_control_present_flag = h->pps.deblocking_filter_parameters_present;
      pp->redundant_pic_cnt_present_flag= h->pps.redundant_pic_cnt_present;
      pp->Reserved8BitsB                = 0;
 -    pp->slice_group_change_rate_minus1= 0;  /* XXX not implemented by Libav */
 -    //pp->SliceGroupMap[810];               /* XXX not implemented by Libav */
 +    pp->slice_group_change_rate_minus1= 0;  /* XXX not implemented by FFmpeg */
 +    //pp->SliceGroupMap[810];               /* XXX not implemented by FFmpeg */
  }
  
  static void fill_scaling_lists(const AVCodecContext *avctx, AVDXVAContext *ctx, const H264Context *h, DXVA_Qmatrix_H264 *qm)
@@@ -232,8 -230,8 +232,8 @@@ static void fill_slice_long(AVCodecCont
      slice->slice_type            = ff_h264_get_slice_type(sl);
      if (sl->slice_type_fixed)
          slice->slice_type += 5;
-     slice->luma_log2_weight_denom       = sl->luma_log2_weight_denom;
-     slice->chroma_log2_weight_denom     = sl->chroma_log2_weight_denom;
+     slice->luma_log2_weight_denom       = sl->pwt.luma_log2_weight_denom;
+     slice->chroma_log2_weight_denom     = sl->pwt.chroma_log2_weight_denom;
      if (sl->list_count > 0)
          slice->num_ref_idx_l0_active_minus1 = sl->ref_count[0] - 1;
      if (sl->list_count > 1)
                  else
                      index = get_refpic_index(pp, ff_dxva2_get_surface_index(avctx, ctx, r->f));
                  fill_picture_entry(&slice->RefPicList[list][i], index,
 -                                   r->reference == PICT_BOTTOM_FIELD);
 +                                   sl->ref_list[list][i].reference == PICT_BOTTOM_FIELD);
                  for (plane = 0; plane < 3; plane++) {
                      int w, o;
-                     if (plane == 0 && sl->luma_weight_flag[list]) {
-                         w = sl->luma_weight[i][list][0];
-                         o = sl->luma_weight[i][list][1];
-                     } else if (plane >= 1 && sl->chroma_weight_flag[list]) {
-                         w = sl->chroma_weight[i][list][plane-1][0];
-                         o = sl->chroma_weight[i][list][plane-1][1];
+                     if (plane == 0 && sl->pwt.luma_weight_flag[list]) {
+                         w = sl->pwt.luma_weight[i][list][0];
+                         o = sl->pwt.luma_weight[i][list][1];
+                     } else if (plane >= 1 && sl->pwt.chroma_weight_flag[list]) {
+                         w = sl->pwt.chroma_weight[i][list][plane-1][0];
+                         o = sl->pwt.chroma_weight[i][list][plane-1][1];
                      } else {
-                         w = 1 << (plane == 0 ? sl->luma_log2_weight_denom :
-                                                sl->chroma_log2_weight_denom);
+                         w = 1 << (plane == 0 ? sl->pwt.luma_log2_weight_denom :
+                                                sl->pwt.chroma_log2_weight_denom);
                          o = 0;
                      }
                      slice->Weights[list][i][plane][0] = w;
              }
          }
      }
 -    slice->slice_qs_delta    = 0; /* XXX not implemented by Libav */
 +    slice->slice_qs_delta    = 0; /* XXX not implemented by FFmpeg */
      slice->slice_qp_delta    = sl->qscale - h->pps.init_qp;
      slice->redundant_pic_cnt = sl->redundant_pic_count;
      if (sl->slice_type == AV_PICTURE_TYPE_B)
@@@ -304,9 -302,9 +304,9 @@@ static int commit_bitstream_and_slice_b
      const H264Picture *current_picture = h->cur_pic_ptr;
      struct dxva2_picture_context *ctx_pic = current_picture->hwaccel_picture_private;
      DXVA_Slice_H264_Short *slice = NULL;
 -    void     *dxva_data_ptr;
 +    void     *dxva_data_ptr = NULL;
      uint8_t  *dxva_data, *current, *end;
 -    unsigned dxva_size;
 +    unsigned dxva_size = 0;
      void     *slice_data;
      unsigned slice_size;
      unsigned padding;
          dsc11->NumMBsInBuffer       = mb_count;
  
          type = D3D11_VIDEO_DECODER_BUFFER_SLICE_CONTROL;
 +
 +        av_assert0((dsc11->DataSize & 127) == 0);
      }
  #endif
  #if CONFIG_DXVA2
          dsc2->NumMBsInBuffer       = mb_count;
  
          type = DXVA2_SliceControlBufferType;
 +
 +        av_assert0((dsc2->DataSize & 127) == 0);
      }
  #endif
  
          slice_data = ctx_pic->slice_long;
          slice_size = ctx_pic->slice_count * sizeof(*ctx_pic->slice_long);
      }
 -    assert((bs->DataSize & 127) == 0);
      return ff_dxva2_commit_buffer(avctx, ctx, sc,
                                    type,
                                    slice_data, slice_size, mb_count);
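
Note on the dxva2_h264.c hunks above: they only change where the prediction-weight fields are read from. What used to be flat members on the slice context (sl->luma_weight, sl->chroma_log2_weight_denom, ...) is now reached through sl->pwt. The struct below is a rough sketch of that grouping for orientation only; the struct name and array bounds are assumptions, not the real header.

    /* Illustrative grouping of the per-slice prediction-weight state implied by
     * the sl->pwt accesses above; dimensions are assumptions for this sketch. */
    typedef struct ExamplePredWeightTable {
        int use_weight;
        int use_weight_chroma;
        int luma_log2_weight_denom;
        int chroma_log2_weight_denom;
        int luma_weight_flag[2];          // per reference list
        int chroma_weight_flag[2];
        int luma_weight[48][2][2];        // [ref][list][weight, offset]
        int chroma_weight[48][2][2][2];   // [ref][list][plane][weight, offset]
    } ExamplePredWeightTable;
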
diff --combined libavcodec/h264.c
index 88768af733ddd844233397e6ac119562ca65714f,5332203436721be3e32dea3865987bee7ca26e10..22375aa2dbe7deb18fa988af583079549036c03a
@@@ -2,20 -2,20 +2,20 @@@
   * H.26L/H.264/AVC/JVT/14496-10/... decoder
   * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
   *
 - * This file is part of Libav.
 + * This file is part of FFmpeg.
   *
 - * Libav is free software; you can redistribute it and/or
 + * FFmpeg is free software; you can redistribute it and/or
   * modify it under the terms of the GNU Lesser General Public
   * License as published by the Free Software Foundation; either
   * version 2.1 of the License, or (at your option) any later version.
   *
 - * Libav is distributed in the hope that it will be useful,
 + * FFmpeg is distributed in the hope that it will be useful,
   * but WITHOUT ANY WARRANTY; without even the implied warranty of
   * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   * Lesser General Public License for more details.
   *
   * You should have received a copy of the GNU Lesser General Public
 - * License along with Libav; if not, write to the Free Software
 + * License along with FFmpeg; if not, write to the Free Software
   * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
   */
  
@@@ -25,8 -25,6 +25,8 @@@
   * @author Michael Niedermayer <michaelni@gmx.at>
   */
  
 +#define UNCHECKED_BITSTREAM_READER 1
 +
  #include "libavutil/avassert.h"
  #include "libavutil/display.h"
  #include "libavutil/imgutils.h"
  #include "rectangle.h"
  #include "svq3.h"
  #include "thread.h"
 +#include "vdpau_compat.h"
  
 -#include <assert.h>
 +static int h264_decode_end(AVCodecContext *avctx);
  
  const uint16_t ff_h264_mb_sizes[4] = { 256, 384, 512, 768 };
  
 +int avpriv_h264_has_num_reorder_frames(AVCodecContext *avctx)
 +{
 +    H264Context *h = avctx->priv_data;
 +    return h ? h->sps.num_reorder_frames : 0;
 +}
 +
  static void h264_er_decode_mb(void *opaque, int ref, int mv_dir, int mv_type,
                                int (*mv)[2][4][2],
                                int mb_x, int mb_y, int mb_intra, int mb_skipped)
      sl->mb_y = mb_y;
      sl->mb_xy = mb_x + mb_y * h->mb_stride;
      memset(sl->non_zero_count_cache, 0, sizeof(sl->non_zero_count_cache));
 -    assert(ref >= 0);
 +    av_assert1(ref >= 0);
      /* FIXME: It is possible albeit uncommon that slice references
       * differ between slices. We take the easy approach and ignore
       * it for now. If this turns out to have any relevance in
       * practice then correct remapping should be added. */
      if (ref >= sl->ref_count[0])
          ref = 0;
 +    if (!sl->ref_list[0][ref].data[0]) {
 +        av_log(h->avctx, AV_LOG_DEBUG, "Reference not available for error concealing\n");
 +        ref = 0;
 +    }
 +    if ((sl->ref_list[0][ref].reference&3) != 3) {
 +        av_log(h->avctx, AV_LOG_DEBUG, "Reference invalid\n");
 +        return;
 +    }
      fill_rectangle(&h->cur_pic.ref_index[0][4 * sl->mb_xy],
                     2, 2, 2, ref, 1);
      fill_rectangle(&sl->ref_cache[0][scan8[0]], 4, 4, 8, ref, 1);
      fill_rectangle(sl->mv_cache[0][scan8[0]], 4, 4, 8,
                     pack16to32((*mv)[0][0][0], (*mv)[0][0][1]), 4);
 -    assert(!FRAME_MBAFF(h));
 +    sl->mb_mbaff =
 +    sl->mb_field_decoding_flag = 0;
      ff_h264_hl_decode_mb(h, &h->slice_ctx[0]);
  }
  
@@@ -209,18 -191,18 +209,18 @@@ int ff_h264_check_intra_pred_mode(cons
  
      if ((sl->left_samples_available & 0x8080) != 0x8080) {
          mode = left[mode];
 -        if (is_chroma && (sl->left_samples_available & 0x8080)) {
 -            // mad cow disease mode, aka MBAFF + constrained_intra_pred
 -            mode = ALZHEIMER_DC_L0T_PRED8x8 +
 -                   (!(sl->left_samples_available & 0x8000)) +
 -                   2 * (mode == DC_128_PRED8x8);
 -        }
          if (mode < 0) {
              av_log(h->avctx, AV_LOG_ERROR,
                     "left block unavailable for requested intra mode at %d %d\n",
                     sl->mb_x, sl->mb_y);
              return AVERROR_INVALIDDATA;
          }
 +        if (is_chroma && (sl->left_samples_available & 0x8080)) {
 +            // mad cow disease mode, aka MBAFF + constrained_intra_pred
 +            mode = ALZHEIMER_DC_L0T_PRED8x8 +
 +                   (!(sl->left_samples_available & 0x8000)) +
 +                   2 * (mode == DC_128_PRED8x8);
 +        }
      }
  
      return mode;
@@@ -242,7 -224,7 +242,7 @@@ const uint8_t *ff_h264_decode_nal(H264C
  
  #define STARTCODE_TEST                                                  \
      if (i + 2 < length && src[i + 1] == 0 && src[i + 2] <= 3) {         \
 -        if (src[i + 2] != 3) {                                          \
 +        if (src[i + 2] != 3 && src[i + 2] != 0) {                       \
              /* startcode, so we must be past the end */                 \
              length = i;                                                 \
          }                                                               \
      }
  #endif
  
 -    if (i >= length - 1) { // no escaped 0
 -        *dst_length = length;
 -        *consumed   = length + 1; // +1 for the header
 -        return src;
 -    }
 -
 -    av_fast_malloc(&sl->rbsp_buffer, &sl->rbsp_buffer_size,
 -                   length + AV_INPUT_BUFFER_PADDING_SIZE);
 +    av_fast_padded_malloc(&sl->rbsp_buffer, &sl->rbsp_buffer_size, length+MAX_MBPAIR_SIZE);
      dst = sl->rbsp_buffer;
  
      if (!dst)
          return NULL;
  
 +    if(i>=length-1){ //no escaped 0
 +        *dst_length= length;
 +        *consumed= length+1; //+1 for the header
 +        if(h->avctx->flags2 & AV_CODEC_FLAG2_FAST){
 +            return src;
 +        }else{
 +            memcpy(dst, src, length);
 +            return dst;
 +        }
 +    }
 +
      memcpy(dst, src, i);
      si = di = i;
      while (si + 2 < length) {
          if (src[si + 2] > 3) {
              dst[di++] = src[si++];
              dst[di++] = src[si++];
 -        } else if (src[si] == 0 && src[si + 1] == 0) {
 +        } else if (src[si] == 0 && src[si + 1] == 0 && src[si + 2] != 0) {
              if (src[si + 2] == 3) { // escape
                  dst[di++]  = 0;
                  dst[di++]  = 0;
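
Note on the hunk above: it tightens the start-code and escape detection in ff_h264_decode_nal. The stand-alone sketch below shows the underlying operation: H.264 bitstreams insert a 0x03 byte after every 0x00 0x00 pair (emulation prevention), and the decoder strips it again while copying the NAL payload into the RBSP buffer. This is a minimal illustration, not the FFmpeg code.

    /* Minimal RBSP unescaping: drop the 0x03 emulation-prevention byte that
     * follows every 0x00 0x00 pair in the escaped NAL payload. */
    #include <stddef.h>
    #include <stdint.h>

    static size_t unescape_rbsp(uint8_t *dst, const uint8_t *src, size_t len)
    {
        size_t si = 0, di = 0;
        while (si < len) {
            if (si + 2 < len && src[si] == 0 && src[si + 1] == 0 && src[si + 2] == 3) {
                dst[di++] = 0;
                dst[di++] = 0;
                si += 3;                  // skip the escape byte
            } else {
                dst[di++] = src[si++];
            }
        }
        return di;                        // RBSP length
    }
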
@@@ -401,11 -379,11 +401,11 @@@ void ff_h264_free_tables(H264Context *h
  int ff_h264_alloc_tables(H264Context *h)
  {
      const int big_mb_num = h->mb_stride * (h->mb_height + 1);
 -    const int row_mb_num = h->mb_stride * 2 * h->avctx->thread_count;
 +    const int row_mb_num = 2*h->mb_stride*FFMAX(h->avctx->thread_count, 1);
      int x, y;
  
 -    FF_ALLOCZ_OR_GOTO(h->avctx, h->intra4x4_pred_mode,
 -                      row_mb_num * 8 * sizeof(uint8_t), fail)
 +    FF_ALLOCZ_ARRAY_OR_GOTO(h->avctx, h->intra4x4_pred_mode,
 +                      row_mb_num, 8 * sizeof(uint8_t), fail)
      h->slice_ctx[0].intra4x4_pred_mode = h->intra4x4_pred_mode;
  
      FF_ALLOCZ_OR_GOTO(h->avctx, h->non_zero_count,
                        big_mb_num * sizeof(uint16_t), fail)
      FF_ALLOCZ_OR_GOTO(h->avctx, h->chroma_pred_mode_table,
                        big_mb_num * sizeof(uint8_t), fail)
 -    FF_ALLOCZ_OR_GOTO(h->avctx, h->mvd_table[0],
 -                      16 * row_mb_num * sizeof(uint8_t), fail);
 -    FF_ALLOCZ_OR_GOTO(h->avctx, h->mvd_table[1],
 -                      16 * row_mb_num * sizeof(uint8_t), fail);
 +    FF_ALLOCZ_ARRAY_OR_GOTO(h->avctx, h->mvd_table[0],
 +                      row_mb_num, 16 * sizeof(uint8_t), fail);
 +    FF_ALLOCZ_ARRAY_OR_GOTO(h->avctx, h->mvd_table[1],
 +                      row_mb_num, 16 * sizeof(uint8_t), fail);
      h->slice_ctx[0].mvd_table[0] = h->mvd_table[0];
      h->slice_ctx[0].mvd_table[1] = h->mvd_table[1];
  
          }
  
      if (!h->dequant4_coeff[0])
 -        h264_init_dequant_tables(h);
 +        ff_h264_init_dequant_tables(h);
  
      return 0;
  
@@@ -475,11 -453,7 +475,11 @@@ int ff_h264_slice_context_init(H264Cont
      sl->ref_cache[1][scan8[7]  + 1] =
      sl->ref_cache[1][scan8[13] + 1] = PART_NOT_AVAILABLE;
  
 +    if (sl != h->slice_ctx) {
 +        memset(er, 0, sizeof(*er));
 +    } else
      if (CONFIG_ERROR_RESILIENCE) {
 +
          /* init ER */
          er->avctx          = h->avctx;
          er->decode_mb      = h264_er_decode_mb;
@@@ -527,23 -501,20 +527,23 @@@ fail
  static int decode_nal_units(H264Context *h, const uint8_t *buf, int buf_size,
                              int parse_extradata);
  
 -int ff_h264_decode_extradata(H264Context *h)
 +int ff_h264_decode_extradata(H264Context *h, const uint8_t *buf, int size)
  {
      AVCodecContext *avctx = h->avctx;
      int ret;
  
 -    if (avctx->extradata[0] == 1) {
 +    if (!buf || size <= 0)
 +        return -1;
 +
 +    if (buf[0] == 1) {
          int i, cnt, nalsize;
 -        unsigned char *p = avctx->extradata;
 +        const unsigned char *p = buf;
  
          h->is_avc = 1;
  
 -        if (avctx->extradata_size < 7) {
 +        if (size < 7) {
              av_log(avctx, AV_LOG_ERROR,
 -                   "avcC %d too short\n", avctx->extradata_size);
 +                   "avcC %d too short\n", size);
              return AVERROR_INVALIDDATA;
          }
          /* sps and pps in the avcC always have length coded with 2 bytes,
          p  += 6;
          for (i = 0; i < cnt; i++) {
              nalsize = AV_RB16(p) + 2;
 -            if (p - avctx->extradata + nalsize > avctx->extradata_size)
 +            if(nalsize > size - (p-buf))
                  return AVERROR_INVALIDDATA;
              ret = decode_nal_units(h, p, nalsize, 1);
              if (ret < 0) {
          cnt = *(p++); // Number of pps
          for (i = 0; i < cnt; i++) {
              nalsize = AV_RB16(p) + 2;
 -            if (p - avctx->extradata + nalsize > avctx->extradata_size)
 +            if(nalsize > size - (p-buf))
                  return AVERROR_INVALIDDATA;
              ret = decode_nal_units(h, p, nalsize, 1);
              if (ret < 0) {
              p += nalsize;
          }
          // Store right nal length size that will be used to parse all other nals
 -        h->nal_length_size = (avctx->extradata[4] & 0x03) + 1;
 +        h->nal_length_size = (buf[4] & 0x03) + 1;
      } else {
          h->is_avc = 0;
 -        ret = decode_nal_units(h, avctx->extradata, avctx->extradata_size, 1);
 +        ret = decode_nal_units(h, buf, size, 1);
          if (ret < 0)
              return ret;
      }
 -    return 0;
 +    return size;
  }
  
  static int h264_init_context(AVCodecContext *avctx, H264Context *h)
      int i;
  
      h->avctx                 = avctx;
 +    h->backup_width          = -1;
 +    h->backup_height         = -1;
 +    h->backup_pix_fmt        = AV_PIX_FMT_NONE;
      h->dequant_coeff_pps     = -1;
 +    h->current_sps_id        = -1;
 +    h->cur_chroma_format_idc = -1;
  
      h->picture_structure     = PICT_FRAME;
      h->slice_context_count   = 1;
      h->x264_build            = -1;
      h->recovery_frame        = -1;
      h->frame_recovered       = 0;
 +    h->prev_frame_num        = -1;
 +    h->sei_fpa.frame_packing_arrangement_cancel_flag = -1;
  
      h->next_outputed_poc = INT_MIN;
      for (i = 0; i < MAX_DELAYED_PIC_COUNT; i++)
      if (!h->cur_pic.f)
          return AVERROR(ENOMEM);
  
 +    h->last_pic_for_ec.f = av_frame_alloc();
 +    if (!h->last_pic_for_ec.f)
 +        return AVERROR(ENOMEM);
 +
      for (i = 0; i < h->nb_slice_ctx; i++)
          h->slice_ctx[i].h264 = h;
  
@@@ -669,21 -629,17 +669,21 @@@ av_cold int ff_h264_decode_init(AVCodec
      }
  
      if (avctx->codec_id == AV_CODEC_ID_H264) {
 -        if (avctx->ticks_per_frame == 1)
 -            h->avctx->framerate.num *= 2;
 +        if (avctx->ticks_per_frame == 1) {
 +            if(h->avctx->time_base.den < INT_MAX/2) {
 +                h->avctx->time_base.den *= 2;
 +            } else
 +                h->avctx->time_base.num /= 2;
 +        }
          avctx->ticks_per_frame = 2;
      }
  
      if (avctx->extradata_size > 0 && avctx->extradata) {
 -       ret = ff_h264_decode_extradata(h);
 -       if (ret < 0) {
 -           ff_h264_free_context(h);
 -           return ret;
 -       }
 +        ret = ff_h264_decode_extradata(h, avctx->extradata, avctx->extradata_size);
 +        if (ret < 0) {
 +            h264_decode_end(avctx);
 +            return ret;
 +        }
      }
  
      if (h->sps.bitstream_restriction_flag &&
  
      avctx->internal->allocate_progress = 1;
  
 -    if (h->enable_er) {
 +    ff_h264_flush_change(h);
 +
 +    if (h->enable_er < 0 && (avctx->active_thread_type & FF_THREAD_SLICE))
 +        h->enable_er = 0;
 +
 +    if (h->enable_er && (avctx->active_thread_type & FF_THREAD_SLICE)) {
          av_log(avctx, AV_LOG_WARNING,
 -               "Error resilience is enabled. It is unsafe and unsupported and may crash. "
 +               "Error resilience with slice threads is enabled. It is unsafe and unsupported and may crash. "
                 "Use it at your own risk\n");
      }
  
      return 0;
  }
  
 +#if HAVE_THREADS
  static int decode_init_thread_copy(AVCodecContext *avctx)
  {
      H264Context *h = avctx->priv_data;
  
      return 0;
  }
 +#endif
  
  /**
   * Run setup operations that must be run after slice header decoding.
@@@ -742,6 -691,7 +742,6 @@@ static void decode_postinit(H264Contex
      H264Picture *out = h->cur_pic_ptr;
      H264Picture *cur = h->cur_pic_ptr;
      int i, pics, out_of_order, out_idx;
 -    int invalid = 0, cnt = 0;
  
      h->cur_pic_ptr->f->pict_type = h->pict_type;
  
           * yet, so we assume the worst for now. */
          // if (setup_finished)
          //    ff_thread_finish_setup(h->avctx);
 -        return;
 +        if (cur->field_poc[0] == INT_MAX && cur->field_poc[1] == INT_MAX)
 +            return;
 +        if (h->avctx->hwaccel || h->missing_fields <=1)
 +            return;
      }
  
      cur->f->interlaced_frame = 0;
          /* Derive top_field_first from field pocs. */
          cur->f->top_field_first = cur->field_poc[0] < cur->field_poc[1];
      } else {
 -        if (cur->f->interlaced_frame || h->sps.pic_struct_present_flag) {
 +        if (h->sps.pic_struct_present_flag) {
              /* Use picture timing SEI information. Even if it is a
               * information of a past frame, better than nothing. */
              if (h->sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM ||
                  cur->f->top_field_first = 1;
              else
                  cur->f->top_field_first = 0;
 +        } else if (cur->f->interlaced_frame) {
 +            /* Default to top field first when pic_struct_present_flag
 +             * is not set but interlaced frame detected */
 +            cur->f->top_field_first = 1;
          } else {
              /* Most likely progressive */
              cur->f->top_field_first = 0;
          h->content_interpretation_type > 0 &&
          h->content_interpretation_type < 3) {
          AVStereo3D *stereo = av_stereo3d_create_side_data(cur->f);
 -        if (!stereo)
 -            return;
 -
 +        if (stereo) {
          switch (h->frame_packing_arrangement_type) {
          case 0:
              stereo->type = AV_STEREO3D_CHECKERBOARD;
  
          if (h->content_interpretation_type == 2)
              stereo->flags = AV_STEREO3D_FLAG_INVERT;
 +        }
      }
  
      if (h->sei_display_orientation_present &&
          AVFrameSideData *rotation = av_frame_new_side_data(cur->f,
                                                             AV_FRAME_DATA_DISPLAYMATRIX,
                                                             sizeof(int32_t) * 9);
 -        if (!rotation)
 -            return;
 -
 -        av_display_rotation_set((int32_t *)rotation->data, angle);
 -        av_display_matrix_flip((int32_t *)rotation->data,
 -                               h->sei_hflip, h->sei_vflip);
 +        if (rotation) {
 +            av_display_rotation_set((int32_t *)rotation->data, angle);
 +            av_display_matrix_flip((int32_t *)rotation->data,
 +                                   h->sei_hflip, h->sei_vflip);
 +        }
      }
  
      if (h->sei_reguserdata_afd_present) {
          AVFrameSideData *sd = av_frame_new_side_data(cur->f, AV_FRAME_DATA_AFD,
                                                       sizeof(uint8_t));
 -        if (!sd)
 -            return;
  
 -        *sd->data = h->active_format_description;
 -        h->sei_reguserdata_afd_present = 0;
 +        if (sd) {
 +            *sd->data = h->active_format_description;
 +            h->sei_reguserdata_afd_present = 0;
 +        }
      }
  
      if (h->a53_caption) {
          AVFrameSideData *sd = av_frame_new_side_data(cur->f,
                                                       AV_FRAME_DATA_A53_CC,
                                                       h->a53_caption_size);
 -        if (!sd)
 -            return;
 -
 -        memcpy(sd->data, h->a53_caption, h->a53_caption_size);
 +        if (sd)
 +            memcpy(sd->data, h->a53_caption, h->a53_caption_size);
          av_freep(&h->a53_caption);
          h->a53_caption_size = 0;
 +        h->avctx->properties |= FF_CODEC_PROPERTY_CLOSED_CAPTIONS;
      }
  
 +    cur->mmco_reset = h->mmco_reset;
 +    h->mmco_reset = 0;
 +
      // FIXME do something with unavailable reference frames
  
      /* Sort B-frames into display order */
      if (h->sps.bitstream_restriction_flag ||
 -        h->avctx->strict_std_compliance >= FF_COMPLIANCE_NORMAL) {
 +        h->avctx->strict_std_compliance >= FF_COMPLIANCE_STRICT) {
          h->avctx->has_b_frames = FFMAX(h->avctx->has_b_frames, h->sps.num_reorder_frames);
      }
      h->low_delay = !h->avctx->has_b_frames;
  
 +    for (i = 0; 1; i++) {
 +        if(i == MAX_DELAYED_PIC_COUNT || cur->poc < h->last_pocs[i]){
 +            if(i)
 +                h->last_pocs[i-1] = cur->poc;
 +            break;
 +        } else if(i) {
 +            h->last_pocs[i-1]= h->last_pocs[i];
 +        }
 +    }
 +    out_of_order = MAX_DELAYED_PIC_COUNT - i;
 +    if(   cur->f->pict_type == AV_PICTURE_TYPE_B
 +       || (h->last_pocs[MAX_DELAYED_PIC_COUNT-2] > INT_MIN && h->last_pocs[MAX_DELAYED_PIC_COUNT-1] - h->last_pocs[MAX_DELAYED_PIC_COUNT-2] > 2))
 +        out_of_order = FFMAX(out_of_order, 1);
 +    if (out_of_order == MAX_DELAYED_PIC_COUNT) {
 +        av_log(h->avctx, AV_LOG_VERBOSE, "Invalid POC %d<%d\n", cur->poc, h->last_pocs[0]);
 +        for (i = 1; i < MAX_DELAYED_PIC_COUNT; i++)
 +            h->last_pocs[i] = INT_MIN;
 +        h->last_pocs[0] = cur->poc;
 +        cur->mmco_reset = 1;
 +    } else if(h->avctx->has_b_frames < out_of_order && !h->sps.bitstream_restriction_flag){
 +        av_log(h->avctx, AV_LOG_INFO, "Increasing reorder buffer to %d\n", out_of_order);
 +        h->avctx->has_b_frames = out_of_order;
 +        h->low_delay = 0;
 +    }
 +
      pics = 0;
      while (h->delayed_pic[pics])
          pics++;
  
 -    assert(pics <= MAX_DELAYED_PIC_COUNT);
 +    av_assert0(pics <= MAX_DELAYED_PIC_COUNT);
  
      h->delayed_pic[pics++] = cur;
      if (cur->reference == 0)
          cur->reference = DELAYED_PIC_REF;
  
 -    /* Frame reordering. This code takes pictures from coding order and sorts
 -     * them by their incremental POC value into display order. It supports POC
 -     * gaps, MMCO reset codes and random resets.
 -     * A "display group" can start either with a IDR frame (f.key_frame = 1),
 -     * and/or can be closed down with a MMCO reset code. In sequences where
 -     * there is no delay, we can't detect that (since the frame was already
 -     * output to the user), so we also set h->mmco_reset to detect the MMCO
 -     * reset code.
 -     * FIXME: if we detect insufficient delays (as per h->avctx->has_b_frames),
 -     * we increase the delay between input and output. All frames affected by
 -     * the lag (e.g. those that should have been output before another frame
 -     * that we already returned to the user) will be dropped. This is a bug
 -     * that we will fix later. */
 -    for (i = 0; i < MAX_DELAYED_PIC_COUNT; i++) {
 -        cnt     += out->poc < h->last_pocs[i];
 -        invalid += out->poc == INT_MIN;
 -    }
 -    if (!h->mmco_reset && !cur->f->key_frame &&
 -        cnt + invalid == MAX_DELAYED_PIC_COUNT && cnt > 0) {
 -        h->mmco_reset = 2;
 -        if (pics > 1)
 -            h->delayed_pic[pics - 2]->mmco_reset = 2;
 -    }
 -    if (h->mmco_reset || cur->f->key_frame) {
 -        for (i = 0; i < MAX_DELAYED_PIC_COUNT; i++)
 -            h->last_pocs[i] = INT_MIN;
 -        cnt     = 0;
 -        invalid = MAX_DELAYED_PIC_COUNT;
 -    }
      out     = h->delayed_pic[0];
      out_idx = 0;
 -    for (i = 1; i < MAX_DELAYED_PIC_COUNT &&
 -                h->delayed_pic[i] &&
 -                !h->delayed_pic[i - 1]->mmco_reset &&
 -                !h->delayed_pic[i]->f->key_frame;
 +    for (i = 1; h->delayed_pic[i] &&
 +                !h->delayed_pic[i]->f->key_frame &&
 +                !h->delayed_pic[i]->mmco_reset;
           i++)
          if (h->delayed_pic[i]->poc < out->poc) {
              out     = h->delayed_pic[i];
              out_idx = i;
          }
      if (h->avctx->has_b_frames == 0 &&
 -        (h->delayed_pic[0]->f->key_frame || h->mmco_reset))
 +        (h->delayed_pic[0]->f->key_frame || h->delayed_pic[0]->mmco_reset))
          h->next_outputed_poc = INT_MIN;
 -    out_of_order = !out->f->key_frame && !h->mmco_reset &&
 -                   (out->poc < h->next_outputed_poc);
 -
 -    if (h->sps.bitstream_restriction_flag &&
 -        h->avctx->has_b_frames >= h->sps.num_reorder_frames) {
 -    } else if (out_of_order && pics - 1 == h->avctx->has_b_frames &&
 -               h->avctx->has_b_frames < MAX_DELAYED_PIC_COUNT) {
 -        if (invalid + cnt < MAX_DELAYED_PIC_COUNT) {
 -            h->avctx->has_b_frames = FFMAX(h->avctx->has_b_frames, cnt);
 -        }
 -        h->low_delay = 0;
 -    } else if (h->low_delay &&
 -               ((h->next_outputed_poc != INT_MIN &&
 -                 out->poc > h->next_outputed_poc + 2) ||
 -                cur->f->pict_type == AV_PICTURE_TYPE_B)) {
 -        h->low_delay = 0;
 -        h->avctx->has_b_frames++;
 -    }
 +    out_of_order = out->poc < h->next_outputed_poc;
  
 -    if (pics > h->avctx->has_b_frames) {
 +    if (out_of_order || pics > h->avctx->has_b_frames) {
          out->reference &= ~DELAYED_PIC_REF;
          // for frame threading, the owner must be the second field's thread or
          // else the first thread can release the picture and reuse it unsafely
          for (i = out_idx; h->delayed_pic[i]; i++)
              h->delayed_pic[i] = h->delayed_pic[i + 1];
      }
 -    memmove(h->last_pocs, &h->last_pocs[1],
 -            sizeof(*h->last_pocs) * (MAX_DELAYED_PIC_COUNT - 1));
 -    h->last_pocs[MAX_DELAYED_PIC_COUNT - 1] = cur->poc;
      if (!out_of_order && pics > h->avctx->has_b_frames) {
          h->next_output_pic = out;
 -        if (out->mmco_reset) {
 -            if (out_idx > 0) {
 -                h->next_outputed_poc                    = out->poc;
 -                h->delayed_pic[out_idx - 1]->mmco_reset = out->mmco_reset;
 -            } else {
 -                h->next_outputed_poc = INT_MIN;
 -            }
 -        } else {
 -            if (out_idx == 0 && pics > 1 && h->delayed_pic[0]->f->key_frame) {
 -                h->next_outputed_poc = INT_MIN;
 -            } else {
 -                h->next_outputed_poc = out->poc;
 -            }
 -        }
 -        h->mmco_reset = 0;
 +        if (out_idx == 0 && h->delayed_pic[0] && (h->delayed_pic[0]->f->key_frame || h->delayed_pic[0]->mmco_reset)) {
 +            h->next_outputed_poc = INT_MIN;
 +        } else
 +            h->next_outputed_poc = out->poc;
      } else {
 -        av_log(h->avctx, AV_LOG_DEBUG, "no picture\n");
 +        av_log(h->avctx, AV_LOG_DEBUG, "no picture %s\n", out_of_order ? "ooo" : "");
      }
  
      if (h->next_output_pic) {
      }
  }
  
- int ff_pred_weight_table(H264Context *h, H264SliceContext *sl)
- {
-     int list, i;
-     int luma_def, chroma_def;
-     sl->use_weight             = 0;
-     sl->use_weight_chroma      = 0;
-     sl->luma_log2_weight_denom = get_ue_golomb(&sl->gb);
-     if (h->sps.chroma_format_idc)
-         sl->chroma_log2_weight_denom = get_ue_golomb(&sl->gb);
-     if (sl->luma_log2_weight_denom > 7U) {
-         av_log(h->avctx, AV_LOG_ERROR, "luma_log2_weight_denom %d is out of range\n", sl->luma_log2_weight_denom);
-         sl->luma_log2_weight_denom = 0;
-     }
-     if (sl->chroma_log2_weight_denom > 7U) {
-         av_log(h->avctx, AV_LOG_ERROR, "chroma_log2_weight_denom %d is out of range\n", sl->chroma_log2_weight_denom);
-         sl->chroma_log2_weight_denom = 0;
-     }
-     luma_def   = 1 << sl->luma_log2_weight_denom;
-     chroma_def = 1 << sl->chroma_log2_weight_denom;
-     for (list = 0; list < 2; list++) {
-         sl->luma_weight_flag[list]   = 0;
-         sl->chroma_weight_flag[list] = 0;
-         for (i = 0; i < sl->ref_count[list]; i++) {
-             int luma_weight_flag, chroma_weight_flag;
-             luma_weight_flag = get_bits1(&sl->gb);
-             if (luma_weight_flag) {
-                 sl->luma_weight[i][list][0] = get_se_golomb(&sl->gb);
-                 sl->luma_weight[i][list][1] = get_se_golomb(&sl->gb);
-                 if (sl->luma_weight[i][list][0] != luma_def ||
-                     sl->luma_weight[i][list][1] != 0) {
-                     sl->use_weight             = 1;
-                     sl->luma_weight_flag[list] = 1;
-                 }
-             } else {
-                 sl->luma_weight[i][list][0] = luma_def;
-                 sl->luma_weight[i][list][1] = 0;
-             }
-             if (h->sps.chroma_format_idc) {
-                 chroma_weight_flag = get_bits1(&sl->gb);
-                 if (chroma_weight_flag) {
-                     int j;
-                     for (j = 0; j < 2; j++) {
-                         sl->chroma_weight[i][list][j][0] = get_se_golomb(&sl->gb);
-                         sl->chroma_weight[i][list][j][1] = get_se_golomb(&sl->gb);
-                         if (sl->chroma_weight[i][list][j][0] != chroma_def ||
-                             sl->chroma_weight[i][list][j][1] != 0) {
-                             sl->use_weight_chroma        = 1;
-                             sl->chroma_weight_flag[list] = 1;
-                         }
-                     }
-                 } else {
-                     int j;
-                     for (j = 0; j < 2; j++) {
-                         sl->chroma_weight[i][list][j][0] = chroma_def;
-                         sl->chroma_weight[i][list][j][1] = 0;
-                     }
-                 }
-             }
-         }
-         if (sl->slice_type_nos != AV_PICTURE_TYPE_B)
-             break;
-     }
-     sl->use_weight = sl->use_weight || sl->use_weight_chroma;
-     return 0;
- }
  /**
   * instantaneous decoder refresh.
   */
  static void idr(H264Context *h)
  {
 +    int i;
      ff_h264_remove_all_refs(h);
      h->prev_frame_num        =
 -    h->prev_frame_num_offset =
 -    h->prev_poc_msb          =
 +    h->prev_frame_num_offset = 0;
 +    h->prev_poc_msb          = 1<<16;
      h->prev_poc_lsb          = 0;
 +    for (i = 0; i < MAX_DELAYED_PIC_COUNT; i++)
 +        h->last_pocs[i] = INT_MIN;
  }
  
  /* forget old pics after a seek */
  void ff_h264_flush_change(H264Context *h)
  {
 -    int i;
 -    for (i = 0; i < MAX_DELAYED_PIC_COUNT; i++)
 -        h->last_pocs[i] = INT_MIN;
 +    int i, j;
 +
      h->next_outputed_poc = INT_MIN;
      h->prev_interlaced_frame = 1;
      idr(h);
 -    if (h->cur_pic_ptr)
 +
 +    h->prev_frame_num = -1;
 +    if (h->cur_pic_ptr) {
          h->cur_pic_ptr->reference = 0;
 +        for (j=i=0; h->delayed_pic[i]; i++)
 +            if (h->delayed_pic[i] != h->cur_pic_ptr)
 +                h->delayed_pic[j++] = h->delayed_pic[i];
 +        h->delayed_pic[j] = NULL;
 +    }
 +    ff_h264_unref_picture(h, &h->last_pic_for_ec);
 +
      h->first_field = 0;
      ff_h264_reset_sei(h);
      h->recovery_frame = -1;
      h->frame_recovered = 0;
 +    h->current_slice = 0;
 +    h->mmco_reset = 1;
 +    for (i = 0; i < h->nb_slice_ctx; i++)
 +        h->slice_ctx[i].list_count = 0;
  }
  
  /* forget old pics after a seek */
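
Note on the reordering logic rewritten earlier in this hunk: decode_postinit() keeps a window of recent POCs (h->last_pocs) and, on each call, scans the delayed pictures for the one with the smallest picture order count, stopping at the first key frame or MMCO reset. The fragment below isolates just that selection with a stand-in type; it is illustrative, not the decoder's code.

    /* Pick the delayed picture with the lowest POC, stopping at the first
     * key frame / MMCO reset boundary as the loop above does. "Pic" is a
     * stand-in type for the sketch, not H264Picture. */
    typedef struct Pic {
        int poc;
        int key_frame;
        int mmco_reset;
    } Pic;

    static int pick_next_output(Pic *const delayed[], int max_count)
    {
        int out_idx = 0;
        for (int i = 1; i < max_count && delayed[i] &&
                        !delayed[i]->key_frame && !delayed[i]->mmco_reset; i++)
            if (delayed[i]->poc < delayed[out_idx]->poc)
                out_idx = i;
        return out_idx;   // index of the next picture to output
    }
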
@@@ -1244,34 -1136,26 +1172,34 @@@ int ff_h264_get_profile(SPS *sps
  int ff_set_ref_count(H264Context *h, H264SliceContext *sl)
  {
      int ref_count[2], list_count;
 -    int num_ref_idx_active_override_flag, max_refs;
 +    int num_ref_idx_active_override_flag;
  
      // set defaults, might be overridden a few lines later
      ref_count[0] = h->pps.ref_count[0];
      ref_count[1] = h->pps.ref_count[1];
  
      if (sl->slice_type_nos != AV_PICTURE_TYPE_I) {
 +        unsigned max[2];
 +        max[0] = max[1] = h->picture_structure == PICT_FRAME ? 15 : 31;
 +
          if (sl->slice_type_nos == AV_PICTURE_TYPE_B)
              sl->direct_spatial_mv_pred = get_bits1(&sl->gb);
          num_ref_idx_active_override_flag = get_bits1(&sl->gb);
  
          if (num_ref_idx_active_override_flag) {
              ref_count[0] = get_ue_golomb(&sl->gb) + 1;
 -            if (ref_count[0] < 1)
 -                return AVERROR_INVALIDDATA;
              if (sl->slice_type_nos == AV_PICTURE_TYPE_B) {
                  ref_count[1] = get_ue_golomb(&sl->gb) + 1;
 -                if (ref_count[1] < 1)
 -                    return AVERROR_INVALIDDATA;
 -            }
 +            } else
 +                // full range is spec-ok in this case, even for frames
 +                ref_count[1] = 1;
 +        }
 +
 +        if (ref_count[0]-1 > max[0] || ref_count[1]-1 > max[1]){
 +            av_log(h->avctx, AV_LOG_ERROR, "reference overflow %u > %u or %u > %u\n", ref_count[0]-1, max[0], ref_count[1]-1, max[1]);
 +            sl->ref_count[0] = sl->ref_count[1] = 0;
 +            sl->list_count   = 0;
 +            return AVERROR_INVALIDDATA;
          }
  
          if (sl->slice_type_nos == AV_PICTURE_TYPE_B)
          ref_count[0] = ref_count[1] = 0;
      }
  
 -    max_refs = h->picture_structure == PICT_FRAME ? 16 : 32;
 -
 -    if (ref_count[0] > max_refs || ref_count[1] > max_refs) {
 -        av_log(h->avctx, AV_LOG_ERROR, "reference overflow\n");
 -        sl->ref_count[0] = sl->ref_count[1] = 0;
 -        return AVERROR_INVALIDDATA;
 -    }
 -
      if (list_count   != sl->list_count   ||
          ref_count[0] != sl->ref_count[0] ||
          ref_count[1] != sl->ref_count[1]) {
      return 0;
  }
  
 -static int find_start_code(const uint8_t *buf, int buf_size,
 -                           int buf_index, int next_avc)
 -{
 -    // start code prefix search
 -    for (; buf_index + 3 < next_avc; buf_index++)
 -        // This should always succeed in the first iteration.
 -        if (buf[buf_index]     == 0 &&
 -            buf[buf_index + 1] == 0 &&
 -            buf[buf_index + 2] == 1)
 -            break;
 -
 -    if (buf_index + 3 >= buf_size)
 -        return buf_size;
 -
 -    return buf_index + 3;
 -}
 -
 -static int get_avc_nalsize(H264Context *h, const uint8_t *buf,
 -                           int buf_size, int *buf_index)
 -{
 -    int i, nalsize = 0;
 -
 -    if (*buf_index >= buf_size - h->nal_length_size) {
 -        // the end of the buffer is reached, refill it.
 -        return AVERROR(EAGAIN);
 -    }
 -
 -    for (i = 0; i < h->nal_length_size; i++)
 -        nalsize = (nalsize << 8) | buf[(*buf_index)++];
 -    if (nalsize <= 0 || nalsize > buf_size - *buf_index) {
 -        av_log(h->avctx, AV_LOG_ERROR,
 -               "AVC: nal size %d\n", nalsize);
 -        return AVERROR_INVALIDDATA;
 -    }
 -    return nalsize;
 -}
 +static const uint8_t start_code[] = { 0x00, 0x00, 0x01 };
  
  static int get_bit_length(H264Context *h, const uint8_t *buf,
                            const uint8_t *ptr, int dst_length,
@@@ -1322,7 -1249,6 +1250,7 @@@ static int get_last_needed_nal(H264Cont
      int nal_index   = 0;
      int buf_index   = 0;
      int nals_needed = 0;
 +    int first_slice = 0;
  
      while(1) {
          GetBitContext gb;
              buf_index = find_start_code(buf, buf_size, buf_index, next_avc);
              if (buf_index >= buf_size)
                  break;
 +            if (buf_index >= next_avc)
 +                continue;
          }
  
          ptr = ff_h264_decode_nal(h, &h->slice_ctx[0], buf + buf_index, &dst_length, &consumed,
          case NAL_IDR_SLICE:
          case NAL_SLICE:
              init_get_bits(&gb, ptr, bit_length);
 -            if (!get_ue_golomb(&gb))
 +            if (!get_ue_golomb_long(&gb) ||  // first_mb_in_slice
 +                !first_slice ||
 +                first_slice != h->nal_unit_type)
                  nals_needed = nal_index;
 +            if (!first_slice)
 +                first_slice = h->nal_unit_type;
          }
      }
  
@@@ -1390,13 -1310,8 +1318,13 @@@ static int decode_nal_units(H264Contex
      int next_avc;
      int nals_needed = 0; ///< number of NALs that need decoding before the next frame thread starts
      int nal_index;
 +    int idr_cleared=0;
      int ret = 0;
  
 +    h->nal_unit_type= 0;
 +
 +    if(!h->slice_context_count)
 +         h->slice_context_count= 1;
      h->max_contexts = h->slice_context_count;
      if (!(avctx->flags2 & AV_CODEC_FLAG2_CHUNKS)) {
          h->current_slice = 0;
          ff_h264_reset_sei(h);
      }
  
 +    if (h->nal_length_size == 4) {
 +        if (buf_size > 8 && AV_RB32(buf) == 1 && AV_RB32(buf+5) > (unsigned)buf_size) {
 +            h->is_avc = 0;
 +        }else if(buf_size > 3 && AV_RB32(buf) > 1 && AV_RB32(buf) <= (unsigned)buf_size)
 +            h->is_avc = 1;
 +    }
 +
      if (avctx->active_thread_type & FF_THREAD_FRAME)
          nals_needed = get_last_needed_nal(h, buf, buf_size);
  
  
              if (h->avctx->debug & FF_DEBUG_STARTCODE)
                  av_log(h->avctx, AV_LOG_DEBUG,
 -                       "NAL %d at %d/%d length %d\n",
 -                       h->nal_unit_type, buf_index, buf_size, dst_length);
 +                       "NAL %d/%d at %d/%d length %d\n",
 +                       h->nal_unit_type, h->nal_ref_idc, buf_index, buf_size, dst_length);
  
              if (h->is_avc && (nalsize != consumed) && nalsize)
                  av_log(h->avctx, AV_LOG_DEBUG,
                  continue;
  
  again:
 -            /* Ignore every NAL unit type except PPS and SPS during extradata
 +            /* Ignore per frame NAL unit type during extradata
               * parsing. Decoding slices is not possible in codec init
               * with frame-mt */
 -            if (parse_extradata && HAVE_THREADS &&
 -                (h->avctx->active_thread_type & FF_THREAD_FRAME) &&
 -                (h->nal_unit_type != NAL_PPS &&
 -                 h->nal_unit_type != NAL_SPS)) {
 -                if (h->nal_unit_type < NAL_AUD ||
 -                    h->nal_unit_type > NAL_AUXILIARY_SLICE)
 -                    av_log(avctx, AV_LOG_INFO,
 -                           "Ignoring NAL unit %d during extradata parsing\n",
 +            if (parse_extradata) {
 +                switch (h->nal_unit_type) {
 +                case NAL_IDR_SLICE:
 +                case NAL_SLICE:
 +                case NAL_DPA:
 +                case NAL_DPB:
 +                case NAL_DPC:
 +                    av_log(h->avctx, AV_LOG_WARNING,
 +                           "Ignoring NAL %d in global header/extradata\n",
                             h->nal_unit_type);
 -                h->nal_unit_type = NAL_FF_IGNORE;
 +                    // fall through to next case
 +                case NAL_AUXILIARY_SLICE:
 +                    h->nal_unit_type = NAL_FF_IGNORE;
 +                }
              }
 +
              err = 0;
 +
              switch (h->nal_unit_type) {
              case NAL_IDR_SLICE:
 +                if ((ptr[0] & 0xFC) == 0x98) {
 +                    av_log(h->avctx, AV_LOG_ERROR, "Invalid inter IDR frame\n");
 +                    h->next_outputed_poc = INT_MIN;
 +                    ret = -1;
 +                    goto end;
 +                }
                  if (h->nal_unit_type != NAL_IDR_SLICE) {
                      av_log(h->avctx, AV_LOG_ERROR,
                             "Invalid mix of idr and non-idr slices\n");
                      ret = -1;
                      goto end;
                  }
 -                idr(h); // FIXME ensure we don't lose some frames if there is reordering
 +                if(!idr_cleared) {
 +                    if (h->current_slice && (avctx->active_thread_type & FF_THREAD_SLICE)) {
 +                        av_log(h, AV_LOG_ERROR, "invalid mixed IDR / non IDR frames cannot be decoded in slice multithreading mode\n");
 +                        ret = AVERROR_INVALIDDATA;
 +                        goto end;
 +                    }
 +                    idr(h); // FIXME ensure we don't lose some frames if there is reordering
 +                }
 +                idr_cleared = 1;
 +                h->has_recovery_point = 1;
              case NAL_SLICE:
                  init_get_bits(&sl->gb, ptr, bit_length);
  
 +                if (   nals_needed >= nal_index
 +                    || (!(avctx->active_thread_type & FF_THREAD_FRAME) && !context_count))
 +                    h->au_pps_id = -1;
 +
                  if ((err = ff_h264_decode_slice_header(h, sl)))
                      break;
  
 -                if (h->sei_recovery_frame_cnt >= 0 && h->recovery_frame < 0) {
 -                    h->recovery_frame = (h->frame_num + h->sei_recovery_frame_cnt) &
 -                                        ((1 << h->sps.log2_max_frame_num) - 1);
 +                if (h->sei_recovery_frame_cnt >= 0) {
 +                    if (h->frame_num != h->sei_recovery_frame_cnt || sl->slice_type_nos != AV_PICTURE_TYPE_I)
 +                        h->valid_recovery_point = 1;
 +
 +                    if (   h->recovery_frame < 0
 +                        || av_mod_uintp2(h->recovery_frame - h->frame_num, h->sps.log2_max_frame_num) > h->sei_recovery_frame_cnt) {
 +                        h->recovery_frame = av_mod_uintp2(h->frame_num + h->sei_recovery_frame_cnt, h->sps.log2_max_frame_num);
 +
 +                        if (!h->valid_recovery_point)
 +                            h->recovery_frame = h->frame_num;
 +                    }
                  }
  
                  h->cur_pic_ptr->f->key_frame |=
 -                    (h->nal_unit_type == NAL_IDR_SLICE) ||
 -                    (h->sei_recovery_frame_cnt >= 0);
 +                    (h->nal_unit_type == NAL_IDR_SLICE);
  
                  if (h->nal_unit_type == NAL_IDR_SLICE ||
                      h->recovery_frame == h->frame_num) {
                  // "recovered".
                  if (h->nal_unit_type == NAL_IDR_SLICE)
                      h->frame_recovered |= FRAME_RECOVERED_IDR;
 +#if 1
 +                h->cur_pic_ptr->recovered |= h->frame_recovered;
 +#else
                  h->cur_pic_ptr->recovered |= !!(h->frame_recovered & FRAME_RECOVERED_IDR);
 +#endif
  
                  if (h->current_slice == 1) {
                      if (!(avctx->flags2 & AV_CODEC_FLAG2_CHUNKS))
                          decode_postinit(h, nal_index >= nals_needed);
  
                      if (h->avctx->hwaccel &&
 -                        (ret = h->avctx->hwaccel->start_frame(h->avctx, NULL, 0)) < 0)
 -                        return ret;
 +                        (ret = h->avctx->hwaccel->start_frame(h->avctx, buf, buf_size)) < 0)
 +                        goto end;
 +#if FF_API_CAP_VDPAU
 +                    if (CONFIG_H264_VDPAU_DECODER &&
 +                        h->avctx->codec->capabilities & AV_CODEC_CAP_HWACCEL_VDPAU)
 +                        ff_vdpau_h264_picture_start(h);
 +#endif
                  }
  
 -                if (sl->redundant_pic_count == 0 &&
 -                    (avctx->skip_frame < AVDISCARD_NONREF ||
 -                     h->nal_ref_idc) &&
 -                    (avctx->skip_frame < AVDISCARD_BIDIR  ||
 -                     sl->slice_type_nos != AV_PICTURE_TYPE_B) &&
 -                    (avctx->skip_frame < AVDISCARD_NONKEY ||
 -                     h->cur_pic_ptr->f->key_frame) &&
 -                    avctx->skip_frame < AVDISCARD_ALL) {
 +                if (sl->redundant_pic_count == 0) {
                      if (avctx->hwaccel) {
                          ret = avctx->hwaccel->decode_slice(avctx,
                                                             &buf[buf_index - consumed],
                                                             consumed);
                          if (ret < 0)
 -                            return ret;
 +                            goto end;
 +#if FF_API_CAP_VDPAU
 +                    } else if (CONFIG_H264_VDPAU_DECODER &&
 +                               h->avctx->codec->capabilities & AV_CODEC_CAP_HWACCEL_VDPAU) {
 +                        ff_vdpau_add_data_chunk(h->cur_pic_ptr->f->data[0],
 +                                                start_code,
 +                                                sizeof(start_code));
 +                        ff_vdpau_add_data_chunk(h->cur_pic_ptr->f->data[0],
 +                                                &buf[buf_index - consumed],
 +                                                consumed);
 +#endif
                      } else
                          context_count++;
                  }
              case NAL_DPB:
              case NAL_DPC:
                  avpriv_request_sample(avctx, "data partitioning");
 -                ret = AVERROR(ENOSYS);
 -                goto end;
                  break;
              case NAL_SEI:
                  init_get_bits(&h->gb, ptr, bit_length);
                  break;
              case NAL_SPS:
                  init_get_bits(&h->gb, ptr, bit_length);
 -                ret = ff_h264_decode_seq_parameter_set(h);
 -                if (ret < 0 && h->is_avc && (nalsize != consumed) && nalsize) {
 +                if (ff_h264_decode_seq_parameter_set(h, 0) >= 0)
 +                    break;
 +                if (h->is_avc ? nalsize : 1) {
                      av_log(h->avctx, AV_LOG_DEBUG,
                             "SPS decoding failure, trying again with the complete NAL\n");
 -                    init_get_bits(&h->gb, buf + buf_index + 1 - consumed,
 -                                  8 * (nalsize - 1));
 -                    ff_h264_decode_seq_parameter_set(h);
 +                    if (h->is_avc)
 +                        av_assert0(next_avc - buf_index + consumed == nalsize);
 +                    if ((next_avc - buf_index + consumed - 1) >= INT_MAX/8)
 +                        break;
 +                    init_get_bits(&h->gb, &buf[buf_index + 1 - consumed],
 +                                  8*(next_avc - buf_index + consumed - 1));
 +                    if (ff_h264_decode_seq_parameter_set(h, 0) >= 0)
 +                        break;
                  }
 +                init_get_bits(&h->gb, ptr, bit_length);
 +                ff_h264_decode_seq_parameter_set(h, 1);
  
                  break;
              case NAL_PPS:
                  context_count = 0;
              }
  
 -            if (err < 0) {
 -                av_log(h->avctx, AV_LOG_ERROR, "decode_slice_header error\n");
 +            if (err < 0 || err == SLICE_SKIPED) {
 +                if (err < 0)
 +                    av_log(h->avctx, AV_LOG_ERROR, "decode_slice_header error\n");
                  sl->ref_count[0] = sl->ref_count[1] = sl->list_count = 0;
 -            } else if (err == 1) {
 +            } else if (err == SLICE_SINGLETHREAD) {
 +                if (context_count > 1) {
 +                    ret = ff_h264_execute_decode_slices(h, context_count - 1);
 +                    if (ret < 0 && (h->avctx->err_recognition & AV_EF_EXPLODE))
 +                        goto end;
 +                    context_count = 0;
 +                }
                  /* Slice could not be decoded in parallel mode, restart. Note
                   * that rbsp_buffer is not transferred, but since we no longer
                   * run in parallel mode this should not be an issue. */
  
      ret = 0;
  end:
 +
 +#if CONFIG_ERROR_RESILIENCE
 +    sl = h->slice_ctx;
 +    /*
 +     * FIXME: Error handling code does not seem to support interlaced
 +     * when slices span multiple rows
 +     * The ff_er_add_slice calls don't work right for bottom
 +     * fields; they cause massive erroneous error concealing
 +     * Error marking covers both fields (top and bottom).
 +     * This causes a mismatched s->error_count
 +     * and a bad error table. Further, the error count goes to
 +     * INT_MAX when called for bottom field, because mb_y is
 +     * past end by one (callers fault) and resync_mb_y != 0
 +     * causes problems for the first MB line, too.
 +     */
 +    if (!FIELD_PICTURE(h) && h->current_slice && !h->sps.new && h->enable_er) {
 +        int use_last_pic = h->last_pic_for_ec.f->buf[0] && !sl->ref_count[0];
 +
 +        ff_h264_set_erpic(&sl->er.cur_pic, h->cur_pic_ptr);
 +
 +        if (use_last_pic) {
 +            ff_h264_set_erpic(&sl->er.last_pic, &h->last_pic_for_ec);
 +            sl->ref_list[0][0].parent = &h->last_pic_for_ec;
 +            memcpy(sl->ref_list[0][0].data, h->last_pic_for_ec.f->data, sizeof(sl->ref_list[0][0].data));
 +            memcpy(sl->ref_list[0][0].linesize, h->last_pic_for_ec.f->linesize, sizeof(sl->ref_list[0][0].linesize));
 +            sl->ref_list[0][0].reference = h->last_pic_for_ec.reference;
 +        } else if (sl->ref_count[0]) {
 +            ff_h264_set_erpic(&sl->er.last_pic, sl->ref_list[0][0].parent);
 +        } else
 +            ff_h264_set_erpic(&sl->er.last_pic, NULL);
 +
 +        if (sl->ref_count[1])
 +            ff_h264_set_erpic(&sl->er.next_pic, sl->ref_list[1][0].parent);
 +
 +        sl->er.ref_count = sl->ref_count[0];
 +
 +        ff_er_frame_end(&sl->er);
 +        if (use_last_pic)
 +            memset(&sl->ref_list[0][0], 0, sizeof(sl->ref_list[0][0]));
 +    }
 +#endif /* CONFIG_ERROR_RESILIENCE */
      /* clean up */
      if (h->cur_pic_ptr && !h->droppable) {
          ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
@@@ -1741,62 -1551,26 +1669,62 @@@ static int get_consumed_bytes(int pos, 
      return pos;
  }
  
 -static int output_frame(H264Context *h, AVFrame *dst, AVFrame *src)
 +static int output_frame(H264Context *h, AVFrame *dst, H264Picture *srcp)
  {
 +    AVFrame *src = srcp->f;
 +    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(src->format);
      int i;
      int ret = av_frame_ref(dst, src);
      if (ret < 0)
          return ret;
  
 -    if (!h->sps.crop)
 +    av_dict_set(&dst->metadata, "stereo_mode", ff_h264_sei_stereo_mode(h), 0);
 +
 +    h->backup_width   = h->avctx->width;
 +    h->backup_height  = h->avctx->height;
 +    h->backup_pix_fmt = h->avctx->pix_fmt;
 +
 +    h->avctx->width   = dst->width;
 +    h->avctx->height  = dst->height;
 +    h->avctx->pix_fmt = dst->format;
 +
 +    if (srcp->sei_recovery_frame_cnt == 0)
 +        dst->key_frame = 1;
 +    if (!srcp->crop)
          return 0;
  
 -    for (i = 0; i < 3; i++) {
 -        int hshift = (i > 0) ? h->chroma_x_shift : 0;
 -        int vshift = (i > 0) ? h->chroma_y_shift : 0;
 -        int off    = ((h->sps.crop_left >> hshift) << h->pixel_shift) +
 -                     (h->sps.crop_top >> vshift) * dst->linesize[i];
 +    for (i = 0; i < desc->nb_components; i++) {
 +        int hshift = (i > 0) ? desc->log2_chroma_w : 0;
 +        int vshift = (i > 0) ? desc->log2_chroma_h : 0;
 +        int off    = ((srcp->crop_left >> hshift) << h->pixel_shift) +
 +                      (srcp->crop_top  >> vshift) * dst->linesize[i];
          dst->data[i] += off;
      }
      return 0;
  }
  
 +static int is_extra(const uint8_t *buf, int buf_size)
 +{
 +    int cnt= buf[5]&0x1f;
 +    const uint8_t *p= buf+6;
 +    while(cnt--){
 +        int nalsize= AV_RB16(p) + 2;
 +        if(nalsize > buf_size - (p-buf) || (p[2] & 0x9F) != 7)
 +            return 0;
 +        p += nalsize;
 +    }
 +    cnt = *(p++);
 +    if(!cnt)
 +        return 0;
 +    while(cnt--){
 +        int nalsize= AV_RB16(p) + 2;
 +        if(nalsize > buf_size - (p-buf) || (p[2] & 0x9F) != 8)
 +            return 0;
 +        p += nalsize;
 +    }
 +    return 1;
 +}
 +
  static int h264_decode_frame(AVCodecContext *avctx, void *data,
                               int *got_frame, AVPacket *avpkt)
  {
      H264Context *h     = avctx->priv_data;
      AVFrame *pict      = data;
      int buf_index      = 0;
 +    H264Picture *out;
 +    int i, out_idx;
      int ret;
  
      h->flags = avctx->flags;
      h->setup_finished = 0;
  
 +    if (h->backup_width != -1) {
 +        avctx->width    = h->backup_width;
 +        h->backup_width = -1;
 +    }
 +    if (h->backup_height != -1) {
 +        avctx->height    = h->backup_height;
 +        h->backup_height = -1;
 +    }
 +    if (h->backup_pix_fmt != AV_PIX_FMT_NONE) {
 +        avctx->pix_fmt    = h->backup_pix_fmt;
 +        h->backup_pix_fmt = AV_PIX_FMT_NONE;
 +    }
 +
 +    ff_h264_unref_picture(h, &h->last_pic_for_ec);
 +
      /* end of stream, output what is still in the buffers */
 -out:
      if (buf_size == 0) {
 -        H264Picture *out;
 -        int i, out_idx;
 + out:
  
          h->cur_pic_ptr = NULL;
 +        h->first_field = 0;
  
          // FIXME factorize this with the output code below
          out     = h->delayed_pic[0];
              h->delayed_pic[i] = h->delayed_pic[i + 1];
  
          if (out) {
 -            ret = output_frame(h, pict, out->f);
 +            out->reference &= ~DELAYED_PIC_REF;
 +            ret = output_frame(h, pict, out);
              if (ret < 0)
                  return ret;
              *got_frame = 1;
  
          return buf_index;
      }
 +    if (h->is_avc && av_packet_get_side_data(avpkt, AV_PKT_DATA_NEW_EXTRADATA, NULL)) {
 +        int side_size;
 +        uint8_t *side = av_packet_get_side_data(avpkt, AV_PKT_DATA_NEW_EXTRADATA, &side_size);
 +        if (is_extra(side, side_size))
 +            ff_h264_decode_extradata(h, side, side_size);
 +    }
 +    if(h->is_avc && buf_size >= 9 && buf[0]==1 && buf[2]==0 && (buf[4]&0xFC)==0xFC && (buf[5]&0x1F) && buf[8]==0x67){
 +        if (is_extra(buf, buf_size))
 +            return ff_h264_decode_extradata(h, buf, buf_size);
 +    }
  
      buf_index = decode_nal_units(h, buf, buf_size, 0);
      if (buf_index < 0)
          return AVERROR_INVALIDDATA;
  
      if (!h->cur_pic_ptr && h->nal_unit_type == NAL_END_SEQUENCE) {
 -        buf_size = 0;
 +        av_assert0(buf_index <= buf_size);
          goto out;
      }
  
      if (!(avctx->flags2 & AV_CODEC_FLAG2_CHUNKS) && !h->cur_pic_ptr) {
 -        if (avctx->skip_frame >= AVDISCARD_NONREF)
 -            return 0;
 +        if (avctx->skip_frame >= AVDISCARD_NONREF ||
 +            buf_size >= 4 && !memcmp("Q264", buf, 4))
 +            return buf_size;
          av_log(avctx, AV_LOG_ERROR, "no frame!\n");
          return AVERROR_INVALIDDATA;
      }
          if (avctx->flags2 & AV_CODEC_FLAG2_CHUNKS)
              decode_postinit(h, 1);
  
 -        ff_h264_field_end(h, &h->slice_ctx[0], 0);
 +        if ((ret = ff_h264_field_end(h, &h->slice_ctx[0], 0)) < 0)
 +            return ret;
  
 +        /* Wait for second field. */
          *got_frame = 0;
          if (h->next_output_pic && ((avctx->flags & AV_CODEC_FLAG_OUTPUT_CORRUPT) ||
 +                                   (avctx->flags2 & AV_CODEC_FLAG2_SHOW_ALL) ||
                                     h->next_output_pic->recovered)) {
              if (!h->next_output_pic->recovered)
                  h->next_output_pic->f->flags |= AV_FRAME_FLAG_CORRUPT;
  
 -            ret = output_frame(h, pict, h->next_output_pic->f);
 +            if (!h->avctx->hwaccel &&
 +                 (h->next_output_pic->field_poc[0] == INT_MAX ||
 +                  h->next_output_pic->field_poc[1] == INT_MAX)
 +            ) {
 +                int p;
 +                AVFrame *f = h->next_output_pic->f;
 +                int field = h->next_output_pic->field_poc[0] == INT_MAX;
 +                uint8_t *dst_data[4];
 +                int linesizes[4];
 +                const uint8_t *src_data[4];
 +
 +                av_log(h->avctx, AV_LOG_DEBUG, "Duplicating field %d to fill missing\n", field);
 +
 +                for (p = 0; p<4; p++) {
 +                    dst_data[p] = f->data[p] + (field^1)*f->linesize[p];
 +                    src_data[p] = f->data[p] +  field   *f->linesize[p];
 +                    linesizes[p] = 2*f->linesize[p];
 +                }
 +
 +                av_image_copy(dst_data, linesizes, src_data, linesizes,
 +                              f->format, f->width, f->height>>1);
 +            }
 +
 +            ret = output_frame(h, pict, h->next_output_pic);
              if (ret < 0)
                  return ret;
              *got_frame = 1;
 +            if (CONFIG_MPEGVIDEO) {
 +                ff_print_debug_info2(h->avctx, pict, NULL,
 +                                    h->next_output_pic->mb_type,
 +                                    h->next_output_pic->qscale_table,
 +                                    h->next_output_pic->motion_val,
 +                                    &h->low_delay,
 +                                    h->mb_width, h->mb_height, h->mb_stride, 1);
 +            }
          }
      }
  
 -    assert(pict->buf[0] || !*got_frame);
 +    av_assert0(pict->buf[0] || !*got_frame);
 +
 +    ff_h264_unref_picture(h, &h->last_pic_for_ec);
  
      return get_consumed_bytes(buf_index, buf_size);
  }
@@@ -1959,7 -1669,6 +1887,7 @@@ av_cold void ff_h264_free_context(H264C
          ff_h264_unref_picture(h, &h->DPB[i]);
          av_frame_free(&h->DPB[i].f);
      }
 +    memset(h->delayed_pic, 0, sizeof(h->delayed_pic));
  
      h->cur_pic_ptr = NULL;
  
      av_freep(&h->slice_ctx);
      h->nb_slice_ctx = 0;
  
 +    h->a53_caption_size = 0;
 +    av_freep(&h->a53_caption);
 +
      for (i = 0; i < MAX_SPS_COUNT; i++)
          av_freep(h->sps_buffers + i);
  
@@@ -1982,13 -1688,10 +1910,13 @@@ static av_cold int h264_decode_end(AVCo
  {
      H264Context *h = avctx->priv_data;
  
 +    ff_h264_remove_all_refs(h);
      ff_h264_free_context(h);
  
      ff_h264_unref_picture(h, &h->cur_pic);
      av_frame_free(&h->cur_pic.f);
 +    ff_h264_unref_picture(h, &h->last_pic_for_ec);
 +    av_frame_free(&h->last_pic_for_ec.f);
  
      return 0;
  }
  #define OFFSET(x) offsetof(H264Context, x)
  #define VD AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM
  static const AVOption h264_options[] = {
 -    { "enable_er", "Enable error resilience on damaged frames (unsafe)", OFFSET(enable_er), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VD },
 +    {"is_avc", "is avc", offsetof(H264Context, is_avc), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, 0},
 +    {"nal_length_size", "nal_length_size", offsetof(H264Context, nal_length_size), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 4, 0},
 +    { "enable_er", "Enable error resilience on damaged frames (unsafe)", OFFSET(enable_er), AV_OPT_TYPE_BOOL, { .i64 = -1 }, -1, 1, VD },
      { NULL },
  };
  
  static const AVClass h264_class = {
 -    .class_name = "h264",
 +    .class_name = "H264 Decoder",
      .item_name  = av_default_item_name,
      .option     = h264_options,
      .version    = LIBAVUTIL_VERSION_INT,
@@@ -2028,29 -1729,3 +1956,29 @@@ AVCodec ff_h264_decoder = 
      .profiles              = NULL_IF_CONFIG_SMALL(ff_h264_profiles),
      .priv_class            = &h264_class,
  };
 +
 +#if CONFIG_H264_VDPAU_DECODER && FF_API_VDPAU
 +static const AVClass h264_vdpau_class = {
 +    .class_name = "H264 VDPAU Decoder",
 +    .item_name  = av_default_item_name,
 +    .option     = h264_options,
 +    .version    = LIBAVUTIL_VERSION_INT,
 +};
 +
 +AVCodec ff_h264_vdpau_decoder = {
 +    .name           = "h264_vdpau",
 +    .long_name      = NULL_IF_CONFIG_SMALL("H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 (VDPAU acceleration)"),
 +    .type           = AVMEDIA_TYPE_VIDEO,
 +    .id             = AV_CODEC_ID_H264,
 +    .priv_data_size = sizeof(H264Context),
 +    .init           = ff_h264_decode_init,
 +    .close          = h264_decode_end,
 +    .decode         = h264_decode_frame,
 +    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY | AV_CODEC_CAP_HWACCEL_VDPAU,
 +    .flush          = flush_dpb,
 +    .pix_fmts       = (const enum AVPixelFormat[]) { AV_PIX_FMT_VDPAU_H264,
 +                                                     AV_PIX_FMT_NONE},
 +    .profiles       = NULL_IF_CONFIG_SMALL(ff_h264_profiles),
 +    .priv_class     = &h264_vdpau_class,
 +};
 +#endif
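
The is_extra() helper added above probes whether a buffer is really avcC-style extradata: byte 5 of the avcC header carries the SPS count in its low five bits, each parameter set is prefixed by a 2-byte big-endian length, and the first payload byte's low five bits give the NAL type (7 for SPS, 8 for PPS). A minimal standalone sketch of that walk, using hypothetical names (rb16, check_ps_run, avcc_is_parameter_sets_only) rather than FFmpeg's own helpers:

    #include <stdint.h>

    static unsigned rb16(const uint8_t *p)
    {
        return ((unsigned)p[0] << 8) | p[1];          /* big-endian 16-bit read */
    }

    /* Walk one run of length-prefixed parameter sets, checking the NAL type. */
    static int check_ps_run(const uint8_t *buf, int size, int *pos,
                            int count, int nal_type)
    {
        while (count--) {
            if (*pos + 3 > size || (buf[*pos + 2] & 0x9f) != nal_type)
                return 0;
            *pos += 2 + rb16(&buf[*pos]);             /* 2-byte length + NAL payload */
            if (*pos > size)
                return 0;
        }
        return 1;
    }

    static int avcc_is_parameter_sets_only(const uint8_t *buf, int size)
    {
        int pos = 6, pps_count;                       /* skip the 6-byte avcC header */

        if (size < 7 || buf[0] != 1)                  /* configurationVersion must be 1 */
            return 0;
        if (!check_ps_run(buf, size, &pos, buf[5] & 0x1f, 7))   /* SPS entries */
            return 0;
        if (pos >= size)
            return 0;
        pps_count = buf[pos++];                       /* numOfPictureParameterSets */
        return check_ps_run(buf, size, &pos, pps_count, 8);     /* PPS entries */
    }
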
diff --combined libavcodec/h264.h
index 6d6629b043715c7fa24d02f73210a6af395aaf6e,3fe5b9c7e5bd73c547c40f0f4646ff87afed5062..e6559950fd3bf33e2d184f659c5d1c8003abd439
@@@ -2,20 -2,20 +2,20 @@@
   * H.26L/H.264/AVC/JVT/14496-10/... encoder/decoder
   * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
   *
 - * This file is part of Libav.
 + * This file is part of FFmpeg.
   *
 - * Libav is free software; you can redistribute it and/or
 + * FFmpeg is free software; you can redistribute it and/or
   * modify it under the terms of the GNU Lesser General Public
   * License as published by the Free Software Foundation; either
   * version 2.1 of the License, or (at your option) any later version.
   *
 - * Libav is distributed in the hope that it will be useful,
 + * FFmpeg is distributed in the hope that it will be useful,
   * but WITHOUT ANY WARRANTY; without even the implied warranty of
   * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   * Lesser General Public License for more details.
   *
   * You should have received a copy of the GNU Lesser General Public
 - * License along with Libav; if not, write to the Free Software
 + * License along with FFmpeg; if not, write to the Free Software
   * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
   */
  
@@@ -33,6 -33,7 +33,7 @@@
  #include "cabac.h"
  #include "error_resilience.h"
  #include "get_bits.h"
+ #include "h264_parse.h"
  #include "h264chroma.h"
  #include "h264dsp.h"
  #include "h264pred.h"
@@@ -44,8 -45,8 +45,8 @@@
  #include "rectangle.h"
  #include "videodsp.h"
  
 -#define H264_MAX_PICTURE_COUNT 32
 -#define H264_MAX_THREADS       16
 +#define H264_MAX_PICTURE_COUNT 36
 +#define H264_MAX_THREADS       32
  
  #define MAX_SPS_COUNT          32
  #define MAX_PPS_COUNT         256
@@@ -54,8 -55,6 +55,8 @@@
  
  #define MAX_DELAYED_PIC_COUNT  16
  
 +#define MAX_MBPAIR_SIZE (256*1024) // a tighter bound could be calculated if someone cares about a few bytes
 +
  /* Compiling in interlaced support reduces the speed
   * of progressive decoding by about 2%. */
  #define ALLOW_INTERLACE
  #define MAX_SLICES 32
  
  #ifdef ALLOW_INTERLACE
 -#define MB_MBAFF(h)    h->mb_mbaff
 -#define MB_FIELD(h)    h->mb_field_decoding_flag
 -#define FRAME_MBAFF(h) h->mb_aff_frame
 -#define FIELD_PICTURE(h) (h->picture_structure != PICT_FRAME)
 +#define MB_MBAFF(h)    (h)->mb_mbaff
 +#define MB_FIELD(sl)  (sl)->mb_field_decoding_flag
 +#define FRAME_MBAFF(h) (h)->mb_aff_frame
 +#define FIELD_PICTURE(h) ((h)->picture_structure != PICT_FRAME)
  #define LEFT_MBS 2
  #define LTOP     0
  #define LBOT     1
  #define LEFT(i)  (i)
  #else
  #define MB_MBAFF(h)      0
 -#define MB_FIELD(h)      0
 +#define MB_FIELD(sl)     0
  #define FRAME_MBAFF(h)   0
  #define FIELD_PICTURE(h) 0
  #undef  IS_INTERLACED
  #define FIELD_OR_MBAFF_PICTURE(h) (FRAME_MBAFF(h) || FIELD_PICTURE(h))
  
  #ifndef CABAC
 -#define CABAC(h) h->pps.cabac
 +#define CABAC(h) (h)->pps.cabac
  #endif
  
 -#define CHROMA422(h) (h->sps.chroma_format_idc == 2)
 -#define CHROMA444(h) (h->sps.chroma_format_idc == 3)
 +#define CHROMA(h)    ((h)->sps.chroma_format_idc)
 +#define CHROMA422(h) ((h)->sps.chroma_format_idc == 2)
 +#define CHROMA444(h) ((h)->sps.chroma_format_idc == 3)
  
  #define EXTENDED_SAR       255
  
  #define IS_REF0(a)         ((a) & MB_TYPE_REF0)
  #define IS_8x8DCT(a)       ((a) & MB_TYPE_8x8DCT)
  
 -#define QP_MAX_NUM (51 + 2 * 6)           // The maximum supported qp
 +#define QP_MAX_NUM (51 + 6*6)           // The maximum supported qp
  
  /* NAL unit types */
  enum {
@@@ -138,7 -136,6 +139,7 @@@ typedef enum 
      SEI_TYPE_RECOVERY_POINT         = 6,   ///< recovery point (frame # to decoder sync)
      SEI_TYPE_FRAME_PACKING          = 45,  ///< frame packing arrangement
      SEI_TYPE_DISPLAY_ORIENTATION    = 47,  ///< display orientation
 +    SEI_TYPE_GREEN_METADATA         = 56   ///< GreenMPEG information
  } SEI_Type;
  
  /**
@@@ -156,19 -153,6 +157,19 @@@ typedef enum 
      SEI_PIC_STRUCT_FRAME_TRIPLING    = 8  ///<  8: %frame tripling
  } SEI_PicStructType;
  
 +/**
 + * frame_packing_arrangement types
 + */
 +typedef enum {
 +    SEI_FPA_TYPE_CHECKERBOARD        = 0,
 +    SEI_FPA_TYPE_INTERLEAVE_COLUMN   = 1,
 +    SEI_FPA_TYPE_INTERLEAVE_ROW      = 2,
 +    SEI_FPA_TYPE_SIDE_BY_SIDE        = 3,
 +    SEI_FPA_TYPE_TOP_BOTTOM          = 4,
 +    SEI_FPA_TYPE_INTERLEAVE_TEMPORAL = 5,
 +    SEI_FPA_TYPE_2D                  = 6,
 +} SEI_FpaType;
 +
  /**
   * Sequence parameter set
   */
@@@ -230,8 -214,6 +231,8 @@@ typedef struct SPS 
      int residual_color_transform_flag;    ///< residual_colour_transform_flag
      int constraint_set_flags;             ///< constraint_set[0-3]_flag
      int new;                              ///< flag to keep track if the decoder context needs re-init due to changed SPS
 +    uint8_t data[4096];
 +    size_t data_size;
  } SPS;
  
  /**
@@@ -255,40 -237,10 +256,40 @@@ typedef struct PPS 
      int transform_8x8_mode;         ///< transform_8x8_mode_flag
      uint8_t scaling_matrix4[6][16];
      uint8_t scaling_matrix8[6][64];
 -    uint8_t chroma_qp_table[2][64]; ///< pre-scaled (with chroma_qp_index_offset) version of qp_table
 +    uint8_t chroma_qp_table[2][QP_MAX_NUM+1];  ///< pre-scaled (with chroma_qp_index_offset) version of qp_table
      int chroma_qp_diff;
 +    uint8_t data[4096];
 +    size_t data_size;
  } PPS;
  
 +/**
 + * Frame Packing Arrangement Type
 + */
 +typedef struct FPA {
 +    int         frame_packing_arrangement_id;
 +    int         frame_packing_arrangement_cancel_flag; ///< is previous arrangement canceled, -1 if never received
 +    SEI_FpaType frame_packing_arrangement_type;
 +    int         frame_packing_arrangement_repetition_period;
 +    int         content_interpretation_type;
 +    int         quincunx_sampling_flag;
 +} FPA;
 +
 +/**
 + *     Green MetaData Information Type
 + */
 +typedef struct GreenMetaData {
 +    uint8_t  green_metadata_type;
 +    uint8_t  period_type;
 +    uint16_t  num_seconds;
 +    uint16_t  num_pictures;
 +    uint8_t percent_non_zero_macroblocks;
 +    uint8_t percent_intra_coded_macroblocks;
 +    uint8_t percent_six_tap_filtering;
 +    uint8_t percent_alpha_point_deblocking_instance;
 +    uint8_t xsd_metric_type;
 +    uint16_t xsd_metric_value;
 +} GreenMetaData;
 +
  /**
   * Memory management control operation opcode.
   */
@@@ -338,19 -290,13 +339,19 @@@ typedef struct H264Picture 
      int pic_id;             /**< pic_num (short -> no wrap version of pic_num,
                                   pic_num & max_pic_num; long -> long_pic_num) */
      int long_ref;           ///< 1->long term reference 0->short term reference
 -    int ref_poc[2][2][32];  ///< POCs of the frames used as reference (FIXME need per slice)
 +    int ref_poc[2][2][32];  ///< POCs of the frames/fields used as reference (FIXME need per slice)
      int ref_count[2][2];    ///< number of entries in ref_poc         (FIXME need per slice)
      int mbaff;              ///< 1 -> MBAFF frame 0-> not MBAFF
      int field_picture;      ///< whether or not picture was encoded in separate fields
  
      int reference;
      int recovered;          ///< picture at IDR or recovery point + recovery count
 +    int invalid_gap;
 +    int sei_recovery_frame_cnt;
 +
 +    int crop;
 +    int crop_left;
 +    int crop_top;
  } H264Picture;
  
  typedef struct H264Ref {
@@@ -384,17 -330,7 +385,7 @@@ typedef struct H264SliceContext 
      int slice_alpha_c0_offset;
      int slice_beta_offset;
  
-     // Weighted pred stuff
-     int use_weight;
-     int use_weight_chroma;
-     int luma_log2_weight_denom;
-     int chroma_log2_weight_denom;
-     int luma_weight_flag[2];    ///< 7.4.3.2 luma_weight_lX_flag
-     int chroma_weight_flag[2];  ///< 7.4.3.2 chroma_weight_lX_flag
-     // The following 2 can be changed to int8_t but that causes 10cpu cycles speedloss
-     int luma_weight[48][2][2];
-     int chroma_weight[48][2][2][2];
-     int implicit_weight[48][48][2];
+     H264PredWeightTable pwt;
  
      int prev_mb_skipped;
      int next_mb_skipped;
  
      DECLARE_ALIGNED(8, uint16_t, sub_mb_type)[4];
  
 -    ///< as a dct coeffecient is int32_t in high depth, we need to reserve twice the space.
 +    ///< as a dct coefficient is int32_t in high depth, we need to reserve twice the space.
      DECLARE_ALIGNED(16, int16_t, mb)[16 * 48 * 2];
      DECLARE_ALIGNED(16, int16_t, mb_luma_dc)[3][16 * 2];
      ///< as mb is addressed by scantable[i] and scantable is uint8_t we can either
@@@ -531,7 -467,6 +522,7 @@@ typedef struct H264Context 
      H264Picture DPB[H264_MAX_PICTURE_COUNT];
      H264Picture *cur_pic_ptr;
      H264Picture cur_pic;
 +    H264Picture last_pic_for_ec;
  
      H264SliceContext *slice_ctx;
      int            nb_slice_ctx;
      int width, height;
      int chroma_x_shift, chroma_y_shift;
  
 +    /**
 +     * Backup frame properties: needed, because they can be different
 +     * between returned frame and last decoded frame.
 +     **/
 +    int backup_width;
 +    int backup_height;
 +    enum AVPixelFormat backup_pix_fmt;
 +
      int droppable;
      int coded_picture_number;
      int low_delay;
      uint32_t *mb2br_xy;
      int b_stride;       // FIXME use s->b4_stride
  
 +
 +    unsigned current_sps_id; ///< id of the current SPS
      SPS sps; ///< current sps
      PPS pps; ///< current pps
  
 +    int au_pps_id; ///< pps_id of current access unit
 +
      uint32_t dequant4_buffer[6][QP_MAX_NUM + 1][16]; // FIXME should these be moved down?
      uint32_t dequant8_buffer[6][QP_MAX_NUM + 1][64];
      uint32_t(*dequant4_coeff[6])[16];
      uint8_t field_scan[16];
      uint8_t field_scan8x8[64];
      uint8_t field_scan8x8_cavlc[64];
 -    const uint8_t *zigzag_scan_q0;
 -    const uint8_t *zigzag_scan8x8_q0;
 -    const uint8_t *zigzag_scan8x8_cavlc_q0;
 -    const uint8_t *field_scan_q0;
 -    const uint8_t *field_scan8x8_q0;
 -    const uint8_t *field_scan8x8_cavlc_q0;
 +    uint8_t zigzag_scan_q0[16];
 +    uint8_t zigzag_scan8x8_q0[64];
 +    uint8_t zigzag_scan8x8_cavlc_q0[64];
 +    uint8_t field_scan_q0[16];
 +    uint8_t field_scan8x8_q0[64];
 +    uint8_t field_scan8x8_cavlc_q0[64];
  
      int x264_build;
  
       */
      int max_pic_num;
  
 +    H264Ref default_ref[2];
      H264Picture *short_ref[32];
      H264Picture *long_ref[32];
      H264Picture *delayed_pic[MAX_DELAYED_PIC_COUNT + 2]; // FIXME size?
       * @{
       */
      /**
 -     * current slice number, used to initalize slice_num of each thread/context
 +     * current slice number, used to initialize slice_num of each thread/context
       */
      int current_slice;
  
       */
      int sei_recovery_frame_cnt;
  
 +    /**
 +     * Are the SEI recovery points looking valid.
 +     */
 +    int valid_recovery_point;
 +
 +    FPA sei_fpa;
 +
      /**
       * recovery_frame is the frame_num at which the next frame should
       * be fully constructed.
  
      int frame_recovered;    ///< Initial frame has been completely recovered
  
 -    /* for frame threading, this is set to 1
 +    int has_recovery_point;
 +
 +    int missing_fields;
 +
 +/* for frame threading, this is set to 1
       * after finish_setup() has been called, so we cannot modify
       * some context properties (which are supposed to stay constant between
       * slices) anymore */
      int sei_buffering_period_present;   ///< Buffering period SEI flag
      int initial_cpb_removal_delay[32];  ///< Initial timestamps for CPBs
  
 +    int cur_chroma_format_idc;
 +    int cur_bit_depth_luma;
 +    int16_t slice_row[MAX_SLICES]; ///< to detect when MAX_SLICES is too low
 +
 +    uint8_t parse_history[6];
 +    int parse_history_count;
 +    int parse_last_mb;
 +
      int enable_er;
  
      AVBufferPool *qscale_table_pool;
      /* Motion Estimation */
      qpel_mc_func (*qpel_put)[16];
      qpel_mc_func (*qpel_avg)[16];
 +
 +    /*Green Metadata */
 +    GreenMetaData sei_green_metadata;
 +
  } H264Context;
  
 -extern const uint8_t ff_h264_chroma_qp[3][QP_MAX_NUM + 1]; ///< One chroma qp table for each supported bit depth (8, 9, 10).
 +extern const uint8_t ff_h264_chroma_qp[7][QP_MAX_NUM + 1]; ///< One chroma qp table for each possible bit depth (8-14).
  extern const uint16_t ff_h264_mb_sizes[4];
  
  /**
@@@ -855,7 -754,7 +846,7 @@@ int ff_h264_decode_sei(H264Context *h)
  /**
   * Decode SPS
   */
 -int ff_h264_decode_seq_parameter_set(H264Context *h);
 +int ff_h264_decode_seq_parameter_set(H264Context *h, int ignore_truncation);
  
  /**
   * compute profile from sps
@@@ -923,7 -822,7 +914,7 @@@ int ff_h264_check_intra_pred_mode(cons
                                    int mode, int is_chroma);
  
  void ff_h264_hl_decode_mb(const H264Context *h, H264SliceContext *sl);
 -int ff_h264_decode_extradata(H264Context *h);
 +int ff_h264_decode_extradata(H264Context *h, const uint8_t *buf, int size);
  int ff_h264_decode_init(AVCodecContext *avctx);
  void ff_h264_decode_init_vlc(void);
  
@@@ -941,7 -840,7 +932,7 @@@ int ff_h264_decode_mb_cabac(const H264C
  
  void ff_h264_init_cabac_states(const H264Context *h, H264SliceContext *sl);
  
 -void h264_init_dequant_tables(H264Context *h);
 +void ff_h264_init_dequant_tables(H264Context *h);
  
  void ff_h264_direct_dist_scale_factor(const H264Context *const h, H264SliceContext *sl);
  void ff_h264_direct_ref_list_init(const H264Context *const h, H264SliceContext *sl);
@@@ -962,12 -861,6 +953,12 @@@ void ff_h264_filter_mb(const H264Contex
   */
  void ff_h264_reset_sei(H264Context *h);
  
 +/**
 + * Get stereo_mode string from the h264 frame_packing_arrangement
 + * @param h H.264 context.
 + */
 +const char* ff_h264_sei_stereo_mode(H264Context *h);
 +
  /*
   * o-o o-o
   *  / / /
@@@ -1018,7 -911,7 +1009,7 @@@ static const uint8_t scan8[16 * 3 + 3] 
      0 +  0 * 8, 0 +  5 * 8, 0 + 10 * 8
  };
  
 -static av_always_inline uint32_t pack16to32(int a, int b)
 +static av_always_inline uint32_t pack16to32(unsigned a, unsigned b)
  {
  #if HAVE_BIGENDIAN
      return (b & 0xFFFF) + (a << 16);
  #endif
  }
  
 -static av_always_inline uint16_t pack8to16(int a, int b)
 +static av_always_inline uint16_t pack8to16(unsigned a, unsigned b)
  {
  #if HAVE_BIGENDIAN
      return (b & 0xFF) + (a << 8);
@@@ -1174,36 -1067,6 +1165,36 @@@ static av_always_inline int get_dct8x8_
                    0x0001000100010001ULL));
  }
  
 +static inline int find_start_code(const uint8_t *buf, int buf_size,
 +                           int buf_index, int next_avc)
 +{
 +    uint32_t state = -1;
 +
 +    buf_index = avpriv_find_start_code(buf + buf_index, buf + next_avc + 1, &state) - buf - 1;
 +
 +    return FFMIN(buf_index, buf_size);
 +}
 +
 +static inline int get_avc_nalsize(H264Context *h, const uint8_t *buf,
 +                           int buf_size, int *buf_index)
 +{
 +    int i, nalsize = 0;
 +
 +    if (*buf_index >= buf_size - h->nal_length_size) {
 +        // the end of the buffer is reached, refill it.
 +        return AVERROR(EAGAIN);
 +    }
 +
 +    for (i = 0; i < h->nal_length_size; i++)
 +        nalsize = ((unsigned)nalsize << 8) | buf[(*buf_index)++];
 +    if (nalsize <= 0 || nalsize > buf_size - *buf_index) {
 +        av_log(h->avctx, AV_LOG_ERROR,
 +               "AVC: nal size %d\n", nalsize);
 +        return AVERROR_INVALIDDATA;
 +    }
 +    return nalsize;
 +}
 +
  int ff_h264_field_end(H264Context *h, H264SliceContext *sl, int in_setup);
  
  int ff_h264_ref_picture(H264Context *h, H264Picture *dst, H264Picture *src);
@@@ -1213,13 -1076,9 +1204,12 @@@ int ff_h264_slice_context_init(H264Cont
  
  void ff_h264_draw_horiz_band(const H264Context *h, H264SliceContext *sl, int y, int height);
  int ff_init_poc(H264Context *h, int pic_field_poc[2], int *pic_poc);
- int ff_pred_weight_table(H264Context *h, H264SliceContext *sl);
  int ff_set_ref_count(H264Context *h, H264SliceContext *sl);
  
  int ff_h264_decode_slice_header(H264Context *h, H264SliceContext *sl);
 +#define SLICE_SINGLETHREAD 1
 +#define SLICE_SKIPED 2
 +
  int ff_h264_execute_decode_slices(H264Context *h, unsigned context_count);
  int ff_h264_update_thread_context(AVCodecContext *dst,
                                    const AVCodecContext *src);
@@@ -1228,6 -1087,4 +1218,6 @@@ void ff_h264_flush_change(H264Context *
  
  void ff_h264_free_tables(H264Context *h);
  
 +void ff_h264_set_erpic(ERPicture *dst, H264Picture *src);
 +
  #endif /* AVCODEC_H264_H */
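
A quick reading of the QP constants above: the H.264 spec extends the quantiser range by QpBdOffsetY = 6 * (bit_depth_luma - 8), so covering luma bit depths up to 14 (matching the seven per-depth tables in ff_h264_chroma_qp[7]) needs

    QP_MAX_NUM = 51 + 6 * (14 - 8) = 51 + 36 = 87

which is also why chroma_qp_table[] is sized QP_MAX_NUM + 1 entries per plane instead of 64.
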
diff --combined libavcodec/h264_mb.c
index 8302de040ef128926b06d9eafaef5f82e2f1780b,f94333b590072e913b577224756b08c004073799..8791e2e182bc804dd8e60ab4f463ba5dbc908aee
@@@ -2,20 -2,20 +2,20 @@@
   * H.26L/H.264/AVC/JVT/14496-10/... decoder
   * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
   *
 - * This file is part of Libav.
 + * This file is part of FFmpeg.
   *
 - * Libav is free software; you can redistribute it and/or
 + * FFmpeg is free software; you can redistribute it and/or
   * modify it under the terms of the GNU Lesser General Public
   * License as published by the Free Software Foundation; either
   * version 2.1 of the License, or (at your option) any later version.
   *
 - * Libav is distributed in the hope that it will be useful,
 + * FFmpeg is distributed in the hope that it will be useful,
   * but WITHOUT ANY WARRANTY; without even the implied warranty of
   * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   * Lesser General Public License for more details.
   *
   * You should have received a copy of the GNU Lesser General Public
 - * License along with Libav; if not, write to the Free Software
 + * License along with FFmpeg; if not, write to the Free Software
   * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
   */
  
@@@ -40,17 -40,17 +40,17 @@@ static inline int get_lowest_part_list_
                                           int n, int height, int y_offset, int list)
  {
      int raw_my             = sl->mv_cache[list][scan8[n]][1];
 -    int filter_height_up   = (raw_my & 3) ? 2 : 0;
      int filter_height_down = (raw_my & 3) ? 3 : 0;
      int full_my            = (raw_my >> 2) + y_offset;
 -    int top                = full_my - filter_height_up;
      int bottom             = full_my + filter_height_down + height;
  
 -    return FFMAX(abs(top), bottom);
 +    av_assert2(height >= 0);
 +
 +    return FFMAX(0, bottom);
  }
  
  static inline void get_lowest_part_y(const H264Context *h, H264SliceContext *sl,
 -                                     int refs[2][48], int n,
 +                                     int16_t refs[2][48], int n,
                                       int height, int y_offset, int list0,
                                       int list1, int *nrefs)
  {
@@@ -97,7 -97,7 +97,7 @@@ static void await_references(const H264
  {
      const int mb_xy   = sl->mb_xy;
      const int mb_type = h->cur_pic.mb_type[mb_xy];
 -    int refs[2][48];
 +    int16_t refs[2][48];
      int nrefs[2] = { 0 };
      int ref, list;
  
      } else {
          int i;
  
 -        assert(IS_8X8(mb_type));
 +        av_assert2(IS_8X8(mb_type));
  
          for (i = 0; i < 4; i++) {
              const int sub_mb_type = sl->sub_mb_type[i];
                                    nrefs);
              } else {
                  int j;
 -                assert(IS_SUB_4X4(sub_mb_type));
 +                av_assert2(IS_SUB_4X4(sub_mb_type));
                  for (j = 0; j < 4; j++) {
                      int sub_y_offset = y_offset + 2 * (j & 2);
                      get_lowest_part_y(h, sl, refs, n + j, 4, sub_y_offset,
                  nrefs[list]--;
  
                  if (!FIELD_PICTURE(h) && ref_field_picture) { // frame referencing two fields
 +                    av_assert2((ref_pic->parent->reference & 3) == 3);
                      ff_thread_await_progress(&ref_pic->parent->tf,
                                               FFMIN((row >> 1) - !(row & 1),
                                                     pic_height - 1),
@@@ -216,7 -215,7 +216,7 @@@ static av_always_inline void mc_dir_par
      const int mx      = sl->mv_cache[list][scan8[n]][0] + src_x_offset * 8;
      int my            = sl->mv_cache[list][scan8[n]][1] + src_y_offset * 8;
      const int luma_xy = (mx & 3) + ((my & 3) << 2);
 -    ptrdiff_t offset  = ((mx >> 2) << pixel_shift) + (my >> 2) * sl->mb_linesize;
 +    ptrdiff_t offset  = (mx >> 2) * (1 << pixel_shift) + (my >> 2) * sl->mb_linesize;
      uint8_t *src_y    = pic->data[0] + offset;
      uint8_t *src_cb, *src_cr;
      int extra_width  = 0;
          emu |= (my >> 3) < 0 || (my >> 3) + 8 >= (pic_height >> 1);
      }
  
 -    src_cb = pic->data[1] + ((mx >> 3) << pixel_shift) +
 +    src_cb = pic->data[1] + ((mx >> 3) * (1 << pixel_shift)) +
               (my >> ysh) * sl->mb_uvlinesize;
 -    src_cr = pic->data[2] + ((mx >> 3) << pixel_shift) +
 +    src_cr = pic->data[2] + ((mx >> 3) * (1 << pixel_shift)) +
               (my >> ysh) * sl->mb_uvlinesize;
  
      if (emu) {
      }
      chroma_op(dest_cb, src_cb, sl->mb_uvlinesize,
                height >> (chroma_idc == 1 /* yuv420 */),
 -              mx & 7, (my << (chroma_idc == 2 /* yuv422 */)) & 7);
 +              mx & 7, ((unsigned)my << (chroma_idc == 2 /* yuv422 */)) & 7);
  
      if (emu) {
          h->vdsp.emulated_edge_mc(sl->edge_emu_buffer, src_cr,
          src_cr = sl->edge_emu_buffer;
      }
      chroma_op(dest_cr, src_cr, sl->mb_uvlinesize, height >> (chroma_idc == 1 /* yuv420 */),
 -              mx & 7, (my << (chroma_idc == 2 /* yuv422 */)) & 7);
 +              mx & 7, ((unsigned)my << (chroma_idc == 2 /* yuv422 */)) & 7);
  }
  
  static av_always_inline void mc_part_std(const H264Context *h, H264SliceContext *sl,
@@@ -420,38 -419,34 +420,38 @@@ static av_always_inline void mc_part_we
                      x_offset, y_offset, qpix_put, chroma_put,
                      pixel_shift, chroma_idc);
  
-         if (sl->use_weight == 2) {
-             int weight0 = sl->implicit_weight[refn0][refn1][sl->mb_y & 1];
+         if (sl->pwt.use_weight == 2) {
+             int weight0 = sl->pwt.implicit_weight[refn0][refn1][sl->mb_y & 1];
              int weight1 = 64 - weight0;
              luma_weight_avg(dest_y, tmp_y, sl->mb_linesize,
                              height, 5, weight0, weight1, 0);
 -            chroma_weight_avg(dest_cb, tmp_cb, sl->mb_uvlinesize,
 -                              chroma_height, 5, weight0, weight1, 0);
 -            chroma_weight_avg(dest_cr, tmp_cr, sl->mb_uvlinesize,
 -                              chroma_height, 5, weight0, weight1, 0);
 +            if (!CONFIG_GRAY || !(h->flags & AV_CODEC_FLAG_GRAY)) {
 +                chroma_weight_avg(dest_cb, tmp_cb, sl->mb_uvlinesize,
 +                                  chroma_height, 5, weight0, weight1, 0);
 +                chroma_weight_avg(dest_cr, tmp_cr, sl->mb_uvlinesize,
 +                                  chroma_height, 5, weight0, weight1, 0);
 +            }
          } else {
              luma_weight_avg(dest_y, tmp_y, sl->mb_linesize, height,
-                             sl->luma_log2_weight_denom,
-                             sl->luma_weight[refn0][0][0],
-                             sl->luma_weight[refn1][1][0],
-                             sl->luma_weight[refn0][0][1] +
-                             sl->luma_weight[refn1][1][1]);
+                             sl->pwt.luma_log2_weight_denom,
+                             sl->pwt.luma_weight[refn0][0][0],
+                             sl->pwt.luma_weight[refn1][1][0],
+                             sl->pwt.luma_weight[refn0][0][1] +
+                             sl->pwt.luma_weight[refn1][1][1]);
 -            chroma_weight_avg(dest_cb, tmp_cb, sl->mb_uvlinesize, chroma_height,
 -                              sl->pwt.chroma_log2_weight_denom,
 -                              sl->pwt.chroma_weight[refn0][0][0][0],
 -                              sl->pwt.chroma_weight[refn1][1][0][0],
 -                              sl->pwt.chroma_weight[refn0][0][0][1] +
 -                              sl->pwt.chroma_weight[refn1][1][0][1]);
 -            chroma_weight_avg(dest_cr, tmp_cr, sl->mb_uvlinesize, chroma_height,
 -                              sl->pwt.chroma_log2_weight_denom,
 -                              sl->pwt.chroma_weight[refn0][0][1][0],
 -                              sl->pwt.chroma_weight[refn1][1][1][0],
 -                              sl->pwt.chroma_weight[refn0][0][1][1] +
 -                              sl->pwt.chroma_weight[refn1][1][1][1]);
 +            if (!CONFIG_GRAY || !(h->flags & AV_CODEC_FLAG_GRAY)) {
 +                chroma_weight_avg(dest_cb, tmp_cb, sl->mb_uvlinesize, chroma_height,
-                                   sl->chroma_log2_weight_denom,
-                                   sl->chroma_weight[refn0][0][0][0],
-                                   sl->chroma_weight[refn1][1][0][0],
-                                   sl->chroma_weight[refn0][0][0][1] +
-                                   sl->chroma_weight[refn1][1][0][1]);
++                                  sl->pwt.chroma_log2_weight_denom,
++                                  sl->pwt.chroma_weight[refn0][0][0][0],
++                                  sl->pwt.chroma_weight[refn1][1][0][0],
++                                  sl->pwt.chroma_weight[refn0][0][0][1] +
++                                  sl->pwt.chroma_weight[refn1][1][0][1]);
 +                chroma_weight_avg(dest_cr, tmp_cr, sl->mb_uvlinesize, chroma_height,
-                                   sl->chroma_log2_weight_denom,
-                                   sl->chroma_weight[refn0][0][1][0],
-                                   sl->chroma_weight[refn1][1][1][0],
-                                   sl->chroma_weight[refn0][0][1][1] +
-                                   sl->chroma_weight[refn1][1][1][1]);
++                                  sl->pwt.chroma_log2_weight_denom,
++                                  sl->pwt.chroma_weight[refn0][0][1][0],
++                                  sl->pwt.chroma_weight[refn1][1][1][0],
++                                  sl->pwt.chroma_weight[refn0][0][1][1] +
++                                  sl->pwt.chroma_weight[refn1][1][1][1]);
 +            }
          }
      } else {
          int list     = list1 ? 1 : 0;
                      qpix_put, chroma_put, pixel_shift, chroma_idc);
  
          luma_weight_op(dest_y, sl->mb_linesize, height,
-                        sl->luma_log2_weight_denom,
-                        sl->luma_weight[refn][list][0],
-                        sl->luma_weight[refn][list][1]);
+                        sl->pwt.luma_log2_weight_denom,
+                        sl->pwt.luma_weight[refn][list][0],
+                        sl->pwt.luma_weight[refn][list][1]);
 -        if (sl->pwt.use_weight_chroma) {
 -            chroma_weight_op(dest_cb, sl->mb_uvlinesize, chroma_height,
 -                             sl->pwt.chroma_log2_weight_denom,
 -                             sl->pwt.chroma_weight[refn][list][0][0],
 -                             sl->pwt.chroma_weight[refn][list][0][1]);
 -            chroma_weight_op(dest_cr, sl->mb_uvlinesize, chroma_height,
 -                             sl->pwt.chroma_log2_weight_denom,
 -                             sl->pwt.chroma_weight[refn][list][1][0],
 -                             sl->pwt.chroma_weight[refn][list][1][1]);
 +        if (!CONFIG_GRAY || !(h->flags & AV_CODEC_FLAG_GRAY)) {
-             if (sl->use_weight_chroma) {
++            if (sl->pwt.use_weight_chroma) {
 +                chroma_weight_op(dest_cb, sl->mb_uvlinesize, chroma_height,
-                                  sl->chroma_log2_weight_denom,
-                                  sl->chroma_weight[refn][list][0][0],
-                                  sl->chroma_weight[refn][list][0][1]);
++                                 sl->pwt.chroma_log2_weight_denom,
++                                 sl->pwt.chroma_weight[refn][list][0][0],
++                                 sl->pwt.chroma_weight[refn][list][0][1]);
 +                chroma_weight_op(dest_cr, sl->mb_uvlinesize, chroma_height,
-                                  sl->chroma_log2_weight_denom,
-                                  sl->chroma_weight[refn][list][1][0],
-                                  sl->chroma_weight[refn][list][1][1]);
++                                 sl->pwt.chroma_log2_weight_denom,
++                                 sl->pwt.chroma_weight[refn][list][1][0],
++                                 sl->pwt.chroma_weight[refn][list][1][1]);
 +            }
          }
      }
  }
@@@ -491,7 -484,7 +491,7 @@@ static av_always_inline void prefetch_m
          const int mx  = (sl->mv_cache[list][scan8[0]][0] >> 2) + 16 * sl->mb_x + 8;
          const int my  = (sl->mv_cache[list][scan8[0]][1] >> 2) + 16 * sl->mb_y;
          uint8_t **src = sl->ref_list[list][refn].data;
 -        int off       = (mx << pixel_shift) +
 +        int off       =  mx * (1<< pixel_shift) +
                          (my + (sl->mb_x & 3) * 4) * sl->mb_linesize +
                          (64 << pixel_shift);
          h->vdsp.prefetch(src[0] + off, sl->linesize, 4);
              h->vdsp.prefetch(src[1] + off, sl->linesize, 4);
              h->vdsp.prefetch(src[2] + off, sl->linesize, 4);
          } else {
 -            off = ((mx >> 1) << pixel_shift) +
 -                  ((my >> 1) + (sl->mb_x & 7)) * sl->uvlinesize +
 -                  (64 << pixel_shift);
 +            off= ((mx>>1)+64) * (1<<pixel_shift) + ((my>>1) + (sl->mb_x&7))*sl->uvlinesize;
              h->vdsp.prefetch(src[1] + off, src[2] - src[1], 2);
          }
      }
@@@ -566,8 -561,10 +566,8 @@@ static av_always_inline void xchg_mb_bo
              XCHG(sl->top_borders[top_idx][sl->mb_x + 1],
                   src_y + (17 << pixel_shift), 1);
          }
 -    }
 -    if (simple || !CONFIG_GRAY || !(h->flags & AV_CODEC_FLAG_GRAY)) {
 -        if (chroma444) {
 -            if (deblock_top) {
 +        if (simple || !CONFIG_GRAY || !(h->flags & AV_CODEC_FLAG_GRAY)) {
 +            if (chroma444) {
                  if (deblock_topleft) {
                      XCHG(top_border_m1 + (24 << pixel_shift), src_cb - (7 << pixel_shift), 1);
                      XCHG(top_border_m1 + (40 << pixel_shift), src_cr - (7 << pixel_shift), 1);
                      XCHG(sl->top_borders[top_idx][sl->mb_x + 1] + (16 << pixel_shift), src_cb + (17 << pixel_shift), 1);
                      XCHG(sl->top_borders[top_idx][sl->mb_x + 1] + (32 << pixel_shift), src_cr + (17 << pixel_shift), 1);
                  }
 -            }
 -        } else {
 -            if (deblock_top) {
 +            } else {
                  if (deblock_topleft) {
                      XCHG(top_border_m1 + (16 << pixel_shift), src_cb - (7 << pixel_shift), 1);
                      XCHG(top_border_m1 + (24 << pixel_shift), src_cr - (7 << pixel_shift), 1);
@@@ -638,12 -637,7 +638,12 @@@ static av_always_inline void hl_decode_
                  uint8_t *const ptr = dest_y + block_offset[i];
                  const int dir      = sl->intra4x4_pred_mode_cache[scan8[i]];
                  if (transform_bypass && h->sps.profile_idc == 244 && dir <= 1) {
 -                    h->hpc.pred8x8l_add[dir](ptr, sl->mb + (i * 16 + p * 256 << pixel_shift), linesize);
 +                    if (h->x264_build != -1) {
 +                        h->hpc.pred8x8l_add[dir](ptr, sl->mb + (i * 16 + p * 256 << pixel_shift), linesize);
 +                    } else
 +                        h->hpc.pred8x8l_filter_add[dir](ptr, sl->mb + (i * 16 + p * 256 << pixel_shift),
 +                                                        (sl-> topleft_samples_available << i) & 0x8000,
 +                                                        (sl->topright_samples_available << i) & 0x4000, linesize);
                  } else {
                      const int nnz = sl->non_zero_count_cache[scan8[i + p * 16]];
                      h->hpc.pred8x8l[dir](ptr, (sl->topleft_samples_available << i) & 0x8000,
                      uint64_t tr_high;
                      if (dir == DIAG_DOWN_LEFT_PRED || dir == VERT_LEFT_PRED) {
                          const int topright_avail = (sl->topright_samples_available << i) & 0x8000;
 -                        assert(sl->mb_y || linesize <= block_offset[i]);
 +                        av_assert2(sl->mb_y || linesize <= block_offset[i]);
                          if (!topright_avail) {
                              if (pixel_shift) {
                                  tr_high  = ((uint16_t *)ptr)[3 - linesize / 2] * 0x0001000100010001ULL;
diff --combined libavcodec/h264_mc_template.c
index e4333a733cd3bac7176fd6976b9316383d9ccfe4,8ae1eef90a3e2ff91d2765789e6fe6e0f8ec01c3..bf8f0ad4737996ae4a6f12a5df7d50ce5de92d9b
@@@ -2,20 -2,20 +2,20 @@@
   * H.26L/H.264/AVC/JVT/14496-10/... decoder
   * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
   *
 - * This file is part of Libav.
 + * This file is part of FFmpeg.
   *
 - * Libav is free software; you can redistribute it and/or
 + * FFmpeg is free software; you can redistribute it and/or
   * modify it under the terms of the GNU Lesser General Public
   * License as published by the Free Software Foundation; either
   * version 2.1 of the License, or (at your option) any later version.
   *
 - * Libav is distributed in the hope that it will be useful,
 + * FFmpeg is distributed in the hope that it will be useful,
   * but WITHOUT ANY WARRANTY; without even the implied warranty of
   * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   * Lesser General Public License for more details.
   *
   * You should have received a copy of the GNU Lesser General Public
 - * License along with Libav; if not, write to the Free Software
 + * License along with FFmpeg; if not, write to the Free Software
   * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
   */
  
@@@ -48,9 -48,9 +48,9 @@@ static void mc_part(const H264Context *
                      const h264_biweight_func *weight_avg,
                      int list0, int list1)
  {
-     if ((sl->use_weight == 2 && list0 && list1 &&
-          (sl->implicit_weight[sl->ref_cache[0][scan8[n]]][sl->ref_cache[1][scan8[n]]][sl->mb_y & 1] != 32)) ||
-         sl->use_weight == 1)
+     if ((sl->pwt.use_weight == 2 && list0 && list1 &&
+          (sl->pwt.implicit_weight[sl->ref_cache[0][scan8[n]]][sl->ref_cache[1][scan8[n]]][sl->mb_y & 1] != 32)) ||
+         sl->pwt.use_weight == 1)
          mc_part_weighted(h, sl, n, square, height, delta, dest_y, dest_cb, dest_cr,
                           x_offset, y_offset, qpix_put, chroma_put,
                           weight_op[0], weight_op[1], weight_avg[0],
@@@ -74,7 -74,7 +74,7 @@@ static void MCFUNC(hl_motion)(const H26
      const int mb_xy   = sl->mb_xy;
      const int mb_type = h->cur_pic.mb_type[mb_xy];
  
 -    assert(IS_INTER(mb_type));
 +    av_assert2(IS_INTER(mb_type));
  
      if (HAVE_THREADS && (h->avctx->active_thread_type & FF_THREAD_FRAME))
          await_references(h, sl);
      } else {
          int i;
  
 -        assert(IS_8X8(mb_type));
 +        av_assert2(IS_8X8(mb_type));
  
          for (i = 0; i < 4; i++) {
              const int sub_mb_type = sl->sub_mb_type[i];
                          IS_DIR(sub_mb_type, 0, 0), IS_DIR(sub_mb_type, 0, 1));
              } else {
                  int j;
 -                assert(IS_SUB_4X4(sub_mb_type));
 +                av_assert2(IS_SUB_4X4(sub_mb_type));
                  for (j = 0; j < 4; j++) {
                      int sub_x_offset = x_offset + 2 * (j & 1);
                      int sub_y_offset = y_offset + (j & 2);
          }
      }
  
 -    prefetch_motion(h, sl, 1, PIXEL_SHIFT, CHROMA_IDC);
 +    if (USES_LIST(mb_type, 1))
 +        prefetch_motion(h, sl, 1, PIXEL_SHIFT, CHROMA_IDC);
  }
  
diff --combined libavcodec/h264_parse.c
index 0000000000000000000000000000000000000000,88caa48f1928b3e38d212bc93b1d74031f04dc47..ebd87b0e5b97ebd1ab7f2fe3f86acea95899af78
mode 000000,100644..100644
--- /dev/null
@@@ -1,0 -1,86 +1,96 @@@
 - * This file is part of Libav.
+ /*
 - * Libav is free software; you can redistribute it and/or
++ * This file is part of FFmpeg.
+  *
 - * Libav is distributed in the hope that it will be useful,
++ * FFmpeg is free software; you can redistribute it and/or
+  * modify it under the terms of the GNU Lesser General Public
+  * License as published by the Free Software Foundation; either
+  * version 2.1 of the License, or (at your option) any later version.
+  *
 - * License along with Libav; if not, write to the Free Software
++ * FFmpeg is distributed in the hope that it will be useful,
+  * but WITHOUT ANY WARRANTY; without even the implied warranty of
+  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+  * Lesser General Public License for more details.
+  *
+  * You should have received a copy of the GNU Lesser General Public
++ * License along with FFmpeg; if not, write to the Free Software
+  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+  */
+ #include "get_bits.h"
+ #include "golomb.h"
+ #include "h264.h"
+ #include "h264_parse.h"
+ int ff_h264_pred_weight_table(GetBitContext *gb, const SPS *sps,
+                               const int *ref_count, int slice_type_nos,
+                               H264PredWeightTable *pwt)
+ {
+     int list, i;
+     int luma_def, chroma_def;
+     pwt->use_weight             = 0;
+     pwt->use_weight_chroma      = 0;
+     pwt->luma_log2_weight_denom = get_ue_golomb(gb);
+     if (sps->chroma_format_idc)
+         pwt->chroma_log2_weight_denom = get_ue_golomb(gb);
++
++    if (pwt->luma_log2_weight_denom > 7U) {
++        av_log(NULL, AV_LOG_ERROR, "luma_log2_weight_denom %d is out of range\n", pwt->luma_log2_weight_denom);
++        pwt->luma_log2_weight_denom = 0;
++    }
++    if (pwt->chroma_log2_weight_denom > 7U) {
++        av_log(NULL, AV_LOG_ERROR, "chroma_log2_weight_denom %d is out of range\n", pwt->chroma_log2_weight_denom);
++        pwt->chroma_log2_weight_denom = 0;
++    }
++
+     luma_def   = 1 << pwt->luma_log2_weight_denom;
+     chroma_def = 1 << pwt->chroma_log2_weight_denom;
+     for (list = 0; list < 2; list++) {
+         pwt->luma_weight_flag[list]   = 0;
+         pwt->chroma_weight_flag[list] = 0;
+         for (i = 0; i < ref_count[list]; i++) {
+             int luma_weight_flag, chroma_weight_flag;
+             luma_weight_flag = get_bits1(gb);
+             if (luma_weight_flag) {
+                 pwt->luma_weight[i][list][0] = get_se_golomb(gb);
+                 pwt->luma_weight[i][list][1] = get_se_golomb(gb);
+                 if (pwt->luma_weight[i][list][0] != luma_def ||
+                     pwt->luma_weight[i][list][1] != 0) {
+                     pwt->use_weight             = 1;
+                     pwt->luma_weight_flag[list] = 1;
+                 }
+             } else {
+                 pwt->luma_weight[i][list][0] = luma_def;
+                 pwt->luma_weight[i][list][1] = 0;
+             }
+             if (sps->chroma_format_idc) {
+                 chroma_weight_flag = get_bits1(gb);
+                 if (chroma_weight_flag) {
+                     int j;
+                     for (j = 0; j < 2; j++) {
+                         pwt->chroma_weight[i][list][j][0] = get_se_golomb(gb);
+                         pwt->chroma_weight[i][list][j][1] = get_se_golomb(gb);
+                         if (pwt->chroma_weight[i][list][j][0] != chroma_def ||
+                             pwt->chroma_weight[i][list][j][1] != 0) {
+                             pwt->use_weight_chroma        = 1;
+                             pwt->chroma_weight_flag[list] = 1;
+                         }
+                     }
+                 } else {
+                     int j;
+                     for (j = 0; j < 2; j++) {
+                         pwt->chroma_weight[i][list][j][0] = chroma_def;
+                         pwt->chroma_weight[i][list][j][1] = 0;
+                     }
+                 }
+             }
+         }
+         if (slice_type_nos != AV_PICTURE_TYPE_B)
+             break;
+     }
+     pwt->use_weight = pwt->use_weight || pwt->use_weight_chroma;
+     return 0;
+ }
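
ff_h264_pred_weight_table() above reads everything as Exp-Golomb codes: get_ue_golomb() returns the unsigned ue(v) codeNum and get_se_golomb() the signed se(v) value following the mapping 0, 1, -1, 2, -2, ... A minimal sketch of both steps with an ad-hoc MSB-first bit reader (no bounds checking, illustrative only), not FFmpeg's GetBitContext:

    #include <stdint.h>

    /* Map a ue(v) codeNum to the se(v) value per H.264 9.1.1. */
    static int ue_to_se(unsigned codenum)
    {
        int v = (codenum + 1) / 2;
        return (codenum & 1) ? v : -v;    /* 0->0, 1->+1, 2->-1, 3->+2, ... */
    }

    /* Decode one ue(v): count leading zero bits, consume the terminating 1,
     * then read that many suffix bits; codeNum = 2^n - 1 + suffix. */
    static unsigned read_ue(const uint8_t *buf, unsigned *bitpos)
    {
        unsigned leading = 0, value = 0, i;

        while (!((buf[*bitpos >> 3] >> (7 - (*bitpos & 7))) & 1)) {
            leading++;
            (*bitpos)++;
        }
        (*bitpos)++;                      /* consume the terminating 1 bit */
        for (i = 0; i < leading; i++, (*bitpos)++)
            value = (value << 1) | ((buf[*bitpos >> 3] >> (7 - (*bitpos & 7))) & 1);
        return (1u << leading) - 1 + value;
    }
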
diff --combined libavcodec/h264_parse.h
index 0000000000000000000000000000000000000000,7bdce04a3b896d060fb6cf0f3d90a0b520bdf019..d1a7af08e819497f76c4ff89df6676ccfea94bf9
mode 000000,100644..100644
--- /dev/null
@@@ -1,0 -1,48 +1,48 @@@
 - * This file is part of Libav.
+ /*
 - * Libav is free software; you can redistribute it and/or
++ * This file is part of FFmpeg.
+  *
 - * Libav is distributed in the hope that it will be useful,
++ * FFmpeg is free software; you can redistribute it and/or
+  * modify it under the terms of the GNU Lesser General Public
+  * License as published by the Free Software Foundation; either
+  * version 2.1 of the License, or (at your option) any later version.
+  *
 - * License along with Libav; if not, write to the Free Software
++ * FFmpeg is distributed in the hope that it will be useful,
+  * but WITHOUT ANY WARRANTY; without even the implied warranty of
+  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+  * Lesser General Public License for more details.
+  *
+  * You should have received a copy of the GNU Lesser General Public
++ * License along with FFmpeg; if not, write to the Free Software
+  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+  */
+ /**
+  * @file
+  * H.264 decoder/parser shared code
+  */
+ #ifndef AVCODEC_H264_PARSE_H
+ #define AVCODEC_H264_PARSE_H
+ #include "get_bits.h"
+ typedef struct H264PredWeightTable {
+     int use_weight;
+     int use_weight_chroma;
+     int luma_log2_weight_denom;
+     int chroma_log2_weight_denom;
+     int luma_weight_flag[2];    ///< 7.4.3.2 luma_weight_lX_flag
+     int chroma_weight_flag[2];  ///< 7.4.3.2 chroma_weight_lX_flag
+     // The following 2 can be changed to int8_t but that causes 10cpu cycles speedloss
+     int luma_weight[48][2][2];
+     int chroma_weight[48][2][2][2];
+     int implicit_weight[48][48][2];
+ } H264PredWeightTable;
+ struct SPS;
+ int ff_h264_pred_weight_table(GetBitContext *gb, const struct SPS *sps,
+                               const int *ref_count, int slice_type_nos,
+                               H264PredWeightTable *pwt);
+ #endif /* AVCODEC_H264_PARSE_H */
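
The table above is indexed the way its consumers in h264_mb.c read it: luma_weight[ref_idx][list] holds a { weight, offset } pair, and chroma_weight inserts a Cb/Cr plane index before that pair. A hypothetical accessor for illustration, assuming this header is included:

    #include "h264_parse.h"

    /* Fetch the explicit luma weight and offset for one reference, mirroring
     * how mc_part_weighted() reads pwt->luma_weight[ref][list][0] and [1]. */
    static void get_luma_wp(const H264PredWeightTable *pwt, int ref_idx, int list,
                            int *weight, int *offset)
    {
        *weight = pwt->luma_weight[ref_idx][list][0];
        *offset = pwt->luma_weight[ref_idx][list][1];
    }
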
diff --combined libavcodec/h264_parser.c
index 9a06d61cf28f74af9c305bc5c7e7ed3de107748a,b5ccce342502ea0f978c327eff04e149e4c0134f..c86f0324f89d7401ed46c8f73942f005f39b7a6b
@@@ -2,20 -2,20 +2,20 @@@
   * H.26L/H.264/AVC/JVT/14496-10/... parser
   * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
   *
 - * This file is part of Libav.
 + * This file is part of FFmpeg.
   *
 - * Libav is free software; you can redistribute it and/or
 + * FFmpeg is free software; you can redistribute it and/or
   * modify it under the terms of the GNU Lesser General Public
   * License as published by the Free Software Foundation; either
   * version 2.1 of the License, or (at your option) any later version.
   *
 - * Libav is distributed in the hope that it will be useful,
 + * FFmpeg is distributed in the hope that it will be useful,
   * but WITHOUT ANY WARRANTY; without even the implied warranty of
   * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   * Lesser General Public License for more details.
   *
   * You should have received a copy of the GNU Lesser General Public
 - * License along with Libav; if not, write to the Free Software
 + * License along with FFmpeg; if not, write to the Free Software
   * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
   */
  
@@@ -25,8 -25,6 +25,8 @@@
   * @author Michael Niedermayer <michaelni@gmx.at>
   */
  
 +#define UNCHECKED_BITSTREAM_READER 1
 +
  #include <assert.h>
  #include <stdint.h>
  
@@@ -55,36 -53,18 +55,36 @@@ static int h264_find_frame_end(H264Pars
                                 int buf_size)
  {
      H264Context *h = &p->h;
 -    int i;
 +    int i, j;
      uint32_t state;
      ParseContext *pc = &p->pc;
 +
 +    int next_avc= h->is_avc ? 0 : buf_size;
  //    mb_addr= pc->mb_addr - 1;
      state = pc->state;
      if (state > 13)
          state = 7;
  
 +    if (h->is_avc && !h->nal_length_size)
 +        av_log(h->avctx, AV_LOG_ERROR, "AVC-parser: nal length size invalid\n");
 +
      for (i = 0; i < buf_size; i++) {
 +        if (i >= next_avc) {
 +            int nalsize = 0;
 +            i = next_avc;
 +            for (j = 0; j < h->nal_length_size; j++)
 +                nalsize = (nalsize << 8) | buf[i++];
 +            if (nalsize <= 0 || nalsize > buf_size - i) {
 +                av_log(h->avctx, AV_LOG_ERROR, "AVC-parser: nal size %d remaining %d\n", nalsize, buf_size - i);
 +                return buf_size;
 +            }
 +            next_avc = i + nalsize;
 +            state    = 5;
 +        }
 +
          if (state == 7) {
 -            i += h->h264dsp.startcode_find_candidate(buf + i, buf_size - i);
 -            if (i < buf_size)
 +            i += h->h264dsp.startcode_find_candidate(buf + i, next_avc - i);
 +            if (i < next_avc)
                  state = 2;
          } else if (state <= 2) {
              if (buf[i] == 1)
                  }
              } else if (nalu_type == NAL_SLICE || nalu_type == NAL_DPA ||
                         nalu_type == NAL_IDR_SLICE) {
 +                state += 8;
 +                continue;
 +            }
 +            state = 7;
 +        } else {
 +            h->parse_history[h->parse_history_count++]= buf[i];
 +            if (h->parse_history_count>5) {
 +                unsigned int mb, last_mb= h->parse_last_mb;
 +                GetBitContext gb;
 +
 +                init_get_bits(&gb, h->parse_history, 8*h->parse_history_count);
 +                h->parse_history_count=0;
 +                mb= get_ue_golomb_long(&gb);
 +                h->parse_last_mb= mb;
                  if (pc->frame_start_found) {
 -                    state += 8;
 -                    continue;
 +                    if (mb <= last_mb)
 +                        goto found;
                  } else
                      pc->frame_start_found = 1;
 +                state = 7;
              }
 -            state = 7;
 -        } else {
 -            // first_mb_in_slice is 0, probably the first nal of a new slice
 -            if (buf[i] & 0x80)
 -                goto found;
 -            state = 7;
          }
      }
      pc->state = state;
 +    if (h->is_avc)
 +        return next_avc;
      return END_NOT_FOUND;
  
  found:
      pc->state             = 7;
      pc->frame_start_found = 0;
 -    return i - (state & 5);
 +    if (h->is_avc)
 +        return next_avc;
 +    return i - (state & 5) - 5 * (state > 7);
  }
  
  static int scan_mmco_reset(AVCodecParserContext *s)
                      unsigned int reordering_of_pic_nums_idc = get_ue_golomb_31(&sl->gb);
  
                      if (reordering_of_pic_nums_idc < 3)
 -                        get_ue_golomb(&sl->gb);
 +                        get_ue_golomb_long(&sl->gb);
                      else if (reordering_of_pic_nums_idc > 3) {
                          av_log(h->avctx, AV_LOG_ERROR,
                                 "illegal reordering_of_pic_nums_idc %d\n",
  
      if ((h->pps.weighted_pred && sl->slice_type_nos == AV_PICTURE_TYPE_P) ||
          (h->pps.weighted_bipred_idc == 1 && sl->slice_type_nos == AV_PICTURE_TYPE_B))
-         ff_pred_weight_table(h, sl);
+         ff_h264_pred_weight_table(&sl->gb, &h->sps, sl->ref_count, sl->slice_type_nos,
+                                   &sl->pwt);
  
      if (get_bits1(&sl->gb)) { // adaptive_ref_pic_marking_mode_flag
          int i;
                  return 1;
  
              if (opcode == MMCO_SHORT2UNUSED || opcode == MMCO_SHORT2LONG)
 -                get_ue_golomb(&sl->gb);
 +                get_ue_golomb_long(&sl->gb); // difference_of_pic_nums_minus1
              if (opcode == MMCO_SHORT2LONG || opcode == MMCO_LONG2UNUSED ||
                  opcode == MMCO_LONG || opcode == MMCO_SET_MAX_LONG)
                  get_ue_golomb_31(&sl->gb);
   */
  static inline int parse_nal_units(AVCodecParserContext *s,
                                    AVCodecContext *avctx,
 -                                  const uint8_t *buf, int buf_size)
 +                                  const uint8_t * const buf, int buf_size)
  {
      H264ParseContext *p = s->priv_data;
      H264Context      *h = &p->h;
      H264SliceContext *sl = &h->slice_ctx[0];
 -    const uint8_t *buf_end = buf + buf_size;
 +    int buf_index, next_avc;
      unsigned int pps_id;
      unsigned int slice_type;
      int state = -1, got_reset = 0;
      const uint8_t *ptr;
 +    int q264 = buf_size >=4 && !memcmp("Q264", buf, 4);
      int field_poc[2];
  
      /* set some sane default values */
  
      h->avctx = avctx;
      ff_h264_reset_sei(h);
 +    h->sei_fpa.frame_packing_arrangement_cancel_flag = -1;
  
      if (!buf_size)
          return 0;
  
 +    buf_index     = 0;
 +    next_avc      = h->is_avc ? 0 : buf_size;
      for (;;) {
 -        int src_length, dst_length, consumed;
 -        buf = avpriv_find_start_code(buf, buf_end, &state);
 -        if (buf >= buf_end)
 -            break;
 -        --buf;
 -        src_length = buf_end - buf;
 +        int src_length, dst_length, consumed, nalsize = 0;
 +
 +        if (buf_index >= next_avc) {
 +            nalsize = get_avc_nalsize(h, buf, buf_size, &buf_index);
 +            if (nalsize < 0)
 +                break;
 +            next_avc = buf_index + nalsize;
 +        } else {
 +            buf_index = find_start_code(buf, buf_size, buf_index, next_avc);
 +            if (buf_index >= buf_size)
 +                break;
 +            if (buf_index >= next_avc)
 +                continue;
 +        }
 +        src_length = next_avc - buf_index;
 +
 +        state = buf[buf_index];
          switch (state & 0x1f) {
          case NAL_SLICE:
          case NAL_IDR_SLICE:
              }
              break;
          }
 -        ptr = ff_h264_decode_nal(h, sl, buf, &dst_length, &consumed, src_length);
 +        ptr = ff_h264_decode_nal(h, sl, buf + buf_index, &dst_length,
 +                                 &consumed, src_length);
          if (!ptr || dst_length < 0)
              break;
  
 +        buf_index += consumed;
 +
          init_get_bits(&h->gb, ptr, 8 * dst_length);
          switch (h->nal_unit_type) {
          case NAL_SPS:
 -            ff_h264_decode_seq_parameter_set(h);
 +            ff_h264_decode_seq_parameter_set(h, 0);
              break;
          case NAL_PPS:
              ff_h264_decode_picture_parameter_set(h, h->gb.size_in_bits);
          /* fall through */
          case NAL_SLICE:
              init_get_bits(&sl->gb, ptr, 8 * dst_length);
 -            get_ue_golomb(&sl->gb);  // skip first_mb_in_slice
 +            get_ue_golomb_long(&sl->gb);  // skip first_mb_in_slice
              slice_type   = get_ue_golomb_31(&sl->gb);
              s->pict_type = ff_h264_golomb_to_pict_type[slice_type % 5];
              if (h->sei_recovery_frame_cnt >= 0) {
              h->sps       = *h->sps_buffers[h->pps.sps_id];
              h->frame_num = get_bits(&sl->gb, h->sps.log2_max_frame_num);
  
 +            if(h->sps.ref_frame_count <= 1 && h->pps.ref_count[0] <= 1 && s->pict_type == AV_PICTURE_TYPE_I)
 +                s->key_frame = 1;
 +
              s->coded_width  = 16 * h->sps.mb_width;
              s->coded_height = 16 * h->sps.mb_height;
              s->width        = s->coded_width  - (h->sps.crop_right + h->sps.crop_left);
              }
  
              if (h->nal_unit_type == NAL_IDR_SLICE)
 -                get_ue_golomb(&sl->gb); /* idr_pic_id */
 +                get_ue_golomb_long(&sl->gb); /* idr_pic_id */
              if (h->sps.poc_type == 0) {
                  h->poc_lsb = get_bits(&sl->gb, h->sps.log2_max_poc_lsb);
  
  
              return 0; /* no need to evaluate the rest */
          }
 -        buf += consumed;
      }
 +    if (q264)
 +        return 0;
      /* didn't find a picture! */
 -    av_log(h->avctx, AV_LOG_ERROR, "missing picture in access unit\n");
 +    av_log(h->avctx, AV_LOG_ERROR, "missing picture in access unit with size %d\n", buf_size);
      return -1;
  }
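
  [Editor's illustrative sketch, not part of the commit: parse_nal_units() now alternates between get_avc_nalsize() for length-prefixed (AVCC) input and find_start_code() for Annex B input. The standalone loop below walks length-prefixed NAL units, assuming a fixed 4-byte prefix for brevity, whereas the real parser honours a nal_length_size of 1, 2 or 4; for_each_avcc_nal is a hypothetical name.]

      #include <stdint.h>
      #include <stddef.h>
      #include <stdio.h>

      /* Walk an AVCC-style buffer where every NAL unit is preceded by a
       * big-endian length field (fixed at 4 bytes in this sketch). */
      static int for_each_avcc_nal(const uint8_t *buf, size_t size)
      {
          size_t i = 0;
          while (i + 4 <= size) {
              uint32_t nalsize = (uint32_t)buf[i] << 24 | buf[i + 1] << 16 |
                                 buf[i + 2] << 8  | buf[i + 3];
              i += 4;
              if (nalsize == 0 || nalsize > size - i)
                  return -1;                 /* corrupt length field */
              int nal_type = buf[i] & 0x1f;  /* low 5 bits of the NAL header */
              printf("NAL type %d, %u bytes\n", nal_type, nalsize);
              i += nalsize;
          }
          return 0;
      }
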
  
@@@ -515,13 -461,14 +516,13 @@@ static int h264_parse(AVCodecParserCont
          p->got_first = 1;
          if (avctx->extradata_size) {
              h->avctx = avctx;
 -            // must be done like in the decoder.
 -            // otherwise opening the parser, creating extradata,
 -            // and then closing and opening again
 +            // must be done like in decoder, otherwise opening the parser,
 +            // letting it create extradata and then closing and opening again
              // will cause has_b_frames to be always set.
 -            // NB: estimate_timings_from_pts behaves exactly like this.
 +            // Note that estimate_timings_from_pts does exactly this.
              if (!avctx->has_b_frames)
                  h->low_delay = 1;
 -            ff_h264_decode_extradata(h);
 +            ff_h264_decode_extradata(h, avctx->extradata, avctx->extradata_size);
          }
      }
  
          }
  
          if (next < 0 && next != END_NOT_FOUND) {
 -            assert(pc->last_index + next >= 0);
 +            av_assert1(pc->last_index + next >= 0);
              h264_find_frame_end(p, &pc->buffer[pc->last_index + next], -next); // update state
          }
      }
  
      parse_nal_units(s, avctx, buf, buf_size);
  
 +    if (avctx->framerate.num)
 +        avctx->time_base = av_inv_q(av_mul_q(avctx->framerate, (AVRational){avctx->ticks_per_frame, 1}));
      if (h->sei_cpb_removal_delay >= 0) {
          s->dts_sync_point    = h->sei_buffering_period_present;
          s->dts_ref_dts_delta = h->sei_cpb_removal_delay;
  static int h264_split(AVCodecContext *avctx,
                        const uint8_t *buf, int buf_size)
  {
 -    int i;
      uint32_t state = -1;
      int has_sps    = 0;
 +    int has_pps    = 0;
 +    const uint8_t *ptr = buf, *end = buf + buf_size;
 +    int nalu_type;
  
 -    for (i = 0; i <= buf_size; i++) {
 -        if ((state & 0xFFFFFF1F) == 0x107)
 +    while (ptr < end) {
 +        ptr = avpriv_find_start_code(ptr, end, &state);
 +        if ((state & 0xFFFFFF00) != 0x100)
 +            break;
 +        nalu_type = state & 0x1F;
 +        if (nalu_type == NAL_SPS) {
              has_sps = 1;
 -        /*  if((state&0xFFFFFF1F) == 0x101 ||
 -         *     (state&0xFFFFFF1F) == 0x102 ||
 -         *     (state&0xFFFFFF1F) == 0x105) {
 +        } else if (nalu_type == NAL_PPS)
 +            has_pps = 1;
 +        /* else if (nalu_type == 0x01 ||
 +         *     nalu_type == 0x02 ||
 +         *     nalu_type == 0x05) {
           *  }
           */
 -        if ((state & 0xFFFFFF00) == 0x100 && (state & 0xFFFFFF1F) != 0x106 &&
 -            (state & 0xFFFFFF1F) != 0x107 && (state & 0xFFFFFF1F) != 0x108 &&
 -            (state & 0xFFFFFF1F) != 0x109 && (state & 0xFFFFFF1F) != 0x10d &&
 -            (state & 0xFFFFFF1F) != 0x10f) {
 +        else if ((nalu_type != NAL_SEI || has_pps) &&
 +                  nalu_type != NAL_AUD && nalu_type != NAL_SPS_EXT &&
 +                  nalu_type != 0x0f) {
              if (has_sps) {
 -                while (i > 4 && buf[i - 5] == 0)
 -                    i--;
 -                return i - 4;
 +                while (ptr - 4 > buf && ptr[-5] == 0)
 +                    ptr--;
 +                return ptr - 4 - buf;
              }
          }
 -        if (i < buf_size)
 -            state = (state << 8) | buf[i];
      }
 +
      return 0;
  }
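
  [Editor's illustrative sketch, not part of the commit: the rewritten h264_split() still relies on the rolling 32-bit state word maintained by avpriv_find_start_code(), where (state & 0xFFFFFF00) == 0x100 marks a 00 00 01 start code. A minimal standalone version of that scan, with a hypothetical function name:]

      #include <stdint.h>
      #include <stddef.h>

      /* Scan for Annex B start codes (00 00 01) by shifting bytes into a
       * rolling 32-bit state word.  Returns the offset of the NAL header
       * byte (the byte right after the start code), or -1 if none found. */
      static ptrdiff_t find_annexb_start_code(const uint8_t *buf, size_t size)
      {
          uint32_t state = ~0u;
          for (size_t i = 0; i < size; i++) {
              state = (state << 8) | buf[i];
              if ((state & 0xFFFFFF00) == 0x100)   /* ... 00 00 01 xx */
                  return (ptrdiff_t)i;
          }
          return -1;
      }
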
  
@@@ -608,7 -547,7 +609,7 @@@ static void h264_close(AVCodecParserCon
      H264Context      *h = &p->h;
      ParseContext *pc = &p->pc;
  
 -    av_free(pc->buffer);
 +    av_freep(&pc->buffer);
      ff_h264_free_context(h);
  }
  
diff --combined libavcodec/h264_refs.c
index 35e254e0950aa21ce575ac560edf40e4ffb326d6,2102f88b58e9771abcd20ab47061617ac9584dd0..1b1526a80360c50e1c01d706e2e234423ed69f6c
@@@ -2,20 -2,20 +2,20 @@@
   * H.26L/H.264/AVC/JVT/14496-10/... reference picture handling
   * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
   *
 - * This file is part of Libav.
 + * This file is part of FFmpeg.
   *
 - * Libav is free software; you can redistribute it and/or
 + * FFmpeg is free software; you can redistribute it and/or
   * modify it under the terms of the GNU Lesser General Public
   * License as published by the Free Software Foundation; either
   * version 2.1 of the License, or (at your option) any later version.
   *
 - * Libav is distributed in the hope that it will be useful,
 + * FFmpeg is distributed in the hope that it will be useful,
   * but WITHOUT ANY WARRANTY; without even the implied warranty of
   * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   * Lesser General Public License for more details.
   *
   * You should have received a copy of the GNU Lesser General Public
 - * License along with Libav; if not, write to the Free Software
 + * License along with FFmpeg; if not, write to the Free Software
   * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
   */
  
@@@ -27,7 -27,6 +27,7 @@@
  
  #include <inttypes.h>
  
 +#include "libavutil/avassert.h"
  #include "internal.h"
  #include "avcodec.h"
  #include "h264.h"
@@@ -80,18 -79,16 +80,18 @@@ static int build_def_list(H264Ref *def
      int  i[2] = { 0 };
      int index = 0;
  
 -    while ((i[0] < len || i[1] < len) && index < def_len) {
 +    while (i[0] < len || i[1] < len) {
          while (i[0] < len && !(in[i[0]] && (in[i[0]]->reference & sel)))
              i[0]++;
          while (i[1] < len && !(in[i[1]] && (in[i[1]]->reference & (sel ^ 3))))
              i[1]++;
 -        if (i[0] < len && index < def_len) {
 +        if (i[0] < len) {
 +            av_assert0(index < def_len);
              in[i[0]]->pic_id = is_long ? i[0] : in[i[0]]->frame_num;
              split_field_copy(&def[index++], in[i[0]++], sel, 1);
          }
 -        if (i[1] < len && index < def_len) {
 +        if (i[1] < len) {
 +            av_assert0(index < def_len);
              in[i[1]]->pic_id = is_long ? i[1] : in[i[1]]->frame_num;
              split_field_copy(&def[index++], in[i[1]++], sel ^ 3, 0);
          }
@@@ -122,18 -119,9 +122,18 @@@ static int add_sorted(H264Picture **sor
      return out_i;
  }
  
 +static int mismatches_ref(H264Context *h, H264Picture *pic)
 +{
 +    AVFrame *f = pic->f;
 +    return (h->cur_pic_ptr->f->width  != f->width ||
 +            h->cur_pic_ptr->f->height != f->height ||
 +            h->cur_pic_ptr->f->format != f->format);
 +}
 +
  static void h264_initialise_ref_list(H264Context *h, H264SliceContext *sl)
  {
      int i, len;
 +    int j;
  
      if (sl->slice_type_nos == AV_PICTURE_TYPE_B) {
          H264Picture *sorted[32];
          for (list = 0; list < 2; list++) {
              len  = add_sorted(sorted,       h->short_ref, h->short_ref_count, cur_poc, 1 ^ list);
              len += add_sorted(sorted + len, h->short_ref, h->short_ref_count, cur_poc, 0 ^ list);
 -            assert(len <= 32);
 +            av_assert0(len <= 32);
  
              len  = build_def_list(sl->ref_list[list], FF_ARRAY_ELEMS(sl->ref_list[0]),
                                    sorted, len, 0, h->picture_structure);
              len += build_def_list(sl->ref_list[list] + len,
                                    FF_ARRAY_ELEMS(sl->ref_list[0]) - len,
                                    h->long_ref, 16, 1, h->picture_structure);
 +            av_assert0(len <= 32);
  
              if (len < sl->ref_count[list])
                  memset(&sl->ref_list[list][len], 0, sizeof(H264Ref) * (sl->ref_count[list] - len));
          len += build_def_list(sl->ref_list[0] + len,
                                FF_ARRAY_ELEMS(sl->ref_list[0]) - len,
                                h-> long_ref, 16, 1, h->picture_structure);
 +        av_assert0(len <= 32);
  
          if (len < sl->ref_count[0])
              memset(&sl->ref_list[0][len], 0, sizeof(H264Ref) * (sl->ref_count[0] - len));
      }
 +#ifdef TRACE
 +    for (i = 0; i < sl->ref_count[0]; i++) {
 +        ff_tlog(h->avctx, "List0: %s fn:%d 0x%p\n",
 +                (sl->ref_list[0][i].parent ? (sl->ref_list[0][i].parent->long_ref ? "LT" : "ST") : "??"),
 +                sl->ref_list[0][i].pic_id,
 +                sl->ref_list[0][i].data[0]);
 +    }
 +    if (sl->slice_type_nos == AV_PICTURE_TYPE_B) {
 +        for (i = 0; i < sl->ref_count[1]; i++) {
 +            ff_tlog(h->avctx, "List1: %s fn:%d 0x%p\n",
 +                    (sl->ref_list[1][i].parent ? (sl->ref_list[1][i].parent->long_ref ? "LT" : "ST") : "??"),
 +                    sl->ref_list[1][i].pic_id,
 +                    sl->ref_list[1][i].data[0]);
 +        }
 +    }
 +#endif
 +
 +    for (j = 0; j<1+(sl->slice_type_nos == AV_PICTURE_TYPE_B); j++) {
 +        for (i = 0; i < sl->ref_count[j]; i++) {
 +            if (sl->ref_list[j][i].parent) {
 +                if (mismatches_ref(h, sl->ref_list[j][i].parent)) {
 +                    av_log(h->avctx, AV_LOG_ERROR, "Discarding mismatching reference\n");
 +                    memset(&sl->ref_list[j][i], 0, sizeof(sl->ref_list[j][i]));
 +                }
 +            }
 +        }
 +    }
 +    for (i = 0; i < sl->list_count; i++)
 +        h->default_ref[i] = sl->ref_list[i][0];
  }
  
  static void print_short_term(H264Context *h);
@@@ -268,7 -225,7 +268,7 @@@ int ff_h264_decode_ref_pic_list_reorder
                  switch (modification_of_pic_nums_idc) {
                  case 0:
                  case 1: {
 -                    const unsigned int abs_diff_pic_num = get_ue_golomb(&sl->gb) + 1;
 +                    const unsigned int abs_diff_pic_num = get_ue_golomb_long(&sl->gb) + 1;
                      int frame_num;
  
                      if (abs_diff_pic_num > h->max_pic_num) {
  
                      long_idx = pic_num_extract(h, pic_id, &pic_structure);
  
 -                    if (long_idx > 31) {
 +                    if (long_idx > 31U) {
                          av_log(h->avctx, AV_LOG_ERROR,
                                 "long_term_pic_idx overflow\n");
                          return AVERROR_INVALIDDATA;
                      }
                      ref = h->long_ref[long_idx];
                      assert(!(ref && !ref->reference));
 -                    if (ref && (ref->reference & pic_structure)) {
 +                    if (ref && (ref->reference & pic_structure) && !mismatches_ref(h, ref)) {
                          ref->pic_id = pic_id;
                          assert(ref->long_ref);
                          i = 0;
      }
      for (list = 0; list < sl->list_count; list++) {
          for (index = 0; index < sl->ref_count[list]; index++) {
 -            if (!sl->ref_list[list][index].parent) {
 -                av_log(h->avctx, AV_LOG_ERROR, "Missing reference picture\n");
 -                if (index == 0 || h->avctx->err_recognition & AV_EF_EXPLODE)
 -                    return AVERROR_INVALIDDATA;
 +            if (   !sl->ref_list[list][index].parent
 +                || (!FIELD_PICTURE(h) && (sl->ref_list[list][index].reference&3) != 3)) {
 +                int i;
 +                av_log(h->avctx, AV_LOG_ERROR, "Missing reference picture, default is %d\n", h->default_ref[list].poc);
 +                for (i = 0; i < FF_ARRAY_ELEMS(h->last_pocs); i++)
 +                    h->last_pocs[i] = INT_MIN;
 +                if (h->default_ref[list].parent
 +                    && !(!FIELD_PICTURE(h) && (h->default_ref[list].reference&3) != 3))
 +                    sl->ref_list[list][index] = h->default_ref[list];
                  else
 -                    sl->ref_list[list][index] = sl->ref_list[list][index - 1];
 +                    return -1;
              }
 +            av_assert0(av_buffer_get_ref_count(sl->ref_list[list][index].parent->f->buf[0]) > 0);
          }
      }
  
  void ff_h264_fill_mbaff_ref_list(H264Context *h, H264SliceContext *sl)
  {
      int list, i, j;
 -    for (list = 0; list < sl->list_count; list++) { //FIXME try list_count
 +    for (list = 0; list < sl->list_count; list++) {
          for (i = 0; i < sl->ref_count[list]; i++) {
              H264Ref *frame = &sl->ref_list[list][i];
              H264Ref *field = &sl->ref_list[list][16 + 2 * i];
              field[1].reference = PICT_BOTTOM_FIELD;
              field[1].poc       = field[1].parent->field_poc[1];
  
-             sl->luma_weight[16 + 2 * i][list][0] = sl->luma_weight[16 + 2 * i + 1][list][0] = sl->luma_weight[i][list][0];
-             sl->luma_weight[16 + 2 * i][list][1] = sl->luma_weight[16 + 2 * i + 1][list][1] = sl->luma_weight[i][list][1];
+             sl->pwt.luma_weight[16 + 2 * i][list][0] = sl->pwt.luma_weight[16 + 2 * i + 1][list][0] = sl->pwt.luma_weight[i][list][0];
+             sl->pwt.luma_weight[16 + 2 * i][list][1] = sl->pwt.luma_weight[16 + 2 * i + 1][list][1] = sl->pwt.luma_weight[i][list][1];
              for (j = 0; j < 2; j++) {
-                 sl->chroma_weight[16 + 2 * i][list][j][0] = sl->chroma_weight[16 + 2 * i + 1][list][j][0] = sl->chroma_weight[i][list][j][0];
-                 sl->chroma_weight[16 + 2 * i][list][j][1] = sl->chroma_weight[16 + 2 * i + 1][list][j][1] = sl->chroma_weight[i][list][j][1];
+                 sl->pwt.chroma_weight[16 + 2 * i][list][j][0] = sl->pwt.chroma_weight[16 + 2 * i + 1][list][j][0] = sl->pwt.chroma_weight[i][list][j][0];
+                 sl->pwt.chroma_weight[16 + 2 * i][list][j][1] = sl->pwt.chroma_weight[16 + 2 * i + 1][list][j][1] = sl->pwt.chroma_weight[i][list][j][1];
              }
          }
      }
@@@ -517,24 -468,11 +517,24 @@@ void ff_h264_remove_all_refs(H264Contex
      }
      assert(h->long_ref_count == 0);
  
 +    if (h->short_ref_count && !h->last_pic_for_ec.f->data[0]) {
 +        ff_h264_unref_picture(h, &h->last_pic_for_ec);
 +        if (h->short_ref[0]->f->buf[0])
 +            ff_h264_ref_picture(h, &h->last_pic_for_ec, h->short_ref[0]);
 +    }
 +
      for (i = 0; i < h->short_ref_count; i++) {
          unreference_pic(h, h->short_ref[i], 0);
          h->short_ref[i] = NULL;
      }
      h->short_ref_count = 0;
 +
 +    memset(h->default_ref, 0, sizeof(h->default_ref));
 +    for (i = 0; i < h->nb_slice_ctx; i++) {
 +        H264SliceContext *sl = &h->slice_ctx[i];
 +        sl->list_count = sl->ref_count[0] = sl->ref_count[1] = 0;
 +        memset(sl->ref_list, 0, sizeof(sl->ref_list));
 +    }
  }
  
  /**
@@@ -576,11 -514,8 +576,11 @@@ static int check_opcodes(MMCO *mmco1, M
      int i;
  
      for (i = 0; i < n_mmcos; i++) {
 -        if (mmco1[i].opcode != mmco2[i].opcode)
 +        if (mmco1[i].opcode != mmco2[i].opcode) {
 +            av_log(NULL, AV_LOG_ERROR, "MMCO opcode [%d, %d] at %d mismatches between slices\n",
 +                   mmco1[i].opcode, mmco2[i].opcode, i);
              return -1;
 +        }
      }
  
      return 0;
@@@ -591,8 -526,10 +591,8 @@@ int ff_generate_sliding_window_mmcos(H2
      MMCO mmco_temp[MAX_MMCO_COUNT], *mmco = first_slice ? h->mmco : mmco_temp;
      int mmco_index = 0, i = 0;
  
 -    assert(h->long_ref_count + h->short_ref_count <= h->sps.ref_frame_count);
 -
      if (h->short_ref_count &&
 -        h->long_ref_count + h->short_ref_count == h->sps.ref_frame_count &&
 +        h->long_ref_count + h->short_ref_count >= h->sps.ref_frame_count &&
          !(FIELD_PICTURE(h) && !h->first_field && h->cur_pic_ptr->reference)) {
          mmco[0].opcode        = MMCO_SHORT2UNUSED;
          mmco[0].short_pic_num = h->short_ref[h->short_ref_count - 1]->frame_num;
                 (mmco_index != h->mmco_index ||
                  (i = check_opcodes(h->mmco, mmco_temp, mmco_index)))) {
          av_log(h->avctx, AV_LOG_ERROR,
 -               "Inconsistent MMCO state between slices [%d, %d, %d]\n",
 -               mmco_index, h->mmco_index, i);
 +               "Inconsistent MMCO state between slices [%d, %d]\n",
 +               mmco_index, h->mmco_index);
          return AVERROR_INVALIDDATA;
      }
      return 0;
  int ff_h264_execute_ref_pic_marking(H264Context *h, MMCO *mmco, int mmco_count)
  {
      int i, av_uninit(j);
 +    int pps_count;
 +    int pps_ref_count[2] = {0};
      int current_ref_assigned = 0, err = 0;
      H264Picture *av_uninit(pic);
  
                  if (mmco[i].opcode != MMCO_SHORT2LONG ||
                      !h->long_ref[mmco[i].long_arg]    ||
                      h->long_ref[mmco[i].long_arg]->frame_num != frame_num) {
 -                    av_log(h->avctx, AV_LOG_ERROR, "mmco: unref short failure\n");
 +                    av_log(h->avctx, h->short_ref_count ? AV_LOG_ERROR : AV_LOG_DEBUG, "mmco: unref short failure\n");
                      err = AVERROR_INVALIDDATA;
                  }
                  continue;
                       * Report the problem and keep the pair where it is,
                       * and mark this field valid.
                       */
 -            if (h->short_ref[0] == h->cur_pic_ptr)
 +            if (h->short_ref[0] == h->cur_pic_ptr) {
 +                av_log(h->avctx, AV_LOG_ERROR, "mmco: cannot assign current picture to short and long at the same time\n");
                  remove_short_at_index(h, 0);
 +            }
  
              /* make sure the current picture is not already assigned as a long ref */
              if (h->cur_pic_ptr->long_ref) {
                  for (j = 0; j < FF_ARRAY_ELEMS(h->long_ref); j++) {
 -                    if (h->long_ref[j] == h->cur_pic_ptr)
 +                    if (h->long_ref[j] == h->cur_pic_ptr) {
 +                        if (j != mmco[i].long_arg)
 +                            av_log(h->avctx, AV_LOG_ERROR, "mmco: cannot assign current picture to 2 long term references\n");
                          remove_long(h, j, 0);
 +                    }
                  }
              }
  
 -
              if (h->long_ref[mmco[i].long_arg] != h->cur_pic_ptr) {
 +                av_assert0(!h->cur_pic_ptr->long_ref);
                  remove_long(h, mmco[i].long_arg, 0);
  
                  h->long_ref[mmco[i].long_arg]           = h->cur_pic_ptr;
              h->frame_num  = h->cur_pic_ptr->frame_num = 0;
              h->mmco_reset = 1;
              h->cur_pic_ptr->mmco_reset = 1;
 +            for (j = 0; j < MAX_DELAYED_PIC_COUNT; j++)
 +                h->last_pocs[j] = INT_MIN;
              break;
          default: assert(0);
          }
           */
          if (h->short_ref_count && h->short_ref[0] == h->cur_pic_ptr) {
              /* Just mark the second field valid */
 -            h->cur_pic_ptr->reference = PICT_FRAME;
 +            h->cur_pic_ptr->reference |= h->picture_structure;
          } else if (h->cur_pic_ptr->long_ref) {
              av_log(h->avctx, AV_LOG_ERROR, "illegal short term reference "
                                             "assignment for second field "
          }
      }
  
 -    if (h->long_ref_count + h->short_ref_count -
 -        (h->short_ref[0] == h->cur_pic_ptr) > h->sps.ref_frame_count) {
 +    if (h->long_ref_count + h->short_ref_count > FFMAX(h->sps.ref_frame_count, 1)) {
  
          /* We have too many reference frames, probably due to corrupted
           * stream. Need to discard one frame. Prevents overrun of the
          }
      }
  
 +    for (i = 0; i<h->short_ref_count; i++) {
 +        pic = h->short_ref[i];
 +        if (pic->invalid_gap) {
 +            int d = av_mod_uintp2(h->cur_pic_ptr->frame_num - pic->frame_num, h->sps.log2_max_frame_num);
 +            if (d > h->sps.ref_frame_count)
 +                remove_short(h, pic->frame_num, 0);
 +        }
 +    }
 +
      print_short_term(h);
      print_long_term(h);
 +
 +    pps_count = 0;
 +    for (i = 0; i < FF_ARRAY_ELEMS(h->pps_buffers); i++) {
 +        pps_count += !!h->pps_buffers[i];
 +        pps_ref_count[0] = FFMAX(pps_ref_count[0], h->pps.ref_count[0]);
 +        pps_ref_count[1] = FFMAX(pps_ref_count[1], h->pps.ref_count[1]);
 +    }
 +
 +    if (   err >= 0
 +        && h->long_ref_count==0
 +        && (   h->short_ref_count<=2
 +            || pps_ref_count[0] <= 1 + (h->picture_structure != PICT_FRAME) && pps_ref_count[1] <= 1)
 +        && pps_ref_count[0]<=2 + (h->picture_structure != PICT_FRAME) + (2*!h->has_recovery_point)
 +        && h->cur_pic_ptr->f->pict_type == AV_PICTURE_TYPE_I){
 +        h->cur_pic_ptr->recovered |= 1;
 +        if(!h->avctx->has_b_frames)
 +            h->frame_recovered |= FRAME_RECOVERED_SEI;
 +    }
 +
      return (h->avctx->err_recognition & AV_EF_EXPLODE) ? err : 0;
  }
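
  [Editor's illustrative sketch, not part of the commit: the invalid_gap pruning added above measures how far a short-term reference lies behind the current picture with av_mod_uintp2(), because frame_num is transmitted modulo 2^log2_max_frame_num. The modular distance it computes looks like this; frame_num_distance is a hypothetical name.]

      /* frame_num wraps at 2^log2_max_frame_num, so distances must be taken
       * modulo the same power of two, mirroring av_mod_uintp2() above. */
      static unsigned frame_num_distance(unsigned cur, unsigned ref,
                                         unsigned log2_max_frame_num)
      {
          unsigned mask = (1u << log2_max_frame_num) - 1;
          return (cur - ref) & mask;
      }
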
  
@@@ -831,7 -732,7 +831,7 @@@ int ff_h264_decode_ref_pic_marking(H264
                                     int first_slice)
  {
      int i, ret;
 -    MMCO mmco_temp[MAX_MMCO_COUNT], *mmco = first_slice ? h->mmco : mmco_temp;
 +    MMCO mmco_temp[MAX_MMCO_COUNT], *mmco = mmco_temp;
      int mmco_index = 0;
  
      if (h->nal_unit_type == NAL_IDR_SLICE) { // FIXME fields
                  mmco[i].opcode = opcode;
                  if (opcode == MMCO_SHORT2UNUSED || opcode == MMCO_SHORT2LONG) {
                      mmco[i].short_pic_num =
 -                        (h->curr_pic_num - get_ue_golomb(gb) - 1) &
 +                        (h->curr_pic_num - get_ue_golomb_long(gb) - 1) &
                              (h->max_pic_num - 1);
  #if 0
                      if (mmco[i].short_pic_num >= h->short_ref_count ||
      }
  
      if (first_slice && mmco_index != -1) {
 +        memcpy(h->mmco, mmco_temp, sizeof(h->mmco));
          h->mmco_index = mmco_index;
      } else if (!first_slice && mmco_index >= 0 &&
                 (mmco_index != h->mmco_index ||
diff --combined libavcodec/h264_slice.c
index f0f0a50a55a260eb5912e34e9504b96725288d23,49d7e6c21533446e7752f7d77c8190c2e1aeb02b..880b69602e3e7425eef244a06f7859a4abbf6193
@@@ -2,20 -2,20 +2,20 @@@
   * H.26L/H.264/AVC/JVT/14496-10/... decoder
   * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
   *
 - * This file is part of Libav.
 + * This file is part of FFmpeg.
   *
 - * Libav is free software; you can redistribute it and/or
 + * FFmpeg is free software; you can redistribute it and/or
   * modify it under the terms of the GNU Lesser General Public
   * License as published by the Free Software Foundation; either
   * version 2.1 of the License, or (at your option) any later version.
   *
 - * Libav is distributed in the hope that it will be useful,
 + * FFmpeg is distributed in the hope that it will be useful,
   * but WITHOUT ANY WARRANTY; without even the implied warranty of
   * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   * Lesser General Public License for more details.
   *
   * You should have received a copy of the GNU Lesser General Public
 - * License along with Libav; if not, write to the Free Software
 + * License along with FFmpeg; if not, write to the Free Software
   * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
   */
  
  static const uint8_t rem6[QP_MAX_NUM + 1] = {
      0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 4, 5, 0, 1, 2,
      3, 4, 5, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 4, 5,
 -    0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3,
 +    0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 4, 5, 0, 1, 2,
 +    3, 4, 5, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 4, 5, 0, 1, 2, 3, 4, 5,
 +    0, 1, 2, 3,
  };
  
  static const uint8_t div6[QP_MAX_NUM + 1] = {
      0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 3,  3,  3,
      3, 3, 3, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 6, 6, 6, 6,  6,  6,
 -    7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10,
 +    7, 7, 7, 7, 7, 7, 8, 8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 10, 10, 10,
 +   10,10,10,11,11,11,11,11,11,12,12,12,12,12,12,13,13,13, 13, 13, 13,
 +   14,14,14,14,
  };
  
 -static const uint8_t field_scan[16] = {
 +static const uint8_t field_scan[16+1] = {
      0 + 0 * 4, 0 + 1 * 4, 1 + 0 * 4, 0 + 2 * 4,
      0 + 3 * 4, 1 + 1 * 4, 1 + 2 * 4, 1 + 3 * 4,
      2 + 0 * 4, 2 + 1 * 4, 2 + 2 * 4, 2 + 3 * 4,
      3 + 0 * 4, 3 + 1 * 4, 3 + 2 * 4, 3 + 3 * 4,
  };
  
 -static const uint8_t field_scan8x8[64] = {
 +static const uint8_t field_scan8x8[64+1] = {
      0 + 0 * 8, 0 + 1 * 8, 0 + 2 * 8, 1 + 0 * 8,
      1 + 1 * 8, 0 + 3 * 8, 0 + 4 * 8, 1 + 2 * 8,
      2 + 0 * 8, 1 + 3 * 8, 0 + 5 * 8, 0 + 6 * 8,
@@@ -86,7 -82,7 +86,7 @@@
      7 + 4 * 8, 7 + 5 * 8, 7 + 6 * 8, 7 + 7 * 8,
  };
  
 -static const uint8_t field_scan8x8_cavlc[64] = {
 +static const uint8_t field_scan8x8_cavlc[64+1] = {
      0 + 0 * 8, 1 + 1 * 8, 2 + 0 * 8, 0 + 7 * 8,
      2 + 2 * 8, 2 + 3 * 8, 2 + 4 * 8, 3 + 3 * 8,
      3 + 4 * 8, 4 + 3 * 8, 4 + 4 * 8, 5 + 3 * 8,
  };
  
  // zigzag_scan8x8_cavlc[i] = zigzag_scan8x8[(i/4) + 16*(i%4)]
 -static const uint8_t zigzag_scan8x8_cavlc[64] = {
 +static const uint8_t zigzag_scan8x8_cavlc[64+1] = {
      0 + 0 * 8, 1 + 1 * 8, 1 + 2 * 8, 2 + 2 * 8,
      4 + 1 * 8, 0 + 5 * 8, 3 + 3 * 8, 7 + 0 * 8,
      3 + 4 * 8, 1 + 7 * 8, 5 + 3 * 8, 6 + 3 * 8,
@@@ -171,9 -167,9 +171,9 @@@ static int alloc_scratch_buffers(H264Sl
      // (= 21x21 for  h264)
      av_fast_malloc(&sl->edge_emu_buffer, &sl->edge_emu_buffer_allocated, alloc_size * 2 * 21);
  
 -    av_fast_malloc(&sl->top_borders[0], &sl->top_borders_allocated[0],
 +    av_fast_mallocz(&sl->top_borders[0], &sl->top_borders_allocated[0],
                     h->mb_width * 16 * 3 * sizeof(uint8_t) * 2);
 -    av_fast_malloc(&sl->top_borders[1], &sl->top_borders_allocated[1],
 +    av_fast_mallocz(&sl->top_borders[1], &sl->top_borders_allocated[1],
                     h->mb_width * 16 * 3 * sizeof(uint8_t) * 2);
  
      if (!sl->bipred_scratchpad || !sl->edge_emu_buffer ||
@@@ -232,10 -228,6 +232,10 @@@ static int alloc_picture(H264Context *h
      if (ret < 0)
          goto fail;
  
 +    pic->crop     = h->sps.crop;
 +    pic->crop_top = h->sps.crop_top;
 +    pic->crop_left= h->sps.crop_left;
 +
      if (h->avctx->hwaccel) {
          const AVHWAccel *hwaccel = h->avctx->hwaccel;
          av_assert0(!pic->hwaccel_picture_private);
              pic->hwaccel_picture_private = pic->hwaccel_priv_buf->data;
          }
      }
 +    if (CONFIG_GRAY && !h->avctx->hwaccel && h->flags & AV_CODEC_FLAG_GRAY && pic->f->data[2]) {
 +        int h_chroma_shift, v_chroma_shift;
 +        av_pix_fmt_get_chroma_sub_sample(pic->f->format,
 +                                         &h_chroma_shift, &v_chroma_shift);
 +
 +        for(i=0; i<AV_CEIL_RSHIFT(pic->f->height, v_chroma_shift); i++) {
 +            memset(pic->f->data[1] + pic->f->linesize[1]*i,
 +                   0x80, AV_CEIL_RSHIFT(pic->f->width, h_chroma_shift));
 +            memset(pic->f->data[2] + pic->f->linesize[2]*i,
 +                   0x80, AV_CEIL_RSHIFT(pic->f->width, h_chroma_shift));
 +        }
 +    }
  
      if (!h->qscale_table_pool) {
          ret = init_table_pools(h);
@@@ -310,6 -290,33 +310,6 @@@ static int find_unused_picture(H264Cont
      return i;
  }
  
 -static int initialize_cur_frame(H264Context *h)
 -{
 -    H264Picture *cur;
 -    int ret;
 -
 -    release_unused_pictures(h, 1);
 -    ff_h264_unref_picture(h, &h->cur_pic);
 -    h->cur_pic_ptr = NULL;
 -
 -    ret = find_unused_picture(h);
 -    if (ret < 0) {
 -        av_log(h->avctx, AV_LOG_ERROR, "no frame buffer available\n");
 -        return ret;
 -    }
 -    cur = &h->DPB[ret];
 -
 -    ret = alloc_picture(h, cur);
 -    if (ret < 0)
 -        return ret;
 -
 -    ret = ff_h264_ref_picture(h, &h->cur_pic, cur);
 -    if (ret < 0)
 -        return ret;
 -    h->cur_pic_ptr = cur;
 -
 -    return 0;
 -}
  
  static void init_dequant8_coeff_table(H264Context *h)
  {
@@@ -364,12 -371,10 +364,12 @@@ static void init_dequant4_coeff_table(H
      }
  }
  
 -void h264_init_dequant_tables(H264Context *h)
 +void ff_h264_init_dequant_tables(H264Context *h)
  {
      int i, x;
      init_dequant4_coeff_table(h);
 +    memset(h->dequant8_coeff, 0, sizeof(h->dequant8_coeff));
 +
      if (h->pps.transform_8x8_mode)
          init_dequant8_coeff_table(h);
      if (h->sps.transform_bypass) {
      }
  }
  
 -#define IN_RANGE(a, b, size) (((a) >= (b)) && ((a) < ((b) + (size))))
 +#define IN_RANGE(a, b, size) (((void*)(a) >= (void*)(b)) && ((void*)(a) < (void*)((b) + (size))))
  
  #define REBASE_PICTURE(pic, new_ctx, old_ctx)             \
 -    ((pic && pic >= old_ctx->DPB &&                       \
 -      pic < old_ctx->DPB + H264_MAX_PICTURE_COUNT) ?          \
 -     &new_ctx->DPB[pic - old_ctx->DPB] : NULL)
 +    (((pic) && (pic) >= (old_ctx)->DPB &&                       \
 +      (pic) < (old_ctx)->DPB + H264_MAX_PICTURE_COUNT) ?          \
 +     &(new_ctx)->DPB[(pic) - (old_ctx)->DPB] : NULL)
  
  static void copy_picture_range(H264Picture **to, H264Picture **from, int count,
                                 H264Context *new_base,
      int i;
  
      for (i = 0; i < count; i++) {
 -        assert((IN_RANGE(from[i], old_base, sizeof(*old_base)) ||
 -                IN_RANGE(from[i], old_base->DPB,
 -                         sizeof(H264Picture) * H264_MAX_PICTURE_COUNT) ||
 -                !from[i]));
 +        av_assert1(!from[i] ||
 +                   IN_RANGE(from[i], old_base, 1) ||
 +                   IN_RANGE(from[i], old_base->DPB, H264_MAX_PICTURE_COUNT));
          to[i] = REBASE_PICTURE(from[i], new_base, old_base);
      }
  }
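
  [Editor's illustrative sketch, not part of the commit: copy_picture_range() translates DPB pointers from the source thread context into the destination one via REBASE_PICTURE, keeping the byte offset into the array while swapping the base address. A generic sketch of that idea with hypothetical names; like the macro, it assumes the pointer genuinely comes from the old array.]

      #include <stddef.h>

      /* Map a pointer into old_base[0..count) onto the same slot in new_base. */
      static void *rebase_ptr(void *ptr, void *old_base, void *new_base,
                              size_t elem_size, size_t count)
      {
          char *p = ptr, *ob = old_base;
          if (!p || p < ob || p >= ob + elem_size * count)
              return NULL;                   /* not inside the old array */
          return (char *)new_base + (p - ob);
      }
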
@@@ -425,8 -431,8 +425,8 @@@ static int copy_parameter_set(void **to
  }
  
  #define copy_fields(to, from, start_field, end_field)                   \
 -    memcpy(&to->start_field, &from->start_field,                        \
 -           (char *)&to->end_field - (char *)&to->start_field)
 +    memcpy(&(to)->start_field, &(from)->start_field,                        \
 +           (char *)&(to)->end_field - (char *)&(to)->start_field)
  
  static int h264_slice_header_init(H264Context *h);
  
@@@ -438,7 -444,7 +438,7 @@@ int ff_h264_update_thread_context(AVCod
      int need_reinit = 0;
      int i, ret;
  
 -    if (dst == src || !h1->context_initialized)
 +    if (dst == src)
          return 0;
  
      if (inited &&
           h->sps.bit_depth_luma    != h1->sps.bit_depth_luma    ||
           h->sps.chroma_format_idc != h1->sps.chroma_format_idc ||
           h->sps.colorspace        != h1->sps.colorspace)) {
 +
          need_reinit = 1;
      }
  
 +    /* copy block_offset since frame_start may not be called */
 +    memcpy(h->block_offset, h1->block_offset, sizeof(h->block_offset));
 +
      // SPS/PPS
      if ((ret = copy_parameter_set((void **)h->sps_buffers,
                                    (void **)h1->sps_buffers,
          h->mb_stride = h1->mb_stride;
          h->b_stride  = h1->b_stride;
  
 -        if ((err = h264_slice_header_init(h)) < 0) {
 -            av_log(h->avctx, AV_LOG_ERROR, "h264_slice_header_init() failed");
 -            return err;
 +        if (h->context_initialized || h1->context_initialized) {
 +            if ((err = h264_slice_header_init(h)) < 0) {
 +                av_log(h->avctx, AV_LOG_ERROR, "h264_slice_header_init() failed");
 +                return err;
 +            }
          }
 -
          /* copy block_offset since frame_start may not be called */
          memcpy(h->block_offset, h1->block_offset, sizeof(h->block_offset));
      }
      h->picture_structure    = h1->picture_structure;
      h->droppable            = h1->droppable;
      h->low_delay            = h1->low_delay;
 +    h->backup_width         = h1->backup_width;
 +    h->backup_height        = h1->backup_height;
 +    h->backup_pix_fmt       = h1->backup_pix_fmt;
  
      for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
          ff_h264_unref_picture(h, &h->DPB[i]);
      // extradata/NAL handling
      h->is_avc = h1->is_avc;
      h->nal_length_size = h1->nal_length_size;
 +    h->x264_build      = h1->x264_build;
  
      // Dequantization matrices
      // FIXME these are big - can they be only copied when PPS changes?
      copy_picture_range(h->delayed_pic, h1->delayed_pic,
                         MAX_DELAYED_PIC_COUNT + 2, h, h1);
  
 +    h->frame_recovered       = h1->frame_recovered;
 +
      if (!h->cur_pic_ptr)
          return 0;
  
      h->prev_frame_num        = h->frame_num;
  
      h->recovery_frame        = h1->recovery_frame;
 -    h->frame_recovered       = h1->frame_recovered;
  
      return err;
  }
@@@ -570,33 -566,16 +570,33 @@@ static int h264_frame_start(H264Contex
      H264Picture *pic;
      int i, ret;
      const int pixel_shift = h->pixel_shift;
 +    int c[4] = {
 +        1<<(h->sps.bit_depth_luma-1),
 +        1<<(h->sps.bit_depth_chroma-1),
 +        1<<(h->sps.bit_depth_chroma-1),
 +        -1
 +    };
  
 -    ret = initialize_cur_frame(h);
 -    if (ret < 0)
 -        return ret;
 +    if (!ff_thread_can_start_frame(h->avctx)) {
 +        av_log(h->avctx, AV_LOG_ERROR, "Attempt to start a frame outside SETUP state\n");
 +        return -1;
 +    }
 +
 +    release_unused_pictures(h, 1);
 +    h->cur_pic_ptr = NULL;
 +
 +    i = find_unused_picture(h);
 +    if (i < 0) {
 +        av_log(h->avctx, AV_LOG_ERROR, "no frame buffer available\n");
 +        return i;
 +    }
 +    pic = &h->DPB[i];
  
 -    pic = h->cur_pic_ptr;
      pic->reference              = h->droppable ? 0 : h->picture_structure;
      pic->f->coded_picture_number = h->coded_picture_number++;
      pic->field_picture          = h->picture_structure != PICT_FRAME;
      pic->frame_num               = h->frame_num;
 +
      /*
       * Zero key_frame here; IDR markings per slice in frame or fields are ORed
       * in later.
      pic->f->key_frame = 0;
      pic->mmco_reset  = 0;
      pic->recovered   = 0;
 +    pic->invalid_gap = 0;
 +    pic->sei_recovery_frame_cnt = h->sei_recovery_frame_cnt;
  
 -    if (CONFIG_ERROR_RESILIENCE && h->enable_er)
 +    if ((ret = alloc_picture(h, pic)) < 0)
 +        return ret;
 +    if(!h->frame_recovered && !h->avctx->hwaccel
 +#if FF_API_CAP_VDPAU
 +       && !(h->avctx->codec->capabilities & AV_CODEC_CAP_HWACCEL_VDPAU)
 +#endif
 +       )
 +        ff_color_frame(pic->f, c);
 +
 +    h->cur_pic_ptr = pic;
 +    ff_h264_unref_picture(h, &h->cur_pic);
 +    if (CONFIG_ERROR_RESILIENCE) {
 +        ff_h264_set_erpic(&h->slice_ctx[0].er.cur_pic, NULL);
 +    }
 +
 +    if ((ret = ff_h264_ref_picture(h, &h->cur_pic, h->cur_pic_ptr)) < 0)
 +        return ret;
 +
 +    for (i = 0; i < h->nb_slice_ctx; i++) {
 +        h->slice_ctx[i].linesize   = h->cur_pic_ptr->f->linesize[0];
 +        h->slice_ctx[i].uvlinesize = h->cur_pic_ptr->f->linesize[1];
 +    }
 +
 +    if (CONFIG_ERROR_RESILIENCE && h->enable_er) {
          ff_er_frame_start(&h->slice_ctx[0].er);
 +        ff_h264_set_erpic(&h->slice_ctx[0].er.last_pic, NULL);
 +        ff_h264_set_erpic(&h->slice_ctx[0].er.next_pic, NULL);
 +    }
  
      for (i = 0; i < 16; i++) {
          h->block_offset[i]           = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 4 * pic->f->linesize[0] * ((scan8[i] - scan8[0]) >> 3);
          h->block_offset[48 + 32 + i] = (4 * ((scan8[i] - scan8[0]) & 7) << pixel_shift) + 8 * pic->f->linesize[1] * ((scan8[i] - scan8[0]) >> 3);
      }
  
 -    /* Some macroblocks can be accessed before they're available in case
 -     * of lost slices, MBAFF or threading. */
 -    memset(h->slice_table, -1,
 -           (h->mb_height * h->mb_stride - 1) * sizeof(*h->slice_table));
 -
      /* We mark the current picture as non-reference after allocating it, so
       * that if we break out due to an error it can be released automatically
       * in the next ff_mpv_frame_start().
@@@ -770,8 -726,8 +770,8 @@@ static void implicit_weight_table(cons
      int ref0, ref1, i, cur_poc, ref_start, ref_count0, ref_count1;
  
      for (i = 0; i < 2; i++) {
-         sl->luma_weight_flag[i]   = 0;
-         sl->chroma_weight_flag[i] = 0;
+         sl->pwt.luma_weight_flag[i]   = 0;
+         sl->pwt.chroma_weight_flag[i] = 0;
      }
  
      if (field < 0) {
              cur_poc = h->cur_pic_ptr->field_poc[h->picture_structure - 1];
          }
          if (sl->ref_count[0] == 1 && sl->ref_count[1] == 1 && !FRAME_MBAFF(h) &&
 -            sl->ref_list[0][0].poc + sl->ref_list[1][0].poc == 2 * cur_poc) {
 +            sl->ref_list[0][0].poc + (int64_t)sl->ref_list[1][0].poc == 2 * cur_poc) {
-             sl->use_weight        = 0;
-             sl->use_weight_chroma = 0;
+             sl->pwt.use_weight        = 0;
+             sl->pwt.use_weight_chroma = 0;
              return;
          }
          ref_start  = 0;
          ref_count1 = 16 + 2 * sl->ref_count[1];
      }
  
-     sl->use_weight               = 2;
-     sl->use_weight_chroma        = 2;
-     sl->luma_log2_weight_denom   = 5;
-     sl->chroma_log2_weight_denom = 5;
+     sl->pwt.use_weight               = 2;
+     sl->pwt.use_weight_chroma        = 2;
+     sl->pwt.luma_log2_weight_denom   = 5;
+     sl->pwt.chroma_log2_weight_denom = 5;
  
      for (ref0 = ref_start; ref0 < ref_count0; ref0++) {
 -        int poc0 = sl->ref_list[0][ref0].poc;
 +        int64_t poc0 = sl->ref_list[0][ref0].poc;
          for (ref1 = ref_start; ref1 < ref_count1; ref1++) {
              int w = 32;
              if (!sl->ref_list[0][ref0].parent->long_ref && !sl->ref_list[1][ref1].parent->long_ref) {
                  }
              }
              if (field < 0) {
-                 sl->implicit_weight[ref0][ref1][0] =
-                 sl->implicit_weight[ref0][ref1][1] = w;
+                 sl->pwt.implicit_weight[ref0][ref1][0] =
+                 sl->pwt.implicit_weight[ref0][ref1][1] = w;
              } else {
-                 sl->implicit_weight[ref0][ref1][field] = w;
+                 sl->pwt.implicit_weight[ref0][ref1][field] = w;
              }
          }
      }
@@@ -833,13 -789,13 +833,13 @@@ static void init_scan_tables(H264Contex
  {
      int i;
      for (i = 0; i < 16; i++) {
 -#define TRANSPOSE(x) (x >> 2) | ((x << 2) & 0xF)
 +#define TRANSPOSE(x) ((x) >> 2) | (((x) << 2) & 0xF)
          h->zigzag_scan[i] = TRANSPOSE(ff_zigzag_scan[i]);
          h->field_scan[i]  = TRANSPOSE(field_scan[i]);
  #undef TRANSPOSE
      }
      for (i = 0; i < 64; i++) {
 -#define TRANSPOSE(x) (x >> 3) | ((x & 7) << 3)
 +#define TRANSPOSE(x) ((x) >> 3) | (((x) & 7) << 3)
          h->zigzag_scan8x8[i]       = TRANSPOSE(ff_zigzag_direct[i]);
          h->zigzag_scan8x8_cavlc[i] = TRANSPOSE(zigzag_scan8x8_cavlc[i]);
          h->field_scan8x8[i]        = TRANSPOSE(field_scan8x8[i]);
  #undef TRANSPOSE
      }
      if (h->sps.transform_bypass) { // FIXME same ugly
 -        h->zigzag_scan_q0          = ff_zigzag_scan;
 -        h->zigzag_scan8x8_q0       = ff_zigzag_direct;
 -        h->zigzag_scan8x8_cavlc_q0 = zigzag_scan8x8_cavlc;
 -        h->field_scan_q0           = field_scan;
 -        h->field_scan8x8_q0        = field_scan8x8;
 -        h->field_scan8x8_cavlc_q0  = field_scan8x8_cavlc;
 +        memcpy(h->zigzag_scan_q0          , ff_zigzag_scan          , sizeof(h->zigzag_scan_q0         ));
 +        memcpy(h->zigzag_scan8x8_q0       , ff_zigzag_direct        , sizeof(h->zigzag_scan8x8_q0      ));
 +        memcpy(h->zigzag_scan8x8_cavlc_q0 , zigzag_scan8x8_cavlc    , sizeof(h->zigzag_scan8x8_cavlc_q0));
 +        memcpy(h->field_scan_q0           , field_scan              , sizeof(h->field_scan_q0          ));
 +        memcpy(h->field_scan8x8_q0        , field_scan8x8           , sizeof(h->field_scan8x8_q0       ));
 +        memcpy(h->field_scan8x8_cavlc_q0  , field_scan8x8_cavlc     , sizeof(h->field_scan8x8_cavlc_q0 ));
      } else {
 -        h->zigzag_scan_q0          = h->zigzag_scan;
 -        h->zigzag_scan8x8_q0       = h->zigzag_scan8x8;
 -        h->zigzag_scan8x8_cavlc_q0 = h->zigzag_scan8x8_cavlc;
 -        h->field_scan_q0           = h->field_scan;
 -        h->field_scan8x8_q0        = h->field_scan8x8;
 -        h->field_scan8x8_cavlc_q0  = h->field_scan8x8_cavlc;
 +        memcpy(h->zigzag_scan_q0          , h->zigzag_scan          , sizeof(h->zigzag_scan_q0         ));
 +        memcpy(h->zigzag_scan8x8_q0       , h->zigzag_scan8x8       , sizeof(h->zigzag_scan8x8_q0      ));
 +        memcpy(h->zigzag_scan8x8_cavlc_q0 , h->zigzag_scan8x8_cavlc , sizeof(h->zigzag_scan8x8_cavlc_q0));
 +        memcpy(h->field_scan_q0           , h->field_scan           , sizeof(h->field_scan_q0          ));
 +        memcpy(h->field_scan8x8_q0        , h->field_scan8x8        , sizeof(h->field_scan8x8_q0       ));
 +        memcpy(h->field_scan8x8_cavlc_q0  , h->field_scan8x8_cavlc  , sizeof(h->field_scan8x8_cavlc_q0 ));
      }
  }
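
  [Editor's illustrative note, not part of the commit: the TRANSPOSE changes above only add parentheses around the macro argument. The call sites pass plain array elements, so the change is defensive, but the hazard is real as soon as an expression is passed; a contrived example:]

      #define TRANSPOSE_BAD(x)  (x >> 2) | ((x << 2) & 0xF)
      #define TRANSPOSE_GOOD(x) ((x) >> 2) | (((x) << 2) & 0xF)

      /* With a plain variable both forms agree, but TRANSPOSE_BAD(v & 12)
       * expands to (v & 12 >> 2) | ((v & 12 << 2) & 0xF); the shifts bind
       * tighter than '&', so it groups as (v & (12 >> 2)) | ((v & (12 << 2)) & 0xF)
       * instead of transposing (v & 12).  The parenthesized form keeps the
       * argument intact. */
      static unsigned char transpose_low_nibble(unsigned char v)
      {
          return TRANSPOSE_GOOD(v & 0xF);    /* safe with any argument */
      }
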
  
 -static enum AVPixelFormat get_pixel_format(H264Context *h)
 +static enum AVPixelFormat get_pixel_format(H264Context *h, int force_callback)
  {
  #define HWACCEL_MAX (CONFIG_H264_DXVA2_HWACCEL + \
                       CONFIG_H264_D3D11VA_HWACCEL + \
                       CONFIG_H264_VAAPI_HWACCEL + \
                       (CONFIG_H264_VDA_HWACCEL * 2) + \
 +                     CONFIG_H264_VIDEOTOOLBOX_HWACCEL + \
                       CONFIG_H264_VDPAU_HWACCEL)
      enum AVPixelFormat pix_fmts[HWACCEL_MAX + 2], *fmt = pix_fmts;
      const enum AVPixelFormat *choices = pix_fmts;
 +    int i;
  
      switch (h->sps.bit_depth_luma) {
      case 9:
          else
              *fmt++ = AV_PIX_FMT_YUV420P10;
          break;
 +    case 12:
 +        if (CHROMA444(h)) {
 +            if (h->avctx->colorspace == AVCOL_SPC_RGB) {
 +                *fmt++ = AV_PIX_FMT_GBRP12;
 +            } else
 +                *fmt++ = AV_PIX_FMT_YUV444P12;
 +        } else if (CHROMA422(h))
 +            *fmt++ = AV_PIX_FMT_YUV422P12;
 +        else
 +            *fmt++ = AV_PIX_FMT_YUV420P12;
 +        break;
 +    case 14:
 +        if (CHROMA444(h)) {
 +            if (h->avctx->colorspace == AVCOL_SPC_RGB) {
 +                *fmt++ = AV_PIX_FMT_GBRP14;
 +            } else
 +                *fmt++ = AV_PIX_FMT_YUV444P14;
 +        } else if (CHROMA422(h))
 +            *fmt++ = AV_PIX_FMT_YUV422P14;
 +        else
 +            *fmt++ = AV_PIX_FMT_YUV420P14;
 +        break;
      case 8:
  #if CONFIG_H264_VDPAU_HWACCEL
          *fmt++ = AV_PIX_FMT_VDPAU;
  #if CONFIG_H264_VDA_HWACCEL
              *fmt++ = AV_PIX_FMT_VDA_VLD;
              *fmt++ = AV_PIX_FMT_VDA;
 +#endif
 +#if CONFIG_H264_VIDEOTOOLBOX_HWACCEL
 +            *fmt++ = AV_PIX_FMT_VIDEOTOOLBOX;
  #endif
              if (h->avctx->codec->pix_fmts)
                  choices = h->avctx->codec->pix_fmts;
  
      *fmt = AV_PIX_FMT_NONE;
  
 -    return ff_get_format(h->avctx, choices);
 +    for (i=0; choices[i] != AV_PIX_FMT_NONE; i++)
 +        if (choices[i] == h->avctx->pix_fmt && !force_callback)
 +            return choices[i];
 +    return ff_thread_get_format(h->avctx, choices);
  }
  
  /* export coded and cropped frame dimensions to AVCodecContext */
@@@ -980,15 -906,10 +980,15 @@@ static int init_dimensions(H264Context 
  {
      int width  = h->width  - (h->sps.crop_right + h->sps.crop_left);
      int height = h->height - (h->sps.crop_top   + h->sps.crop_bottom);
 +    av_assert0(h->sps.crop_right + h->sps.crop_left < (unsigned)h->width);
 +    av_assert0(h->sps.crop_top + h->sps.crop_bottom < (unsigned)h->height);
  
      /* handle container cropping */
      if (FFALIGN(h->avctx->width,  16) == FFALIGN(width,  16) &&
 -        FFALIGN(h->avctx->height, 16) == FFALIGN(height, 16)) {
 +        FFALIGN(h->avctx->height, 16) == FFALIGN(height, 16) &&
 +        h->avctx->width  <= width &&
 +        h->avctx->height <= height
 +    ) {
          width  = h->avctx->width;
          height = h->avctx->height;
      }
@@@ -1034,7 -955,7 +1034,7 @@@ static int h264_slice_header_init(H264C
          if (h->x264_build < 44U)
              den *= 2;
          av_reduce(&h->avctx->framerate.den, &h->avctx->framerate.num,
 -                  h->sps.num_units_in_tick, den, 1 << 30);
 +                  h->sps.num_units_in_tick * h->avctx->ticks_per_frame, den, 1 << 30);
      }
  
      ff_h264_free_tables(h);
      ret = ff_h264_alloc_tables(h);
      if (ret < 0) {
          av_log(h->avctx, AV_LOG_ERROR, "Could not allocate memory\n");
 -        return ret;
 +        goto fail;
      }
  
 -    if (h->sps.bit_depth_luma < 8 || h->sps.bit_depth_luma > 10) {
 +#if FF_API_CAP_VDPAU
 +    if (h->avctx->codec &&
 +        h->avctx->codec->capabilities & AV_CODEC_CAP_HWACCEL_VDPAU &&
 +        (h->sps.bit_depth_luma != 8 || h->sps.chroma_format_idc > 1)) {
 +        av_log(h->avctx, AV_LOG_ERROR,
 +                "VDPAU decoding does not support video colorspace.\n");
 +        ret = AVERROR_INVALIDDATA;
 +        goto fail;
 +    }
 +#endif
 +
 +    if (h->sps.bit_depth_luma < 8 || h->sps.bit_depth_luma > 14 ||
 +        h->sps.bit_depth_luma == 11 || h->sps.bit_depth_luma == 13
 +    ) {
          av_log(h->avctx, AV_LOG_ERROR, "Unsupported bit depth %d\n",
                 h->sps.bit_depth_luma);
 -        return AVERROR_INVALIDDATA;
 +        ret = AVERROR_INVALIDDATA;
 +        goto fail;
      }
  
 +    h->cur_bit_depth_luma         =
      h->avctx->bits_per_raw_sample = h->sps.bit_depth_luma;
 +    h->cur_chroma_format_idc      = h->sps.chroma_format_idc;
      h->pixel_shift                = h->sps.bit_depth_luma > 8;
      h->chroma_format_idc          = h->sps.chroma_format_idc;
      h->bit_depth_luma             = h->sps.bit_depth_luma;
          nb_slices = max_slices;
      }
      h->slice_context_count = nb_slices;
 +    h->max_contexts = FFMIN(h->max_contexts, nb_slices);
  
      if (!HAVE_THREADS || !(h->avctx->active_thread_type & FF_THREAD_SLICE)) {
          ret = ff_h264_slice_context_init(h, &h->slice_ctx[0]);
          if (ret < 0) {
              av_log(h->avctx, AV_LOG_ERROR, "context_init() failed.\n");
 -            return ret;
 +            goto fail;
          }
      } else {
          for (i = 0; i < h->slice_context_count; i++) {
  
              if ((ret = ff_h264_slice_context_init(h, sl)) < 0) {
                  av_log(h->avctx, AV_LOG_ERROR, "context_init() failed.\n");
 -                return ret;
 +                goto fail;
              }
          }
      }
      h->context_initialized = 1;
  
      return 0;
 +fail:
 +    ff_h264_free_tables(h);
 +    h->context_initialized = 0;
 +    return ret;
 +}
 +
 +static enum AVPixelFormat non_j_pixfmt(enum AVPixelFormat a)
 +{
 +    switch (a) {
 +    case AV_PIX_FMT_YUVJ420P: return AV_PIX_FMT_YUV420P;
 +    case AV_PIX_FMT_YUVJ422P: return AV_PIX_FMT_YUV422P;
 +    case AV_PIX_FMT_YUVJ444P: return AV_PIX_FMT_YUV444P;
 +    default:
 +        return a;
 +    }
  }
  
  /**
@@@ -1154,55 -1043,22 +1154,55 @@@ int ff_h264_decode_slice_header(H264Con
      int ret;
      unsigned int slice_type, tmp, i, j;
      int last_pic_structure, last_pic_droppable;
 +    int must_reinit;
      int needs_reinit = 0;
      int field_pic_flag, bottom_field_flag;
 +    int first_slice = sl == h->slice_ctx && !h->current_slice;
      int frame_num, droppable, picture_structure;
 -    int mb_aff_frame = 0;
 +    int mb_aff_frame, last_mb_aff_frame;
 +    PPS *pps;
 +
 +    if (first_slice)
 +        av_assert0(!h->setup_finished);
  
      h->qpel_put = h->h264qpel.put_h264_qpel_pixels_tab;
      h->qpel_avg = h->h264qpel.avg_h264_qpel_pixels_tab;
  
 -    first_mb_in_slice = get_ue_golomb(&sl->gb);
 +    first_mb_in_slice = get_ue_golomb_long(&sl->gb);
  
      if (first_mb_in_slice == 0) { // FIXME better field boundary detection
 -        if (h->current_slice && h->cur_pic_ptr && FIELD_PICTURE(h)) {
 -            ff_h264_field_end(h, sl, 1);
 +        if (h->current_slice) {
 +            if (h->setup_finished) {
 +                av_log(h->avctx, AV_LOG_ERROR, "Too many fields\n");
 +                return AVERROR_INVALIDDATA;
 +            }
 +            if (h->max_contexts > 1) {
 +                if (!h->single_decode_warning) {
 +                    av_log(h->avctx, AV_LOG_WARNING, "Cannot decode multiple access units as slice threads\n");
 +                    h->single_decode_warning = 1;
 +                }
 +                h->max_contexts = 1;
 +                return SLICE_SINGLETHREAD;
 +            }
 +
 +            if (h->cur_pic_ptr && FIELD_PICTURE(h) && h->first_field) {
 +                ret = ff_h264_field_end(h, h->slice_ctx, 1);
 +                h->current_slice = 0;
 +                if (ret < 0)
 +                    return ret;
 +            } else if (h->cur_pic_ptr && !FIELD_PICTURE(h) && !h->first_field && h->nal_unit_type  == NAL_IDR_SLICE) {
 +                av_log(h, AV_LOG_WARNING, "Broken frame packetizing\n");
 +                ret = ff_h264_field_end(h, h->slice_ctx, 1);
 +                h->current_slice = 0;
 +                ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 0);
 +                ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 1);
 +                h->cur_pic_ptr = NULL;
 +                if (ret < 0)
 +                    return ret;
 +            } else
 +                return AVERROR_INVALIDDATA;
          }
  
 -        h->current_slice = 0;
          if (!h->first_field) {
              if (h->cur_pic_ptr && !h->droppable) {
                  ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
          }
      }
  
 +    if (!h->current_slice)
 +        av_assert0(sl == h->slice_ctx);
 +
      slice_type = get_ue_golomb_31(&sl->gb);
      if (slice_type > 9) {
          av_log(h->avctx, AV_LOG_ERROR,
          return AVERROR_INVALIDDATA;
      }
  
 +    if (h->current_slice == 0 && !h->first_field) {
 +        if (
 +            (h->avctx->skip_frame >= AVDISCARD_NONREF && !h->nal_ref_idc) ||
 +            (h->avctx->skip_frame >= AVDISCARD_BIDIR  && sl->slice_type_nos == AV_PICTURE_TYPE_B) ||
 +            (h->avctx->skip_frame >= AVDISCARD_NONINTRA && sl->slice_type_nos != AV_PICTURE_TYPE_I) ||
 +            (h->avctx->skip_frame >= AVDISCARD_NONKEY && h->nal_unit_type != NAL_IDR_SLICE && h->sei_recovery_frame_cnt < 0) ||
 +            h->avctx->skip_frame >= AVDISCARD_ALL) {
 +            return SLICE_SKIPED;
 +        }
 +    }
 +
      // to make a few old functions happy, it's wrong though
      if (!h->setup_finished)
          h->pict_type = sl->slice_type;
                 pps_id);
          return AVERROR_INVALIDDATA;
      }
 -    if (!h->setup_finished) {
 -        h->pps = *h->pps_buffers[pps_id];
 -    } else if (h->dequant_coeff_pps != pps_id) {
 -        av_log(h->avctx, AV_LOG_ERROR, "PPS changed between slices\n");
 +    if (h->au_pps_id >= 0 && pps_id != h->au_pps_id) {
 +        av_log(h->avctx, AV_LOG_ERROR,
 +               "PPS change from %d to %d forbidden\n",
 +               h->au_pps_id, pps_id);
          return AVERROR_INVALIDDATA;
      }
  
 -    if (!h->sps_buffers[h->pps.sps_id]) {
 +    pps = h->pps_buffers[pps_id];
 +
 +    if (!h->sps_buffers[pps->sps_id]) {
          av_log(h->avctx, AV_LOG_ERROR,
                 "non-existing SPS %u referenced\n",
                 h->pps.sps_id);
          return AVERROR_INVALIDDATA;
      }
  
 -    if (h->pps.sps_id != h->sps.sps_id ||
 -        h->sps_buffers[h->pps.sps_id]->new) {
 -        h->sps_buffers[h->pps.sps_id]->new = 0;
 +    if (first_slice) {
 +        h->pps = *h->pps_buffers[pps_id];
 +    } else {
 +        if (h->pps.sps_id != pps->sps_id ||
 +            h->pps.transform_8x8_mode != pps->transform_8x8_mode ||
 +            (h->setup_finished && h->dequant_coeff_pps != pps_id)) {
 +            av_log(h->avctx, AV_LOG_ERROR, "PPS changed between slices\n");
 +            return AVERROR_INVALIDDATA;
 +        }
 +    }
 +
 +    if (pps->sps_id != h->sps.sps_id ||
 +        pps->sps_id != h->current_sps_id ||
 +        h->sps_buffers[pps->sps_id]->new) {
 +
 +        if (!first_slice) {
 +            av_log(h->avctx, AV_LOG_ERROR,
 +               "SPS changed in the middle of the frame\n");
 +            return AVERROR_INVALIDDATA;
 +        }
  
          h->sps = *h->sps_buffers[h->pps.sps_id];
  
 +        if (h->mb_width  != h->sps.mb_width ||
 +            h->mb_height != h->sps.mb_height * (2 - h->sps.frame_mbs_only_flag) ||
 +            h->cur_bit_depth_luma    != h->sps.bit_depth_luma ||
 +            h->cur_chroma_format_idc != h->sps.chroma_format_idc
 +        )
 +            needs_reinit = 1;
 +
          if (h->bit_depth_luma    != h->sps.bit_depth_luma ||
              h->chroma_format_idc != h->sps.chroma_format_idc)
              needs_reinit         = 1;
  
      }
  
 +    must_reinit = (h->context_initialized &&
 +                    (   16*h->sps.mb_width != h->avctx->coded_width
 +                     || 16*h->sps.mb_height * (2 - h->sps.frame_mbs_only_flag) != h->avctx->coded_height
 +                     || h->cur_bit_depth_luma    != h->sps.bit_depth_luma
 +                     || h->cur_chroma_format_idc != h->sps.chroma_format_idc
 +                     || h->mb_width  != h->sps.mb_width
 +                     || h->mb_height != h->sps.mb_height * (2 - h->sps.frame_mbs_only_flag)
 +                    ));
 +    if (h->avctx->pix_fmt == AV_PIX_FMT_NONE
 +        || (non_j_pixfmt(h->avctx->pix_fmt) != non_j_pixfmt(get_pixel_format(h, 0))))
 +        must_reinit = 1;
 +
 +    if (first_slice && av_cmp_q(h->sps.sar, h->avctx->sample_aspect_ratio))
 +        must_reinit = 1;
 +
      if (!h->setup_finished) {
          h->avctx->profile = ff_h264_get_profile(&h->sps);
          h->avctx->level   = h->sps.level_idc;
          h->avctx->refs    = h->sps.ref_frame_count;
  
 -        if (h->mb_width  != h->sps.mb_width ||
 -            h->mb_height != h->sps.mb_height * (2 - h->sps.frame_mbs_only_flag))
 -            needs_reinit = 1;
 -
          h->mb_width  = h->sps.mb_width;
          h->mb_height = h->sps.mb_height * (2 - h->sps.frame_mbs_only_flag);
          h->mb_num    = h->mb_width * h->mb_height;
              return ret;
  
          if (h->sps.video_signal_type_present_flag) {
 -            h->avctx->color_range = h->sps.full_range ? AVCOL_RANGE_JPEG
 -                : AVCOL_RANGE_MPEG;
 +            h->avctx->color_range = h->sps.full_range>0 ? AVCOL_RANGE_JPEG
 +                                                        : AVCOL_RANGE_MPEG;
              if (h->sps.colour_description_present_flag) {
                  if (h->avctx->colorspace != h->sps.colorspace)
                      needs_reinit = 1;
          }
      }
  
 -    if (h->context_initialized && needs_reinit) {
 +    if (h->context_initialized &&
 +        (must_reinit || needs_reinit)) {
          h->context_initialized = 0;
          if (sl != h->slice_ctx) {
              av_log(h->avctx, AV_LOG_ERROR,
              return AVERROR_INVALIDDATA;
          }
  
 +        av_assert1(first_slice);
 +
          ff_h264_flush_change(h);
  
 -        if ((ret = get_pixel_format(h)) < 0)
 +        if ((ret = get_pixel_format(h, 1)) < 0)
              return ret;
          h->avctx->pix_fmt = ret;
  
          av_log(h->avctx, AV_LOG_INFO, "Reinit context to %dx%d, "
 -               "pix_fmt: %d\n", h->width, h->height, h->avctx->pix_fmt);
 +               "pix_fmt: %s\n", h->width, h->height, av_get_pix_fmt_name(h->avctx->pix_fmt));
  
          if ((ret = h264_slice_header_init(h)) < 0) {
              av_log(h->avctx, AV_LOG_ERROR,
              return AVERROR_PATCHWELCOME;
          }
  
 -        if ((ret = get_pixel_format(h)) < 0)
 +        if ((ret = get_pixel_format(h, 1)) < 0)
              return ret;
          h->avctx->pix_fmt = ret;
  
          }
      }
  
 -    if (sl == h->slice_ctx && h->dequant_coeff_pps != pps_id) {
 +    if (!h->current_slice && h->dequant_coeff_pps != pps_id) {
          h->dequant_coeff_pps = pps_id;
 -        h264_init_dequant_tables(h);
 +        ff_h264_init_dequant_tables(h);
      }
  
      frame_num = get_bits(&sl->gb, h->sps.log2_max_frame_num);
 +    if (!first_slice) {
 +        if (h->frame_num != frame_num) {
 +            av_log(h->avctx, AV_LOG_ERROR, "Frame num change from %d to %d\n",
 +                   h->frame_num, frame_num);
 +            return AVERROR_INVALIDDATA;
 +        }
 +    }
 +
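
frame_num is read with log2_max_frame_num bits, so every comparison against it (the mid-frame consistency check just added above, and the gap-concealment loop further down) is modular arithmetic. A hypothetical helper, not part of the patch, showing the same test the gap loop performs (minus its first_field condition):

    /* returns nonzero when cur is neither a repeat of prev nor its
     * modular successor, i.e. when frames were lost */
    static int h264_frame_num_gap(int prev, int cur, int log2_max_frame_num)
    {
        int max_frame_num = 1 << log2_max_frame_num;  /* e.g. 16 for log2 == 4 */
        return cur != prev && cur != (prev + 1) % max_frame_num;
    }
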
      if (!h->setup_finished)
          h->frame_num = frame_num;
  
      sl->mb_mbaff       = 0;
 -
 +    mb_aff_frame       = 0;
 +    last_mb_aff_frame  = h->mb_aff_frame;
      last_pic_structure = h->picture_structure;
      last_pic_droppable = h->droppable;
  
      if (h->sps.frame_mbs_only_flag) {
          picture_structure = PICT_FRAME;
      } else {
 +        if (!h->sps.direct_8x8_inference_flag && slice_type == AV_PICTURE_TYPE_B) {
 +            av_log(h->avctx, AV_LOG_ERROR, "This stream was generated by a broken encoder: invalid 8x8 inference\n");
 +            return -1;
 +        }
          field_pic_flag = get_bits1(&sl->gb);
 +
          if (field_pic_flag) {
              bottom_field_flag = get_bits1(&sl->gb);
              picture_structure = PICT_TOP_FIELD + bottom_field_flag;
              mb_aff_frame      = h->sps.mb_aff;
          }
      }
 -    if (!h->setup_finished) {
 -        h->droppable         = droppable;
 -        h->picture_structure = picture_structure;
 -        h->mb_aff_frame      = mb_aff_frame;
 -    }
 -    sl->mb_field_decoding_flag = h->picture_structure != PICT_FRAME;
  
 -    if (h->current_slice != 0) {
 +    if (h->current_slice) {
          if (last_pic_structure != picture_structure ||
 -            last_pic_droppable != droppable) {
 +            last_pic_droppable != droppable ||
 +            last_mb_aff_frame  != mb_aff_frame) {
              av_log(h->avctx, AV_LOG_ERROR,
                     "Changing field mode (%d -> %d) between slices is not allowed\n",
                     last_pic_structure, h->picture_structure);
                     h->current_slice + 1);
              return AVERROR_INVALIDDATA;
          }
 -    } else {
 +    }
 +
 +    if (!h->setup_finished) {
 +        h->droppable         = droppable;
 +        h->picture_structure = picture_structure;
 +        h->mb_aff_frame      = mb_aff_frame;
 +    }
 +    sl->mb_field_decoding_flag = picture_structure != PICT_FRAME;
 +
 +    if (h->current_slice == 0) {
          /* Shorten frame num gaps so we don't have to allocate reference
           * frames just to throw them away */
          if (h->frame_num != h->prev_frame_num) {
           * Here, we're using that to see if we should mark previously
           * decode frames as "finished".
           * We have to do that before the "dummy" in-between frame allocation,
 -         * since that can modify s->current_picture_ptr. */
 +         * since that can modify h->cur_pic_ptr. */
          if (h->first_field) {
 -            assert(h->cur_pic_ptr);
 -            assert(h->cur_pic_ptr->f->buf[0]);
 +            av_assert0(h->cur_pic_ptr);
 +            av_assert0(h->cur_pic_ptr->f->buf[0]);
              assert(h->cur_pic_ptr->reference != DELAYED_PIC_REF);
  
 +            /* Mark old field/frame as completed */
 +            if (h->cur_pic_ptr->tf.owner == h->avctx) {
 +                ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
 +                                          last_pic_structure == PICT_BOTTOM_FIELD);
 +            }
 +
              /* figure out if we have a complementary field pair */
              if (!FIELD_PICTURE(h) || h->picture_structure == last_pic_structure) {
                  /* Previous field is unmatched. Don't display it, but let it
                   * remain for reference if marked as such. */
 -                if (!last_pic_droppable && last_pic_structure != PICT_FRAME) {
 +                if (last_pic_structure != PICT_FRAME) {
                      ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
                                                last_pic_structure == PICT_TOP_FIELD);
                  }
                       * different frame_nums. Consider this field first in
                       * pair. Throw away previous field except for reference
                       * purposes. */
 -                    if (!last_pic_droppable && last_pic_structure != PICT_FRAME) {
 +                    if (last_pic_structure != PICT_FRAME) {
                          ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
                                                    last_pic_structure == PICT_TOP_FIELD);
                      }
              }
          }
  
 -        while (h->frame_num != h->prev_frame_num &&
 +        while (h->frame_num != h->prev_frame_num && !h->first_field &&
                 h->frame_num != (h->prev_frame_num + 1) % (1 << h->sps.log2_max_frame_num)) {
              H264Picture *prev = h->short_ref_count ? h->short_ref[0] : NULL;
              av_log(h->avctx, AV_LOG_DEBUG, "Frame num gap %d %d\n",
                     h->frame_num, h->prev_frame_num);
 -            ret = initialize_cur_frame(h);
 +            if (!h->sps.gaps_in_frame_num_allowed_flag)
 +                for(i=0; i<FF_ARRAY_ELEMS(h->last_pocs); i++)
 +                    h->last_pocs[i] = INT_MIN;
 +            ret = h264_frame_start(h);
              if (ret < 0) {
                  h->first_field = 0;
                  return ret;
              h->prev_frame_num++;
              h->prev_frame_num        %= 1 << h->sps.log2_max_frame_num;
              h->cur_pic_ptr->frame_num = h->prev_frame_num;
 +            h->cur_pic_ptr->invalid_gap = !h->sps.gaps_in_frame_num_allowed_flag;
              ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 0);
              ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX, 1);
              ret = ff_generate_sliding_window_mmcos(h, 1);
                                    (const uint8_t **)prev->f->data,
                                    prev->f->linesize,
                                    prev->f->format,
 -                                  h->mb_width  * 16,
 -                                  h->mb_height * 16);
 +                                  prev->f->width,
 +                                  prev->f->height);
                      h->short_ref[0]->poc = prev->poc + 2;
                  }
                  h->short_ref[0]->frame_num = h->prev_frame_num;
           * We're using that to see whether to continue decoding in that
           * frame, or to allocate a new one. */
          if (h->first_field) {
 -            assert(h->cur_pic_ptr);
 -            assert(h->cur_pic_ptr->f->buf[0]);
 +            av_assert0(h->cur_pic_ptr);
 +            av_assert0(h->cur_pic_ptr->f->buf[0]);
              assert(h->cur_pic_ptr->reference != DELAYED_PIC_REF);
  
              /* figure out if we have a complementary field pair */
              if (!FIELD_PICTURE(h) || h->picture_structure == last_pic_structure) {
                  /* Previous field is unmatched. Don't display it, but let it
                   * remain for reference if marked as such. */
 +                h->missing_fields ++;
                  h->cur_pic_ptr = NULL;
                  h->first_field = FIELD_PICTURE(h);
              } else {
 +                h->missing_fields = 0;
                  if (h->cur_pic_ptr->frame_num != h->frame_num) {
 +                    ff_thread_report_progress(&h->cur_pic_ptr->tf, INT_MAX,
 +                                              h->picture_structure==PICT_BOTTOM_FIELD);
                      /* This and the previous field had different frame_nums.
                       * Consider this field first in pair. Throw away previous
                       * one except for reference purposes. */
          } else {
              release_unused_pictures(h, 0);
          }
 +        /* Some macroblocks can be accessed before they're available in case
 +         * of lost slices, MBAFF or threading. */
 +        if (FIELD_PICTURE(h)) {
 +            for(i = (h->picture_structure == PICT_BOTTOM_FIELD); i<h->mb_height; i++)
 +                memset(h->slice_table + i*h->mb_stride, -1, (h->mb_stride - (i+1==h->mb_height)) * sizeof(*h->slice_table));
 +        } else {
 +            memset(h->slice_table, -1,
 +                (h->mb_height * h->mb_stride - 1) * sizeof(*h->slice_table));
 +        }
      }
  
 -    assert(h->mb_num == h->mb_width * h->mb_height);
 +    av_assert1(h->mb_num == h->mb_width * h->mb_height);
      if (first_mb_in_slice << FIELD_OR_MBAFF_PICTURE(h) >= h->mb_num ||
          first_mb_in_slice >= h->mb_num) {
          av_log(h->avctx, AV_LOG_ERROR, "first_mb_in_slice overflow\n");
                                   FIELD_OR_MBAFF_PICTURE(h);
      if (h->picture_structure == PICT_BOTTOM_FIELD)
          sl->resync_mb_y = sl->mb_y = sl->mb_y + 1;
 -    assert(sl->mb_y < h->mb_height);
 +    av_assert1(sl->mb_y < h->mb_height);
  
      if (h->picture_structure == PICT_FRAME) {
          h->curr_pic_num = h->frame_num;
      }
  
      if (h->nal_unit_type == NAL_IDR_SLICE)
 -        get_ue_golomb(&sl->gb); /* idr_pic_id */
 +        get_ue_golomb_long(&sl->gb); /* idr_pic_id */
  
      if (h->sps.poc_type == 0) {
          int poc_lsb = get_bits(&sl->gb, h->sps.log2_max_poc_lsb);
      if ((h->pps.weighted_pred && sl->slice_type_nos == AV_PICTURE_TYPE_P) ||
          (h->pps.weighted_bipred_idc == 1 &&
           sl->slice_type_nos == AV_PICTURE_TYPE_B))
-         ff_pred_weight_table(h, sl);
+         ff_h264_pred_weight_table(&sl->gb, &h->sps, sl->ref_count,
+                                   sl->slice_type_nos, &sl->pwt);
      else if (h->pps.weighted_bipred_idc == 2 &&
               sl->slice_type_nos == AV_PICTURE_TYPE_B) {
          implicit_weight_table(h, sl, -1);
      } else {
-         sl->use_weight = 0;
+         sl->pwt.use_weight = 0;
          for (i = 0; i < 2; i++) {
-             sl->luma_weight_flag[i]   = 0;
-             sl->chroma_weight_flag[i] = 0;
+             sl->pwt.luma_weight_flag[i]   = 0;
+             sl->pwt.chroma_weight_flag[i] = 0;
          }
      }
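
This call site is the FFmpeg side of the merged refactor: the explicit weight table is now parsed by ff_h264_pred_weight_table() in h264_parse.c and lands in the self-contained sl->pwt state. A rough sketch of what another caller would supply, with gb, sps, ref_count and slice_type_nos standing in for that caller's own state (the struct name is inferred from the pwt fields used throughout this diff and is assumed to live in h264_parse.h):

    H264PredWeightTable pwt;

    /* same argument order as the call above: bitstream reader, active SPS,
     * per-list reference counts, slice type, output table */
    ff_h264_pred_weight_table(&gb, &sps, ref_count, slice_type_nos, &pwt);
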
  
  
      if (h->avctx->skip_loop_filter >= AVDISCARD_ALL ||
          (h->avctx->skip_loop_filter >= AVDISCARD_NONKEY &&
 +         h->nal_unit_type != NAL_IDR_SLICE) ||
 +        (h->avctx->skip_loop_filter >= AVDISCARD_NONINTRA &&
           sl->slice_type_nos != AV_PICTURE_TYPE_I) ||
          (h->avctx->skip_loop_filter >= AVDISCARD_BIDIR  &&
           sl->slice_type_nos == AV_PICTURE_TYPE_B) ||
              h->max_contexts = 1;
              if (!h->single_decode_warning) {
                  av_log(h->avctx, AV_LOG_INFO,
 -                       "Cannot parallelize deblocking type 1, decoding such frames in sequential order\n");
 +                       "Cannot parallelize slice decoding with deblocking filter type 1; decoding such frames in sequential order\n"
 +                       "To parallelize slice decoding you need video encoded with disable_deblocking_filter_idc set to 2 (deblock only edges that do not cross slices).\n"
 +                       "Setting the flags2 libavcodec option to +fast (-flags2 +fast) will disable deblocking across slices and enable parallel slice decoding "
 +                       "but will generate non-standard-compliant output.\n");
                  h->single_decode_warning = 1;
              }
              if (sl != h->slice_ctx) {
                  av_log(h->avctx, AV_LOG_ERROR,
                         "Deblocking switched inside frame.\n");
 -                return 1;
 +                return SLICE_SINGLETHREAD;
              }
          }
      }
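
The expanded warning spells out the workaround in command-line terms; the API-level equivalent of "-flags2 +fast" is setting AV_CODEC_FLAG2_FAST before opening the decoder. A small sketch (standard libavcodec fields, not part of this patch):

    #include <libavcodec/avcodec.h>

    /* Trade standard-compliant output for parallel slice decoding when the
     * stream uses deblocking filter type 1, as the warning above describes. */
    static void enable_fast_slice_threading(AVCodecContext *avctx)
    {
        avctx->flags2     |= AV_CODEC_FLAG2_FAST;
        avctx->thread_type = FF_THREAD_SLICE;
    }
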
                     6 * (h->sps.bit_depth_luma - 8);
  
      sl->slice_num       = ++h->current_slice;
 -    if (sl->slice_num >= MAX_SLICES) {
 -        av_log(h->avctx, AV_LOG_ERROR,
 -               "Too many slices, increase MAX_SLICES and recompile\n");
 +
 +    if (sl->slice_num)
 +        h->slice_row[(sl->slice_num-1)&(MAX_SLICES-1)]= sl->resync_mb_y;
 +    if (   h->slice_row[sl->slice_num&(MAX_SLICES-1)] + 3 >= sl->resync_mb_y
 +        && h->slice_row[sl->slice_num&(MAX_SLICES-1)] <= sl->resync_mb_y
 +        && sl->slice_num >= MAX_SLICES) {
 +        //in case of ASO this check needs to be updated depending on how we decide to assign slice numbers
 +        av_log(h->avctx, AV_LOG_WARNING, "Possibly too many slices (%d >= %d), increase MAX_SLICES and recompile if there are artifacts\n", sl->slice_num, MAX_SLICES);
      }
  
      for (j = 0; j < 2; j++) {
                               (sl->ref_list[j][i].reference & 3);
      }
  
 +    h->au_pps_id = pps_id;
 +    h->sps.new =
 +    h->sps_buffers[h->pps.sps_id]->new = 0;
 +    h->current_sps_id = h->pps.sps_id;
 +
      if (h->avctx->debug & FF_DEBUG_PICT_INFO) {
          av_log(h->avctx, AV_LOG_DEBUG,
                 "slice:%d %s mb:%d %c%s%s pps:%u frame:%d poc:%d/%d ref:%d/%d qp:%d loop:%d:%d:%d weight:%d%s %s\n",
                 sl->qscale,
                 sl->deblocking_filter,
                 sl->slice_alpha_c0_offset, sl->slice_beta_offset,
-                sl->use_weight,
-                sl->use_weight == 1 && sl->use_weight_chroma ? "c" : "",
+                sl->pwt.use_weight,
+                sl->pwt.use_weight == 1 && sl->pwt.use_weight_chroma ? "c" : "",
                 sl->slice_type == AV_PICTURE_TYPE_B ? (sl->direct_spatial_mv_pred ? "SPAT" : "TEMP") : "");
      }
  
@@@ -1982,12 -1729,12 +1983,12 @@@ static av_always_inline void fill_filte
          if (USES_LIST(top_type, list)) {
              const int b_xy  = h->mb2b_xy[top_xy] + 3 * b_stride;
              const int b8_xy = 4 * top_xy + 2;
 -            int (*ref2frm)[64] = sl->ref2frm[h->slice_table[top_xy] & (MAX_SLICES - 1)][0] + (MB_MBAFF(sl) ? 20 : 2);
 +            int *ref2frm = sl->ref2frm[h->slice_table[top_xy] & (MAX_SLICES - 1)][list] + (MB_MBAFF(sl) ? 20 : 2);
              AV_COPY128(mv_dst - 1 * 8, h->cur_pic.motion_val[list][b_xy + 0]);
              ref_cache[0 - 1 * 8] =
 -            ref_cache[1 - 1 * 8] = ref2frm[list][h->cur_pic.ref_index[list][b8_xy + 0]];
 +            ref_cache[1 - 1 * 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 0]];
              ref_cache[2 - 1 * 8] =
 -            ref_cache[3 - 1 * 8] = ref2frm[list][h->cur_pic.ref_index[list][b8_xy + 1]];
 +            ref_cache[3 - 1 * 8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 1]];
          } else {
              AV_ZERO128(mv_dst - 1 * 8);
              AV_WN32A(&ref_cache[0 - 1 * 8], ((LIST_NOT_USED) & 0xFF) * 0x01010101u);
              if (USES_LIST(left_type[LTOP], list)) {
                  const int b_xy  = h->mb2b_xy[left_xy[LTOP]] + 3;
                  const int b8_xy = 4 * left_xy[LTOP] + 1;
 -                int (*ref2frm)[64] = sl->ref2frm[h->slice_table[left_xy[LTOP]] & (MAX_SLICES - 1)][0] + (MB_MBAFF(sl) ? 20 : 2);
 +                int *ref2frm = sl->ref2frm[h->slice_table[left_xy[LTOP]] & (MAX_SLICES - 1)][list] + (MB_MBAFF(sl) ? 20 : 2);
                  AV_COPY32(mv_dst - 1 +  0, h->cur_pic.motion_val[list][b_xy + b_stride * 0]);
                  AV_COPY32(mv_dst - 1 +  8, h->cur_pic.motion_val[list][b_xy + b_stride * 1]);
                  AV_COPY32(mv_dst - 1 + 16, h->cur_pic.motion_val[list][b_xy + b_stride * 2]);
                  AV_COPY32(mv_dst - 1 + 24, h->cur_pic.motion_val[list][b_xy + b_stride * 3]);
                  ref_cache[-1 +  0] =
 -                ref_cache[-1 +  8] = ref2frm[list][h->cur_pic.ref_index[list][b8_xy + 2 * 0]];
 +                ref_cache[-1 +  8] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 2 * 0]];
                  ref_cache[-1 + 16] =
 -                ref_cache[-1 + 24] = ref2frm[list][h->cur_pic.ref_index[list][b8_xy + 2 * 1]];
 +                ref_cache[-1 + 24] = ref2frm[h->cur_pic.ref_index[list][b8_xy + 2 * 1]];
              } else {
                  AV_ZERO32(mv_dst - 1 +  0);
                  AV_ZERO32(mv_dst - 1 +  8);
  
      {
          int8_t *ref = &h->cur_pic.ref_index[list][4 * mb_xy];
 -        int (*ref2frm)[64] = sl->ref2frm[sl->slice_num & (MAX_SLICES - 1)][0] + (MB_MBAFF(sl) ? 20 : 2);
 -        uint32_t ref01 = (pack16to32(ref2frm[list][ref[0]], ref2frm[list][ref[1]]) & 0x00FF00FF) * 0x0101;
 -        uint32_t ref23 = (pack16to32(ref2frm[list][ref[2]], ref2frm[list][ref[3]]) & 0x00FF00FF) * 0x0101;
 +        int *ref2frm = sl->ref2frm[sl->slice_num & (MAX_SLICES - 1)][list] + (MB_MBAFF(sl) ? 20 : 2);
 +        uint32_t ref01 = (pack16to32(ref2frm[ref[0]], ref2frm[ref[1]]) & 0x00FF00FF) * 0x0101;
 +        uint32_t ref23 = (pack16to32(ref2frm[ref[2]], ref2frm[ref[3]]) & 0x00FF00FF) * 0x0101;
          AV_WN32A(&ref_cache[0 * 8], ref01);
          AV_WN32A(&ref_cache[1 * 8], ref01);
          AV_WN32A(&ref_cache[2 * 8], ref23);
@@@ -2302,7 -2049,7 +2303,7 @@@ static void decode_finish_row(const H26
  
      ff_h264_draw_horiz_band(h, sl, top, height);
  
 -    if (h->droppable)
 +    if (h->droppable || sl->h264->slice_ctx[0].er.error_occurred)
          return;
  
      ff_thread_report_progress(&h->cur_pic_ptr->tf, top + height - 1,
@@@ -2313,14 -2060,15 +2314,14 @@@ static void er_add_slice(H264SliceConte
                           int startx, int starty,
                           int endx, int endy, int status)
  {
 -#if CONFIG_ERROR_RESILIENCE
 -    ERContext *er = &sl->er;
 -
      if (!sl->h264->enable_er)
          return;
  
 -    er->ref_count = sl->ref_count[0];
 -    ff_er_add_slice(er, startx, starty, endx, endy, status);
 -#endif
 +    if (CONFIG_ERROR_RESILIENCE) {
 +        ERContext *er = &sl->h264->slice_ctx[0].er;
 +
 +        ff_er_add_slice(er, startx, starty, endx, endy, status);
 +    }
  }
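
The rewrite above swaps the preprocessor guard for a plain if (CONFIG_ERROR_RESILIENCE): FFmpeg's config.h defines every CONFIG_* macro to 0 or 1, so the branch stays parsed and type-checked in all configurations and the compiler discards it as dead code when the feature is disabled. The general shape of the pattern, with a hypothetical feature name:

    if (CONFIG_SOME_FEATURE) {      /* always defined, either 0 or 1 */
        do_feature_work();          /* compiled in every build, eliminated
                                     * when the macro is 0 */
    }
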
  
  static int decode_slice(struct AVCodecContext *avctx, void *arg)
  
      sl->mb_skip_run = -1;
  
 +    av_assert0(h->block_offset[15] == (4 * ((scan8[15] - scan8[0]) & 7) << h->pixel_shift) + 4 * sl->linesize * ((scan8[15] - scan8[0]) >> 3));
 +
      sl->is_complex = FRAME_MBAFF(h) || h->picture_structure != PICT_FRAME ||
                       avctx->codec_id != AV_CODEC_ID_H264 ||
                       (CONFIG_GRAY && (h->flags & AV_CODEC_FLAG_GRAY));
  
 +    if (!(h->avctx->active_thread_type & FF_THREAD_SLICE) && h->picture_structure == PICT_FRAME && h->slice_ctx[0].er.error_status_table) {
 +        const int start_i  = av_clip(sl->resync_mb_x + sl->resync_mb_y * h->mb_width, 0, h->mb_num - 1);
 +        if (start_i) {
 +            int prev_status = h->slice_ctx[0].er.error_status_table[h->slice_ctx[0].er.mb_index2xy[start_i - 1]];
 +            prev_status &= ~ VP_START;
 +            if (prev_status != (ER_MV_END | ER_DC_END | ER_AC_END))
 +                h->slice_ctx[0].er.error_occurred = 1;
 +        }
 +    }
 +
      if (h->pps.cabac) {
          /* realign */
          align_get_bits(&sl->gb);
  
          /* init cabac */
 -        ff_init_cabac_decoder(&sl->cabac,
 +        ret = ff_init_cabac_decoder(&sl->cabac,
                                sl->gb.buffer + get_bits_count(&sl->gb) / 8,
                                (get_bits_left(&sl->gb) + 7) / 8);
 +        if (ret < 0)
 +            return ret;
  
          ff_h264_init_cabac_states(h, sl);
  
          for (;;) {
              // START_TIMER
              int ret, eos;
 -
              if (sl->mb_x + sl->mb_y * h->mb_width >= sl->next_slice_idx) {
                  av_log(h->avctx, AV_LOG_ERROR, "Slice overlaps with next at %d\n",
                         sl->next_slice_idx);
 +                er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
 +                             sl->mb_y, ER_MB_ERROR);
                  return AVERROR_INVALIDDATA;
              }
  
                      loop_filter(h, sl, lf_x_start, sl->mb_x + 1);
                  return 0;
              }
 -            if (ret < 0 || sl->cabac.bytestream > sl->cabac.bytestream_end + 2) {
 +            if (sl->cabac.bytestream > sl->cabac.bytestream_end + 2 )
 +                av_log(h->avctx, AV_LOG_DEBUG, "bytestream overread %"PTRDIFF_SPECIFIER"\n", sl->cabac.bytestream_end - sl->cabac.bytestream);
 +            if (ret < 0 || sl->cabac.bytestream > sl->cabac.bytestream_end + 4) {
                  av_log(h->avctx, AV_LOG_ERROR,
 -                       "error while decoding MB %d %d, bytestream %td\n",
 +                       "error while decoding MB %d %d, bytestream %"PTRDIFF_SPECIFIER"\n",
                         sl->mb_x, sl->mb_y,
                         sl->cabac.bytestream_end - sl->cabac.bytestream);
                  er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
              if (sl->mb_x + sl->mb_y * h->mb_width >= sl->next_slice_idx) {
                  av_log(h->avctx, AV_LOG_ERROR, "Slice overlaps with next at %d\n",
                         sl->next_slice_idx);
 +                er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y, sl->mb_x,
 +                             sl->mb_y, ER_MB_ERROR);
                  return AVERROR_INVALIDDATA;
              }
  
                      ff_tlog(h->avctx, "slice end %d %d\n",
                              get_bits_count(&sl->gb), sl->gb.size_in_bits);
  
 -                    if (get_bits_left(&sl->gb) == 0) {
 +                    if (   get_bits_left(&sl->gb) == 0
 +                        || get_bits_left(&sl->gb) > 0 && !(h->avctx->err_recognition & AV_EF_AGGRESSIVE)) {
                          er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
                                       sl->mb_x - 1, sl->mb_y, ER_MB_END);
  
                          return 0;
                      } else {
                          er_add_slice(sl, sl->resync_mb_x, sl->resync_mb_y,
 -                                     sl->mb_x - 1, sl->mb_y, ER_MB_END);
 +                                     sl->mb_x, sl->mb_y, ER_MB_END);
  
                          return AVERROR_INVALIDDATA;
                      }
@@@ -2537,15 -2265,7 +2538,15 @@@ int ff_h264_execute_decode_slices(H264C
      H264SliceContext *sl;
      int i, j;
  
 -    if (h->avctx->hwaccel)
 +    av_assert0(context_count && h->slice_ctx[context_count - 1].mb_y < h->mb_height);
 +
 +    h->slice_ctx[0].next_slice_idx = INT_MAX;
 +
 +    if (h->avctx->hwaccel
 +#if FF_API_CAP_VDPAU
 +        || h->avctx->codec->capabilities & AV_CODEC_CAP_HWACCEL_VDPAU
 +#endif
 +        )
          return 0;
      if (context_count == 1) {
          int ret;
          h->mb_y = h->slice_ctx[0].mb_y;
          return ret;
      } else {
 +        av_assert0(context_count > 0);
          for (i = 0; i < context_count; i++) {
              int next_slice_idx = h->mb_width * h->mb_height;
              int slice_idx;
  
              sl                 = &h->slice_ctx[i];
 -            sl->er.error_count = 0;
 +            if (CONFIG_ERROR_RESILIENCE) {
 +                sl->er.error_count = 0;
 +            }
  
              /* make sure none of those slices overlap */
              slice_idx = sl->mb_y * h->mb_width + sl->mb_x;
          /* pull back stuff from slices to master context */
          sl                   = &h->slice_ctx[context_count - 1];
          h->mb_y              = sl->mb_y;
 -        for (i = 1; i < context_count; i++)
 -            h->slice_ctx[0].er.error_count += h->slice_ctx[i].er.error_count;
 +        if (CONFIG_ERROR_RESILIENCE) {
 +            for (i = 1; i < context_count; i++)
 +                h->slice_ctx[0].er.error_count += h->slice_ctx[i].er.error_count;
 +        }
      }
  
      return 0;
diff --combined libavcodec/vaapi_h264.c
index ded2cb3d49146b52f141ac6bd05f3cef3ee31f47,431a428b476f7dbade5e3740d52f2898e152cf59..b02473851e248e4a792641e103da0d28bb0c6801
@@@ -3,20 -3,20 +3,20 @@@
   *
   * Copyright (C) 2008-2009 Splitted-Desktop Systems
   *
 - * This file is part of Libav.
 + * This file is part of FFmpeg.
   *
 - * Libav is free software; you can redistribute it and/or
 + * FFmpeg is free software; you can redistribute it and/or
   * modify it under the terms of the GNU Lesser General Public
   * License as published by the Free Software Foundation; either
   * version 2.1 of the License, or (at your option) any later version.
   *
 - * Libav is distributed in the hope that it will be useful,
 + * FFmpeg is distributed in the hope that it will be useful,
   * but WITHOUT ANY WARRANTY; without even the implied warranty of
   * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   * Lesser General Public License for more details.
   *
   * You should have received a copy of the GNU Lesser General Public
 - * License along with Libav; if not, write to the Free Software
 + * License along with FFmpeg; if not, write to the Free Software
   * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
   */
  
@@@ -26,7 -26,7 +26,7 @@@
  
  /**
   * @file
 - * This file implements the glue code between Libav's and VA API's
 + * This file implements the glue code between FFmpeg's and VA API's
   * structures for H.264 decoding.
   */
  
@@@ -44,10 -44,10 +44,10 @@@ static void init_vaapi_pic(VAPictureH26
  }
  
  /**
 - * Translate an Libav Picture into its VA API form.
 + * Translate an FFmpeg Picture into its VA API form.
   *
   * @param[out] va_pic          A pointer to VA API's own picture struct
 - * @param[in]  pic             A pointer to the Libav picture struct to convert
 + * @param[in]  pic             A pointer to the FFmpeg picture struct to convert
   * @param[in]  pic_structure   The picture field type (as defined in mpegvideo.h),
   *                             supersedes pic's field type if nonzero.
   */
@@@ -148,11 -148,11 +148,11 @@@ static int fill_vaapi_ReferenceFrames(V
  }
  
  /**
 - * Fill in VA API reference picture lists from the Libav reference
 + * Fill in VA API reference picture lists from the FFmpeg reference
   * picture list.
   *
   * @param[out] RefPicList  VA API internal reference picture list
 - * @param[in]  ref_list    A pointer to the Libav reference list
 + * @param[in]  ref_list    A pointer to the FFmpeg reference list
   * @param[in]  ref_count   The number of reference pictures in ref_list
   */
  static void fill_vaapi_RefPicList(VAPictureH264 RefPicList[32],
      unsigned int i, n = 0;
      for (i = 0; i < ref_count; i++)
          if (ref_list[i].reference)
 -            fill_vaapi_pic(&RefPicList[n++], ref_list[i].parent, 0);
 +            fill_vaapi_pic(&RefPicList[n++], ref_list[i].parent,
 +                           ref_list[i].reference);
  
      for (; n < 32; n++)
          init_vaapi_pic(&RefPicList[n]);
@@@ -196,25 -195,25 +196,25 @@@ static void fill_vaapi_plain_pred_weigh
      H264SliceContext *sl = &h->slice_ctx[0];
      unsigned int i, j;
  
-     *luma_weight_flag    = sl->luma_weight_flag[list];
-     *chroma_weight_flag  = sl->chroma_weight_flag[list];
+     *luma_weight_flag    = sl->pwt.luma_weight_flag[list];
+     *chroma_weight_flag  = sl->pwt.chroma_weight_flag[list];
  
      for (i = 0; i < sl->ref_count[list]; i++) {
          /* VA API also wants the inferred (default) values, not
             only what is available in the bitstream (7.4.3.2). */
-         if (sl->luma_weight_flag[list]) {
-             luma_weight[i] = sl->luma_weight[i][list][0];
-             luma_offset[i] = sl->luma_weight[i][list][1];
+         if (sl->pwt.luma_weight_flag[list]) {
+             luma_weight[i] = sl->pwt.luma_weight[i][list][0];
+             luma_offset[i] = sl->pwt.luma_weight[i][list][1];
          } else {
-             luma_weight[i] = 1 << sl->luma_log2_weight_denom;
+             luma_weight[i] = 1 << sl->pwt.luma_log2_weight_denom;
              luma_offset[i] = 0;
          }
          for (j = 0; j < 2; j++) {
-             if (sl->chroma_weight_flag[list]) {
-                 chroma_weight[i][j] = sl->chroma_weight[i][list][j][0];
-                 chroma_offset[i][j] = sl->chroma_weight[i][list][j][1];
+             if (sl->pwt.chroma_weight_flag[list]) {
+                 chroma_weight[i][j] = sl->pwt.chroma_weight[i][list][j][0];
+                 chroma_offset[i][j] = sl->pwt.chroma_weight[i][list][j][1];
              } else {
-                 chroma_weight[i][j] = 1 << sl->chroma_log2_weight_denom;
+                 chroma_weight[i][j] = 1 << sl->pwt.chroma_log2_weight_denom;
                  chroma_offset[i][j] = 0;
              }
          }
@@@ -227,7 -226,7 +227,7 @@@ static int vaapi_h264_start_frame(AVCod
                                    av_unused uint32_t       size)
  {
      H264Context * const h = avctx->priv_data;
 -    struct vaapi_context * const vactx = avctx->hwaccel_context;
 +    FFVAContext * const vactx = ff_vaapi_get_context(avctx);
      VAPictureParameterBufferH264 *pic_param;
      VAIQMatrixBufferH264 *iq_matrix;
  
      pic_param->seq_fields.bits.delta_pic_order_always_zero_flag = h->sps.delta_pic_order_always_zero_flag;
      pic_param->num_slice_groups_minus1                          = h->pps.slice_group_count - 1;
      pic_param->slice_group_map_type                             = h->pps.mb_slice_group_map_type;
 -    pic_param->slice_group_change_rate_minus1                   = 0; /* XXX: unimplemented in Libav */
 +    pic_param->slice_group_change_rate_minus1                   = 0; /* XXX: unimplemented in FFmpeg */
      pic_param->pic_init_qp_minus26                              = h->pps.init_qp - 26;
      pic_param->pic_init_qs_minus26                              = h->pps.init_qs - 26;
      pic_param->chroma_qp_index_offset                           = h->pps.chroma_qp_index_offset[0];
  /** End a hardware decoding based frame. */
  static int vaapi_h264_end_frame(AVCodecContext *avctx)
  {
 -    struct vaapi_context * const vactx = avctx->hwaccel_context;
 +    FFVAContext * const vactx = ff_vaapi_get_context(avctx);
      H264Context * const h = avctx->priv_data;
      H264SliceContext *sl = &h->slice_ctx[0];
      int ret;
@@@ -318,7 -317,6 +318,7 @@@ static int vaapi_h264_decode_slice(AVCo
                                     const uint8_t  *buffer,
                                     uint32_t        size)
  {
 +    FFVAContext * const vactx = ff_vaapi_get_context(avctx);
      H264Context * const h = avctx->priv_data;
      H264SliceContext *sl  = &h->slice_ctx[0];
      VASliceParameterBufferH264 *slice_param;
              buffer, size);
  
      /* Fill in VASliceParameterBufferH264. */
 -    slice_param = (VASliceParameterBufferH264 *)ff_vaapi_alloc_slice(avctx->hwaccel_context, buffer, size);
 +    slice_param = (VASliceParameterBufferH264 *)ff_vaapi_alloc_slice(vactx, buffer, size);
      if (!slice_param)
          return -1;
      slice_param->slice_data_bit_offset          = get_bits_count(&sl->gb) + 8; /* bit buffer started beyond nal_unit_type */
      slice_param->disable_deblocking_filter_idc  = sl->deblocking_filter < 2 ? !sl->deblocking_filter : sl->deblocking_filter;
      slice_param->slice_alpha_c0_offset_div2     = sl->slice_alpha_c0_offset / 2;
      slice_param->slice_beta_offset_div2         = sl->slice_beta_offset     / 2;
-     slice_param->luma_log2_weight_denom         = sl->luma_log2_weight_denom;
-     slice_param->chroma_log2_weight_denom       = sl->chroma_log2_weight_denom;
+     slice_param->luma_log2_weight_denom         = sl->pwt.luma_log2_weight_denom;
+     slice_param->chroma_log2_weight_denom       = sl->pwt.chroma_log2_weight_denom;
  
      fill_vaapi_RefPicList(slice_param->RefPicList0, sl->ref_list[0], sl->list_count > 0 ? sl->ref_count[0] : 0);
      fill_vaapi_RefPicList(slice_param->RefPicList1, sl->ref_list[1], sl->list_count > 1 ? sl->ref_count[1] : 0);
@@@ -364,7 -362,4 +364,7 @@@ AVHWAccel ff_h264_vaapi_hwaccel = 
      .start_frame    = vaapi_h264_start_frame,
      .end_frame      = vaapi_h264_end_frame,
      .decode_slice   = vaapi_h264_decode_slice,
 +    .init           = ff_vaapi_context_init,
 +    .uninit         = ff_vaapi_context_fini,
 +    .priv_data_size = sizeof(FFVAContext),
  };
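
With the init/uninit/priv_data_size members added above, the VAAPI glue stops reaching into avctx->hwaccel_context and instead lets the hwaccel framework allocate an FFVAContext per codec instance, retrieved inside each callback via ff_vaapi_get_context() as the earlier hunks show. A sketch of the same wiring for a hypothetical decoder (the example_* names are illustrative; the ff_vaapi_* symbols and fields are the ones used in this file):

    static int example_start_frame(AVCodecContext *avctx,
                                   const uint8_t *buffer, uint32_t size)
    {
        FFVAContext * const vactx = ff_vaapi_get_context(avctx);
        /* ... fill picture / IQ-matrix parameters through vactx ... */
        return 0;
    }

    AVHWAccel ff_example_vaapi_hwaccel = {
        /* .name, .type, .id, .pix_fmt and the remaining callbacks omitted */
        .start_frame    = example_start_frame,
        .init           = ff_vaapi_context_init,
        .uninit         = ff_vaapi_context_fini,
        .priv_data_size = sizeof(FFVAContext),
    };
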