X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=libavcodec%2Fvc1.c;h=6edf746a2f829256790f7892f6b81ad9064fb455;hb=d11f9e1b15e3aac6862bdfe2e5cb212b706e2c2f;hp=86ca7b978bcdb962d8d2a739ca438f097d72a296;hpb=95e3fdb846847dd91d3a04bebbdefcab48813d83;p=ffmpeg

diff --git a/libavcodec/vc1.c b/libavcodec/vc1.c
index 86ca7b978bc..6edf746a2f8 100644
--- a/libavcodec/vc1.c
+++ b/libavcodec/vc1.c
@@ -400,7 +400,7 @@ static void vc1_mc_1mv(VC1Context *v, int dir)
     my = s->mv[dir][0][1];
 
     // store motion vectors for further use in B frames
-    if(s->pict_type == P_TYPE) {
+    if(s->pict_type == FF_P_TYPE) {
         s->current_picture.motion_val[1][s->block_index[0]][0] = mx;
         s->current_picture.motion_val[1][s->block_index[0]][1] = my;
     }
@@ -783,8 +783,8 @@ static int decode_sequence_header(AVCodecContext *avctx, GetBitContext *gb)
     }
     else
     {
-        v->zz_8x4 = ff_vc1_simple_progressive_8x4_zz;
-        v->zz_4x8 = ff_vc1_simple_progressive_4x8_zz;
+        v->zz_8x4 = wmv2_scantableA;
+        v->zz_4x8 = wmv2_scantableB;
         v->res_sm = get_bits(gb, 2); //reserved
         if (v->res_sm)
         {
@@ -1041,26 +1041,26 @@ static int vc1_parse_frame_header(VC1Context *v, GetBitContext* gb)
     v->s.pict_type = get_bits1(gb);
     if (v->s.avctx->max_b_frames) {
         if (!v->s.pict_type) {
-            if (get_bits1(gb)) v->s.pict_type = I_TYPE;
-            else v->s.pict_type = B_TYPE;
-        } else v->s.pict_type = P_TYPE;
-    } else v->s.pict_type = v->s.pict_type ? P_TYPE : I_TYPE;
+            if (get_bits1(gb)) v->s.pict_type = FF_I_TYPE;
+            else v->s.pict_type = FF_B_TYPE;
+        } else v->s.pict_type = FF_P_TYPE;
+    } else v->s.pict_type = v->s.pict_type ? FF_P_TYPE : FF_I_TYPE;
 
     v->bi_type = 0;
-    if(v->s.pict_type == B_TYPE) {
+    if(v->s.pict_type == FF_B_TYPE) {
         v->bfraction = get_vlc2(gb, ff_vc1_bfraction_vlc.table, VC1_BFRACTION_VLC_BITS, 1);
         v->bfraction = ff_vc1_bfraction_lut[v->bfraction];
         if(v->bfraction == 0) {
-            v->s.pict_type = BI_TYPE;
+            v->s.pict_type = FF_BI_TYPE;
         }
     }
-    if(v->s.pict_type == I_TYPE || v->s.pict_type == BI_TYPE)
+    if(v->s.pict_type == FF_I_TYPE || v->s.pict_type == FF_BI_TYPE)
         skip_bits(gb, 7); // skip buffer fullness
 
     /* calculate RND */
-    if(v->s.pict_type == I_TYPE || v->s.pict_type == BI_TYPE)
+    if(v->s.pict_type == FF_I_TYPE || v->s.pict_type == FF_BI_TYPE)
         v->rnd = 1;
-    if(v->s.pict_type == P_TYPE)
+    if(v->s.pict_type == FF_P_TYPE)
         v->rnd ^= 1;
 
     /* Quantizer stuff */
@@ -1092,18 +1092,18 @@ static int vc1_parse_frame_header(VC1Context *v, GetBitContext* gb)
         if (v->postprocflag) v->postproc = get_bits1(gb);
     } else
-        if (v->multires && v->s.pict_type != B_TYPE) v->respic = get_bits(gb, 2);
+        if (v->multires && v->s.pict_type != FF_B_TYPE) v->respic = get_bits(gb, 2);
 
-    if(v->res_x8 && (v->s.pict_type == I_TYPE || v->s.pict_type == BI_TYPE)){
+    if(v->res_x8 && (v->s.pict_type == FF_I_TYPE || v->s.pict_type == FF_BI_TYPE)){
         v->x8_type = get_bits1(gb);
     }else v->x8_type = 0;
 
 //av_log(v->s.avctx, AV_LOG_INFO, "%c Frame: QP=[%i]%i (+%i/2) %i\n",
-//        (v->s.pict_type == P_TYPE) ? 'P' : ((v->s.pict_type == I_TYPE) ? 'I' : 'B'), pqindex, v->pq, v->halfpq, v->rangeredfrm);
+//        (v->s.pict_type == FF_P_TYPE) ? 'P' : ((v->s.pict_type == FF_I_TYPE) ? 'I' : 'B'), pqindex, v->pq, v->halfpq, v->rangeredfrm);
 
-    if(v->s.pict_type == I_TYPE || v->s.pict_type == P_TYPE) v->use_ic = 0;
+    if(v->s.pict_type == FF_I_TYPE || v->s.pict_type == FF_P_TYPE) v->use_ic = 0;
 
     switch(v->s.pict_type) {
-    case P_TYPE:
+    case FF_P_TYPE:
         if (v->pq < 5) v->tt_index = 0;
         else if(v->pq < 13) v->tt_index = 1;
         else v->tt_index = 2;
@@ -1186,7 +1186,7 @@ static int vc1_parse_frame_header(VC1Context *v, GetBitContext* gb)
             v->ttfrm = TT_8X8;
         }
         break;
-    case B_TYPE:
+    case FF_B_TYPE:
         if (v->pq < 5) v->tt_index = 0;
         else if(v->pq < 13) v->tt_index = 1;
         else v->tt_index = 2;
@@ -1233,7 +1233,7 @@ static int vc1_parse_frame_header(VC1Context *v, GetBitContext* gb)
     {
         /* AC Syntax */
         v->c_ac_table_index = decode012(gb);
-        if (v->s.pict_type == I_TYPE || v->s.pict_type == BI_TYPE)
+        if (v->s.pict_type == FF_I_TYPE || v->s.pict_type == FF_BI_TYPE)
         {
             v->y_ac_table_index = decode012(gb);
         }
@@ -1241,8 +1241,8 @@ static int vc1_parse_frame_header(VC1Context *v, GetBitContext* gb)
         v->s.dc_table_index = get_bits1(gb);
     }
 
-    if(v->s.pict_type == BI_TYPE) {
-        v->s.pict_type = B_TYPE;
+    if(v->s.pict_type == FF_BI_TYPE) {
+        v->s.pict_type = FF_B_TYPE;
         v->bi_type = 1;
     }
     return 0;
@@ -1261,19 +1261,19 @@ static int vc1_parse_frame_header_adv(VC1Context *v, GetBitContext* gb)
     }
     switch(get_unary(gb, 0, 4)) {
     case 0:
-        v->s.pict_type = P_TYPE;
+        v->s.pict_type = FF_P_TYPE;
         break;
     case 1:
-        v->s.pict_type = B_TYPE;
+        v->s.pict_type = FF_B_TYPE;
         break;
     case 2:
-        v->s.pict_type = I_TYPE;
+        v->s.pict_type = FF_I_TYPE;
         break;
     case 3:
-        v->s.pict_type = BI_TYPE;
+        v->s.pict_type = FF_BI_TYPE;
         break;
     case 4:
-        v->s.pict_type = P_TYPE; // skipped pic
+        v->s.pict_type = FF_P_TYPE; // skipped pic
         v->p_frame_skipped = 1;
         return 0;
     }
@@ -1294,11 +1294,11 @@ static int vc1_parse_frame_header_adv(VC1Context *v, GetBitContext* gb)
     if(v->interlace)
         v->uvsamp = get_bits1(gb);
     if(v->finterpflag) v->interpfrm = get_bits1(gb);
-    if(v->s.pict_type == B_TYPE) {
+    if(v->s.pict_type == FF_B_TYPE) {
         v->bfraction = get_vlc2(gb, ff_vc1_bfraction_vlc.table, VC1_BFRACTION_VLC_BITS, 1);
         v->bfraction = ff_vc1_bfraction_lut[v->bfraction];
         if(v->bfraction == 0) {
-            v->s.pict_type = BI_TYPE; /* XXX: should not happen here */
+            v->s.pict_type = FF_BI_TYPE; /* XXX: should not happen here */
         }
     }
     pqindex = get_bits(gb, 5);
@@ -1320,11 +1320,11 @@ static int vc1_parse_frame_header_adv(VC1Context *v, GetBitContext* gb)
     if (v->quantizer_mode == QUANT_FRAME_EXPLICIT)
         v->pquantizer = get_bits1(gb);
 
-    if(v->s.pict_type == I_TYPE || v->s.pict_type == P_TYPE) v->use_ic = 0;
+    if(v->s.pict_type == FF_I_TYPE || v->s.pict_type == FF_P_TYPE) v->use_ic = 0;
 
     switch(v->s.pict_type) {
-    case I_TYPE:
-    case BI_TYPE:
+    case FF_I_TYPE:
+    case FF_BI_TYPE:
         status = bitplane_decoding(v->acpred_plane, &v->acpred_is_raw, v);
         if (status < 0) return -1;
         av_log(v->s.avctx, AV_LOG_DEBUG, "ACPRED plane encoding: "
@@ -1340,7 +1340,7 @@ static int vc1_parse_frame_header_adv(VC1Context *v, GetBitContext* gb)
             }
         }
         break;
-    case P_TYPE:
+    case FF_P_TYPE:
         if(v->postprocflag)
             v->postproc = get_bits1(gb);
         if (v->extended_mv) v->mvrange = get_unary(gb, 0, 3);
@@ -1431,7 +1431,7 @@ static int vc1_parse_frame_header_adv(VC1Context *v, GetBitContext* gb)
             v->ttfrm = TT_8X8;
         }
         break;
-    case B_TYPE:
+    case FF_B_TYPE:
         if(v->postprocflag)
             v->postproc = get_bits1(gb);
         if (v->extended_mv) v->mvrange = get_unary(gb, 0, 3);
@@ -1485,20 +1485,20 @@ static int vc1_parse_frame_header_adv(VC1Context *v, GetBitContext* gb)
 
     /* AC Syntax */
     v->c_ac_table_index = decode012(gb);
-    if (v->s.pict_type == I_TYPE || v->s.pict_type == BI_TYPE)
+    if (v->s.pict_type == FF_I_TYPE || v->s.pict_type == FF_BI_TYPE)
     {
         v->y_ac_table_index = decode012(gb);
     }
     /* DC Syntax */
     v->s.dc_table_index = get_bits1(gb);
-    if ((v->s.pict_type == I_TYPE || v->s.pict_type == BI_TYPE) && v->dquant) {
+    if ((v->s.pict_type == FF_I_TYPE || v->s.pict_type == FF_BI_TYPE) && v->dquant) {
         av_log(v->s.avctx, AV_LOG_DEBUG, "VOP DQuant info\n");
         vop_dquant_decoding(v);
     }
 
     v->bi_type = 0;
-    if(v->s.pict_type == BI_TYPE) {
-        v->s.pict_type = B_TYPE;
+    if(v->s.pict_type == FF_BI_TYPE) {
+        v->s.pict_type = FF_B_TYPE;
         v->bi_type = 1;
     }
     return 0;
@@ -2398,11 +2398,11 @@ static int vc1_decode_i_block(VC1Context *v, DCTELEM block[64], int n, int coded
 
         if(v->s.ac_pred) {
             if(!dc_pred_dir)
-                zz_table = ff_vc1_horizontal_zz;
+                zz_table = wmv1_scantable[2];
            else
-                zz_table = ff_vc1_vertical_zz;
+                zz_table = wmv1_scantable[3];
         } else
-            zz_table = ff_vc1_normal_zz;
+            zz_table = wmv1_scantable[1];
 
         ac_val = s->ac_val[0][0] + s->block_index[n] * 16;
         ac_val2 = ac_val;
@@ -2581,11 +2581,11 @@ static int vc1_decode_i_block_adv(VC1Context *v, DCTELEM block[64], int n, int c
 
         if(v->s.ac_pred) {
             if(!dc_pred_dir)
-                zz_table = ff_vc1_horizontal_zz;
+                zz_table = wmv1_scantable[2];
            else
-                zz_table = ff_vc1_vertical_zz;
+                zz_table = wmv1_scantable[3];
         } else
-            zz_table = ff_vc1_normal_zz;
+            zz_table = wmv1_scantable[1];
 
         while (!last) {
             vc1_decode_ac_coeff(v, &last, &skip, &value, codingset);
@@ -2786,7 +2786,7 @@ static int vc1_decode_intra_block(VC1Context *v, DCTELEM block[64], int n, int c
         const int8_t *zz_table;
         int k;
 
-        zz_table = ff_vc1_simple_progressive_8x8_zz;
+        zz_table = wmv1_scantable[0];
 
         while (!last) {
             vc1_decode_ac_coeff(v, &last, &skip, &value, codingset);
@@ -2928,7 +2928,7 @@ static int vc1_decode_p_block(VC1Context *v, DCTELEM block[64], int n, int mquan
             i += skip;
             if(i > 63)
                 break;
-            idx = ff_vc1_simple_progressive_8x8_zz[i++];
+            idx = wmv1_scantable[0][i++];
             block[idx] = value * scale;
             if(!v->pquantizer)
                 block[idx] += (block[idx] < 0) ? -mquant : mquant;
@@ -3017,7 +3017,7 @@ static int vc1_decode_p_mb(VC1Context *v)
                      offset_table[6] = { 0, 1, 3, 7, 15, 31 };
     int mb_has_coeffs = 1; /* last_flag */
     int dmv_x, dmv_y; /* Differential MV components */
-    int index, index1; /* LUT indices */
+    int index, index1; /* LUT indexes */
     int val, sign; /* temp values */
     int first_block = 1;
     int dst_idx, off;
@@ -3246,7 +3246,7 @@ static void vc1_decode_b_mb(VC1Context *v)
     static const int size_table[6] = { 0, 2, 3, 4, 5, 8 },
                      offset_table[6] = { 0, 1, 3, 7, 15, 31 };
     int mb_has_coeffs = 0; /* last_flag */
-    int index, index1; /* LUT indices */
+    int index, index1; /* LUT indexes */
     int val, sign; /* temp values */
     int first_block = 1;
     int dst_idx, off;
@@ -3736,7 +3736,7 @@ static void vc1_decode_skip_blocks(VC1Context *v)
         ff_draw_horiz_band(s, s->mb_y * 16, 16);
         s->first_slice_line = 0;
     }
-    s->pict_type = P_TYPE;
+    s->pict_type = FF_P_TYPE;
 }
 
 static void vc1_decode_blocks(VC1Context *v)
@@ -3748,19 +3748,19 @@ static void vc1_decode_blocks(VC1Context *v)
     {
     }else{
         switch(v->s.pict_type) {
-        case I_TYPE:
+        case FF_I_TYPE:
            if(v->profile == PROFILE_ADVANCED)
                vc1_decode_i_blocks_adv(v);
            else
                vc1_decode_i_blocks(v);
            break;
-        case P_TYPE:
+        case FF_P_TYPE:
            if(v->p_frame_skipped)
                vc1_decode_skip_blocks(v);
            else
                vc1_decode_p_blocks(v);
            break;
-        case B_TYPE:
+        case FF_B_TYPE:
            if(v->bi_type){
                if(v->profile == PROFILE_ADVANCED)
                    vc1_decode_i_blocks_adv(v);
@@ -3776,7 +3776,7 @@ static void vc1_decode_blocks(VC1Context *v)
 /** Find VC-1 marker in buffer
  * @return position where next marker starts or end of buffer if no marker found
  */
-static av_always_inline uint8_t* find_next_marker(uint8_t *src, uint8_t *end)
+static av_always_inline const uint8_t* find_next_marker(const uint8_t *src, const uint8_t *end)
 {
     uint32_t mrk = 0xFFFFFFFF;
 
@@ -3789,7 +3789,7 @@ static av_always_inline uint8_t* find_next_marker(uint8_t *src, uint8_t *end)
     return end;
 }
 
-static av_always_inline int vc1_unescape_buffer(uint8_t *src, int size, uint8_t *dst)
+static av_always_inline int vc1_unescape_buffer(const uint8_t *src, int size, uint8_t *dst)
 {
     int dsize = 0, i;
 
@@ -3812,7 +3812,7 @@ static av_always_inline int vc1_unescape_buffer(uint8_t *src, int size, uint8_t
  * @todo TODO: Handle VC-1 IDUs (Transport level?)
  * @todo TODO: Decypher remaining bits in extra_data
  */
-static int vc1_decode_init(AVCodecContext *avctx)
+static av_cold int vc1_decode_init(AVCodecContext *avctx)
 {
     VC1Context *v = avctx->priv_data;
     MpegEncContext *s = &v->s;
@@ -3862,10 +3862,12 @@ static int vc1_decode_init(AVCodecContext *avctx)
             av_log(avctx, AV_LOG_INFO, "Read %i bits in overflow\n", -count);
         }
     } else { // VC1/WVC1
-        uint8_t *start = avctx->extradata, *end = avctx->extradata + avctx->extradata_size;
-        uint8_t *next; int size, buf2_size;
+        const uint8_t *start = avctx->extradata;
+        uint8_t *end = avctx->extradata + avctx->extradata_size;
+        const uint8_t *next;
+        int size, buf2_size;
         uint8_t *buf2 = NULL;
-        int seq_inited = 0, ep_inited = 0;
+        int seq_initialized = 0, ep_initialized = 0;
 
         if(avctx->extradata_size < 16) {
             av_log(avctx, AV_LOG_ERROR, "Extradata size too small: %i\n", avctx->extradata_size);
@@ -3887,19 +3889,19 @@ static int vc1_decode_init(AVCodecContext *avctx)
                     av_free(buf2);
                     return -1;
                 }
-                seq_inited = 1;
+                seq_initialized = 1;
                 break;
             case VC1_CODE_ENTRYPOINT:
                 if(decode_entry_point(avctx, &gb) < 0){
                     av_free(buf2);
                     return -1;
                 }
-                ep_inited = 1;
+                ep_initialized = 1;
                 break;
             }
         }
         av_free(buf2);
-        if(!seq_inited || !ep_inited){
+        if(!seq_initialized || !ep_initialized){
            av_log(avctx, AV_LOG_ERROR, "Incomplete extradata\n");
            return -1;
        }
@@ -3941,7 +3943,7 @@ static int vc1_decode_init(AVCodecContext *avctx)
  */
 static int vc1_decode_frame(AVCodecContext *avctx,
                             void *data, int *data_size,
-                            uint8_t *buf, int buf_size)
+                            const uint8_t *buf, int buf_size)
 {
     VC1Context *v = avctx->priv_data;
     MpegEncContext *s = &v->s;
@@ -3974,7 +3976,7 @@ static int vc1_decode_frame(AVCodecContext *avctx,
         buf2 = av_mallocz(buf_size + FF_INPUT_BUFFER_PADDING_SIZE);
 
         if(IS_MARKER(AV_RB32(buf))){ /* frame starts with marker and needs to be parsed */
-            uint8_t *start, *end, *next;
+            const uint8_t *start, *end, *next;
             int size;
 
             next = buf;
@@ -3998,11 +4000,12 @@ static int vc1_decode_frame(AVCodecContext *avctx,
                 }
             }
         }else if(v->interlace && ((buf[0] & 0xC0) == 0xC0)){ /* WVC1 interlaced stores both fields divided by marker */
-            uint8_t *divider;
+            const uint8_t *divider;
 
             divider = find_next_marker(buf, buf + buf_size);
             if((divider == (buf + buf_size)) || AV_RB32(divider) != VC1_CODE_FIELD){
                 av_log(avctx, AV_LOG_ERROR, "Error in WVC1 interlaced frame\n");
+                av_free(buf2);
                 return -1;
             }
 
@@ -4028,24 +4031,24 @@ static int vc1_decode_frame(AVCodecContext *avctx,
         }
     }
 
-    if(s->pict_type != I_TYPE && !v->res_rtm_flag){
+    if(s->pict_type != FF_I_TYPE && !v->res_rtm_flag){
         av_free(buf2);
         return -1;
     }
 
     // for hurry_up==5
     s->current_picture.pict_type= s->pict_type;
-    s->current_picture.key_frame= s->pict_type == I_TYPE;
+    s->current_picture.key_frame= s->pict_type == FF_I_TYPE;
 
    /* skip B-frames if we don't have reference frames */
-    if(s->last_picture_ptr==NULL && (s->pict_type==B_TYPE || s->dropable)){
+    if(s->last_picture_ptr==NULL && (s->pict_type==FF_B_TYPE || s->dropable)){
         av_free(buf2);
         return -1;//buf_size;
     }
    /* skip b frames if we are in a hurry */
-    if(avctx->hurry_up && s->pict_type==B_TYPE) return -1;//buf_size;
-    if( (avctx->skip_frame >= AVDISCARD_NONREF && s->pict_type==B_TYPE)
-       || (avctx->skip_frame >= AVDISCARD_NONKEY && s->pict_type!=I_TYPE)
+    if(avctx->hurry_up && s->pict_type==FF_B_TYPE) return -1;//buf_size;
+    if( (avctx->skip_frame >= AVDISCARD_NONREF && s->pict_type==FF_B_TYPE)
+       || (avctx->skip_frame >= AVDISCARD_NONKEY && s->pict_type!=FF_I_TYPE)
        || avctx->skip_frame >= AVDISCARD_ALL) {
         av_free(buf2);
         return buf_size;
@@ -4057,7 +4060,7 @@ static int vc1_decode_frame(AVCodecContext *avctx,
     }
     if(s->next_p_frame_damaged){
-        if(s->pict_type==B_TYPE)
+        if(s->pict_type==FF_B_TYPE)
             return buf_size;
         else
             s->next_p_frame_damaged=0;
     }
@@ -4084,7 +4087,7 @@ static int vc1_decode_frame(AVCodecContext *avctx,
 
 assert(s->current_picture.pict_type == s->current_picture_ptr->pict_type);
 assert(s->current_picture.pict_type == s->pict_type);
-    if (s->pict_type == B_TYPE || s->low_delay) {
+    if (s->pict_type == FF_B_TYPE || s->low_delay) {
         *pict= *(AVFrame*)s->current_picture_ptr;
     } else if (s->last_picture_ptr != NULL) {
         *pict= *(AVFrame*)s->last_picture_ptr;
@@ -4107,7 +4110,7 @@ assert(s->current_picture.pict_type == s->pict_type);
 /** Close a VC1/WMV3 decoder
  * @warning Initial try at using MpegEncContext stuff
  */
-static int vc1_decode_end(AVCodecContext *avctx)
+static av_cold int vc1_decode_end(AVCodecContext *avctx)
 {
     VC1Context *v = avctx->priv_data;
 
@@ -4119,6 +4122,7 @@ static int vc1_decode_end(AVCodecContext *avctx)
     av_freep(&v->acpred_plane);
     av_freep(&v->over_flags_plane);
     av_freep(&v->mb_type_base);
+    ff_intrax8_common_end(&v->x8);
     return 0;
 }
 
@@ -4133,7 +4137,8 @@ AVCodec vc1_decoder = {
     vc1_decode_end,
     vc1_decode_frame,
     CODEC_CAP_DELAY,
-    NULL
+    NULL,
+    .long_name = NULL_IF_CONFIG_SMALL("SMPTE VC-1"),
 };
 
 AVCodec wmv3_decoder = {
@@ -4146,5 +4151,6 @@ AVCodec wmv3_decoder = {
     vc1_decode_end,
     vc1_decode_frame,
     CODEC_CAP_DELAY,
-    NULL
+    NULL,
+    .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9"),
 };