2 * VC-1 and WMV3 decoder
3 * Copyright (c) 2011 Mashiat Sarker Shakkhar
4 * Copyright (c) 2006-2007 Konstantin Shishkov
5 * Partly based on vc9.c (c) 2005 Anonymous, Alex Beregszaszi, Michael Niedermayer
7 * This file is part of FFmpeg.
9 * FFmpeg is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Lesser General Public
11 * License as published by the Free Software Foundation; either
12 * version 2.1 of the License, or (at your option) any later version.
14 * FFmpeg is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Lesser General Public License for more details.
19 * You should have received a copy of the GNU Lesser General Public
20 * License along with FFmpeg; if not, write to the Free Software
21 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
26 * VC-1 and WMV3 decoder
35 #include "mpegvideo.h"
37 #include "msmpeg4data.h"
41 #include "libavutil/avassert.h"
44 #if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
/* Parameters parsed from a WMV3IMAGE/VC1IMAGE sprite bitstream: the warp
 * transform coefficients for up to two sprites plus optional "effect"
 * parameter lists.  NOTE(review): the coefficient array member (coefs,
 * referenced by vc1_parse_sprites/vc1_draw_sprites below) is in an elided
 * part of this view. */
typedef struct SpriteData {
    /**
     * Transform coefficients for both sprites in 16.16 fixed point format,
     * in the order they appear in the bitstream:
     */
    int effect_type, effect_flag;       // post-warp effect id and extra 1-bit flag
    int effect_pcount1, effect_pcount2; ///< amount of effect parameters stored in effect_params
    int effect_params1[15], effect_params2[10]; ///< effect parameters in 16.16 fixed point format
65 static inline int get_fp_val(GetBitContext* gb)
67 return (get_bits_long(gb, 30) - (1 << 29)) << 1;
/* Parse one sprite's warp transform into c[0..6] (16.16 fixed point).
 * A 2-bit selector chooses how many of the affine terms are coded; the
 * remaining slots keep whatever the caller pre-initialized them to.
 * NOTE(review): the case labels, breaks and default initialization lines
 * are elided in this view — layout below follows the visible statements. */
static void vc1_sprite_parse_transform(GetBitContext* gb, int c[7])
    switch (get_bits(gb, 2)) {
        /* selector 0: only the x translation term */
        c[2] = get_fp_val(gb);
        /* selector 1: uniform scale (same for x and y) plus x translation */
        c[0] = c[4] = get_fp_val(gb);
        c[2] = get_fp_val(gb);
        /* selector 2: independent x scale, x translation, y scale */
        c[0] = get_fp_val(gb);
        c[2] = get_fp_val(gb);
        c[4] = get_fp_val(gb);
        /* selector 3: full set including the off-diagonal (rotation) terms */
        c[0] = get_fp_val(gb);
        c[1] = get_fp_val(gb);
        c[2] = get_fp_val(gb);
        c[3] = get_fp_val(gb);
        c[4] = get_fp_val(gb);
    /* y translation, always coded */
    c[5] = get_fp_val(gb);
    /* c[6]: conditionally coded — guard line elided in this view */
        c[6] = get_fp_val(gb);
/* Parse sprite transforms and effect parameters for one or two sprites
 * (sd->coefs / sd->effect_*), logging everything in 16.16 fixed point.
 * Returns 0 on success or AVERROR_INVALIDDATA on overrun or bad counts.
 * NOTE(review): several braces, declarations and early-exit lines are
 * elided in this view. */
static int vc1_parse_sprites(VC1Context *v, GetBitContext* gb, SpriteData* sd)
    AVCodecContext *avctx = v->s.avctx;

    /* one transform per sprite: v->two_sprites selects 1 or 2 iterations */
    for (sprite = 0; sprite <= v->two_sprites; sprite++) {
        vc1_sprite_parse_transform(gb, sd->coefs[sprite]);
        if (sd->coefs[sprite][1] || sd->coefs[sprite][3])
            /* off-diagonal (rotation) terms are not implemented */
            avpriv_request_sample(avctx, "Non-zero rotation coefficients");
        av_log(avctx, AV_LOG_DEBUG, sprite ? "S2:" : "S1:");
        for (i = 0; i < 7; i++)
            /* print as integer.milli (3 fractional digits) */
            av_log(avctx, AV_LOG_DEBUG, " %d.%.3d",
                   sd->coefs[sprite][i] / (1<<16),
                   (abs(sd->coefs[sprite][i]) & 0xFFFF) * 1000 / (1 << 16));
        av_log(avctx, AV_LOG_DEBUG, "\n");

    /* NOTE(review): assignment inside the condition is intentional —
     * effect_type == 0 means no effect data follows. */
    if (sd->effect_type = get_bits_long(gb, 30)) {
        switch (sd->effect_pcount1 = get_bits(gb, 4)) {
            /* pcount 7: parameters form one transform */
            vc1_sprite_parse_transform(gb, sd->effect_params1);
            /* pcount 14: two transforms back to back */
            vc1_sprite_parse_transform(gb, sd->effect_params1);
            vc1_sprite_parse_transform(gb, sd->effect_params1 + 7);
            /* other counts: plain list of fixed-point values */
            for (i = 0; i < sd->effect_pcount1; i++)
                sd->effect_params1[i] = get_fp_val(gb);
        if (sd->effect_type != 13 || sd->effect_params1[0] != sd->coefs[0][6]) {
            // effect 13 is simple alpha blending and matches the opacity above
            av_log(avctx, AV_LOG_DEBUG, "Effect: %d; params: ", sd->effect_type);
            for (i = 0; i < sd->effect_pcount1; i++)
                av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
                       sd->effect_params1[i] / (1 << 16),
                       (abs(sd->effect_params1[i]) & 0xFFFF) * 1000 / (1 << 16));
            av_log(avctx, AV_LOG_DEBUG, "\n");

        sd->effect_pcount2 = get_bits(gb, 16);
        if (sd->effect_pcount2 > 10) {
            /* effect_params2[] only has room for 10 entries */
            av_log(avctx, AV_LOG_ERROR, "Too many effect parameters\n");
            return AVERROR_INVALIDDATA;
        } else if (sd->effect_pcount2) {
            av_log(avctx, AV_LOG_DEBUG, "Effect params 2: ");
            while (++i < sd->effect_pcount2) {
                sd->effect_params2[i] = get_fp_val(gb);
                av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
                       sd->effect_params2[i] / (1 << 16),
                       (abs(sd->effect_params2[i]) & 0xFFFF) * 1000 / (1 << 16));
            av_log(avctx, AV_LOG_DEBUG, "\n");

    /* NOTE(review): assignment inside the condition is intentional */
    if (sd->effect_flag = get_bits1(gb))
        av_log(avctx, AV_LOG_DEBUG, "Effect flag set\n");

    /* overrun check; WMV3IMAGE is granted 64 bits of slack */
    if (get_bits_count(gb) >= gb->size_in_bits +
        (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE ? 64 : 0)) {
        av_log(avctx, AV_LOG_ERROR, "Buffer overrun\n");
        return AVERROR_INVALIDDATA;
    if (get_bits_count(gb) < gb->size_in_bits - 8)
        av_log(avctx, AV_LOG_WARNING, "Buffer not fully read\n");
/* Composite one or two warped sprites into v->sprite_output_frame.
 * Horizontal scaling goes through per-sprite row caches (v->sr_rows,
 * indexed by sr_cache) so each source line is resampled at most once;
 * vertical blending/scaling is done by the vc1dsp sprite_v_* kernels.
 * NOTE(review): several guard lines, declarations (ysub, next_line) and
 * closing braces are elided in this view. */
static void vc1_draw_sprites(VC1Context *v, SpriteData* sd)
    int i, plane, row, sprite;
    /* which source line each cached resampled row holds; -1 = empty */
    int sr_cache[2][2] = { { -1, -1 }, { -1, -1 } };
    uint8_t* src_h[2][2];
    int xoff[2], xadv[2], yoff[2], yadv[2], alpha;
    MpegEncContext *s = &v->s;

    /* derive clipped start offsets and per-pixel advances (16.16) from the
     * parsed transform coefficients, per sprite */
    for (i = 0; i <= v->two_sprites; i++) {
        xoff[i] = av_clip(sd->coefs[i][2], 0, v->sprite_width-1 << 16);
        xadv[i] = sd->coefs[i][0];
        /* keep the advance from reading past the sprite's right edge */
        if (xadv[i] != 1<<16 || (v->sprite_width << 16) - (v->output_width << 16) - xoff[i])
            xadv[i] = av_clip(xadv[i], 0, ((v->sprite_width<<16) - xoff[i] - 1) / v->output_width);
        yoff[i] = av_clip(sd->coefs[i][5], 0, v->sprite_height-1 << 16);
        yadv[i] = av_clip(sd->coefs[i][4], 0, ((v->sprite_height << 16) - yoff[i]) / v->output_height);
    /* blend factor between the two sprites, from sprite 2's opacity coef */
    alpha = av_clip_uint16(sd->coefs[1][6]);

    /* luma only when decoding gray, otherwise all three planes */
    for (plane = 0; plane < (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++) {
        int width = v->output_width>>!!plane;   // chroma planes are half width

        for (row = 0; row < v->output_height>>!!plane; row++) {
            uint8_t *dst = v->sprite_output_frame->data[plane] +
                           v->sprite_output_frame->linesize[plane] * row;

            for (sprite = 0; sprite <= v->two_sprites; sprite++) {
                uint8_t *iplane = s->current_picture.f->data[plane];
                int iline = s->current_picture.f->linesize[plane];
                int ycoord = yoff[sprite] + yadv[sprite] * row;
                int yline = ycoord >> 16;       // integer source line
                ysub[sprite] = ycoord & 0xFFFF; // vertical sub-position
                /* second sprite reads from the previous (reference) picture
                 * — guard line elided in this view */
                    iplane = s->last_picture.f->data[plane];
                    iline = s->last_picture.f->linesize[plane];
                /* clamp to the last sprite line so yline+1 never overruns */
                next_line = FFMIN(yline + 1, (v->sprite_height >> !!plane) - 1) * iline;
                if (!(xoff[sprite] & 0xFFFF) && xadv[sprite] == 1 << 16) {
                    /* 1:1 horizontal mapping: point straight into the source */
                    src_h[sprite][0] = iplane + (xoff[sprite] >> 16) + yline * iline;
                        src_h[sprite][1] = iplane + (xoff[sprite] >> 16) + next_line;
                    /* scaled path: (re)fill the cached resampled rows */
                    if (sr_cache[sprite][0] != yline) {
                        if (sr_cache[sprite][1] == yline) {
                            /* reuse the other cache slot instead of resampling */
                            FFSWAP(uint8_t*, v->sr_rows[sprite][0], v->sr_rows[sprite][1]);
                            FFSWAP(int, sr_cache[sprite][0], sr_cache[sprite][1]);
                            v->vc1dsp.sprite_h(v->sr_rows[sprite][0], iplane + yline * iline, xoff[sprite], xadv[sprite], width);
                        sr_cache[sprite][0] = yline;
                    if (ysub[sprite] && sr_cache[sprite][1] != yline + 1) {
                        /* vertical interpolation also needs the next row */
                        v->vc1dsp.sprite_h(v->sr_rows[sprite][1],
                                           iplane + next_line, xoff[sprite],
                                           xadv[sprite], width);
                        sr_cache[sprite][1] = yline + 1;
                    src_h[sprite][0] = v->sr_rows[sprite][0];
                    src_h[sprite][1] = v->sr_rows[sprite][1];

            if (!v->two_sprites) {
                /* single sprite: vertical scale or plain copy */
                    v->vc1dsp.sprite_v_single(dst, src_h[0][0], src_h[0][1], ysub[0], width);
                    memcpy(dst, src_h[0][0], width);
                /* two sprites: pick the kernel matching which of them
                 * needs vertical interpolation, blending with alpha */
                if (ysub[0] && ysub[1]) {
                    v->vc1dsp.sprite_v_double_twoscale(dst, src_h[0][0], src_h[0][1], ysub[0],
                                                       src_h[1][0], src_h[1][1], ysub[1], alpha, width);
                } else if (ysub[0]) {
                    v->vc1dsp.sprite_v_double_onescale(dst, src_h[0][0], src_h[0][1], ysub[0],
                                                       src_h[1][0], alpha, width);
                } else if (ysub[1]) {
                    /* swapped operand order, so the blend weight is inverted */
                    v->vc1dsp.sprite_v_double_onescale(dst, src_h[1][0], src_h[1][1], ysub[1],
                                                       src_h[0][0], (1<<16)-1-alpha, width);
                    v->vc1dsp.sprite_v_double_noscale(dst, src_h[0][0], src_h[1][0], alpha, width);

    /* per-sprite cleanup/reset — body elided in this view */
    for (i = 0; i <= v->two_sprites; i++) {
/* Parse sprite data from the bitstream and render the composited result
 * into v->sprite_output_frame.  Requires a decoded current picture, and a
 * reference (last) picture when two sprites are in use.
 * Returns 0 on success or a negative AVERROR code. */
static int vc1_decode_sprites(VC1Context *v, GetBitContext* gb)
    MpegEncContext *s = &v->s;
    AVCodecContext *avctx = s->avctx;

    memset(&sd, 0, sizeof(sd));

    ret = vc1_parse_sprites(v, gb, &sd);
    /* the current picture supplies sprite 1's pixels */
    if (!s->current_picture.f || !s->current_picture.f->data[0]) {
        av_log(avctx, AV_LOG_ERROR, "Got no sprites\n");
        return AVERROR_UNKNOWN;
    /* sprite 2 reads from the last picture; warn-only if it is missing */
    if (v->two_sprites && (!s->last_picture_ptr || !s->last_picture.f->data[0])) {
        av_log(avctx, AV_LOG_WARNING, "Need two sprites, only got one\n");
    av_frame_unref(v->sprite_output_frame);
    if ((ret = ff_get_buffer(avctx, v->sprite_output_frame, 0)) < 0)
    vc1_draw_sprites(v, &sd);
/* Flush callback for the sprite (WMImage) decoders: blank the current
 * picture so a missing sprite shows as black rather than stale data. */
static void vc1_sprite_flush(AVCodecContext *avctx)
    VC1Context *v = avctx->priv_data;
    MpegEncContext *s = &v->s;
    AVFrame *f = s->current_picture.f;

    /* Windows Media Image codecs have a convergence interval of two keyframes.
       Since we can't enforce it, clear to black the missing sprite. This is
       wrong but it looks better than doing nothing. */

    /* luma to 0, chroma to 128 (mid-gray); luma only in gray mode */
    for (plane = 0; plane < (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++)
        for (i = 0; i < v->sprite_height>>!!plane; i++)
            memset(f->data[plane] + i * f->linesize[plane],
                   plane ? 128 : 0, f->linesize[plane]);
/* Allocate all per-context VC-1 decoding tables: macroblock bitplanes,
 * block/CBP/transform-type buffers, MB-type and block-level MV arrays, and
 * (for the image codecs) the sprite resampling rows.  Returns 0 on success
 * or a negative AVERROR; on failure ff_vc1_decode_end() frees what was
 * allocated.  NOTE(review): the `goto error` lines after each allocation
 * check are elided in this view. */
av_cold int ff_vc1_decode_init_alloc_tables(VC1Context *v)
    MpegEncContext *s = &v->s;
    int i, ret = AVERROR(ENOMEM);
    /* height rounded up to an even MB count (field pictures use halves) */
    int mb_height = FFALIGN(s->mb_height, 2);

    /* Allocate mb bitplanes */
    v->mv_type_mb_plane = av_malloc (s->mb_stride * mb_height);
    v->direct_mb_plane  = av_malloc (s->mb_stride * mb_height);
    v->forward_mb_plane = av_malloc (s->mb_stride * mb_height);
    v->fieldtx_plane    = av_mallocz(s->mb_stride * mb_height);
    v->acpred_plane     = av_malloc (s->mb_stride * mb_height);
    v->over_flags_plane = av_malloc (s->mb_stride * mb_height);
    if (!v->mv_type_mb_plane || !v->direct_mb_plane || !v->forward_mb_plane ||
        !v->fieldtx_plane || !v->acpred_plane || !v->over_flags_plane)

    /* one row of blocks plus two spare entries */
    v->n_allocated_blks = s->mb_width + 2;
    v->block = av_malloc(sizeof(*v->block) * v->n_allocated_blks);
    v->cbp_base = av_malloc(sizeof(v->cbp_base[0]) * 3 * s->mb_stride);
    if (!v->block || !v->cbp_base)
    /* working pointers start one row in, leaving history rows before them */
    v->cbp = v->cbp_base + 2 * s->mb_stride;
    v->ttblk_base = av_malloc(sizeof(v->ttblk_base[0]) * 3 * s->mb_stride);
    v->ttblk = v->ttblk_base + 2 * s->mb_stride;
    v->is_intra_base = av_mallocz(sizeof(v->is_intra_base[0]) * 3 * s->mb_stride);
    if (!v->is_intra_base)
    v->is_intra = v->is_intra_base + 2 * s->mb_stride;
    v->luma_mv_base = av_mallocz(sizeof(v->luma_mv_base[0]) * 3 * s->mb_stride);
    if (!v->luma_mv_base)
    v->luma_mv = v->luma_mv_base + 2 * s->mb_stride;

    /* allocate block type info in that way so it could be used with s->block_index[] */
    v->mb_type_base = av_malloc(s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
    if (!v->mb_type_base)
    v->mb_type[0] = v->mb_type_base + s->b8_stride + 1;
    v->mb_type[1] = v->mb_type_base + s->b8_stride * (mb_height * 2 + 1) + s->mb_stride + 1;
    v->mb_type[2] = v->mb_type[1] + s->mb_stride * (mb_height + 1);

    /* allocate memory to store block level MV info */
    v->blk_mv_type_base = av_mallocz(     s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
    if (!v->blk_mv_type_base)
    v->blk_mv_type = v->blk_mv_type_base + s->b8_stride + 1;
    v->mv_f_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
    v->mv_f[0] = v->mv_f_base + s->b8_stride + 1;
    v->mv_f[1] = v->mv_f[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
    v->mv_f_next_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
    if (!v->mv_f_next_base)
    v->mv_f_next[0] = v->mv_f_next_base + s->b8_stride + 1;
    v->mv_f_next[1] = v->mv_f_next[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);

    if (s->avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || s->avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        /* 2 sprites x 2 rows each for horizontal resampling */
        for (i = 0; i < 4; i++)
            if (!(v->sr_rows[i >> 1][i & 1] = av_malloc(v->output_width)))

    ret = ff_intrax8_common_init(s->avctx, &v->x8, &s->idsp,
                                 s->block, s->block_last_index,
                                 s->mb_width, s->mb_height);

    /* error path: release everything allocated so far */
    ff_vc1_decode_end(s->avctx);
/* Build transposed (column-major) copies of the WMV1 zigzag scantables and
 * the advanced-profile interlaced 8x8 table, as used by the VC-1 IDCT path. */
av_cold void ff_vc1_init_transposed_scantables(VC1Context *v)
    for (i = 0; i < 64; i++) {
/* swap the 3-bit row and column indices of a 0..63 scan position */
#define transpose(x) (((x) >> 3) | (((x) & 7) << 3))
        v->zz_8x8[0][i] = transpose(ff_wmv1_scantable[0][i]);
        v->zz_8x8[1][i] = transpose(ff_wmv1_scantable[1][i]);
        v->zz_8x8[2][i] = transpose(ff_wmv1_scantable[2][i]);
        v->zz_8x8[3][i] = transpose(ff_wmv1_scantable[3][i]);
        v->zzi_8x8[i]   = transpose(ff_vc1_adv_interlaced_8x8_zz[i]);
/** Initialize a VC1/WMV3 decoder
 * Parses the codec extradata (WMV3 sequence header, or marker-delimited
 * VC-1 sequence header + entry point), configures pixel format, colorimetry
 * and macroblock dimensions, and sets up the sprite geometry for the
 * WMV3IMAGE/VC1IMAGE variants.
 * @todo TODO: Handle VC-1 IDUs (Transport level?)
 * @todo TODO: Decipher remaining bits in extra_data
 * NOTE(review): declarations (gb, ret, count) and several error-path lines
 * are elided in this view.
 */
static av_cold int vc1_decode_init(AVCodecContext *avctx)
    VC1Context *v = avctx->priv_data;
    MpegEncContext *s = &v->s;

    /* save the container output size for WMImage */
    v->output_width  = avctx->width;
    v->output_height = avctx->height;

    if (!avctx->extradata_size || !avctx->extradata)
        return AVERROR_INVALIDDATA;
    ff_vc1_init_common(v);

    if (avctx->codec_id == AV_CODEC_ID_WMV3 || avctx->codec_id == AV_CODEC_ID_WMV3IMAGE) {
        // looks like WMV3 has a sequence header stored in the extradata
        // advanced sequence header may be before the first frame
        // the last byte of the extradata is a version number, 1 for the
        // samples we can decode
        init_get_bits(&gb, avctx->extradata, avctx->extradata_size*8);

        if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0)

        if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE && !v->res_sprite) {
            avpriv_request_sample(avctx, "Non sprite WMV3IMAGE");
            return AVERROR_PATCHWELCOME;

        /* report leftover / over-read extradata bits for diagnostics */
        count = avctx->extradata_size*8 - get_bits_count(&gb);
            av_log(avctx, AV_LOG_INFO, "Extra data: %i bits left, value: %X\n",
                   count, get_bits_long(&gb, FFMIN(count, 32)));
        } else if (count < 0) {
            av_log(avctx, AV_LOG_INFO, "Read %i bits in overflow\n", -count);
    } else { // VC1/WVC1/WVP2
        const uint8_t *start = avctx->extradata;
        uint8_t *end = avctx->extradata + avctx->extradata_size;
        uint8_t *buf2 = NULL;
        int seq_initialized = 0, ep_initialized = 0;

        if (avctx->extradata_size < 16) {
            av_log(avctx, AV_LOG_ERROR, "Extradata size too small: %i\n", avctx->extradata_size);
            return AVERROR_INVALIDDATA;

        /* scratch buffer for unescaped header payloads */
        buf2  = av_mallocz(avctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
            return AVERROR(ENOMEM);
        start = find_next_marker(start, end); // in WVC1 extradata first byte is its size, but can be 0 in mkv
        /* walk the marker-delimited units in the extradata */
        for (; next < end; start = next) {
            next = find_next_marker(start + 4, end);
            size = next - start - 4;
            buf2_size = vc1_unescape_buffer(start + 4, size, buf2);
            init_get_bits(&gb, buf2, buf2_size * 8);
            switch (AV_RB32(start)) {
            case VC1_CODE_SEQHDR:
                if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0) {
            case VC1_CODE_ENTRYPOINT:
                if ((ret = ff_vc1_decode_entry_point(avctx, v, &gb)) < 0) {
        /* both a sequence header and an entry point are mandatory */
        if (!seq_initialized || !ep_initialized) {
            av_log(avctx, AV_LOG_ERROR, "Incomplete extradata\n");
            return AVERROR_INVALIDDATA;
    v->res_sprite = (avctx->codec_id == AV_CODEC_ID_VC1IMAGE);

    avctx->profile = v->profile;
    if (v->profile == PROFILE_ADVANCED)
        avctx->level = v->level;

    /* gray decoding skips hwaccel format negotiation */
    if (!CONFIG_GRAY || !(avctx->flags & AV_CODEC_FLAG_GRAY))
        avctx->pix_fmt = ff_get_format(avctx, avctx->codec->pix_fmts);
        avctx->pix_fmt = AV_PIX_FMT_GRAY8;
    if (avctx->color_range == AVCOL_RANGE_UNSPECIFIED)
        avctx->color_range = AVCOL_RANGE_MPEG;

    // ensure static VLC tables are initialized
    if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
    if ((ret = ff_vc1_decode_init_alloc_tables(v)) < 0)
    // Hack to ensure the above functions will be called
    // again once we know all necessary settings.
    // That this is necessary might indicate a bug.
    ff_vc1_decode_end(avctx);

    ff_blockdsp_init(&s->bdsp, avctx);
    ff_h264chroma_init(&v->h264chroma, 8);
    ff_qpeldsp_init(&s->qdsp);

    avctx->has_b_frames = !!avctx->max_b_frames;

    /* forward only the colorimetry values the decoder recognizes */
    if (v->color_prim == 1 || v->color_prim == 5 || v->color_prim == 6)
        avctx->color_primaries = v->color_prim;
    if (v->transfer_char == 1 || v->transfer_char == 7)
        avctx->color_trc = v->transfer_char;
    if (v->matrix_coef == 1 || v->matrix_coef == 6 || v->matrix_coef == 7)
        avctx->colorspace = v->matrix_coef;

    s->mb_width  = (avctx->coded_width  + 15) >> 4;
    s->mb_height = (avctx->coded_height + 15) >> 4;

    if (v->profile == PROFILE_ADVANCED || v->res_fasttx) {
        ff_vc1_init_transposed_scantables(v);
        /* legacy WMV3 path uses the untransposed tables directly */
        memcpy(v->zz_8x8, ff_wmv1_scantable, 4*64);

    if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        /* coded size is the sprite size; the container size is the output */
        v->sprite_width  = avctx->coded_width;
        v->sprite_height = avctx->coded_height;

        avctx->coded_width  = avctx->width  = v->output_width;
        avctx->coded_height = avctx->height = v->output_height;

        // prevent 16.16 overflows
        if (v->sprite_width  > 1 << 14 ||
            v->sprite_height > 1 << 14 ||
            v->output_width  > 1 << 14 ||
            v->output_height > 1 << 14) {
            return AVERROR_INVALIDDATA;

        /* odd-sized sprites would need chroma rounding we don't do */
        if ((v->sprite_width&1) || (v->sprite_height&1)) {
            avpriv_request_sample(avctx, "odd sprites support");
            return AVERROR_PATCHWELCOME;
/** Close a VC1/WMV3 decoder
 * Frees every buffer allocated by ff_vc1_decode_init_alloc_tables() (all
 * av_freep'd, so repeated calls are safe) plus the sprite output frame,
 * and tears down the shared MPEG and intra-X8 state.
 * @warning Initial try at using MpegEncContext stuff
 */
av_cold int ff_vc1_decode_end(AVCodecContext *avctx)
    VC1Context *v = avctx->priv_data;

    av_frame_free(&v->sprite_output_frame);

    /* sprite resampling rows (2 sprites x 2 rows) */
    for (i = 0; i < 4; i++)
        av_freep(&v->sr_rows[i >> 1][i & 1]);
    ff_mpv_common_end(&v->s);
    av_freep(&v->mv_type_mb_plane);
    av_freep(&v->direct_mb_plane);
    av_freep(&v->forward_mb_plane);
    av_freep(&v->fieldtx_plane);
    av_freep(&v->acpred_plane);
    av_freep(&v->over_flags_plane);
    av_freep(&v->mb_type_base);
    av_freep(&v->blk_mv_type_base);
    av_freep(&v->mv_f_base);
    av_freep(&v->mv_f_next_base);
    /* base pointers are freed, not the offset working pointers */
    av_freep(&v->cbp_base);
    av_freep(&v->ttblk_base);
    av_freep(&v->is_intra_base); // FIXME use v->mb_type[]
    av_freep(&v->luma_mv_base);
    ff_intrax8_common_end(&v->x8);
/** Decode a VC1/WMV3 frame
 * Top-level decode entry point: unescapes marker-delimited advanced-profile
 * data into per-slice/field GetBitContexts, (re)initializes the context on
 * size changes, parses the frame header, then decodes either through a
 * hwaccel (field- and slice-aware) or in software, and finally outputs a
 * frame (sprite-composited for the image codecs).
 * @todo TODO: Handle VC-1 IDUs (Transport level?)
 * NOTE(review): many guard lines, braces and goto-cleanup lines are elided
 * in this view.
 */
static int vc1_decode_frame(AVCodecContext *avctx, void *data,
                            int *got_frame, AVPacket *avpkt)
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size, n_slices = 0, i, ret;
    VC1Context *v = avctx->priv_data;
    MpegEncContext *s = &v->s;
    AVFrame *pict = data;
    uint8_t *buf2 = NULL;
    const uint8_t *buf_start = buf, *buf_start_second_field = NULL;
    int mb_height, n_slices1=-1;
    /* per-slice bookkeeping: unescaped buffer + gb, start row, raw span */
        const uint8_t *rawbuf;
    } *slices = NULL, *tmp;

    if(s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)

    /* no supplementary picture */
    if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == VC1_CODE_ENDOFSEQ)) {
        /* special case for last picture */
        if (s->low_delay == 0 && s->next_picture_ptr) {
            if ((ret = av_frame_ref(pict, s->next_picture_ptr->f)) < 0)
            s->next_picture_ptr = NULL;

    //for advanced profile we may need to parse and unescape data
    if (avctx->codec_id == AV_CODEC_ID_VC1 || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        buf2 = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
            return AVERROR(ENOMEM);

        if (IS_MARKER(AV_RB32(buf))) { /* frame starts with marker and needs to be parsed */
            const uint8_t *start, *end, *next;

            /* split the packet into marker-delimited units */
            for (start = buf, end = buf + buf_size; next < end; start = next) {
                next = find_next_marker(start + 4, end);
                size = next - start - 4;
                if (size <= 0) continue;
                switch (AV_RB32(start)) {
                    /* frame payload: unescape into buf2 */
                    buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
                case VC1_CODE_FIELD: {
                    /* second field: record its raw start and store it as a
                     * slice entry so the hwaccel path can address it */
                    buf_start_second_field = start;
                    tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
                        ret = AVERROR(ENOMEM);
                    slices[n_slices].buf = av_mallocz(size + AV_INPUT_BUFFER_PADDING_SIZE);
                    if (!slices[n_slices].buf) {
                        ret = AVERROR(ENOMEM);
                    buf_size3 = vc1_unescape_buffer(start + 4, size,
                                                    slices[n_slices].buf);
                    init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
                    /* second field starts at the vertical midpoint */
                    slices[n_slices].mby_start = avctx->coded_height + 31 >> 5;
                    slices[n_slices].rawbuf = start;
                    slices[n_slices].raw_size = size + 4;
                    n_slices1 = n_slices - 1; // index of the last slice of the first field
                case VC1_CODE_ENTRYPOINT: /* it should be before frame data */
                    buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
                    init_get_bits(&s->gb, buf2, buf_size2 * 8);
                    ff_vc1_decode_entry_point(avctx, v, &s->gb);
                case VC1_CODE_SLICE: {
                    tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
                        ret = AVERROR(ENOMEM);
                    slices[n_slices].buf = av_mallocz(size + AV_INPUT_BUFFER_PADDING_SIZE);
                    if (!slices[n_slices].buf) {
                        ret = AVERROR(ENOMEM);
                    buf_size3 = vc1_unescape_buffer(start + 4, size,
                                                    slices[n_slices].buf);
                    init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
                    /* slice header carries its starting MB row (9 bits) */
                    slices[n_slices].mby_start = get_bits(&slices[n_slices].gb, 9);
                    slices[n_slices].rawbuf = start;
                    slices[n_slices].raw_size = size + 4;
        } else if (v->interlace && ((buf[0] & 0xC0) == 0xC0)) { /* WVC1 interlaced stores both fields divided by marker */
            const uint8_t *divider;

            divider = find_next_marker(buf, buf + buf_size);
            if ((divider == (buf + buf_size)) || AV_RB32(divider) != VC1_CODE_FIELD) {
                av_log(avctx, AV_LOG_ERROR, "Error in WVC1 interlaced frame\n");
                ret = AVERROR_INVALIDDATA;
            } else { // found field marker, unescape second field
                buf_start_second_field = divider;
                tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
                    ret = AVERROR(ENOMEM);
                slices[n_slices].buf = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
                if (!slices[n_slices].buf) {
                    ret = AVERROR(ENOMEM);
                buf_size3 = vc1_unescape_buffer(divider + 4, buf + buf_size - divider - 4, slices[n_slices].buf);
                init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
                slices[n_slices].mby_start = s->mb_height + 1 >> 1;
                slices[n_slices].rawbuf = divider;
                slices[n_slices].raw_size = buf + buf_size - divider;
                n_slices1 = n_slices - 1;
            buf_size2 = vc1_unescape_buffer(buf, divider - buf, buf2);
            /* plain advanced-profile packet without markers */
            buf_size2 = vc1_unescape_buffer(buf, buf_size, buf2);
        init_get_bits(&s->gb, buf2, buf_size2*8);
        /* simple/main profile: read the packet directly */
        init_get_bits(&s->gb, buf, buf_size*8);

    /* sprite codecs: read the two sprite control bits first */
        v->new_sprite  = !get_bits1(&s->gb);
        v->two_sprites =  get_bits1(&s->gb);
        /* res_sprite means a Windows Media Image stream, AV_CODEC_ID_*IMAGE means
           we're using the sprite compositor. These are intentionally kept separate
           so you can get the raw sprites by using the wmv3 decoder for WMVP or
           the vc1 one for WVP2 */
    if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        // switch AVCodecContext parameters to those of the sprites
        avctx->width  = avctx->coded_width  = v->sprite_width;
        avctx->height = avctx->coded_height = v->sprite_height;

    /* size change forces a full context re-init */
    if (s->context_initialized &&
        (s->width  != avctx->coded_width ||
         s->height != avctx->coded_height)) {
        ff_vc1_decode_end(avctx);

    if (!s->context_initialized) {
        if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
        if ((ret = ff_vc1_decode_init_alloc_tables(v)) < 0) {
            ff_mpv_common_end(s);

        s->low_delay = !avctx->has_b_frames || v->res_sprite;

        if (v->profile == PROFILE_ADVANCED) {
            if(avctx->coded_width<=1 || avctx->coded_height<=1) {
                ret = AVERROR_INVALIDDATA;
            s->h_edge_pos = avctx->coded_width;
            s->v_edge_pos = avctx->coded_height;

    // do parse frame header
    v->pic_header_flag = 0;
    v->first_pic_header_flag = 1;
    if (v->profile < PROFILE_ADVANCED) {
        if ((ret = ff_vc1_parse_frame_header(v, &s->gb)) < 0) {
        if ((ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
    v->first_pic_header_flag = 0;

    if (avctx->debug & FF_DEBUG_PICT_INFO)
        av_log(v->s.avctx, AV_LOG_DEBUG, "pict_type: %c\n", av_get_picture_type_char(s->pict_type));

    /* the sprite compositor only accepts progressive intra frames */
    if ((avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE)
        && s->pict_type != AV_PICTURE_TYPE_I) {
        av_log(v->s.avctx, AV_LOG_ERROR, "Sprite decoder: expected I-frame\n");
        ret = AVERROR_INVALIDDATA;
    if ((avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE)
        av_log(v->s.avctx, AV_LOG_ERROR, "Sprite decoder: expected Frames not Fields\n");
        ret = AVERROR_INVALIDDATA;

    if ((s->mb_height >> v->field_mode) == 0) {
        av_log(v->s.avctx, AV_LOG_ERROR, "image too short\n");
        ret = AVERROR_INVALIDDATA;

    // for skipping the frame
    s->current_picture.f->pict_type = s->pict_type;
    s->current_picture.f->key_frame = s->pict_type == AV_PICTURE_TYPE_I;

    /* skip B-frames if we don't have reference frames */
    if (!s->last_picture_ptr && (s->pict_type == AV_PICTURE_TYPE_B || s->droppable)) {
        av_log(v->s.avctx, AV_LOG_DEBUG, "Skipping B frame without reference frames\n");
    if ((avctx->skip_frame >= AVDISCARD_NONREF && s->pict_type == AV_PICTURE_TYPE_B) ||
        (avctx->skip_frame >= AVDISCARD_NONKEY && s->pict_type != AV_PICTURE_TYPE_I) ||
         avctx->skip_frame >= AVDISCARD_ALL) {
    if (s->next_p_frame_damaged) {
        if (s->pict_type == AV_PICTURE_TYPE_B)
            s->next_p_frame_damaged = 0;

    if ((ret = ff_mpv_frame_start(s, avctx)) < 0) {

    v->s.current_picture_ptr->field_picture = v->field_mode;
    v->s.current_picture_ptr->f->interlaced_frame = (v->fcm != PROGRESSIVE);
    v->s.current_picture_ptr->f->top_field_first = v->tff;

    // process pulldown flags
    s->current_picture_ptr->f->repeat_pict = 0;
    // Pulldown flags are only valid when 'broadcast' has been set.
    // So ticks_per_frame will be 2
        s->current_picture_ptr->f->repeat_pict = 1;
    } else if (v->rptfrm) {
        s->current_picture_ptr->f->repeat_pict = v->rptfrm * 2;

    s->me.qpel_put = s->qdsp.put_qpel_pixels_tab;
    s->me.qpel_avg = s->qdsp.avg_qpel_pixels_tab;

    if (avctx->hwaccel) {
        /* hwaccel path: hand raw (escaped) data to the accelerator, field
         * by field and slice by slice */
        if (v->field_mode && buf_start_second_field) {
            // decode first field
            s->picture_structure = PICT_BOTTOM_FIELD - v->tff;
            if ((ret = avctx->hwaccel->start_frame(avctx, buf_start, buf_start_second_field - buf_start)) < 0)

            if (n_slices1 == -1) {
                // no slices, decode the field as-is
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, buf_start_second_field - buf_start)) < 0)
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, slices[0].rawbuf - buf_start)) < 0)

                for (i = 0 ; i < n_slices1 + 1; i++) {
                    s->gb = slices[i].gb;
                    s->mb_y = slices[i].mby_start;

                    v->pic_header_flag = get_bits1(&s->gb);
                    if (v->pic_header_flag) {
                        if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
                            av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
                            ret = AVERROR_INVALIDDATA;
                            if (avctx->err_recognition & AV_EF_EXPLODE)

                    if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)

            if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)

            // decode second field
            s->gb = slices[n_slices1 + 1].gb;
            s->mb_y = slices[n_slices1 + 1].mby_start;
            s->picture_structure = PICT_TOP_FIELD + v->tff;
            v->pic_header_flag = 0;
            if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
                av_log(avctx, AV_LOG_ERROR, "parsing header for second field failed");
                ret = AVERROR_INVALIDDATA;
            v->s.current_picture_ptr->f->pict_type = v->s.pict_type;

            if ((ret = avctx->hwaccel->start_frame(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field)) < 0)

            if (n_slices - n_slices1 == 2) {
                // no slices, decode the field as-is
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field)) < 0)
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start_second_field, slices[n_slices1 + 2].rawbuf - buf_start_second_field)) < 0)

                for (i = n_slices1 + 2; i < n_slices; i++) {
                    s->gb = slices[i].gb;
                    s->mb_y = slices[i].mby_start;

                    v->pic_header_flag = get_bits1(&s->gb);
                    if (v->pic_header_flag) {
                        if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
                            av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
                            ret = AVERROR_INVALIDDATA;
                            if (avctx->err_recognition & AV_EF_EXPLODE)

                    if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)

            if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
            /* progressive / frame-coded hwaccel path */
            s->picture_structure = PICT_FRAME;
            if ((ret = avctx->hwaccel->start_frame(avctx, buf_start, (buf + buf_size) - buf_start)) < 0)

                // no slices, decode the frame as-is
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, (buf + buf_size) - buf_start)) < 0)
                // decode the frame part as the first slice
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, slices[0].rawbuf - buf_start)) < 0)

                // and process the slices as additional slices afterwards
                for (i = 0 ; i < n_slices; i++) {
                    s->gb = slices[i].gb;
                    s->mb_y = slices[i].mby_start;

                    v->pic_header_flag = get_bits1(&s->gb);
                    if (v->pic_header_flag) {
                        if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
                            av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
                            ret = AVERROR_INVALIDDATA;
                            if (avctx->err_recognition & AV_EF_EXPLODE)

                    if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)

            if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
        /* software decode path */
        ff_mpeg_er_frame_start(s);

        v->end_mb_x = s->mb_width;
        if (v->field_mode) {
            /* fields are decoded at double line stride */
            s->current_picture.f->linesize[0] <<= 1;
            s->current_picture.f->linesize[1] <<= 1;
            s->current_picture.f->linesize[2] <<= 1;
            s->uvlinesize <<= 1;
        mb_height = s->mb_height >> v->field_mode;

        av_assert0 (mb_height > 0);

        /* iterate the frame part (i == 0) then each slice */
        for (i = 0; i <= n_slices; i++) {
            if (i > 0 && slices[i - 1].mby_start >= mb_height) {
                if (v->field_mode <= 0) {
                    av_log(v->s.avctx, AV_LOG_ERROR, "Slice %d starts beyond "
                           "picture boundary (%d >= %d)\n", i,
                           slices[i - 1].mby_start, mb_height);
                /* crossing the midpoint switches to the second field */
                v->second_field = 1;
                av_assert0((s->mb_height & 1) == 0);
                v->blocks_off   = s->b8_stride * (s->mb_height&~1);
                v->mb_off       = s->mb_stride * s->mb_height >> 1;
                v->second_field = 0;

            v->pic_header_flag = 0;
            if (v->field_mode && i == n_slices1 + 2) {
                /* first slice of the second field carries a field header */
                if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
                    av_log(v->s.avctx, AV_LOG_ERROR, "Field header damaged\n");
                    ret = AVERROR_INVALIDDATA;
                    if (avctx->err_recognition & AV_EF_EXPLODE)
            } else if (get_bits1(&s->gb)) {
                v->pic_header_flag = 1;
                if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
                    av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
                    ret = AVERROR_INVALIDDATA;
                    if (avctx->err_recognition & AV_EF_EXPLODE)

            /* compute this slice's MB row range */
            s->start_mb_y = (i == 0) ? 0 : FFMAX(0, slices[i-1].mby_start % mb_height);
            if (!v->field_mode || v->second_field)
                s->end_mb_y = (i == n_slices     ) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
                if (i >= n_slices) {
                    av_log(v->s.avctx, AV_LOG_ERROR, "first field slice count too large\n");
                s->end_mb_y = (i == n_slices1 + 1) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
            if (s->end_mb_y <= s->start_mb_y) {
                av_log(v->s.avctx, AV_LOG_ERROR, "end mb y %d %d invalid\n", s->end_mb_y, s->start_mb_y);
            if (((s->pict_type == AV_PICTURE_TYPE_P && !v->p_frame_skipped) ||
                 (s->pict_type == AV_PICTURE_TYPE_B && !v->bi_type)) &&
                av_log(v->s.avctx, AV_LOG_ERROR, "missing cbpcy_vlc\n");
            ff_vc1_decode_blocks(v);
            if (i != n_slices) {
                s->gb = slices[i].gb;
        if (v->field_mode) {
            /* undo the doubled strides and swap forward MV tables */
            v->second_field = 0;
            s->current_picture.f->linesize[0] >>= 1;
            s->current_picture.f->linesize[1] >>= 1;
            s->current_picture.f->linesize[2] >>= 1;
            s->uvlinesize >>= 1;
            if (v->s.pict_type != AV_PICTURE_TYPE_BI && v->s.pict_type != AV_PICTURE_TYPE_B) {
                FFSWAP(uint8_t *, v->mv_f_next[0], v->mv_f[0]);
                FFSWAP(uint8_t *, v->mv_f_next[1], v->mv_f[1]);
        ff_dlog(s->avctx, "Consumed %i/%i bits\n",
                get_bits_count(&s->gb), s->gb.size_in_bits);
//  if (get_bits_count(&s->gb) > buf_size * 8)
        if(s->er.error_occurred && s->pict_type == AV_PICTURE_TYPE_B) {
            ret = AVERROR_INVALIDDATA;
        ff_er_frame_end(&s->er);

    ff_mpv_frame_end(s);

    if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        /* restore the container output size and composite the sprites */
        avctx->width  = avctx->coded_width  = v->output_width;
        avctx->height = avctx->coded_height = v->output_height;
        if (avctx->skip_frame >= AVDISCARD_NONREF)
        if (!v->sprite_output_frame &&
            !(v->sprite_output_frame = av_frame_alloc())) {
            ret = AVERROR(ENOMEM);
#if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
        if ((ret = vc1_decode_sprites(v, &s->gb)) < 0)
        if ((ret = av_frame_ref(pict, v->sprite_output_frame)) < 0)
        /* normal output: current picture for B/low-delay, otherwise the
         * (delayed) last picture */
        if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
            if ((ret = av_frame_ref(pict, s->current_picture_ptr->f)) < 0)
            ff_print_debug_info(s, s->current_picture_ptr, pict);
        } else if (s->last_picture_ptr) {
            if ((ret = av_frame_ref(pict, s->last_picture_ptr->f)) < 0)
            ff_print_debug_info(s, s->last_picture_ptr, pict);

    /* cleanup: free per-slice unescape buffers */
    for (i = 0; i < n_slices; i++)
        av_free(slices[i].buf);
    /* error-path cleanup (reached via goto — labels elided in this view) */
    for (i = 0; i < n_slices; i++)
        av_free(slices[i].buf);
1178 static const enum AVPixelFormat vc1_hwaccel_pixfmt_list_420[] = {
1179 #if CONFIG_VC1_DXVA2_HWACCEL
1180 AV_PIX_FMT_DXVA2_VLD,
1182 #if CONFIG_VC1_D3D11VA_HWACCEL
1183 AV_PIX_FMT_D3D11VA_VLD,
1186 #if CONFIG_VC1_NVDEC_HWACCEL
1189 #if CONFIG_VC1_VAAPI_HWACCEL
1192 #if CONFIG_VC1_VDPAU_HWACCEL
1199 const AVCodec ff_vc1_decoder = {
1201 .long_name = NULL_IF_CONFIG_SMALL("SMPTE VC-1"),
1202 .type = AVMEDIA_TYPE_VIDEO,
1203 .id = AV_CODEC_ID_VC1,
1204 .priv_data_size = sizeof(VC1Context),
1205 .init = vc1_decode_init,
1206 .close = ff_vc1_decode_end,
1207 .decode = vc1_decode_frame,
1208 .flush = ff_mpeg_flush,
1209 .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
1210 .pix_fmts = vc1_hwaccel_pixfmt_list_420,
1211 .hw_configs = (const AVCodecHWConfigInternal *const []) {
1212 #if CONFIG_VC1_DXVA2_HWACCEL
1215 #if CONFIG_VC1_D3D11VA_HWACCEL
1216 HWACCEL_D3D11VA(vc1),
1218 #if CONFIG_VC1_D3D11VA2_HWACCEL
1219 HWACCEL_D3D11VA2(vc1),
1221 #if CONFIG_VC1_NVDEC_HWACCEL
1224 #if CONFIG_VC1_VAAPI_HWACCEL
1227 #if CONFIG_VC1_VDPAU_HWACCEL
1232 .profiles = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
#if CONFIG_WMV3_DECODER
/* Registration entry for the WMV3 (Windows Media Video 9 / VC-1 Simple and
 * Main Profile) decoder. Shares init/close/decode with the VC-1 decoder;
 * only the codec ID and the per-codec hwaccel wiring differ.
 * NOTE(review): .name, the #endif directives, the hw_configs NULL terminator
 * and the closing brace were missing (extraction damage); restored from the
 * upstream FFmpeg definition. */
const AVCodec ff_wmv3_decoder = {
    .name           = "wmv3",
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 9"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_WMV3,
    .priv_data_size = sizeof(VC1Context),
    .init           = vc1_decode_init,
    .close          = ff_vc1_decode_end,
    .decode         = vc1_decode_frame,
    .flush          = ff_mpeg_flush,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
    .pix_fmts       = vc1_hwaccel_pixfmt_list_420,
    .hw_configs     = (const AVCodecHWConfigInternal *const []) {
#if CONFIG_WMV3_DXVA2_HWACCEL
                        HWACCEL_DXVA2(wmv3),
#endif
#if CONFIG_WMV3_D3D11VA_HWACCEL
                        HWACCEL_D3D11VA(wmv3),
#endif
#if CONFIG_WMV3_D3D11VA2_HWACCEL
                        HWACCEL_D3D11VA2(wmv3),
#endif
#if CONFIG_WMV3_NVDEC_HWACCEL
                        HWACCEL_NVDEC(wmv3),
#endif
#if CONFIG_WMV3_VAAPI_HWACCEL
                        HWACCEL_VAAPI(wmv3),
#endif
#if CONFIG_WMV3_VDPAU_HWACCEL
                        HWACCEL_VDPAU(wmv3),
#endif
                        NULL
                    },
    .profiles       = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
};
#endif
#if CONFIG_WMV3IMAGE_DECODER
/* Registration entry for the WMV3 Image (sprite-based still image) decoder.
 * No AV_CODEC_CAP_DELAY and no hwaccels: sprite decoding is software-only
 * and produces exactly one output frame per input.
 * NOTE(review): the pix_fmts initializer entries, closing braces and #endif
 * were missing (extraction damage); restored from the upstream FFmpeg
 * definition. */
const AVCodec ff_wmv3image_decoder = {
    .name           = "wmv3image",
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_WMV3IMAGE,
    .priv_data_size = sizeof(VC1Context),
    .init           = vc1_decode_init,
    .close          = ff_vc1_decode_end,
    .decode         = vc1_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
    .flush          = vc1_sprite_flush,
    .pix_fmts       = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_YUV420P,
        AV_PIX_FMT_NONE
    },
};
#endif
1292 #if CONFIG_VC1IMAGE_DECODER
1293 const AVCodec ff_vc1image_decoder = {
1295 .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image v2"),
1296 .type = AVMEDIA_TYPE_VIDEO,
1297 .id = AV_CODEC_ID_VC1IMAGE,
1298 .priv_data_size = sizeof(VC1Context),
1299 .init = vc1_decode_init,
1300 .close = ff_vc1_decode_end,
1301 .decode = vc1_decode_frame,
1302 .capabilities = AV_CODEC_CAP_DR1,
1303 .flush = vc1_sprite_flush,
1304 .pix_fmts = (const enum AVPixelFormat[]) {