2 * VC-1 and WMV3 decoder
3 * Copyright (c) 2011 Mashiat Sarker Shakkhar
4 * Copyright (c) 2006-2007 Konstantin Shishkov
5 * Partly based on vc9.c (c) 2005 Anonymous, Alex Beregszaszi, Michael Niedermayer
7 * This file is part of FFmpeg.
9 * FFmpeg is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Lesser General Public
11 * License as published by the Free Software Foundation; either
12 * version 2.1 of the License, or (at your option) any later version.
14 * FFmpeg is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Lesser General Public License for more details.
19 * You should have received a copy of the GNU Lesser General Public
20 * License along with FFmpeg; if not, write to the Free Software
21 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
26 * VC-1 and WMV3 decoder
35 #include "mpegvideo.h"
37 #include "msmpeg4data.h"
41 #include "libavutil/avassert.h"
44 #if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
/* Parsed sprite ("Windows Media Image") composition parameters, filled by
 * vc1_parse_sprites() and consumed by vc1_draw_sprites(). */
typedef struct SpriteData {
    /**
     * Transform coefficients for both sprites in 16.16 fixed point format,
     * in the order they appear in the bitstream:
     */
    int effect_type, effect_flag;
    int effect_pcount1, effect_pcount2; ///< amount of effect parameters stored in effect_params
    int effect_params1[15], effect_params2[10]; ///< effect parameters in 16.16 fixed point format
65 static inline int get_fp_val(GetBitContext* gb)
67 return (get_bits_long(gb, 30) - (1 << 29)) << 1;
/* Parse one sprite transform coefficient set into c[0..6] (16.16 fixed
 * point).  The 2-bit selector chooses how many coefficients are coded
 * explicitly.  Per their use in vc1_draw_sprites(): c[0]/c[4] are the
 * x/y advances, c[2]/c[5] the x/y offsets, c[1]/c[3] rotation terms,
 * c[6] the blend opacity. */
static void vc1_sprite_parse_transform(GetBitContext* gb, int c[7])
    switch (get_bits(gb, 2)) {
        c[2] = get_fp_val(gb);
        c[0] = c[4] = get_fp_val(gb);
        c[2] = get_fp_val(gb);
        c[0] = get_fp_val(gb);
        c[2] = get_fp_val(gb);
        c[4] = get_fp_val(gb);
        /* full transform: all five linear coefficients are coded */
        c[0] = get_fp_val(gb);
        c[1] = get_fp_val(gb);
        c[2] = get_fp_val(gb);
        c[3] = get_fp_val(gb);
        c[4] = get_fp_val(gb);
        c[5] = get_fp_val(gb);
    c[6] = get_fp_val(gb);
/**
 * Parse the sprite headers of a WMV3IMAGE/VC1IMAGE frame.
 *
 * Fills @p sd with the per-sprite transform coefficients and the optional
 * effect parameters, logging everything at DEBUG level.
 * @return 0 on success, AVERROR_INVALIDDATA on bitstream overrun or an
 *         out-of-range effect parameter count.
 */
static int vc1_parse_sprites(VC1Context *v, GetBitContext* gb, SpriteData* sd)
    AVCodecContext *avctx = v->s.avctx;

    /* v->two_sprites is a flag, so this reads one or two coefficient sets */
    for (sprite = 0; sprite <= v->two_sprites; sprite++) {
        vc1_sprite_parse_transform(gb, sd->coefs[sprite]);
        if (sd->coefs[sprite][1] || sd->coefs[sprite][3])
            avpriv_request_sample(avctx, "Non-zero rotation coefficients");
        av_log(avctx, AV_LOG_DEBUG, sprite ? "S2:" : "S1:");
        for (i = 0; i < 7; i++)
            av_log(avctx, AV_LOG_DEBUG, " %d.%.3d",
                   sd->coefs[sprite][i] / (1<<16),
                   (abs(sd->coefs[sprite][i]) & 0xFFFF) * 1000 / (1 << 16));
        av_log(avctx, AV_LOG_DEBUG, "\n");

    /* intentional assignment-in-condition: effect_type == 0 means no effect */
    if (sd->effect_type = get_bits_long(gb, 30)) {
        switch (sd->effect_pcount1 = get_bits(gb, 4)) {
            vc1_sprite_parse_transform(gb, sd->effect_params1);
            vc1_sprite_parse_transform(gb, sd->effect_params1);
            vc1_sprite_parse_transform(gb, sd->effect_params1 + 7);
            for (i = 0; i < sd->effect_pcount1; i++)
                sd->effect_params1[i] = get_fp_val(gb);
        if (sd->effect_type != 13 || sd->effect_params1[0] != sd->coefs[0][6]) {
            // effect 13 is simple alpha blending and matches the opacity above
            av_log(avctx, AV_LOG_DEBUG, "Effect: %d; params: ", sd->effect_type);
            for (i = 0; i < sd->effect_pcount1; i++)
                av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
                       sd->effect_params1[i] / (1 << 16),
                       (abs(sd->effect_params1[i]) & 0xFFFF) * 1000 / (1 << 16));
            av_log(avctx, AV_LOG_DEBUG, "\n");

        /* effect_params2 has room for only 10 entries; reject larger counts */
        sd->effect_pcount2 = get_bits(gb, 16);
        if (sd->effect_pcount2 > 10) {
            av_log(avctx, AV_LOG_ERROR, "Too many effect parameters\n");
            return AVERROR_INVALIDDATA;
        } else if (sd->effect_pcount2) {
            av_log(avctx, AV_LOG_DEBUG, "Effect params 2: ");
            while (++i < sd->effect_pcount2) {
                sd->effect_params2[i] = get_fp_val(gb);
                av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
                       sd->effect_params2[i] / (1 << 16),
                       (abs(sd->effect_params2[i]) & 0xFFFF) * 1000 / (1 << 16));
            av_log(avctx, AV_LOG_DEBUG, "\n");

    /* intentional assignment-in-condition */
    if (sd->effect_flag = get_bits1(gb))
        av_log(avctx, AV_LOG_DEBUG, "Effect flag set\n");

    /* the error threshold is relaxed by 64 bits for WMV3IMAGE streams */
    if (get_bits_count(gb) >= gb->size_in_bits +
       (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE ? 64 : 0)) {
        av_log(avctx, AV_LOG_ERROR, "Buffer overrun\n");
        return AVERROR_INVALIDDATA;
    if (get_bits_count(gb) < gb->size_in_bits - 8)
        av_log(avctx, AV_LOG_WARNING, "Buffer not fully read\n");
/* Composite one or two sprites into v->sprite_output_frame using the
 * 16.16 fixed point transforms parsed into sd.  The first sprite is
 * sampled from the current picture (the second from the last picture;
 * see the iplane reassignment below), and the two are blended with the
 * opacity taken from sd->coefs[1][6]. */
static void vc1_draw_sprites(VC1Context *v, SpriteData* sd)
    int i, plane, row, sprite;
    int sr_cache[2][2] = { { -1, -1 }, { -1, -1 } }; /* y lines currently held in v->sr_rows */
    uint8_t* src_h[2][2];
    int xoff[2], xadv[2], yoff[2], yadv[2], alpha;
    MpegEncContext *s = &v->s;

    /* clamp offsets/advances so sampling stays inside the sprite planes */
    for (i = 0; i <= v->two_sprites; i++) {
        xoff[i] = av_clip(sd->coefs[i][2], 0, v->sprite_width-1 << 16);
        xadv[i] = sd->coefs[i][0];
        if (xadv[i] != 1<<16 || (v->sprite_width << 16) - (v->output_width << 16) - xoff[i])
            xadv[i] = av_clip(xadv[i], 0, ((v->sprite_width<<16) - xoff[i] - 1) / v->output_width);

        yoff[i] = av_clip(sd->coefs[i][5], 0, v->sprite_height-1 << 16);
        yadv[i] = av_clip(sd->coefs[i][4], 0, ((v->sprite_height << 16) - yoff[i]) / v->output_height);
    alpha = av_clip_uint16(sd->coefs[1][6]);

    for (plane = 0; plane < (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++) {
        int width = v->output_width>>!!plane; /* chroma planes are subsampled by 2 */

        for (row = 0; row < v->output_height>>!!plane; row++) {
            uint8_t *dst = v->sprite_output_frame->data[plane] +
                           v->sprite_output_frame->linesize[plane] * row;

            for (sprite = 0; sprite <= v->two_sprites; sprite++) {
                uint8_t *iplane = s->current_picture.f->data[plane];
                int iline = s->current_picture.f->linesize[plane];
                int ycoord = yoff[sprite] + yadv[sprite] * row;
                int yline = ycoord >> 16;
                ysub[sprite] = ycoord & 0xFFFF; /* fractional y, drives vertical interpolation */
                /* the second sprite is sampled from the previous picture */
                iplane = s->last_picture.f->data[plane];
                iline = s->last_picture.f->linesize[plane];
                next_line = FFMIN(yline + 1, (v->sprite_height >> !!plane) - 1) * iline;
                if (!(xoff[sprite] & 0xFFFF) && xadv[sprite] == 1 << 16) {
                    /* integer x offset with unit advance: point straight at the source rows */
                    src_h[sprite][0] = iplane + (xoff[sprite] >> 16) + yline * iline;
                    src_h[sprite][1] = iplane + (xoff[sprite] >> 16) + next_line;
                    /* horizontally scaled: render rows into sr_rows, cached per y line */
                    if (sr_cache[sprite][0] != yline) {
                        if (sr_cache[sprite][1] == yline) {
                            /* the other cache slot already holds this line: swap instead of recompute */
                            FFSWAP(uint8_t*, v->sr_rows[sprite][0], v->sr_rows[sprite][1]);
                            FFSWAP(int, sr_cache[sprite][0], sr_cache[sprite][1]);
                            v->vc1dsp.sprite_h(v->sr_rows[sprite][0], iplane + yline * iline, xoff[sprite], xadv[sprite], width);
                            sr_cache[sprite][0] = yline;
                    if (ysub[sprite] && sr_cache[sprite][1] != yline + 1) {
                        v->vc1dsp.sprite_h(v->sr_rows[sprite][1],
                                           iplane + next_line, xoff[sprite],
                                           xadv[sprite], width);
                        sr_cache[sprite][1] = yline + 1;
                    src_h[sprite][0] = v->sr_rows[sprite][0];
                    src_h[sprite][1] = v->sr_rows[sprite][1];

            if (!v->two_sprites) {
                /* single sprite: vertical interpolation or a plain row copy */
                v->vc1dsp.sprite_v_single(dst, src_h[0][0], src_h[0][1], ysub[0], width);
                memcpy(dst, src_h[0][0], width);
                /* two sprites: pick the blend variant by which rows need vertical scaling */
                if (ysub[0] && ysub[1]) {
                    v->vc1dsp.sprite_v_double_twoscale(dst, src_h[0][0], src_h[0][1], ysub[0],
                                                       src_h[1][0], src_h[1][1], ysub[1], alpha, width);
                } else if (ysub[0]) {
                    v->vc1dsp.sprite_v_double_onescale(dst, src_h[0][0], src_h[0][1], ysub[0],
                                                       src_h[1][0], alpha, width);
                } else if (ysub[1]) {
                    /* sprite order swapped, so the alpha is inverted */
                    v->vc1dsp.sprite_v_double_onescale(dst, src_h[1][0], src_h[1][1], ysub[1],
                                                       src_h[0][0], (1<<16)-1-alpha, width);
                    v->vc1dsp.sprite_v_double_noscale(dst, src_h[0][0], src_h[1][0], alpha, width);

    for (i = 0; i <= v->two_sprites; i++) {
/* Parse sprite headers from gb and composite the sprites into
 * v->sprite_output_frame.  Requires a decoded current picture; a second
 * sprite additionally needs a last picture (only warned about if absent). */
static int vc1_decode_sprites(VC1Context *v, GetBitContext* gb)
    MpegEncContext *s = &v->s;
    AVCodecContext *avctx = s->avctx;

    memset(&sd, 0, sizeof(sd));

    ret = vc1_parse_sprites(v, gb, &sd);

    /* sprite pixels come from the current picture, which must exist */
    if (!s->current_picture.f || !s->current_picture.f->data[0]) {
        av_log(avctx, AV_LOG_ERROR, "Got no sprites\n");
        return AVERROR_UNKNOWN;

    if (v->two_sprites && (!s->last_picture_ptr || !s->last_picture.f->data[0])) {
        av_log(avctx, AV_LOG_WARNING, "Need two sprites, only got one\n");

    /* (re)acquire the output buffer and render into it */
    av_frame_unref(v->sprite_output_frame);
    if ((ret = ff_get_buffer(avctx, v->sprite_output_frame, 0)) < 0)

    vc1_draw_sprites(v, &sd);
/* Flush callback for the Windows Media Image codecs: blank the retained
 * sprite picture so stale data is not composited after a seek. */
static void vc1_sprite_flush(AVCodecContext *avctx)
    VC1Context *v = avctx->priv_data;
    MpegEncContext *s = &v->s;
    AVFrame *f = s->current_picture.f;

    /* Windows Media Image codecs have a convergence interval of two keyframes.
       Since we can't enforce it, clear to black the missing sprite. This is
       wrong but it looks better than doing nothing. */
    for (plane = 0; plane < (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++)
        for (i = 0; i < v->sprite_height>>!!plane; i++)
            memset(f->data[plane] + i * f->linesize[plane],
                   plane ? 128 : 0, f->linesize[plane]); /* 128 = neutral chroma, 0 = black luma */
/* Allocate all per-context decoding tables: macroblock bitplanes,
 * block/CBP/TT buffers, MB-type and block-level MV tables, plus the
 * per-sprite row caches for the image codecs.  On allocation failure the
 * error path (via ff_vc1_decode_end() below) tears down partial state and
 * a negative AVERROR is returned. */
av_cold int ff_vc1_decode_init_alloc_tables(VC1Context *v)
    MpegEncContext *s = &v->s;
    int i, ret = AVERROR(ENOMEM);
    int mb_height = FFALIGN(s->mb_height, 2); /* rounded up to even — presumably for field coding; confirm */

    /* Allocate mb bitplanes */
    v->mv_type_mb_plane = av_malloc (s->mb_stride * mb_height);
    v->direct_mb_plane = av_malloc (s->mb_stride * mb_height);
    v->forward_mb_plane = av_malloc (s->mb_stride * mb_height);
    v->fieldtx_plane = av_mallocz(s->mb_stride * mb_height);
    v->acpred_plane = av_malloc (s->mb_stride * mb_height);
    v->over_flags_plane = av_malloc (s->mb_stride * mb_height);
    if (!v->mv_type_mb_plane || !v->direct_mb_plane || !v->forward_mb_plane ||
        !v->fieldtx_plane || !v->acpred_plane || !v->over_flags_plane)

    v->n_allocated_blks = s->mb_width + 2;
    v->block = av_malloc(sizeof(*v->block) * v->n_allocated_blks);
    /* CBP/TT/intra/luma-MV buffers hold 3 MB rows; the working pointer
     * starts at the third row so two rows of context sit above it */
    v->cbp_base = av_malloc(sizeof(v->cbp_base[0]) * 3 * s->mb_stride);
    if (!v->block || !v->cbp_base)
    v->cbp = v->cbp_base + 2 * s->mb_stride;
    v->ttblk_base = av_malloc(sizeof(v->ttblk_base[0]) * 3 * s->mb_stride);
    v->ttblk = v->ttblk_base + 2 * s->mb_stride;
    v->is_intra_base = av_mallocz(sizeof(v->is_intra_base[0]) * 3 * s->mb_stride);
    if (!v->is_intra_base)
    v->is_intra = v->is_intra_base + 2 * s->mb_stride;
    v->luma_mv_base = av_mallocz(sizeof(v->luma_mv_base[0]) * 3 * s->mb_stride);
    if (!v->luma_mv_base)
    v->luma_mv = v->luma_mv_base + 2 * s->mb_stride;

    /* allocate block type info in that way so it could be used with s->block_index[] */
    v->mb_type_base = av_malloc(s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
    if (!v->mb_type_base)
    v->mb_type[0] = v->mb_type_base + s->b8_stride + 1;
    v->mb_type[1] = v->mb_type_base + s->b8_stride * (mb_height * 2 + 1) + s->mb_stride + 1;
    v->mb_type[2] = v->mb_type[1] + s->mb_stride * (mb_height + 1);

    /* allocate memory to store block level MV info */
    v->blk_mv_type_base = av_mallocz( s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
    if (!v->blk_mv_type_base)
    v->blk_mv_type = v->blk_mv_type_base + s->b8_stride + 1;
    v->mv_f_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
    v->mv_f[0] = v->mv_f_base + s->b8_stride + 1;
    v->mv_f[1] = v->mv_f[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
    v->mv_f_next_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
    if (!v->mv_f_next_base)
    v->mv_f_next[0] = v->mv_f_next_base + s->b8_stride + 1;
    v->mv_f_next[1] = v->mv_f_next[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);

    /* per-sprite scaled-row caches used by vc1_draw_sprites() */
    if (s->avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || s->avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        for (i = 0; i < 4; i++)
            if (!(v->sr_rows[i >> 1][i & 1] = av_malloc(v->output_width)))

    ret = ff_intrax8_common_init(s->avctx, &v->x8, &s->idsp,
                                 s->block, s->block_last_index,
                                 s->mb_width, s->mb_height);

    /* error path: release everything allocated so far */
    ff_vc1_decode_end(s->avctx);
/* Build the transposed zigzag scan tables: transpose(x) swaps the row
 * (high 3 bits) and column (low 3 bits) of an 8x8 scan index, so each
 * table is the column-major counterpart of the WMV1/VC-1 source table. */
av_cold void ff_vc1_init_transposed_scantables(VC1Context *v)
    for (i = 0; i < 64; i++) {
#define transpose(x) (((x) >> 3) | (((x) & 7) << 3))
        v->zz_8x8[0][i] = transpose(ff_wmv1_scantable[0][i]);
        v->zz_8x8[1][i] = transpose(ff_wmv1_scantable[1][i]);
        v->zz_8x8[2][i] = transpose(ff_wmv1_scantable[2][i]);
        v->zz_8x8[3][i] = transpose(ff_wmv1_scantable[3][i]);
        v->zzi_8x8[i] = transpose(ff_vc1_adv_interlaced_8x8_zz[i]);
/** Initialize a VC1/WMV3 decoder
 * Parses the sequence (and, for advanced profile, entry point) headers
 * from extradata, picks the output pixel format, and allocates the
 * decoding tables.  Returns 0 or a negative AVERROR code.
 * @todo TODO: Handle VC-1 IDUs (Transport level?)
 * @todo TODO: Decipher remaining bits in extra_data
 */
static av_cold int vc1_decode_init(AVCodecContext *avctx)
    VC1Context *v = avctx->priv_data;
    MpegEncContext *s = &v->s;

    /* save the container output size for WMImage */
    v->output_width = avctx->width;
    v->output_height = avctx->height;

    /* a sequence header in extradata is mandatory for all variants */
    if (!avctx->extradata_size || !avctx->extradata)
        return AVERROR_INVALIDDATA;

    if ((ret = ff_vc1_init_common(v)) < 0)

    if (avctx->codec_id == AV_CODEC_ID_WMV3 || avctx->codec_id == AV_CODEC_ID_WMV3IMAGE) {

        // looks like WMV3 has a sequence header stored in the extradata
        // advanced sequence header may be before the first frame
        // the last byte of the extradata is a version number, 1 for the
        // samples we can decode

        init_get_bits(&gb, avctx->extradata, avctx->extradata_size*8);

        if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0)

        if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE && !v->res_sprite) {
            avpriv_request_sample(avctx, "Non sprite WMV3IMAGE");
            return AVERROR_PATCHWELCOME;

        /* report any extradata bits left over (or over-read) by the header parser */
        count = avctx->extradata_size*8 - get_bits_count(&gb);
            av_log(avctx, AV_LOG_INFO, "Extra data: %i bits left, value: %X\n",
                   count, get_bits_long(&gb, FFMIN(count, 32)));
        } else if (count < 0) {
            av_log(avctx, AV_LOG_INFO, "Read %i bits in overflow\n", -count);
    } else { // VC1/WVC1/WVP2
        const uint8_t *start = avctx->extradata;
        uint8_t *end = avctx->extradata + avctx->extradata_size;
        uint8_t *buf2 = NULL;
        int seq_initialized = 0, ep_initialized = 0;

        if (avctx->extradata_size < 16) {
            av_log(avctx, AV_LOG_ERROR, "Extradata size too small: %i\n", avctx->extradata_size);
            return AVERROR_INVALIDDATA;

        /* scratch buffer for unescaped header payloads */
        buf2 = av_mallocz(avctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
            return AVERROR(ENOMEM);

        start = find_next_marker(start, end); // in WVC1 extradata first byte is its size, but can be 0 in mkv

        /* walk the start-code-delimited units and parse each header type */
        for (; next < end; start = next) {
            next = find_next_marker(start + 4, end);
            size = next - start - 4;
            buf2_size = vc1_unescape_buffer(start + 4, size, buf2);
            init_get_bits(&gb, buf2, buf2_size * 8);
            switch (AV_RB32(start)) {
            case VC1_CODE_SEQHDR:
                if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0) {
            case VC1_CODE_ENTRYPOINT:
                if ((ret = ff_vc1_decode_entry_point(avctx, v, &gb)) < 0) {

        /* advanced profile needs both a sequence header and an entry point */
        if (!seq_initialized || !ep_initialized) {
            av_log(avctx, AV_LOG_ERROR, "Incomplete extradata\n");
            return AVERROR_INVALIDDATA;

    v->res_sprite = (avctx->codec_id == AV_CODEC_ID_VC1IMAGE);

    avctx->profile = v->profile;
    if (v->profile == PROFILE_ADVANCED)
        avctx->level = v->level;

    if (!CONFIG_GRAY || !(avctx->flags & AV_CODEC_FLAG_GRAY))
        avctx->pix_fmt = ff_get_format(avctx, avctx->codec->pix_fmts);
        avctx->pix_fmt = AV_PIX_FMT_GRAY8;
    if (avctx->color_range == AVCOL_RANGE_UNSPECIFIED)
        avctx->color_range = AVCOL_RANGE_MPEG;

    // ensure static VLC tables are initialized
    if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
    if ((ret = ff_vc1_decode_init_alloc_tables(v)) < 0)
    // Hack to ensure the above functions will be called
    // again once we know all necessary settings.
    // That this is necessary might indicate a bug.
    ff_vc1_decode_end(avctx);

    ff_blockdsp_init(&s->bdsp, avctx);
    ff_h264chroma_init(&v->h264chroma, 8);
    ff_qpeldsp_init(&s->qdsp);

    avctx->has_b_frames = !!avctx->max_b_frames;

    /* only export colorimetry values in the ranges the decoder understands */
    if (v->color_prim == 1 || v->color_prim == 5 || v->color_prim == 6)
        avctx->color_primaries = v->color_prim;
    if (v->transfer_char == 1 || v->transfer_char == 7)
        avctx->color_trc = v->transfer_char;
    if (v->matrix_coef == 1 || v->matrix_coef == 6 || v->matrix_coef == 7)
        avctx->colorspace = v->matrix_coef;

    s->mb_width = (avctx->coded_width + 15) >> 4;
    s->mb_height = (avctx->coded_height + 15) >> 4;

    /* advanced profile (and res_fasttx streams) use transposed scan tables */
    if (v->profile == PROFILE_ADVANCED || v->res_fasttx) {
        ff_vc1_init_transposed_scantables(v);
        memcpy(v->zz_8x8, ff_wmv1_scantable, 4*64);

    if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        /* remember the coded sprite size, then report the container size */
        v->sprite_width = avctx->coded_width;
        v->sprite_height = avctx->coded_height;

        avctx->coded_width = avctx->width = v->output_width;
        avctx->coded_height = avctx->height = v->output_height;

        // prevent 16.16 overflows
        if (v->sprite_width > 1 << 14 ||
            v->sprite_height > 1 << 14 ||
            v->output_width > 1 << 14 ||
            v->output_height > 1 << 14) {
            return AVERROR_INVALIDDATA;

        if ((v->sprite_width&1) || (v->sprite_height&1)) {
            avpriv_request_sample(avctx, "odd sprites support");
            return AVERROR_PATCHWELCOME;
/** Close a VC1/WMV3 decoder
 * Frees every table allocated by ff_vc1_decode_init_alloc_tables() plus
 * the sprite output frame and HRD buffers; safe to call on a partially
 * initialized context (av_freep() tolerates NULL).
 * @warning Initial try at using MpegEncContext stuff
 */
av_cold int ff_vc1_decode_end(AVCodecContext *avctx)
    VC1Context *v = avctx->priv_data;

    av_frame_free(&v->sprite_output_frame);

    for (i = 0; i < 4; i++)
        av_freep(&v->sr_rows[i >> 1][i & 1]);
    av_freep(&v->hrd_rate);
    av_freep(&v->hrd_buffer);
    ff_mpv_common_end(&v->s);
    av_freep(&v->mv_type_mb_plane);
    av_freep(&v->direct_mb_plane);
    av_freep(&v->forward_mb_plane);
    av_freep(&v->fieldtx_plane);
    av_freep(&v->acpred_plane);
    av_freep(&v->over_flags_plane);
    av_freep(&v->mb_type_base);
    av_freep(&v->blk_mv_type_base);
    av_freep(&v->mv_f_base);
    av_freep(&v->mv_f_next_base);

    av_freep(&v->cbp_base);
    av_freep(&v->ttblk_base);
    av_freep(&v->is_intra_base); // FIXME use v->mb_type[]
    av_freep(&v->luma_mv_base);
    ff_intrax8_common_end(&v->x8);
/** Decode a VC1/WMV3 frame
 * Handles a whole access unit: unescapes advanced-profile units into
 * temporary buffers, splits out field/slice units, decodes via hwaccel
 * or the software path, and for the image codecs composites sprites into
 * the output frame.
 * @todo TODO: Handle VC-1 IDUs (Transport level?)
 */
static int vc1_decode_frame(AVCodecContext *avctx, void *data,
                            int *got_frame, AVPacket *avpkt)
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size, n_slices = 0, i, ret;
    VC1Context *v = avctx->priv_data;
    MpegEncContext *s = &v->s;
    AVFrame *pict = data;
    uint8_t *buf2 = NULL;
    const uint8_t *buf_start = buf, *buf_start_second_field = NULL;
    int mb_height, n_slices1=-1;
        const uint8_t *rawbuf;
    } *slices = NULL, *tmp;

    if(s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)

    /* no supplementary picture */
    if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == VC1_CODE_ENDOFSEQ)) {
        /* special case for last picture */
        if (s->low_delay == 0 && s->next_picture_ptr) {
            if ((ret = av_frame_ref(pict, s->next_picture_ptr->f)) < 0)
            s->next_picture_ptr = NULL;

    //for advanced profile we may need to parse and unescape data
    if (avctx->codec_id == AV_CODEC_ID_VC1 || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        buf2 = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
            return AVERROR(ENOMEM);

        if (IS_MARKER(AV_RB32(buf))) { /* frame starts with marker and needs to be parsed */
            const uint8_t *start, *end, *next;

            /* split the packet at start codes; each unit is unescaped separately */
            for (start = buf, end = buf + buf_size; next < end; start = next) {
                next = find_next_marker(start + 4, end);
                size = next - start - 4;
                if (size <= 0) continue;
                switch (AV_RB32(start)) {
                    buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
                case VC1_CODE_FIELD: {
                        buf_start_second_field = start;
                    tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
                        ret = AVERROR(ENOMEM);
                    /* the second field is stored like an extra slice */
                    slices[n_slices].buf = av_mallocz(size + AV_INPUT_BUFFER_PADDING_SIZE);
                    if (!slices[n_slices].buf) {
                        ret = AVERROR(ENOMEM);
                    buf_size3 = vc1_unescape_buffer(start + 4, size,
                                                    slices[n_slices].buf);
                    init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
                    slices[n_slices].mby_start = avctx->coded_height + 31 >> 5;
                    slices[n_slices].rawbuf = start;
                    slices[n_slices].raw_size = size + 4;
                    n_slices1 = n_slices - 1; // index of the last slice of the first field
                case VC1_CODE_ENTRYPOINT: /* it should be before frame data */
                    buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
                    init_get_bits(&s->gb, buf2, buf_size2 * 8);
                    ff_vc1_decode_entry_point(avctx, v, &s->gb);
                case VC1_CODE_SLICE: {
                    tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
                        ret = AVERROR(ENOMEM);
                    slices[n_slices].buf = av_mallocz(size + AV_INPUT_BUFFER_PADDING_SIZE);
                    if (!slices[n_slices].buf) {
                        ret = AVERROR(ENOMEM);
                    buf_size3 = vc1_unescape_buffer(start + 4, size,
                                                    slices[n_slices].buf);
                    init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
                    /* slice header starts with a 9-bit MB row address */
                    slices[n_slices].mby_start = get_bits(&slices[n_slices].gb, 9);
                    slices[n_slices].rawbuf = start;
                    slices[n_slices].raw_size = size + 4;
        } else if (v->interlace && ((buf[0] & 0xC0) == 0xC0)) { /* WVC1 interlaced stores both fields divided by marker */
            const uint8_t *divider;

            divider = find_next_marker(buf, buf + buf_size);
            if ((divider == (buf + buf_size)) || AV_RB32(divider) != VC1_CODE_FIELD) {
                av_log(avctx, AV_LOG_ERROR, "Error in WVC1 interlaced frame\n");
                ret = AVERROR_INVALIDDATA;
            } else { // found field marker, unescape second field
                    buf_start_second_field = divider;
                tmp = av_realloc_array(slices, sizeof(*slices), n_slices+1);
                    ret = AVERROR(ENOMEM);
                slices[n_slices].buf = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
                if (!slices[n_slices].buf) {
                    ret = AVERROR(ENOMEM);
                buf_size3 = vc1_unescape_buffer(divider + 4, buf + buf_size - divider - 4, slices[n_slices].buf);
                init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
                slices[n_slices].mby_start = s->mb_height + 1 >> 1;
                slices[n_slices].rawbuf = divider;
                slices[n_slices].raw_size = buf + buf_size - divider;
                n_slices1 = n_slices - 1;
            buf_size2 = vc1_unescape_buffer(buf, divider - buf, buf2);
            buf_size2 = vc1_unescape_buffer(buf, buf_size, buf2);
        init_get_bits(&s->gb, buf2, buf_size2*8);
        /* simple/main profile: packet can be read without unescaping */
        init_get_bits(&s->gb, buf, buf_size*8);

        v->new_sprite = !get_bits1(&s->gb);
        v->two_sprites = get_bits1(&s->gb);
    /* res_sprite means a Windows Media Image stream, AV_CODEC_ID_*IMAGE means
       we're using the sprite compositor. These are intentionally kept separate
       so you can get the raw sprites by using the wmv3 decoder for WMVP or
       the vc1 one for WVP2 */
    if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        // switch AVCodecContext parameters to those of the sprites
        avctx->width = avctx->coded_width = v->sprite_width;
        avctx->height = avctx->coded_height = v->sprite_height;

    /* a coded-size change forces a full reinit below */
    if (s->context_initialized &&
        (s->width != avctx->coded_width ||
         s->height != avctx->coded_height)) {
        ff_vc1_decode_end(avctx);

    if (!s->context_initialized) {
        if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
        if ((ret = ff_vc1_decode_init_alloc_tables(v)) < 0) {
            ff_mpv_common_end(s);

        s->low_delay = !avctx->has_b_frames || v->res_sprite;

        if (v->profile == PROFILE_ADVANCED) {
            if(avctx->coded_width<=1 || avctx->coded_height<=1) {
                ret = AVERROR_INVALIDDATA;
            s->h_edge_pos = avctx->coded_width;
            s->v_edge_pos = avctx->coded_height;

    // do parse frame header
    v->pic_header_flag = 0;
    v->first_pic_header_flag = 1;
    if (v->profile < PROFILE_ADVANCED) {
        if ((ret = ff_vc1_parse_frame_header(v, &s->gb)) < 0) {
        if ((ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
    v->first_pic_header_flag = 0;

    if (avctx->debug & FF_DEBUG_PICT_INFO)
        av_log(v->s.avctx, AV_LOG_DEBUG, "pict_type: %c\n", av_get_picture_type_char(s->pict_type));

    /* the sprite compositor only accepts progressive intra frames */
    if ((avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE)
        && s->pict_type != AV_PICTURE_TYPE_I) {
        av_log(v->s.avctx, AV_LOG_ERROR, "Sprite decoder: expected I-frame\n");
        ret = AVERROR_INVALIDDATA;
    if ((avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE)
        av_log(v->s.avctx, AV_LOG_ERROR, "Sprite decoder: expected Frames not Fields\n");
        ret = AVERROR_INVALIDDATA;

    if ((s->mb_height >> v->field_mode) == 0) {
        av_log(v->s.avctx, AV_LOG_ERROR, "image too short\n");
        ret = AVERROR_INVALIDDATA;

    // for skipping the frame
    s->current_picture.f->pict_type = s->pict_type;
    s->current_picture.f->key_frame = s->pict_type == AV_PICTURE_TYPE_I;

    /* skip B-frames if we don't have reference frames */
    if (!s->last_picture_ptr && (s->pict_type == AV_PICTURE_TYPE_B || s->droppable)) {
        av_log(v->s.avctx, AV_LOG_DEBUG, "Skipping B frame without reference frames\n");
    if ((avctx->skip_frame >= AVDISCARD_NONREF && s->pict_type == AV_PICTURE_TYPE_B) ||
        (avctx->skip_frame >= AVDISCARD_NONKEY && s->pict_type != AV_PICTURE_TYPE_I) ||
        avctx->skip_frame >= AVDISCARD_ALL) {
    if (s->next_p_frame_damaged) {
        if (s->pict_type == AV_PICTURE_TYPE_B)
        s->next_p_frame_damaged = 0;

    if ((ret = ff_mpv_frame_start(s, avctx)) < 0) {

    v->s.current_picture_ptr->field_picture = v->field_mode;
    v->s.current_picture_ptr->f->interlaced_frame = (v->fcm != PROGRESSIVE);
    v->s.current_picture_ptr->f->top_field_first = v->tff;

    // process pulldown flags
    s->current_picture_ptr->f->repeat_pict = 0;
    // Pulldown flags are only valid when 'broadcast' has been set.
    // So ticks_per_frame will be 2
        s->current_picture_ptr->f->repeat_pict = 1;
    } else if (v->rptfrm) {
        // repeat frames
        s->current_picture_ptr->f->repeat_pict = v->rptfrm * 2;

    s->me.qpel_put = s->qdsp.put_qpel_pixels_tab;
    s->me.qpel_avg = s->qdsp.avg_qpel_pixels_tab;

    if (avctx->hwaccel) {
        if (v->field_mode && buf_start_second_field) {
            // decode first field
            s->picture_structure = PICT_BOTTOM_FIELD - v->tff;
            if ((ret = avctx->hwaccel->start_frame(avctx, buf_start, buf_start_second_field - buf_start)) < 0)

            if (n_slices1 == -1) {
                // no slices, decode the field as-is
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, buf_start_second_field - buf_start)) < 0)
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, slices[0].rawbuf - buf_start)) < 0)

                for (i = 0 ; i < n_slices1 + 1; i++) {
                    s->gb = slices[i].gb;
                    s->mb_y = slices[i].mby_start;

                    v->pic_header_flag = get_bits1(&s->gb);
                    if (v->pic_header_flag) {
                        if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
                            av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
                            ret = AVERROR_INVALIDDATA;
                            if (avctx->err_recognition & AV_EF_EXPLODE)

                    if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)

            if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)

            // decode second field
            s->gb = slices[n_slices1 + 1].gb;
            s->mb_y = slices[n_slices1 + 1].mby_start;
            s->picture_structure = PICT_TOP_FIELD + v->tff;

            v->pic_header_flag = 0;
            if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
                av_log(avctx, AV_LOG_ERROR, "parsing header for second field failed");
                ret = AVERROR_INVALIDDATA;
            v->s.current_picture_ptr->f->pict_type = v->s.pict_type;

            if ((ret = avctx->hwaccel->start_frame(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field)) < 0)

            if (n_slices - n_slices1 == 2) {
                // no slices, decode the field as-is
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field)) < 0)
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start_second_field, slices[n_slices1 + 2].rawbuf - buf_start_second_field)) < 0)

                for (i = n_slices1 + 2; i < n_slices; i++) {
                    s->gb = slices[i].gb;
                    s->mb_y = slices[i].mby_start;

                    v->pic_header_flag = get_bits1(&s->gb);
                    if (v->pic_header_flag) {
                        if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
                            av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
                            ret = AVERROR_INVALIDDATA;
                            if (avctx->err_recognition & AV_EF_EXPLODE)

                    if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)

            if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
            s->picture_structure = PICT_FRAME;
            if ((ret = avctx->hwaccel->start_frame(avctx, buf_start, (buf + buf_size) - buf_start)) < 0)

                // no slices, decode the frame as-is
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, (buf + buf_size) - buf_start)) < 0)
                // decode the frame part as the first slice
                if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, slices[0].rawbuf - buf_start)) < 0)

                // and process the slices as additional slices afterwards
                for (i = 0 ; i < n_slices; i++) {
                    s->gb = slices[i].gb;
                    s->mb_y = slices[i].mby_start;

                    v->pic_header_flag = get_bits1(&s->gb);
                    if (v->pic_header_flag) {
                        if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
                            av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
                            ret = AVERROR_INVALIDDATA;
                            if (avctx->err_recognition & AV_EF_EXPLODE)

                    if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)
            if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
        /* software decoding path */
        ff_mpeg_er_frame_start(s);

        v->end_mb_x = s->mb_width;
        if (v->field_mode) {
            /* fields are decoded with doubled strides so each field is contiguous */
            s->current_picture.f->linesize[0] <<= 1;
            s->current_picture.f->linesize[1] <<= 1;
            s->current_picture.f->linesize[2] <<= 1;
            s->uvlinesize <<= 1;
        mb_height = s->mb_height >> v->field_mode;

        av_assert0 (mb_height > 0);

        /* iteration i == n_slices handles the tail after the last slice header */
        for (i = 0; i <= n_slices; i++) {
            if (i > 0 && slices[i - 1].mby_start >= mb_height) {
                if (v->field_mode <= 0) {
                    av_log(v->s.avctx, AV_LOG_ERROR, "Slice %d starts beyond "
                           "picture boundary (%d >= %d)\n", i,
                           slices[i - 1].mby_start, mb_height);
                v->second_field = 1;
                av_assert0((s->mb_height & 1) == 0);
                v->blocks_off = s->b8_stride * (s->mb_height&~1);
                v->mb_off = s->mb_stride * s->mb_height >> 1;
                v->second_field = 0;

            v->pic_header_flag = 0;
            if (v->field_mode && i == n_slices1 + 2) {
                /* start of the second field: it carries a full field header */
                if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
                    av_log(v->s.avctx, AV_LOG_ERROR, "Field header damaged\n");
                    ret = AVERROR_INVALIDDATA;
                    if (avctx->err_recognition & AV_EF_EXPLODE)
            } else if (get_bits1(&s->gb)) {
                v->pic_header_flag = 1;
                if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
                    av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
                    ret = AVERROR_INVALIDDATA;
                    if (avctx->err_recognition & AV_EF_EXPLODE)

            s->start_mb_y = (i == 0) ? 0 : FFMAX(0, slices[i-1].mby_start % mb_height);
            if (!v->field_mode || v->second_field)
                s->end_mb_y = (i == n_slices ) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
                if (i >= n_slices) {
                    av_log(v->s.avctx, AV_LOG_ERROR, "first field slice count too large\n");
                s->end_mb_y = (i == n_slices1 + 1) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
            if (s->end_mb_y <= s->start_mb_y) {
                av_log(v->s.avctx, AV_LOG_ERROR, "end mb y %d %d invalid\n", s->end_mb_y, s->start_mb_y);
            /* inter pictures need cbpcy_vlc set up by the header parser */
            if (((s->pict_type == AV_PICTURE_TYPE_P && !v->p_frame_skipped) ||
                 (s->pict_type == AV_PICTURE_TYPE_B && !v->bi_type)) &&
                av_log(v->s.avctx, AV_LOG_ERROR, "missing cbpcy_vlc\n");
            ff_vc1_decode_blocks(v);
            if (i != n_slices) {
                s->gb = slices[i].gb;
        if (v->field_mode) {
            v->second_field = 0;
            /* undo the stride doubling from above */
            s->current_picture.f->linesize[0] >>= 1;
            s->current_picture.f->linesize[1] >>= 1;
            s->current_picture.f->linesize[2] >>= 1;
            s->uvlinesize >>= 1;
            if (v->s.pict_type != AV_PICTURE_TYPE_BI && v->s.pict_type != AV_PICTURE_TYPE_B) {
                FFSWAP(uint8_t *, v->mv_f_next[0], v->mv_f[0]);
                FFSWAP(uint8_t *, v->mv_f_next[1], v->mv_f[1]);
        ff_dlog(s->avctx, "Consumed %i/%i bits\n",
                get_bits_count(&s->gb), s->gb.size_in_bits);
//      if (get_bits_count(&s->gb) > buf_size * 8)
        if(s->er.error_occurred && s->pict_type == AV_PICTURE_TYPE_B) {
            ret = AVERROR_INVALIDDATA;
        ff_er_frame_end(&s->er);

    ff_mpv_frame_end(s);

    if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
        /* restore the container output size before handing the frame out */
        avctx->width = avctx->coded_width = v->output_width;
        avctx->height = avctx->coded_height = v->output_height;
        if (avctx->skip_frame >= AVDISCARD_NONREF)
        if (!v->sprite_output_frame &&
            !(v->sprite_output_frame = av_frame_alloc())) {
            ret = AVERROR(ENOMEM);
#if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
        if ((ret = vc1_decode_sprites(v, &s->gb)) < 0)
        if ((ret = av_frame_ref(pict, v->sprite_output_frame)) < 0)
        if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
            if ((ret = av_frame_ref(pict, s->current_picture_ptr->f)) < 0)
            ff_print_debug_info(s, s->current_picture_ptr, pict);
        } else if (s->last_picture_ptr) {
            /* delayed output: hand out the previous reference picture */
            if ((ret = av_frame_ref(pict, s->last_picture_ptr->f)) < 0)
            ff_print_debug_info(s, s->last_picture_ptr, pict);

    for (i = 0; i < n_slices; i++)
        av_free(slices[i].buf);

    /* error path: release the per-slice buffers as well */
    for (i = 0; i < n_slices; i++)
        av_free(slices[i].buf);
/*
 * Candidate pixel formats for 4:2:0 VC-1/WMV3 decoding; each hwaccel format
 * is compiled in only when the corresponding CONFIG_* hwaccel is enabled.
 * NOTE(review): this chunk is a lossy dump — the software AV_PIX_FMT_YUV420P
 * fallback entry, several #endif lines, and the terminating "};" are not
 * visible here. Confirm against the complete upstream file before editing.
 */
1181 static const enum AVPixelFormat vc1_hwaccel_pixfmt_list_420[] = {
1182 #if CONFIG_VC1_DXVA2_HWACCEL
1183 AV_PIX_FMT_DXVA2_VLD,
1185 #if CONFIG_VC1_D3D11VA_HWACCEL
1186 AV_PIX_FMT_D3D11VA_VLD,
/* NOTE(review): the entries guarded by the three #if blocks below (NVDEC,
 * VAAPI, VDPAU formats) are missing from this dump, as are their #endif
 * lines — presumably AV_PIX_FMT_CUDA / AV_PIX_FMT_VAAPI / AV_PIX_FMT_VDPAU;
 * verify against the full source. */
1189 #if CONFIG_VC1_NVDEC_HWACCEL
1192 #if CONFIG_VC1_VAAPI_HWACCEL
1195 #if CONFIG_VC1_VDPAU_HWACCEL
/*
 * Public AVCodec registration for the SMPTE VC-1 decoder.  Shares init/close/
 * decode entry points with the WMV3 decoder below and advertises the common
 * 4:2:0 hwaccel pixel-format list.
 * NOTE(review): lossy dump — the ".name" initializer, several HWACCEL_*()
 * entries (DXVA2/NVDEC/VAAPI/VDPAU), the #endif lines, the NULL terminator of
 * .hw_configs, and the closing "};" are missing here; do not treat this
 * fragment as complete.
 */
1202 AVCodec ff_vc1_decoder = {
1204 .long_name = NULL_IF_CONFIG_SMALL("SMPTE VC-1"),
1205 .type = AVMEDIA_TYPE_VIDEO,
1206 .id = AV_CODEC_ID_VC1,
1207 .priv_data_size = sizeof(VC1Context),
1208 .init = vc1_decode_init,
1209 .close = ff_vc1_decode_end,
1210 .decode = vc1_decode_frame,
1211 .flush = ff_mpeg_flush,
/* CAP_DELAY: B-frame reordering means output can lag input by one frame. */
1212 .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
1213 .pix_fmts = vc1_hwaccel_pixfmt_list_420,
1214 .hw_configs = (const AVCodecHWConfigInternal *const []) {
1215 #if CONFIG_VC1_DXVA2_HWACCEL
1218 #if CONFIG_VC1_D3D11VA_HWACCEL
1219 HWACCEL_D3D11VA(vc1),
1221 #if CONFIG_VC1_D3D11VA2_HWACCEL
1222 HWACCEL_D3D11VA2(vc1),
1224 #if CONFIG_VC1_NVDEC_HWACCEL
1227 #if CONFIG_VC1_VAAPI_HWACCEL
1230 #if CONFIG_VC1_VDPAU_HWACCEL
1235 .profiles = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
1238 #if CONFIG_WMV3_DECODER
/*
 * Public AVCodec registration for WMV3 (Windows Media Video 9, VC-1 simple/
 * main profile bitstream).  Identical entry points and capabilities to
 * ff_vc1_decoder; only the codec id and per-codec HWACCEL_*() wrappers differ.
 * NOTE(review): lossy dump — ".name", the #endif lines, the .hw_configs NULL
 * terminator, and the closing "};"/"#endif" pair are missing here.
 */
1239 AVCodec ff_wmv3_decoder = {
1241 .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9"),
1242 .type = AVMEDIA_TYPE_VIDEO,
1243 .id = AV_CODEC_ID_WMV3,
1244 .priv_data_size = sizeof(VC1Context),
1245 .init = vc1_decode_init,
1246 .close = ff_vc1_decode_end,
1247 .decode = vc1_decode_frame,
1248 .flush = ff_mpeg_flush,
1249 .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
1250 .pix_fmts = vc1_hwaccel_pixfmt_list_420,
1251 .hw_configs = (const AVCodecHWConfigInternal *const []) {
1252 #if CONFIG_WMV3_DXVA2_HWACCEL
1253 HWACCEL_DXVA2(wmv3),
1255 #if CONFIG_WMV3_D3D11VA_HWACCEL
1256 HWACCEL_D3D11VA(wmv3),
1258 #if CONFIG_WMV3_D3D11VA2_HWACCEL
1259 HWACCEL_D3D11VA2(wmv3),
1261 #if CONFIG_WMV3_NVDEC_HWACCEL
1262 HWACCEL_NVDEC(wmv3),
1264 #if CONFIG_WMV3_VAAPI_HWACCEL
1265 HWACCEL_VAAPI(wmv3),
1267 #if CONFIG_WMV3_VDPAU_HWACCEL
1268 HWACCEL_VDPAU(wmv3),
1272 .profiles = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
1276 #if CONFIG_WMV3IMAGE_DECODER
/*
 * Public AVCodec registration for the WMV3 Image (sprite) decoder.  Unlike
 * the video decoders above it has no CAP_DELAY (single-image output), flushes
 * via the sprite-specific vc1_sprite_flush, and takes no hwaccel configs.
 * NOTE(review): lossy dump — the .pix_fmts initializer below is truncated;
 * the format entries, closing braces, and "#endif" are not visible here.
 */
1277 AVCodec ff_wmv3image_decoder = {
1278 .name = "wmv3image",
1279 .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image"),
1280 .type = AVMEDIA_TYPE_VIDEO,
1281 .id = AV_CODEC_ID_WMV3IMAGE,
1282 .priv_data_size = sizeof(VC1Context),
1283 .init = vc1_decode_init,
1284 .close = ff_vc1_decode_end,
1285 .decode = vc1_decode_frame,
1286 .capabilities = AV_CODEC_CAP_DR1,
1287 .flush = vc1_sprite_flush,
1288 .pix_fmts = (const enum AVPixelFormat[]) {
1295 #if CONFIG_VC1IMAGE_DECODER
1296 AVCodec ff_vc1image_decoder = {
1298 .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image v2"),
1299 .type = AVMEDIA_TYPE_VIDEO,
1300 .id = AV_CODEC_ID_VC1IMAGE,
1301 .priv_data_size = sizeof(VC1Context),
1302 .init = vc1_decode_init,
1303 .close = ff_vc1_decode_end,
1304 .decode = vc1_decode_frame,
1305 .capabilities = AV_CODEC_CAP_DR1,
1306 .flush = vc1_sprite_flush,
1307 .pix_fmts = (const enum AVPixelFormat[]) {