2 * VC-1 and WMV3 decoder
3 * Copyright (c) 2011 Mashiat Sarker Shakkhar
4 * Copyright (c) 2006-2007 Konstantin Shishkov
5 * Partly based on vc9.c (c) 2005 Anonymous, Alex Beregszaszi, Michael Niedermayer
7 * This file is part of Libav.
9 * Libav is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Lesser General Public
11 * License as published by the Free Software Foundation; either
12 * version 2.1 of the License, or (at your option) any later version.
14 * Libav is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Lesser General Public License for more details.
19 * You should have received a copy of the GNU Lesser General Public
20 * License along with Libav; if not, write to the Free Software
21 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
26 * VC-1 and WMV3 decoder
34 #include "mpegvideo.h"
36 #include "msmpeg4data.h"
41 #if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
typedef struct SpriteData {
    /**
     * Transform coefficients for both sprites in 16.16 fixed point format,
     * in the order they appear in the bitstream:
     * x scale, rotation 1 (unused), x offset,
     * rotation 2 (unused), y scale, y offset, alpha
     */
    int coefs[2][7];

    int effect_type, effect_flag;               ///< post-processing effect id and its flag bit
    int effect_pcount1, effect_pcount2;         ///< amount of effect parameters stored in effect_params
    int effect_params1[15], effect_params2[10]; ///< effect parameters in 16.16 fixed point format
} SpriteData;
62 static inline int get_fp_val(GetBitContext* gb)
64 return (get_bits_long(gb, 30) - (1 << 29)) << 1;
67 static void vc1_sprite_parse_transform(GetBitContext* gb, int c[7])
71 switch (get_bits(gb, 2)) {
74 c[2] = get_fp_val(gb);
78 c[0] = c[4] = get_fp_val(gb);
79 c[2] = get_fp_val(gb);
82 c[0] = get_fp_val(gb);
83 c[2] = get_fp_val(gb);
84 c[4] = get_fp_val(gb);
87 c[0] = get_fp_val(gb);
88 c[1] = get_fp_val(gb);
89 c[2] = get_fp_val(gb);
90 c[3] = get_fp_val(gb);
91 c[4] = get_fp_val(gb);
94 c[5] = get_fp_val(gb);
96 c[6] = get_fp_val(gb);
101 static void vc1_parse_sprites(VC1Context *v, GetBitContext* gb, SpriteData* sd)
103 AVCodecContext *avctx = v->s.avctx;
106 for (sprite = 0; sprite <= v->two_sprites; sprite++) {
107 vc1_sprite_parse_transform(gb, sd->coefs[sprite]);
108 if (sd->coefs[sprite][1] || sd->coefs[sprite][3])
109 avpriv_request_sample(avctx, "Non-zero rotation coefficients");
110 av_log(avctx, AV_LOG_DEBUG, sprite ? "S2:" : "S1:");
111 for (i = 0; i < 7; i++)
112 av_log(avctx, AV_LOG_DEBUG, " %d.%.3d",
113 sd->coefs[sprite][i] / (1<<16),
114 (abs(sd->coefs[sprite][i]) & 0xFFFF) * 1000 / (1 << 16));
115 av_log(avctx, AV_LOG_DEBUG, "\n");
119 if (sd->effect_type = get_bits_long(gb, 30)) {
120 switch (sd->effect_pcount1 = get_bits(gb, 4)) {
122 vc1_sprite_parse_transform(gb, sd->effect_params1);
125 vc1_sprite_parse_transform(gb, sd->effect_params1);
126 vc1_sprite_parse_transform(gb, sd->effect_params1 + 7);
129 for (i = 0; i < sd->effect_pcount1; i++)
130 sd->effect_params1[i] = get_fp_val(gb);
132 if (sd->effect_type != 13 || sd->effect_params1[0] != sd->coefs[0][6]) {
133 // effect 13 is simple alpha blending and matches the opacity above
134 av_log(avctx, AV_LOG_DEBUG, "Effect: %d; params: ", sd->effect_type);
135 for (i = 0; i < sd->effect_pcount1; i++)
136 av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
137 sd->effect_params1[i] / (1 << 16),
138 (abs(sd->effect_params1[i]) & 0xFFFF) * 1000 / (1 << 16));
139 av_log(avctx, AV_LOG_DEBUG, "\n");
142 sd->effect_pcount2 = get_bits(gb, 16);
143 if (sd->effect_pcount2 > 10) {
144 av_log(avctx, AV_LOG_ERROR, "Too many effect parameters\n");
146 } else if (sd->effect_pcount2) {
148 av_log(avctx, AV_LOG_DEBUG, "Effect params 2: ");
149 while (++i < sd->effect_pcount2) {
150 sd->effect_params2[i] = get_fp_val(gb);
151 av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
152 sd->effect_params2[i] / (1 << 16),
153 (abs(sd->effect_params2[i]) & 0xFFFF) * 1000 / (1 << 16));
155 av_log(avctx, AV_LOG_DEBUG, "\n");
158 if (sd->effect_flag = get_bits1(gb))
159 av_log(avctx, AV_LOG_DEBUG, "Effect flag set\n");
161 if (get_bits_count(gb) >= gb->size_in_bits +
162 (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE ? 64 : 0))
163 av_log(avctx, AV_LOG_ERROR, "Buffer overrun\n");
164 if (get_bits_count(gb) < gb->size_in_bits - 8)
165 av_log(avctx, AV_LOG_WARNING, "Buffer not fully read\n");
/* Composite one or two sprites into v->sprite_output_frame using the
 * transforms parsed into `sd`. Horizontal scaling goes through
 * vc1dsp.sprite_h with a two-row cache (sr_rows/sr_cache); vertical
 * blending picks the vc1dsp.sprite_v_* variant matching which rows need
 * sub-pixel interpolation. NOTE(review): several lines are missing from
 * this capture (ysub/next_line declarations, some braces) — the visible
 * statements are kept byte-identical. */
168 static void vc1_draw_sprites(VC1Context *v, SpriteData* sd)
170     int i, plane, row, sprite;
/* per-sprite cache of which source lines sr_rows currently holds; -1 = empty */
171     int sr_cache[2][2] = { { -1, -1 }, { -1, -1 } };
172     uint8_t* src_h[2][2];
173     int xoff[2], xadv[2], yoff[2], yadv[2], alpha;
175     MpegEncContext *s = &v->s;
/* Derive per-sprite 16.16 offsets/steps, clipped so sampling stays inside the sprite. */
177     for (i = 0; i < 2; i++) {
178         xoff[i] = av_clip(sd->coefs[i][2], 0, v->sprite_width-1 << 16);
179         xadv[i] = sd->coefs[i][0];
180         if (xadv[i] != 1<<16 || (v->sprite_width << 16) - (v->output_width << 16) - xoff[i])
181             xadv[i] = av_clip(xadv[i], 0, ((v->sprite_width<<16) - xoff[i] - 1) / v->output_width);
183         yoff[i] = av_clip(sd->coefs[i][5], 0, v->sprite_height-1 << 16);
184         yadv[i] = av_clip(sd->coefs[i][4], 0, ((v->sprite_height << 16) - yoff[i]) / v->output_height);
/* blend factor between the two sprites comes from sprite 2's alpha coefficient */
186     alpha = av_clip_uint16(sd->coefs[1][6]);
/* luma only when GRAY is requested, otherwise all three planes */
188     for (plane = 0; plane < (s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++) {
189         int width = v->output_width>>!!plane;
191         for (row = 0; row < v->output_height>>!!plane; row++) {
192             uint8_t *dst = v->sprite_output_frame->data[plane] +
193                            v->sprite_output_frame->linesize[plane] * row;
195             for (sprite = 0; sprite <= v->two_sprites; sprite++) {
196                 uint8_t *iplane = s->current_picture.f->data[plane];
197                 int      iline  = s->current_picture.f->linesize[plane];
198                 int      ycoord = yoff[sprite] + yadv[sprite] * row;
199                 int      yline  = ycoord >> 16;
/* fractional part of the vertical position, used for interpolation below */
201                 ysub[sprite] = ycoord & 0xFFFF;
/* (conditional missing from capture) second sprite reads from the previous picture */
203                     iplane = s->last_picture.f->data[plane];
204                     iline  = s->last_picture.f->linesize[plane];
206                 next_line = FFMIN(yline + 1, (v->sprite_height >> !!plane) - 1) * iline;
/* fast path: integer-aligned, unit horizontal scale — sample source rows directly */
207                 if (!(xoff[sprite] & 0xFFFF) && xadv[sprite] == 1 << 16) {
208                         src_h[sprite][0] = iplane + (xoff[sprite] >> 16) + yline * iline;
210                             src_h[sprite][1] = iplane + (xoff[sprite] >> 16) + next_line;
/* slow path: horizontally rescale rows into sr_rows, reusing cached lines */
212                     if (sr_cache[sprite][0] != yline) {
213                         if (sr_cache[sprite][1] == yline) {
/* the wanted line is in the other cache slot: swap instead of rescaling */
214                             FFSWAP(uint8_t*, v->sr_rows[sprite][0], v->sr_rows[sprite][1]);
215                             FFSWAP(int, sr_cache[sprite][0], sr_cache[sprite][1]);
217                             v->vc1dsp.sprite_h(v->sr_rows[sprite][0], iplane + yline * iline, xoff[sprite], xadv[sprite], width);
218                             sr_cache[sprite][0] = yline;
/* second row only needed when vertical interpolation will happen */
221                     if (ysub[sprite] && sr_cache[sprite][1] != yline + 1) {
222                         v->vc1dsp.sprite_h(v->sr_rows[sprite][1],
223                                            iplane + next_line, xoff[sprite],
224                                            xadv[sprite], width);
225                         sr_cache[sprite][1] = yline + 1;
227                     src_h[sprite][0] = v->sr_rows[sprite][0];
228                     src_h[sprite][1] = v->sr_rows[sprite][1];
/* Vertical stage: single sprite (with/without interpolation)... */
232             if (!v->two_sprites) {
234                     v->vc1dsp.sprite_v_single(dst, src_h[0][0], src_h[0][1], ysub[0], width);
236                     memcpy(dst, src_h[0][0], width);
/* ...or two sprites blended with `alpha`, choosing the variant by which
 * sprites need vertical interpolation */
239                 if (ysub[0] && ysub[1]) {
240                     v->vc1dsp.sprite_v_double_twoscale(dst, src_h[0][0], src_h[0][1], ysub[0],
241                                                        src_h[1][0], src_h[1][1], ysub[1], alpha, width);
242                 } else if (ysub[0]) {
243                     v->vc1dsp.sprite_v_double_onescale(dst, src_h[0][0], src_h[0][1], ysub[0],
244                                                        src_h[1][0], alpha, width);
245                 } else if (ysub[1]) {
/* sprite roles swapped, so the blend factor is inverted */
246                     v->vc1dsp.sprite_v_double_onescale(dst, src_h[1][0], src_h[1][1], ysub[1],
247                                                        src_h[0][0], (1<<16)-1-alpha, width);
249                     v->vc1dsp.sprite_v_double_noscale(dst, src_h[0][0], src_h[1][0], alpha, width);
/* per-plane cache reset between planes (body missing from capture) */
255     for (i = 0; i < 2; i++) {
265 static int vc1_decode_sprites(VC1Context *v, GetBitContext* gb)
267 MpegEncContext *s = &v->s;
268 AVCodecContext *avctx = s->avctx;
271 vc1_parse_sprites(v, gb, &sd);
273 if (!s->current_picture.f->data[0]) {
274 av_log(avctx, AV_LOG_ERROR, "Got no sprites\n");
278 if (v->two_sprites && (!s->last_picture_ptr || !s->last_picture.f->data[0])) {
279 av_log(avctx, AV_LOG_WARNING, "Need two sprites, only got one\n");
283 av_frame_unref(v->sprite_output_frame);
284 if (ff_get_buffer(avctx, v->sprite_output_frame, 0) < 0) {
285 av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
289 vc1_draw_sprites(v, &sd);
294 static void vc1_sprite_flush(AVCodecContext *avctx)
296 VC1Context *v = avctx->priv_data;
297 MpegEncContext *s = &v->s;
298 AVFrame *f = s->current_picture.f;
301 /* Windows Media Image codecs have a convergence interval of two keyframes.
302 Since we can't enforce it, clear to black the missing sprite. This is
303 wrong but it looks better than doing nothing. */
306 for (plane = 0; plane < (s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++)
307 for (i = 0; i < v->sprite_height>>!!plane; i++)
308 memset(f->data[plane] + i * f->linesize[plane],
309 plane ? 128 : 0, f->linesize[plane]);
314 av_cold int ff_vc1_decode_init_alloc_tables(VC1Context *v)
316 MpegEncContext *s = &v->s;
318 int mb_height = FFALIGN(s->mb_height, 2);
320 /* Allocate mb bitplanes */
321 v->mv_type_mb_plane = av_malloc (s->mb_stride * mb_height);
322 v->direct_mb_plane = av_malloc (s->mb_stride * mb_height);
323 v->forward_mb_plane = av_malloc (s->mb_stride * mb_height);
324 v->fieldtx_plane = av_mallocz(s->mb_stride * mb_height);
325 v->acpred_plane = av_malloc (s->mb_stride * mb_height);
326 v->over_flags_plane = av_malloc (s->mb_stride * mb_height);
327 if (!v->mv_type_mb_plane || !v->direct_mb_plane || !v->forward_mb_plane ||
328 !v->fieldtx_plane || !v->acpred_plane || !v->over_flags_plane)
331 v->n_allocated_blks = s->mb_width + 2;
332 v->block = av_malloc(sizeof(*v->block) * v->n_allocated_blks);
333 v->cbp_base = av_malloc(sizeof(v->cbp_base[0]) * 2 * s->mb_stride);
334 if (!v->block || !v->cbp_base)
336 v->cbp = v->cbp_base + s->mb_stride;
337 v->ttblk_base = av_malloc(sizeof(v->ttblk_base[0]) * 2 * s->mb_stride);
340 v->ttblk = v->ttblk_base + s->mb_stride;
341 v->is_intra_base = av_mallocz(sizeof(v->is_intra_base[0]) * 2 * s->mb_stride);
342 if (!v->is_intra_base)
344 v->is_intra = v->is_intra_base + s->mb_stride;
345 v->luma_mv_base = av_malloc(sizeof(v->luma_mv_base[0]) * 2 * s->mb_stride);
346 if (!v->luma_mv_base)
348 v->luma_mv = v->luma_mv_base + s->mb_stride;
350 /* allocate block type info in that way so it could be used with s->block_index[] */
351 v->mb_type_base = av_malloc(s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
352 if (!v->mb_type_base)
354 v->mb_type[0] = v->mb_type_base + s->b8_stride + 1;
355 v->mb_type[1] = v->mb_type_base + s->b8_stride * (mb_height * 2 + 1) + s->mb_stride + 1;
356 v->mb_type[2] = v->mb_type[1] + s->mb_stride * (mb_height + 1);
358 /* allocate memory to store block level MV info */
359 v->blk_mv_type_base = av_mallocz( s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
360 if (!v->blk_mv_type_base)
362 v->blk_mv_type = v->blk_mv_type_base + s->b8_stride + 1;
363 v->mv_f_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
366 v->mv_f[0] = v->mv_f_base + s->b8_stride + 1;
367 v->mv_f[1] = v->mv_f[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
368 v->mv_f_next_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
369 if (!v->mv_f_next_base)
371 v->mv_f_next[0] = v->mv_f_next_base + s->b8_stride + 1;
372 v->mv_f_next[1] = v->mv_f_next[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
374 ff_intrax8_common_init(&v->x8,s);
376 if (s->avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || s->avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
377 for (i = 0; i < 4; i++) {
378 v->sr_rows[i >> 1][i & 1] = av_malloc(v->output_width);
379 if (!v->sr_rows[i >> 1][i & 1])
387 ff_vc1_decode_end(s->avctx);
388 return AVERROR(ENOMEM);
391 av_cold void ff_vc1_init_transposed_scantables(VC1Context *v)
394 for (i = 0; i < 64; i++) {
395 #define transpose(x) ((x >> 3) | ((x & 7) << 3))
396 v->zz_8x8[0][i] = transpose(ff_wmv1_scantable[0][i]);
397 v->zz_8x8[1][i] = transpose(ff_wmv1_scantable[1][i]);
398 v->zz_8x8[2][i] = transpose(ff_wmv1_scantable[2][i]);
399 v->zz_8x8[3][i] = transpose(ff_wmv1_scantable[3][i]);
400 v->zzi_8x8[i] = transpose(ff_vc1_adv_interlaced_8x8_zz[i]);
406 /** Initialize a VC1/WMV3 decoder
407 * @todo TODO: Handle VC-1 IDUs (Transport level?)
408 * @todo TODO: Decypher remaining bits in extra_data
410 static av_cold int vc1_decode_init(AVCodecContext *avctx)
412 VC1Context *v = avctx->priv_data;
413 MpegEncContext *s = &v->s;
416 /* save the container output size for WMImage */
417 v->output_width = avctx->width;
418 v->output_height = avctx->height;
420 if (!avctx->extradata_size || !avctx->extradata)
422 if (!(avctx->flags & AV_CODEC_FLAG_GRAY))
423 avctx->pix_fmt = ff_get_format(avctx, avctx->codec->pix_fmts);
425 avctx->pix_fmt = AV_PIX_FMT_GRAY8;
428 if (ff_vc1_init_common(v) < 0)
430 ff_blockdsp_init(&s->bdsp, avctx);
431 ff_h264chroma_init(&v->h264chroma, 8);
432 ff_qpeldsp_init(&s->qdsp);
434 if (avctx->codec_id == AV_CODEC_ID_WMV3 || avctx->codec_id == AV_CODEC_ID_WMV3IMAGE) {
437 // looks like WMV3 has a sequence header stored in the extradata
438 // advanced sequence header may be before the first frame
439 // the last byte of the extradata is a version number, 1 for the
440 // samples we can decode
442 init_get_bits(&gb, avctx->extradata, avctx->extradata_size*8);
444 if (ff_vc1_decode_sequence_header(avctx, v, &gb) < 0)
447 count = avctx->extradata_size*8 - get_bits_count(&gb);
449 av_log(avctx, AV_LOG_INFO, "Extra data: %i bits left, value: %X\n",
450 count, get_bits_long(&gb, FFMIN(count, 32)));
451 } else if (count < 0) {
452 av_log(avctx, AV_LOG_INFO, "Read %i bits in overflow\n", -count);
454 } else { // VC1/WVC1/WVP2
455 const uint8_t *start = avctx->extradata;
456 uint8_t *end = avctx->extradata + avctx->extradata_size;
459 uint8_t *buf2 = NULL;
460 int seq_initialized = 0, ep_initialized = 0;
462 if (avctx->extradata_size < 16) {
463 av_log(avctx, AV_LOG_ERROR, "Extradata size too small: %i\n", avctx->extradata_size);
467 buf2 = av_mallocz(avctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
468 start = find_next_marker(start, end); // in WVC1 extradata first byte is its size, but can be 0 in mkv
470 for (; next < end; start = next) {
471 next = find_next_marker(start + 4, end);
472 size = next - start - 4;
475 buf2_size = vc1_unescape_buffer(start + 4, size, buf2);
476 init_get_bits(&gb, buf2, buf2_size * 8);
477 switch (AV_RB32(start)) {
478 case VC1_CODE_SEQHDR:
479 if (ff_vc1_decode_sequence_header(avctx, v, &gb) < 0) {
485 case VC1_CODE_ENTRYPOINT:
486 if (ff_vc1_decode_entry_point(avctx, v, &gb) < 0) {
495 if (!seq_initialized || !ep_initialized) {
496 av_log(avctx, AV_LOG_ERROR, "Incomplete extradata\n");
499 v->res_sprite = (avctx->codec_id == AV_CODEC_ID_VC1IMAGE);
502 v->sprite_output_frame = av_frame_alloc();
503 if (!v->sprite_output_frame)
504 return AVERROR(ENOMEM);
506 avctx->profile = v->profile;
507 if (v->profile == PROFILE_ADVANCED)
508 avctx->level = v->level;
510 avctx->has_b_frames = !!avctx->max_b_frames;
512 if (v->color_prim == 1 || v->color_prim == 5 || v->color_prim == 6)
513 avctx->color_primaries = v->color_prim;
514 if (v->transfer_char == 1 || v->transfer_char == 7)
515 avctx->color_trc = v->transfer_char;
516 if (v->matrix_coef == 1 || v->matrix_coef == 6 || v->matrix_coef == 7)
517 avctx->colorspace = v->matrix_coef;
519 s->mb_width = (avctx->coded_width + 15) >> 4;
520 s->mb_height = (avctx->coded_height + 15) >> 4;
522 if (v->profile == PROFILE_ADVANCED || v->res_fasttx) {
523 ff_vc1_init_transposed_scantables(v);
525 memcpy(v->zz_8x8, ff_wmv1_scantable, 4*64);
530 if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
531 v->sprite_width = avctx->coded_width;
532 v->sprite_height = avctx->coded_height;
534 avctx->coded_width = avctx->width = v->output_width;
535 avctx->coded_height = avctx->height = v->output_height;
537 // prevent 16.16 overflows
538 if (v->sprite_width > 1 << 14 ||
539 v->sprite_height > 1 << 14 ||
540 v->output_width > 1 << 14 ||
541 v->output_height > 1 << 14) return -1;
546 /** Close a VC1/WMV3 decoder
547 * @warning Initial try at using MpegEncContext stuff
549 av_cold int ff_vc1_decode_end(AVCodecContext *avctx)
551 VC1Context *v = avctx->priv_data;
554 av_frame_free(&v->sprite_output_frame);
556 for (i = 0; i < 4; i++)
557 av_freep(&v->sr_rows[i >> 1][i & 1]);
558 av_freep(&v->hrd_rate);
559 av_freep(&v->hrd_buffer);
560 ff_mpv_common_end(&v->s);
561 av_freep(&v->mv_type_mb_plane);
562 av_freep(&v->direct_mb_plane);
563 av_freep(&v->forward_mb_plane);
564 av_freep(&v->fieldtx_plane);
565 av_freep(&v->acpred_plane);
566 av_freep(&v->over_flags_plane);
567 av_freep(&v->mb_type_base);
568 av_freep(&v->blk_mv_type_base);
569 av_freep(&v->mv_f_base);
570 av_freep(&v->mv_f_next_base);
572 av_freep(&v->cbp_base);
573 av_freep(&v->ttblk_base);
574 av_freep(&v->is_intra_base); // FIXME use v->mb_type[]
575 av_freep(&v->luma_mv_base);
576 ff_intrax8_common_end(&v->x8);
581 /** Decode a VC1/WMV3 frame
582 * @todo TODO: Handle VC-1 IDUs (Transport level?)
/* NOTE(review): this capture is missing many lines (declarations of the
 * slice struct fields, error paths, gotos, closing braces); all visible
 * statements are kept byte-identical. Overall flow: unescape/split the
 * packet into picture + slices, (re)initialize the context on dimension
 * change, parse the frame header, decode per-slice, then hand out either
 * the composited sprite frame or the reordered picture. */
584 static int vc1_decode_frame(AVCodecContext *avctx, void *data,
585                             int *got_frame, AVPacket *avpkt)
587 const uint8_t *buf = avpkt->data;
588 int buf_size = avpkt->size, n_slices = 0, i, ret;
589 VC1Context *v = avctx->priv_data;
590 MpegEncContext *s = &v->s;
591 AVFrame *pict = data;
592 uint8_t *buf2 = NULL;
593 const uint8_t *buf_start = buf;
594 int mb_height, n_slices1;
/* per-slice unescaped buffer + bit reader + starting MB row (struct head missing) */
599 } *slices = NULL, *tmp;
601 /* no supplementary picture */
602 if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == VC1_CODE_ENDOFSEQ)) {
603 /* special case for last picture */
604 if (s->low_delay == 0 && s->next_picture_ptr) {
605 if ((ret = av_frame_ref(pict, s->next_picture_ptr->f)) < 0)
607 s->next_picture_ptr = NULL;
615 //for advanced profile we may need to parse and unescape data
616 if (avctx->codec_id == AV_CODEC_ID_VC1 || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
618 buf2 = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
620 if (IS_MARKER(AV_RB32(buf))) { /* frame starts with marker and needs to be parsed */
621 const uint8_t *start, *end, *next;
/* iterate over all marker-delimited IDUs in the packet */
625 for (start = buf, end = buf + buf_size; next < end; start = next) {
626 next = find_next_marker(start + 4, end);
627 size = next - start - 4;
628 if (size <= 0) continue;
629 switch (AV_RB32(start)) {
/* picture data: unescaped into buf2 (case label missing from capture) */
633 buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
635 case VC1_CODE_FIELD: {
/* grow the slice array; each field/slice keeps its own unescaped copy */
637 tmp = av_realloc(slices, sizeof(*slices) * (n_slices+1));
641 slices[n_slices].buf = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
642 if (!slices[n_slices].buf)
644 buf_size3 = vc1_unescape_buffer(start + 4, size,
645                                 slices[n_slices].buf);
646 init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
648 /* assuming that the field marker is at the exact middle,
650 slices[n_slices].mby_start = s->mb_height >> 1;
651 n_slices1 = n_slices - 1; // index of the last slice of the first field
655 case VC1_CODE_ENTRYPOINT: /* it should be before frame data */
656 buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
657 init_get_bits(&s->gb, buf2, buf_size2 * 8);
658 ff_vc1_decode_entry_point(avctx, v, &s->gb);
660 case VC1_CODE_SLICE: {
662 tmp = av_realloc(slices, sizeof(*slices) * (n_slices+1));
666 slices[n_slices].buf = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
667 if (!slices[n_slices].buf)
669 buf_size3 = vc1_unescape_buffer(start + 4, size,
670                                 slices[n_slices].buf);
671 init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
/* a slice header carries its own 9-bit starting MB row */
673 slices[n_slices].mby_start = get_bits(&slices[n_slices].gb, 9);
679 } else if (v->interlace && ((buf[0] & 0xC0) == 0xC0)) { /* WVC1 interlaced stores both fields divided by marker */
680 const uint8_t *divider;
683 divider = find_next_marker(buf, buf + buf_size);
684 if ((divider == (buf + buf_size)) || AV_RB32(divider) != VC1_CODE_FIELD) {
685 av_log(avctx, AV_LOG_ERROR, "Error in WVC1 interlaced frame\n");
687 } else { // found field marker, unescape second field
688 tmp = av_realloc(slices, sizeof(*slices) * (n_slices+1));
692 slices[n_slices].buf = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
693 if (!slices[n_slices].buf)
695 buf_size3 = vc1_unescape_buffer(divider + 4, buf + buf_size - divider - 4, slices[n_slices].buf);
696 init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
698 slices[n_slices].mby_start = s->mb_height >> 1;
699 n_slices1 = n_slices - 1;
/* first field (up to the divider) goes through buf2 */
702 buf_size2 = vc1_unescape_buffer(buf, divider - buf, buf2);
704 buf_size2 = vc1_unescape_buffer(buf, buf_size, buf2);
706 init_get_bits(&s->gb, buf2, buf_size2*8);
/* simple profiles: packet is a raw picture, no unescaping needed */
708 init_get_bits(&s->gb, buf, buf_size*8);
/* sprite bookkeeping read from the picture layer (WMV Image) */
711 v->new_sprite = !get_bits1(&s->gb);
712 v->two_sprites = get_bits1(&s->gb);
713 /* res_sprite means a Windows Media Image stream, AV_CODEC_ID_*IMAGE means
714 we're using the sprite compositor. These are intentionally kept separate
715 so you can get the raw sprites by using the wmv3 decoder for WMVP or
716 the vc1 one for WVP2 */
717 if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
719 // switch AVCodecContext parameters to those of the sprites
720 avctx->width = avctx->coded_width = v->sprite_width;
721 avctx->height = avctx->coded_height = v->sprite_height;
/* tear down and re-init the context when the coded size changed */
728 if (s->context_initialized &&
729     (s->width != avctx->coded_width ||
730      s->height != avctx->coded_height)) {
731 ff_vc1_decode_end(avctx);
734 if (!s->context_initialized) {
735 if (ff_msmpeg4_decode_init(avctx) < 0)
737 if (ff_vc1_decode_init_alloc_tables(v) < 0) {
738 ff_mpv_common_end(s);
742 s->low_delay = !avctx->has_b_frames || v->res_sprite;
744 if (v->profile == PROFILE_ADVANCED) {
745 s->h_edge_pos = avctx->coded_width;
746 s->v_edge_pos = avctx->coded_height;
750 // do parse frame header
751 v->pic_header_flag = 0;
752 v->first_pic_header_flag = 1;
753 if (v->profile < PROFILE_ADVANCED) {
754 if (ff_vc1_parse_frame_header(v, &s->gb) < 0) {
758 if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
762 v->first_pic_header_flag = 0;
/* the sprite compositor only accepts intra pictures */
764 if ((avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE)
765     && s->pict_type != AV_PICTURE_TYPE_I) {
766 av_log(v->s.avctx, AV_LOG_ERROR, "Sprite decoder: expected I-frame\n");
770 // for skipping the frame
771 s->current_picture.f->pict_type = s->pict_type;
772 s->current_picture.f->key_frame = s->pict_type == AV_PICTURE_TYPE_I;
774 /* skip B-frames if we don't have reference frames */
775 if (!s->last_picture_ptr && (s->pict_type == AV_PICTURE_TYPE_B || s->droppable)) {
/* honour the caller's skip_frame discard policy */
778 if ((avctx->skip_frame >= AVDISCARD_NONREF && s->pict_type == AV_PICTURE_TYPE_B) ||
779     (avctx->skip_frame >= AVDISCARD_NONKEY && s->pict_type != AV_PICTURE_TYPE_I) ||
780     avctx->skip_frame >= AVDISCARD_ALL) {
784 if (s->next_p_frame_damaged) {
785 if (s->pict_type == AV_PICTURE_TYPE_B)
788 s->next_p_frame_damaged = 0;
791 if (ff_mpv_frame_start(s, avctx) < 0) {
795 // process pulldown flags
796 s->current_picture_ptr->f->repeat_pict = 0;
797 // Pulldown flags are only valid when 'broadcast' has been set.
798 // So ticks_per_frame will be 2
801 s->current_picture_ptr->f->repeat_pict = 1;
802 } else if (v->rptfrm) {
804 s->current_picture_ptr->f->repeat_pict = v->rptfrm * 2;
807 s->me.qpel_put = s->qdsp.put_qpel_pixels_tab;
808 s->me.qpel_avg = s->qdsp.avg_qpel_pixels_tab;
/* hardware acceleration bypasses the software slice loop entirely */
810 if (avctx->hwaccel) {
811 if (avctx->hwaccel->start_frame(avctx, buf, buf_size) < 0)
813 if (avctx->hwaccel->decode_slice(avctx, buf_start, (buf + buf_size) - buf_start) < 0)
815 if (avctx->hwaccel->end_frame(avctx) < 0)
820 ff_mpeg_er_frame_start(s);
822 v->bits = buf_size * 8;
823 v->end_mb_x = s->mb_width;
/* field pictures: double the line stride so each field is decoded separately */
825 s->current_picture.f->linesize[0] <<= 1;
826 s->current_picture.f->linesize[1] <<= 1;
827 s->current_picture.f->linesize[2] <<= 1;
831 mb_height = s->mb_height >> v->field_mode;
834 av_log(v->s.avctx, AV_LOG_ERROR, "Invalid mb_height.\n");
/* decode slice 0 (picture header already parsed) then each extra slice */
838 for (i = 0; i <= n_slices; i++) {
839 if (i > 0 && slices[i - 1].mby_start >= mb_height) {
840 if (v->field_mode <= 0) {
841 av_log(v->s.avctx, AV_LOG_ERROR, "Slice %d starts beyond "
842        "picture boundary (%d >= %d)\n", i,
843        slices[i - 1].mby_start, mb_height);
/* second-field offsets into the block/MB arrays */
847 v->blocks_off = s->mb_width * s->mb_height << 1;
848 v->mb_off = s->mb_stride * s->mb_height >> 1;
855 v->pic_header_flag = 0;
/* the slice right after the field marker carries a full field header */
856 if (v->field_mode && i == n_slices1 + 2) {
857 if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
858 av_log(v->s.avctx, AV_LOG_ERROR, "Field header damaged\n");
859 if (avctx->err_recognition & AV_EF_EXPLODE)
863 } else if (get_bits1(&s->gb)) {
864 v->pic_header_flag = 1;
865 if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
866 av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
867 if (avctx->err_recognition & AV_EF_EXPLODE)
/* compute this slice's MB row range, then decode it */
875 s->start_mb_y = (i == 0) ? 0 : FFMAX(0, slices[i-1].mby_start % mb_height);
876 if (!v->field_mode || v->second_field)
877 s->end_mb_y = (i == n_slices ) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
879 s->end_mb_y = (i <= n_slices1 + 1) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
880 ff_vc1_decode_blocks(v);
/* switch the bit reader to the next slice's own buffer */
882 s->gb = slices[i].gb;
/* undo the field-mode stride doubling from above */
886 s->current_picture.f->linesize[0] >>= 1;
887 s->current_picture.f->linesize[1] >>= 1;
888 s->current_picture.f->linesize[2] >>= 1;
/* reference pictures rotate the forward/next MV-field planes */
891 if (v->s.pict_type != AV_PICTURE_TYPE_BI && v->s.pict_type != AV_PICTURE_TYPE_B) {
892 FFSWAP(uint8_t *, v->mv_f_next[0], v->mv_f[0]);
893 FFSWAP(uint8_t *, v->mv_f_next[1], v->mv_f[1]);
896 ff_dlog(s->avctx, "Consumed %i/%i bits\n",
897         get_bits_count(&s->gb), s->gb.size_in_bits);
898 // if (get_bits_count(&s->gb) > buf_size * 8)
901 ff_er_frame_end(&s->er);
/* WMV Image: restore output size and run the sprite compositor */
906 if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
908 avctx->width = avctx->coded_width = v->output_width;
909 avctx->height = avctx->coded_height = v->output_height;
910 if (avctx->skip_frame >= AVDISCARD_NONREF)
912 #if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
913 if (vc1_decode_sprites(v, &s->gb))
916 if ((ret = av_frame_ref(pict, v->sprite_output_frame)) < 0)
/* normal path: return the current picture (low delay / B) or the delayed one */
920 if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
921 if ((ret = av_frame_ref(pict, s->current_picture_ptr->f)) < 0)
923 ff_print_debug_info(s, s->current_picture_ptr);
925 } else if (s->last_picture_ptr) {
926 if ((ret = av_frame_ref(pict, s->last_picture_ptr->f)) < 0)
928 ff_print_debug_info(s, s->last_picture_ptr);
/* success path cleanup of the per-slice buffers */
935 for (i = 0; i < n_slices; i++)
936 av_free(slices[i].buf);
/* error path cleanup (label missing from capture) */
942 for (i = 0; i < n_slices; i++)
943 av_free(slices[i].buf);
949 static const enum AVPixelFormat vc1_hwaccel_pixfmt_list_420[] = {
950 #if CONFIG_VC1_DXVA2_HWACCEL
951 AV_PIX_FMT_DXVA2_VLD,
953 #if CONFIG_VC1_D3D11VA_HWACCEL
954 AV_PIX_FMT_D3D11VA_VLD,
956 #if CONFIG_VC1_VAAPI_HWACCEL
957 AV_PIX_FMT_VAAPI_VLD,
959 #if CONFIG_VC1_VDPAU_HWACCEL
966 AVCodec ff_vc1_decoder = {
968 .long_name = NULL_IF_CONFIG_SMALL("SMPTE VC-1"),
969 .type = AVMEDIA_TYPE_VIDEO,
970 .id = AV_CODEC_ID_VC1,
971 .priv_data_size = sizeof(VC1Context),
972 .init = vc1_decode_init,
973 .close = ff_vc1_decode_end,
974 .decode = vc1_decode_frame,
975 .flush = ff_mpeg_flush,
976 .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
977 .pix_fmts = vc1_hwaccel_pixfmt_list_420,
978 .profiles = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
#if CONFIG_WMV3_DECODER
/* Decoder registration for Windows Media Video 9 (WMV3). */
AVCodec ff_wmv3_decoder = {
    .name           = "wmv3",
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 9"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_WMV3,
    .priv_data_size = sizeof(VC1Context),
    .init           = vc1_decode_init,
    .close          = ff_vc1_decode_end,
    .decode         = vc1_decode_frame,
    .flush          = ff_mpeg_flush,
    .capabilities   = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
    .pix_fmts       = vc1_hwaccel_pixfmt_list_420,
    .profiles       = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
};
#endif
#if CONFIG_WMV3IMAGE_DECODER
/* Decoder registration for Windows Media Image (WMVP): sprite compositing,
 * software YUV420P output only, no frame delay. */
AVCodec ff_wmv3image_decoder = {
    .name           = "wmv3image",
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_WMV3IMAGE,
    .priv_data_size = sizeof(VC1Context),
    .init           = vc1_decode_init,
    .close          = ff_vc1_decode_end,
    .decode         = vc1_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
    .flush          = vc1_sprite_flush,
    .pix_fmts       = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_YUV420P,
        AV_PIX_FMT_NONE
    },
};
#endif
1017 #if CONFIG_VC1IMAGE_DECODER
1018 AVCodec ff_vc1image_decoder = {
1020 .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image v2"),
1021 .type = AVMEDIA_TYPE_VIDEO,
1022 .id = AV_CODEC_ID_VC1IMAGE,
1023 .priv_data_size = sizeof(VC1Context),
1024 .init = vc1_decode_init,
1025 .close = ff_vc1_decode_end,
1026 .decode = vc1_decode_frame,
1027 .capabilities = AV_CODEC_CAP_DR1,
1028 .flush = vc1_sprite_flush,
1029 .pix_fmts = (const enum AVPixelFormat[]) {