/*
 * Microsoft Screen 2 (aka Windows Media Video V9 Screen) decoder
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Microsoft Screen 2 (aka Windows Media Video V9 Screen) decoder
 */

#include "libavutil/avassert.h"

#include "error_resilience.h"
#include "internal.h"
#include "mpeg_er.h"
#include "msmpeg4.h"
#include "qpeldsp.h"
#include "vc1.h"
#include "wmv2data.h"
#include "mss12.h"
#include "mss2dsp.h"

typedef struct MSS2Context {
    VC1Context     v;   /* must be first: priv_data is also used as a VC1Context */
    int            split_position;
    AVFrame       *last_pic;
    MSS12Context   c;
    MSS2DSPContext dsp;
    QpelDSPContext qdsp;
    SliceContext   sc[2];
} MSS2Context;

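/*
 * The arith2_* helpers below implement the byte-oriented binary arithmetic
 * (range) decoder used by MSS2 bitstreams, layered on the ArithCoder state
 * shared with the MSS1 decoder (see mss12.h).
 */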
static void arith2_normalise(ArithCoder *c)
{
    while ((c->high >> 15) - (c->low >> 15) < 2) {
        if ((c->low ^ c->high) & 0x10000) {
            c->high  ^= 0x8000;
            c->value ^= 0x8000;
            c->low   ^= 0x8000;
        }
        c->high  = (uint16_t)c->high  << 8 | 0xFF;
        c->value = (uint16_t)c->value << 8 | bytestream2_get_byte(c->gbc.gB);
        c->low   = (uint16_t)c->low   << 8;
    }
}

/* L. Stuiver and A. Moffat: "Piecewise Integer Mapping for Arithmetic Coding."
 * In Proc. 8th Data Compression Conference (DCC '98), pp. 3-12, Mar. 1998 */
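/*
 * The mapping codes an n-symbol alphabet into a range of size `range` with
 * n <= range < 2n: the first split = 2n - range symbols get one range unit
 * each, the remaining symbols two units each.  Illustrative example (not
 * from the original source): n = 6, range = 9 gives split = 3, so coded
 * values 0,1,2 map to symbols 0,1,2 and value pairs (3,4), (5,6), (7,8)
 * map to symbols 3, 4, 5.
 */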
static int arith2_get_scaled_value(int value, int n, int range)
{
    int split = (n << 1) - range;

    if (value > split)
        return split + (value - split >> 1);
    else
        return value;
}

static void arith2_rescale_interval(ArithCoder *c, int range,
                                    int low, int high, int n)
{
    int split = (n << 1) - range;

    if (high > split)
        c->high = split + (high - split << 1);
    else
        c->high = high;

    c->high += c->low - 1;

    if (low > split)
        c->low += split + (low - split << 1);
    else
        c->low += low;
}

static int arith2_get_number(ArithCoder *c, int n)
{
    int range = c->high - c->low + 1;
    int scale = av_log2(range) - av_log2(n);
    int val;

    if (n << scale > range)
        scale--;

    n <<= scale;
    val = arith2_get_scaled_value(c->value - c->low, n, range) >> scale;

    arith2_rescale_interval(c, range, val << scale, (val + 1) << scale, n);

    arith2_normalise(c);

    return val;
}

static int arith2_get_prob(ArithCoder *c, int16_t *probs)
{
    int range = c->high - c->low + 1, n = *probs;
    int scale = av_log2(range) - av_log2(n);
    int i     = 0, val;

    if (n << scale > range)
        scale--;

    n <<= scale;
    val = arith2_get_scaled_value(c->value - c->low, n, range) >> scale;
    while (probs[++i] > val) ;

    arith2_rescale_interval(c, range,
                            probs[i] << scale, probs[i - 1] << scale, n);

    return i;
}

ARITH_GET_MODEL_SYM(arith2) /* instantiates arith2_get_model_sym() from the template in mss12.h */

static int arith2_get_consumed_bytes(ArithCoder *c)
{
    int diff = (c->high >> 16) - (c->low >> 16);
    int bp   = bytestream2_tell(c->gbc.gB) - 3 << 3;
    int bits = 1;

    while (!(diff & 0x80)) {
        bits++;
        diff <<= 1;
    }

    return (bits + bp + 7 >> 3) + ((c->low >> 16) + 1 == c->high >> 16);
}

static void arith2_init(ArithCoder *c, GetByteContext *gB)
{
    c->low           = 0;
    c->high          = 0xFFFFFF;
    c->value         = bytestream2_get_be24(gB);
    c->gbc.gB        = gB;
    c->get_model_sym = arith2_get_model_sym;
    c->get_number    = arith2_get_number;
}

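/* Read a palette update for the ctx->free_colours entries at the end of the
 * global palette; returns the number of bytes consumed from buf. */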
static int decode_pal_v2(MSS12Context *ctx, const uint8_t *buf, int buf_size)
{
    int i, ncol;
    uint32_t *pal = ctx->pal + 256 - ctx->free_colours;

    if (!ctx->free_colours)
        return 0;

    ncol = *buf++;
    if (ncol > ctx->free_colours || buf_size < 2 + ncol * 3)
        return AVERROR_INVALIDDATA;
    for (i = 0; i < ncol; i++)
        *pal++ = AV_RB24(buf + 3 * i);

    return 1 + ncol * 3;
}

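/* Decode an RGB555 frame (or, on inter frames, the coded sub-rectangle) from
 * a simple byte-oriented RLE: literal 16-bit pixels, run-length repeats, and
 * escape symbols meaning "copy the pixel above" or "skip". */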
static int decode_555(GetByteContext *gB, uint16_t *dst, ptrdiff_t stride,
                      int keyframe, int w, int h)
{
    int last_symbol = 0, repeat = 0, prev_avail = 0;

    if (!keyframe) {
        int x, y, endx, endy, t;

#define READ_PAIR(a, b)                 \
    a  = bytestream2_get_byte(gB) << 4; \
    t  = bytestream2_get_byte(gB);      \
    a |= t >> 4;                        \
    b  = (t & 0xF) << 8;                \
    b |= bytestream2_get_byte(gB);      \

        READ_PAIR(x, endx)
        READ_PAIR(y, endy)

        if (endx >= w || endy >= h || x > endx || y > endy)
            return AVERROR_INVALIDDATA;
        dst += x + stride * y;
        w    = endx - x + 1;
        h    = endy - y + 1;
        if (y)
            prev_avail = 1;
    }

    do {
        uint16_t *p = dst;
        do {
            if (repeat-- < 1) {
                int b = bytestream2_get_byte(gB);
                if (b < 128)
                    last_symbol = b << 8 | bytestream2_get_byte(gB);
                else if (b > 129) {
                    repeat = 0;
                    while (b-- > 130) {
                        if (repeat >= (INT_MAX >> 8) - 1) {
                            av_log(NULL, AV_LOG_ERROR, "repeat overflow\n");
                            return AVERROR_INVALIDDATA;
                        }
                        repeat = (repeat << 8) + bytestream2_get_byte(gB) + 1;
                    }
                    if (last_symbol == -2) {
                        int skip = FFMIN((unsigned)repeat, dst + w - p);
                        repeat -= skip;
                        p      += skip;
                    }
                } else
                    last_symbol = 127 - b;
            }
            if (last_symbol >= 0)
                *p = last_symbol;
            else if (last_symbol == -1 && prev_avail)
                *p = *(p - stride);
        } while (++p < dst + w);
        dst += stride;
        prev_avail = 1;
    } while (--h);

    return 0;
}

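/* Decode one slice of a paletted frame: a canonical Huffman code table is
 * read from the bitstream, then the slice is RLE-decoded into both the
 * palette-index plane (pal_dst) and the RGB picture (rgb_dst). */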
static int decode_rle(GetBitContext *gb, uint8_t *pal_dst, ptrdiff_t pal_stride,
                      uint8_t *rgb_dst, ptrdiff_t rgb_stride, uint32_t *pal,
                      int keyframe, int kf_slipt, int slice, int w, int h)
{
    uint8_t bits[270] = { 0 };
    uint32_t codes[270];
    VLC vlc;

    int current_length = 0, read_codes = 0, next_code = 0, current_codes = 0;
    int remaining_codes, surplus_codes, i;

    const int alphabet_size = 270 - keyframe;

    int last_symbol = 0, repeat = 0, prev_avail = 0;

    if (!keyframe) {
        int x, y, clipw, cliph;

        x     = get_bits(gb, 12);
        y     = get_bits(gb, 12);
        clipw = get_bits(gb, 12) + 1;
        cliph = get_bits(gb, 12) + 1;

        if (x + clipw > w || y + cliph > h)
            return AVERROR_INVALIDDATA;
        pal_dst += pal_stride * y + x;
        rgb_dst += rgb_stride * y + x * 3;
        w        = clipw;
        h        = cliph;
        if (y)
            prev_avail = 1;
    } else {
        if (slice > 0) {
            pal_dst += pal_stride * kf_slipt;
            rgb_dst += rgb_stride * kf_slipt;
            h       -= kf_slipt;
            prev_avail = 1;
        } else
            h = kf_slipt;
    }

    /* read explicit codes */
    do {
        while (current_codes--) {
            int symbol = get_bits(gb, 8);
            if (symbol >= 204 - keyframe)
                symbol += 14 - keyframe;
            else if (symbol > 189)
                symbol = get_bits1(gb) + (symbol << 1) - 190;
            if (bits[symbol])
                return AVERROR_INVALIDDATA;
            bits[symbol]  = current_length;
            codes[symbol] = next_code++;
            read_codes++;
        }
        current_length++;
        next_code     <<= 1;
        remaining_codes = (1 << current_length) - next_code;
        current_codes   = get_bits(gb, av_ceil_log2(remaining_codes + 1));
        if (current_length > 22 || current_codes > remaining_codes)
            return AVERROR_INVALIDDATA;
    } while (current_codes != remaining_codes);

    remaining_codes = alphabet_size - read_codes;

    /* determine the minimum length to fit the rest of the alphabet */
    while ((surplus_codes = (2 << current_length) -
                            (next_code << 1) - remaining_codes) < 0) {
        current_length++;
        next_code <<= 1;
    }

    /* add the rest of the symbols lexicographically */
    for (i = 0; i < alphabet_size; i++)
        if (!bits[i]) {
            if (surplus_codes-- == 0) {
                current_length++;
                next_code <<= 1;
            }
            bits[i]  = current_length;
            codes[i] = next_code++;
        }

    if (next_code != 1 << current_length)
        return AVERROR_INVALIDDATA;

    if ((i = init_vlc(&vlc, 9, alphabet_size, bits, 1, 1, codes, 4, 4, 0)) < 0)
        return i;
    /* RLE/VLC decode of the slice into the palette and RGB planes */
    do {
        uint8_t *pp = pal_dst;
        uint8_t *rp = rgb_dst;
        do {
            if (repeat-- < 1) {
                int b = get_vlc2(gb, vlc.table, 9, 3);
                if (b < 256)
                    last_symbol = b;
                else if (b < 268) {
                    b -= 256;
                    if (b == 11)
                        b = get_bits(gb, 4) + 10;

                    if (!b)
                        repeat = 0;
                    else
                        repeat = get_bits(gb, b);

                    repeat += (1 << b) - 1;

                    if (last_symbol == -2) {
                        int skip = FFMIN(repeat, pal_dst + w - pp);
                        repeat -= skip;
                        pp     += skip;
                        rp     += skip * 3;
                    }
                } else
                    last_symbol = 267 - b;
            }
            if (last_symbol >= 0) {
                *pp = last_symbol;
                AV_WB24(rp, pal[last_symbol]);
            } else if (last_symbol == -1 && prev_avail) {
                *pp = *(pp - pal_stride);
                memcpy(rp, rp - rgb_stride, 3);
            }
            rp += 3;
        } while (++pp < pal_dst + w);
        pal_dst    += pal_stride;
        rgb_dst    += rgb_stride;
        prev_avail  = 1;
    } while (--h);

    ff_free_vlc(&vlc);
    return 0;
}

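/* Decode an embedded WMV9 (VC-1) intra frame covering a w x h rectangle at
 * (x, y) and blit the reconstructed YUV planes into the RGB output picture,
 * optionally masked by the palette plane. */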
static int decode_wmv9(AVCodecContext *avctx, const uint8_t *buf, int buf_size,
                       int x, int y, int w, int h, int wmv9_mask)
{
    MSS2Context *ctx  = avctx->priv_data;
    MSS12Context *c   = &ctx->c;
    VC1Context *v     = avctx->priv_data;
    MpegEncContext *s = &v->s;
    AVFrame *f;
    int ret;

    ff_mpeg_flush(avctx);

    if ((ret = init_get_bits8(&s->gb, buf, buf_size)) < 0)
        return ret;

    s->loop_filter = avctx->skip_loop_filter < AVDISCARD_ALL;

    if (ff_vc1_parse_frame_header(v, &s->gb) < 0) {
        av_log(v->s.avctx, AV_LOG_ERROR, "header error\n");
        return AVERROR_INVALIDDATA;
    }

    if (s->pict_type != AV_PICTURE_TYPE_I) {
        av_log(v->s.avctx, AV_LOG_ERROR, "expected I-frame\n");
        return AVERROR_INVALIDDATA;
    }

    avctx->pix_fmt = AV_PIX_FMT_YUV420P;

    if ((ret = ff_mpv_frame_start(s, avctx)) < 0) {
        av_log(v->s.avctx, AV_LOG_ERROR, "ff_mpv_frame_start error\n");
        avctx->pix_fmt = AV_PIX_FMT_RGB24;
        return ret;
    }

    ff_mpeg_er_frame_start(s);

    v->bits = buf_size * 8;

    v->end_mb_x = (w + 15) >> 4;
    s->end_mb_y = (h + 15) >> 4;
    if (v->respic & 1)
        v->end_mb_x = v->end_mb_x + 1 >> 1;
    if (v->respic & 2)
        s->end_mb_y = s->end_mb_y + 1 >> 1;

    ff_vc1_decode_blocks(v);

    if (v->end_mb_x == s->mb_width && s->end_mb_y == s->mb_height) {
        ff_er_frame_end(&s->er);
    } else {
        av_log(v->s.avctx, AV_LOG_WARNING,
               "disabling error correction due to block count mismatch %dx%d != %dx%d\n",
               v->end_mb_x, s->end_mb_y, s->mb_width, s->mb_height);
    }

    ff_mpv_frame_end(s);

    f = s->current_picture.f;

    if (v->respic == 3) {
        ctx->dsp.upsample_plane(f->data[0], f->linesize[0], w, h);
        ctx->dsp.upsample_plane(f->data[1], f->linesize[1], w+1 >> 1, h+1 >> 1);
        ctx->dsp.upsample_plane(f->data[2], f->linesize[2], w+1 >> 1, h+1 >> 1);
    } else if (v->respic)
        avpriv_request_sample(v->s.avctx,
                              "Asymmetric WMV9 rectangle subsampling");

    av_assert0(f->linesize[1] == f->linesize[2]);

    if (wmv9_mask != -1)
        ctx->dsp.mss2_blit_wmv9_masked(c->rgb_pic + y * c->rgb_stride + x * 3,
                                       c->rgb_stride, wmv9_mask,
                                       c->pal_pic + y * c->pal_stride + x,
                                       c->pal_stride,
                                       f->data[0], f->linesize[0],
                                       f->data[1], f->data[2], f->linesize[1],
                                       w, h);
    else
        ctx->dsp.mss2_blit_wmv9(c->rgb_pic + y * c->rgb_stride + x * 3,
                                c->rgb_stride,
                                f->data[0], f->linesize[0],
                                f->data[1], f->data[2], f->linesize[1],
                                w, h);

    avctx->pix_fmt = AV_PIX_FMT_RGB24;

    return 0;
}

typedef struct Rectangle {
    int coded, x, y, w, h;
} Rectangle;

#define MAX_WMV9_RECTANGLES 20
#define ARITH2_PADDING 2

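/*
 * Main frame decoding: parse the frame header bits, the optional list of
 * WMV9 rectangles (arithmetic coded), the palette update and motion vector,
 * then decode the image data either as RGB555 RLE, paletted RLE, or with the
 * shared MSS1/MSS2 arithmetic coder, and finally render the WMV9 rectangles.
 */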
static int mss2_decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
                             AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size       = avpkt->size;
    MSS2Context *ctx   = avctx->priv_data;
    MSS12Context *c    = &ctx->c;
    AVFrame *frame     = data;
    GetBitContext gb;
    GetByteContext gB;
    ArithCoder acoder;

    int keyframe, has_wmv9, has_mv, is_rle, is_555, ret;

    Rectangle wmv9rects[MAX_WMV9_RECTANGLES], *r;
    int used_rects = 0, i, implicit_rect = 0, av_uninit(wmv9_mask);

    if ((ret = init_get_bits8(&gb, buf, buf_size)) < 0)
        return ret;

    if (keyframe = get_bits1(&gb))
        skip_bits(&gb, 7);
    has_wmv9 = get_bits1(&gb);
    has_mv   = keyframe ? 0 : get_bits1(&gb);
    is_rle   = get_bits1(&gb);
    is_555   = is_rle && get_bits1(&gb);
    if (c->slice_split > 0)
        ctx->split_position = c->slice_split;
    else if (c->slice_split < 0) {
        if (get_bits1(&gb)) {
            if (get_bits1(&gb)) {
                if (get_bits1(&gb))
                    ctx->split_position = get_bits(&gb, 16);
                else
                    ctx->split_position = get_bits(&gb, 12);
            } else
                ctx->split_position = get_bits(&gb, 8) << 4;
        } else
            ctx->split_position = avctx->height / 2;
    } else
        ctx->split_position = avctx->height;

    if (c->slice_split && (ctx->split_position < 1 - is_555 ||
                           ctx->split_position > avctx->height - 1))
        return AVERROR_INVALIDDATA;

    align_get_bits(&gb);
    buf      += get_bits_count(&gb) >> 3;
    buf_size -= get_bits_count(&gb) >> 3;

    if (buf_size < 1)
        return AVERROR_INVALIDDATA;

    if (is_555 && (has_wmv9 || has_mv || c->slice_split && ctx->split_position))
        return AVERROR_INVALIDDATA;

    avctx->pix_fmt = is_555 ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_RGB24;
    if (ctx->last_pic->format != avctx->pix_fmt)
        av_frame_unref(ctx->last_pic);

    if (has_wmv9) {
        bytestream2_init(&gB, buf, buf_size + ARITH2_PADDING);
        arith2_init(&acoder, &gB);

        implicit_rect = !arith2_get_bit(&acoder);

        while (arith2_get_bit(&acoder)) {
            if (used_rects == MAX_WMV9_RECTANGLES)
                return AVERROR_INVALIDDATA;
            r = &wmv9rects[used_rects];
            if (!used_rects)
                r->x = arith2_get_number(&acoder, avctx->width);
            else
                r->x = arith2_get_number(&acoder, avctx->width -
                                         wmv9rects[used_rects - 1].x) +
                       wmv9rects[used_rects - 1].x;
            r->y = arith2_get_number(&acoder, avctx->height);
            r->w = arith2_get_number(&acoder, avctx->width  - r->x) + 1;
            r->h = arith2_get_number(&acoder, avctx->height - r->y) + 1;

            used_rects++;
        }

        if (implicit_rect && used_rects) {
            av_log(avctx, AV_LOG_ERROR, "implicit_rect && used_rects > 0\n");
            return AVERROR_INVALIDDATA;
        }

        if (implicit_rect) {
            wmv9rects[0].x = 0;
            wmv9rects[0].y = 0;
            wmv9rects[0].w = avctx->width;
            wmv9rects[0].h = avctx->height;

            used_rects = 1;
        }
        for (i = 0; i < used_rects; i++) {
            if (!implicit_rect && arith2_get_bit(&acoder)) {
                av_log(avctx, AV_LOG_ERROR, "Unexpected grandchildren\n");
                return AVERROR_INVALIDDATA;
            }
            if (!i) {
                wmv9_mask = arith2_get_bit(&acoder) - 1;
                if (!wmv9_mask)
                    wmv9_mask = arith2_get_number(&acoder, 256);
            }
            wmv9rects[i].coded = arith2_get_number(&acoder, 2);
        }

        buf      += arith2_get_consumed_bytes(&acoder);
        buf_size -= arith2_get_consumed_bytes(&acoder);
        if (buf_size < 1)
            return AVERROR_INVALIDDATA;
    }

    if (keyframe && !is_555) {
        if ((i = decode_pal_v2(c, buf, buf_size)) < 0)
            return AVERROR_INVALIDDATA;
        buf      += i;
        buf_size -= i;
    }

    if (has_mv) {
        buf += 4;
        if ((buf_size -= 4) < 1)
            return AVERROR_INVALIDDATA;
        c->mvX = AV_RB16(buf - 4) - avctx->width;
        c->mvY = AV_RB16(buf - 2) - avctx->height;
    }

    if (c->mvX < 0 || c->mvY < 0) {
        FFSWAP(uint8_t *, c->pal_pic, c->last_pal_pic);

        if ((ret = ff_get_buffer(avctx, frame, AV_GET_BUFFER_FLAG_REF)) < 0)
            return ret;

        if (ctx->last_pic->data[0]) {
            av_assert0(frame->linesize[0] == ctx->last_pic->linesize[0]);
            c->last_rgb_pic = ctx->last_pic->data[0] +
                              ctx->last_pic->linesize[0] * (avctx->height - 1);
        } else {
            av_log(avctx, AV_LOG_ERROR, "Missing keyframe\n");
            return AVERROR_INVALIDDATA;
        }
    } else {
        if ((ret = ff_reget_buffer(avctx, ctx->last_pic)) < 0)
            return ret;
        if ((ret = av_frame_ref(frame, ctx->last_pic)) < 0)
            return ret;

        c->last_rgb_pic = NULL;
    }
    c->rgb_pic    = frame->data[0] +
                    frame->linesize[0] * (avctx->height - 1);
    c->rgb_stride = -frame->linesize[0];

    frame->key_frame = keyframe;
    frame->pict_type = keyframe ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;

    if (is_555) {
        bytestream2_init(&gB, buf, buf_size);

        if (decode_555(&gB, (uint16_t *)c->rgb_pic, c->rgb_stride >> 1,
                       keyframe, avctx->width, avctx->height))
            return AVERROR_INVALIDDATA;

        buf_size -= bytestream2_tell(&gB);
    } else {
        if (keyframe) {
            ff_mss12_slicecontext_reset(&ctx->sc[0]);
            if (c->slice_split)
                ff_mss12_slicecontext_reset(&ctx->sc[1]);
        }
        if (is_rle) {
            if ((ret = init_get_bits8(&gb, buf, buf_size)) < 0)
                return ret;
            if (ret = decode_rle(&gb, c->pal_pic, c->pal_stride,
                                 c->rgb_pic, c->rgb_stride, c->pal, keyframe,
                                 ctx->split_position, 0,
                                 avctx->width, avctx->height))
                return ret;

            if (c->slice_split)
                if (ret = decode_rle(&gb, c->pal_pic, c->pal_stride,
                                     c->rgb_pic, c->rgb_stride, c->pal, keyframe,
                                     ctx->split_position, 1,
                                     avctx->width, avctx->height))
                    return ret;

            align_get_bits(&gb);
            buf      += get_bits_count(&gb) >> 3;
            buf_size -= get_bits_count(&gb) >> 3;
        } else if (!implicit_rect || wmv9_mask != -1) {
            if (buf_size < 1)
                return AVERROR_INVALIDDATA;
            bytestream2_init(&gB, buf, buf_size + ARITH2_PADDING);
            arith2_init(&acoder, &gB);
            c->keyframe = keyframe;
            if (c->corrupted = ff_mss12_decode_rect(&ctx->sc[0], &acoder, 0, 0,
                                                    avctx->width,
                                                    ctx->split_position))
                return AVERROR_INVALIDDATA;

            buf      += arith2_get_consumed_bytes(&acoder);
            buf_size -= arith2_get_consumed_bytes(&acoder);
            if (c->slice_split) {
                if (buf_size < 1)
                    return AVERROR_INVALIDDATA;
                bytestream2_init(&gB, buf, buf_size + ARITH2_PADDING);
                arith2_init(&acoder, &gB);
                if (c->corrupted = ff_mss12_decode_rect(&ctx->sc[1], &acoder, 0,
                                                        ctx->split_position,
                                                        avctx->width,
                                                        avctx->height - ctx->split_position))
                    return AVERROR_INVALIDDATA;

                buf      += arith2_get_consumed_bytes(&acoder);
                buf_size -= arith2_get_consumed_bytes(&acoder);
            }
        } else
            memset(c->pal_pic, 0, c->pal_stride * avctx->height);
    }

    for (i = 0; i < used_rects; i++) {
        int x = wmv9rects[i].x;
        int y = wmv9rects[i].y;
        int w = wmv9rects[i].w;
        int h = wmv9rects[i].h;
        if (wmv9rects[i].coded) {
            int WMV9codedFrameSize;
            if (buf_size < 4 || !(WMV9codedFrameSize = AV_RL24(buf)))
                return AVERROR_INVALIDDATA;
            if (ret = decode_wmv9(avctx, buf + 3, buf_size - 3,
                                  x, y, w, h, wmv9_mask))
                return ret;
            buf      += WMV9codedFrameSize + 3;
            buf_size -= WMV9codedFrameSize + 3;
        } else {
            uint8_t *dst = c->rgb_pic + y * c->rgb_stride + x * 3;
            if (wmv9_mask != -1) {
                ctx->dsp.mss2_gray_fill_masked(dst, c->rgb_stride,
                                               wmv9_mask,
                                               c->pal_pic + y * c->pal_stride + x,
                                               c->pal_stride, w, h);
            } else {
                for (; h; h--) {
                    memset(dst, 0x80, w * 3);
                    dst += c->rgb_stride;
                }
            }
        }
    }

    if (buf_size)
        av_log(avctx, AV_LOG_WARNING, "buffer not fully consumed\n");

    if (c->mvX < 0 || c->mvY < 0) {
        av_frame_unref(ctx->last_pic);
        ret = av_frame_ref(ctx->last_pic, frame);
        if (ret < 0)
            return ret;
    }

    *got_frame = 1;

    return avpkt->size;
}

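/* Set up the embedded VC-1/WMV9 decoder with the fixed parameters implied by
 * MSS2 (main profile, no B-frames). */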
static av_cold int wmv9_init(AVCodecContext *avctx)
{
    VC1Context *v = avctx->priv_data;
    int ret;

    if ((ret = ff_vc1_init_common(v)) < 0)
        return ret;
    ff_vc1dsp_init(&v->vc1dsp);

    v->profile = PROFILE_MAIN;

    v->zz_8x4 = ff_wmv2_scantableA;
    v->zz_4x8 = ff_wmv2_scantableB;

    v->frmrtq_postproc = 7;
    v->bitrtq_postproc = 31;

    v->resync_marker = 0;

    v->s.max_b_frames = avctx->max_b_frames = 0;
    v->quantizer_mode = 0;

    ff_vc1_init_transposed_scantables(v);

    if ((ret = ff_msmpeg4_decode_init(avctx)) < 0 ||
        (ret = ff_vc1_decode_init_alloc_tables(v)) < 0)
        return ret;

    /* error concealment */
    v->s.me.qpel_put = v->s.qdsp.put_qpel_pixels_tab;
    v->s.me.qpel_avg = v->s.qdsp.avg_qpel_pixels_tab;

    return 0;
}

static av_cold int mss2_decode_end(AVCodecContext *avctx)
{
    MSS2Context *const ctx = avctx->priv_data;

    av_frame_free(&ctx->last_pic);

    ff_mss12_decode_end(&ctx->c);
    av_freep(&ctx->c.pal_pic);
    av_freep(&ctx->c.last_pal_pic);
    ff_vc1_decode_end(avctx);

    return 0;
}

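/* Allocate the shared MSS1/MSS2 context, the palette planes and the embedded
 * WMV9 decoder; pick RGB555 or RGB24 output depending on the number of free
 * palette colours reported by ff_mss12_decode_init(). */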
static av_cold int mss2_decode_init(AVCodecContext *avctx)
{
    MSS2Context * const ctx = avctx->priv_data;
    MSS12Context *c = &ctx->c;
    int ret;

    if (ret = ff_mss12_decode_init(c, 1, &ctx->sc[0], &ctx->sc[1]))
        return ret;
    ctx->last_pic   = av_frame_alloc();
    c->pal_stride   = c->mask_stride;
    c->pal_pic      = av_mallocz(c->pal_stride * avctx->height);
    c->last_pal_pic = av_mallocz(c->pal_stride * avctx->height);
    if (!c->pal_pic || !c->last_pal_pic || !ctx->last_pic) {
        mss2_decode_end(avctx);
        return AVERROR(ENOMEM);
    }
    if (ret = wmv9_init(avctx)) {
        mss2_decode_end(avctx);
        return ret;
    }
    ff_mss2dsp_init(&ctx->dsp);
    ff_qpeldsp_init(&ctx->qdsp);

    avctx->pix_fmt = c->free_colours == 127 ? AV_PIX_FMT_RGB555
                                            : AV_PIX_FMT_RGB24;

    return 0;
}

AVCodec ff_mss2_decoder = {
    .name           = "mss2",
    .long_name      = NULL_IF_CONFIG_SMALL("MS Windows Media Video V9 Screen"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_MSS2,
    .priv_data_size = sizeof(MSS2Context),
    .init           = mss2_decode_init,
    .close          = mss2_decode_end,
    .decode         = mss2_decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
};
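
/*
 * Usage note: the decoder is not called directly; it is reached through the
 * generic libavcodec API.  A minimal sketch (error handling omitted):
 *
 *     const AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_MSS2);
 *     AVCodecContext *dec  = avcodec_alloc_context3(codec);
 *     // width/height/extradata must be copied from the demuxer before:
 *     avcodec_open2(dec, codec, NULL);
 *     // then feed packets with avcodec_send_packet()/avcodec_receive_frame()
 */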