#include "libavutil/avassert.h"
#include "error_resilience.h"
#include "internal.h"
#include "mpeg_er.h"
#include "msmpeg4data.h"
#include "qpeldsp.h"
#include "vc1.h"
#include "mss12.h"
#include "mss2dsp.h"
typedef struct MSS2Context {
VC1Context v;
int split_position;
- AVFrame last_pic;
+ AVFrame *last_pic;
MSS12Context c;
MSS2DSPContext dsp;
+ QpelDSPContext qdsp;
SliceContext sc[2];
} MSS2Context;
ff_mpeg_flush(avctx);
- if (s->current_picture_ptr == NULL || s->current_picture_ptr->f.data[0]) {
- int i = ff_find_unused_picture(s, 0);
- if (i < 0)
- return i;
- s->current_picture_ptr = &s->picture[i];
- }
-
init_get_bits(&s->gb, buf, buf_size * 8);
s->loop_filter = avctx->skip_loop_filter < AVDISCARD_ALL;
avctx->pix_fmt = AV_PIX_FMT_YUV420P;
- if ((ret = ff_MPV_frame_start(s, avctx)) < 0) {
- av_log(v->s.avctx, AV_LOG_ERROR, "ff_MPV_frame_start error\n");
+ if ((ret = ff_mpv_frame_start(s, avctx)) < 0) {
+ av_log(v->s.avctx, AV_LOG_ERROR, "ff_mpv_frame_start error\n");
avctx->pix_fmt = AV_PIX_FMT_RGB24;
return ret;
}
ff_er_frame_end(&s->er);
- ff_MPV_frame_end(s);
+ ff_mpv_frame_end(s);
- f = &s->current_picture.f;
+ f = s->current_picture.f;
if (v->respic == 3) {
ctx->dsp.upsample_plane(f->data[0], f->linesize[0], w, h);
return AVERROR_INVALIDDATA;
avctx->pix_fmt = is_555 ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_RGB24;
- if (ctx->last_pic.format != avctx->pix_fmt)
- av_frame_unref(&ctx->last_pic);
+ if (ctx->last_pic->format != avctx->pix_fmt)
+ av_frame_unref(ctx->last_pic);
if (has_wmv9) {
bytestream2_init(&gB, buf, buf_size + ARITH2_PADDING);
return ret;
}
- if (ctx->last_pic.data[0]) {
- av_assert0(frame->linesize[0] == ctx->last_pic.linesize[0]);
- c->last_rgb_pic = ctx->last_pic.data[0] +
- ctx->last_pic.linesize[0] * (avctx->height - 1);
+ if (ctx->last_pic->data[0]) {
+ av_assert0(frame->linesize[0] == ctx->last_pic->linesize[0]);
+ c->last_rgb_pic = ctx->last_pic->data[0] +
+ ctx->last_pic->linesize[0] * (avctx->height - 1);
} else {
av_log(avctx, AV_LOG_ERROR, "Missing keyframe\n");
return AVERROR_INVALIDDATA;
}
} else {
- if ((ret = ff_reget_buffer(avctx, &ctx->last_pic)) < 0) {
+ if ((ret = ff_reget_buffer(avctx, ctx->last_pic)) < 0) {
av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
return ret;
}
- if ((ret = av_frame_ref(frame, &ctx->last_pic)) < 0)
+ if ((ret = av_frame_ref(frame, ctx->last_pic)) < 0)
return ret;
c->last_rgb_pic = NULL;
av_log(avctx, AV_LOG_WARNING, "buffer not fully consumed\n");
if (c->mvX < 0 || c->mvY < 0) {
- av_frame_unref(&ctx->last_pic);
- ret = av_frame_ref(&ctx->last_pic, frame);
+ av_frame_unref(ctx->last_pic);
+ ret = av_frame_ref(ctx->last_pic, frame);
if (ret < 0)
return ret;
}
int ret;
v->s.avctx = avctx;
- avctx->flags |= CODEC_FLAG_EMU_EDGE;
- v->s.flags |= CODEC_FLAG_EMU_EDGE;
if ((ret = ff_vc1_init_common(v)) < 0)
return ret;
v->overlap = 0;
- v->s.resync_marker = 0;
+ v->resync_marker = 0;
v->rangered = 0;
v->s.max_b_frames = avctx->max_b_frames = 0;
return ret;
/* error concealment */
- v->s.me.qpel_put = v->s.dsp.put_qpel_pixels_tab;
- v->s.me.qpel_avg = v->s.dsp.avg_qpel_pixels_tab;
+ v->s.me.qpel_put = v->s.qdsp.put_qpel_pixels_tab;
+ v->s.me.qpel_avg = v->s.qdsp.avg_qpel_pixels_tab;
return 0;
}
{
MSS2Context *const ctx = avctx->priv_data;
- av_frame_unref(&ctx->last_pic);
+ av_frame_free(&ctx->last_pic);
ff_mss12_decode_end(&ctx->c);
av_freep(&ctx->c.pal_pic);
return ret;
}
ff_mss2dsp_init(&ctx->dsp);
+ ff_qpeldsp_init(&ctx->qdsp);
avctx->pix_fmt = c->free_colours == 127 ? AV_PIX_FMT_RGB555
: AV_PIX_FMT_RGB24;
+ ctx->last_pic = av_frame_alloc();
+ if (!ctx->last_pic) {
+ mss2_decode_end(avctx);
+ return AVERROR(ENOMEM);
+ }
+
return 0;
}