X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=libavfilter%2Fvf_psnr.c;h=a8eb315445ae29a6a286deb02cf5a5f54b7e7ccf;hb=f7d4c60ac47547f69ddc96e7bed682f54436cd1a;hp=20962c41f6f94c3174968f78894ce394d60970ba;hpb=3757f8f2f6de1e3e8e553496211c213b8b894c70;p=ffmpeg

diff --git a/libavfilter/vf_psnr.c b/libavfilter/vf_psnr.c
index 20962c41f6f..a8eb315445a 100644
--- a/libavfilter/vf_psnr.c
+++ b/libavfilter/vf_psnr.c
@@ -29,16 +29,16 @@
 #include "libavutil/opt.h"
 #include "libavutil/pixdesc.h"
 #include "avfilter.h"
-#include "dualinput.h"
 #include "drawutils.h"
 #include "formats.h"
+#include "framesync2.h"
 #include "internal.h"
 #include "psnr.h"
 #include "video.h"
 
 typedef struct PSNRContext {
     const AVClass *class;
-    FFDualInputContext dinput;
+    FFFrameSync fs;
     double mse, min_mse, max_mse, mse_comp[4];
     uint64_t nb_frames;
     FILE *stats_file;
@@ -68,7 +68,7 @@ static const AVOption psnr_options[] = {
     { NULL }
 };
 
-AVFILTER_DEFINE_CLASS(psnr);
+FRAMESYNC_DEFINE_CLASS(psnr, PSNRContext, fs);
 
 static inline unsigned pow_2(unsigned base)
 {
@@ -142,13 +142,21 @@ static void set_meta(AVDictionary **metadata, const char *key, char comp, float
     }
 }
 
-static AVFrame *do_psnr(AVFilterContext *ctx, AVFrame *main,
-                        const AVFrame *ref)
+static int do_psnr(FFFrameSync *fs)
 {
+    AVFilterContext *ctx = fs->parent;
     PSNRContext *s = ctx->priv;
+    AVFrame *main, *ref;
     double comp_mse[4], mse = 0;
-    int j, c;
-    AVDictionary **metadata = &main->metadata;
+    int ret, j, c;
+    AVDictionary **metadata;
+
+    ret = ff_framesync2_dualinput_get(fs, &main, &ref);
+    if (ret < 0)
+        return ret;
+    if (!ref)
+        return ff_filter_frame(ctx->outputs[0], main);
+    metadata = &main->metadata;
 
     compute_images_mse(s, (const uint8_t **)main->data, main->linesize,
                           (const uint8_t **)ref->data, ref->linesize,
@@ -214,7 +222,7 @@ static AVFrame *do_psnr(AVFilterContext *ctx, AVFrame *main,
         fprintf(s->stats_file, "\n");
     }
 
-    return main;
+    return ff_filter_frame(ctx->outputs[0], main);
 }
 
 static av_cold int init(AVFilterContext *ctx)
@@ -245,14 +253,14 @@ static av_cold int init(AVFilterContext *ctx)
         }
     }
 
-    s->dinput.process = do_psnr;
+    s->fs.on_event = do_psnr;
     return 0;
 }
 
 static int query_formats(AVFilterContext *ctx)
 {
     static const enum AVPixelFormat pix_fmts[] = {
-        AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY10, AV_PIX_FMT_GRAY12, AV_PIX_FMT_GRAY16,
+        AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY9, AV_PIX_FMT_GRAY10, AV_PIX_FMT_GRAY12, AV_PIX_FMT_GRAY16,
 #define PF_NOALPHA(suf) AV_PIX_FMT_YUV420##suf, AV_PIX_FMT_YUV422##suf, AV_PIX_FMT_YUV444##suf
 #define PF_ALPHA(suf) AV_PIX_FMT_YUVA420##suf, AV_PIX_FMT_YUVA422##suf, AV_PIX_FMT_YUVA444##suf
 #define PF(suf) PF_NOALPHA(suf), PF_ALPHA(suf)
@@ -331,27 +339,24 @@ static int config_output(AVFilterLink *outlink)
     AVFilterLink *mainlink = ctx->inputs[0];
     int ret;
 
+    ret = ff_framesync2_init_dualinput(&s->fs, ctx);
+    if (ret < 0)
+        return ret;
     outlink->w = mainlink->w;
     outlink->h = mainlink->h;
     outlink->time_base = mainlink->time_base;
     outlink->sample_aspect_ratio = mainlink->sample_aspect_ratio;
     outlink->frame_rate = mainlink->frame_rate;
-    if ((ret = ff_dualinput_init(ctx, &s->dinput)) < 0)
+    if ((ret = ff_framesync2_configure(&s->fs)) < 0)
         return ret;
 
     return 0;
 }
 
-static int filter_frame(AVFilterLink *inlink, AVFrame *inpicref)
+static int activate(AVFilterContext *ctx)
 {
-    PSNRContext *s = inlink->dst->priv;
-    return ff_dualinput_filter_frame(&s->dinput, inlink, inpicref);
-}
-
-static int request_frame(AVFilterLink *outlink)
-{
-    PSNRContext *s = outlink->src->priv;
-    return ff_dualinput_request_frame(&s->dinput, outlink);
+    PSNRContext *s = ctx->priv;
+    return ff_framesync2_activate(&s->fs);
 }
 
 static av_cold void uninit(AVFilterContext *ctx)
@@ -375,7 +380,7 @@ static av_cold void uninit(AVFilterContext *ctx)
                get_psnr(s->min_mse, 1, s->average_max));
     }
 
-    ff_dualinput_uninit(&s->dinput);
+    ff_framesync2_uninit(&s->fs);
 
     if (s->stats_file && s->stats_file != stdout)
         fclose(s->stats_file);
@@ -385,11 +390,9 @@ static const AVFilterPad psnr_inputs[] = {
     {
         .name = "main",
         .type = AVMEDIA_TYPE_VIDEO,
-        .filter_frame = filter_frame,
     },{
         .name = "reference",
         .type = AVMEDIA_TYPE_VIDEO,
-        .filter_frame = filter_frame,
         .config_props = config_input_ref,
     },
     { NULL }
@@ -400,7 +403,6 @@ static const AVFilterPad psnr_outputs[] = {
         .name = "default",
         .type = AVMEDIA_TYPE_VIDEO,
         .config_props = config_output,
-        .request_frame = request_frame,
     },
     { NULL }
 };
@@ -408,9 +410,11 @@ static const AVFilterPad psnr_outputs[] = {
 AVFilter ff_vf_psnr = {
     .name = "psnr",
     .description = NULL_IF_CONFIG_SMALL("Calculate the PSNR between two video streams."),
+    .preinit = psnr_framesync_preinit,
     .init = init,
     .uninit = uninit,
     .query_formats = query_formats,
+    .activate = activate,
     .priv_size = sizeof(PSNRContext),
     .priv_class = &psnr_class,
     .inputs = psnr_inputs,
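
For reference, the skeleton below condenses the dual-input framesync2 pattern that this diff applies to vf_psnr.c. It is only a sketch: "ExampleContext", "example_options" and "do_compare" are placeholder names, the per-frame processing is elided, and the pads and AVFilter definition are omitted; the framesync2 calls, the on_event callback, the preinit hook generated by FRAMESYNC_DEFINE_CLASS and the activate callback are the ones introduced above, and the code assumes the FFmpeg libavfilter build environment (internal headers).

#include "libavutil/opt.h"
#include "avfilter.h"
#include "framesync2.h"
#include "internal.h"

typedef struct ExampleContext {
    const AVClass *class;
    FFFrameSync fs;                 /* replaces FFDualInputContext dinput */
} ExampleContext;

static const AVOption example_options[] = { { NULL } };

/* Generates example_class and the example_framesync_preinit() hook that the
 * AVFilter definition would reference via .preinit (as .preinit =
 * psnr_framesync_preinit in the diff above). */
FRAMESYNC_DEFINE_CLASS(example, ExampleContext, fs);

/* on_event callback: framesync2 calls this when a frame pair is ready. */
static int do_compare(FFFrameSync *fs)
{
    AVFilterContext *ctx = fs->parent;
    AVFrame *main, *ref;
    int ret;

    ret = ff_framesync2_dualinput_get(fs, &main, &ref);
    if (ret < 0)
        return ret;
    if (!ref)   /* no reference frame yet: pass the main frame through */
        return ff_filter_frame(ctx->outputs[0], main);

    /* ... per-frame comparison of main and ref would go here ... */

    return ff_filter_frame(ctx->outputs[0], main);
}

static av_cold int init(AVFilterContext *ctx)
{
    ExampleContext *s = ctx->priv;
    s->fs.on_event = do_compare;    /* replaces s->dinput.process */
    return 0;
}

static int config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    ExampleContext *s = ctx->priv;
    int ret;

    ret = ff_framesync2_init_dualinput(&s->fs, ctx);
    if (ret < 0)
        return ret;
    /* ... copy w/h/time_base/frame_rate from ctx->inputs[0] to outlink ... */
    return ff_framesync2_configure(&s->fs);   /* replaces ff_dualinput_init() */
}

/* The input pads no longer need filter_frame/request_frame callbacks; the
 * filter is driven through a single activate callback instead. */
static int activate(AVFilterContext *ctx)
{
    ExampleContext *s = ctx->priv;
    return ff_framesync2_activate(&s->fs);
}

static av_cold void uninit(AVFilterContext *ctx)
{
    ExampleContext *s = ctx->priv;
    ff_framesync2_uninit(&s->fs);   /* replaces ff_dualinput_uninit() */
}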