/*
 * Copyright (C) 2012 Mark Himsley
 *
 * get_scene_score() Copyright (c) 2011 Stefano Sabatini
 * taken from libavfilter/vf_select.c
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * filter for upsampling or downsampling a progressive source
 */

#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/internal.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/pixelutils.h"
#include "framerate.h"

#define OFFSET(x) offsetof(FrameRateContext, x)
#define V AV_OPT_FLAG_VIDEO_PARAM
#define F AV_OPT_FLAG_FILTERING_PARAM
#define FRAMERATE_FLAG_SCD 01

static const AVOption framerate_options[] = {
    {"fps", "required output frames per second rate", OFFSET(dest_frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str="50"}, 0, INT_MAX, V|F },
    {"interp_start", "point to start linear interpolation", OFFSET(interp_start), AV_OPT_TYPE_INT, {.i64=15}, 0, 255, V|F },
    {"interp_end", "point to end linear interpolation", OFFSET(interp_end), AV_OPT_TYPE_INT, {.i64=240}, 0, 255, V|F },
    {"scene", "scene change level", OFFSET(scene_score), AV_OPT_TYPE_DOUBLE, {.dbl=8.2}, 0, INT_MAX, V|F },
    {"flags", "set flags", OFFSET(flags), AV_OPT_TYPE_FLAGS, {.i64=1}, 0, INT_MAX, V|F, "flags" },
    {"scene_change_detect", "enable scene change detection", 0, AV_OPT_TYPE_CONST, {.i64=FRAMERATE_FLAG_SCD}, INT_MIN, INT_MAX, V|F, "flags" },
    {"scd", "enable scene change detection", 0, AV_OPT_TYPE_CONST, {.i64=FRAMERATE_FLAG_SCD}, INT_MIN, INT_MAX, V|F, "flags" },

AVFILTER_DEFINE_CLASS(framerate);
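
/* Sum of absolute differences over one 8x8 block of 16-bit samples,
 * used by the scene change detection for high bit-depth input. */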
static av_always_inline int64_t sad_8x8_16(const uint16_t *src1, ptrdiff_t stride1,
                                           const uint16_t *src2, ptrdiff_t stride2)
    for (y = 0; y < 8; y++) {
        for (x = 0; x < 8; x++)
            sum += FFABS(src1[x] - src2[x]);

static int64_t scene_sad16(FrameRateContext *s, const uint16_t *p1, int p1_linesize, const uint16_t* p2, int p2_linesize, const int width, const int height)
    for (sad = y = 0; y < height - 7; y += 8) {
        for (x = 0; x < width - 7; x += 8) {
            sad += sad_8x8_16(p1 + y * p1_linesize + x,
                              p2 + y * p2_linesize + x,

static int64_t scene_sad8(FrameRateContext *s, uint8_t *p1, int p1_linesize, uint8_t* p2, int p2_linesize, const int width, const int height)
    for (sad = y = 0; y < height - 7; y += 8) {
        for (x = 0; x < width - 7; x += 8) {
            sad += s->sad(p1 + y * p1_linesize + x,
                          p2 + y * p2_linesize + x,

static double get_scene_score(AVFilterContext *ctx, AVFrame *crnt, AVFrame *next)
    FrameRateContext *s = ctx->priv;

    ff_dlog(ctx, "get_scene_score()\n");

    if (crnt->height == next->height &&
        crnt->width  == next->width) {
        ff_dlog(ctx, "get_scene_score() process\n");
        if (s->bitdepth == 8)
            sad = scene_sad8(s, crnt->data[0], crnt->linesize[0], next->data[0], next->linesize[0], crnt->width, crnt->height);
        else
            sad = scene_sad16(s, (const uint16_t*)crnt->data[0], crnt->linesize[0] / 2, (const uint16_t*)next->data[0], next->linesize[0] / 2, crnt->width, crnt->height);

        mafd = (double)sad * 100.0 / FFMAX(1, (crnt->height & ~7) * (crnt->width & ~7)) / (1 << s->bitdepth);
        diff = fabs(mafd - s->prev_mafd);
        ret  = av_clipf(FFMIN(mafd, diff), 0, 100.0);

    ff_dlog(ctx, "get_scene_score() result is:%f\n", ret);
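
/* Per-job data for the slice-threaded blend: the two source frames and the
 * weight applied to each of them. */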
typedef struct ThreadData {
    AVFrame *copy_src1, *copy_src2;
    uint16_t src1_factor, src2_factor;
} ThreadData;

static int filter_slice(AVFilterContext *ctx, void *arg, int job, int nb_jobs)
    FrameRateContext *s = ctx->priv;
    ThreadData *td = arg;
    uint16_t src1_factor = td->src1_factor;
    uint16_t src2_factor = td->src2_factor;

    for (plane = 0; plane < 4 && td->copy_src1->data[plane] && td->copy_src2->data[plane]; plane++) {
        int cpy_line_width = s->line_size[plane];
        uint8_t *cpy_src1_data = td->copy_src1->data[plane];
        int cpy_src1_line_size = td->copy_src1->linesize[plane];
        uint8_t *cpy_src2_data = td->copy_src2->data[plane];
        int cpy_src2_line_size = td->copy_src2->linesize[plane];
        int cpy_src_h = (plane > 0 && plane < 3) ? (td->copy_src1->height >> s->vsub) : (td->copy_src1->height);
        uint8_t *cpy_dst_data = s->work->data[plane];
        int cpy_dst_line_size = s->work->linesize[plane];
        const int start = (cpy_src_h *  job   ) / nb_jobs;
        const int end   = (cpy_src_h * (job+1)) / nb_jobs;
        cpy_src1_data += start * cpy_src1_line_size;
        cpy_src2_data += start * cpy_src2_line_size;
        cpy_dst_data  += start * cpy_dst_line_size;

        s->blend(cpy_src1_data, cpy_src1_line_size,
                 cpy_src2_data, cpy_src2_line_size,
                 cpy_dst_data, cpy_dst_line_size,
                 cpy_line_width, end - start,
                 src1_factor, src2_factor, s->blend_factor_max >> 1);
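
/* Blend f0 and f1 into a new work frame with the given interpolation factor,
 * unless the scene change score says the two frames belong to different shots. */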
static int blend_frames(AVFilterContext *ctx, int interpolate)
    FrameRateContext *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];
    double interpolate_scene_score = 0;

    if ((s->flags & FRAMERATE_FLAG_SCD)) {
        if (s->score >= 0.0)
            interpolate_scene_score = s->score;
        else
            interpolate_scene_score = s->score = get_scene_score(ctx, s->f0, s->f1);
        ff_dlog(ctx, "blend_frames() interpolate scene score:%f\n", interpolate_scene_score);
    }
    // decide if the shot-change detection allows us to blend two frames
    if (interpolate_scene_score < s->scene_score) {
        ThreadData td;
        td.copy_src1 = s->f0;
        td.copy_src2 = s->f1;
        td.src2_factor = interpolate;
        td.src1_factor = s->blend_factor_max - td.src2_factor;

        // get work-space for output frame
        s->work = ff_get_video_buffer(outlink, outlink->w, outlink->h);
        if (!s->work)
            return AVERROR(ENOMEM);

        av_frame_copy_props(s->work, s->f0);

        ff_dlog(ctx, "blend_frames() INTERPOLATE to create work frame\n");
        ctx->internal->execute(ctx, filter_slice, &td, NULL, FFMIN(FFMAX(1, outlink->h >> 2), ff_filter_get_nb_threads(ctx)));
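
/* Build the next output frame on the target PTS grid: clone f0 or f1 when the
 * interpolation factor falls outside [interp_start, interp_end], otherwise
 * blend the two surrounding source frames. */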
static int process_work_frame(AVFilterContext *ctx)
    FrameRateContext *s = ctx->priv;
    int64_t interpolate, interpolate8;

    if (!s->f0 && !s->flush)
        return 0;

    work_pts = s->start_pts + av_rescale_q(s->n, av_inv_q(s->dest_frame_rate), s->dest_time_base);

    if (work_pts >= s->pts1 && !s->flush)
        return 0;

    s->work = av_frame_clone(s->f1);

    if (work_pts >= s->pts1 + s->delta && s->flush)
        return 0;

    interpolate  = av_rescale(work_pts - s->pts0, s->blend_factor_max, s->delta);
    interpolate8 = av_rescale(work_pts - s->pts0, 256, s->delta);
    ff_dlog(ctx, "process_work_frame() interpolate: %"PRId64"/256\n", interpolate8);
    if (interpolate >= s->blend_factor_max || interpolate8 > s->interp_end) {
        s->work = av_frame_clone(s->f1);
    } else if (interpolate <= 0 || interpolate8 < s->interp_start) {
        s->work = av_frame_clone(s->f0);
    } else {
        ret = blend_frames(ctx, interpolate);
        if (ret < 0)
            return ret;
        if (ret == 0)
            s->work = av_frame_clone(interpolate > (s->blend_factor_max >> 1) ? s->f1 : s->f0);
    }

    if (!s->work)
        return AVERROR(ENOMEM);

    s->work->pts = work_pts;

static av_cold int init(AVFilterContext *ctx)
    FrameRateContext *s = ctx->priv;
    s->start_pts = AV_NOPTS_VALUE;

static av_cold void uninit(AVFilterContext *ctx)
    FrameRateContext *s = ctx->priv;
    av_frame_free(&s->f0);
    av_frame_free(&s->f1);

static int query_formats(AVFilterContext *ctx)
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUVJ411P,
        AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUVJ420P,
        AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUVJ422P,
        AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUVJ440P,
        AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUVJ444P,
        AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV420P12,
        AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV422P12,
        AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV444P12,
        AV_PIX_FMT_NONE
    };

    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
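
/* Reference C implementations of the blend: a rounded weighted average of the
 * two source frames, in 8-bit and 16-bit variants. */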
static void blend_frames_c(BLEND_FUNC_PARAMS)
    for (line = 0; line < height; line++) {
        for (pixel = 0; pixel < width; pixel++)
            dst[pixel] = ((src1[pixel] * factor1) + (src2[pixel] * factor2) + half) >> BLEND_FACTOR_DEPTH8;
        src1 += src1_linesize;
        src2 += src2_linesize;
        dst  += dst_linesize;

static void blend_frames16_c(BLEND_FUNC_PARAMS)
    uint16_t *dstw  = (uint16_t *)dst;
    uint16_t *src1w = (uint16_t *)src1;
    uint16_t *src2w = (uint16_t *)src2;

    for (line = 0; line < height; line++) {
        for (pixel = 0; pixel < width; pixel++)
            dstw[pixel] = ((src1w[pixel] * factor1) + (src2w[pixel] * factor2) + half) >> BLEND_FACTOR_DEPTH16;
        src1w += src1_linesize;
        src2w += src2_linesize;
        dstw  += dst_linesize;
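
/* Pick the blend function and blend factor range for the detected bit depth;
 * the x86 init can then replace the C blend with an optimized version. */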
void ff_framerate_init(FrameRateContext *s)
    if (s->bitdepth == 8) {
        s->blend_factor_max = 1 << BLEND_FACTOR_DEPTH8;
        s->blend = blend_frames_c;
    } else {
        s->blend_factor_max = 1 << BLEND_FACTOR_DEPTH16;
        s->blend = blend_frames16_c;
    }
    if (ARCH_X86)
        ff_framerate_init_x86(s);
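
/* Cache per-plane line sizes, bit depth, chroma subsampling and the 8x8 SAD
 * function for the negotiated input format. */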
static int config_input(AVFilterLink *inlink)
    AVFilterContext *ctx = inlink->dst;
    FrameRateContext *s = ctx->priv;
    const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(inlink->format);

    for (plane = 0; plane < 4; plane++) {
        s->line_size[plane] = av_image_get_linesize(inlink->format, inlink->w,
                                                    plane);
    }

    s->bitdepth = pix_desc->comp[0].depth;
    s->vsub = pix_desc->log2_chroma_h;

    s->sad = av_pixelutils_get_sad_fn(3, 3, 2, s); // 8x8 both sources aligned
    if (!s->sad)
        return AVERROR(EINVAL);

    s->srce_time_base = inlink->time_base;

    ff_framerate_init(s);
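
/* Main scheduling: emit any work frame that is due, then consume the next
 * input frame, rescale its PTS to the output time base and rotate it into the
 * f0/f1 pair; input status is forwarded once the stream is exhausted. */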
static int activate(AVFilterContext *ctx)
    AVFilterLink *inlink = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];
    FrameRateContext *s = ctx->priv;

    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);

    ret = process_work_frame(ctx);
    if (ret < 0)
        return ret;
    else if (ret == 1)
        return ff_filter_frame(outlink, s->work);

    ret = ff_inlink_consume_frame(inlink, &inpicref);

    if (inpicref->interlaced_frame)
        av_log(ctx, AV_LOG_WARNING, "Interlaced frame found - the output will not be correct.\n");

    if (inpicref->pts == AV_NOPTS_VALUE) {
        av_log(ctx, AV_LOG_WARNING, "Ignoring frame without PTS.\n");
        av_frame_free(&inpicref);
    }

    pts = av_rescale_q(inpicref->pts, s->srce_time_base, s->dest_time_base);

    if (s->f1 && pts == s->pts1) {
        av_log(ctx, AV_LOG_WARNING, "Ignoring frame with same PTS.\n");
        av_frame_free(&inpicref);
    }

    av_frame_free(&s->f0);
    s->delta = s->pts1 - s->pts0;

    av_log(ctx, AV_LOG_WARNING, "PTS discontinuity.\n");
    s->start_pts = s->pts1;
    av_frame_free(&s->f0);

    if (s->start_pts == AV_NOPTS_VALUE)
        s->start_pts = s->pts1;

    if (ff_inlink_acknowledge_status(inlink, &status, &pts)) {
        ff_outlink_set_status(outlink, status, pts);
    }

    FF_FILTER_FORWARD_WANTED(outlink, inlink);

    return FFERROR_NOT_READY;
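
/* Derive an output time base fine enough to represent the requested frame rate
 * exactly (warn if it cannot be) and advertise the new frame rate downstream. */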
static int config_output(AVFilterLink *outlink)
    AVFilterContext *ctx = outlink->src;
    FrameRateContext *s = ctx->priv;

    ff_dlog(ctx, "config_output()\n");

    ff_dlog(ctx,
           "config_output() input time base:%u/%u (%f)\n",
           ctx->inputs[0]->time_base.num, ctx->inputs[0]->time_base.den,
           av_q2d(ctx->inputs[0]->time_base));

    // make sure timebase is small enough to hold the framerate

    exact = av_reduce(&s->dest_time_base.num, &s->dest_time_base.den,
                      av_gcd((int64_t)s->srce_time_base.num * s->dest_frame_rate.num,
                             (int64_t)s->srce_time_base.den * s->dest_frame_rate.den),
                      (int64_t)s->srce_time_base.den * s->dest_frame_rate.num, INT_MAX);

    av_log(ctx, AV_LOG_INFO,
           "time base:%u/%u -> %u/%u exact:%d\n",
           s->srce_time_base.num, s->srce_time_base.den,
           s->dest_time_base.num, s->dest_time_base.den, exact);
    if (!exact)
        av_log(ctx, AV_LOG_WARNING, "Timebase conversion is not exact\n");

    outlink->frame_rate = s->dest_frame_rate;
    outlink->time_base = s->dest_time_base;

    ff_dlog(ctx,
           "config_output() output time base:%u/%u (%f) w:%d h:%d\n",
           outlink->time_base.num, outlink->time_base.den,
           av_q2d(outlink->time_base),
           outlink->w, outlink->h);

    av_log(ctx, AV_LOG_INFO, "fps -> fps:%u/%u scene score:%f interpolate start:%d end:%d\n",
           s->dest_frame_rate.num, s->dest_frame_rate.den,
           s->scene_score, s->interp_start, s->interp_end);

static const AVFilterPad framerate_inputs[] = {
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_input,

static const AVFilterPad framerate_outputs[] = {
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_output,

AVFilter ff_vf_framerate = {
    .name          = "framerate",
    .description   = NULL_IF_CONFIG_SMALL("Upsamples or downsamples progressive source between specified frame rates."),
    .priv_size     = sizeof(FrameRateContext),
    .priv_class    = &framerate_class,
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,
    .inputs        = framerate_inputs,
    .outputs       = framerate_outputs,
    .flags         = AVFILTER_FLAG_SLICE_THREADS,
    .activate      = activate,
};