#include "libavutil/opt.h"
#include "libavutil/timestamp.h"
#include "internal.h"
-#include "dualinput.h"
#include "drawutils.h"
+#include "framesync.h"
#include "video.h"
static const char *const var_names[] = {
VAR_VARS_NB
};
-enum EOFAction {
- EOF_ACTION_REPEAT,
- EOF_ACTION_ENDALL,
- EOF_ACTION_PASS
-};
-
-static const char * const eof_action_str[] = {
- "repeat", "endall", "pass"
-};
-
#define MAIN 0
#define OVERLAY 1
OVERLAY_FORMAT_YUV444,
OVERLAY_FORMAT_RGB,
OVERLAY_FORMAT_GBRP,
+ OVERLAY_FORMAT_AUTO,
OVERLAY_FORMAT_NB
};
const AVClass *class;
int x, y; ///< position of overlaid picture
- int allow_packed_rgb;
uint8_t main_is_packed_rgb;
uint8_t main_rgba_map[4];
uint8_t main_has_alpha;
int format; ///< OverlayFormat
int eval_mode; ///< EvalMode
- FFDualInputContext dinput;
+ FFFrameSync fs;
int main_pix_step[4]; ///< steps per pixel for each plane of the main output
int overlay_pix_step[4]; ///< steps per pixel for each plane of the overlay
double var_values[VAR_VARS_NB];
char *x_expr, *y_expr;
- int eof_action; ///< action to take on EOF from source
-
AVExpr *x_pexpr, *y_pexpr;
void (*blend_image)(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y);
{
OverlayContext *s = ctx->priv;
- ff_dualinput_uninit(&s->dinput);
+ ff_framesync_uninit(&s->fs);
av_expr_free(s->x_pexpr); s->x_pexpr = NULL;
av_expr_free(s->y_pexpr); s->y_pexpr = NULL;
}
return ret;
}
+/* Pixel formats that carry an alpha component.  Placed above
+ * query_formats() so the new OVERLAY_FORMAT_AUTO case can build its
+ * format list from it; also consulted by config_input_main() to detect
+ * an alpha plane in the main input. */
+static const enum AVPixelFormat alpha_pix_fmts[] = {
+    AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA444P,
+    AV_PIX_FMT_ARGB, AV_PIX_FMT_ABGR, AV_PIX_FMT_RGBA,
+    AV_PIX_FMT_BGRA, AV_PIX_FMT_GBRAP, AV_PIX_FMT_NONE
+};
+
static int query_formats(AVFilterContext *ctx)
{
    OverlayContext *s = ctx->priv;
        goto fail;
    }
        break;
+    /* "auto" mode: restrict the main input to alpha-capable formats so
+     * config_input_main() can pick a blender from the negotiated format.
+     * NOTE(review): the overlay format list built earlier is unused on
+     * this path -- confirm it is released and not leaked. */
+    case OVERLAY_FORMAT_AUTO:
+        if (!(main_formats = ff_make_format_list(alpha_pix_fmts))) {
+            ret = AVERROR(ENOMEM);
+            goto fail;
+        }
+        break;
    default:
        av_assert0(0);
    }
-    if ((ret = ff_formats_ref(main_formats , &ctx->inputs[MAIN]->out_formats )) < 0 ||
-    (ret = ff_formats_ref(overlay_formats, &ctx->inputs[OVERLAY]->out_formats)) < 0 ||
-    (ret = ff_formats_ref(main_formats , &ctx->outputs[MAIN]->in_formats )) < 0)
+    if (s->format == OVERLAY_FORMAT_AUTO) {
+        /* one shared list: both inputs and the output must all agree on
+         * the same alpha-capable pixel format */
+        ret = ff_set_common_formats(ctx, main_formats);
+        if (ret < 0)
        goto fail;
+    } else {
+        /* fixed mode: main and output use main_formats, the overlay input
+         * keeps its own (alpha-capable) overlay_formats list */
+        if ((ret = ff_formats_ref(main_formats , &ctx->inputs[MAIN]->out_formats )) < 0 ||
+            (ret = ff_formats_ref(overlay_formats, &ctx->inputs[OVERLAY]->out_formats)) < 0 ||
+            (ret = ff_formats_ref(main_formats , &ctx->outputs[MAIN]->in_formats )) < 0)
+            goto fail;
+    }
    return 0;
fail:
    return ret;
}
-static const enum AVPixelFormat alpha_pix_fmts[] = {
- AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA444P,
- AV_PIX_FMT_ARGB, AV_PIX_FMT_ABGR, AV_PIX_FMT_RGBA,
- AV_PIX_FMT_BGRA, AV_PIX_FMT_GBRAP, AV_PIX_FMT_NONE
-};
-
static int config_input_overlay(AVFilterLink *inlink)
{
AVFilterContext *ctx = inlink->dst;
}
av_log(ctx, AV_LOG_VERBOSE,
- "main w:%d h:%d fmt:%s overlay w:%d h:%d fmt:%s eof_action:%s\n",
+ "main w:%d h:%d fmt:%s overlay w:%d h:%d fmt:%s\n",
ctx->inputs[MAIN]->w, ctx->inputs[MAIN]->h,
av_get_pix_fmt_name(ctx->inputs[MAIN]->format),
ctx->inputs[OVERLAY]->w, ctx->inputs[OVERLAY]->h,
- av_get_pix_fmt_name(ctx->inputs[OVERLAY]->format),
- eof_action_str[s->eof_action]);
+ av_get_pix_fmt_name(ctx->inputs[OVERLAY]->format));
return 0;
}
OverlayContext *s = ctx->priv;
int ret;
- if ((ret = ff_dualinput_init(ctx, &s->dinput)) < 0)
+ if ((ret = ff_framesync_init_dualinput(&s->fs, ctx)) < 0)
return ret;
outlink->w = ctx->inputs[MAIN]->w;
outlink->h = ctx->inputs[MAIN]->h;
outlink->time_base = ctx->inputs[MAIN]->time_base;
- return 0;
+ return ff_framesync_configure(&s->fs);
}
// divide by 255 and round to nearest
static void blend_image_packed_rgb(AVFilterContext *ctx,
AVFrame *dst, const AVFrame *src,
- int x, int y)
+ int main_has_alpha, int x, int y)
{
OverlayContext *s = ctx->priv;
int i, imax, j, jmax;
const int sb = s->overlay_rgba_map[B];
const int sa = s->overlay_rgba_map[A];
const int sstep = s->overlay_pix_step[0];
- const int main_has_alpha = s->main_has_alpha;
uint8_t *S, *sp, *d, *dp;
i = FFMAX(-y, 0);
int dst_hp = AV_CEIL_RSHIFT(dst_h, vsub);
int yp = y>>vsub;
int xp = x>>hsub;
- uint8_t *s, *sp, *d, *dp, *a, *ap;
+ uint8_t *s, *sp, *d, *dp, *dap, *a, *da, *ap;
int jmax, j, k, kmax;
j = FFMAX(-yp, 0);
+ (yp+j) * dst->linesize[dst_plane]
+ dst_offset;
ap = src->data[3] + (j<<vsub) * src->linesize[3];
+ dap = dst->data[3] + ((yp+j) << vsub) * dst->linesize[3];
for (jmax = FFMIN(-yp + dst_hp, src_hp); j < jmax; j++) {
k = FFMAX(-xp, 0);
d = dp + (xp+k) * dst_step;
s = sp + k;
a = ap + (k<<hsub);
+ da = dap + ((xp+k) << hsub);
for (kmax = FFMIN(-xp + dst_wp, src_wp); k < kmax; k++) {
int alpha_v, alpha_h, alpha;
// average alpha for color components, improve quality
uint8_t alpha_d;
if (hsub && vsub && j+1 < src_hp && k+1 < src_wp) {
- alpha_d = (d[0] + d[src->linesize[3]] +
- d[1] + d[src->linesize[3]+1]) >> 2;
+ alpha_d = (da[0] + da[dst->linesize[3]] +
+ da[1] + da[dst->linesize[3]+1]) >> 2;
} else if (hsub || vsub) {
alpha_h = hsub && k+1 < src_wp ?
- (d[0] + d[1]) >> 1 : d[0];
+ (da[0] + da[1]) >> 1 : da[0];
alpha_v = vsub && j+1 < src_hp ?
- (d[0] + d[src->linesize[3]]) >> 1 : d[0];
+ (da[0] + da[dst->linesize[3]]) >> 1 : da[0];
alpha_d = (alpha_v + alpha_h) >> 1;
} else
- alpha_d = d[0];
+ alpha_d = da[0];
alpha = UNPREMULTIPLY_ALPHA(alpha, alpha_d);
}
*d = FAST_DIV255(*d * (255 - alpha) + *s * alpha);
s++;
d += dst_step;
+ da += 1 << hsub;
a += 1 << hsub;
}
dp += dst->linesize[dst_plane];
sp += src->linesize[i];
ap += (1 << vsub) * src->linesize[3];
+ dap += (1 << vsub) * dst->linesize[3];
}
}
const int dst_w = dst->width;
const int dst_h = dst->height;
- if (main_has_alpha)
- alpha_composite(src, dst, src_w, src_h, dst_w, dst_h, x, y);
-
blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 0, 0, 0, x, y, main_has_alpha,
s->main_desc->comp[0].plane, s->main_desc->comp[0].offset, s->main_desc->comp[0].step);
blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 1, hsub, vsub, x, y, main_has_alpha,
s->main_desc->comp[1].plane, s->main_desc->comp[1].offset, s->main_desc->comp[1].step);
blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 2, hsub, vsub, x, y, main_has_alpha,
s->main_desc->comp[2].plane, s->main_desc->comp[2].offset, s->main_desc->comp[2].step);
+
+ if (main_has_alpha)
+ alpha_composite(src, dst, src_w, src_h, dst_w, dst_h, x, y);
}
-static av_always_inline void blend_image_rgb(AVFilterContext *ctx,
- AVFrame *dst, const AVFrame *src,
- int hsub, int vsub,
- int main_has_alpha,
- int x, int y)
+static av_always_inline void blend_image_planar_rgb(AVFilterContext *ctx,
+ AVFrame *dst, const AVFrame *src,
+ int hsub, int vsub,
+ int main_has_alpha,
+ int x, int y)
{
OverlayContext *s = ctx->priv;
const int src_w = src->width;
const int dst_w = dst->width;
const int dst_h = dst->height;
- if (main_has_alpha)
- alpha_composite(src, dst, src_w, src_h, dst_w, dst_h, x, y);
-
blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 0, 0, 0, x, y, main_has_alpha,
s->main_desc->comp[1].plane, s->main_desc->comp[1].offset, s->main_desc->comp[1].step);
blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 1, hsub, vsub, x, y, main_has_alpha,
s->main_desc->comp[2].plane, s->main_desc->comp[2].offset, s->main_desc->comp[2].step);
blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 2, hsub, vsub, x, y, main_has_alpha,
s->main_desc->comp[0].plane, s->main_desc->comp[0].offset, s->main_desc->comp[0].step);
+
+ if (main_has_alpha)
+ alpha_composite(src, dst, src_w, src_h, dst_w, dst_h, x, y);
}
+/* Per-format entry points for OverlayContext.blend_image.  Each wrapper
+ * passes hsub/vsub and main_has_alpha as compile-time constants so the
+ * always-inline blend helpers specialize per format; config_input_main()
+ * selects one of these from the negotiated main input format.  The old
+ * wrappers read s->main_has_alpha at run time; the yuva*/gbrap/rgba
+ * variants now bake it in instead. */
static void blend_image_yuv420(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
{
-    OverlayContext *s = ctx->priv;
+    blend_image_yuv(ctx, dst, src, 1, 1, 0, x, y);
+}
-    blend_image_yuv(ctx, dst, src, 1, 1, s->main_has_alpha, x, y);
+static void blend_image_yuva420(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
+{
+    blend_image_yuv(ctx, dst, src, 1, 1, 1, x, y);
}
static void blend_image_yuv422(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
{
-    OverlayContext *s = ctx->priv;
+    blend_image_yuv(ctx, dst, src, 1, 0, 0, x, y);
+}
-    blend_image_yuv(ctx, dst, src, 1, 0, s->main_has_alpha, x, y);
+static void blend_image_yuva422(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
+{
+    blend_image_yuv(ctx, dst, src, 1, 0, 1, x, y);
}
static void blend_image_yuv444(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
{
-    OverlayContext *s = ctx->priv;
+    blend_image_yuv(ctx, dst, src, 0, 0, 0, x, y);
+}
-    blend_image_yuv(ctx, dst, src, 0, 0, s->main_has_alpha, x, y);
+static void blend_image_yuva444(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
+{
+    blend_image_yuv(ctx, dst, src, 0, 0, 1, x, y);
}
static void blend_image_gbrp(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
{
-    OverlayContext *s = ctx->priv;
+    blend_image_planar_rgb(ctx, dst, src, 0, 0, 0, x, y);
+}
+
+static void blend_image_gbrap(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
+{
+    blend_image_planar_rgb(ctx, dst, src, 0, 0, 1, x, y);
+}
-    blend_image_rgb(ctx, dst, src, 0, 0, s->main_has_alpha, x, y);
+static void blend_image_rgb(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
+{
+    blend_image_packed_rgb(ctx, dst, src, 0, x, y);
+}
+
+static void blend_image_rgba(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
+{
+    blend_image_packed_rgb(ctx, dst, src, 1, x, y);
}
static int config_input_main(AVFilterLink *inlink)
+    /* detect an alpha plane in the main input; picks the alpha-aware
+     * blender variants below */
    s->main_has_alpha = ff_fmt_is_in(inlink->format, alpha_pix_fmts);
    switch (s->format) {
    case OVERLAY_FORMAT_YUV420:
-        s->blend_image = blend_image_yuv420;
+        s->blend_image = s->main_has_alpha ? blend_image_yuva420 : blend_image_yuv420;
        break;
    case OVERLAY_FORMAT_YUV422:
-        s->blend_image = blend_image_yuv422;
+        s->blend_image = s->main_has_alpha ? blend_image_yuva422 : blend_image_yuv422;
        break;
    case OVERLAY_FORMAT_YUV444:
-        s->blend_image = blend_image_yuv444;
+        s->blend_image = s->main_has_alpha ? blend_image_yuva444 : blend_image_yuv444;
        break;
    case OVERLAY_FORMAT_RGB:
-        s->blend_image = blend_image_packed_rgb;
+        s->blend_image = s->main_has_alpha ? blend_image_rgba : blend_image_rgb;
        break;
    case OVERLAY_FORMAT_GBRP:
-        s->blend_image = blend_image_gbrp;
+        s->blend_image = s->main_has_alpha ? blend_image_gbrap : blend_image_gbrp;
+        break;
+    /* auto: query_formats() limited this link to alpha_pix_fmts, so
+     * every reachable format here carries alpha; anything else would be
+     * a negotiation bug, hence the assert in the default branch. */
+    case OVERLAY_FORMAT_AUTO:
+        switch (inlink->format) {
+        case AV_PIX_FMT_YUVA420P:
+            s->blend_image = blend_image_yuva420;
+            break;
+        case AV_PIX_FMT_YUVA422P:
+            s->blend_image = blend_image_yuva422;
+            break;
+        case AV_PIX_FMT_YUVA444P:
+            s->blend_image = blend_image_yuva444;
+            break;
+        case AV_PIX_FMT_ARGB:
+        case AV_PIX_FMT_RGBA:
+        case AV_PIX_FMT_BGRA:
+        case AV_PIX_FMT_ABGR:
+            s->blend_image = blend_image_rgba;
+            break;
+        case AV_PIX_FMT_GBRAP:
+            s->blend_image = blend_image_gbrap;
+            break;
+        default:
+            av_assert0(0);
+            break;
+        }
        break;
    }
    return 0;
}
-static AVFrame *do_blend(AVFilterContext *ctx, AVFrame *mainpic,
-                         const AVFrame *second)
+/* FFFrameSync on_event callback (was the dualinput process callback):
+ * fetches a writable main frame plus the matching overlay frame and
+ * sends the blended result to the output link itself, instead of
+ * returning the frame to a dualinput driver. */
+static int do_blend(FFFrameSync *fs)
{
+    AVFilterContext *ctx = fs->parent;
+    AVFrame *mainpic, *second;
    OverlayContext *s = ctx->priv;
    AVFilterLink *inlink = ctx->inputs[0];
+    int ret;
+
+    ret = ff_framesync_dualinput_get_writable(fs, &mainpic, &second);
+    if (ret < 0)
+        return ret;
+    /* no overlay frame for this event -- pass the main frame through
+     * unchanged (presumably the eof_action=pass / pre-first-overlay
+     * case; confirm against framesync semantics) */
+    if (!second)
+        return ff_filter_frame(ctx->outputs[0], mainpic);
    if (s->eval_mode == EVAL_MODE_FRAME) {
        int64_t pos = mainpic->pkt_pos;
    if (s->x < mainpic->width && s->x + second->width >= 0 ||
        s->y < mainpic->height && s->y + second->height >= 0)
        s->blend_image(ctx, mainpic, second, s->x, s->y);
-    return mainpic;
+    return ff_filter_frame(ctx->outputs[0], mainpic);
}
-static int filter_frame(AVFilterLink *inlink, AVFrame *inpicref)
+/* init: only wires the framesync event callback.  The old eof_action /
+ * shortest / repeatlast cross-option fixups are gone because those
+ * options now map directly onto fs.opt_* fields handled by framesync
+ * (see the option table below). */
+static av_cold int init(AVFilterContext *ctx)
{
-    OverlayContext *s = inlink->dst->priv;
-    av_log(inlink->dst, AV_LOG_DEBUG, "Incoming frame (time:%s) from link #%d\n", av_ts2timestr(inpicref->pts, &inlink->time_base), FF_INLINK_IDX(inlink));
-    return ff_dualinput_filter_frame(&s->dinput, inlink, inpicref);
-}
+    OverlayContext *s = ctx->priv;
-static int request_frame(AVFilterLink *outlink)
-{
-    OverlayContext *s = outlink->src->priv;
-    return ff_dualinput_request_frame(&s->dinput, outlink);
+    s->fs.on_event = do_blend;
+    return 0;
}
-static av_cold int init(AVFilterContext *ctx)
+/* activate: all input scheduling is delegated to framesync, replacing
+ * the per-link filter_frame()/request_frame() callbacks removed above. */
+static int activate(AVFilterContext *ctx)
{
    OverlayContext *s = ctx->priv;
-
-    if (s->allow_packed_rgb) {
-        av_log(ctx, AV_LOG_WARNING,
-               "The rgb option is deprecated and is overriding the format option, use format instead\n");
-        s->format = OVERLAY_FORMAT_RGB;
-    }
-    if (!s->dinput.repeatlast || s->eof_action == EOF_ACTION_PASS) {
-        s->dinput.repeatlast = 0;
-        s->eof_action = EOF_ACTION_PASS;
-    }
-    if (s->dinput.shortest || s->eof_action == EOF_ACTION_ENDALL) {
-        s->dinput.shortest = 1;
-        s->eof_action = EOF_ACTION_ENDALL;
-    }
-
-    s->dinput.process = do_blend;
-    return 0;
+    return ff_framesync_activate(&s->fs);
}
#define OFFSET(x) offsetof(OverlayContext, x)
{ "x", "set the x expression", OFFSET(x_expr), AV_OPT_TYPE_STRING, {.str = "0"}, CHAR_MIN, CHAR_MAX, FLAGS },
{ "y", "set the y expression", OFFSET(y_expr), AV_OPT_TYPE_STRING, {.str = "0"}, CHAR_MIN, CHAR_MAX, FLAGS },
{ "eof_action", "Action to take when encountering EOF from secondary input ",
- OFFSET(eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_REPEAT },
+ OFFSET(fs.opt_eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_REPEAT },
EOF_ACTION_REPEAT, EOF_ACTION_PASS, .flags = FLAGS, "eof_action" },
{ "repeat", "Repeat the previous frame.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_REPEAT }, .flags = FLAGS, "eof_action" },
{ "endall", "End both streams.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ENDALL }, .flags = FLAGS, "eof_action" },
{ "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, {.i64 = EVAL_MODE_FRAME}, 0, EVAL_MODE_NB-1, FLAGS, "eval" },
{ "init", "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_INIT}, .flags = FLAGS, .unit = "eval" },
{ "frame", "eval expressions per-frame", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_FRAME}, .flags = FLAGS, .unit = "eval" },
- { "rgb", "force packed RGB in input and output (deprecated)", OFFSET(allow_packed_rgb), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
- { "shortest", "force termination when the shortest input terminates", OFFSET(dinput.shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
+ { "shortest", "force termination when the shortest input terminates", OFFSET(fs.opt_shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
{ "format", "set output format", OFFSET(format), AV_OPT_TYPE_INT, {.i64=OVERLAY_FORMAT_YUV420}, 0, OVERLAY_FORMAT_NB-1, FLAGS, "format" },
{ "yuv420", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_YUV420}, .flags = FLAGS, .unit = "format" },
{ "yuv422", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_YUV422}, .flags = FLAGS, .unit = "format" },
{ "yuv444", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_YUV444}, .flags = FLAGS, .unit = "format" },
{ "rgb", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_RGB}, .flags = FLAGS, .unit = "format" },
{ "gbrp", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_GBRP}, .flags = FLAGS, .unit = "format" },
- { "repeatlast", "repeat overlay of the last overlay frame", OFFSET(dinput.repeatlast), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, FLAGS },
+ { "auto", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_AUTO}, .flags = FLAGS, .unit = "format" },
+ { "repeatlast", "repeat overlay of the last overlay frame", OFFSET(fs.opt_repeatlast), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, FLAGS },
{ NULL }
};
-AVFILTER_DEFINE_CLASS(overlay);
+FRAMESYNC_DEFINE_CLASS(overlay, OverlayContext, fs);
static const AVFilterPad avfilter_vf_overlay_inputs[] = {
{
.name = "main",
.type = AVMEDIA_TYPE_VIDEO,
.config_props = config_input_main,
- .filter_frame = filter_frame,
- .needs_writable = 1,
},
{
.name = "overlay",
.type = AVMEDIA_TYPE_VIDEO,
.config_props = config_input_overlay,
- .filter_frame = filter_frame,
},
{ NULL }
};
.name = "default",
.type = AVMEDIA_TYPE_VIDEO,
.config_props = config_output,
- .request_frame = request_frame,
},
{ NULL }
};
AVFilter ff_vf_overlay = {
.name = "overlay",
.description = NULL_IF_CONFIG_SMALL("Overlay a video source on top of the input."),
+ .preinit = overlay_framesync_preinit,
.init = init,
.uninit = uninit,
.priv_size = sizeof(OverlayContext),
.priv_class = &overlay_class,
.query_formats = query_formats,
+ .activate = activate,
.process_command = process_command,
.inputs = avfilter_vf_overlay_inputs,
.outputs = avfilter_vf_overlay_outputs,