#include "internal.h"
#include "video.h"
#include "framesync.h"
+#include "maskedclamp.h"
#define OFFSET(x) offsetof(MaskedClampContext, x)
-#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
+#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_RUNTIME_PARAM
typedef struct ThreadData {
AVFrame *b, *o, *m, *d;
int depth;
FFFrameSync fs;
- int (*maskedclamp)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
+ MaskedClampDSPContext dsp;
} MaskedClampContext;
static const AVOption maskedclamp_options[] = {
AV_PIX_FMT_YUV420P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV444P16,
AV_PIX_FMT_YUVA420P9, AV_PIX_FMT_YUVA422P9, AV_PIX_FMT_YUVA444P9,
AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10,
+ AV_PIX_FMT_YUVA422P12, AV_PIX_FMT_YUVA444P12,
AV_PIX_FMT_YUVA420P16, AV_PIX_FMT_YUVA422P16, AV_PIX_FMT_YUVA444P16,
AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10,
AV_PIX_FMT_GBRP12, AV_PIX_FMT_GBRP14, AV_PIX_FMT_GBRP16,
AV_PIX_FMT_GBRAP, AV_PIX_FMT_GBRAP10, AV_PIX_FMT_GBRAP12, AV_PIX_FMT_GBRAP16,
- AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY9, AV_PIX_FMT_GRAY10, AV_PIX_FMT_GRAY12, AV_PIX_FMT_GRAY16,
+ AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY9, AV_PIX_FMT_GRAY10, AV_PIX_FMT_GRAY12, AV_PIX_FMT_GRAY14, AV_PIX_FMT_GRAY16,
AV_PIX_FMT_NONE
};
return ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));
}
-static int process_frame(FFFrameSync *fs)
-{
- AVFilterContext *ctx = fs->parent;
- MaskedClampContext *s = fs->opaque;
- AVFilterLink *outlink = ctx->outputs[0];
- AVFrame *out, *base, *dark, *bright;
- int ret;
-
- if ((ret = ff_framesync_get_frame(&s->fs, 0, &base, 0)) < 0 ||
- (ret = ff_framesync_get_frame(&s->fs, 1, &dark, 0)) < 0 ||
- (ret = ff_framesync_get_frame(&s->fs, 2, &bright, 0)) < 0)
- return ret;
-
- if (ctx->is_disabled) {
- out = av_frame_clone(base);
- if (!out)
- return AVERROR(ENOMEM);
- } else {
- ThreadData td;
-
- out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
- if (!out)
- return AVERROR(ENOMEM);
- av_frame_copy_props(out, base);
-
- td.b = base;
- td.o = dark;
- td.m = bright;
- td.d = out;
-
- ctx->internal->execute(ctx, s->maskedclamp, &td, NULL, FFMIN(s->height[0],
- ff_filter_get_nb_threads(ctx)));
- }
- out->pts = av_rescale_q(s->fs.pts, s->fs.time_base, outlink->time_base);
-
- return ff_filter_frame(outlink, out);
-}
-
-static int maskedclamp8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
+static int maskedclamp_slice(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
MaskedClampContext *s = ctx->priv;
ThreadData *td = arg;
uint8_t *dst = td->d->data[p] + slice_start * dlinesize;
const int undershoot = s->undershoot;
const int overshoot = s->overshoot;
- int x, y;
+ int y;
if (!((1 << p) & s->planes)) {
av_image_copy_plane(dst, dlinesize, bsrc, blinesize,
}
for (y = slice_start; y < slice_end; y++) {
- for (x = 0; x < w; x++) {
- if (bsrc[x] < darksrc[x] - undershoot)
- dst[x] = darksrc[x] - undershoot;
- else if (bsrc[x] > brightsrc[x] + overshoot)
- dst[x] = brightsrc[x] + overshoot;
- else
- dst[x] = bsrc[x];
- }
+ s->dsp.maskedclamp(bsrc, dst, darksrc, brightsrc, w, undershoot, overshoot);
dst += dlinesize;
bsrc += blinesize;
return 0;
}
-static int maskedclamp16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
+static int process_frame(FFFrameSync *fs)
{
- MaskedClampContext *s = ctx->priv;
- ThreadData *td = arg;
- int p;
+ AVFilterContext *ctx = fs->parent;
+ MaskedClampContext *s = fs->opaque;
+ AVFilterLink *outlink = ctx->outputs[0];
+ AVFrame *out, *base, *dark, *bright;
+ int ret;
- for (p = 0; p < s->nb_planes; p++) {
- const ptrdiff_t blinesize = td->b->linesize[p] / 2;
- const ptrdiff_t brightlinesize = td->m->linesize[p] / 2;
- const ptrdiff_t darklinesize = td->o->linesize[p] / 2;
- const ptrdiff_t dlinesize = td->d->linesize[p] / 2;
- const int w = s->width[p];
- const int h = s->height[p];
- const int slice_start = (h * jobnr) / nb_jobs;
- const int slice_end = (h * (jobnr+1)) / nb_jobs;
- const uint16_t *bsrc = (const uint16_t *)td->b->data[p] + slice_start * blinesize;
- const uint16_t *darksrc = (const uint16_t *)td->o->data[p] + slice_start * darklinesize;
- const uint16_t *brightsrc = (const uint16_t *)td->m->data[p] + slice_start * brightlinesize;
- uint16_t *dst = (uint16_t *)td->d->data[p] + slice_start * dlinesize;
- const int undershoot = s->undershoot;
- const int overshoot = s->overshoot;
- int x, y;
+ if ((ret = ff_framesync_get_frame(&s->fs, 0, &base, 0)) < 0 ||
+ (ret = ff_framesync_get_frame(&s->fs, 1, &dark, 0)) < 0 ||
+ (ret = ff_framesync_get_frame(&s->fs, 2, &bright, 0)) < 0)
+ return ret;
- if (!((1 << p) & s->planes)) {
- av_image_copy_plane((uint8_t *)dst, dlinesize, (const uint8_t *)bsrc, blinesize,
- s->linesize[p], slice_end - slice_start);
- continue;
- }
+ if (ctx->is_disabled) {
+ out = av_frame_clone(base);
+ if (!out)
+ return AVERROR(ENOMEM);
+ } else {
+ ThreadData td;
- for (y = slice_start; y < slice_end; y++) {
- for (x = 0; x < w; x++) {
- if (bsrc[x] < darksrc[x] - undershoot)
- dst[x] = darksrc[x] - undershoot;
- else if (bsrc[x] > brightsrc[x] + overshoot)
- dst[x] = brightsrc[x] + overshoot;
- else
- dst[x] = bsrc[x];
- }
+ out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
+ if (!out)
+ return AVERROR(ENOMEM);
+ av_frame_copy_props(out, base);
- dst += dlinesize;
- bsrc += blinesize;
- darksrc += darklinesize;
- brightsrc += brightlinesize;
- }
+ td.b = base;
+ td.o = dark;
+ td.m = bright;
+ td.d = out;
+
+ ctx->internal->execute(ctx, maskedclamp_slice, &td, NULL, FFMIN(s->height[0],
+ ff_filter_get_nb_threads(ctx)));
}
+ out->pts = av_rescale_q(s->fs.pts, s->fs.time_base, outlink->time_base);
- return 0;
+ return ff_filter_frame(outlink, out);
}
/* Instantiate a per-depth clamp kernel: for each pixel, force the base
 * value into the range [dark - undershoot, bright + overshoot].  All
 * pointers are passed as uint8_t* and reinterpreted at the element
 * type, so 8- and 16-bit variants share one implementation. */
#define MASKEDCLAMP(type, name)                                             \
static void maskedclamp##name(const uint8_t *bbsrc, uint8_t *ddst,          \
                              const uint8_t *ddarksrc, const uint8_t *bbrightsrc, \
                              int w, int undershoot, int overshoot)         \
{                                                                           \
    const type *bsrc      = (const type *)bbsrc;                            \
    const type *darksrc   = (const type *)ddarksrc;                         \
    const type *brightsrc = (const type *)bbrightsrc;                       \
    type *dst = (type *)ddst;                                               \
                                                                            \
    for (int x = 0; x < w; x++) {                                           \
        /* bounds are computed in int so undershoot/overshoot may push    */\
        /* them outside the element range without wrapping               */ \
        const int lo = darksrc[x] - undershoot;                             \
        const int hi = brightsrc[x] + overshoot;                            \
        int v = bsrc[x];                                                    \
                                                                            \
        if (v < lo)                                                         \
            v = lo;                                                         \
        if (v > hi)                                                         \
            v = hi;                                                         \
        dst[x] = v;                                                         \
    }                                                                       \
}

MASKEDCLAMP(uint8_t, 8)
MASKEDCLAMP(uint16_t, 16)
+
static int config_input(AVFilterLink *inlink)
{
AVFilterContext *ctx = inlink->dst;
s->width[0] = s->width[3] = inlink->w;
s->depth = desc->comp[0].depth;
+ s->undershoot = FFMIN(s->undershoot, (1 << s->depth) - 1);
+ s->overshoot = FFMIN(s->overshoot, (1 << s->depth) - 1);
- if (desc->comp[0].depth == 8)
- s->maskedclamp = maskedclamp8;
+ if (s->depth <= 8)
+ s->dsp.maskedclamp = maskedclamp8;
else
- s->maskedclamp = maskedclamp16;
+ s->dsp.maskedclamp = maskedclamp16;
+
+ if (ARCH_X86)
+ ff_maskedclamp_init_x86(&s->dsp, s->depth);
return 0;
}
outlink->w = base->w;
outlink->h = base->h;
- outlink->time_base = base->time_base;
outlink->sample_aspect_ratio = base->sample_aspect_ratio;
outlink->frame_rate = base->frame_rate;
s->fs.opaque = s;
s->fs.on_event = process_frame;
- return ff_framesync_configure(&s->fs);
+ ret = ff_framesync_configure(&s->fs);
+ outlink->time_base = s->fs.time_base;
+
+ return ret;
}
static int activate(AVFilterContext *ctx)
{ NULL }
};
-AVFilter ff_vf_maskedclamp = {
+const AVFilter ff_vf_maskedclamp = {
.name = "maskedclamp",
.description = NULL_IF_CONFIG_SMALL("Clamp first stream with second stream and third stream."),
.priv_size = sizeof(MaskedClampContext),
.outputs = maskedclamp_outputs,
.priv_class = &maskedclamp_class,
.flags = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL | AVFILTER_FLAG_SLICE_THREADS,
+ .process_command = ff_filter_process_command,
};