int mode;
/**
- * 0: bottom field first
- * 1: top field first
+ * 0: top field first
+ * 1: bottom field first
* -1: auto-detection
*/
int parity;
int frame_pending;
+ /**
+ * 0: deinterlace all frames
+ * 1: only deinterlace frames marked as interlaced
+ */
+ int auto_enable;
+
AVFilterBufferRef *cur;
AVFilterBufferRef *next;
AVFilterBufferRef *prev;
int w = dstpic->video->w;
int h = dstpic->video->h;
int refs = yadif->cur->linesize[i];
- int df = (yadif->csp->comp[i].depth_minus1+1) / 8;
+ int df = (yadif->csp->comp[i].depth_minus1 + 8) / 8;
- if (i) {
+ if (i == 1 || i == 2) {
/* Why is this not part of the per-plane description thing? */
w >>= yadif->csp->log2_chroma_w;
h >>= yadif->csp->log2_chroma_h;
tff = yadif->parity^1;
}
- if (is_second)
+ if (is_second) {
yadif->out = avfilter_get_video_buffer(link, AV_PERM_WRITE | AV_PERM_PRESERVE |
AV_PERM_REUSE, link->w, link->h);
+ avfilter_copy_buffer_ref_props(yadif->out, yadif->cur);
+ yadif->out->video->interlaced = 0;
+ }
if (!yadif->csp)
yadif->csp = &av_pix_fmt_descriptors[link->format];
- if (yadif->csp->comp[0].depth_minus1 == 15)
+ if (yadif->csp->comp[0].depth_minus1 / 8 == 1)
yadif->filter_line = filter_line_c_16bit;
filter(ctx, yadif->out, tff ^ !is_second, tff);
if (!yadif->cur)
return;
+ if (yadif->auto_enable && !yadif->cur->video->interlaced) {
+ yadif->out = avfilter_ref_buffer(yadif->cur, AV_PERM_READ);
+ avfilter_unref_buffer(yadif->prev);
+ yadif->prev = NULL;
+ avfilter_start_frame(ctx->outputs[0], yadif->out);
+ return;
+ }
+
if (!yadif->prev)
yadif->prev = avfilter_ref_buffer(yadif->cur, AV_PERM_READ);
if (!yadif->out)
return;
+ if (yadif->auto_enable && !yadif->cur->video->interlaced) {
+ avfilter_draw_slice(ctx->outputs[0], 0, link->h, 1);
+ avfilter_end_frame(ctx->outputs[0]);
+ return;
+ }
+
return_frame(ctx, 0);
}
}
assert(yadif->next || !val);
+ if (yadif->auto_enable && yadif->next && !yadif->next->video->interlaced)
+ return val;
+
return val * ((yadif->mode&1)+1);
}
AV_NE( PIX_FMT_GRAY16BE, PIX_FMT_GRAY16LE ),
PIX_FMT_YUV440P,
PIX_FMT_YUVJ440P,
+ AV_NE( PIX_FMT_YUV420P10BE, PIX_FMT_YUV420P10LE ),
+ AV_NE( PIX_FMT_YUV422P10BE, PIX_FMT_YUV422P10LE ),
+ AV_NE( PIX_FMT_YUV444P10BE, PIX_FMT_YUV444P10LE ),
AV_NE( PIX_FMT_YUV420P16BE, PIX_FMT_YUV420P16LE ),
AV_NE( PIX_FMT_YUV422P16BE, PIX_FMT_YUV422P16LE ),
AV_NE( PIX_FMT_YUV444P16BE, PIX_FMT_YUV444P16LE ),
+ PIX_FMT_YUVA420P,
PIX_FMT_NONE
};
/**
 * Initialize the yadif filter context.
 *
 * Option string format: "mode:parity:auto_enable"
 *   mode        - 0/1 (frame/field rate output; low bit doubles output rate)
 *   parity      - 0: top field first, 1: bottom field first, -1: auto-detect
 *   auto_enable - 0: deinterlace all frames,
 *                 1: only deinterlace frames marked as interlaced
 * All three are optional; unspecified fields keep their defaults.
 *
 * @return 0 on success (option parsing is best-effort via sscanf).
 */
static av_cold int init(AVFilterContext *ctx, const char *args, void *opaque)
{
    YADIFContext *yadif = ctx->priv;
    int cpu_flags = av_get_cpu_flags();

    /* Defaults: single-rate output, auto parity, deinterlace everything. */
    yadif->mode        = 0;
    yadif->parity      = -1;
    yadif->auto_enable = 0;
    yadif->csp         = NULL; /* resolved lazily from the link's pixel format */

    if (args)
        sscanf(args, "%d:%d:%d", &yadif->mode, &yadif->parity, &yadif->auto_enable);

    /* Select the fastest filter_line implementation the CPU supports,
     * falling back to the portable C version. */
    yadif->filter_line = filter_line_c;
    if (HAVE_SSSE3 && cpu_flags & AV_CPU_FLAG_SSSE3)
        /* NOTE(review): this assignment was missing between the dangling
         * `if` and the `else if` in the source fragment; restored from the
         * obvious parallel with the MMX branch. Confirm against the full
         * file — an intermediate SSE2 tier may also exist there. */
        yadif->filter_line = ff_yadif_filter_line_ssse3;
    else if (HAVE_MMX && cpu_flags & AV_CPU_FLAG_MMX)
        yadif->filter_line = ff_yadif_filter_line_mmx;

    av_log(ctx, AV_LOG_INFO, "mode:%d parity:%d auto_enable:%d\n",
           yadif->mode, yadif->parity, yadif->auto_enable);

    return 0;
}