X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;f=libavfilter%2Fvf_yadif.c;h=f200cb14744da299507d7992a91b89b6ed161285;hb=cc8104b9086d2951a8c47601117275fadc70bc02;hp=46ae1ddcce9d82c12e7209f2a27d1f29bea49cd6;hpb=039e9fe01ca27606a0ec1a944d51041541e10aab;p=ffmpeg

diff --git a/libavfilter/vf_yadif.c b/libavfilter/vf_yadif.c
index 46ae1ddcce9..f200cb14744 100644
--- a/libavfilter/vf_yadif.c
+++ b/libavfilter/vf_yadif.c
@@ -112,6 +112,7 @@ static void filter(AVFilterContext *ctx, AVFilterBufferRef *dstpic,
         int w = dstpic->video->w;
         int h = dstpic->video->h;
         int refs = yadif->cur->linesize[i];
+        int absrefs = FFABS(refs);
         int df = (yadif->csp->comp[i].depth_minus1 + 8) / 8;
 
         if (i == 1 || i == 2) {
@@ -120,6 +121,12 @@ static void filter(AVFilterContext *ctx, AVFilterBufferRef *dstpic,
             h >>= yadif->csp->log2_chroma_h;
         }
 
+        if(yadif->temp_line_size < absrefs) {
+            av_free(yadif->temp_line);
+            yadif->temp_line = av_mallocz(2*64 + 5*absrefs);
+            yadif->temp_line_size = absrefs;
+        }
+
         for (y = 0; y < h; y++) {
             if ((y ^ parity) & 1) {
                 uint8_t *prev = &yadif->prev->data[i][y*refs];
@@ -127,41 +134,40 @@ static void filter(AVFilterContext *ctx, AVFilterBufferRef *dstpic,
                 uint8_t *next = &yadif->next->data[i][y*refs];
                 uint8_t *dst  = &dstpic->data[i][y*dstpic->linesize[i]];
                 int     mode  = y==1 || y+2==h ? 2 : yadif->mode;
-                yadif->filter_line(dst, prev, cur, next, w, y+1<h ? refs : -refs, y ? -refs : refs, parity ^ tff, mode);
+                int     prefs = y+1<h ? refs : -refs;
+                int     mrefs =     y ? -refs :  refs;
+
+                if(y<=1 || y+2>=h) {
+                    int j;
+                    uint8_t *tmp = yadif->temp_line + 64 + 2*absrefs;
+                    if(mode<2)
+                        memcpy(tmp+2*mrefs, cur+2*mrefs, w*df);
+                    memcpy(tmp+mrefs, cur+mrefs, w*df);
+                    memcpy(tmp      , cur      , w*df);
+                    if(prefs != mrefs) {
+                        memcpy(tmp+prefs, cur+prefs, w*df);
+                        if(mode<2)
+                            memcpy(tmp+2*prefs, cur+2*prefs, w*df);
+                    }
+                    cur = tmp;
+                }
+
+                yadif->filter_line(dst, prev, cur, next, w, prefs, mrefs, parity ^ tff, mode);
             } else {
                 memcpy(&dstpic->data[i][y*dstpic->linesize[i]],
                        &yadif->cur->data[i][y*refs], w*df);
             }
         }
     }
 
-#if HAVE_MMX
-    __asm__ volatile("emms \n\t" : : : "memory");
-#endif
-}
-
-static AVFilterBufferRef *get_video_buffer(AVFilterLink *link, int perms, int w, int h)
-{
-    AVFilterBufferRef *picref;
-    int width = FFALIGN(w, 32);
-    int height= FFALIGN(h+2, 32);
-    int i;
-
-    picref = ff_default_get_video_buffer(link, perms, width, height);
-
-    picref->video->w = w;
-    picref->video->h = h;
-    for (i = 0; i < 3; i++)
-        picref->data[i] += picref->linesize[i];
-
-    return picref;
+    emms_c();
 }
 
-static void return_frame(AVFilterContext *ctx, int is_second)
+static int return_frame(AVFilterContext *ctx, int is_second)
 {
     YADIFContext *yadif = ctx->priv;
     AVFilterLink *link= ctx->outputs[0];
-    int tff;
+    int tff, ret;
 
     if (yadif->parity == -1) {
         tff = yadif->cur->video->interlaced ?
@@ -173,6 +179,9 @@ static void return_frame(AVFilterContext *ctx, int is_second)
     if (is_second) {
         yadif->out = ff_get_video_buffer(link, AV_PERM_WRITE | AV_PERM_PRESERVE |
                                          AV_PERM_REUSE, link->w, link->h);
+        if (!yadif->out)
+            return AVERROR(ENOMEM);
+
         avfilter_copy_buffer_ref_props(yadif->out, yadif->cur);
         yadif->out->video->interlaced = 0;
     }
@@ -193,21 +202,30 @@ static void return_frame(AVFilterContext *ctx, int is_second)
         } else {
             yadif->out->pts = AV_NOPTS_VALUE;
         }
-        ff_start_frame(ctx->outputs[0], yadif->out);
+        ret = ff_start_frame(ctx->outputs[0], yadif->out);
+        if (ret < 0)
+            return ret;
     }
 
-    ff_draw_slice(ctx->outputs[0], 0, link->h, 1);
-    ff_end_frame(ctx->outputs[0]);
+    if ((ret = ff_draw_slice(ctx->outputs[0], 0, link->h, 1)) < 0 ||
+        (ret = ff_end_frame(ctx->outputs[0])) < 0)
+        return ret;
 
     yadif->frame_pending = (yadif->mode&1) && !is_second;
+    return 0;
 }
 
-static void start_frame(AVFilterLink *link, AVFilterBufferRef *picref)
+static int start_frame(AVFilterLink *link, AVFilterBufferRef *picref)
 {
     AVFilterContext *ctx = link->dst;
     YADIFContext *yadif = ctx->priv;
 
     av_assert0(picref);
 
+    if (picref->video->h < 3 || picref->video->w < 3) {
+        av_log(ctx, AV_LOG_ERROR, "Video of less than 3 columns or lines is not supported\n");
+        return AVERROR(EINVAL);
+    }
+
     if (yadif->frame_pending)
         return_frame(ctx, 1);
@@ -216,48 +234,55 @@ static void start_frame(AVFilterLink *link, AVFilterBufferRef *picref)
     yadif->prev = yadif->cur;
     yadif->cur  = yadif->next;
     yadif->next = picref;
+    link->cur_buf = NULL;
 
     if (!yadif->cur)
-        return;
+        return 0;
 
     if (yadif->auto_enable && !yadif->cur->video->interlaced) {
         yadif->out  = avfilter_ref_buffer(yadif->cur, AV_PERM_READ);
-        avfilter_unref_buffer(yadif->prev);
-        yadif->prev = NULL;
+        if (!yadif->out)
+            return AVERROR(ENOMEM);
+
+        avfilter_unref_bufferp(&yadif->prev);
         if (yadif->out->pts != AV_NOPTS_VALUE)
             yadif->out->pts *= 2;
-        ff_start_frame(ctx->outputs[0], yadif->out);
-        return;
+        return ff_start_frame(ctx->outputs[0], yadif->out);
    }
 
-    if (!yadif->prev)
-        yadif->prev = avfilter_ref_buffer(yadif->cur, AV_PERM_READ);
+    if (!yadif->prev &&
+        !(yadif->prev = avfilter_ref_buffer(yadif->cur, AV_PERM_READ)))
+        return AVERROR(ENOMEM);
 
     yadif->out = ff_get_video_buffer(ctx->outputs[0], AV_PERM_WRITE | AV_PERM_PRESERVE |
                                      AV_PERM_REUSE, link->w, link->h);
+    if (!yadif->out)
+        return AVERROR(ENOMEM);
     avfilter_copy_buffer_ref_props(yadif->out, yadif->cur);
     yadif->out->video->interlaced = 0;
 
     if (yadif->out->pts != AV_NOPTS_VALUE)
         yadif->out->pts *= 2;
-    ff_start_frame(ctx->outputs[0], yadif->out);
+    return ff_start_frame(ctx->outputs[0], yadif->out);
 }
 
-static void end_frame(AVFilterLink *link)
+static int end_frame(AVFilterLink *link)
 {
     AVFilterContext *ctx = link->dst;
     YADIFContext *yadif = ctx->priv;
 
     if (!yadif->out)
-        return;
+        return 0;
 
     if (yadif->auto_enable && !yadif->cur->video->interlaced) {
-        ff_draw_slice(ctx->outputs[0], 0, link->h, 1);
-        ff_end_frame(ctx->outputs[0]);
-        return;
+        int ret = ff_draw_slice(ctx->outputs[0], 0, link->h, 1);
+        if (ret >= 0)
+            ret = ff_end_frame(ctx->outputs[0]);
+        return ret;
    }
 
     return_frame(ctx, 0);
+    return 0;
 }
 
 static int request_frame(AVFilterLink *link)
@@ -280,6 +305,9 @@ static int request_frame(AVFilterLink *link)
         if (ret == AVERROR_EOF && yadif->cur) {
             AVFilterBufferRef *next = avfilter_ref_buffer(yadif->next, AV_PERM_READ);
 
+            if (!next)
+                return AVERROR(ENOMEM);
+
             next->pts = yadif->next->pts * 2 - yadif->cur->pts;
 
             start_frame(link->src->inputs[0], next);
@@ -324,9 +352,10 @@ static av_cold void uninit(AVFilterContext *ctx)
 {
     YADIFContext *yadif = ctx->priv;
 
-    if (yadif->prev) avfilter_unref_buffer(yadif->prev);
-    if (yadif->cur ) avfilter_unref_buffer(yadif->cur );
-    if (yadif->next) avfilter_unref_buffer(yadif->next);
+    if (yadif->prev) avfilter_unref_bufferp(&yadif->prev);
+    if (yadif->cur ) avfilter_unref_bufferp(&yadif->cur );
+    if (yadif->next) avfilter_unref_bufferp(&yadif->next);
+    av_freep(&yadif->temp_line); yadif->temp_line_size = 0;
 }
 
 static int query_formats(AVFilterContext *ctx)
@@ -382,15 +411,23 @@ static av_cold int init(AVFilterContext *ctx, const char *args)
     return 0;
 }
 
-static void null_draw_slice(AVFilterLink *link, int y, int h, int slice_dir) { }
+static int null_draw_slice(AVFilterLink *link, int y, int h, int slice_dir)
+{
+    return 0;
+}
 
 static int config_props(AVFilterLink *link)
 {
+    YADIFContext *yadif = link->src->priv;
+
     link->time_base.num = link->src->inputs[0]->time_base.num;
     link->time_base.den = link->src->inputs[0]->time_base.den * 2;
     link->w             = link->src->inputs[0]->w;
     link->h             = link->src->inputs[0]->h;
 
+    if(yadif->mode&1)
+        link->frame_rate = av_mul_q(link->src->inputs[0]->frame_rate, (AVRational){2,1});
+
     return 0;
 }
 
@@ -403,19 +440,18 @@ AVFilter avfilter_vf_yadif = {
     .uninit        = uninit,
     .query_formats = query_formats,
 
-    .inputs    = (const AVFilterPad[]) {{ .name             = "default",
-                                          .type             = AVMEDIA_TYPE_VIDEO,
-                                          .start_frame      = start_frame,
-                                          .get_video_buffer = get_video_buffer,
-                                          .draw_slice       = null_draw_slice,
-                                          .end_frame        = end_frame,
-                                          .rej_perms        = AV_PERM_REUSE2, },
-                                        { .name = NULL}},
-
-    .outputs   = (const AVFilterPad[]) {{ .name             = "default",
-                                          .type             = AVMEDIA_TYPE_VIDEO,
-                                          .poll_frame       = poll_frame,
-                                          .request_frame    = request_frame,
-                                          .config_props     = config_props, },
-                                        { .name = NULL}},
+    .inputs    = (const AVFilterPad[]) {{ .name          = "default",
+                                          .type          = AVMEDIA_TYPE_VIDEO,
+                                          .start_frame   = start_frame,
+                                          .draw_slice    = null_draw_slice,
+                                          .end_frame     = end_frame,
+                                          .rej_perms     = AV_PERM_REUSE2, },
+                                        { .name = NULL}},
+
+    .outputs   = (const AVFilterPad[]) {{ .name          = "default",
+                                          .type          = AVMEDIA_TYPE_VIDEO,
+                                          .poll_frame    = poll_frame,
+                                          .request_frame = request_frame,
+                                          .config_props  = config_props, },
+                                        { .name = NULL}},
 };
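
Note (not part of the patch): the following is a minimal standalone sketch of the border handling that the hunks above add to filter(), assuming a single tightly packed 8-bit plane (df == 1); the helper name build_border_line() and the main() driver are hypothetical. Near the top and bottom of the frame, the rows that the line filter will read are copied into a scratch buffer with 64 bytes of slack at each end, so small over-reads past the ends of a row stay inside memory the filter owns, which appears to be why the custom padded get_video_buffer() callback could be dropped in the same change.

/* Standalone sketch; build_border_line() is a hypothetical name. */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Copy the rows that the line filter will touch into a padded scratch
 * buffer and return a pointer that can be used in place of `cur`. */
static const uint8_t *build_border_line(uint8_t *temp_line, int absrefs,
                                        const uint8_t *cur, int w,
                                        int prefs, int mrefs, int mode)
{
    /* Centre of the scratch area: room for two rows above and below,
     * plus 64 bytes of slack at each end (mirrors the
     * av_mallocz(2*64 + 5*absrefs) allocation in the patch). */
    uint8_t *tmp = temp_line + 64 + 2 * absrefs;

    if (mode < 2)                   /* spatial check also reads rows two away */
        memcpy(tmp + 2 * mrefs, cur + 2 * mrefs, w);
    memcpy(tmp + mrefs, cur + mrefs, w);
    memcpy(tmp,         cur,         w);
    if (prefs != mrefs) {
        memcpy(tmp + prefs, cur + prefs, w);
        if (mode < 2)
            memcpy(tmp + 2 * prefs, cur + 2 * prefs, w);
    }
    return tmp;                     /* filter the row from tmp instead of cur */
}

int main(void)
{
    enum { W = 8, H = 4 };
    int refs = W, absrefs = refs;   /* linesize of a tightly packed 8-bit plane */
    uint8_t frame[W * H];
    uint8_t *temp_line = calloc(2 * 64 + 5 * absrefs, 1);

    for (int i = 0; i < W * H; i++)
        frame[i] = (uint8_t)i;

    /* Bottom row: both row references are clamped to point one row up,
     * just like prefs/mrefs in the patched filter(). */
    int y     = H - 1;
    int prefs = y + 1 < H ? refs : -refs;
    int mrefs = y         ? -refs :  refs;
    const uint8_t *cur = build_border_line(temp_line, absrefs,
                                           frame + y * refs, W,
                                           prefs, mrefs, 2 /* spatial check disabled */);

    printf("first pixel of the padded bottom row: %d\n", cur[0]);
    free(temp_line);
    return 0;
}

Built with a C99 compiler (e.g. cc -std=c99 sketch.c), this prints the first pixel of the copied bottom row, showing that reads relative to the returned pointer land in the scratch buffer rather than at the edge of the frame.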