X-Git-Url: https://git.sesse.net/?a=blobdiff_plain;ds=sidebyside;f=libavfilter%2Fvf_edgedetect.c;h=a5614ea63bba86ad0f478182b9fbaf0ca675680d;hb=c36e72ed279b072bce325c7a11a85020c0a310fc;hp=476c434349dda193d27fd83fbd9ac95569f6bcd9;hpb=0bcc66571aab7e49f77ad49004c183a4e15e337a;p=ffmpeg

diff --git a/libavfilter/vf_edgedetect.c b/libavfilter/vf_edgedetect.c
index 476c434349d..a5614ea63bb 100644
--- a/libavfilter/vf_edgedetect.c
+++ b/libavfilter/vf_edgedetect.c
@@ -52,6 +52,7 @@ struct plane_info {
     uint8_t  *tmpbuf;
     uint16_t *gradients;
     char     *directions;
+    int      width, height;
 };
 
 typedef struct EdgeDetectContext {
@@ -98,7 +99,7 @@ static int query_formats(AVFilterContext *ctx)
 {
     const EdgeDetectContext *edgedetect = ctx->priv;
     static const enum AVPixelFormat wires_pix_fmts[] = {AV_PIX_FMT_GRAY8, AV_PIX_FMT_NONE};
-    static const enum AVPixelFormat canny_pix_fmts[] = {AV_PIX_FMT_YUV444P, AV_PIX_FMT_GBRP, AV_PIX_FMT_GRAY8, AV_PIX_FMT_NONE};
+    static const enum AVPixelFormat canny_pix_fmts[] = {AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_GBRP, AV_PIX_FMT_GRAY8, AV_PIX_FMT_NONE};
     static const enum AVPixelFormat colormix_pix_fmts[] = {AV_PIX_FMT_GBRP, AV_PIX_FMT_GRAY8, AV_PIX_FMT_NONE};
     AVFilterFormats *fmts_list;
     const enum AVPixelFormat *pix_fmts = NULL;
@@ -123,14 +124,19 @@ static int config_props(AVFilterLink *inlink)
     int p;
     AVFilterContext *ctx = inlink->dst;
     EdgeDetectContext *edgedetect = ctx->priv;
+    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
 
     edgedetect->nb_planes = inlink->format == AV_PIX_FMT_GRAY8 ? 1 : 3;
     for (p = 0; p < edgedetect->nb_planes; p++) {
         struct plane_info *plane = &edgedetect->planes[p];
-
-        plane->tmpbuf     = av_malloc(inlink->w * inlink->h);
-        plane->gradients  = av_calloc(inlink->w * inlink->h, sizeof(*plane->gradients));
-        plane->directions = av_malloc(inlink->w * inlink->h);
+        int vsub = p ? desc->log2_chroma_h : 0;
+        int hsub = p ? desc->log2_chroma_w : 0;
+
+        plane->width      = AV_CEIL_RSHIFT(inlink->w, hsub);
+        plane->height     = AV_CEIL_RSHIFT(inlink->h, vsub);
+        plane->tmpbuf     = av_malloc(plane->width * plane->height);
+        plane->gradients  = av_calloc(plane->width * plane->height, sizeof(*plane->gradients));
+        plane->directions = av_malloc(plane->width * plane->height);
         if (!plane->tmpbuf || !plane->gradients || !plane->directions)
             return AVERROR(ENOMEM);
     }
@@ -144,10 +150,13 @@ static void gaussian_blur(AVFilterContext *ctx, int w, int h,
     int i, j;
 
     memcpy(dst, src, w); dst += dst_linesize; src += src_linesize;
-    memcpy(dst, src, w); dst += dst_linesize; src += src_linesize;
+    if (h > 1) {
+        memcpy(dst, src, w); dst += dst_linesize; src += src_linesize;
+    }
     for (j = 2; j < h - 2; j++) {
         dst[0] = src[0];
-        dst[1] = src[1];
+        if (w > 1)
+            dst[1] = src[1];
         for (i = 2; i < w - 2; i++) {
             /* Gaussian mask of size 5x5 with sigma = 1.4 */
             dst[i] = ((src[-2*src_linesize + i-2] + src[2*src_linesize + i-2]) * 2
@@ -168,14 +177,19 @@ static void gaussian_blur(AVFilterContext *ctx, int w, int h,
                     + src[i+1] * 12
                     + src[i+2] *  5) / 159;
         }
-        dst[i    ] = src[i    ];
-        dst[i + 1] = src[i + 1];
+        if (w > 2)
+            dst[i    ] = src[i    ];
+        if (w > 3)
+            dst[i + 1] = src[i + 1];
         dst += dst_linesize;
         src += src_linesize;
     }
-    memcpy(dst, src, w); dst += dst_linesize; src += src_linesize;
-    memcpy(dst, src, w);
+    if (h > 2) {
+        memcpy(dst, src, w); dst += dst_linesize; src += src_linesize;
+    }
+    if (h > 3)
+        memcpy(dst, src, w);
 }
 
 enum {
@@ -202,7 +216,7 @@ static int get_rounded_direction(int gx, int gy)
 
         if (gx < 0)
             gx = -gx, gy = -gy;
-        gy <<= 16;
+        gy *= (1 << 16);
         tanpi8gx  =  27146 * gx;
         tan3pi8gx = 158218 * gx;
         if (gy > -tan3pi8gx && gy < -tanpi8gx)  return DIRECTION_45UP;
@@ -338,42 +352,44 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in)
         uint8_t  *tmpbuf     = plane->tmpbuf;
         uint16_t *gradients  = plane->gradients;
         int8_t   *directions = plane->directions;
+        const int width      = plane->width;
+        const int height     = plane->height;
 
         if (!((1 << p) & edgedetect->filter_planes)) {
             if (!direct)
                 av_image_copy_plane(out->data[p], out->linesize[p],
                                     in->data[p], in->linesize[p],
-                                    inlink->w, inlink->h);
+                                    width, height);
             continue;
         }
 
         /* gaussian filter to reduce noise */
-        gaussian_blur(ctx, inlink->w, inlink->h,
-                      tmpbuf,      inlink->w,
+        gaussian_blur(ctx, width, height,
+                      tmpbuf,      width,
                       in->data[p], in->linesize[p]);
 
         /* compute the 16-bits gradients and directions for the next step */
-        sobel(inlink->w, inlink->h,
-              gradients, inlink->w,
-              directions,inlink->w,
-              tmpbuf,    inlink->w);
+        sobel(width, height,
+              gradients, width,
+              directions,width,
+              tmpbuf,    width);
 
         /* non_maximum_suppression() will actually keep & clip what's necessary and
         * ignore the rest, so we need a clean output buffer */
-        memset(tmpbuf, 0, inlink->w * inlink->h);
-        non_maximum_suppression(inlink->w, inlink->h,
-                                tmpbuf,    inlink->w,
-                                directions,inlink->w,
-                                gradients, inlink->w);
+        memset(tmpbuf, 0, width * height);
+        non_maximum_suppression(width, height,
+                                tmpbuf,    width,
+                                directions,width,
+                                gradients, width);
 
         /* keep high values, or low values surrounded by high values */
         double_threshold(edgedetect->low_u8, edgedetect->high_u8,
-                         inlink->w, inlink->h,
+                         width, height,
                          out->data[p], out->linesize[p],
-                         tmpbuf,       inlink->w);
+                         tmpbuf,       width);
 
         if (edgedetect->mode == MODE_COLORMIX) {
-            color_mix(inlink->w, inlink->h,
+            color_mix(width, height,
                       out->data[p], out->linesize[p],
                       in->data[p], in->linesize[p]);
         }
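
The per-plane sizing introduced in config_props() relies on AV_CEIL_RSHIFT rounding a frame dimension up when it is not evenly divisible by the chroma subsampling factor. Below is a minimal standalone sketch of that arithmetic, not part of the patch: the local CEIL_RSHIFT macro stands in for FFmpeg's AV_CEIL_RSHIFT, and the 1920x1081 YUV420P frame is a made-up size chosen so the rounding is visible.

/* Standalone sketch (not from the patch): how the chroma shift values from a
 * pixel format descriptor turn into per-plane buffer dimensions. */
#include <stdio.h>

/* Mirrors the behaviour of FFmpeg's AV_CEIL_RSHIFT: right shift, rounding up. */
#define CEIL_RSHIFT(a, b) (((a) + (1 << (b)) - 1) >> (b))

int main(void)
{
    const int w = 1920, h = 1081;   /* odd height: not divisible by 2 */
    const int log2_chroma_w = 1;    /* YUV420P: chroma halved horizontally */
    const int log2_chroma_h = 1;    /* ... and vertically */

    for (int p = 0; p < 3; p++) {
        const int hsub = p ? log2_chroma_w : 0;  /* plane 0 (luma) is full size */
        const int vsub = p ? log2_chroma_h : 0;
        printf("plane %d: %d x %d\n", p,
               CEIL_RSHIFT(w, hsub), CEIL_RSHIFT(h, vsub));
        /* prints: plane 0: 1920 x 1081, planes 1 and 2: 960 x 541
         * (541 rather than 540, because the shift rounds up) */
    }
    return 0;
}

Sizing tmpbuf, gradients and directions from these per-plane dimensions instead of inlink->w * inlink->h is what makes it safe to add the subsampled AV_PIX_FMT_YUV420P and AV_PIX_FMT_YUV422P entries to canny_pix_fmts in query_formats().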