2 * Copyright (C) 2007 by Andrew Zabolotny (author of lensfun, from which this filter is derived)
3 * Copyright (C) 2018 Stephen Seo
5 * This file is part of FFmpeg.
7 * This program is free software: you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation, either version 3 of the License, or
10 * (at your option) any later version.
12 * This program is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU General Public License for more details.
17 * You should have received a copy of the GNU General Public License
18 * along with this program. If not, see <https://www.gnu.org/licenses/>.
23 * Lensfun filter, applies lens correction with parameters from the lensfun database
25 * @see https://lensfun.sourceforge.net/
31 #include "libavutil/avassert.h"
32 #include "libavutil/imgutils.h"
33 #include "libavutil/opt.h"
34 #include "libswscale/swscale.h"
// Number of table samples per unit of (squared, scaled) distance used when
// precomputing the Lanczos kernel lookup table in config_props().
42 #define LANCZOS_RESOLUTION 256
// OR-able correction-mode flags for the "mode" option (the VIGNETTING
// member is declared on a line not visible here; see lensfun_options).
46     GEOMETRY_DISTORTION = 0x2,
47     SUBPIXEL_DISTORTION = 0x4
// Resampling methods for the "interpolation" option; members
// NEAREST/LINEAR/LANCZOS are declared on elided lines (see lensfun_options).
50 enum InterpolationType {
// Per-frame context handed to vignetting_filter_slice(); shared read-only by
// all slice threads (further members are declared on elided lines).
56 typedef struct VignettingThreadData {
// lensfun pixel-layout code, e.g. LF_CR_3(RED, GREEN, BLUE) for packed RGB24
60     int pixel_composition;
62 } VignettingThreadData;
// Per-frame context handed to distortion_correction_filter_slice(); shared
// read-only by all slice threads (further members are on elided lines).
64 typedef struct DistortionCorrectionThreadData {
// precomputed source coordinates: 2 floats per pixel per channel when
// SUBPIXEL_DISTORTION is set, otherwise 2 floats per pixel
66     const float *distortion_coords;
67     const uint8_t *data_in;
69     int linesize_in, linesize_out;
// precomputed Lanczos kernel table (only used when interpolation_type == LANCZOS)
70     const float *interpolation;
72     int interpolation_type;
73 } DistortionCorrectionThreadData;
// Filter private context; populated from the AVOptions below and from the
// lensfun database in init() (several members are on elided lines).
75 typedef struct LensfunContext {
// user-supplied camera maker/model and lens model, matched against the db
77     const char *make, *model, *lens_model;
84     int interpolation_type;
// per-pixel remap table computed once in config_props()
86     float *distortion_coords;
94 #define OFFSET(x) offsetof(LensfunContext, x)
95 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
// Option table: make/model/lens_model are mandatory (checked in init());
// "mode" is a bitmask of VIGNETTING/GEOMETRY_DISTORTION/SUBPIXEL_DISTORTION.
// The terminating { NULL } entry is on an elided line.
96 static const AVOption lensfun_options[] = {
97     { "make", "set camera maker", OFFSET(make), AV_OPT_TYPE_STRING, {.str=NULL}, 0, 0, FLAGS },
98     { "model", "set camera model", OFFSET(model), AV_OPT_TYPE_STRING, {.str=NULL}, 0, 0, FLAGS },
99     { "lens_model", "set lens model", OFFSET(lens_model), AV_OPT_TYPE_STRING, {.str=NULL}, 0, 0, FLAGS },
100     { "mode", "set mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=GEOMETRY_DISTORTION}, 0, VIGNETTING | GEOMETRY_DISTORTION | SUBPIXEL_DISTORTION, FLAGS, "mode" },
101     { "vignetting", "fix lens vignetting", 0, AV_OPT_TYPE_CONST, {.i64=VIGNETTING}, 0, 0, FLAGS, "mode" },
102     { "geometry", "correct geometry distortion", 0, AV_OPT_TYPE_CONST, {.i64=GEOMETRY_DISTORTION}, 0, 0, FLAGS, "mode" },
103     { "subpixel", "fix chromatic aberrations", 0, AV_OPT_TYPE_CONST, {.i64=SUBPIXEL_DISTORTION}, 0, 0, FLAGS, "mode" },
104     { "vig_geo", "fix lens vignetting and correct geometry distortion", 0, AV_OPT_TYPE_CONST, {.i64=VIGNETTING | GEOMETRY_DISTORTION}, 0, 0, FLAGS, "mode" },
105     { "vig_subpixel", "fix lens vignetting and chromatic aberrations", 0, AV_OPT_TYPE_CONST, {.i64=VIGNETTING | SUBPIXEL_DISTORTION}, 0, 0, FLAGS, "mode" },
106     { "distortion", "correct geometry distortion and chromatic aberrations", 0, AV_OPT_TYPE_CONST, {.i64=GEOMETRY_DISTORTION | SUBPIXEL_DISTORTION}, 0, 0, FLAGS, "mode" },
107     { "all", NULL, 0, AV_OPT_TYPE_CONST, {.i64=VIGNETTING | GEOMETRY_DISTORTION | SUBPIXEL_DISTORTION}, 0, 0, FLAGS, "mode" },
108     { "focal_length", "focal length of video (zoom; constant for the duration of the use of this filter)", OFFSET(focal_length), AV_OPT_TYPE_FLOAT, {.dbl=18}, 0.0, DBL_MAX, FLAGS },
109     { "aperture", "aperture (constant for the duration of the use of this filter)", OFFSET(aperture), AV_OPT_TYPE_FLOAT, {.dbl=3.5}, 0.0, DBL_MAX, FLAGS },
110     { "focus_distance", "focus distance (constant for the duration of the use of this filter)", OFFSET(focus_distance), AV_OPT_TYPE_FLOAT, {.dbl=1000.0f}, 0.0, DBL_MAX, FLAGS },
111     { "target_geometry", "target geometry of the lens correction (only when geometry correction is enabled)", OFFSET(target_geometry), AV_OPT_TYPE_INT, {.i64=LF_RECTILINEAR}, 0, INT_MAX, FLAGS, "lens_geometry" },
112     { "rectilinear", "rectilinear lens (default)", 0, AV_OPT_TYPE_CONST, {.i64=LF_RECTILINEAR}, 0, 0, FLAGS, "lens_geometry" },
113     { "fisheye", "fisheye lens", 0, AV_OPT_TYPE_CONST, {.i64=LF_FISHEYE}, 0, 0, FLAGS, "lens_geometry" },
114     { "panoramic", "panoramic (cylindrical)", 0, AV_OPT_TYPE_CONST, {.i64=LF_PANORAMIC}, 0, 0, FLAGS, "lens_geometry" },
115     { "equirectangular", "equirectangular", 0, AV_OPT_TYPE_CONST, {.i64=LF_EQUIRECTANGULAR}, 0, 0, FLAGS, "lens_geometry" },
116     { "fisheye_orthographic", "orthographic fisheye", 0, AV_OPT_TYPE_CONST, {.i64=LF_FISHEYE_ORTHOGRAPHIC}, 0, 0, FLAGS, "lens_geometry" },
117     { "fisheye_stereographic", "stereographic fisheye", 0, AV_OPT_TYPE_CONST, {.i64=LF_FISHEYE_STEREOGRAPHIC}, 0, 0, FLAGS, "lens_geometry" },
118     { "fisheye_equisolid", "equisolid fisheye", 0, AV_OPT_TYPE_CONST, {.i64=LF_FISHEYE_EQUISOLID}, 0, 0, FLAGS, "lens_geometry" },
119     { "fisheye_thoby", "fisheye as measured by thoby", 0, AV_OPT_TYPE_CONST, {.i64=LF_FISHEYE_THOBY}, 0, 0, FLAGS, "lens_geometry" },
120     { "reverse", "Does reverse correction (regular image to lens distorted)", OFFSET(reverse), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
121     { "interpolation", "Type of interpolation", OFFSET(interpolation_type), AV_OPT_TYPE_INT, {.i64=LINEAR}, 0, LANCZOS, FLAGS, "interpolation" },
122     { "nearest", NULL, 0, AV_OPT_TYPE_CONST, {.i64=NEAREST}, 0, 0, FLAGS, "interpolation" },
123     { "linear", NULL, 0, AV_OPT_TYPE_CONST, {.i64=LINEAR}, 0, 0, FLAGS, "interpolation" },
124     { "lanczos", NULL, 0, AV_OPT_TYPE_CONST, {.i64=LANCZOS}, 0, 0, FLAGS, "interpolation" },
// generates the lensfun_class used by .priv_class below
128 AVFILTER_DEFINE_CLASS(lensfun);
/**
 * Validate the mandatory make/model/lens_model options, load the lensfun
 * database, and copy the matching camera and lens descriptions into the
 * filter context.  Returns 0 on success, AVERROR(EINVAL) for missing
 * options, AVERROR_INVALIDDATA when the database or a match fails.
 * NOTE(review): braces, else branches and the db handle's creation/free
 * are on lines not visible in this excerpt.
 */
130 static av_cold int init(AVFilterContext *ctx)
132     LensfunContext *lensfun = ctx->priv;
134     const lfCamera **cameras;
135     const lfLens **lenses;
// all three identification options default to NULL, so each must be given
137     if (!lensfun->make) {
138         av_log(ctx, AV_LOG_FATAL, "Option \"make\" not specified\n");
139         return AVERROR(EINVAL);
140     } else if (!lensfun->model) {
141         av_log(ctx, AV_LOG_FATAL, "Option \"model\" not specified\n");
142         return AVERROR(EINVAL);
143     } else if (!lensfun->lens_model) {
144         av_log(ctx, AV_LOG_FATAL, "Option \"lens_model\" not specified\n");
145         return AVERROR(EINVAL);
// allocate empty lens/camera objects that the db matches are copied into
148     lensfun->lens = lf_lens_new();
149     lensfun->camera = lf_camera_new();
// load the system lensfun database (db presumably created on an elided line)
152     if (lf_db_load(db) != LF_NO_ERROR) {
154         av_log(ctx, AV_LOG_FATAL, "Failed to load lensfun database\n");
155         return AVERROR_INVALIDDATA;
// find_cameras returns a NULL-terminated list; use the first match
158     cameras = lf_db_find_cameras(db, lensfun->make, lensfun->model);
159     if (cameras && *cameras) {
160         lf_camera_copy(lensfun->camera, *cameras);
161         av_log(ctx, AV_LOG_INFO, "Using camera %s\n", lensfun->camera->Model);
165         av_log(ctx, AV_LOG_FATAL, "Failed to find camera in lensfun database\n");
166         return AVERROR_INVALIDDATA;
// "hd" variant does fuzzy matching of the lens model string; first match wins
170     lenses = lf_db_find_lenses_hd(db, lensfun->camera, NULL, lensfun->lens_model, 0);
171     if (lenses && *lenses) {
172         lf_lens_copy(lensfun->lens, *lenses);
173         av_log(ctx, AV_LOG_INFO, "Using lens %s\n", lensfun->lens->Model);
177         av_log(ctx, AV_LOG_FATAL, "Failed to find lens in lensfun database\n");
178         return AVERROR_INVALIDDATA;
186 static int query_formats(AVFilterContext *ctx)
188 // Some of the functions provided by lensfun require pixels in RGB format
189 static const enum AVPixelFormat fmts[] = {AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE};
190 AVFilterFormats *fmts_list = ff_make_format_list(fmts);
191 return ff_set_common_formats(ctx, fmts_list);
194 static float lanczos_kernel(float x)
198 } else if (x > -2.0f && x < 2.0f) {
199 return (2.0f * sin(M_PI * x) * sin(M_PI / 2.0f * x)) / (M_PI * M_PI * x * x);
/**
 * Per-link setup: lazily create and initialize the lf_modifier from the
 * matched camera/lens and the user options, then precompute (a) the
 * per-pixel source-coordinate table for the requested distortion modes and
 * (b) the Lanczos kernel lookup table when LANCZOS interpolation is chosen.
 * Returns 0 or AVERROR(ENOMEM).
 * NOTE(review): several argument lines of the lf_modifier_* calls and the
 * closing braces are on lines not visible in this excerpt.
 */
205 static int config_props(AVFilterLink *inlink)
207     AVFilterContext *ctx = inlink->dst;
208     LensfunContext *lensfun = ctx->priv;
211     int lensfun_mode = 0;
// one-time setup: the modifier survives for the lifetime of the filter
213     if (!lensfun->modifier) {
214         if (lensfun->camera && lensfun->lens) {
215             lensfun->modifier = lf_modifier_new(lensfun->lens,
216                                                 lensfun->camera->CropFactor,
// translate this filter's mode bitmask into lensfun's LF_MODIFY_* flags
219             if (lensfun->mode & VIGNETTING)
220                 lensfun_mode |= LF_MODIFY_VIGNETTING;
221             if (lensfun->mode & GEOMETRY_DISTORTION)
222                 lensfun_mode |= LF_MODIFY_DISTORTION | LF_MODIFY_GEOMETRY | LF_MODIFY_SCALE;
223             if (lensfun->mode & SUBPIXEL_DISTORTION)
224                 lensfun_mode |= LF_MODIFY_TCA;
225             lf_modifier_initialize(lensfun->modifier,
228                                    lensfun->focal_length,
230                                    lensfun->focus_distance,
232                                    lensfun->target_geometry,
236             // lensfun->camera and lensfun->lens should have been initialized
// precompute the remap table once; frame size is fixed per link
241     if (!lensfun->distortion_coords) {
242         if (lensfun->mode & SUBPIXEL_DISTORTION) {
// 2 coordinates (x,y) per pixel per RGB channel -> 6 floats per pixel
243             lensfun->distortion_coords = av_malloc_array(inlink->w * inlink->h, sizeof(float) * 2 * 3);
244             if (!lensfun->distortion_coords)
245                 return AVERROR(ENOMEM);
246             if (lensfun->mode & GEOMETRY_DISTORTION) {
247                 // apply both geometry and subpixel distortion
248                 lf_modifier_apply_subpixel_geometry_distortion(lensfun->modifier,
250                                                                inlink->w, inlink->h,
251                                                                lensfun->distortion_coords);
253                 // apply only subpixel distortion
254                 lf_modifier_apply_subpixel_distortion(lensfun->modifier,
256                                                       inlink->w, inlink->h,
257                                                       lensfun->distortion_coords);
259         } else if (lensfun->mode & GEOMETRY_DISTORTION) {
// 2 coordinates (x,y) per pixel, shared by all channels
260             lensfun->distortion_coords = av_malloc_array(inlink->w * inlink->h, sizeof(float) * 2);
261             if (!lensfun->distortion_coords)
262                 return AVERROR(ENOMEM);
263             // apply only geometry distortion
264             lf_modifier_apply_geometry_distortion(lensfun->modifier,
266                                                   inlink->w, inlink->h,
267                                                   lensfun->distortion_coords);
// Lanczos table: indexed by (d * LANCZOS_RESOLUTION) where d is a product of
// squared distances, hence the sqrtf() when sampling the kernel below
271     if (!lensfun->interpolation)
272         if (lensfun->interpolation_type == LANCZOS) {
273             lensfun->interpolation = av_malloc_array(LANCZOS_RESOLUTION, sizeof(float) * 4);
274             if (!lensfun->interpolation)
275                 return AVERROR(ENOMEM);
276             for (index = 0; index < 4 * LANCZOS_RESOLUTION; ++index) {
278                     lensfun->interpolation[index] = 1.0f;
280                     a = sqrtf((float)index / LANCZOS_RESOLUTION);
281                     lensfun->interpolation[index] = lanczos_kernel(a);
/**
 * Slice worker for devignetting: applies lensfun's color modification in
 * place on this job's band of rows.  Runs via ctx->internal->execute from
 * filter_frame(); the `return 0;` and some argument lines are elided here.
 */
289 static int vignetting_filter_slice(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
291     const VignettingThreadData *thread_data = arg;
// split the image height evenly across nb_jobs workers
292     const int slice_start = thread_data->height * jobnr / nb_jobs;
293     const int slice_end = thread_data->height * (jobnr + 1) / nb_jobs;
// in-place: input pointer offset to this slice's first row; row stride passed last
295     lf_modifier_apply_color_modification(thread_data->modifier,
296                                          thread_data->data_in + slice_start * thread_data->linesize_in,
300                                          slice_end - slice_start,
301                                          thread_data->pixel_composition,
302                                          thread_data->linesize_in);
// Small helper for the squared-distance terms fed into the Lanczos table
// lookup below (body on an elided line; presumably returns x * x).
307 static float square(float x)
/**
 * Slice worker for geometry/TCA correction: for each output pixel and RGB
 * channel, look up the precomputed source coordinate and resample from
 * data_in into data_out using the selected interpolation.  Source samples
 * outside the frame produce 0 (black).  Coordinate layout: with
 * SUBPIXEL_DISTORTION the table holds 6 floats per pixel (x,y per channel);
 * with geometry-only it holds 2 floats per pixel shared by all channels.
 * NOTE(review): case labels, some else lines and closing braces are on
 * lines not visible in this excerpt.
 */
312 static int distortion_correction_filter_slice(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
314     const DistortionCorrectionThreadData *thread_data = arg;
315     const int slice_start = thread_data->height * jobnr / nb_jobs;
316     const int slice_end = thread_data->height * (jobnr + 1) / nb_jobs;
318     int x, y, i, j, rgb_index;
319     float interpolated, new_x, new_y, d, norm;
320     int new_x_int, new_y_int;
321     for (y = slice_start; y < slice_end; ++y)
322         for (x = 0; x < thread_data->width; ++x)
323             for (rgb_index = 0; rgb_index < 3; ++rgb_index) {
324                 if (thread_data->mode & SUBPIXEL_DISTORTION) {
325                     // subpixel (and possibly geometry) distortion correction was applied, correct distortion
326                     switch(thread_data->interpolation_type) {
// --- NEAREST: +0.5f rounds to the nearest source pixel; zero-fill outside
328                             new_x_int = thread_data->distortion_coords[x * 2 * 3 + y * thread_data->width * 2 * 3 + rgb_index * 2] + 0.5f;
329                             new_y_int = thread_data->distortion_coords[x * 2 * 3 + y * thread_data->width * 2 * 3 + rgb_index * 2 + 1] + 0.5f;
330                             if (new_x_int < 0 || new_x_int >= thread_data->width || new_y_int < 0 || new_y_int >= thread_data->height) {
331                                 thread_data->data_out[x * 3 + rgb_index + y * thread_data->linesize_out] = 0;
333                                 thread_data->data_out[x * 3 + rgb_index + y * thread_data->linesize_out] = thread_data->data_in[new_x_int * 3 + rgb_index + new_y_int * thread_data->linesize_in];
// --- LINEAR: bilinear blend of the 2x2 neighborhood around (new_x, new_y)
338                             new_x = thread_data->distortion_coords[x * 2 * 3 + y * thread_data->width * 2 * 3 + rgb_index * 2];
340                             new_y = thread_data->distortion_coords[x * 2 * 3 + y * thread_data->width * 2 * 3 + rgb_index * 2 + 1];
342                             if (new_x_int < 0 || new_x_int + 1 >= thread_data->width || new_y_int < 0 || new_y_int + 1 >= thread_data->height) {
343                                 thread_data->data_out[x * 3 + rgb_index + y * thread_data->linesize_out] = 0;
345                                 thread_data->data_out[x * 3 + rgb_index + y * thread_data->linesize_out] =
// weights are the opposite-corner areas of the unit square
346                                       thread_data->data_in[ new_x_int      * 3 + rgb_index +  new_y_int      * thread_data->linesize_in] * (new_x_int + 1 - new_x) * (new_y_int + 1 - new_y)
347                                     + thread_data->data_in[(new_x_int + 1) * 3 + rgb_index +  new_y_int      * thread_data->linesize_in] * (new_x - new_x_int)     * (new_y_int + 1 - new_y)
348                                     + thread_data->data_in[ new_x_int      * 3 + rgb_index + (new_y_int + 1) * thread_data->linesize_in] * (new_x_int + 1 - new_x) * (new_y - new_y_int)
349                                     + thread_data->data_in[(new_x_int + 1) * 3 + rgb_index + (new_y_int + 1) * thread_data->linesize_in] * (new_x - new_x_int)     * (new_y - new_y_int);
// --- LANCZOS: 4x4 neighborhood weighted via the precomputed kernel table;
// out-of-frame taps are skipped, the weight sum (norm) renormalizes the result
355                             new_x = thread_data->distortion_coords[x * 2 * 3 + y * thread_data->width * 2 * 3 + rgb_index * 2];
357                             new_y = thread_data->distortion_coords[x * 2 * 3 + y * thread_data->width * 2 * 3 + rgb_index * 2 + 1];
359                             for (j = 0; j < 4; ++j)
360                                 for (i = 0; i < 4; ++i) {
361                                     if (new_x_int + i - 2 < 0 || new_x_int + i - 2 >= thread_data->width || new_y_int + j - 2 < 0 || new_y_int + j - 2 >= thread_data->height)
// d is a product of squared axis distances; table maps it to a kernel weight
363                                     d = square(new_x - (new_x_int + i - 2)) * square(new_y - (new_y_int + j - 2));
366                                     d = thread_data->interpolation[(int)(d * LANCZOS_RESOLUTION)];
368                                     interpolated += thread_data->data_in[(new_x_int + i - 2) * 3 + rgb_index + (new_y_int + j - 2) * thread_data->linesize_in] * d;
// norm == 0 means every tap fell outside the frame
371                                 thread_data->data_out[x * 3 + rgb_index + y * thread_data->linesize_out] = 0;
373                                 interpolated /= norm;
// clamp to the valid 8-bit range before the implicit narrowing store
374                                 thread_data->data_out[x * 3 + rgb_index + y * thread_data->linesize_out] = interpolated < 0.0f ? 0.0f : interpolated > 255.0f ? 255.0f : interpolated;
378                 } else if (thread_data->mode & GEOMETRY_DISTORTION) {
379                     // geometry distortion correction was applied, correct distortion
// same three interpolation paths as above, but the coordinate table has a
// stride of 2 floats per pixel (no per-channel entries)
380                     switch(thread_data->interpolation_type) {
382                             new_x_int = thread_data->distortion_coords[x * 2 + y * thread_data->width * 2] + 0.5f;
383                             new_y_int = thread_data->distortion_coords[x * 2 + y * thread_data->width * 2 + 1] + 0.5f;
384                             if (new_x_int < 0 || new_x_int >= thread_data->width || new_y_int < 0 || new_y_int >= thread_data->height) {
385                                 thread_data->data_out[x * 3 + rgb_index + y * thread_data->linesize_out] = 0;
387                                 thread_data->data_out[x * 3 + rgb_index + y * thread_data->linesize_out] = thread_data->data_in[new_x_int * 3 + rgb_index + new_y_int * thread_data->linesize_in];
392                             new_x = thread_data->distortion_coords[x * 2 + y * thread_data->width * 2];
394                             new_y = thread_data->distortion_coords[x * 2 + y * thread_data->width * 2 + 1];
396                             if (new_x_int < 0 || new_x_int + 1 >= thread_data->width || new_y_int < 0 || new_y_int + 1 >= thread_data->height) {
397                                 thread_data->data_out[x * 3 + rgb_index + y * thread_data->linesize_out] = 0;
399                                 thread_data->data_out[x * 3 + rgb_index + y * thread_data->linesize_out] =
400                                       thread_data->data_in[ new_x_int      * 3 + rgb_index +  new_y_int      * thread_data->linesize_in] * (new_x_int + 1 - new_x) * (new_y_int + 1 - new_y)
401                                     + thread_data->data_in[(new_x_int + 1) * 3 + rgb_index +  new_y_int      * thread_data->linesize_in] * (new_x - new_x_int)     * (new_y_int + 1 - new_y)
402                                     + thread_data->data_in[ new_x_int      * 3 + rgb_index + (new_y_int + 1) * thread_data->linesize_in] * (new_x_int + 1 - new_x) * (new_y - new_y_int)
403                                     + thread_data->data_in[(new_x_int + 1) * 3 + rgb_index + (new_y_int + 1) * thread_data->linesize_in] * (new_x - new_x_int)     * (new_y - new_y_int);
409                             new_x = thread_data->distortion_coords[x * 2 + y * thread_data->width * 2];
// same value as [x*2 + y*w*2 + 1] above, just with the +1 written first
411                             new_y = thread_data->distortion_coords[x * 2 + 1 + y * thread_data->width * 2];
413                             for (j = 0; j < 4; ++j)
414                                 for (i = 0; i < 4; ++i) {
415                                     if (new_x_int + i - 2 < 0 || new_x_int + i - 2 >= thread_data->width || new_y_int + j - 2 < 0 || new_y_int + j - 2 >= thread_data->height)
417                                     d = square(new_x - (new_x_int + i - 2)) * square(new_y - (new_y_int + j - 2));
420                                     d = thread_data->interpolation[(int)(d * LANCZOS_RESOLUTION)];
422                                     interpolated += thread_data->data_in[(new_x_int + i - 2) * 3 + rgb_index + (new_y_int + j - 2) * thread_data->linesize_in] * d;
425                                 thread_data->data_out[x * 3 + rgb_index + y * thread_data->linesize_out] = 0;
427                                 interpolated /= norm;
428                                 thread_data->data_out[x * 3 + rgb_index + y * thread_data->linesize_out] = interpolated < 0.0f ? 0.0f : interpolated > 255.0f ? 255.0f : interpolated;
433                     // no distortion correction was applied
434                     thread_data->data_out[x * 3 + rgb_index + y * thread_data->linesize_out] = thread_data->data_in[x * 3 + rgb_index + y * thread_data->linesize_in];
/**
 * Per-frame entry point: optionally devignette the frame in place, then, if
 * any distortion mode is enabled, remap it into a fresh output buffer and
 * forward that; otherwise forward the (possibly devignetted) input frame.
 * NOTE(review): closing braces, `out` declaration and the av_frame_free(&in)
 * on the distortion path are on lines not visible in this excerpt.
 */
441 static int filter_frame(AVFilterLink *inlink, AVFrame *in)
443     AVFilterContext *ctx = inlink->dst;
444     LensfunContext *lensfun = ctx->priv;
445     AVFilterLink *outlink = ctx->outputs[0];
447     VignettingThreadData vignetting_thread_data;
448     DistortionCorrectionThreadData distortion_correction_thread_data;
450     if (lensfun->mode & VIGNETTING) {
// NOTE(review): return value ignored — if making the frame writable fails,
// the in-place modification below would write into shared data; consider
// checking and propagating the error.
451         av_frame_make_writable(in);
453         vignetting_thread_data = (VignettingThreadData) {
456             .data_in = in->data[0],
457             .linesize_in = in->linesize[0],
458             .pixel_composition = LF_CR_3(RED, GREEN, BLUE),
459             .modifier = lensfun->modifier
// run devignetting sliced across the available filter-graph threads
462         ctx->internal->execute(ctx,
463                                vignetting_filter_slice,
464                                &vignetting_thread_data,
466                                FFMIN(outlink->h, ctx->graph->nb_threads));
469     if (lensfun->mode & (GEOMETRY_DISTORTION | SUBPIXEL_DISTORTION)) {
// remapping cannot be done in place: allocate a separate output frame
470         out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
473             return AVERROR(ENOMEM);
// carry over timestamps/metadata from the input frame
475         av_frame_copy_props(out, in);
477         distortion_correction_thread_data = (DistortionCorrectionThreadData) {
480             .distortion_coords = lensfun->distortion_coords,
481             .data_in = in->data[0],
482             .data_out = out->data[0],
483             .linesize_in = in->linesize[0],
484             .linesize_out = out->linesize[0],
485             .interpolation = lensfun->interpolation,
486             .mode = lensfun->mode,
487             .interpolation_type = lensfun->interpolation_type
490         ctx->internal->execute(ctx,
491                                distortion_correction_filter_slice,
492                                &distortion_correction_thread_data,
494                                FFMIN(outlink->h, ctx->graph->nb_threads));
497         return ff_filter_frame(outlink, out);
// vignetting-only (or no-op) path: pass the input frame through
499     return ff_filter_frame(outlink, in);
/**
 * Release everything init()/config_props() allocated: the lensfun camera,
 * lens and modifier objects plus the coordinate and kernel tables.
 * NOTE(review): the guards before lf_camera_destroy/lf_lens_destroy appear
 * to be on elided lines, matching the visible modifier guard.
 */
503 static av_cold void uninit(AVFilterContext *ctx)
505     LensfunContext *lensfun = ctx->priv;
508         lf_camera_destroy(lensfun->camera);
510         lf_lens_destroy(lensfun->lens);
511     if (lensfun->modifier)
512         lf_modifier_destroy(lensfun->modifier);
// av_freep NULLs the pointers, so a re-init cannot double-free
513     av_freep(&lensfun->distortion_coords);
514     av_freep(&lensfun->interpolation);
// Single video input pad; config_props precomputes the remap tables,
// filter_frame does the per-frame work (pad name and the terminating
// entry are on elided lines).
517 static const AVFilterPad lensfun_inputs[] = {
520         .type = AVMEDIA_TYPE_VIDEO,
521         .config_props = config_props,
522         .filter_frame = filter_frame,
// Single pass-through video output pad (name and terminator elided).
527 static const AVFilterPad lensfun_outputs[] = {
530         .type = AVMEDIA_TYPE_VIDEO,
// Filter registration; slice-threaded (see the execute() calls in
// filter_frame) and timeline-capable.  The .name/.init/.uninit members
// are on elided lines.
535 AVFilter ff_vf_lensfun = {
537     .description   = NULL_IF_CONFIG_SMALL("Apply correction to an image based on info derived from the lensfun database."),
538     .priv_size     = sizeof(LensfunContext),
541     .query_formats = query_formats,
542     .inputs        = lensfun_inputs,
543     .outputs       = lensfun_outputs,
544     .priv_class    = &lensfun_class,
545     .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS,