blob: 9d26f5442227c3942300fc59a526c61d7eb73c2b
1 | /* |
2 | * Copyright (C) 2006 Michael Niedermayer <michaelni@gmx.at> |
3 | * Copyright (C) 2012 Clément Bœsch <u pkh me> |
4 | * |
5 | * This file is part of FFmpeg. |
6 | * |
7 | * FFmpeg is free software; you can redistribute it and/or modify |
8 | * it under the terms of the GNU General Public License as published by |
9 | * the Free Software Foundation; either version 2 of the License, or |
10 | * (at your option) any later version. |
11 | * |
12 | * FFmpeg is distributed in the hope that it will be useful, |
13 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
15 | * GNU General Public License for more details. |
16 | * |
17 | * You should have received a copy of the GNU General Public License along |
18 | * with FFmpeg; if not, write to the Free Software Foundation, Inc., |
19 | * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. |
20 | */ |
21 | |
22 | /** |
23 | * @file |
24 | * Generic equation change filter |
25 | * Originally written by Michael Niedermayer for the MPlayer project, and |
26 | * ported by Clément Bœsch for FFmpeg. |
27 | */ |
28 | |
29 | #include "libavutil/avassert.h" |
30 | #include "libavutil/avstring.h" |
31 | #include "libavutil/eval.h" |
32 | #include "libavutil/opt.h" |
33 | #include "libavutil/pixdesc.h" |
34 | #include "internal.h" |
35 | |
typedef struct {
    const AVClass *class;
    AVExpr *e[4];        ///< parsed expressions, one per plane (indices 0-3)
    char *expr_str[4+3]; ///< expression strings: Y/U/V/A at 0-3, plus RGB aliases G/B/R at 4-6
    AVFrame *picref;     ///< current input frame, read back by the p()/lum()/cb()/... expression functions
    int hsub, vsub;      ///< chroma subsampling shifts (log2) of the input pixel format
    int planes;          ///< number of planes in the input pixel format
    int is_rgb;          ///< non-zero when the user gave RGB expressions (planar GBR processing)
} GEQContext;
45 | |
46 | enum { Y = 0, U, V, A, G, B, R }; |
47 | |
#define OFFSET(x) offsetof(GEQContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

/* Each expression has a long and a short option name pointing at the same
 * storage slot; YCbCr (lum/cb/cr) and RGB (r/g/b) sets are mutually
 * exclusive, which geq_init() enforces. */
static const AVOption geq_options[] = {
    { "lum_expr",   "set luminance expression",   OFFSET(expr_str[Y]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "lum",        "set luminance expression",   OFFSET(expr_str[Y]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "cb_expr",    "set chroma blue expression", OFFSET(expr_str[U]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "cb",         "set chroma blue expression", OFFSET(expr_str[U]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "cr_expr",    "set chroma red expression",  OFFSET(expr_str[V]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "cr",         "set chroma red expression",  OFFSET(expr_str[V]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "alpha_expr", "set alpha expression",       OFFSET(expr_str[A]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "a",          "set alpha expression",       OFFSET(expr_str[A]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "red_expr",   "set red expression",         OFFSET(expr_str[R]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "r",          "set red expression",         OFFSET(expr_str[R]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "green_expr", "set green expression",       OFFSET(expr_str[G]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "g",          "set green expression",       OFFSET(expr_str[G]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "blue_expr",  "set blue expression",        OFFSET(expr_str[B]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "b",          "set blue expression",        OFFSET(expr_str[B]), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    {NULL},
};
68 | |
69 | AVFILTER_DEFINE_CLASS(geq); |
70 | |
71 | static inline double getpix(void *priv, double x, double y, int plane) |
72 | { |
73 | int xi, yi; |
74 | GEQContext *geq = priv; |
75 | AVFrame *picref = geq->picref; |
76 | const uint8_t *src = picref->data[plane]; |
77 | const int linesize = picref->linesize[plane]; |
78 | const int w = (plane == 1 || plane == 2) ? AV_CEIL_RSHIFT(picref->width, geq->hsub) : picref->width; |
79 | const int h = (plane == 1 || plane == 2) ? AV_CEIL_RSHIFT(picref->height, geq->vsub) : picref->height; |
80 | |
81 | if (!src) |
82 | return 0; |
83 | |
84 | xi = x = av_clipf(x, 0, w - 2); |
85 | yi = y = av_clipf(y, 0, h - 2); |
86 | |
87 | x -= xi; |
88 | y -= yi; |
89 | |
90 | return (1-y)*((1-x)*src[xi + yi * linesize] + x*src[xi + 1 + yi * linesize]) |
91 | + y *((1-x)*src[xi + (yi+1) * linesize] + x*src[xi + 1 + (yi+1) * linesize]); |
92 | } |
93 | |
//TODO: cubic interpolate
//TODO: keep the last few frames

/* Fixed-plane wrappers exposed to expressions: plane 0/1/2/3 of the input.
 * In RGB mode these are registered under the names g/b/r (planar GBR order). */
static double lum(void *priv, double x, double y) { return getpix(priv, x, y, 0); }
static double  cb(void *priv, double x, double y) { return getpix(priv, x, y, 1); }
static double  cr(void *priv, double x, double y) { return getpix(priv, x, y, 2); }
static double alpha(void *priv, double x, double y) { return getpix(priv, x, y, 3); }
100 | |
/* Variables available inside expressions: current pixel coords (X, Y),
 * current plane size (W, H), frame number (N), plane-to-luma width/height
 * ratios (SW, SH) and timestamp in seconds (T, NAN if unknown). */
static const char *const var_names[] = {   "X",   "Y",   "W",   "H",   "N",   "SW",   "SH",   "T",        NULL };
enum                                   { VAR_X, VAR_Y, VAR_W, VAR_H, VAR_N, VAR_SW, VAR_SH, VAR_T, VAR_VARS_NB };
103 | |
104 | static av_cold int geq_init(AVFilterContext *ctx) |
105 | { |
106 | GEQContext *geq = ctx->priv; |
107 | int plane, ret = 0; |
108 | |
109 | if (!geq->expr_str[Y] && !geq->expr_str[G] && !geq->expr_str[B] && !geq->expr_str[R]) { |
110 | av_log(ctx, AV_LOG_ERROR, "A luminance or RGB expression is mandatory\n"); |
111 | ret = AVERROR(EINVAL); |
112 | goto end; |
113 | } |
114 | geq->is_rgb = !geq->expr_str[Y]; |
115 | |
116 | if ((geq->expr_str[Y] || geq->expr_str[U] || geq->expr_str[V]) && (geq->expr_str[G] || geq->expr_str[B] || geq->expr_str[R])) { |
117 | av_log(ctx, AV_LOG_ERROR, "Either YCbCr or RGB but not both must be specified\n"); |
118 | ret = AVERROR(EINVAL); |
119 | goto end; |
120 | } |
121 | |
122 | if (!geq->expr_str[U] && !geq->expr_str[V]) { |
123 | /* No chroma at all: fallback on luma */ |
124 | geq->expr_str[U] = av_strdup(geq->expr_str[Y]); |
125 | geq->expr_str[V] = av_strdup(geq->expr_str[Y]); |
126 | } else { |
127 | /* One chroma unspecified, fallback on the other */ |
128 | if (!geq->expr_str[U]) geq->expr_str[U] = av_strdup(geq->expr_str[V]); |
129 | if (!geq->expr_str[V]) geq->expr_str[V] = av_strdup(geq->expr_str[U]); |
130 | } |
131 | |
132 | if (!geq->expr_str[A]) |
133 | geq->expr_str[A] = av_strdup("255"); |
134 | if (!geq->expr_str[G]) |
135 | geq->expr_str[G] = av_strdup("g(X,Y)"); |
136 | if (!geq->expr_str[B]) |
137 | geq->expr_str[B] = av_strdup("b(X,Y)"); |
138 | if (!geq->expr_str[R]) |
139 | geq->expr_str[R] = av_strdup("r(X,Y)"); |
140 | |
141 | if (geq->is_rgb ? |
142 | (!geq->expr_str[G] || !geq->expr_str[B] || !geq->expr_str[R]) |
143 | : |
144 | (!geq->expr_str[U] || !geq->expr_str[V] || !geq->expr_str[A])) { |
145 | ret = AVERROR(ENOMEM); |
146 | goto end; |
147 | } |
148 | |
149 | for (plane = 0; plane < 4; plane++) { |
150 | static double (*p[])(void *, double, double) = { lum, cb, cr, alpha }; |
151 | static const char *const func2_yuv_names[] = { "lum", "cb", "cr", "alpha", "p", NULL }; |
152 | static const char *const func2_rgb_names[] = { "g", "b", "r", "alpha", "p", NULL }; |
153 | const char *const *func2_names = geq->is_rgb ? func2_rgb_names : func2_yuv_names; |
154 | double (*func2[])(void *, double, double) = { lum, cb, cr, alpha, p[plane], NULL }; |
155 | |
156 | ret = av_expr_parse(&geq->e[plane], geq->expr_str[plane < 3 && geq->is_rgb ? plane+4 : plane], var_names, |
157 | NULL, NULL, func2_names, func2, 0, ctx); |
158 | if (ret < 0) |
159 | break; |
160 | } |
161 | |
162 | end: |
163 | return ret; |
164 | } |
165 | |
166 | static int geq_query_formats(AVFilterContext *ctx) |
167 | { |
168 | GEQContext *geq = ctx->priv; |
169 | static const enum AVPixelFormat yuv_pix_fmts[] = { |
170 | AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV420P, |
171 | AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV440P, |
172 | AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA420P, |
173 | AV_PIX_FMT_GRAY8, |
174 | AV_PIX_FMT_NONE |
175 | }; |
176 | static const enum AVPixelFormat rgb_pix_fmts[] = { |
177 | AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRAP, |
178 | AV_PIX_FMT_NONE |
179 | }; |
180 | AVFilterFormats *fmts_list; |
181 | |
182 | if (geq->is_rgb) { |
183 | fmts_list = ff_make_format_list(rgb_pix_fmts); |
184 | } else |
185 | fmts_list = ff_make_format_list(yuv_pix_fmts); |
186 | if (!fmts_list) |
187 | return AVERROR(ENOMEM); |
188 | return ff_set_common_formats(ctx, fmts_list); |
189 | } |
190 | |
191 | static int geq_config_props(AVFilterLink *inlink) |
192 | { |
193 | GEQContext *geq = inlink->dst->priv; |
194 | const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format); |
195 | |
196 | av_assert0(desc); |
197 | |
198 | geq->hsub = desc->log2_chroma_w; |
199 | geq->vsub = desc->log2_chroma_h; |
200 | geq->planes = desc->nb_components; |
201 | return 0; |
202 | } |
203 | |
/**
 * Evaluate the per-plane expressions for every pixel of the input frame and
 * emit the result on the output link. The input frame is kept in geq->picref
 * for the duration of the evaluation so that p()/lum()/... can sample it.
 */
static int geq_filter_frame(AVFilterLink *inlink, AVFrame *in)
{
    int plane;
    GEQContext *geq = inlink->dst->priv;
    AVFilterLink *outlink = inlink->dst->outputs[0];
    AVFrame *out;
    /* Per-frame constants; W/H/SW/SH are filled in per plane below. */
    double values[VAR_VARS_NB] = {
        [VAR_N] = inlink->frame_count_out,
        [VAR_T] = in->pts == AV_NOPTS_VALUE ? NAN : in->pts * av_q2d(inlink->time_base),
    };

    /* Must be set before evaluation: the expression callbacks read it. */
    geq->picref = in;
    out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!out) {
        av_frame_free(&in);
        return AVERROR(ENOMEM);
    }
    av_frame_copy_props(out, in);

    for (plane = 0; plane < geq->planes && out->data[plane]; plane++) {
        int x, y;
        uint8_t *dst = out->data[plane];
        const int linesize = out->linesize[plane];
        /* Planes 1 and 2 are chroma: apply the subsampling shifts. */
        const int w = (plane == 1 || plane == 2) ? AV_CEIL_RSHIFT(inlink->w, geq->hsub) : inlink->w;
        const int h = (plane == 1 || plane == 2) ? AV_CEIL_RSHIFT(inlink->h, geq->vsub) : inlink->h;

        values[VAR_W]  = w;
        values[VAR_H]  = h;
        values[VAR_SW] = w / (double)inlink->w;
        values[VAR_SH] = h / (double)inlink->h;

        for (y = 0; y < h; y++) {
            values[VAR_Y] = y;
            for (x = 0; x < w; x++) {
                values[VAR_X] = x;
                /* NOTE(review): the double result is stored into a uint8_t
                 * without an explicit clip; expressions are presumably
                 * expected to stay in [0,255] — confirm intended policy for
                 * out-of-range values. */
                dst[x] = av_expr_eval(geq->e[plane], values, geq);
            }
            dst += linesize;
        }
    }

    /* Releases the input frame (geq->picref aliases 'in'). */
    av_frame_free(&geq->picref);
    return ff_filter_frame(outlink, out);
}
248 | |
249 | static av_cold void geq_uninit(AVFilterContext *ctx) |
250 | { |
251 | int i; |
252 | GEQContext *geq = ctx->priv; |
253 | |
254 | for (i = 0; i < FF_ARRAY_ELEMS(geq->e); i++) |
255 | av_expr_free(geq->e[i]); |
256 | } |
257 | |
/* Single video input: configure geometry on link setup, process each frame. */
static const AVFilterPad geq_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = geq_config_props,
        .filter_frame = geq_filter_frame,
    },
    { NULL }
};

/* Single pass-through video output. */
static const AVFilterPad geq_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};
275 | |
/* Filter registration entry for "geq" (generic equation). */
AVFilter ff_vf_geq = {
    .name          = "geq",
    .description   = NULL_IF_CONFIG_SMALL("Apply generic equation to each pixel."),
    .priv_size     = sizeof(GEQContext),
    .init          = geq_init,
    .uninit        = geq_uninit,
    .query_formats = geq_query_formats,
    .inputs        = geq_inputs,
    .outputs       = geq_outputs,
    .priv_class    = &geq_class,
    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC,
};
288 |