path: root/libavfilter/vf_overlay.c
1/*
2 * Copyright (c) 2010 Stefano Sabatini
3 * Copyright (c) 2010 Baptiste Coudurier
4 * Copyright (c) 2007 Bobby Bingham
5 *
6 * This file is part of FFmpeg.
7 *
8 * FFmpeg is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU Lesser General Public
10 * License as published by the Free Software Foundation; either
11 * version 2.1 of the License, or (at your option) any later version.
12 *
13 * FFmpeg is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * Lesser General Public License for more details.
17 *
18 * You should have received a copy of the GNU Lesser General Public
19 * License along with FFmpeg; if not, write to the Free Software
20 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21 */
22
23/**
24 * @file
25 * overlay one video on top of another
26 */
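/* Typical usage (the command line and file names below are only illustrative):
 *
 *   ffmpeg -i main.mp4 -i logo.png \
 *          -filter_complex "overlay=x=main_w-overlay_w-10:y=main_h-overlay_h-10" out.mp4
 *
 * places the second input in the bottom-right corner of the first, using the
 * main_w/main_h/overlay_w/overlay_h variables defined in var_names below.
 */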
27
28#include "avfilter.h"
29#include "formats.h"
30#include "libavutil/common.h"
31#include "libavutil/eval.h"
32#include "libavutil/avstring.h"
33#include "libavutil/pixdesc.h"
34#include "libavutil/imgutils.h"
35#include "libavutil/mathematics.h"
36#include "libavutil/opt.h"
37#include "libavutil/timestamp.h"
38#include "internal.h"
39#include "dualinput.h"
40#include "drawutils.h"
41#include "video.h"
42
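/* Names of the variables usable in the x/y expressions.
 * This table must stay in the same order as the var_name enum below,
 * since the parsed expressions address var_values[] by position. */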
43static const char *const var_names[] = {
44 "main_w", "W", ///< width of the main video
45 "main_h", "H", ///< height of the main video
46 "overlay_w", "w", ///< width of the overlay video
47 "overlay_h", "h", ///< height of the overlay video
48 "hsub",
49 "vsub",
50 "x",
51 "y",
52 "n", ///< number of frame
53 "pos", ///< position in the file
54 "t", ///< timestamp expressed in seconds
55 NULL
56};
57
58enum var_name {
59 VAR_MAIN_W, VAR_MW,
60 VAR_MAIN_H, VAR_MH,
61 VAR_OVERLAY_W, VAR_OW,
62 VAR_OVERLAY_H, VAR_OH,
63 VAR_HSUB,
64 VAR_VSUB,
65 VAR_X,
66 VAR_Y,
67 VAR_N,
68 VAR_POS,
69 VAR_T,
70 VAR_VARS_NB
71};
72
73enum EOFAction {
74 EOF_ACTION_REPEAT,
75 EOF_ACTION_ENDALL,
76 EOF_ACTION_PASS
77};
78
79static const char * const eof_action_str[] = {
80 "repeat", "endall", "pass"
81};
82
83#define MAIN 0
84#define OVERLAY 1
85
86#define R 0
87#define G 1
88#define B 2
89#define A 3
90
91#define Y 0
92#define U 1
93#define V 2
94
95enum EvalMode {
96 EVAL_MODE_INIT,
97 EVAL_MODE_FRAME,
98 EVAL_MODE_NB
99};
100
101enum OverlayFormat {
102 OVERLAY_FORMAT_YUV420,
103 OVERLAY_FORMAT_YUV422,
104 OVERLAY_FORMAT_YUV444,
105 OVERLAY_FORMAT_RGB,
106 OVERLAY_FORMAT_GBRP,
107 OVERLAY_FORMAT_NB
108};
109
110typedef struct OverlayContext {
111 const AVClass *class;
112 int x, y; ///< position of overlaid picture
113
114 int allow_packed_rgb;
115 uint8_t main_is_packed_rgb;
116 uint8_t main_rgba_map[4];
117 uint8_t main_has_alpha;
118 uint8_t overlay_is_packed_rgb;
119 uint8_t overlay_rgba_map[4];
120 uint8_t overlay_has_alpha;
121 int format; ///< OverlayFormat
122 int eval_mode; ///< EvalMode
123
124 FFDualInputContext dinput;
125
126 int main_pix_step[4]; ///< steps per pixel for each plane of the main output
127 int overlay_pix_step[4]; ///< steps per pixel for each plane of the overlay
128 int hsub, vsub; ///< chroma subsampling values
129 const AVPixFmtDescriptor *main_desc; ///< format descriptor for main input
130
131 double var_values[VAR_VARS_NB];
132 char *x_expr, *y_expr;
133
134 int eof_action; ///< action to take on EOF from source
135
136 AVExpr *x_pexpr, *y_pexpr;
137
138 void (*blend_image)(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y);
139} OverlayContext;
140
141static av_cold void uninit(AVFilterContext *ctx)
142{
143 OverlayContext *s = ctx->priv;
144
145 ff_dualinput_uninit(&s->dinput);
146 av_expr_free(s->x_pexpr); s->x_pexpr = NULL;
147 av_expr_free(s->y_pexpr); s->y_pexpr = NULL;
148}
149
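/* Align an evaluated expression value to the chroma subsampling grid by clearing
 * the low chroma_sub bits; NAN maps to INT_MAX, which places the overlay outside
 * the visible frame. */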
150static inline int normalize_xy(double d, int chroma_sub)
151{
152 if (isnan(d))
153 return INT_MAX;
154 return (int)d & ~((1 << chroma_sub) - 1);
155}
156
157static void eval_expr(AVFilterContext *ctx)
158{
159 OverlayContext *s = ctx->priv;
160
161 s->var_values[VAR_X] = av_expr_eval(s->x_pexpr, s->var_values, NULL);
162 s->var_values[VAR_Y] = av_expr_eval(s->y_pexpr, s->var_values, NULL);
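    /* x is evaluated a second time so that an x expression referencing y
       picks up the value computed just above */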
163 s->var_values[VAR_X] = av_expr_eval(s->x_pexpr, s->var_values, NULL);
164 s->x = normalize_xy(s->var_values[VAR_X], s->hsub);
165 s->y = normalize_xy(s->var_values[VAR_Y], s->vsub);
166}
167
168static int set_expr(AVExpr **pexpr, const char *expr, const char *option, void *log_ctx)
169{
170 int ret;
171 AVExpr *old = NULL;
172
173 if (*pexpr)
174 old = *pexpr;
175 ret = av_expr_parse(pexpr, expr, var_names,
176 NULL, NULL, NULL, NULL, 0, log_ctx);
177 if (ret < 0) {
178 av_log(log_ctx, AV_LOG_ERROR,
179 "Error when evaluating the expression '%s' for %s\n",
180 expr, option);
181 *pexpr = old;
182 return ret;
183 }
184
185 av_expr_free(old);
186 return 0;
187}
188
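/* Runtime command handling: the "x" and "y" expressions can be replaced while the
 * filter is running (e.g. via the sendcmd filter); with eval=init the new position
 * is computed immediately, with eval=frame it takes effect on the next frame. */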
189static int process_command(AVFilterContext *ctx, const char *cmd, const char *args,
190 char *res, int res_len, int flags)
191{
192 OverlayContext *s = ctx->priv;
193 int ret;
194
195 if (!strcmp(cmd, "x"))
196 ret = set_expr(&s->x_pexpr, args, cmd, ctx);
197 else if (!strcmp(cmd, "y"))
198 ret = set_expr(&s->y_pexpr, args, cmd, ctx);
199 else
200 ret = AVERROR(ENOSYS);
201
202 if (ret < 0)
203 return ret;
204
205 if (s->eval_mode == EVAL_MODE_INIT) {
206 eval_expr(ctx);
207 av_log(ctx, AV_LOG_VERBOSE, "x:%f xi:%d y:%f yi:%d\n",
208 s->var_values[VAR_X], s->x,
209 s->var_values[VAR_Y], s->y);
210 }
211 return ret;
212}
213
214static int query_formats(AVFilterContext *ctx)
215{
216 OverlayContext *s = ctx->priv;
217
218 /* the overlay formats all contain an alpha channel, to avoid conversions that would lose alpha information */
219 static const enum AVPixelFormat main_pix_fmts_yuv420[] = {
220 AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVA420P,
221 AV_PIX_FMT_NV12, AV_PIX_FMT_NV21,
222 AV_PIX_FMT_NONE
223 };
224 static const enum AVPixelFormat overlay_pix_fmts_yuv420[] = {
225 AV_PIX_FMT_YUVA420P, AV_PIX_FMT_NONE
226 };
227
228 static const enum AVPixelFormat main_pix_fmts_yuv422[] = {
229 AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_NONE
230 };
231 static const enum AVPixelFormat overlay_pix_fmts_yuv422[] = {
232 AV_PIX_FMT_YUVA422P, AV_PIX_FMT_NONE
233 };
234
235 static const enum AVPixelFormat main_pix_fmts_yuv444[] = {
236 AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVA444P, AV_PIX_FMT_NONE
237 };
238 static const enum AVPixelFormat overlay_pix_fmts_yuv444[] = {
239 AV_PIX_FMT_YUVA444P, AV_PIX_FMT_NONE
240 };
241
242 static const enum AVPixelFormat main_pix_fmts_gbrp[] = {
243 AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRAP, AV_PIX_FMT_NONE
244 };
245 static const enum AVPixelFormat overlay_pix_fmts_gbrp[] = {
246 AV_PIX_FMT_GBRAP, AV_PIX_FMT_NONE
247 };
248
249 static const enum AVPixelFormat main_pix_fmts_rgb[] = {
250 AV_PIX_FMT_ARGB, AV_PIX_FMT_RGBA,
251 AV_PIX_FMT_ABGR, AV_PIX_FMT_BGRA,
252 AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24,
253 AV_PIX_FMT_NONE
254 };
255 static const enum AVPixelFormat overlay_pix_fmts_rgb[] = {
256 AV_PIX_FMT_ARGB, AV_PIX_FMT_RGBA,
257 AV_PIX_FMT_ABGR, AV_PIX_FMT_BGRA,
258 AV_PIX_FMT_NONE
259 };
260
261 AVFilterFormats *main_formats = NULL;
262 AVFilterFormats *overlay_formats = NULL;
263 int ret;
264
265 switch (s->format) {
266 case OVERLAY_FORMAT_YUV420:
267 if (!(main_formats = ff_make_format_list(main_pix_fmts_yuv420)) ||
268 !(overlay_formats = ff_make_format_list(overlay_pix_fmts_yuv420))) {
269 ret = AVERROR(ENOMEM);
270 goto fail;
271 }
272 break;
273 case OVERLAY_FORMAT_YUV422:
274 if (!(main_formats = ff_make_format_list(main_pix_fmts_yuv422)) ||
275 !(overlay_formats = ff_make_format_list(overlay_pix_fmts_yuv422))) {
276 ret = AVERROR(ENOMEM);
277 goto fail;
278 }
279 break;
280 case OVERLAY_FORMAT_YUV444:
281 if (!(main_formats = ff_make_format_list(main_pix_fmts_yuv444)) ||
282 !(overlay_formats = ff_make_format_list(overlay_pix_fmts_yuv444))) {
283 ret = AVERROR(ENOMEM);
284 goto fail;
285 }
286 break;
287 case OVERLAY_FORMAT_RGB:
288 if (!(main_formats = ff_make_format_list(main_pix_fmts_rgb)) ||
289 !(overlay_formats = ff_make_format_list(overlay_pix_fmts_rgb))) {
290 ret = AVERROR(ENOMEM);
291 goto fail;
292 }
293 break;
294 case OVERLAY_FORMAT_GBRP:
295 if (!(main_formats = ff_make_format_list(main_pix_fmts_gbrp)) ||
296 !(overlay_formats = ff_make_format_list(overlay_pix_fmts_gbrp))) {
297 ret = AVERROR(ENOMEM);
298 goto fail;
299 }
300 break;
301 default:
302 av_assert0(0);
303 }
304
305 if ((ret = ff_formats_ref(main_formats , &ctx->inputs[MAIN]->out_formats )) < 0 ||
306 (ret = ff_formats_ref(overlay_formats, &ctx->inputs[OVERLAY]->out_formats)) < 0 ||
307 (ret = ff_formats_ref(main_formats , &ctx->outputs[MAIN]->in_formats )) < 0)
308 goto fail;
309
310 return 0;
311fail:
312 if (main_formats)
313 av_freep(&main_formats->formats);
314 av_freep(&main_formats);
315 if (overlay_formats)
316 av_freep(&overlay_formats->formats);
317 av_freep(&overlay_formats);
318 return ret;
319}
320
321static const enum AVPixelFormat alpha_pix_fmts[] = {
322 AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA444P,
323 AV_PIX_FMT_ARGB, AV_PIX_FMT_ABGR, AV_PIX_FMT_RGBA,
324 AV_PIX_FMT_BGRA, AV_PIX_FMT_GBRAP, AV_PIX_FMT_NONE
325};
326
327static int config_input_overlay(AVFilterLink *inlink)
328{
329 AVFilterContext *ctx = inlink->dst;
330 OverlayContext *s = inlink->dst->priv;
331 int ret;
332 const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(inlink->format);
333
334 av_image_fill_max_pixsteps(s->overlay_pix_step, NULL, pix_desc);
335
336 /* Finish the configuration by evaluating the expressions
337 now that both inputs are configured. */
338 s->var_values[VAR_MAIN_W ] = s->var_values[VAR_MW] = ctx->inputs[MAIN ]->w;
339 s->var_values[VAR_MAIN_H ] = s->var_values[VAR_MH] = ctx->inputs[MAIN ]->h;
340 s->var_values[VAR_OVERLAY_W] = s->var_values[VAR_OW] = ctx->inputs[OVERLAY]->w;
341 s->var_values[VAR_OVERLAY_H] = s->var_values[VAR_OH] = ctx->inputs[OVERLAY]->h;
342 s->var_values[VAR_HSUB] = 1<<pix_desc->log2_chroma_w;
343 s->var_values[VAR_VSUB] = 1<<pix_desc->log2_chroma_h;
344 s->var_values[VAR_X] = NAN;
345 s->var_values[VAR_Y] = NAN;
346 s->var_values[VAR_N] = 0;
347 s->var_values[VAR_T] = NAN;
348 s->var_values[VAR_POS] = NAN;
349
350 if ((ret = set_expr(&s->x_pexpr, s->x_expr, "x", ctx)) < 0 ||
351 (ret = set_expr(&s->y_pexpr, s->y_expr, "y", ctx)) < 0)
352 return ret;
353
354 s->overlay_is_packed_rgb =
355 ff_fill_rgba_map(s->overlay_rgba_map, inlink->format) >= 0;
356 s->overlay_has_alpha = ff_fmt_is_in(inlink->format, alpha_pix_fmts);
357
358 if (s->eval_mode == EVAL_MODE_INIT) {
359 eval_expr(ctx);
360 av_log(ctx, AV_LOG_VERBOSE, "x:%f xi:%d y:%f yi:%d\n",
361 s->var_values[VAR_X], s->x,
362 s->var_values[VAR_Y], s->y);
363 }
364
365 av_log(ctx, AV_LOG_VERBOSE,
366 "main w:%d h:%d fmt:%s overlay w:%d h:%d fmt:%s eof_action:%s\n",
367 ctx->inputs[MAIN]->w, ctx->inputs[MAIN]->h,
368 av_get_pix_fmt_name(ctx->inputs[MAIN]->format),
369 ctx->inputs[OVERLAY]->w, ctx->inputs[OVERLAY]->h,
370 av_get_pix_fmt_name(ctx->inputs[OVERLAY]->format),
371 eof_action_str[s->eof_action]);
372 return 0;
373}
374
375static int config_output(AVFilterLink *outlink)
376{
377 AVFilterContext *ctx = outlink->src;
378 OverlayContext *s = ctx->priv;
379 int ret;
380
381 if ((ret = ff_dualinput_init(ctx, &s->dinput)) < 0)
382 return ret;
383
384 outlink->w = ctx->inputs[MAIN]->w;
385 outlink->h = ctx->inputs[MAIN]->h;
386 outlink->time_base = ctx->inputs[MAIN]->time_base;
387
388 return 0;
389}
390
391// divide by 255 and round to nearest
392// apply a fast variant: (X+127)/255 = ((X+127)*257+257)>>16 = ((X+128)*257)>>16
393#define FAST_DIV255(x) ((((x) + 128) * 257) >> 16)
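// e.g. FAST_DIV255(510) = ((510 + 128) * 257) >> 16 = 163966 >> 16 = 2   (510 / 255 = 2)
//      FAST_DIV255(383) = ((383 + 128) * 257) >> 16 = 131327 >> 16 = 2   (383 / 255 = 1.502, rounded to nearest)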
394
395// calculate the unpremultiplied alpha, applying the general equation:
396// alpha = alpha_overlay / ( (alpha_main + alpha_overlay) - (alpha_main * alpha_overlay) )
397// (((x) << 16) - ((x) << 9) + (x)) is a faster version of: 255 * 255 * x
398 // ((((x) + (y)) << 8) - ((x) + (y)) - (y) * (x)) is a faster version of: 255 * ((x) + (y)) - (x) * (y)
399#define UNPREMULTIPLY_ALPHA(x, y) ((((x) << 16) - ((x) << 9) + (x)) / ((((x) + (y)) << 8) - ((x) + (y)) - (y) * (x)))
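// e.g. with overlay alpha x = 128 blended over main alpha y = 128:
//   UNPREMULTIPLY_ALPHA(128, 128) = (65025 * 128) / (255 * (128 + 128) - 128 * 128)
//                                 = 8323200 / 48896 = 170
// which matches 0.502 / (0.502 + 0.502 - 0.502 * 0.502) ~= 0.67, scaled back to 0-255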
400
401/**
402 * Blend the image in src onto the destination buffer dst at position (x, y).
403 */
404
405static void blend_image_packed_rgb(AVFilterContext *ctx,
406 AVFrame *dst, const AVFrame *src,
407 int x, int y)
408{
409 OverlayContext *s = ctx->priv;
410 int i, imax, j, jmax;
411 const int src_w = src->width;
412 const int src_h = src->height;
413 const int dst_w = dst->width;
414 const int dst_h = dst->height;
415 uint8_t alpha; ///< the amount of overlay to blend onto main
416 const int dr = s->main_rgba_map[R];
417 const int dg = s->main_rgba_map[G];
418 const int db = s->main_rgba_map[B];
419 const int da = s->main_rgba_map[A];
420 const int dstep = s->main_pix_step[0];
421 const int sr = s->overlay_rgba_map[R];
422 const int sg = s->overlay_rgba_map[G];
423 const int sb = s->overlay_rgba_map[B];
424 const int sa = s->overlay_rgba_map[A];
425 const int sstep = s->overlay_pix_step[0];
426 const int main_has_alpha = s->main_has_alpha;
427 uint8_t *S, *sp, *d, *dp;
428
429 i = FFMAX(-y, 0);
430 sp = src->data[0] + i * src->linesize[0];
431 dp = dst->data[0] + (y+i) * dst->linesize[0];
432
433 for (imax = FFMIN(-y + dst_h, src_h); i < imax; i++) {
434 j = FFMAX(-x, 0);
435 S = sp + j * sstep;
436 d = dp + (x+j) * dstep;
437
438 for (jmax = FFMIN(-x + dst_w, src_w); j < jmax; j++) {
439 alpha = S[sa];
440
441 // if the main image has an alpha channel, the overlay alpha has to be calculated
442 // against it to obtain an un-premultiplied (straight) alpha value
443 if (main_has_alpha && alpha != 0 && alpha != 255) {
444 uint8_t alpha_d = d[da];
445 alpha = UNPREMULTIPLY_ALPHA(alpha, alpha_d);
446 }
447
448 switch (alpha) {
449 case 0:
450 break;
451 case 255:
452 d[dr] = S[sr];
453 d[dg] = S[sg];
454 d[db] = S[sb];
455 break;
456 default:
457 // main_value = main_value * (1 - alpha) + overlay_value * alpha
458 // since alpha is in the range 0-255, the result must be divided by 255
459 d[dr] = FAST_DIV255(d[dr] * (255 - alpha) + S[sr] * alpha);
460 d[dg] = FAST_DIV255(d[dg] * (255 - alpha) + S[sg] * alpha);
461 d[db] = FAST_DIV255(d[db] * (255 - alpha) + S[sb] * alpha);
462 }
463 if (main_has_alpha) {
464 switch (alpha) {
465 case 0:
466 break;
467 case 255:
468 d[da] = S[sa];
469 break;
470 default:
471 // apply alpha compositing: main_alpha += (1-main_alpha) * overlay_alpha
472 d[da] += FAST_DIV255((255 - d[da]) * S[sa]);
473 }
474 }
475 d += dstep;
476 S += sstep;
477 }
478 dp += dst->linesize[0];
479 sp += src->linesize[0];
480 }
481}
482
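/* Blend one plane of the overlay onto the corresponding component of the main frame.
 * i selects the source plane, hsub/vsub give its chroma subsampling, and
 * dst_plane/dst_offset/dst_step describe where that component lives in the
 * destination frame; alpha is read from the overlay's plane 3. */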
483static av_always_inline void blend_plane(AVFilterContext *ctx,
484 AVFrame *dst, const AVFrame *src,
485 int src_w, int src_h,
486 int dst_w, int dst_h,
487 int i, int hsub, int vsub,
488 int x, int y,
489 int main_has_alpha,
490 int dst_plane,
491 int dst_offset,
492 int dst_step)
493{
494 int src_wp = AV_CEIL_RSHIFT(src_w, hsub);
495 int src_hp = AV_CEIL_RSHIFT(src_h, vsub);
496 int dst_wp = AV_CEIL_RSHIFT(dst_w, hsub);
497 int dst_hp = AV_CEIL_RSHIFT(dst_h, vsub);
498 int yp = y>>vsub;
499 int xp = x>>hsub;
500 uint8_t *s, *sp, *d, *dp, *a, *ap;
501 int jmax, j, k, kmax;
502
503 j = FFMAX(-yp, 0);
504 sp = src->data[i] + j * src->linesize[i];
505 dp = dst->data[dst_plane]
506 + (yp+j) * dst->linesize[dst_plane]
507 + dst_offset;
508 ap = src->data[3] + (j<<vsub) * src->linesize[3];
509
510 for (jmax = FFMIN(-yp + dst_hp, src_hp); j < jmax; j++) {
511 k = FFMAX(-xp, 0);
512 d = dp + (xp+k) * dst_step;
513 s = sp + k;
514 a = ap + (k<<hsub);
515
516 for (kmax = FFMIN(-xp + dst_wp, src_wp); k < kmax; k++) {
517 int alpha_v, alpha_h, alpha;
518
519 // average the overlay alpha over the subsampled chroma area to improve quality
520 if (hsub && vsub && j+1 < src_hp && k+1 < src_wp) {
521 alpha = (a[0] + a[src->linesize[3]] +
522 a[1] + a[src->linesize[3]+1]) >> 2;
523 } else if (hsub || vsub) {
524 alpha_h = hsub && k+1 < src_wp ?
525 (a[0] + a[1]) >> 1 : a[0];
526 alpha_v = vsub && j+1 < src_hp ?
527 (a[0] + a[src->linesize[3]]) >> 1 : a[0];
528 alpha = (alpha_v + alpha_h) >> 1;
529 } else
530 alpha = a[0];
531 // if the main image has an alpha channel, the overlay alpha has to be calculated
532 // against it to obtain an un-premultiplied (straight) alpha value
533 if (main_has_alpha && alpha != 0 && alpha != 255) {
534 // average alpha for color components to improve quality
535 uint8_t alpha_d;
536 if (hsub && vsub && j+1 < src_hp && k+1 < src_wp) {
537 alpha_d = (d[0] + d[src->linesize[3]] +
538 d[1] + d[src->linesize[3]+1]) >> 2;
539 } else if (hsub || vsub) {
540 alpha_h = hsub && k+1 < src_wp ?
541 (d[0] + d[1]) >> 1 : d[0];
542 alpha_v = vsub && j+1 < src_hp ?
543 (d[0] + d[src->linesize[3]]) >> 1 : d[0];
544 alpha_d = (alpha_v + alpha_h) >> 1;
545 } else
546 alpha_d = d[0];
547 alpha = UNPREMULTIPLY_ALPHA(alpha, alpha_d);
548 }
549 *d = FAST_DIV255(*d * (255 - alpha) + *s * alpha);
550 s++;
551 d += dst_step;
552 a += 1 << hsub;
553 }
554 dp += dst->linesize[dst_plane];
555 sp += src->linesize[i];
556 ap += (1 << vsub) * src->linesize[3];
557 }
558}
559
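/* Composite the overlay's alpha plane onto the main frame's alpha plane (plane 3),
 * using the same main_alpha += (1 - main_alpha) * overlay_alpha rule as the
 * packed RGB path. */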
560static inline void alpha_composite(const AVFrame *src, const AVFrame *dst,
561 int src_w, int src_h,
562 int dst_w, int dst_h,
563 int x, int y)
564{
565 uint8_t alpha; ///< the amount of overlay to blend onto main
566 uint8_t *s, *sa, *d, *da;
567 int i, imax, j, jmax;
568
569 i = FFMAX(-y, 0);
570 sa = src->data[3] + i * src->linesize[3];
571 da = dst->data[3] + (y+i) * dst->linesize[3];
572
573 for (imax = FFMIN(-y + dst_h, src_h); i < imax; i++) {
574 j = FFMAX(-x, 0);
575 s = sa + j;
576 d = da + x+j;
577
578 for (jmax = FFMIN(-x + dst_w, src_w); j < jmax; j++) {
579 alpha = *s;
580 if (alpha != 0 && alpha != 255) {
581 uint8_t alpha_d = *d;
582 alpha = UNPREMULTIPLY_ALPHA(alpha, alpha_d);
583 }
584 switch (alpha) {
585 case 0:
586 break;
587 case 255:
588 *d = *s;
589 break;
590 default:
591 // apply alpha compositing: main_alpha += (1-main_alpha) * overlay_alpha
592 *d += FAST_DIV255((255 - *d) * *s);
593 }
594 d += 1;
595 s += 1;
596 }
597 da += dst->linesize[3];
598 sa += src->linesize[3];
599 }
600}
601
602static av_always_inline void blend_image_yuv(AVFilterContext *ctx,
603 AVFrame *dst, const AVFrame *src,
604 int hsub, int vsub,
605 int main_has_alpha,
606 int x, int y)
607{
608 OverlayContext *s = ctx->priv;
609 const int src_w = src->width;
610 const int src_h = src->height;
611 const int dst_w = dst->width;
612 const int dst_h = dst->height;
613
614 if (main_has_alpha)
615 alpha_composite(src, dst, src_w, src_h, dst_w, dst_h, x, y);
616
617 blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 0, 0, 0, x, y, main_has_alpha,
618 s->main_desc->comp[0].plane, s->main_desc->comp[0].offset, s->main_desc->comp[0].step);
619 blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 1, hsub, vsub, x, y, main_has_alpha,
620 s->main_desc->comp[1].plane, s->main_desc->comp[1].offset, s->main_desc->comp[1].step);
621 blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 2, hsub, vsub, x, y, main_has_alpha,
622 s->main_desc->comp[2].plane, s->main_desc->comp[2].offset, s->main_desc->comp[2].step);
623}
624
625static av_always_inline void blend_image_rgb(AVFilterContext *ctx,
626 AVFrame *dst, const AVFrame *src,
627 int hsub, int vsub,
628 int main_has_alpha,
629 int x, int y)
630{
631 OverlayContext *s = ctx->priv;
632 const int src_w = src->width;
633 const int src_h = src->height;
634 const int dst_w = dst->width;
635 const int dst_h = dst->height;
636
637 if (main_has_alpha)
638 alpha_composite(src, dst, src_w, src_h, dst_w, dst_h, x, y);
639
640 blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 0, 0, 0, x, y, main_has_alpha,
641 s->main_desc->comp[1].plane, s->main_desc->comp[1].offset, s->main_desc->comp[1].step);
642 blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 1, hsub, vsub, x, y, main_has_alpha,
643 s->main_desc->comp[2].plane, s->main_desc->comp[2].offset, s->main_desc->comp[2].step);
644 blend_plane(ctx, dst, src, src_w, src_h, dst_w, dst_h, 2, hsub, vsub, x, y, main_has_alpha,
645 s->main_desc->comp[0].plane, s->main_desc->comp[0].offset, s->main_desc->comp[0].step);
646}
647
648static void blend_image_yuv420(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
649{
650 OverlayContext *s = ctx->priv;
651
652 blend_image_yuv(ctx, dst, src, 1, 1, s->main_has_alpha, x, y);
653}
654
655static void blend_image_yuv422(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
656{
657 OverlayContext *s = ctx->priv;
658
659 blend_image_yuv(ctx, dst, src, 1, 0, s->main_has_alpha, x, y);
660}
661
662static void blend_image_yuv444(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
663{
664 OverlayContext *s = ctx->priv;
665
666 blend_image_yuv(ctx, dst, src, 0, 0, s->main_has_alpha, x, y);
667}
668
669static void blend_image_gbrp(AVFilterContext *ctx, AVFrame *dst, const AVFrame *src, int x, int y)
670{
671 OverlayContext *s = ctx->priv;
672
673 blend_image_rgb(ctx, dst, src, 0, 0, s->main_has_alpha, x, y);
674}
675
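/* Main input configuration: record the pixel layout of the main frames and select
 * the blend function matching the requested output format. */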
676static int config_input_main(AVFilterLink *inlink)
677{
678 OverlayContext *s = inlink->dst->priv;
679 const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(inlink->format);
680
681 av_image_fill_max_pixsteps(s->main_pix_step, NULL, pix_desc);
682
683 s->hsub = pix_desc->log2_chroma_w;
684 s->vsub = pix_desc->log2_chroma_h;
685
686 s->main_desc = pix_desc;
687
688 s->main_is_packed_rgb =
689 ff_fill_rgba_map(s->main_rgba_map, inlink->format) >= 0;
690 s->main_has_alpha = ff_fmt_is_in(inlink->format, alpha_pix_fmts);
691 switch (s->format) {
692 case OVERLAY_FORMAT_YUV420:
693 s->blend_image = blend_image_yuv420;
694 break;
695 case OVERLAY_FORMAT_YUV422:
696 s->blend_image = blend_image_yuv422;
697 break;
698 case OVERLAY_FORMAT_YUV444:
699 s->blend_image = blend_image_yuv444;
700 break;
701 case OVERLAY_FORMAT_RGB:
702 s->blend_image = blend_image_packed_rgb;
703 break;
704 case OVERLAY_FORMAT_GBRP:
705 s->blend_image = blend_image_gbrp;
706 break;
707 }
708 return 0;
709}
710
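/* Process callback for the dual-input helper: receives a writable main frame and
 * the overlay frame to composite; in 'frame' eval mode the position expressions
 * are re-evaluated for every frame before blending in place. */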
711static AVFrame *do_blend(AVFilterContext *ctx, AVFrame *mainpic,
712 const AVFrame *second)
713{
714 OverlayContext *s = ctx->priv;
715 AVFilterLink *inlink = ctx->inputs[0];
716
717 if (s->eval_mode == EVAL_MODE_FRAME) {
718 int64_t pos = av_frame_get_pkt_pos(mainpic);
719
720 s->var_values[VAR_N] = inlink->frame_count_out;
721 s->var_values[VAR_T] = mainpic->pts == AV_NOPTS_VALUE ?
722 NAN : mainpic->pts * av_q2d(inlink->time_base);
723 s->var_values[VAR_POS] = pos == -1 ? NAN : pos;
724
725 s->var_values[VAR_OVERLAY_W] = s->var_values[VAR_OW] = second->width;
726 s->var_values[VAR_OVERLAY_H] = s->var_values[VAR_OH] = second->height;
727 s->var_values[VAR_MAIN_W ] = s->var_values[VAR_MW] = mainpic->width;
728 s->var_values[VAR_MAIN_H ] = s->var_values[VAR_MH] = mainpic->height;
729
730 eval_expr(ctx);
731 av_log(ctx, AV_LOG_DEBUG, "n:%f t:%f pos:%f x:%f xi:%d y:%f yi:%d\n",
732 s->var_values[VAR_N], s->var_values[VAR_T], s->var_values[VAR_POS],
733 s->var_values[VAR_X], s->x,
734 s->var_values[VAR_Y], s->y);
735 }
736
737 if (s->x < mainpic->width && s->x + second->width >= 0 ||
738 s->y < mainpic->height && s->y + second->height >= 0)
739 s->blend_image(ctx, mainpic, second, s->x, s->y);
740 return mainpic;
741}
742
743static int filter_frame(AVFilterLink *inlink, AVFrame *inpicref)
744{
745 OverlayContext *s = inlink->dst->priv;
746 av_log(inlink->dst, AV_LOG_DEBUG, "Incoming frame (time:%s) from link #%d\n", av_ts2timestr(inpicref->pts, &inlink->time_base), FF_INLINK_IDX(inlink));
747 return ff_dualinput_filter_frame(&s->dinput, inlink, inpicref);
748}
749
750static int request_frame(AVFilterLink *outlink)
751{
752 OverlayContext *s = outlink->src->priv;
753 return ff_dualinput_request_frame(&s->dinput, outlink);
754}
755
756static av_cold int init(AVFilterContext *ctx)
757{
758 OverlayContext *s = ctx->priv;
759
760 if (s->allow_packed_rgb) {
761 av_log(ctx, AV_LOG_WARNING,
762 "The rgb option is deprecated and is overriding the format option, use format instead\n");
763 s->format = OVERLAY_FORMAT_RGB;
764 }
765 if (!s->dinput.repeatlast || s->eof_action == EOF_ACTION_PASS) {
766 s->dinput.repeatlast = 0;
767 s->eof_action = EOF_ACTION_PASS;
768 }
769 if (s->dinput.shortest || s->eof_action == EOF_ACTION_ENDALL) {
770 s->dinput.shortest = 1;
771 s->eof_action = EOF_ACTION_ENDALL;
772 }
773
774 s->dinput.process = do_blend;
775 return 0;
776}
777
778#define OFFSET(x) offsetof(OverlayContext, x)
779#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
780
781static const AVOption overlay_options[] = {
782 { "x", "set the x expression", OFFSET(x_expr), AV_OPT_TYPE_STRING, {.str = "0"}, CHAR_MIN, CHAR_MAX, FLAGS },
783 { "y", "set the y expression", OFFSET(y_expr), AV_OPT_TYPE_STRING, {.str = "0"}, CHAR_MIN, CHAR_MAX, FLAGS },
784 { "eof_action", "Action to take when encountering EOF from secondary input ",
785 OFFSET(eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_REPEAT },
786 EOF_ACTION_REPEAT, EOF_ACTION_PASS, .flags = FLAGS, "eof_action" },
787 { "repeat", "Repeat the previous frame.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_REPEAT }, .flags = FLAGS, "eof_action" },
788 { "endall", "End both streams.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ENDALL }, .flags = FLAGS, "eof_action" },
789 { "pass", "Pass through the main input.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_PASS }, .flags = FLAGS, "eof_action" },
790 { "eval", "specify when to evaluate expressions", OFFSET(eval_mode), AV_OPT_TYPE_INT, {.i64 = EVAL_MODE_FRAME}, 0, EVAL_MODE_NB-1, FLAGS, "eval" },
791 { "init", "eval expressions once during initialization", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_INIT}, .flags = FLAGS, .unit = "eval" },
792 { "frame", "eval expressions per-frame", 0, AV_OPT_TYPE_CONST, {.i64=EVAL_MODE_FRAME}, .flags = FLAGS, .unit = "eval" },
793 { "rgb", "force packed RGB in input and output (deprecated)", OFFSET(allow_packed_rgb), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, FLAGS },
794 { "shortest", "force termination when the shortest input terminates", OFFSET(dinput.shortest), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
795 { "format", "set output format", OFFSET(format), AV_OPT_TYPE_INT, {.i64=OVERLAY_FORMAT_YUV420}, 0, OVERLAY_FORMAT_NB-1, FLAGS, "format" },
796 { "yuv420", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_YUV420}, .flags = FLAGS, .unit = "format" },
797 { "yuv422", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_YUV422}, .flags = FLAGS, .unit = "format" },
798 { "yuv444", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_YUV444}, .flags = FLAGS, .unit = "format" },
799 { "rgb", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_RGB}, .flags = FLAGS, .unit = "format" },
800 { "gbrp", "", 0, AV_OPT_TYPE_CONST, {.i64=OVERLAY_FORMAT_GBRP}, .flags = FLAGS, .unit = "format" },
801 { "repeatlast", "repeat overlay of the last overlay frame", OFFSET(dinput.repeatlast), AV_OPT_TYPE_BOOL, {.i64=1}, 0, 1, FLAGS },
802 { NULL }
803};
804
805AVFILTER_DEFINE_CLASS(overlay);
806
807static const AVFilterPad avfilter_vf_overlay_inputs[] = {
808 {
809 .name = "main",
810 .type = AVMEDIA_TYPE_VIDEO,
811 .config_props = config_input_main,
812 .filter_frame = filter_frame,
813 .needs_writable = 1,
814 },
815 {
816 .name = "overlay",
817 .type = AVMEDIA_TYPE_VIDEO,
818 .config_props = config_input_overlay,
819 .filter_frame = filter_frame,
820 },
821 { NULL }
822};
823
824static const AVFilterPad avfilter_vf_overlay_outputs[] = {
825 {
826 .name = "default",
827 .type = AVMEDIA_TYPE_VIDEO,
828 .config_props = config_output,
829 .request_frame = request_frame,
830 },
831 { NULL }
832};
833
834AVFilter ff_vf_overlay = {
835 .name = "overlay",
836 .description = NULL_IF_CONFIG_SMALL("Overlay a video source on top of the input."),
837 .init = init,
838 .uninit = uninit,
839 .priv_size = sizeof(OverlayContext),
840 .priv_class = &overlay_class,
841 .query_formats = query_formats,
842 .process_command = process_command,
843 .inputs = avfilter_vf_overlay_inputs,
844 .outputs = avfilter_vf_overlay_outputs,
845 .flags = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL,
846};
847