path: root/libavfilter/vf_signalstats.c (plain)
blob: 22a1db196f0e3ab7a5ea016f186c7a47686487f2
/*
 * Copyright (c) 2010 Mark Heath mjpeg0 @ silicontrip dot org
 * Copyright (c) 2014 Clément Bœsch
 * Copyright (c) 2014 Dave Rice @dericed
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

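/**
 * @file
 * signalstats video filter: per-frame signal statistics exported as frame metadata.
 *
 * Illustrative usage (options shown are examples only, not exhaustive):
 *   ffprobe -f lavfi movie=input.mov,signalstats=stat=tout+vrep+brng -show_frames
 */
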
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "internal.h"

enum FilterMode {
    FILTER_NONE = -1,
    FILTER_TOUT,
    FILTER_VREP,
    FILTER_BRNG,
    FILT_NUMB
};

typedef struct {
    const AVClass *class;
    int chromah;    // height of chroma plane
    int chromaw;    // width of chroma plane
    int hsub;       // horizontal subsampling
    int vsub;       // vertical subsampling
    int depth;      // pixel depth
    int fs;         // pixel count per frame
    int cfs;        // pixel count per frame of chroma planes
    int outfilter;  // FilterMode
    int filters;
    AVFrame *frame_prev;
    uint8_t rgba_color[4];
    int yuv_color[3];
    int nb_jobs;
    int *jobs_rets;

    int *histy, *histu, *histv, *histsat;

    AVFrame *frame_sat;
    AVFrame *frame_hue;
} SignalstatsContext;

typedef struct ThreadData {
    const AVFrame *in;
    AVFrame *out;
} ThreadData;

typedef struct ThreadDataHueSatMetrics {
    const AVFrame *src;
    AVFrame *dst_sat, *dst_hue;
} ThreadDataHueSatMetrics;

#define OFFSET(x) offsetof(SignalstatsContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM

static const AVOption signalstats_options[] = {
    {"stat", "set statistics filters", OFFSET(filters), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "filters"},
    {"tout", "analyze pixels for temporal outliers", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_TOUT}, 0, 0, FLAGS, "filters"},
    {"vrep", "analyze video lines for vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_VREP}, 0, 0, FLAGS, "filters"},
    {"brng", "analyze for pixels outside of broadcast range", 0, AV_OPT_TYPE_CONST, {.i64=1<<FILTER_BRNG}, 0, 0, FLAGS, "filters"},
    {"out", "set video filter", OFFSET(outfilter), AV_OPT_TYPE_INT, {.i64=FILTER_NONE}, -1, FILT_NUMB-1, FLAGS, "out"},
    {"tout", "highlight pixels that depict temporal outliers", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_TOUT}, 0, 0, FLAGS, "out"},
    {"vrep", "highlight video lines that depict vertical line repetition", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_VREP}, 0, 0, FLAGS, "out"},
    {"brng", "highlight pixels that are outside of broadcast range", 0, AV_OPT_TYPE_CONST, {.i64=FILTER_BRNG}, 0, 0, FLAGS, "out"},
    {"c",     "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
    {"color", "set highlight color", OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="yellow"}, .flags=FLAGS},
    {NULL}
};

AVFILTER_DEFINE_CLASS(signalstats);

static av_cold int init(AVFilterContext *ctx)
{
    uint8_t r, g, b;
    SignalstatsContext *s = ctx->priv;

    if (s->outfilter != FILTER_NONE)
        s->filters |= 1 << s->outfilter;

    r = s->rgba_color[0];
    g = s->rgba_color[1];
    b = s->rgba_color[2];
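    /* Convert the highlight color from RGB to limited-range YCbCr using
     * rounded integer BT.601-style coefficients (with the usual +16/+128
     * offsets), so it can be burned directly into the YUV planes. */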
    s->yuv_color[0] = (( 66*r + 129*g +  25*b + (1<<7)) >> 8) +  16;
    s->yuv_color[1] = ((-38*r + -74*g + 112*b + (1<<7)) >> 8) + 128;
    s->yuv_color[2] = ((112*r + -94*g + -18*b + (1<<7)) >> 8) + 128;
    return 0;
}

static av_cold void uninit(AVFilterContext *ctx)
{
    SignalstatsContext *s = ctx->priv;
    av_frame_free(&s->frame_prev);
    av_frame_free(&s->frame_sat);
    av_frame_free(&s->frame_hue);
    av_freep(&s->jobs_rets);
    av_freep(&s->histy);
    av_freep(&s->histu);
    av_freep(&s->histv);
    av_freep(&s->histsat);
}

static int query_formats(AVFilterContext *ctx)
{
    // TODO: add more
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV411P,
        AV_PIX_FMT_YUV440P,
        AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ411P,
        AV_PIX_FMT_YUVJ440P,
        AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV420P9,
        AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV420P10,
        AV_PIX_FMT_YUV440P10,
        AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV420P12,
        AV_PIX_FMT_YUV440P12,
        AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV420P14,
        AV_PIX_FMT_YUV444P16, AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV420P16,
        AV_PIX_FMT_NONE
    };

    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if (!fmts_list)
        return AVERROR(ENOMEM);
    return ff_set_common_formats(ctx, fmts_list);
}

static AVFrame *alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
{
    AVFrame *frame = av_frame_alloc();
    if (!frame)
        return NULL;

    frame->format = pixfmt;
    frame->width  = w;
    frame->height = h;

    if (av_frame_get_buffer(frame, 32) < 0) {
        av_frame_free(&frame);
        return NULL;
    }

    return frame;
}

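/* Per-link setup: cache subsampling and bit depth, allocate wide histograms
 * for >8-bit input, size the slice-job array, and allocate the scratch frames
 * used to store per-pixel saturation and hue values. */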
static int config_props(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    SignalstatsContext *s = ctx->priv;
    AVFilterLink *inlink = outlink->src->inputs[0];
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format);
    s->hsub = desc->log2_chroma_w;
    s->vsub = desc->log2_chroma_h;
    s->depth = desc->comp[0].depth;
    if (s->depth > 8) {
        s->histy = av_malloc_array(1 << s->depth, sizeof(*s->histy));
        s->histu = av_malloc_array(1 << s->depth, sizeof(*s->histu));
        s->histv = av_malloc_array(1 << s->depth, sizeof(*s->histv));
        s->histsat = av_malloc_array(1 << s->depth, sizeof(*s->histsat));

        if (!s->histy || !s->histu || !s->histv || !s->histsat)
            return AVERROR(ENOMEM);
    }

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    s->chromaw = AV_CEIL_RSHIFT(inlink->w, s->hsub);
    s->chromah = AV_CEIL_RSHIFT(inlink->h, s->vsub);

    s->fs = inlink->w * inlink->h;
    s->cfs = s->chromaw * s->chromah;

    s->nb_jobs   = FFMAX(1, FFMIN(inlink->h, ff_filter_get_nb_threads(ctx)));
    s->jobs_rets = av_malloc_array(s->nb_jobs, sizeof(*s->jobs_rets));
    if (!s->jobs_rets)
        return AVERROR(ENOMEM);

    s->frame_sat = alloc_frame(s->depth > 8 ? AV_PIX_FMT_GRAY16 : AV_PIX_FMT_GRAY8, inlink->w, inlink->h);
    s->frame_hue = alloc_frame(AV_PIX_FMT_GRAY16, inlink->w, inlink->h);
    if (!s->frame_sat || !s->frame_hue)
        return AVERROR(ENOMEM);

    return 0;
}

static void burn_frame8(const SignalstatsContext *s, AVFrame *f, int x, int y)
{
    const int chromax = x >> s->hsub;
    const int chromay = y >> s->vsub;
    f->data[0][y       * f->linesize[0] +       x] = s->yuv_color[0];
    f->data[1][chromay * f->linesize[1] + chromax] = s->yuv_color[1];
    f->data[2][chromay * f->linesize[2] + chromax] = s->yuv_color[2];
}

static void burn_frame16(const SignalstatsContext *s, AVFrame *f, int x, int y)
{
    const int chromax = x >> s->hsub;
    const int chromay = y >> s->vsub;
    const int mult = 1 << (s->depth - 8);
    AV_WN16(f->data[0] + y       * f->linesize[0] +       x * 2, s->yuv_color[0] * mult);
    AV_WN16(f->data[1] + chromay * f->linesize[1] + chromax * 2, s->yuv_color[1] * mult);
    AV_WN16(f->data[2] + chromay * f->linesize[2] + chromax * 2, s->yuv_color[2] * mult);
}

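/* BRNG: count (and optionally highlight) samples outside the nominal 8-bit
 * broadcast range, i.e. luma outside [16, 235] or chroma outside [16, 240],
 * scaled by 1 << (depth - 8) in the 16-bit variant below. */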
static int filter8_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int yc = y >> s->vsub;
        const uint8_t *pluma    = &in->data[0][y  * in->linesize[0]];
        const uint8_t *pchromau = &in->data[1][yc * in->linesize[1]];
        const uint8_t *pchromav = &in->data[2][yc * in->linesize[2]];

        for (x = 0; x < w; x++) {
            const int xc = x >> s->hsub;
            const int luma    = pluma[x];
            const int chromau = pchromau[xc];
            const int chromav = pchromav[xc];
            const int filt = luma    < 16 || luma    > 235 ||
                             chromau < 16 || chromau > 240 ||
                             chromav < 16 || chromav > 240;
            score += filt;
            if (out && filt)
                burn_frame8(s, out, x, y);
        }
    }
    return score;
}

static int filter16_brng(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int mult = 1 << (s->depth - 8);
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int yc = y >> s->vsub;
        const uint16_t *pluma    = (uint16_t *)&in->data[0][y  * in->linesize[0]];
        const uint16_t *pchromau = (uint16_t *)&in->data[1][yc * in->linesize[1]];
        const uint16_t *pchromav = (uint16_t *)&in->data[2][yc * in->linesize[2]];

        for (x = 0; x < w; x++) {
            const int xc = x >> s->hsub;
            const int luma    = pluma[x];
            const int chromau = pchromau[xc];
            const int chromav = pchromav[xc];
            const int filt = luma    < 16 * mult || luma    > 235 * mult ||
                             chromau < 16 * mult || chromau > 240 * mult ||
                             chromav < 16 * mult || chromav > 240 * mult;
            score += filt;
            if (out && filt)
                burn_frame16(s, out, x, y);
        }
    }
    return score;
}

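/* A sample y is treated as an outlier with respect to its vertical
 * neighbours x and z when its mean distance to them exceeds the distance
 * between the neighbours themselves by more than a fixed threshold of 4. */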
static int filter_tout_outlier(uint8_t x, uint8_t y, uint8_t z)
{
    return ((abs(x - y) + abs(z - y)) / 2) - abs(z - x) > 4; // make 4 configurable?
}

static int filter8_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint8_t *p = in->data[0];
    int lw = in->linesize[0];
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // detect two pixels above and below (to eliminate interlace artefacts)
        // should check that the video format is in fact interlaced.

#define FILTER(i, j) \
    filter_tout_outlier(p[(y-j) * lw + x + i], \
                        p[    y * lw + x + i], \
                        p[(y+j) * lw + x + i])

#define FILTER3(j) (FILTER(-1, j) && FILTER(0, j) && FILTER(1, j))

        if (y - 2 >= 0 && y + 2 < h) {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        } else {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame8(s, out, x, y);
            }
        }
    }
    return score;
}

static int filter16_tout(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint16_t *p = (uint16_t *)in->data[0];
    int lw = in->linesize[0] / 2;
    int x, y, score = 0, filt;

    for (y = slice_start; y < slice_end; y++) {

        if (y - 1 < 0 || y + 1 >= h)
            continue;

        // detect two pixels above and below (to eliminate interlace artefacts)
        // should check that the video format is in fact interlaced.

        if (y - 2 >= 0 && y + 2 < h) {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(2) && FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        } else {
            for (x = 1; x < w - 1; x++) {
                filt = FILTER3(1);
                score += filt;
                if (filt && out)
                    burn_frame16(s, out, x, y);
            }
        }
    }
    return score;
}

#define VREP_START 4

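/* VREP: a line is reported as a repetition when the sum of absolute
 * differences against the line VREP_START rows above it stays below the
 * line width, i.e. the two lines differ by less than one code value per
 * sample on average. */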
static int filter8_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint8_t *p = in->data[0];
    const int lw = in->linesize[0];
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int y2lw = (y - VREP_START) * lw;
        const int ylw  = y * lw;
        int filt, totdiff = 0;

        if (y < VREP_START)
            continue;

        for (x = 0; x < w; x++)
            totdiff += abs(p[y2lw + x] - p[ylw + x]);
        filt = totdiff < w;

        score += filt;
        if (filt && out)
            for (x = 0; x < w; x++)
                burn_frame8(s, out, x, y);
    }
    return score * w;
}

static int filter16_vrep(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    ThreadData *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *in = td->in;
    AVFrame *out = td->out;
    const int w = in->width;
    const int h = in->height;
    const int slice_start = (h *  jobnr   ) / nb_jobs;
    const int slice_end   = (h * (jobnr+1)) / nb_jobs;
    const uint16_t *p = (uint16_t *)in->data[0];
    const int lw = in->linesize[0] / 2;
    int x, y, score = 0;

    for (y = slice_start; y < slice_end; y++) {
        const int y2lw = (y - VREP_START) * lw;
        const int ylw  = y * lw;
        int64_t totdiff = 0;
        int filt;

        if (y < VREP_START)
            continue;

        for (x = 0; x < w; x++)
            totdiff += abs(p[y2lw + x] - p[ylw + x]);
        filt = totdiff < w;

        score += filt;
        if (filt && out)
            for (x = 0; x < w; x++)
                burn_frame16(s, out, x, y);
    }
    return score * w;
}

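/* Table of the available analysis filters with their 8-bit and high
 * bit depth slice implementations, indexed by FilterMode. */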
static const struct {
    const char *name;
    int (*process8)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
    int (*process16)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
} filters_def[] = {
    {"TOUT", filter8_tout, filter16_tout},
    {"VREP", filter8_vrep, filter16_vrep},
    {"BRNG", filter8_brng, filter16_brng},
    {NULL}
};

#define DEPTH 256

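/* For every chroma sample, precompute saturation as the Euclidean distance
 * of (U, V) from the neutral point, and hue as atan2(U, V) mapped from
 * radians to integer degrees (offset by 180 so the result is non-negative). */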
static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    int i, j;
    ThreadDataHueSatMetrics *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *src = td->src;
    AVFrame *dst_sat = td->dst_sat;
    AVFrame *dst_hue = td->dst_hue;

    const int slice_start = (s->chromah *  jobnr   ) / nb_jobs;
    const int slice_end   = (s->chromah * (jobnr+1)) / nb_jobs;

    const int lsz_u = src->linesize[1];
    const int lsz_v = src->linesize[2];
    const uint8_t *p_u = src->data[1] + slice_start * lsz_u;
    const uint8_t *p_v = src->data[2] + slice_start * lsz_v;

    const int lsz_sat = dst_sat->linesize[0];
    const int lsz_hue = dst_hue->linesize[0];
    uint8_t *p_sat = dst_sat->data[0] + slice_start * lsz_sat;
    uint8_t *p_hue = dst_hue->data[0] + slice_start * lsz_hue;

    for (j = slice_start; j < slice_end; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = p_u[i];
            const int yuvv = p_v[i];
            p_sat[i] = hypot(yuvu - 128, yuvv - 128); // int or round?
            ((int16_t*)p_hue)[i] = floor((180 / M_PI) * atan2f(yuvu-128, yuvv-128) + 180);
        }
        p_u   += lsz_u;
        p_v   += lsz_v;
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    return 0;
}

static int compute_sat_hue_metrics16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
{
    int i, j;
    ThreadDataHueSatMetrics *td = arg;
    const SignalstatsContext *s = ctx->priv;
    const AVFrame *src = td->src;
    AVFrame *dst_sat = td->dst_sat;
    AVFrame *dst_hue = td->dst_hue;
    const int mid = 1 << (s->depth - 1);

    const int slice_start = (s->chromah *  jobnr   ) / nb_jobs;
    const int slice_end   = (s->chromah * (jobnr+1)) / nb_jobs;

    const int lsz_u = src->linesize[1] / 2;
    const int lsz_v = src->linesize[2] / 2;
    const uint16_t *p_u = (uint16_t*)src->data[1] + slice_start * lsz_u;
    const uint16_t *p_v = (uint16_t*)src->data[2] + slice_start * lsz_v;

    const int lsz_sat = dst_sat->linesize[0] / 2;
    const int lsz_hue = dst_hue->linesize[0] / 2;
    uint16_t *p_sat = (uint16_t*)dst_sat->data[0] + slice_start * lsz_sat;
    uint16_t *p_hue = (uint16_t*)dst_hue->data[0] + slice_start * lsz_hue;

    for (j = slice_start; j < slice_end; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = p_u[i];
            const int yuvv = p_v[i];
            p_sat[i] = hypot(yuvu - mid, yuvv - mid); // int or round?
            ((int16_t*)p_hue)[i] = floor((180 / M_PI) * atan2f(yuvu-mid, yuvv-mid) + 180);
        }
        p_u   += lsz_u;
        p_v   += lsz_v;
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    return 0;
}

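/* Estimate the number of significant bits in a plane: the mask is the
 * bitwise OR of every sample value, so its population count gives the
 * number of bit positions that are ever used. */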
static unsigned compute_bit_depth(uint16_t mask)
{
    return av_popcount(mask);
}

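/* 8-bit frame analysis: compute the per-pixel saturation/hue planes, build
 * luma, chroma, saturation and hue histograms together with the absolute
 * difference against the previous frame, run the enabled analysis filters
 * in slices, then derive MIN/LOW(10%)/AVG/HIGH(90%)/MAX values and export
 * everything as lavfi.signalstats.* frame metadata. */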
static int filter_frame8(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;
    SignalstatsContext *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *out = in;
    int i, j;
    int  w = 0,  cw = 0, // in
        pw = 0, cpw = 0; // prev
    int fil;
    char metabuf[128];
    unsigned int histy[DEPTH] = {0},
                 histu[DEPTH] = {0},
                 histv[DEPTH] = {0},
                 histhue[360] = {0},
                 histsat[DEPTH] = {0}; // limited to 8 bit data.
    int miny = -1, minu = -1, minv = -1;
    int maxy = -1, maxu = -1, maxv = -1;
    int lowy = -1, lowu = -1, lowv = -1;
    int highy = -1, highu = -1, highv = -1;
    int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
    int lowp, highp, clowp, chighp;
    int accy, accu, accv;
    int accsat, acchue = 0;
    int medhue, maxhue;
    int toty = 0, totu = 0, totv = 0, totsat = 0;
    int tothue = 0;
    int dify = 0, difu = 0, difv = 0;
    uint16_t masky = 0, masku = 0, maskv = 0;

    int filtot[FILT_NUMB] = {0};
    AVFrame *prev;

    AVFrame *sat = s->frame_sat;
    AVFrame *hue = s->frame_hue;
    const uint8_t *p_sat = sat->data[0];
    const uint8_t *p_hue = hue->data[0];
    const int lsz_sat = sat->linesize[0];
    const int lsz_hue = hue->linesize[0];
    ThreadDataHueSatMetrics td_huesat = {
        .src     = in,
        .dst_sat = sat,
        .dst_hue = hue,
    };

    if (!s->frame_prev)
        s->frame_prev = av_frame_clone(in);

    prev = s->frame_prev;

    if (s->outfilter != FILTER_NONE) {
        out = av_frame_clone(in);
        av_frame_make_writable(out);
    }

    ctx->internal->execute(ctx, compute_sat_hue_metrics8, &td_huesat,
                           NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));

    // Calculate luma histogram and difference with previous frame or field.
    for (j = 0; j < link->h; j++) {
        for (i = 0; i < link->w; i++) {
            const int yuv = in->data[0][w + i];

            masky |= yuv;
            histy[yuv]++;
            dify += abs(yuv - prev->data[0][pw + i]);
        }
        w  += in->linesize[0];
        pw += prev->linesize[0];
    }

    // Calculate chroma histogram and difference with previous frame or field.
    for (j = 0; j < s->chromah; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = in->data[1][cw+i];
            const int yuvv = in->data[2][cw+i];

            masku |= yuvu;
            maskv |= yuvv;
            histu[yuvu]++;
            difu += abs(yuvu - prev->data[1][cpw+i]);
            histv[yuvv]++;
            difv += abs(yuvv - prev->data[2][cpw+i]);

            histsat[p_sat[i]]++;
            histhue[((int16_t*)p_hue)[i]]++;
        }
        cw  += in->linesize[1];
        cpw += prev->linesize[1];
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    for (fil = 0; fil < FILT_NUMB; fil ++) {
        if (s->filters & 1<<fil) {
            ThreadData td = {
                .in  = in,
                .out = out != in && s->outfilter == fil ? out : NULL,
            };
            memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
            ctx->internal->execute(ctx, filters_def[fil].process8,
                                   &td, s->jobs_rets, s->nb_jobs);
            for (i = 0; i < s->nb_jobs; i++)
                filtot[fil] += s->jobs_rets[i];
        }
    }

    // find low / high based on histogram percentile
    // these only need to be calculated once.

    lowp   = lrint(s->fs  * 10 / 100.);
    highp  = lrint(s->fs  * 90 / 100.);
    clowp  = lrint(s->cfs * 10 / 100.);
    chighp = lrint(s->cfs * 90 / 100.);

    accy = accu = accv = accsat = 0;
    for (fil = 0; fil < DEPTH; fil++) {
        if (miny   < 0 && histy[fil])   miny   = fil;
        if (minu   < 0 && histu[fil])   minu   = fil;
        if (minv   < 0 && histv[fil])   minv   = fil;
        if (minsat < 0 && histsat[fil]) minsat = fil;

        if (histy[fil])   maxy   = fil;
        if (histu[fil])   maxu   = fil;
        if (histv[fil])   maxv   = fil;
        if (histsat[fil]) maxsat = fil;

        toty   += histy[fil]   * fil;
        totu   += histu[fil]   * fil;
        totv   += histv[fil]   * fil;
        totsat += histsat[fil] * fil;

        accy   += histy[fil];
        accu   += histu[fil];
        accv   += histv[fil];
        accsat += histsat[fil];

        if (lowy   == -1 && accy   >=  lowp) lowy   = fil;
        if (lowu   == -1 && accu   >= clowp) lowu   = fil;
        if (lowv   == -1 && accv   >= clowp) lowv   = fil;
        if (lowsat == -1 && accsat >= clowp) lowsat = fil;

        if (highy   == -1 && accy   >=  highp) highy   = fil;
        if (highu   == -1 && accu   >= chighp) highu   = fil;
        if (highv   == -1 && accv   >= chighp) highv   = fil;
        if (highsat == -1 && accsat >= chighp) highsat = fil;
    }

    maxhue = histhue[0];
    medhue = -1;
    for (fil = 0; fil < 360; fil++) {
        tothue += histhue[fil] * fil;
        acchue += histhue[fil];

        if (medhue == -1 && acchue > s->cfs / 2)
            medhue = fil;
        if (histhue[fil] > maxhue) {
            maxhue = histhue[fil];
        }
    }

    av_frame_free(&s->frame_prev);
    s->frame_prev = av_frame_clone(in);

#define SET_META(key, fmt, val) do {                                      \
    snprintf(metabuf, sizeof(metabuf), fmt, val);                         \
    av_dict_set(&out->metadata, "lavfi.signalstats." key, metabuf, 0);    \
} while (0)

    SET_META("YMIN",    "%d", miny);
    SET_META("YLOW",    "%d", lowy);
    SET_META("YAVG",    "%g", 1.0 * toty / s->fs);
    SET_META("YHIGH",   "%d", highy);
    SET_META("YMAX",    "%d", maxy);

    SET_META("UMIN",    "%d", minu);
    SET_META("ULOW",    "%d", lowu);
    SET_META("UAVG",    "%g", 1.0 * totu / s->cfs);
    SET_META("UHIGH",   "%d", highu);
    SET_META("UMAX",    "%d", maxu);

    SET_META("VMIN",    "%d", minv);
    SET_META("VLOW",    "%d", lowv);
    SET_META("VAVG",    "%g", 1.0 * totv / s->cfs);
    SET_META("VHIGH",   "%d", highv);
    SET_META("VMAX",    "%d", maxv);

    SET_META("SATMIN",  "%d", minsat);
    SET_META("SATLOW",  "%d", lowsat);
    SET_META("SATAVG",  "%g", 1.0 * totsat / s->cfs);
    SET_META("SATHIGH", "%d", highsat);
    SET_META("SATMAX",  "%d", maxsat);

    SET_META("HUEMED",  "%d", medhue);
    SET_META("HUEAVG",  "%g", 1.0 * tothue / s->cfs);

    SET_META("YDIF",    "%g", 1.0 * dify / s->fs);
    SET_META("UDIF",    "%g", 1.0 * difu / s->cfs);
    SET_META("VDIF",    "%g", 1.0 * difv / s->cfs);

    SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
    SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
    SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));

    for (fil = 0; fil < FILT_NUMB; fil ++) {
        if (s->filters & 1<<fil) {
            char metaname[128];
            snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
            snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
            av_dict_set(&out->metadata, metaname, metabuf, 0);
        }
    }

    if (in != out)
        av_frame_free(&in);
    return ff_filter_frame(outlink, out);
}

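/* High bit depth variant of filter_frame8: same flow, but the histograms
 * live in heap arrays sized 1 << depth and the accumulators are 64-bit to
 * avoid overflow with larger sample values. */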
static int filter_frame16(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;
    SignalstatsContext *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *out = in;
    int i, j;
    int  w = 0,  cw = 0, // in
        pw = 0, cpw = 0; // prev
    int fil;
    char metabuf[128];
    unsigned int *histy = s->histy,
                 *histu = s->histu,
                 *histv = s->histv,
                 histhue[360] = {0},
                 *histsat = s->histsat;
    int miny = -1, minu = -1, minv = -1;
    int maxy = -1, maxu = -1, maxv = -1;
    int lowy = -1, lowu = -1, lowv = -1;
    int highy = -1, highu = -1, highv = -1;
    int minsat = -1, maxsat = -1, lowsat = -1, highsat = -1;
    int lowp, highp, clowp, chighp;
    int accy, accu, accv;
    int accsat, acchue = 0;
    int medhue, maxhue;
    int64_t toty = 0, totu = 0, totv = 0, totsat = 0;
    int64_t tothue = 0;
    int64_t dify = 0, difu = 0, difv = 0;
    uint16_t masky = 0, masku = 0, maskv = 0;

    int filtot[FILT_NUMB] = {0};
    AVFrame *prev;

    AVFrame *sat = s->frame_sat;
    AVFrame *hue = s->frame_hue;
    const uint16_t *p_sat = (uint16_t *)sat->data[0];
    const uint16_t *p_hue = (uint16_t *)hue->data[0];
    const int lsz_sat = sat->linesize[0] / 2;
    const int lsz_hue = hue->linesize[0] / 2;
    ThreadDataHueSatMetrics td_huesat = {
        .src     = in,
        .dst_sat = sat,
        .dst_hue = hue,
    };

    if (!s->frame_prev)
        s->frame_prev = av_frame_clone(in);

    prev = s->frame_prev;

    if (s->outfilter != FILTER_NONE) {
        out = av_frame_clone(in);
        av_frame_make_writable(out);
    }

    ctx->internal->execute(ctx, compute_sat_hue_metrics16, &td_huesat,
                           NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));

    // Calculate luma histogram and difference with previous frame or field.
    memset(s->histy, 0, (1 << s->depth) * sizeof(*s->histy));
    for (j = 0; j < link->h; j++) {
        for (i = 0; i < link->w; i++) {
            const int yuv = AV_RN16(in->data[0] + w + i * 2);

            masky |= yuv;
            histy[yuv]++;
            dify += abs(yuv - AV_RN16(prev->data[0] + pw + i * 2));
        }
        w  += in->linesize[0];
        pw += prev->linesize[0];
    }

    // Calculate chroma histogram and difference with previous frame or field.
    memset(s->histu, 0, (1 << s->depth) * sizeof(*s->histu));
    memset(s->histv, 0, (1 << s->depth) * sizeof(*s->histv));
    memset(s->histsat, 0, (1 << s->depth) * sizeof(*s->histsat));
    for (j = 0; j < s->chromah; j++) {
        for (i = 0; i < s->chromaw; i++) {
            const int yuvu = AV_RN16(in->data[1] + cw + i * 2);
            const int yuvv = AV_RN16(in->data[2] + cw + i * 2);

            masku |= yuvu;
            maskv |= yuvv;
            histu[yuvu]++;
            difu += abs(yuvu - AV_RN16(prev->data[1] + cpw + i * 2));
            histv[yuvv]++;
            difv += abs(yuvv - AV_RN16(prev->data[2] + cpw + i * 2));

            histsat[p_sat[i]]++;
            histhue[((int16_t*)p_hue)[i]]++;
        }
        cw  += in->linesize[1];
        cpw += prev->linesize[1];
        p_sat += lsz_sat;
        p_hue += lsz_hue;
    }

    for (fil = 0; fil < FILT_NUMB; fil ++) {
        if (s->filters & 1<<fil) {
            ThreadData td = {
                .in  = in,
                .out = out != in && s->outfilter == fil ? out : NULL,
            };
            memset(s->jobs_rets, 0, s->nb_jobs * sizeof(*s->jobs_rets));
            ctx->internal->execute(ctx, filters_def[fil].process16,
                                   &td, s->jobs_rets, s->nb_jobs);
            for (i = 0; i < s->nb_jobs; i++)
                filtot[fil] += s->jobs_rets[i];
        }
    }

    // find low / high based on histogram percentile
    // these only need to be calculated once.

    lowp   = lrint(s->fs  * 10 / 100.);
    highp  = lrint(s->fs  * 90 / 100.);
    clowp  = lrint(s->cfs * 10 / 100.);
    chighp = lrint(s->cfs * 90 / 100.);

    accy = accu = accv = accsat = 0;
    for (fil = 0; fil < 1 << s->depth; fil++) {
        if (miny   < 0 && histy[fil])   miny   = fil;
        if (minu   < 0 && histu[fil])   minu   = fil;
        if (minv   < 0 && histv[fil])   minv   = fil;
        if (minsat < 0 && histsat[fil]) minsat = fil;

        if (histy[fil])   maxy   = fil;
        if (histu[fil])   maxu   = fil;
        if (histv[fil])   maxv   = fil;
        if (histsat[fil]) maxsat = fil;

        toty   += histy[fil]   * fil;
        totu   += histu[fil]   * fil;
        totv   += histv[fil]   * fil;
        totsat += histsat[fil] * fil;

        accy   += histy[fil];
        accu   += histu[fil];
        accv   += histv[fil];
        accsat += histsat[fil];

        if (lowy   == -1 && accy   >=  lowp) lowy   = fil;
        if (lowu   == -1 && accu   >= clowp) lowu   = fil;
        if (lowv   == -1 && accv   >= clowp) lowv   = fil;
        if (lowsat == -1 && accsat >= clowp) lowsat = fil;

        if (highy   == -1 && accy   >=  highp) highy   = fil;
        if (highu   == -1 && accu   >= chighp) highu   = fil;
        if (highv   == -1 && accv   >= chighp) highv   = fil;
        if (highsat == -1 && accsat >= chighp) highsat = fil;
    }

    maxhue = histhue[0];
    medhue = -1;
    for (fil = 0; fil < 360; fil++) {
        tothue += histhue[fil] * fil;
        acchue += histhue[fil];

        if (medhue == -1 && acchue > s->cfs / 2)
            medhue = fil;
        if (histhue[fil] > maxhue) {
            maxhue = histhue[fil];
        }
    }

    av_frame_free(&s->frame_prev);
    s->frame_prev = av_frame_clone(in);

    SET_META("YMIN",    "%d", miny);
    SET_META("YLOW",    "%d", lowy);
    SET_META("YAVG",    "%g", 1.0 * toty / s->fs);
    SET_META("YHIGH",   "%d", highy);
    SET_META("YMAX",    "%d", maxy);

    SET_META("UMIN",    "%d", minu);
    SET_META("ULOW",    "%d", lowu);
    SET_META("UAVG",    "%g", 1.0 * totu / s->cfs);
    SET_META("UHIGH",   "%d", highu);
    SET_META("UMAX",    "%d", maxu);

    SET_META("VMIN",    "%d", minv);
    SET_META("VLOW",    "%d", lowv);
    SET_META("VAVG",    "%g", 1.0 * totv / s->cfs);
    SET_META("VHIGH",   "%d", highv);
    SET_META("VMAX",    "%d", maxv);

    SET_META("SATMIN",  "%d", minsat);
    SET_META("SATLOW",  "%d", lowsat);
    SET_META("SATAVG",  "%g", 1.0 * totsat / s->cfs);
    SET_META("SATHIGH", "%d", highsat);
    SET_META("SATMAX",  "%d", maxsat);

    SET_META("HUEMED",  "%d", medhue);
    SET_META("HUEAVG",  "%g", 1.0 * tothue / s->cfs);

    SET_META("YDIF",    "%g", 1.0 * dify / s->fs);
    SET_META("UDIF",    "%g", 1.0 * difu / s->cfs);
    SET_META("VDIF",    "%g", 1.0 * difv / s->cfs);

    SET_META("YBITDEPTH", "%d", compute_bit_depth(masky));
    SET_META("UBITDEPTH", "%d", compute_bit_depth(masku));
    SET_META("VBITDEPTH", "%d", compute_bit_depth(maskv));

    for (fil = 0; fil < FILT_NUMB; fil ++) {
        if (s->filters & 1<<fil) {
            char metaname[128];
            snprintf(metabuf, sizeof(metabuf), "%g", 1.0 * filtot[fil] / s->fs);
            snprintf(metaname, sizeof(metaname), "lavfi.signalstats.%s", filters_def[fil].name);
            av_dict_set(&out->metadata, metaname, metabuf, 0);
        }
    }

    if (in != out)
        av_frame_free(&in);
    return ff_filter_frame(outlink, out);
}

static int filter_frame(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;
    SignalstatsContext *s = ctx->priv;

    if (s->depth > 8)
        return filter_frame16(link, in);
    else
        return filter_frame8(link, in);
}

static const AVFilterPad signalstats_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
    },
    { NULL }
};

static const AVFilterPad signalstats_outputs[] = {
    {
        .name         = "default",
        .config_props = config_props,
        .type         = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

AVFilter ff_vf_signalstats = {
    .name          = "signalstats",
    .description   = "Generate statistics from video analysis.",
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,
    .priv_size     = sizeof(SignalstatsContext),
    .inputs        = signalstats_inputs,
    .outputs       = signalstats_outputs,
    .priv_class    = &signalstats_class,
    .flags         = AVFILTER_FLAG_SLICE_THREADS,
};