/*
 * Copyright (c) 2016 Thilo Borgmann
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Video processing based on Apple's CoreImage API
 */
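
/* Example usage (illustrative; CIBoxBlur and CIVignette are stock CoreImage
 * filter names, INPUT/OUTPUT and the option value are placeholders): apply a
 * box blur with default options, then a vignette with one option overridden.
 * Filters are separated by '#', options by '@', as parsed in init() below:
 *
 *     ffmpeg -i INPUT -vf coreimage=filter=CIBoxBlur@default#CIVignette@inputIntensity=0.5 OUTPUT
 */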

#import <QuartzCore/CoreImage.h>
#import <AppKit/AppKit.h>

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
#include "libavutil/internal.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

typedef struct CoreImageContext {
    const AVClass   *class;

    int             is_video_source;    ///< filter is used as video source

    int             w, h;               ///< video size
    AVRational      sar;                ///< sample aspect ratio
    AVRational      frame_rate;         ///< video frame rate
    AVRational      time_base;          ///< stream time base
    int64_t         duration;           ///< duration expressed in microseconds
    int64_t         pts;                ///< increasing presentation time stamp
    AVFrame         *picref;            ///< cached reference containing the painted picture

    CFTypeRef       glctx;              ///< OpenGL context
    CGContextRef    cgctx;              ///< Bitmap context for image copy
    CFTypeRef       input_image;        ///< Input image container for passing into Core Image API
    CGColorSpaceRef color_space;        ///< Common color space for input image and cgcontext
    int             bits_per_component; ///< Shared bpc for input-output operation

    char            *filter_string;     ///< The complete user-provided filter definition
    CFTypeRef       *filters;           ///< CIFilter objects for all requested filters
    int             num_filters;        ///< Number of filters in *filters

    char            *output_rect;       ///< Rectangle to be filled with filter input
    int             list_filters;       ///< Option used to list all available filters including generators
    int             list_generators;    ///< Option used to list all available generators
} CoreImageContext;

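/** Set the output link properties of the video source from the user-provided
 *  options and derive the bits per component from the negotiated pixel format.
 */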
static int config_output(AVFilterLink *link)
{
    CoreImageContext *ctx = link->src->priv;

    link->w = ctx->w;
    link->h = ctx->h;
    link->sample_aspect_ratio = ctx->sar;
    link->frame_rate = ctx->frame_rate;
    link->time_base = ctx->time_base;

    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);
    ctx->bits_per_component = av_get_bits_per_pixel(desc) / desc->nb_components;

    return 0;
}

/** Determine image properties from input link of filter chain.
 */
static int config_input(AVFilterLink *link)
{
    CoreImageContext *ctx = link->dst->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format);
    ctx->bits_per_component = av_get_bits_per_pixel(desc) / desc->nb_components;

    return 0;
}

/** Print a list of all available filters including options and respective value ranges and defaults.
 */
static void list_filters(CoreImageContext *ctx)
{
    // querying filters and attributes
    NSArray *filter_categories = nil;

    if (ctx->list_generators && !ctx->list_filters) {
        filter_categories = [NSArray arrayWithObjects:kCICategoryGenerator, nil];
    }

    NSArray *filter_names = [CIFilter filterNamesInCategories:filter_categories];
    NSEnumerator *filters = [filter_names objectEnumerator];

    NSString *filter_name;
    while ((filter_name = [filters nextObject])) {
        av_log(ctx, AV_LOG_INFO, "Filter: %s\n", [filter_name UTF8String]);
        NSString *input;

        CIFilter *filter = [CIFilter filterWithName:filter_name];
        NSDictionary *filter_attribs = [filter attributes]; // <nsstring, id>
        NSArray *filter_inputs = [filter inputKeys];        // <nsstring>

        for (input in filter_inputs) {
            NSDictionary *input_attribs = [filter_attribs valueForKey:input];
            NSString *input_class = [input_attribs valueForKey:kCIAttributeClass];
            if ([input_class isEqualToString:@"NSNumber"]) {
                NSNumber *value_default = [input_attribs valueForKey:kCIAttributeDefault];
                NSNumber *value_min = [input_attribs valueForKey:kCIAttributeSliderMin];
                NSNumber *value_max = [input_attribs valueForKey:kCIAttributeSliderMax];

                av_log(ctx, AV_LOG_INFO, "\tOption: %s\t[%s]\t[%s %s][%s]\n",
                       [input UTF8String],
                       [input_class UTF8String],
                       [[value_min stringValue] UTF8String],
                       [[value_max stringValue] UTF8String],
                       [[value_default stringValue] UTF8String]);
            } else {
                av_log(ctx, AV_LOG_INFO, "\tOption: %s\t[%s]\n",
                       [input UTF8String],
                       [input_class UTF8String]);
            }
        }
    }
}

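/** Advertise packed ARGB as the only supported pixel format; the bitmap copy
 *  in apply_filter() assumes 8-bit ARGB with premultiplied alpha.
 */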
static int query_formats(AVFilterContext *fctx)
{
    static const enum AVPixelFormat inout_fmts_rgb[] = {
        AV_PIX_FMT_ARGB,
        AV_PIX_FMT_NONE
    };

    AVFilterFormats *inout_formats;
    int ret;

    if (!(inout_formats = ff_make_format_list(inout_fmts_rgb))) {
        return AVERROR(ENOMEM);
    }

    if ((ret = ff_formats_ref(inout_formats, &fctx->inputs[0]->out_formats)) < 0 ||
        (ret = ff_formats_ref(inout_formats, &fctx->outputs[0]->in_formats)) < 0) {
        return ret;
    }

    return 0;
}

static int query_formats_src(AVFilterContext *fctx)
{
    static const enum AVPixelFormat inout_fmts_rgb[] = {
        AV_PIX_FMT_ARGB,
        AV_PIX_FMT_NONE
    };

    AVFilterFormats *inout_formats;
    int ret;

    if (!(inout_formats = ff_make_format_list(inout_fmts_rgb))) {
        return AVERROR(ENOMEM);
    }

    if ((ret = ff_formats_ref(inout_formats, &fctx->outputs[0]->in_formats)) < 0) {
        return ret;
    }

    return 0;
}

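/** Wrap the frame data into a CIImage, run it through all requested CIFilter
 *  objects in order and draw the result back into the frame data via a
 *  bitmap context.
 */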
static int apply_filter(CoreImageContext *ctx, AVFilterLink *link, AVFrame *frame)
{
    int i;

    // (re-)initialize input image
    const CGSize frame_size = {
        frame->width,
        frame->height
    };

    NSData *data = [NSData dataWithBytesNoCopy:frame->data[0]
                                        length:frame->height*frame->linesize[0]
                                  freeWhenDone:NO];

    CIImage *ret = [(__bridge CIImage*)ctx->input_image initWithBitmapData:data
                                                               bytesPerRow:frame->linesize[0]
                                                                      size:frame_size
                                                                    format:kCIFormatARGB8
                                                                colorSpace:ctx->color_space]; //kCGColorSpaceGenericRGB
    if (!ret) {
        av_log(ctx, AV_LOG_ERROR, "Input image could not be initialized.\n");
        return AVERROR_EXTERNAL;
    }

    CIFilter *filter       = NULL;
    CIImage *filter_input  = (__bridge CIImage*)ctx->input_image;
    CIImage *filter_output = NULL;

    // successively apply all filters
    for (i = 0; i < ctx->num_filters; i++) {
        if (i) {
            // set filter input to previous filter output
            filter_input = [(__bridge CIImage*)ctx->filters[i-1] valueForKey:kCIOutputImageKey];
            CGRect out_rect = [filter_input extent];
            if (out_rect.size.width > frame->width || out_rect.size.height > frame->height) {
                // do not keep padded image regions after filtering
                out_rect.origin.x    = 0.0f;
                out_rect.origin.y    = 0.0f;
                out_rect.size.width  = frame->width;
                out_rect.size.height = frame->height;
            }
            filter_input = [filter_input imageByCroppingToRect:out_rect];
        }

        filter = (__bridge CIFilter*)ctx->filters[i];

        // do not set input image for the first filter if used as video source
        if (!ctx->is_video_source || i) {
            @try {
                [filter setValue:filter_input forKey:kCIInputImageKey];
            } @catch (NSException *exception) {
                if (![[exception name] isEqualToString:NSUndefinedKeyException]) {
                    av_log(ctx, AV_LOG_ERROR, "An error occurred: %s.\n", [exception.reason UTF8String]);
                    return AVERROR_EXTERNAL;
                } else {
                    av_log(ctx, AV_LOG_WARNING, "Selected filter does not accept an input image.\n");
                }
            }
        }
    }

    // get output of last filter
    filter_output = [filter valueForKey:kCIOutputImageKey];

    if (!filter_output) {
        av_log(ctx, AV_LOG_ERROR, "Filter output not available.\n");
        return AVERROR_EXTERNAL;
    }

    // do not keep padded image regions after filtering
    CGRect out_rect = [filter_output extent];
    if (out_rect.size.width > frame->width || out_rect.size.height > frame->height) {
        av_log(ctx, AV_LOG_DEBUG, "Cropping output image.\n");
        out_rect.origin.x    = 0.0f;
        out_rect.origin.y    = 0.0f;
        out_rect.size.width  = frame->width;
        out_rect.size.height = frame->height;
    }

    CGImageRef out = [(__bridge CIContext*)ctx->glctx createCGImage:filter_output
                                                           fromRect:out_rect];

    if (!out) {
        av_log(ctx, AV_LOG_ERROR, "Cannot create valid output image.\n");
        return AVERROR_EXTERNAL;
    }

    // create bitmap context on the fly for rendering into current frame->data[]
    if (ctx->cgctx) {
        CGContextRelease(ctx->cgctx);
        ctx->cgctx = NULL;
    }
    size_t out_width  = CGImageGetWidth(out);
    size_t out_height = CGImageGetHeight(out);

    if (out_width > frame->width || out_height > frame->height) { // this might result in segfault
        av_log(ctx, AV_LOG_WARNING, "Output image has unexpected size: %lux%lu (expected: %ix%i). This may crash...\n",
               out_width, out_height, frame->width, frame->height);
    }
    ctx->cgctx = CGBitmapContextCreate(frame->data[0],
                                       frame->width,
                                       frame->height,
                                       ctx->bits_per_component,
                                       frame->linesize[0],
                                       ctx->color_space,
                                       (uint32_t)kCGImageAlphaPremultipliedFirst); // ARGB
    if (!ctx->cgctx) {
        av_log(ctx, AV_LOG_ERROR, "CGBitmap context cannot be created.\n");
        CGImageRelease(out);
        return AVERROR_EXTERNAL;
    }

    // copy ("draw") the output image into the frame data
    CGRect rect = {{0,0},{frame->width, frame->height}};
    if (ctx->output_rect) {
        @try {
            NSString *tmp_string = [NSString stringWithUTF8String:ctx->output_rect];
            NSRect tmp = NSRectFromString(tmp_string);
            rect = NSRectToCGRect(tmp);
        } @catch (NSException *exception) {
            av_log(ctx, AV_LOG_ERROR, "An error occurred: %s.\n", [exception.reason UTF8String]);
            CGImageRelease(out);
            return AVERROR_EXTERNAL;
        }
        if (rect.size.width == 0.0f) {
            av_log(ctx, AV_LOG_WARNING, "Width of output rect is zero.\n");
        }
        if (rect.size.height == 0.0f) {
            av_log(ctx, AV_LOG_WARNING, "Height of output rect is zero.\n");
        }
    }

    CGContextDrawImage(ctx->cgctx, rect, out);
    CGImageRelease(out); // createCGImage: follows the Create rule, so release the image here

    return ff_filter_frame(link, frame);
}

/** Apply all valid filters successively to the input image.
 *  The final output image is copied from the GPU by "drawing" using a bitmap context.
 */
static int filter_frame(AVFilterLink *link, AVFrame *frame)
{
    return apply_filter(link->dst->priv, link->dst->outputs[0], frame);
}

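/** Produce a frame for the video source: clone the cached picture reference,
 *  stamp an increasing pts and run it through the filter chain; signal EOF
 *  once the requested duration is reached.
 */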
static int request_frame(AVFilterLink *link)
{
    CoreImageContext *ctx = link->src->priv;
    AVFrame *frame;

    if (ctx->duration >= 0 &&
        av_rescale_q(ctx->pts, ctx->time_base, AV_TIME_BASE_Q) >= ctx->duration) {
        return AVERROR_EOF;
    }

    if (!ctx->picref) {
        ctx->picref = ff_get_video_buffer(link, ctx->w, ctx->h);
        if (!ctx->picref) {
            return AVERROR(ENOMEM);
        }
    }

    frame = av_frame_clone(ctx->picref);
    if (!frame) {
        return AVERROR(ENOMEM);
    }

    frame->pts                 = ctx->pts;
    frame->key_frame           = 1;
    frame->interlaced_frame    = 0;
    frame->pict_type           = AV_PICTURE_TYPE_I;
    frame->sample_aspect_ratio = ctx->sar;

    ctx->pts++;

    return apply_filter(ctx, link, frame);
}

/** Set an option of the given filter to the provided key-value pair.
 */
static void set_option(CoreImageContext *ctx, CIFilter *filter, const char *key, const char *value)
{
    NSString *input_key = [NSString stringWithUTF8String:key];
    NSString *input_val = [NSString stringWithUTF8String:value];

    NSDictionary *filter_attribs = [filter attributes]; // <nsstring, id>
    NSDictionary *input_attribs  = [filter_attribs valueForKey:input_key];

    if (!input_attribs) {
        av_log(ctx, AV_LOG_WARNING, "Skipping unknown option: \"%s\".\n",
               [input_key UTF8String]); // [[filter name] UTF8String]) not currently defined...
        return;
    }

    NSString *input_class = [input_attribs valueForKey:kCIAttributeClass];
    NSString *input_type  = [input_attribs valueForKey:kCIAttributeType];

    av_log(ctx, AV_LOG_DEBUG, "key: %s, val: %s, #attribs: %lu, class: %s, type: %s\n",
           [input_key UTF8String],
           [input_val UTF8String],
           (unsigned long)[input_attribs count],
           [input_class UTF8String],
           [input_type UTF8String]);

    if ([input_class isEqualToString:@"NSNumber"]) {
        float input = input_val.floatValue;
        NSNumber *max_value  = [input_attribs valueForKey:kCIAttributeSliderMax];
        NSNumber *min_value  = [input_attribs valueForKey:kCIAttributeSliderMin];
        NSNumber *used_value = nil;

#define CLAMP_WARNING do { \
av_log(ctx, AV_LOG_WARNING, "Value of \"%f\" for option \"%s\" is out of range [%f %f], clamping to \"%f\".\n", \
       input, \
       [input_key UTF8String], \
       min_value.floatValue, \
       max_value.floatValue, \
       used_value.floatValue); \
} while(0)
        if (input > max_value.floatValue) {
            used_value = max_value;
            CLAMP_WARNING;
        } else if (input < min_value.floatValue) {
            used_value = min_value;
            CLAMP_WARNING;
        } else {
            used_value = [NSNumber numberWithFloat:input];
        }

        [filter setValue:used_value forKey:input_key];
    } else if ([input_class isEqualToString:@"CIVector"]) {
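        // CIVector parses its own string representation, e.g. "[1.0 2.0]"
        // for a 2D vector (illustrative values)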
        CIVector *input = [CIVector vectorWithString:input_val];

        if (!input) {
            av_log(ctx, AV_LOG_WARNING, "Skipping invalid CIVector description: \"%s\".\n",
                   [input_val UTF8String]);
            return;
        }

        [filter setValue:input forKey:input_key];
    } else if ([input_class isEqualToString:@"CIColor"]) {
        CIColor *input = [CIColor colorWithString:input_val];

        if (!input) {
            av_log(ctx, AV_LOG_WARNING, "Skipping invalid CIColor description: \"%s\".\n",
                   [input_val UTF8String]);
            return;
        }

        [filter setValue:input forKey:input_key];
    } else if ([input_class isEqualToString:@"NSString"]) { // set value directly as string
        [filter setValue:input_val forKey:input_key];
    } else if ([input_class isEqualToString:@"NSData"]) { // set value as raw bytes using latin1 encoding
        NSData *input = [NSData dataWithBytes:(const void*)[input_val cStringUsingEncoding:NSISOLatin1StringEncoding]
                                       length:[input_val lengthOfBytesUsingEncoding:NSISOLatin1StringEncoding]];

        if (!input) {
            av_log(ctx, AV_LOG_WARNING, "Skipping invalid NSData description: \"%s\".\n",
                   [input_val UTF8String]);
            return;
        }

        [filter setValue:input forKey:input_key];
    } else {
        av_log(ctx, AV_LOG_WARNING, "Skipping unsupported option class: \"%s\".\n",
               [input_class UTF8String]);
        avpriv_report_missing_feature(ctx, "Handling of some option classes");
        return;
    }
}

/** Create a filter object by a given name and set all options to defaults.
 *  Overwrite any option given by the user to the provided value in filter_options.
 */
static CIFilter* create_filter(CoreImageContext *ctx, const char *filter_name, AVDictionary *filter_options)
{
    // create filter object
    CIFilter *filter = [CIFilter filterWithName:[NSString stringWithUTF8String:filter_name]];

    // set default options
    [filter setDefaults];

    // set user options
    if (filter_options) {
        AVDictionaryEntry *o = NULL;
        while ((o = av_dict_get(filter_options, "", o, AV_DICT_IGNORE_SUFFIX))) {
            set_option(ctx, filter, o->key, o->value);
        }
    }

    return filter;
}

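/** Parse the filter string into CIFilter objects with their options applied,
 *  then create the Core Image GPU context and an empty input image container.
 */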
static av_cold int init(AVFilterContext *fctx)
{
    CoreImageContext *ctx = fctx->priv;
    AVDictionary *filter_dict = NULL;
    AVDictionaryEntry *f = NULL;
    AVDictionaryEntry *o = NULL;
    int ret;
    int i;

    if (ctx->list_filters || ctx->list_generators) {
        list_filters(ctx);
        return AVERROR_EXIT;
    }

    if (ctx->filter_string) {
        // parse filter string (filter=name@opt=val@opt2=val2#name2@opt3=val3) for filters separated by #
        av_log(ctx, AV_LOG_DEBUG, "Filter_string: %s\n", ctx->filter_string);
        ret = av_dict_parse_string(&filter_dict, ctx->filter_string, "@", "#", AV_DICT_MULTIKEY); // parse filter_name:all_filter_options
        if (ret) {
            av_log(ctx, AV_LOG_ERROR, "Parsing of filters failed.\n");
            return AVERROR(EIO);
        }
        ctx->num_filters = av_dict_count(filter_dict);
        av_log(ctx, AV_LOG_DEBUG, "Filter count: %i\n", ctx->num_filters);

        // allocate CIFilter array
        ctx->filters = av_mallocz_array(ctx->num_filters, sizeof(CIFilter*));
        if (!ctx->filters) {
            av_log(ctx, AV_LOG_ERROR, "Could not allocate filter array.\n");
            return AVERROR(ENOMEM);
        }

        // parse filters for option key-value pairs (opt=val@opt2=val2) separated by @
        i = 0;
        while ((f = av_dict_get(filter_dict, "", f, AV_DICT_IGNORE_SUFFIX))) {
            AVDictionary *filter_options = NULL;

            if (strncmp(f->value, "default", 7)) { // not default
                ret = av_dict_parse_string(&filter_options, f->value, "=", "@", 0); // parse option_name:option_value
                if (ret) {
                    av_log(ctx, AV_LOG_ERROR, "Parsing of filter options for \"%s\" failed.\n", f->key);
                    return AVERROR(EIO);
                }
            }

            if (av_log_get_level() >= AV_LOG_DEBUG) {
                av_log(ctx, AV_LOG_DEBUG, "Creating filter %i: \"%s\":\n", i, f->key);
                if (!filter_options) {
                    av_log(ctx, AV_LOG_DEBUG, "\tusing default options\n");
                } else {
                    while ((o = av_dict_get(filter_options, "", o, AV_DICT_IGNORE_SUFFIX))) {
                        av_log(ctx, AV_LOG_DEBUG, "\t%s: %s\n", o->key, o->value);
                    }
                }
            }

            ctx->filters[i] = CFBridgingRetain(create_filter(ctx, f->key, filter_options));
            if (!ctx->filters[i]) {
                av_log(ctx, AV_LOG_ERROR, "Could not create filter \"%s\".\n", f->key);
                return AVERROR(EINVAL);
            }

            i++;
        }
    } else {
        av_log(ctx, AV_LOG_ERROR, "No filters specified.\n");
        return AVERROR(EINVAL);
    }

    // create GPU context on OSX
    const NSOpenGLPixelFormatAttribute attr[] = {
        NSOpenGLPFAAccelerated,
        NSOpenGLPFANoRecovery,
        NSOpenGLPFAColorSize, 32,
        0
    };

    NSOpenGLPixelFormat *pixel_format = [[NSOpenGLPixelFormat alloc] initWithAttributes:(void *)&attr];
    ctx->color_space = CGColorSpaceCreateWithName(kCGColorSpaceGenericRGB);
    ctx->glctx = CFBridgingRetain([CIContext contextWithCGLContext:CGLGetCurrentContext()
                                                       pixelFormat:[pixel_format CGLPixelFormatObj]
                                                        colorSpace:ctx->color_space
                                                           options:nil]);

    if (!ctx->glctx) {
        av_log(ctx, AV_LOG_ERROR, "CIContext not created.\n");
        return AVERROR_EXTERNAL;
    }

    // Creating an empty input image as input container for the context
    ctx->input_image = CFBridgingRetain([CIImage emptyImage]);

    return 0;
}

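/** Additional initialization for the video source: derive the time base from
 *  the requested frame rate and start the pts counter at zero.
 */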
static av_cold int init_src(AVFilterContext *fctx)
{
    CoreImageContext *ctx = fctx->priv;

    ctx->is_video_source = 1;
    ctx->time_base       = av_inv_q(ctx->frame_rate);
    ctx->pts             = 0;

    return init(fctx);
}

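/** Release all Core Foundation and Core Graphics objects as well as the
 *  filter array and the cached source frame.
 */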
static av_cold void uninit(AVFilterContext *fctx)
{
#define SafeCFRelease(ptr) do { \
    if (ptr) {                  \
        CFRelease(ptr);         \
        ptr = NULL;             \
    }                           \
} while (0)

    CoreImageContext *ctx = fctx->priv;

    SafeCFRelease(ctx->glctx);
    SafeCFRelease(ctx->cgctx);
    SafeCFRelease(ctx->color_space);
    SafeCFRelease(ctx->input_image);

    if (ctx->filters) {
        for (int i = 0; i < ctx->num_filters; i++) {
            SafeCFRelease(ctx->filters[i]);
        }
        av_freep(&ctx->filters);
    }

    av_frame_free(&ctx->picref);
}

static const AVFilterPad vf_coreimage_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
        .config_props = config_input,
    },
    { NULL }
};

static const AVFilterPad vf_coreimage_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

static const AVFilterPad vsrc_coreimagesrc_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_output,
    },
    { NULL }
};

#define OFFSET(x) offsetof(CoreImageContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

#define GENERATOR_OPTIONS \
    {"size", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS}, \
    {"s", "set video size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str = "320x240"}, 0, 0, FLAGS}, \
    {"rate", "set video rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS}, \
    {"r", "set video rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, FLAGS}, \
    {"duration", "set video duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = -1}, -1, INT64_MAX, FLAGS}, \
    {"d", "set video duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = -1}, -1, INT64_MAX, FLAGS}, \
    {"sar", "set video sample aspect ratio", OFFSET(sar), AV_OPT_TYPE_RATIONAL, {.dbl = 1}, 0, INT_MAX, FLAGS},

#define FILTER_OPTIONS \
    {"list_filters", "list available filters", OFFSET(list_filters), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, .flags = FLAGS}, \
    {"list_generators", "list available generators", OFFSET(list_generators), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, .flags = FLAGS}, \
    {"filter", "names and options of filters to apply", OFFSET(filter_string), AV_OPT_TYPE_STRING, {.str = NULL}, .flags = FLAGS}, \
    {"output_rect", "output rectangle within output image", OFFSET(output_rect), AV_OPT_TYPE_STRING, {.str = NULL}, .flags = FLAGS},

// definitions for coreimage video filter
static const AVOption coreimage_options[] = {
    FILTER_OPTIONS
    { NULL }
};

AVFILTER_DEFINE_CLASS(coreimage);

AVFilter ff_vf_coreimage = {
    .name          = "coreimage",
    .description   = NULL_IF_CONFIG_SMALL("Video filtering using CoreImage API."),
    .init          = init,
    .uninit        = uninit,
    .priv_size     = sizeof(CoreImageContext),
    .priv_class    = &coreimage_class,
    .inputs        = vf_coreimage_inputs,
    .outputs       = vf_coreimage_outputs,
    .query_formats = query_formats,
};

// definitions for coreimagesrc video source
static const AVOption coreimagesrc_options[] = {
    GENERATOR_OPTIONS
    FILTER_OPTIONS
    { NULL }
};

AVFILTER_DEFINE_CLASS(coreimagesrc);

AVFilter ff_vsrc_coreimagesrc = {
    .name          = "coreimagesrc",
    .description   = NULL_IF_CONFIG_SMALL("Video source using image generators of CoreImage API."),
    .init          = init_src,
    .uninit        = uninit,
    .priv_size     = sizeof(CoreImageContext),
    .priv_class    = &coreimagesrc_class,
    .inputs        = NULL,
    .outputs       = vsrc_coreimagesrc_outputs,
    .query_formats = query_formats_src,
};