/*
 * PNG image format
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "avcodec.h"
#include "internal.h"
#include "bytestream.h"
#include "lossless_videoencdsp.h"
#include "png.h"
#include "apng.h"

#include "libavutil/avassert.h"
#include "libavutil/crc.h"
#include "libavutil/libm.h"
#include "libavutil/opt.h"
#include "libavutil/color_utils.h"
#include "libavutil/stereo3d.h"

#include <zlib.h>

#define IOBUF_SIZE 4096

typedef struct APNGFctlChunk {
    uint32_t sequence_number;
    uint32_t width, height;
    uint32_t x_offset, y_offset;
    uint16_t delay_num, delay_den;
    uint8_t dispose_op, blend_op;
} APNGFctlChunk;

typedef struct PNGEncContext {
    AVClass *class;
    LLVidEncDSPContext llvidencdsp;

    uint8_t *bytestream;
    uint8_t *bytestream_start;
    uint8_t *bytestream_end;

    int filter_type;

    z_stream zstream;
    uint8_t buf[IOBUF_SIZE];
    int dpi;                     ///< Physical pixel density, in dots per inch, if set
    int dpm;                     ///< Physical pixel density, in dots per meter, if set

    int is_progressive;
    int bit_depth;
    int color_type;
    int bits_per_pixel;

    // APNG
    uint32_t palette_checksum;   // Used to ensure a single unique palette
    uint32_t sequence_number;
    int extra_data_updated;
    uint8_t *extra_data;
    int extra_data_size;

    AVFrame *prev_frame;
    AVFrame *last_frame;
    APNGFctlChunk last_frame_fctl;
    uint8_t *last_frame_packet;
    size_t last_frame_packet_size;
} PNGEncContext;

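/* Extract the pixels belonging to one Adam7 interlace pass from a full
 * source row: bits are repacked for 1-bit input, whole pixels are copied
 * for byte-aligned formats. */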
static void png_get_interlaced_row(uint8_t *dst, int row_size,
                                   int bits_per_pixel, int pass,
                                   const uint8_t *src, int width)
{
    int x, mask, dst_x, j, b, bpp;
    uint8_t *d;
    const uint8_t *s;
    static const int masks[] = {0x80, 0x08, 0x88, 0x22, 0xaa, 0x55, 0xff};

    mask = masks[pass];
    switch (bits_per_pixel) {
    case 1:
        memset(dst, 0, row_size);
        dst_x = 0;
        for (x = 0; x < width; x++) {
            j = (x & 7);
            if ((mask << j) & 0x80) {
                b = (src[x >> 3] >> (7 - j)) & 1;
                dst[dst_x >> 3] |= b << (7 - (dst_x & 7));
                dst_x++;
            }
        }
        break;
    default:
        bpp = bits_per_pixel >> 3;
        d = dst;
        s = src;
        for (x = 0; x < width; x++) {
            j = x & 7;
            if ((mask << j) & 0x80) {
                memcpy(d, s, bpp);
                d += bpp;
            }
            s += bpp;
        }
        break;
    }
}

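/* Paeth filtering: predict each byte from its left, upper and upper-left
 * neighbours, picking the neighbour closest to the linear estimate, and
 * store the residual. */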
static void sub_png_paeth_prediction(uint8_t *dst, uint8_t *src, uint8_t *top,
                                     int w, int bpp)
{
    int i;
    for (i = 0; i < w; i++) {
        int a, b, c, p, pa, pb, pc;

        a = src[i - bpp];
        b = top[i];
        c = top[i - bpp];

        p  = b - c;
        pc = a - c;

        pa = abs(p);
        pb = abs(pc);
        pc = abs(p + pc);

        if (pa <= pb && pa <= pc)
            p = a;
        else if (pb <= pc)
            p = b;
        else
            p = c;
        dst[i] = src[i] - p;
    }
}

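/* Left (SUB) prediction: subtract the previous pixel from each pixel.
 * The first bytes are copied verbatim and a short scalar run brings the
 * pointers up to an aligned boundary before handing off to the DSP routine. */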
static void sub_left_prediction(PNGEncContext *c, uint8_t *dst, const uint8_t *src, int bpp, int size)
{
    const uint8_t *src1 = src + bpp;
    const uint8_t *src2 = src;
    int x, unaligned_w;

    memcpy(dst, src, bpp);
    dst += bpp;
    size -= bpp;
    unaligned_w = FFMIN(32 - bpp, size);
    for (x = 0; x < unaligned_w; x++)
        *dst++ = *src1++ - *src2++;
    size -= unaligned_w;
    c->llvidencdsp.diff_bytes(dst, src1, src2, size);
}

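/* Apply one of the five PNG row filters (None, Sub, Up, Average, Paeth)
 * to a row of raw pixels, writing the filtered bytes to dst. */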
static void png_filter_row(PNGEncContext *c, uint8_t *dst, int filter_type,
                           uint8_t *src, uint8_t *top, int size, int bpp)
{
    int i;

    switch (filter_type) {
    case PNG_FILTER_VALUE_NONE:
        memcpy(dst, src, size);
        break;
    case PNG_FILTER_VALUE_SUB:
        sub_left_prediction(c, dst, src, bpp, size);
        break;
    case PNG_FILTER_VALUE_UP:
        c->llvidencdsp.diff_bytes(dst, src, top, size);
        break;
    case PNG_FILTER_VALUE_AVG:
        for (i = 0; i < bpp; i++)
            dst[i] = src[i] - (top[i] >> 1);
        for (; i < size; i++)
            dst[i] = src[i] - ((src[i - bpp] + top[i]) >> 1);
        break;
    case PNG_FILTER_VALUE_PAETH:
        for (i = 0; i < bpp; i++)
            dst[i] = src[i] - top[i];
        sub_png_paeth_prediction(dst + i, src + i, top + i, size - i, bpp);
        break;
    }
}

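/* Write the filter-type byte followed by the filtered row. With
 * PNG_FILTER_VALUE_MIXED every filter is tried and the one with the
 * smallest sum of absolute residuals is kept, a common heuristic for
 * picking the row filter that deflates best. */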
static uint8_t *png_choose_filter(PNGEncContext *s, uint8_t *dst,
                                  uint8_t *src, uint8_t *top, int size, int bpp)
{
    int pred = s->filter_type;
    av_assert0(bpp || !pred);
    if (!top && pred)
        pred = PNG_FILTER_VALUE_SUB;
    if (pred == PNG_FILTER_VALUE_MIXED) {
        int i;
        int cost, bcost = INT_MAX;
        uint8_t *buf1 = dst, *buf2 = dst + size + 16;
        for (pred = 0; pred < 5; pred++) {
            png_filter_row(s, buf1 + 1, pred, src, top, size, bpp);
            buf1[0] = pred;
            cost = 0;
            for (i = 0; i <= size; i++)
                cost += abs((int8_t) buf1[i]);
            if (cost < bcost) {
                bcost = cost;
                FFSWAP(uint8_t *, buf1, buf2);
            }
        }
        return buf2;
    } else {
        png_filter_row(s, dst + 1, pred, src, top, size, bpp);
        dst[0] = pred;
        return dst;
    }
}

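/* Write a complete PNG chunk: 4-byte big-endian length, 4-byte tag,
 * payload, and the CRC-32 of tag plus payload. */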
static void png_write_chunk(uint8_t **f, uint32_t tag,
                            const uint8_t *buf, int length)
{
    const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
    uint32_t crc = ~0U;
    uint8_t tagbuf[4];

    bytestream_put_be32(f, length);
    AV_WL32(tagbuf, tag);
    crc = av_crc(crc_table, crc, tagbuf, 4);
    bytestream_put_be32(f, av_bswap32(tag));
    if (length > 0) {
        crc = av_crc(crc_table, crc, buf, length);
        memcpy(*f, buf, length);
        *f += length;
    }
    bytestream_put_be32(f, ~crc);
}

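/* Write a block of compressed image data: as an IDAT chunk for plain PNG
 * (and for the first APNG frame), or as an fdAT chunk carrying the running
 * APNG sequence number otherwise. */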
static void png_write_image_data(AVCodecContext *avctx,
                                 const uint8_t *buf, int length)
{
    PNGEncContext *s = avctx->priv_data;
    const AVCRC *crc_table = av_crc_get_table(AV_CRC_32_IEEE_LE);
    uint32_t crc = ~0U;

    if (avctx->codec_id == AV_CODEC_ID_PNG || avctx->frame_number == 0) {
        png_write_chunk(&s->bytestream, MKTAG('I', 'D', 'A', 'T'), buf, length);
        return;
    }

    bytestream_put_be32(&s->bytestream, length + 4);

    bytestream_put_be32(&s->bytestream, MKBETAG('f', 'd', 'A', 'T'));
    bytestream_put_be32(&s->bytestream, s->sequence_number);
    crc = av_crc(crc_table, crc, s->bytestream - 8, 8);

    crc = av_crc(crc_table, crc, buf, length);
    memcpy(s->bytestream, buf, length);
    s->bytestream += length;

    bytestream_put_be32(&s->bytestream, ~crc);

    ++s->sequence_number;
}

/* XXX: do filtering */
static int png_write_row(AVCodecContext *avctx, const uint8_t *data, int size)
{
    PNGEncContext *s = avctx->priv_data;
    int ret;

    s->zstream.avail_in = size;
    s->zstream.next_in  = data;
    while (s->zstream.avail_in > 0) {
        ret = deflate(&s->zstream, Z_NO_FLUSH);
        if (ret != Z_OK)
            return -1;
        if (s->zstream.avail_out == 0) {
            if (s->bytestream_end - s->bytestream > IOBUF_SIZE + 100)
                png_write_image_data(avctx, s->buf, IOBUF_SIZE);
            s->zstream.avail_out = IOBUF_SIZE;
            s->zstream.next_out  = s->buf;
        }
    }
    return 0;
}

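/* cHRM and gAMA store their values as 32-bit integers scaled by 100000. */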
#define AV_WB32_PNG(buf, n) AV_WB32(buf, lrint((n) * 100000))
static int png_get_chrm(enum AVColorPrimaries prim, uint8_t *buf)
{
    double rx, ry, gx, gy, bx, by, wx = 0.3127, wy = 0.3290;
    switch (prim) {
    case AVCOL_PRI_BT709:
        rx = 0.640; ry = 0.330;
        gx = 0.300; gy = 0.600;
        bx = 0.150; by = 0.060;
        break;
    case AVCOL_PRI_BT470M:
        rx = 0.670; ry = 0.330;
        gx = 0.210; gy = 0.710;
        bx = 0.140; by = 0.080;
        wx = 0.310; wy = 0.316;
        break;
    case AVCOL_PRI_BT470BG:
        rx = 0.640; ry = 0.330;
        gx = 0.290; gy = 0.600;
        bx = 0.150; by = 0.060;
        break;
    case AVCOL_PRI_SMPTE170M:
    case AVCOL_PRI_SMPTE240M:
        rx = 0.630; ry = 0.340;
        gx = 0.310; gy = 0.595;
        bx = 0.155; by = 0.070;
        break;
    case AVCOL_PRI_BT2020:
        rx = 0.708; ry = 0.292;
        gx = 0.170; gy = 0.797;
        bx = 0.131; by = 0.046;
        break;
    default:
        return 0;
    }

    AV_WB32_PNG(buf     , wx); AV_WB32_PNG(buf + 4 , wy);
    AV_WB32_PNG(buf + 8 , rx); AV_WB32_PNG(buf + 12, ry);
    AV_WB32_PNG(buf + 16, gx); AV_WB32_PNG(buf + 20, gy);
    AV_WB32_PNG(buf + 24, bx); AV_WB32_PNG(buf + 28, by);
    return 1;
}

static int png_get_gama(enum AVColorTransferCharacteristic trc, uint8_t *buf)
{
    double gamma = avpriv_get_gamma_from_trc(trc);
    if (gamma <= 1e-6)
        return 0;

    AV_WB32_PNG(buf, 1.0 / gamma);
    return 1;
}

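/* Write the chunks that precede the image data: IHDR, pHYs, optional
 * sTER/sRGB/cHRM/gAMA metadata and, for palette formats, PLTE and tRNS. */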
static int encode_headers(AVCodecContext *avctx, const AVFrame *pict)
{
    AVFrameSideData *side_data;
    PNGEncContext *s = avctx->priv_data;

    /* write png header */
    AV_WB32(s->buf, avctx->width);
    AV_WB32(s->buf + 4, avctx->height);
    s->buf[8] = s->bit_depth;
    s->buf[9] = s->color_type;
    s->buf[10] = 0; /* compression type */
    s->buf[11] = 0; /* filter type */
    s->buf[12] = s->is_progressive; /* interlace type */
    png_write_chunk(&s->bytestream, MKTAG('I', 'H', 'D', 'R'), s->buf, 13);

    /* write physical information */
    if (s->dpm) {
        AV_WB32(s->buf, s->dpm);
        AV_WB32(s->buf + 4, s->dpm);
        s->buf[8] = 1; /* unit specifier is meter */
    } else {
        AV_WB32(s->buf, avctx->sample_aspect_ratio.num);
        AV_WB32(s->buf + 4, avctx->sample_aspect_ratio.den);
        s->buf[8] = 0; /* unit specifier is unknown */
    }
    png_write_chunk(&s->bytestream, MKTAG('p', 'H', 'Y', 's'), s->buf, 9);

    /* write stereoscopic information */
    side_data = av_frame_get_side_data(pict, AV_FRAME_DATA_STEREO3D);
    if (side_data) {
        AVStereo3D *stereo3d = (AVStereo3D *)side_data->data;
        switch (stereo3d->type) {
        case AV_STEREO3D_SIDEBYSIDE:
            s->buf[0] = ((stereo3d->flags & AV_STEREO3D_FLAG_INVERT) == 0) ? 1 : 0;
            png_write_chunk(&s->bytestream, MKTAG('s', 'T', 'E', 'R'), s->buf, 1);
            break;
        case AV_STEREO3D_2D:
            break;
        default:
            av_log(avctx, AV_LOG_WARNING, "Only side-by-side stereo3d flag can be defined within sTER chunk\n");
            break;
        }
    }

    /* write colorspace information */
    if (pict->color_primaries == AVCOL_PRI_BT709 &&
        pict->color_trc == AVCOL_TRC_IEC61966_2_1) {
        s->buf[0] = 1; /* rendering intent, relative colorimetric by default */
        png_write_chunk(&s->bytestream, MKTAG('s', 'R', 'G', 'B'), s->buf, 1);
    }

    if (png_get_chrm(pict->color_primaries, s->buf))
        png_write_chunk(&s->bytestream, MKTAG('c', 'H', 'R', 'M'), s->buf, 32);
    if (png_get_gama(pict->color_trc, s->buf))
        png_write_chunk(&s->bytestream, MKTAG('g', 'A', 'M', 'A'), s->buf, 4);

    /* put the palette if needed */
    if (s->color_type == PNG_COLOR_TYPE_PALETTE) {
        int has_alpha, alpha, i;
        unsigned int v;
        uint32_t *palette;
        uint8_t *ptr, *alpha_ptr;

        palette = (uint32_t *)pict->data[1];
        ptr = s->buf;
        alpha_ptr = s->buf + 256 * 3;
        has_alpha = 0;
        for (i = 0; i < 256; i++) {
            v = palette[i];
            alpha = v >> 24;
            if (alpha != 0xff)
                has_alpha = 1;
            *alpha_ptr++ = alpha;
            bytestream_put_be24(&ptr, v);
        }
        png_write_chunk(&s->bytestream,
                        MKTAG('P', 'L', 'T', 'E'), s->buf, 256 * 3);
        if (has_alpha) {
            png_write_chunk(&s->bytestream,
                            MKTAG('t', 'R', 'N', 'S'), s->buf + 256 * 3, 256);
        }
    }

    return 0;
}

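/* Filter and deflate all rows of one image (interlaced or not) and emit
 * the compressed stream through png_write_image_data(). */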
static int encode_frame(AVCodecContext *avctx, const AVFrame *pict)
{
    PNGEncContext *s = avctx->priv_data;
    const AVFrame *const p = pict;
    int y, len, ret;
    int row_size, pass_row_size;
    uint8_t *ptr, *top, *crow_buf, *crow;
    uint8_t *crow_base = NULL;
    uint8_t *progressive_buf = NULL;
    uint8_t *top_buf = NULL;

    row_size = (pict->width * s->bits_per_pixel + 7) >> 3;

    crow_base = av_malloc((row_size + 32) << (s->filter_type == PNG_FILTER_VALUE_MIXED));
    if (!crow_base) {
        ret = AVERROR(ENOMEM);
        goto the_end;
    }
    // pixel data should be aligned, but there's a control byte before it
    crow_buf = crow_base + 15;
    if (s->is_progressive) {
        progressive_buf = av_malloc(row_size + 1);
        top_buf = av_malloc(row_size + 1);
        if (!progressive_buf || !top_buf) {
            ret = AVERROR(ENOMEM);
            goto the_end;
        }
    }

    /* put each row */
    s->zstream.avail_out = IOBUF_SIZE;
    s->zstream.next_out  = s->buf;
    if (s->is_progressive) {
        int pass;

        for (pass = 0; pass < NB_PASSES; pass++) {
            /* NOTE: a pass is completely omitted if no pixels would be
             * output */
            pass_row_size = ff_png_pass_row_size(pass, s->bits_per_pixel, pict->width);
            if (pass_row_size > 0) {
                top = NULL;
                for (y = 0; y < pict->height; y++)
                    if ((ff_png_pass_ymask[pass] << (y & 7)) & 0x80) {
                        ptr = p->data[0] + y * p->linesize[0];
                        FFSWAP(uint8_t *, progressive_buf, top_buf);
                        png_get_interlaced_row(progressive_buf, pass_row_size,
                                               s->bits_per_pixel, pass,
                                               ptr, pict->width);
                        crow = png_choose_filter(s, crow_buf, progressive_buf,
                                                 top, pass_row_size, s->bits_per_pixel >> 3);
                        png_write_row(avctx, crow, pass_row_size + 1);
                        top = progressive_buf;
                    }
            }
        }
    } else {
        top = NULL;
        for (y = 0; y < pict->height; y++) {
            ptr = p->data[0] + y * p->linesize[0];
            crow = png_choose_filter(s, crow_buf, ptr, top,
                                     row_size, s->bits_per_pixel >> 3);
            png_write_row(avctx, crow, row_size + 1);
            top = ptr;
        }
    }
    /* compress last bytes */
    for (;;) {
        ret = deflate(&s->zstream, Z_FINISH);
        if (ret == Z_OK || ret == Z_STREAM_END) {
            len = IOBUF_SIZE - s->zstream.avail_out;
            if (len > 0 && s->bytestream_end - s->bytestream > len + 100) {
                png_write_image_data(avctx, s->buf, len);
            }
            s->zstream.avail_out = IOBUF_SIZE;
            s->zstream.next_out  = s->buf;
            if (ret == Z_STREAM_END)
                break;
        } else {
            ret = -1;
            goto the_end;
        }
    }

    ret = 0;

the_end:
    av_freep(&crow_base);
    av_freep(&progressive_buf);
    av_freep(&top_buf);
    deflateReset(&s->zstream);
    return ret;
}

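/* Top-level encoder for plain PNG: allocate the packet, then write the PNG
 * signature, the header chunks, the IDAT data and the IEND chunk. */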
static int encode_png(AVCodecContext *avctx, AVPacket *pkt,
                      const AVFrame *pict, int *got_packet)
{
    PNGEncContext *s = avctx->priv_data;
    int ret;
    int enc_row_size;
    size_t max_packet_size;

    enc_row_size = deflateBound(&s->zstream, (avctx->width * s->bits_per_pixel + 7) >> 3);
    max_packet_size =
        AV_INPUT_BUFFER_MIN_SIZE + // headers
        avctx->height * (
            enc_row_size +
            12 * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // IDAT * ceil(enc_row_size / IOBUF_SIZE)
        );
    if (max_packet_size > INT_MAX)
        return AVERROR(ENOMEM);
    ret = ff_alloc_packet2(avctx, pkt, max_packet_size, 0);
    if (ret < 0)
        return ret;

    s->bytestream_start =
    s->bytestream       = pkt->data;
    s->bytestream_end   = pkt->data + pkt->size;

    AV_WB64(s->bytestream, PNGSIG);
    s->bytestream += 8;

    ret = encode_headers(avctx, pict);
    if (ret < 0)
        return ret;

    ret = encode_frame(avctx, pict);
    if (ret < 0)
        return ret;

    png_write_chunk(&s->bytestream, MKTAG('I', 'E', 'N', 'D'), NULL, 0);

    pkt->size = s->bytestream - s->bytestream_start;
    pkt->flags |= AV_PKT_FLAG_KEY;
    *got_packet = 1;

    return 0;
}

static int apng_do_inverse_blend(AVFrame *output, const AVFrame *input,
                                 APNGFctlChunk *fctl_chunk, uint8_t bpp)
{
    // output: background, input: foreground
    // output the image such that when blended with the background, will produce the foreground

    unsigned int x, y;
    unsigned int leftmost_x = input->width;
    unsigned int rightmost_x = 0;
    unsigned int topmost_y = input->height;
    unsigned int bottommost_y = 0;
    const uint8_t *input_data = input->data[0];
    uint8_t *output_data = output->data[0];
    ptrdiff_t input_linesize = input->linesize[0];
    ptrdiff_t output_linesize = output->linesize[0];

    // Find bounding box of changes
    for (y = 0; y < input->height; ++y) {
        for (x = 0; x < input->width; ++x) {
            if (!memcmp(input_data + bpp * x, output_data + bpp * x, bpp))
                continue;

            if (x < leftmost_x)
                leftmost_x = x;
            if (x >= rightmost_x)
                rightmost_x = x + 1;
            if (y < topmost_y)
                topmost_y = y;
            if (y >= bottommost_y)
                bottommost_y = y + 1;
        }

        input_data += input_linesize;
        output_data += output_linesize;
    }

    if (leftmost_x == input->width && rightmost_x == 0) {
        // Empty frame
        // APNG does not support empty frames, so we make it a 1x1 frame
        leftmost_x = topmost_y = 0;
        rightmost_x = bottommost_y = 1;
    }

    // Do actual inverse blending
    if (fctl_chunk->blend_op == APNG_BLEND_OP_SOURCE) {
        output_data = output->data[0];
        for (y = topmost_y; y < bottommost_y; ++y) {
            memcpy(output_data,
                   input->data[0] + input_linesize * y + bpp * leftmost_x,
                   bpp * (rightmost_x - leftmost_x));
            output_data += output_linesize;
        }
    } else { // APNG_BLEND_OP_OVER
        size_t transparent_palette_index;
        uint32_t *palette;

        switch (input->format) {
        case AV_PIX_FMT_RGBA64BE:
        case AV_PIX_FMT_YA16BE:
        case AV_PIX_FMT_RGBA:
        case AV_PIX_FMT_GRAY8A:
            break;

        case AV_PIX_FMT_PAL8:
            palette = (uint32_t*)input->data[1];
            for (transparent_palette_index = 0; transparent_palette_index < 256; ++transparent_palette_index)
                if (palette[transparent_palette_index] >> 24 == 0)
                    break;
            break;

        default:
            // No alpha, so blending not possible
            return -1;
        }

        for (y = topmost_y; y < bottommost_y; ++y) {
            uint8_t *foreground = input->data[0] + input_linesize * y + bpp * leftmost_x;
            uint8_t *background = output->data[0] + output_linesize * y + bpp * leftmost_x;
            output_data = output->data[0] + output_linesize * (y - topmost_y);
            for (x = leftmost_x; x < rightmost_x; ++x, foreground += bpp, background += bpp, output_data += bpp) {
                if (!memcmp(foreground, background, bpp)) {
                    if (input->format == AV_PIX_FMT_PAL8) {
                        if (transparent_palette_index == 256) {
                            // Need fully transparent colour, but none exists
                            return -1;
                        }

                        *output_data = transparent_palette_index;
                    } else {
                        memset(output_data, 0, bpp);
                    }
                    continue;
                }

                // Check for special alpha values, since full inverse
                // alpha-on-alpha blending is rarely possible, and when
                // possible, doesn't compress much better than
                // APNG_BLEND_OP_SOURCE blending
                switch (input->format) {
                case AV_PIX_FMT_RGBA64BE:
                    if (((uint16_t*)foreground)[3] == 0xffff ||
                        ((uint16_t*)background)[3] == 0)
                        break;
                    return -1;

                case AV_PIX_FMT_YA16BE:
                    if (((uint16_t*)foreground)[1] == 0xffff ||
                        ((uint16_t*)background)[1] == 0)
                        break;
                    return -1;

                case AV_PIX_FMT_RGBA:
                    if (foreground[3] == 0xff || background[3] == 0)
                        break;
                    return -1;

                case AV_PIX_FMT_GRAY8A:
                    if (foreground[1] == 0xff || background[1] == 0)
                        break;
                    return -1;

                case AV_PIX_FMT_PAL8:
                    if (palette[*foreground] >> 24 == 0xff ||
                        palette[*background] >> 24 == 0)
                        break;
                    return -1;
                }

                memmove(output_data, foreground, bpp);
            }
        }
    }

    output->width = rightmost_x - leftmost_x;
    output->height = bottommost_y - topmost_y;
    fctl_chunk->width = output->width;
    fctl_chunk->height = output->height;
    fctl_chunk->x_offset = leftmost_x;
    fctl_chunk->y_offset = topmost_y;

    return 0;
}

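/* Encode one APNG frame: try every dispose_op of the previous frame and
 * every blend_op of the current frame, inverse-blend against the resulting
 * background, and keep whichever combination encodes to the fewest bytes. */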
static int apng_encode_frame(AVCodecContext *avctx, const AVFrame *pict,
                             APNGFctlChunk *best_fctl_chunk, APNGFctlChunk *best_last_fctl_chunk)
{
    PNGEncContext *s = avctx->priv_data;
    int ret;
    unsigned int y;
    AVFrame* diffFrame;
    uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
    uint8_t *original_bytestream, *original_bytestream_end;
    uint8_t *temp_bytestream = 0, *temp_bytestream_end;
    uint32_t best_sequence_number;
    uint8_t *best_bytestream;
    size_t best_bytestream_size = SIZE_MAX;
    APNGFctlChunk last_fctl_chunk = *best_last_fctl_chunk;
    APNGFctlChunk fctl_chunk = *best_fctl_chunk;

    if (avctx->frame_number == 0) {
        best_fctl_chunk->width = pict->width;
        best_fctl_chunk->height = pict->height;
        best_fctl_chunk->x_offset = 0;
        best_fctl_chunk->y_offset = 0;
        best_fctl_chunk->blend_op = APNG_BLEND_OP_SOURCE;
        return encode_frame(avctx, pict);
    }

    diffFrame = av_frame_alloc();
    if (!diffFrame)
        return AVERROR(ENOMEM);

    diffFrame->format = pict->format;
    diffFrame->width = pict->width;
    diffFrame->height = pict->height;
    if ((ret = av_frame_get_buffer(diffFrame, 32)) < 0)
        goto fail;

    original_bytestream = s->bytestream;
    original_bytestream_end = s->bytestream_end;

    temp_bytestream = av_malloc(original_bytestream_end - original_bytestream);
    temp_bytestream_end = temp_bytestream + (original_bytestream_end - original_bytestream);
    if (!temp_bytestream) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    for (last_fctl_chunk.dispose_op = 0; last_fctl_chunk.dispose_op < 3; ++last_fctl_chunk.dispose_op) {
        // 0: APNG_DISPOSE_OP_NONE
        // 1: APNG_DISPOSE_OP_BACKGROUND
        // 2: APNG_DISPOSE_OP_PREVIOUS

        for (fctl_chunk.blend_op = 0; fctl_chunk.blend_op < 2; ++fctl_chunk.blend_op) {
            // 0: APNG_BLEND_OP_SOURCE
            // 1: APNG_BLEND_OP_OVER

            uint32_t original_sequence_number = s->sequence_number, sequence_number;
            uint8_t *bytestream_start = s->bytestream;
            size_t bytestream_size;

            // Do disposal
            if (last_fctl_chunk.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
                diffFrame->width = pict->width;
                diffFrame->height = pict->height;
                ret = av_frame_copy(diffFrame, s->last_frame);
                if (ret < 0)
                    goto fail;

                if (last_fctl_chunk.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
                    for (y = last_fctl_chunk.y_offset; y < last_fctl_chunk.y_offset + last_fctl_chunk.height; ++y) {
                        size_t row_start = diffFrame->linesize[0] * y + bpp * last_fctl_chunk.x_offset;
                        memset(diffFrame->data[0] + row_start, 0, bpp * last_fctl_chunk.width);
                    }
                }
            } else {
                if (!s->prev_frame)
                    continue;

                diffFrame->width = pict->width;
                diffFrame->height = pict->height;
                ret = av_frame_copy(diffFrame, s->prev_frame);
                if (ret < 0)
                    goto fail;
            }

            // Do inverse blending
            if (apng_do_inverse_blend(diffFrame, pict, &fctl_chunk, bpp) < 0)
                continue;

            // Do encoding
            ret = encode_frame(avctx, diffFrame);
            sequence_number = s->sequence_number;
            s->sequence_number = original_sequence_number;
            bytestream_size = s->bytestream - bytestream_start;
            s->bytestream = bytestream_start;
            if (ret < 0)
                goto fail;

            if (bytestream_size < best_bytestream_size) {
                *best_fctl_chunk = fctl_chunk;
                *best_last_fctl_chunk = last_fctl_chunk;

                best_sequence_number = sequence_number;
                best_bytestream = s->bytestream;
                best_bytestream_size = bytestream_size;

                if (best_bytestream == original_bytestream) {
                    s->bytestream = temp_bytestream;
                    s->bytestream_end = temp_bytestream_end;
                } else {
                    s->bytestream = original_bytestream;
                    s->bytestream_end = original_bytestream_end;
                }
            }
        }
    }

    s->sequence_number = best_sequence_number;
    s->bytestream = original_bytestream + best_bytestream_size;
    s->bytestream_end = original_bytestream_end;
    if (best_bytestream != original_bytestream)
        memcpy(original_bytestream, best_bytestream, best_bytestream_size);

    ret = 0;

fail:
    av_freep(&temp_bytestream);
    av_frame_free(&diffFrame);
    return ret;
}

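/* APNG entry point. Packets are emitted with one frame of latency: the
 * previous frame's packet is sent once the current frame is known, so that
 * its fcTL chunk can carry the chosen dispose_op (the per-frame delay is
 * filled in later by the muxer). */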
static int encode_apng(AVCodecContext *avctx, AVPacket *pkt,
                       const AVFrame *pict, int *got_packet)
{
    PNGEncContext *s = avctx->priv_data;
    int ret;
    int enc_row_size;
    size_t max_packet_size;
    APNGFctlChunk fctl_chunk = {0};

    if (pict && avctx->codec_id == AV_CODEC_ID_APNG && s->color_type == PNG_COLOR_TYPE_PALETTE) {
        uint32_t checksum = ~av_crc(av_crc_get_table(AV_CRC_32_IEEE_LE), ~0U, pict->data[1], 256 * sizeof(uint32_t));

        if (avctx->frame_number == 0) {
            s->palette_checksum = checksum;
        } else if (checksum != s->palette_checksum) {
            av_log(avctx, AV_LOG_ERROR,
                   "Input contains more than one unique palette. APNG does not support multiple palettes.\n");
            return -1;
        }
    }

    enc_row_size = deflateBound(&s->zstream, (avctx->width * s->bits_per_pixel + 7) >> 3);
    max_packet_size =
        AV_INPUT_BUFFER_MIN_SIZE + // headers
        avctx->height * (
            enc_row_size +
            (4 + 12) * (((int64_t)enc_row_size + IOBUF_SIZE - 1) / IOBUF_SIZE) // fdAT * ceil(enc_row_size / IOBUF_SIZE)
        );
    if (max_packet_size > INT_MAX)
        return AVERROR(ENOMEM);

    if (avctx->frame_number == 0) {
        if (!pict)
            return AVERROR(EINVAL);

        s->bytestream = s->extra_data = av_malloc(AV_INPUT_BUFFER_MIN_SIZE);
        if (!s->extra_data)
            return AVERROR(ENOMEM);

        ret = encode_headers(avctx, pict);
        if (ret < 0)
            return ret;

        s->extra_data_size = s->bytestream - s->extra_data;

        s->last_frame_packet = av_malloc(max_packet_size);
        if (!s->last_frame_packet)
            return AVERROR(ENOMEM);
    } else if (s->last_frame) {
        ret = ff_alloc_packet2(avctx, pkt, max_packet_size, 0);
        if (ret < 0)
            return ret;

        memcpy(pkt->data, s->last_frame_packet, s->last_frame_packet_size);
        pkt->size = s->last_frame_packet_size;
        pkt->pts = pkt->dts = s->last_frame->pts;
    }

    if (pict) {
        s->bytestream_start =
        s->bytestream       = s->last_frame_packet;
        s->bytestream_end   = s->bytestream + max_packet_size;

        // We're encoding the frame first, so we have to do a bit of shuffling around
        // to have the image data write to the correct place in the buffer
        fctl_chunk.sequence_number = s->sequence_number;
        ++s->sequence_number;
        s->bytestream += 26 + 12;

        ret = apng_encode_frame(avctx, pict, &fctl_chunk, &s->last_frame_fctl);
        if (ret < 0)
            return ret;

        fctl_chunk.delay_num = 0; // delay filled in during muxing
        fctl_chunk.delay_den = 0;
    } else {
        s->last_frame_fctl.dispose_op = APNG_DISPOSE_OP_NONE;
    }

    if (s->last_frame) {
        uint8_t* last_fctl_chunk_start = pkt->data;
        uint8_t buf[26];
        if (!s->extra_data_updated) {
            uint8_t *side_data = av_packet_new_side_data(pkt, AV_PKT_DATA_NEW_EXTRADATA, s->extra_data_size);
            if (!side_data)
                return AVERROR(ENOMEM);
            memcpy(side_data, s->extra_data, s->extra_data_size);
            s->extra_data_updated = 1;
        }

        AV_WB32(buf + 0, s->last_frame_fctl.sequence_number);
        AV_WB32(buf + 4, s->last_frame_fctl.width);
        AV_WB32(buf + 8, s->last_frame_fctl.height);
        AV_WB32(buf + 12, s->last_frame_fctl.x_offset);
        AV_WB32(buf + 16, s->last_frame_fctl.y_offset);
        AV_WB16(buf + 20, s->last_frame_fctl.delay_num);
        AV_WB16(buf + 22, s->last_frame_fctl.delay_den);
        buf[24] = s->last_frame_fctl.dispose_op;
        buf[25] = s->last_frame_fctl.blend_op;
        png_write_chunk(&last_fctl_chunk_start, MKTAG('f', 'c', 'T', 'L'), buf, 26);

        *got_packet = 1;
    }

    if (pict) {
        if (!s->last_frame) {
            s->last_frame = av_frame_alloc();
            if (!s->last_frame)
                return AVERROR(ENOMEM);
        } else if (s->last_frame_fctl.dispose_op != APNG_DISPOSE_OP_PREVIOUS) {
            if (!s->prev_frame) {
                s->prev_frame = av_frame_alloc();
                if (!s->prev_frame)
                    return AVERROR(ENOMEM);

                s->prev_frame->format = pict->format;
                s->prev_frame->width = pict->width;
                s->prev_frame->height = pict->height;
                if ((ret = av_frame_get_buffer(s->prev_frame, 32)) < 0)
                    return ret;
            }

            // Do disposal, but not blending
            av_frame_copy(s->prev_frame, s->last_frame);
            if (s->last_frame_fctl.dispose_op == APNG_DISPOSE_OP_BACKGROUND) {
                uint32_t y;
                uint8_t bpp = (s->bits_per_pixel + 7) >> 3;
                for (y = s->last_frame_fctl.y_offset; y < s->last_frame_fctl.y_offset + s->last_frame_fctl.height; ++y) {
                    size_t row_start = s->prev_frame->linesize[0] * y + bpp * s->last_frame_fctl.x_offset;
                    memset(s->prev_frame->data[0] + row_start, 0, bpp * s->last_frame_fctl.width);
                }
            }
        }

        av_frame_unref(s->last_frame);
        ret = av_frame_ref(s->last_frame, (AVFrame*)pict);
        if (ret < 0)
            return ret;

        s->last_frame_fctl = fctl_chunk;
        s->last_frame_packet_size = s->bytestream - s->bytestream_start;
    } else {
        av_frame_free(&s->last_frame);
    }

    return 0;
}

static av_cold int png_enc_init(AVCodecContext *avctx)
{
    PNGEncContext *s = avctx->priv_data;
    int compression_level;

    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGBA:
        avctx->bits_per_coded_sample = 32;
        break;
    case AV_PIX_FMT_RGB24:
        avctx->bits_per_coded_sample = 24;
        break;
    case AV_PIX_FMT_GRAY8:
        avctx->bits_per_coded_sample = 0x28;
        break;
    case AV_PIX_FMT_MONOBLACK:
        avctx->bits_per_coded_sample = 1;
        break;
    case AV_PIX_FMT_PAL8:
        avctx->bits_per_coded_sample = 8;
    }

#if FF_API_CODED_FRAME
FF_DISABLE_DEPRECATION_WARNINGS
    avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
    avctx->coded_frame->key_frame = 1;
FF_ENABLE_DEPRECATION_WARNINGS
#endif

    ff_llvidencdsp_init(&s->llvidencdsp);

#if FF_API_PRIVATE_OPT
FF_DISABLE_DEPRECATION_WARNINGS
    if (avctx->prediction_method)
        s->filter_type = av_clip(avctx->prediction_method,
                                 PNG_FILTER_VALUE_NONE,
                                 PNG_FILTER_VALUE_MIXED);
FF_ENABLE_DEPRECATION_WARNINGS
#endif

    if (avctx->pix_fmt == AV_PIX_FMT_MONOBLACK)
        s->filter_type = PNG_FILTER_VALUE_NONE;

    if (s->dpi && s->dpm) {
        av_log(avctx, AV_LOG_ERROR, "Only one of 'dpi' or 'dpm' options should be set\n");
        return AVERROR(EINVAL);
    } else if (s->dpi) {
        s->dpm = s->dpi * 10000 / 254;
    }

    s->is_progressive = !!(avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT);
    switch (avctx->pix_fmt) {
    case AV_PIX_FMT_RGBA64BE:
        s->bit_depth = 16;
        s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
        break;
    case AV_PIX_FMT_RGB48BE:
        s->bit_depth = 16;
        s->color_type = PNG_COLOR_TYPE_RGB;
        break;
    case AV_PIX_FMT_RGBA:
        s->bit_depth = 8;
        s->color_type = PNG_COLOR_TYPE_RGB_ALPHA;
        break;
    case AV_PIX_FMT_RGB24:
        s->bit_depth = 8;
        s->color_type = PNG_COLOR_TYPE_RGB;
        break;
    case AV_PIX_FMT_GRAY16BE:
        s->bit_depth = 16;
        s->color_type = PNG_COLOR_TYPE_GRAY;
        break;
    case AV_PIX_FMT_GRAY8:
        s->bit_depth = 8;
        s->color_type = PNG_COLOR_TYPE_GRAY;
        break;
    case AV_PIX_FMT_GRAY8A:
        s->bit_depth = 8;
        s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
        break;
    case AV_PIX_FMT_YA16BE:
        s->bit_depth = 16;
        s->color_type = PNG_COLOR_TYPE_GRAY_ALPHA;
        break;
    case AV_PIX_FMT_MONOBLACK:
        s->bit_depth = 1;
        s->color_type = PNG_COLOR_TYPE_GRAY;
        break;
    case AV_PIX_FMT_PAL8:
        s->bit_depth = 8;
        s->color_type = PNG_COLOR_TYPE_PALETTE;
        break;
    default:
        return -1;
    }
    s->bits_per_pixel = ff_png_get_nb_channels(s->color_type) * s->bit_depth;

    s->zstream.zalloc = ff_png_zalloc;
    s->zstream.zfree = ff_png_zfree;
    s->zstream.opaque = NULL;
    compression_level = avctx->compression_level == FF_COMPRESSION_DEFAULT
                      ? Z_DEFAULT_COMPRESSION
                      : av_clip(avctx->compression_level, 0, 9);
    if (deflateInit2(&s->zstream, compression_level, Z_DEFLATED, 15, 8, Z_DEFAULT_STRATEGY) != Z_OK)
        return -1;

    return 0;
}

static av_cold int png_enc_close(AVCodecContext *avctx)
{
    PNGEncContext *s = avctx->priv_data;

    deflateEnd(&s->zstream);
    av_frame_free(&s->last_frame);
    av_frame_free(&s->prev_frame);
    av_freep(&s->last_frame_packet);
    av_freep(&s->extra_data);
    s->extra_data_size = 0;
    return 0;
}

#define OFFSET(x) offsetof(PNGEncContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
static const AVOption options[] = {
    {"dpi", "Set image resolution (in dots per inch)", OFFSET(dpi), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
    {"dpm", "Set image resolution (in dots per meter)", OFFSET(dpm), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 0x10000, VE},
    { "pred", "Prediction method", OFFSET(filter_type), AV_OPT_TYPE_INT, { .i64 = PNG_FILTER_VALUE_NONE }, PNG_FILTER_VALUE_NONE, PNG_FILTER_VALUE_MIXED, VE, "pred" },
        { "none",  NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_NONE },  INT_MIN, INT_MAX, VE, "pred" },
        { "sub",   NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_SUB },   INT_MIN, INT_MAX, VE, "pred" },
        { "up",    NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_UP },    INT_MIN, INT_MAX, VE, "pred" },
        { "avg",   NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_AVG },   INT_MIN, INT_MAX, VE, "pred" },
        { "paeth", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_PAETH }, INT_MIN, INT_MAX, VE, "pred" },
        { "mixed", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PNG_FILTER_VALUE_MIXED }, INT_MIN, INT_MAX, VE, "pred" },
    { NULL },
};

static const AVClass pngenc_class = {
    .class_name = "PNG encoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

static const AVClass apngenc_class = {
    .class_name = "APNG encoder",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_png_encoder = {
    .name           = "png",
    .long_name      = NULL_IF_CONFIG_SMALL("PNG (Portable Network Graphics) image"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_PNG,
    .priv_data_size = sizeof(PNGEncContext),
    .init           = png_enc_init,
    .close          = png_enc_close,
    .encode2        = encode_png,
    .capabilities   = AV_CODEC_CAP_FRAME_THREADS | AV_CODEC_CAP_INTRA_ONLY,
    .pix_fmts       = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA,
        AV_PIX_FMT_RGB48BE, AV_PIX_FMT_RGBA64BE,
        AV_PIX_FMT_PAL8,
        AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY8A,
        AV_PIX_FMT_GRAY16BE, AV_PIX_FMT_YA16BE,
        AV_PIX_FMT_MONOBLACK, AV_PIX_FMT_NONE
    },
    .priv_class     = &pngenc_class,
};

AVCodec ff_apng_encoder = {
    .name           = "apng",
    .long_name      = NULL_IF_CONFIG_SMALL("APNG (Animated Portable Network Graphics) image"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_APNG,
    .priv_data_size = sizeof(PNGEncContext),
    .init           = png_enc_init,
    .close          = png_enc_close,
    .encode2        = encode_apng,
    .capabilities   = AV_CODEC_CAP_DELAY,
    .pix_fmts       = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_RGB24, AV_PIX_FMT_RGBA,
        AV_PIX_FMT_RGB48BE, AV_PIX_FMT_RGBA64BE,
        AV_PIX_FMT_PAL8,
        AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY8A,
        AV_PIX_FMT_GRAY16BE, AV_PIX_FMT_YA16BE,
        AV_PIX_FMT_MONOBLACK, AV_PIX_FMT_NONE
    },
    .priv_class     = &apngenc_class,
};