blob: c92cd85f12a2134721394d4e56721955192838a0
1 | /* |
2 | * QuickDraw (qdrw) codec |
3 | * Copyright (c) 2004 Konstantin Shishkov |
4 | * Copyright (c) 2015 Vittorio Giovara |
5 | * |
6 | * This file is part of FFmpeg. |
7 | * |
8 | * FFmpeg is free software; you can redistribute it and/or |
9 | * modify it under the terms of the GNU Lesser General Public |
10 | * License as published by the Free Software Foundation; either |
11 | * version 2.1 of the License, or (at your option) any later version. |
12 | * |
13 | * FFmpeg is distributed in the hope that it will be useful, |
14 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
15 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
16 | * Lesser General Public License for more details. |
17 | * |
18 | * You should have received a copy of the GNU Lesser General Public |
19 | * License along with FFmpeg; if not, write to the Free Software |
20 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA |
21 | */ |
22 | |
23 | /** |
24 | * @file |
25 | * Apple QuickDraw codec. |
26 | * https://developer.apple.com/legacy/library/documentation/mac/QuickDraw/QuickDraw-461.html |
27 | */ |
28 | |
29 | #include "libavutil/common.h" |
30 | #include "libavutil/intreadwrite.h" |
31 | #include "avcodec.h" |
32 | #include "bytestream.h" |
33 | #include "internal.h" |
34 | |
/* Subset of QuickDraw PICT version 2 opcodes handled by this decoder;
 * consecutive enumerators rely on implicit +1 numbering. */
enum QuickdrawOpcodes {
    CLIP = 0x0001,
    PACKBITSRECT = 0x0098,  /* indexed pixel data, PackBits compressed */
    PACKBITSRGN,            /* same, clipped to a region (mask unsupported) */
    DIRECTBITSRECT,         /* direct (RGB) pixel data */
    DIRECTBITSRGN,          /* same, clipped to a region (mask unsupported) */
    SHORTCOMMENT = 0x00A0,
    LONGCOMMENT,            /* 2-byte kind + 2-byte size + payload */

    EOP = 0x00FF,           /* end of picture */
};
46 | |
47 | static int parse_palette(AVCodecContext *avctx, GetByteContext *gbc, |
48 | uint32_t *pal, int colors) |
49 | { |
50 | int i; |
51 | |
52 | for (i = 0; i <= colors; i++) { |
53 | uint8_t r, g, b; |
54 | unsigned int idx = bytestream2_get_be16(gbc); /* color index */ |
55 | if (idx > 255) { |
56 | av_log(avctx, AV_LOG_WARNING, |
57 | "Palette index out of range: %u\n", idx); |
58 | bytestream2_skip(gbc, 6); |
59 | continue; |
60 | } |
61 | r = bytestream2_get_byte(gbc); |
62 | bytestream2_skip(gbc, 1); |
63 | g = bytestream2_get_byte(gbc); |
64 | bytestream2_skip(gbc, 1); |
65 | b = bytestream2_get_byte(gbc); |
66 | bytestream2_skip(gbc, 1); |
67 | pal[idx] = (0xFFU << 24) | (r << 16) | (g << 8) | b; |
68 | } |
69 | return 0; |
70 | } |
71 | |
/* Decode PackBits-compressed 2 bpp indexed pixel data: each packed byte
 * expands to four palette indices (MSB first), one per output byte.
 * Returns 0 on success or AVERROR_INVALIDDATA on truncated input. */
static int decode_rle_bpp2(AVCodecContext *avctx, AVFrame *p, GetByteContext *gbc)
{
    int offset = avctx->width;          /* output pixels (bytes) per row */
    uint8_t *outdata = p->data[0];
    int i, j;

    for (i = 0; i < avctx->height; i++) {
        int size, left, code, pix;
        uint8_t *out = outdata;
        int pos = 0;

        /* size of packed line */
        /* wide rows (> 200 packed bytes at 4 pixels/byte) use a 16-bit
         * byte count, narrow rows a single byte */
        if (offset / 4 > 200)
            size = left = bytestream2_get_be16(gbc);
        else
            size = left = bytestream2_get_byte(gbc);
        if (bytestream2_get_bytes_left(gbc) < size)
            return AVERROR_INVALIDDATA;

        /* decode line */
        while (left > 0) {
            code = bytestream2_get_byte(gbc);
            if (code & 0x80 ) { /* run: repeat next byte 257 - code times */
                pix = bytestream2_get_byte(gbc);
                for (j = 0; j < 257 - code; j++) {
                    /* unpack the four 2-bit pixels, high bits first;
                     * the guards clamp writes to the row width */
                    if (pos < offset)
                        out[pos++] = (pix & 0xC0) >> 6;
                    if (pos < offset)
                        out[pos++] = (pix & 0x30) >> 4;
                    if (pos < offset)
                        out[pos++] = (pix & 0x0C) >> 2;
                    if (pos < offset)
                        out[pos++] = (pix & 0x03);
                }
                left -= 2;              /* opcode byte + value byte consumed */
            } else { /* copy: code + 1 literal packed bytes follow */
                for (j = 0; j < code + 1; j++) {
                    pix = bytestream2_get_byte(gbc);
                    if (pos < offset)
                        out[pos++] = (pix & 0xC0) >> 6;
                    if (pos < offset)
                        out[pos++] = (pix & 0x30) >> 4;
                    if (pos < offset)
                        out[pos++] = (pix & 0x0C) >> 2;
                    if (pos < offset)
                        out[pos++] = (pix & 0x03);
                }
                left -= 1 + (code + 1); /* opcode byte + literals consumed */
            }
        }
        outdata += p->linesize[0];
    }
    return 0;
}
126 | |
/* Decode PackBits-compressed 4 bpp indexed pixel data: each packed byte
 * expands to two palette indices (high nibble first), one per output byte.
 * Returns 0 on success or AVERROR_INVALIDDATA on truncated input. */
static int decode_rle_bpp4(AVCodecContext *avctx, AVFrame *p, GetByteContext *gbc)
{
    int offset = avctx->width;          /* output pixels (bytes) per row */
    uint8_t *outdata = p->data[0];
    int i, j;

    for (i = 0; i < avctx->height; i++) {
        int size, left, code, pix;
        uint8_t *out = outdata;
        int pos = 0;

        /* size of packed line */
        size = left = bytestream2_get_be16(gbc);
        if (bytestream2_get_bytes_left(gbc) < size)
            return AVERROR_INVALIDDATA;

        /* decode line */
        while (left > 0) {
            code = bytestream2_get_byte(gbc);
            if (code & 0x80 ) { /* run: repeat next byte 257 - code times */
                pix = bytestream2_get_byte(gbc);
                for (j = 0; j < 257 - code; j++) {
                    /* split into two 4-bit pixels; guards clamp to row width */
                    if (pos < offset)
                        out[pos++] = (pix & 0xF0) >> 4;
                    if (pos < offset)
                        out[pos++] = pix & 0xF;
                }
                left -= 2;              /* opcode byte + value byte consumed */
            } else { /* copy: code + 1 literal packed bytes follow */
                for (j = 0; j < code + 1; j++) {
                    pix = bytestream2_get_byte(gbc);
                    if (pos < offset)
                        out[pos++] = (pix & 0xF0) >> 4;
                    if (pos < offset)
                        out[pos++] = pix & 0xF;
                }
                left -= 1 + (code + 1); /* opcode byte + literals consumed */
            }
        }
        outdata += p->linesize[0];
    }
    return 0;
}
170 | |
/* Decode PackBits-compressed 16-bit (RGB555) pixel data; the RLE element
 * size is one 16-bit big-endian pixel rather than one byte.
 * Returns 0 on success or AVERROR_INVALIDDATA on truncated input. */
static int decode_rle16(AVCodecContext *avctx, AVFrame *p, GetByteContext *gbc)
{
    int offset = avctx->width;          /* output pixels (uint16) per row */
    uint8_t *outdata = p->data[0];
    int i, j;

    for (i = 0; i < avctx->height; i++) {
        int size, left, code, pix;
        uint16_t *out = (uint16_t *)outdata;
        int pos = 0;

        /* size of packed line */
        size = left = bytestream2_get_be16(gbc);
        if (bytestream2_get_bytes_left(gbc) < size)
            return AVERROR_INVALIDDATA;

        /* decode line */
        while (left > 0) {
            code = bytestream2_get_byte(gbc);
            if (code & 0x80 ) { /* run: repeat next pixel 257 - code times */
                pix = bytestream2_get_be16(gbc);
                for (j = 0; j < 257 - code; j++) {
                    if (pos < offset) { /* clamp writes to the row width */
                        out[pos++] = pix;
                    }
                }
                left -= 3;              /* opcode byte + 16-bit pixel consumed */
            } else { /* copy: code + 1 literal pixels follow */
                for (j = 0; j < code + 1; j++) {
                    if (pos < offset) {
                        out[pos++] = bytestream2_get_be16(gbc);
                    } else {
                        /* still consume the input past the row's end */
                        bytestream2_skip(gbc, 2);
                    }
                }
                left -= 1 + (code + 1) * 2; /* opcode + 2 bytes per literal */
            }
        }
        outdata += p->linesize[0];
    }
    return 0;
}
213 | |
/* Decode PackBits-compressed 8-bit component data into packed pixels of
 * `step` bytes each (step = component count, e.g. 3 for RGB24, 4 for ARGB;
 * step 1 handles plain PAL8). Each row's packed stream stores one whole
 * component plane after another, so `pos` advances by `step` through the
 * row and, on reaching the end, wraps back shifted by one byte to start
 * filling the next component of every pixel.
 * Returns 0 on success or AVERROR_INVALIDDATA on truncated input. */
static int decode_rle(AVCodecContext *avctx, AVFrame *p, GetByteContext *gbc,
                      int step)
{
    int i, j;
    int offset = avctx->width * step;   /* output bytes per row */
    uint8_t *outdata = p->data[0];

    for (i = 0; i < avctx->height; i++) {
        int size, left, code, pix;
        uint8_t *out = outdata;
        int pos = 0;

        /* size of packed line */
        size = left = bytestream2_get_be16(gbc);
        if (bytestream2_get_bytes_left(gbc) < size)
            return AVERROR_INVALIDDATA;

        /* decode line */
        while (left > 0) {
            code = bytestream2_get_byte(gbc);
            if (code & 0x80 ) { /* run: repeat next byte 257 - code times */
                pix = bytestream2_get_byte(gbc);
                for (j = 0; j < 257 - code; j++) {
                    if (pos < offset)
                        out[pos] = pix;
                    pos += step;
                    /* end of this component plane: wrap to the next
                     * component of the first pixel */
                    if (pos >= offset && step > 1) {
                        pos -= offset;
                        pos++;
                    }
                }
                left -= 2;              /* opcode byte + value byte consumed */
            } else { /* copy: code + 1 literal bytes follow */
                for (j = 0; j < code + 1; j++) {
                    pix = bytestream2_get_byte(gbc);
                    if (pos < offset)
                        out[pos] = pix;
                    pos += step;
                    if (pos >= offset && step > 1) {
                        pos -= offset;
                        pos++;
                    }
                }
                left -= 2 + code;       /* opcode byte + code + 1 literals */
            }
        }
        outdata += p->linesize[0];
    }
    return 0;
}
264 | |
/**
 * Probe a buffer for a valid PICT header.
 *
 * Inspects the frame rectangle and the version marker that follows the
 * 2-byte picture size field. Returns the PICT version (1 or 2) on a
 * match, or 0 when the buffer is too small or does not look like a PICT.
 */
static int check_header(const char *buf, int buf_size)
{
    unsigned width, height, tag0, tag1;

    /* need at least the smallest PICT header */
    if (buf_size < 40)
        return 0;

    width  = AV_RB16(buf + 6);
    height = AV_RB16(buf + 8);
    tag0   = AV_RB16(buf + 10);
    tag1   = AV_RB16(buf + 12);

    if (!width || !height)
        return 0;

    /* version 1 is marked 0x1101; version 2 is 0x0011 followed by 0x02FF */
    if (tag0 == 0x1101)
        return 1;
    return (tag0 == 0x0011 && tag1 == 0x02FF) ? 2 : 0;
}
286 | |
287 | |
/* Decode one QuickDraw PICT picture: skip an optional 512-byte file header,
 * validate the PICT v2 header, then walk the opcode stream until a known
 * pixel-data opcode (PackBits or DirectBits) yields a frame.
 * Returns the consumed packet size on success, or a negative AVERROR. */
static int decode_frame(AVCodecContext *avctx,
                        void *data, int *got_frame,
                        AVPacket *avpkt)
{
    AVFrame * const p = data;
    GetByteContext gbc;
    int colors;
    int w, h, ret;
    int ver;

    bytestream2_init(&gbc, avpkt->data, avpkt->size);
    /* PICT files (as opposed to raw resources) carry a 512-byte header;
     * detect and skip it when the data after it probes as a PICT */
    if (   bytestream2_get_bytes_left(&gbc) >= 552
        && check_header(gbc.buffer + 512, bytestream2_get_bytes_left(&gbc) - 512)
       )
        bytestream2_skip(&gbc, 512);

    ver = check_header(gbc.buffer, bytestream2_get_bytes_left(&gbc));

    /* smallest PICT header */
    if (bytestream2_get_bytes_left(&gbc) < 40) {
        av_log(avctx, AV_LOG_ERROR, "Frame is too small %d\n",
               bytestream2_get_bytes_left(&gbc));
        return AVERROR_INVALIDDATA;
    }

    /* frame rectangle: picSize (2) + top/left (4) precede bottom/right */
    bytestream2_skip(&gbc, 6);
    h = bytestream2_get_be16(&gbc);
    w = bytestream2_get_be16(&gbc);

    ret = ff_set_dimensions(avctx, w, h);
    if (ret < 0)
        return ret;

    /* version 1 is identified by 0x1101
     * it uses byte-aligned opcodes rather than word-aligned */
    if (ver == 1) {
        avpriv_request_sample(avctx, "QuickDraw version 1");
        return AVERROR_PATCHWELCOME;
    } else if (ver != 2) {
        avpriv_request_sample(avctx, "QuickDraw version unknown (%X)", bytestream2_get_be32(&gbc));
        return AVERROR_PATCHWELCOME;
    }

    /* skip version opcode + header opcode payload */
    bytestream2_skip(&gbc, 4+26);

    /* scan the word-aligned opcode stream */
    while (bytestream2_get_bytes_left(&gbc) >= 4) {
        int bppcnt, bpp;
        int rowbytes, pack_type;
        int opcode = bytestream2_get_be16(&gbc);

        switch(opcode) {
        case CLIP:
            bytestream2_skip(&gbc, 10);
            break;
        case PACKBITSRECT:
        case PACKBITSRGN:
            av_log(avctx, AV_LOG_DEBUG, "Parsing Packbit opcode\n");

            /* skip the PixMap fields up to cmpCount */
            bytestream2_skip(&gbc, 30);
            bppcnt = bytestream2_get_be16(&gbc); /* cmpCount */
            bpp    = bytestream2_get_be16(&gbc); /* cmpSize */

            av_log(avctx, AV_LOG_DEBUG, "bppcount %d bpp %d\n", bppcnt, bpp);
            if (bppcnt == 1 && bpp == 8) {
                avctx->pix_fmt = AV_PIX_FMT_PAL8;
            } else if (bppcnt == 1 && (bpp == 4 || bpp == 2)) {
                /* sub-byte indexed data is expanded to one index per byte */
                avctx->pix_fmt = AV_PIX_FMT_PAL8;
            } else if (bppcnt == 3 && bpp == 5) {
                avctx->pix_fmt = AV_PIX_FMT_RGB555;
            } else {
                av_log(avctx, AV_LOG_ERROR,
                       "Invalid pixel format (bppcnt %d bpp %d) in Packbit\n",
                       bppcnt, bpp);
                return AVERROR_INVALIDDATA;
            }

            /* jump to palette */
            bytestream2_skip(&gbc, 18);
            colors = bytestream2_get_be16(&gbc); /* ctSize: entries - 1 */

            if (colors < 0 || colors > 256) {
                av_log(avctx, AV_LOG_ERROR,
                       "Error color count - %i(0x%X)\n", colors, colors);
                return AVERROR_INVALIDDATA;
            }
            /* each palette entry occupies 8 bytes */
            if (bytestream2_get_bytes_left(&gbc) < (colors + 1) * 8) {
                av_log(avctx, AV_LOG_ERROR, "Palette is too small %d\n",
                       bytestream2_get_bytes_left(&gbc));
                return AVERROR_INVALIDDATA;
            }
            if ((ret = ff_get_buffer(avctx, p, 0)) < 0)
                return ret;

            parse_palette(avctx, &gbc, (uint32_t *)p->data[1], colors);
            p->palette_has_changed = 1;

            /* jump to image data */
            bytestream2_skip(&gbc, 18);

            if (opcode == PACKBITSRGN) {
                bytestream2_skip(&gbc, 2 + 8); /* size + rect */
                avpriv_report_missing_feature(avctx, "Packbit mask region");
            }

            /* pick the RLE variant matching the component layout */
            if (avctx->pix_fmt == AV_PIX_FMT_RGB555)
                ret = decode_rle16(avctx, p, &gbc);
            else if (bpp == 2)
                ret = decode_rle_bpp2(avctx, p, &gbc);
            else if (bpp == 4)
                ret = decode_rle_bpp4(avctx, p, &gbc);
            else
                ret = decode_rle(avctx, p, &gbc, bppcnt);
            if (ret < 0)
                return ret;
            *got_frame = 1;
            break;
        case DIRECTBITSRECT:
        case DIRECTBITSRGN:
            av_log(avctx, AV_LOG_DEBUG, "Parsing Directbit opcode\n");

            bytestream2_skip(&gbc, 4);
            /* top two bits of rowBytes are flags, mask them off */
            rowbytes = bytestream2_get_be16(&gbc) & 0x3FFF;
            if (rowbytes <= 250) {
                /* short rows are stored unpacked, which this decoder
                 * does not handle yet */
                avpriv_report_missing_feature(avctx, "Short rowbytes");
                return AVERROR_PATCHWELCOME;
            }

            bytestream2_skip(&gbc, 4);
            h = bytestream2_get_be16(&gbc);
            w = bytestream2_get_be16(&gbc);
            bytestream2_skip(&gbc, 2);

            /* the DirectBits bounds override the picture frame */
            ret = ff_set_dimensions(avctx, w, h);
            if (ret < 0)
                return ret;

            pack_type = bytestream2_get_be16(&gbc);

            bytestream2_skip(&gbc, 16);
            bppcnt = bytestream2_get_be16(&gbc); /* cmpCount */
            bpp    = bytestream2_get_be16(&gbc); /* cmpSize */

            av_log(avctx, AV_LOG_DEBUG, "bppcount %d bpp %d\n", bppcnt, bpp);
            if (bppcnt == 3 && bpp == 8) {
                avctx->pix_fmt = AV_PIX_FMT_RGB24;
            } else if (bppcnt == 3 && bpp == 5) {
                avctx->pix_fmt = AV_PIX_FMT_RGB555;
            } else if (bppcnt == 4 && bpp == 8) {
                avctx->pix_fmt = AV_PIX_FMT_ARGB;
            } else {
                av_log(avctx, AV_LOG_ERROR,
                       "Invalid pixel format (bppcnt %d bpp %d) in Directbit\n",
                       bppcnt, bpp);
                return AVERROR_INVALIDDATA;
            }

            /* set packing when default is selected */
            if (pack_type == 0)
                pack_type = bppcnt;

            if (pack_type != 3 && pack_type != 4) {
                avpriv_request_sample(avctx, "Pack type %d", pack_type);
                return AVERROR_PATCHWELCOME;
            }
            if ((ret = ff_get_buffer(avctx, p, 0)) < 0)
                return ret;

            /* jump to data */
            bytestream2_skip(&gbc, 30);

            if (opcode == DIRECTBITSRGN) {
                bytestream2_skip(&gbc, 2 + 8); /* size + rect */
                avpriv_report_missing_feature(avctx, "DirectBit mask region");
            }

            if (avctx->pix_fmt == AV_PIX_FMT_RGB555)
                ret = decode_rle16(avctx, p, &gbc);
            else
                ret = decode_rle(avctx, p, &gbc, bppcnt);
            if (ret < 0)
                return ret;
            *got_frame = 1;
            break;
        case LONGCOMMENT:
            bytestream2_get_be16(&gbc);               /* comment kind */
            bytestream2_skip(&gbc, bytestream2_get_be16(&gbc)); /* payload */
            break;
        default:
            av_log(avctx, AV_LOG_TRACE, "Unknown 0x%04X opcode\n", opcode);
            break;
        }
        /* exit the loop when a known pixel block has been found */
        if (*got_frame) {
            int eop, trail;

            /* re-align to a word */
            /* note: alignment is computed relative to the packet's end */
            bytestream2_skip(&gbc, bytestream2_get_bytes_left(&gbc) % 2);

            eop = bytestream2_get_be16(&gbc);
            trail = bytestream2_get_bytes_left(&gbc);
            if (eop != EOP)
                av_log(avctx, AV_LOG_WARNING,
                       "Missing end of picture opcode (found 0x%04X)\n", eop);
            if (trail)
                av_log(avctx, AV_LOG_WARNING, "Got %d trailing bytes\n", trail);
            break;
        }
    }

    if (*got_frame) {
        p->pict_type = AV_PICTURE_TYPE_I;
        p->key_frame = 1;

        return avpkt->size;
    } else {
        av_log(avctx, AV_LOG_ERROR, "Frame contained no usable data\n");

        return AVERROR_INVALIDDATA;
    }
}
508 | |
/* Codec registration: intra-only image decoder with direct rendering. */
AVCodec ff_qdraw_decoder = {
    .name           = "qdraw",
    .long_name      = NULL_IF_CONFIG_SMALL("Apple QuickDraw"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_QDRAW,
    .decode         = decode_frame,
    .capabilities   = AV_CODEC_CAP_DR1,
};
516 | }; |
517 |