1/*
2 * VC-1 and WMV3 decoder
3 * Copyright (c) 2011 Mashiat Sarker Shakkhar
4 * Copyright (c) 2006-2007 Konstantin Shishkov
5 * Partly based on vc9.c (c) 2005 Anonymous, Alex Beregszaszi, Michael Niedermayer
6 *
7 * This file is part of FFmpeg.
8 *
9 * FFmpeg is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Lesser General Public
11 * License as published by the Free Software Foundation; either
12 * version 2.1 of the License, or (at your option) any later version.
13 *
14 * FFmpeg is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Lesser General Public License for more details.
18 *
19 * You should have received a copy of the GNU Lesser General Public
20 * License along with FFmpeg; if not, write to the Free Software
21 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22 */
23
24/**
25 * @file
26 * VC-1 and WMV3 decoder
27 */
28
29#include "avcodec.h"
30#include "blockdsp.h"
31#include "get_bits.h"
32#include "internal.h"
33#include "mpeg_er.h"
34#include "mpegvideo.h"
35#include "msmpeg4.h"
36#include "msmpeg4data.h"
37#include "profiles.h"
38#include "vc1.h"
39#include "vc1data.h"
40#include "vdpau_compat.h"
41#include "libavutil/avassert.h"
42
43
44#if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
45
46typedef struct SpriteData {
47 /**
48 * Transform coefficients for both sprites in 16.16 fixed point format,
49 * in the order they appear in the bitstream:
50 * x scale
51 * rotation 1 (unused)
52 * x offset
53 * rotation 2 (unused)
54 * y scale
55 * y offset
56 * alpha
57 */
58 int coefs[2][7];
59
60 int effect_type, effect_flag;
    int effect_pcount1, effect_pcount2;         ///< number of parameters stored in effect_params1/effect_params2
62 int effect_params1[15], effect_params2[10]; ///< effect parameters in 16.16 fixed point format
63} SpriteData;
64
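/* Read a 30-bit field and recentre it around zero, giving a signed value in
 * 16.16 fixed-point format (the least significant bit is always zero). */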
65static inline int get_fp_val(GetBitContext* gb)
66{
67 return (get_bits_long(gb, 30) - (1 << 29)) << 1;
68}
69
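/* Parse one sprite affine transform: a 2-bit mode selects which coefficients
 * are coded explicitly (x offset only, uniform scale, separate x/y scale, or
 * the full matrix including the unused rotation terms), followed by the
 * y offset and an optional alpha value. */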
70static void vc1_sprite_parse_transform(GetBitContext* gb, int c[7])
71{
72 c[1] = c[3] = 0;
73
74 switch (get_bits(gb, 2)) {
75 case 0:
76 c[0] = 1 << 16;
77 c[2] = get_fp_val(gb);
78 c[4] = 1 << 16;
79 break;
80 case 1:
81 c[0] = c[4] = get_fp_val(gb);
82 c[2] = get_fp_val(gb);
83 break;
84 case 2:
85 c[0] = get_fp_val(gb);
86 c[2] = get_fp_val(gb);
87 c[4] = get_fp_val(gb);
88 break;
89 case 3:
90 c[0] = get_fp_val(gb);
91 c[1] = get_fp_val(gb);
92 c[2] = get_fp_val(gb);
93 c[3] = get_fp_val(gb);
94 c[4] = get_fp_val(gb);
95 break;
96 }
97 c[5] = get_fp_val(gb);
98 if (get_bits1(gb))
99 c[6] = get_fp_val(gb);
100 else
101 c[6] = 1 << 16;
102}
103
104static int vc1_parse_sprites(VC1Context *v, GetBitContext* gb, SpriteData* sd)
105{
106 AVCodecContext *avctx = v->s.avctx;
107 int sprite, i;
108
109 for (sprite = 0; sprite <= v->two_sprites; sprite++) {
110 vc1_sprite_parse_transform(gb, sd->coefs[sprite]);
111 if (sd->coefs[sprite][1] || sd->coefs[sprite][3])
112 avpriv_request_sample(avctx, "Non-zero rotation coefficients");
        av_log(avctx, AV_LOG_DEBUG, "%s", sprite ? "S2:" : "S1:");
114 for (i = 0; i < 7; i++)
115 av_log(avctx, AV_LOG_DEBUG, " %d.%.3d",
116 sd->coefs[sprite][i] / (1<<16),
117 (abs(sd->coefs[sprite][i]) & 0xFFFF) * 1000 / (1 << 16));
118 av_log(avctx, AV_LOG_DEBUG, "\n");
119 }
120
121 skip_bits(gb, 2);
    if ((sd->effect_type = get_bits_long(gb, 30))) {
123 switch (sd->effect_pcount1 = get_bits(gb, 4)) {
124 case 7:
125 vc1_sprite_parse_transform(gb, sd->effect_params1);
126 break;
127 case 14:
128 vc1_sprite_parse_transform(gb, sd->effect_params1);
129 vc1_sprite_parse_transform(gb, sd->effect_params1 + 7);
130 break;
131 default:
132 for (i = 0; i < sd->effect_pcount1; i++)
133 sd->effect_params1[i] = get_fp_val(gb);
134 }
135 if (sd->effect_type != 13 || sd->effect_params1[0] != sd->coefs[0][6]) {
136 // effect 13 is simple alpha blending and matches the opacity above
137 av_log(avctx, AV_LOG_DEBUG, "Effect: %d; params: ", sd->effect_type);
138 for (i = 0; i < sd->effect_pcount1; i++)
139 av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
140 sd->effect_params1[i] / (1 << 16),
141 (abs(sd->effect_params1[i]) & 0xFFFF) * 1000 / (1 << 16));
142 av_log(avctx, AV_LOG_DEBUG, "\n");
143 }
144
145 sd->effect_pcount2 = get_bits(gb, 16);
146 if (sd->effect_pcount2 > 10) {
147 av_log(avctx, AV_LOG_ERROR, "Too many effect parameters\n");
148 return AVERROR_INVALIDDATA;
149 } else if (sd->effect_pcount2) {
150 i = -1;
151 av_log(avctx, AV_LOG_DEBUG, "Effect params 2: ");
152 while (++i < sd->effect_pcount2) {
153 sd->effect_params2[i] = get_fp_val(gb);
154 av_log(avctx, AV_LOG_DEBUG, " %d.%.2d",
155 sd->effect_params2[i] / (1 << 16),
156 (abs(sd->effect_params2[i]) & 0xFFFF) * 1000 / (1 << 16));
157 }
158 av_log(avctx, AV_LOG_DEBUG, "\n");
159 }
160 }
    if ((sd->effect_flag = get_bits1(gb)))
162 av_log(avctx, AV_LOG_DEBUG, "Effect flag set\n");
163
164 if (get_bits_count(gb) >= gb->size_in_bits +
165 (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE ? 64 : 0)) {
166 av_log(avctx, AV_LOG_ERROR, "Buffer overrun\n");
167 return AVERROR_INVALIDDATA;
168 }
169 if (get_bits_count(gb) < gb->size_in_bits - 8)
170 av_log(avctx, AV_LOG_WARNING, "Buffer not fully read\n");
171
172 return 0;
173}
174
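/* Composite the sprite(s) into sprite_output_frame: each output row is
 * horizontally resampled with vc1dsp.sprite_h (a two-row cache avoids
 * rescaling the same source line twice) and then vertically interpolated
 * and, when two sprites are present, alpha-blended by the sprite_v_* DSP
 * routines. */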
175static void vc1_draw_sprites(VC1Context *v, SpriteData* sd)
176{
177 int i, plane, row, sprite;
178 int sr_cache[2][2] = { { -1, -1 }, { -1, -1 } };
179 uint8_t* src_h[2][2];
180 int xoff[2], xadv[2], yoff[2], yadv[2], alpha;
181 int ysub[2];
182 MpegEncContext *s = &v->s;
183
184 for (i = 0; i <= v->two_sprites; i++) {
185 xoff[i] = av_clip(sd->coefs[i][2], 0, v->sprite_width-1 << 16);
186 xadv[i] = sd->coefs[i][0];
187 if (xadv[i] != 1<<16 || (v->sprite_width << 16) - (v->output_width << 16) - xoff[i])
188 xadv[i] = av_clip(xadv[i], 0, ((v->sprite_width<<16) - xoff[i] - 1) / v->output_width);
189
190 yoff[i] = av_clip(sd->coefs[i][5], 0, v->sprite_height-1 << 16);
191 yadv[i] = av_clip(sd->coefs[i][4], 0, ((v->sprite_height << 16) - yoff[i]) / v->output_height);
192 }
193 alpha = av_clip_uint16(sd->coefs[1][6]);
194
195 for (plane = 0; plane < (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++) {
196 int width = v->output_width>>!!plane;
197
198 for (row = 0; row < v->output_height>>!!plane; row++) {
199 uint8_t *dst = v->sprite_output_frame->data[plane] +
200 v->sprite_output_frame->linesize[plane] * row;
201
202 for (sprite = 0; sprite <= v->two_sprites; sprite++) {
203 uint8_t *iplane = s->current_picture.f->data[plane];
204 int iline = s->current_picture.f->linesize[plane];
205 int ycoord = yoff[sprite] + yadv[sprite] * row;
206 int yline = ycoord >> 16;
207 int next_line;
208 ysub[sprite] = ycoord & 0xFFFF;
209 if (sprite) {
210 iplane = s->last_picture.f->data[plane];
211 iline = s->last_picture.f->linesize[plane];
212 }
213 next_line = FFMIN(yline + 1, (v->sprite_height >> !!plane) - 1) * iline;
214 if (!(xoff[sprite] & 0xFFFF) && xadv[sprite] == 1 << 16) {
215 src_h[sprite][0] = iplane + (xoff[sprite] >> 16) + yline * iline;
216 if (ysub[sprite])
217 src_h[sprite][1] = iplane + (xoff[sprite] >> 16) + next_line;
218 } else {
219 if (sr_cache[sprite][0] != yline) {
220 if (sr_cache[sprite][1] == yline) {
221 FFSWAP(uint8_t*, v->sr_rows[sprite][0], v->sr_rows[sprite][1]);
222 FFSWAP(int, sr_cache[sprite][0], sr_cache[sprite][1]);
223 } else {
224 v->vc1dsp.sprite_h(v->sr_rows[sprite][0], iplane + yline * iline, xoff[sprite], xadv[sprite], width);
225 sr_cache[sprite][0] = yline;
226 }
227 }
228 if (ysub[sprite] && sr_cache[sprite][1] != yline + 1) {
229 v->vc1dsp.sprite_h(v->sr_rows[sprite][1],
230 iplane + next_line, xoff[sprite],
231 xadv[sprite], width);
232 sr_cache[sprite][1] = yline + 1;
233 }
234 src_h[sprite][0] = v->sr_rows[sprite][0];
235 src_h[sprite][1] = v->sr_rows[sprite][1];
236 }
237 }
238
239 if (!v->two_sprites) {
240 if (ysub[0]) {
241 v->vc1dsp.sprite_v_single(dst, src_h[0][0], src_h[0][1], ysub[0], width);
242 } else {
243 memcpy(dst, src_h[0][0], width);
244 }
245 } else {
246 if (ysub[0] && ysub[1]) {
247 v->vc1dsp.sprite_v_double_twoscale(dst, src_h[0][0], src_h[0][1], ysub[0],
248 src_h[1][0], src_h[1][1], ysub[1], alpha, width);
249 } else if (ysub[0]) {
250 v->vc1dsp.sprite_v_double_onescale(dst, src_h[0][0], src_h[0][1], ysub[0],
251 src_h[1][0], alpha, width);
252 } else if (ysub[1]) {
253 v->vc1dsp.sprite_v_double_onescale(dst, src_h[1][0], src_h[1][1], ysub[1],
254 src_h[0][0], (1<<16)-1-alpha, width);
255 } else {
256 v->vc1dsp.sprite_v_double_noscale(dst, src_h[0][0], src_h[1][0], alpha, width);
257 }
258 }
259 }
260
261 if (!plane) {
262 for (i = 0; i <= v->two_sprites; i++) {
263 xoff[i] >>= 1;
264 yoff[i] >>= 1;
265 }
266 }
267
268 }
269}
270
271
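/* Parse the sprite parameters from the bitstream and render the sprite(s)
 * into v->sprite_output_frame; requires at least one decoded sprite picture. */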
272static int vc1_decode_sprites(VC1Context *v, GetBitContext* gb)
273{
274 int ret;
275 MpegEncContext *s = &v->s;
276 AVCodecContext *avctx = s->avctx;
277 SpriteData sd;
278
279 memset(&sd, 0, sizeof(sd));
280
281 ret = vc1_parse_sprites(v, gb, &sd);
282 if (ret < 0)
283 return ret;
284
285 if (!s->current_picture.f || !s->current_picture.f->data[0]) {
286 av_log(avctx, AV_LOG_ERROR, "Got no sprites\n");
287 return AVERROR_UNKNOWN;
288 }
289
290 if (v->two_sprites && (!s->last_picture_ptr || !s->last_picture.f->data[0])) {
291 av_log(avctx, AV_LOG_WARNING, "Need two sprites, only got one\n");
292 v->two_sprites = 0;
293 }
294
295 av_frame_unref(v->sprite_output_frame);
296 if ((ret = ff_get_buffer(avctx, v->sprite_output_frame, 0)) < 0)
297 return ret;
298
299 vc1_draw_sprites(v, &sd);
300
301 return 0;
302}
303
304static void vc1_sprite_flush(AVCodecContext *avctx)
305{
306 VC1Context *v = avctx->priv_data;
307 MpegEncContext *s = &v->s;
308 AVFrame *f = s->current_picture.f;
309 int plane, i;
310
    /* Windows Media Image codecs have a convergence interval of two keyframes.
       Since we can't enforce it, clear the missing sprite to black. This is
       technically wrong, but it looks better than doing nothing. */
314
315 if (f && f->data[0])
316 for (plane = 0; plane < (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++)
317 for (i = 0; i < v->sprite_height>>!!plane; i++)
318 memset(f->data[plane] + i * f->linesize[plane],
319 plane ? 128 : 0, f->linesize[plane]);
320}
321
322#endif
323
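/* Allocate the per-macroblock bitplanes and per-block side tables used by the
 * decoder; the macroblock height is aligned to 2 since field pictures need an
 * even number of macroblock rows. */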
324av_cold int ff_vc1_decode_init_alloc_tables(VC1Context *v)
325{
326 MpegEncContext *s = &v->s;
327 int i, ret = AVERROR(ENOMEM);
328 int mb_height = FFALIGN(s->mb_height, 2);
329
330 /* Allocate mb bitplanes */
331 v->mv_type_mb_plane = av_malloc (s->mb_stride * mb_height);
332 v->direct_mb_plane = av_malloc (s->mb_stride * mb_height);
333 v->forward_mb_plane = av_malloc (s->mb_stride * mb_height);
334 v->fieldtx_plane = av_mallocz(s->mb_stride * mb_height);
335 v->acpred_plane = av_malloc (s->mb_stride * mb_height);
336 v->over_flags_plane = av_malloc (s->mb_stride * mb_height);
337 if (!v->mv_type_mb_plane || !v->direct_mb_plane || !v->forward_mb_plane ||
338 !v->fieldtx_plane || !v->acpred_plane || !v->over_flags_plane)
339 goto error;
340
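    /* The cbp/ttblk/is_intra/luma_mv buffers each hold two macroblock rows;
     * the working pointers start one stride in, which keeps the previous row
     * addressable while the current one is being filled. */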
341 v->n_allocated_blks = s->mb_width + 2;
342 v->block = av_malloc(sizeof(*v->block) * v->n_allocated_blks);
343 v->cbp_base = av_malloc(sizeof(v->cbp_base[0]) * 2 * s->mb_stride);
344 if (!v->block || !v->cbp_base)
345 goto error;
346 v->cbp = v->cbp_base + s->mb_stride;
347 v->ttblk_base = av_malloc(sizeof(v->ttblk_base[0]) * 2 * s->mb_stride);
348 if (!v->ttblk_base)
349 goto error;
350 v->ttblk = v->ttblk_base + s->mb_stride;
351 v->is_intra_base = av_mallocz(sizeof(v->is_intra_base[0]) * 2 * s->mb_stride);
352 if (!v->is_intra_base)
353 goto error;
354 v->is_intra = v->is_intra_base + s->mb_stride;
355 v->luma_mv_base = av_mallocz(sizeof(v->luma_mv_base[0]) * 2 * s->mb_stride);
356 if (!v->luma_mv_base)
357 goto error;
358 v->luma_mv = v->luma_mv_base + s->mb_stride;
359
    /* Allocate the block type info so that it can be indexed with s->block_index[] */
361 v->mb_type_base = av_malloc(s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
362 if (!v->mb_type_base)
363 goto error;
364 v->mb_type[0] = v->mb_type_base + s->b8_stride + 1;
365 v->mb_type[1] = v->mb_type_base + s->b8_stride * (mb_height * 2 + 1) + s->mb_stride + 1;
366 v->mb_type[2] = v->mb_type[1] + s->mb_stride * (mb_height + 1);
367
368 /* allocate memory to store block level MV info */
369 v->blk_mv_type_base = av_mallocz( s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
370 if (!v->blk_mv_type_base)
371 goto error;
372 v->blk_mv_type = v->blk_mv_type_base + s->b8_stride + 1;
373 v->mv_f_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
374 if (!v->mv_f_base)
375 goto error;
376 v->mv_f[0] = v->mv_f_base + s->b8_stride + 1;
377 v->mv_f[1] = v->mv_f[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
378 v->mv_f_next_base = av_mallocz(2 * (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2));
379 if (!v->mv_f_next_base)
380 goto error;
381 v->mv_f_next[0] = v->mv_f_next_base + s->b8_stride + 1;
382 v->mv_f_next[1] = v->mv_f_next[0] + (s->b8_stride * (mb_height * 2 + 1) + s->mb_stride * (mb_height + 1) * 2);
383
384 if (s->avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || s->avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
385 for (i = 0; i < 4; i++)
386 if (!(v->sr_rows[i >> 1][i & 1] = av_malloc(v->output_width)))
387 return AVERROR(ENOMEM);
388 }
389
390 ret = ff_intrax8_common_init(s->avctx, &v->x8, &s->idsp,
391 s->block, s->block_last_index,
392 s->mb_width, s->mb_height);
393 if (ret < 0)
394 goto error;
395
396 return 0;
397
398error:
399 ff_vc1_decode_end(s->avctx);
400 return ret;
401}
402
403av_cold void ff_vc1_init_transposed_scantables(VC1Context *v)
404{
405 int i;
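    /* transpose(x) swaps the row (x >> 3) and column (x & 7) of a position
     * within an 8x8 block, i.e. it converts each scan table entry to its
     * transposed counterpart. */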
406 for (i = 0; i < 64; i++) {
407#define transpose(x) (((x) >> 3) | (((x) & 7) << 3))
408 v->zz_8x8[0][i] = transpose(ff_wmv1_scantable[0][i]);
409 v->zz_8x8[1][i] = transpose(ff_wmv1_scantable[1][i]);
410 v->zz_8x8[2][i] = transpose(ff_wmv1_scantable[2][i]);
411 v->zz_8x8[3][i] = transpose(ff_wmv1_scantable[3][i]);
412 v->zzi_8x8[i] = transpose(ff_vc1_adv_interlaced_8x8_zz[i]);
413 }
414 v->left_blk_sh = 0;
415 v->top_blk_sh = 3;
416}
417
/** Initialize a VC-1/WMV3 decoder
 * @todo Handle VC-1 IDUs (Transport level?)
 * @todo Decipher the remaining bits in extradata
 */
422static av_cold int vc1_decode_init(AVCodecContext *avctx)
423{
424 VC1Context *v = avctx->priv_data;
425 MpegEncContext *s = &v->s;
426 GetBitContext gb;
427 int ret;
428
429 /* save the container output size for WMImage */
430 v->output_width = avctx->width;
431 v->output_height = avctx->height;
432
433 if (!avctx->extradata_size || !avctx->extradata)
434 return -1;
435 v->s.avctx = avctx;
436
437 if ((ret = ff_vc1_init_common(v)) < 0)
438 return ret;
439
440 if (avctx->codec_id == AV_CODEC_ID_WMV3 || avctx->codec_id == AV_CODEC_ID_WMV3IMAGE) {
441 int count = 0;
442
        // WMV3 appears to store a sequence header in the extradata;
        // an advanced-profile sequence header may also precede the first frame.
        // The last byte of the extradata is a version number, 1 for the
        // samples we can decode.
447
448 init_get_bits(&gb, avctx->extradata, avctx->extradata_size*8);
449
450 if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0)
451 return ret;
452
453 count = avctx->extradata_size*8 - get_bits_count(&gb);
454 if (count > 0) {
455 av_log(avctx, AV_LOG_INFO, "Extra data: %i bits left, value: %X\n",
456 count, get_bits_long(&gb, FFMIN(count, 32)));
457 } else if (count < 0) {
458 av_log(avctx, AV_LOG_INFO, "Read %i bits in overflow\n", -count);
459 }
460 } else { // VC1/WVC1/WVP2
461 const uint8_t *start = avctx->extradata;
462 uint8_t *end = avctx->extradata + avctx->extradata_size;
463 const uint8_t *next;
464 int size, buf2_size;
465 uint8_t *buf2 = NULL;
466 int seq_initialized = 0, ep_initialized = 0;
467
468 if (avctx->extradata_size < 16) {
469 av_log(avctx, AV_LOG_ERROR, "Extradata size too small: %i\n", avctx->extradata_size);
470 return -1;
471 }
472
473 buf2 = av_mallocz(avctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
474 if (!buf2)
475 return AVERROR(ENOMEM);
476
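        /* Walk the start-code delimited units in the extradata, unescape each
         * payload and hand sequence headers and entry-point headers to their
         * respective parsers. */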
477 start = find_next_marker(start, end); // in WVC1 extradata first byte is its size, but can be 0 in mkv
478 next = start;
479 for (; next < end; start = next) {
480 next = find_next_marker(start + 4, end);
481 size = next - start - 4;
482 if (size <= 0)
483 continue;
484 buf2_size = vc1_unescape_buffer(start + 4, size, buf2);
485 init_get_bits(&gb, buf2, buf2_size * 8);
486 switch (AV_RB32(start)) {
487 case VC1_CODE_SEQHDR:
488 if ((ret = ff_vc1_decode_sequence_header(avctx, v, &gb)) < 0) {
489 av_free(buf2);
490 return ret;
491 }
492 seq_initialized = 1;
493 break;
494 case VC1_CODE_ENTRYPOINT:
495 if ((ret = ff_vc1_decode_entry_point(avctx, v, &gb)) < 0) {
496 av_free(buf2);
497 return ret;
498 }
499 ep_initialized = 1;
500 break;
501 }
502 }
503 av_free(buf2);
504 if (!seq_initialized || !ep_initialized) {
505 av_log(avctx, AV_LOG_ERROR, "Incomplete extradata\n");
506 return -1;
507 }
508 v->res_sprite = (avctx->codec_id == AV_CODEC_ID_VC1IMAGE);
509 }
510
511 avctx->profile = v->profile;
512 if (v->profile == PROFILE_ADVANCED)
513 avctx->level = v->level;
514
515 if (!CONFIG_GRAY || !(avctx->flags & AV_CODEC_FLAG_GRAY))
516 avctx->pix_fmt = ff_get_format(avctx, avctx->codec->pix_fmts);
517 else {
518 avctx->pix_fmt = AV_PIX_FMT_GRAY8;
519 if (avctx->color_range == AVCOL_RANGE_UNSPECIFIED)
520 avctx->color_range = AVCOL_RANGE_MPEG;
521 }
522
523 // ensure static VLC tables are initialized
524 if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
525 return ret;
526 if ((ret = ff_vc1_decode_init_alloc_tables(v)) < 0)
527 return ret;
528 // Hack to ensure the above functions will be called
529 // again once we know all necessary settings.
530 // That this is necessary might indicate a bug.
531 ff_vc1_decode_end(avctx);
532
533 ff_blockdsp_init(&s->bdsp, avctx);
534 ff_h264chroma_init(&v->h264chroma, 8);
535 ff_qpeldsp_init(&s->qdsp);
536
537 // Must happen after calling ff_vc1_decode_end
538 // to avoid de-allocating the sprite_output_frame
539 v->sprite_output_frame = av_frame_alloc();
540 if (!v->sprite_output_frame)
541 return AVERROR(ENOMEM);
542
543 avctx->has_b_frames = !!avctx->max_b_frames;
544
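    /* Forward the colour description fields only for values whose VC-1 code
     * points coincide with the corresponding AVCOL_* enumerations. */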
545 if (v->color_prim == 1 || v->color_prim == 5 || v->color_prim == 6)
546 avctx->color_primaries = v->color_prim;
547 if (v->transfer_char == 1 || v->transfer_char == 7)
548 avctx->color_trc = v->transfer_char;
549 if (v->matrix_coef == 1 || v->matrix_coef == 6 || v->matrix_coef == 7)
550 avctx->colorspace = v->matrix_coef;
551
552 s->mb_width = (avctx->coded_width + 15) >> 4;
553 s->mb_height = (avctx->coded_height + 15) >> 4;
554
555 if (v->profile == PROFILE_ADVANCED || v->res_fasttx) {
556 ff_vc1_init_transposed_scantables(v);
557 } else {
558 memcpy(v->zz_8x8, ff_wmv1_scantable, 4*64);
559 v->left_blk_sh = 3;
560 v->top_blk_sh = 0;
561 }
562
563 if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
564 v->sprite_width = avctx->coded_width;
565 v->sprite_height = avctx->coded_height;
566
567 avctx->coded_width = avctx->width = v->output_width;
568 avctx->coded_height = avctx->height = v->output_height;
569
570 // prevent 16.16 overflows
571 if (v->sprite_width > 1 << 14 ||
572 v->sprite_height > 1 << 14 ||
573 v->output_width > 1 << 14 ||
574 v->output_height > 1 << 14) return -1;
575
576 if ((v->sprite_width&1) || (v->sprite_height&1)) {
577 avpriv_request_sample(avctx, "odd sprites support");
578 return AVERROR_PATCHWELCOME;
579 }
580 }
581 return 0;
582}
583
584/** Close a VC1/WMV3 decoder
585 * @warning Initial try at using MpegEncContext stuff
586 */
587av_cold int ff_vc1_decode_end(AVCodecContext *avctx)
588{
589 VC1Context *v = avctx->priv_data;
590 int i;
591
592 av_frame_free(&v->sprite_output_frame);
593
594 for (i = 0; i < 4; i++)
595 av_freep(&v->sr_rows[i >> 1][i & 1]);
596 av_freep(&v->hrd_rate);
597 av_freep(&v->hrd_buffer);
598 ff_mpv_common_end(&v->s);
599 av_freep(&v->mv_type_mb_plane);
600 av_freep(&v->direct_mb_plane);
601 av_freep(&v->forward_mb_plane);
602 av_freep(&v->fieldtx_plane);
603 av_freep(&v->acpred_plane);
604 av_freep(&v->over_flags_plane);
605 av_freep(&v->mb_type_base);
606 av_freep(&v->blk_mv_type_base);
607 av_freep(&v->mv_f_base);
608 av_freep(&v->mv_f_next_base);
609 av_freep(&v->block);
610 av_freep(&v->cbp_base);
611 av_freep(&v->ttblk_base);
612 av_freep(&v->is_intra_base); // FIXME use v->mb_type[]
613 av_freep(&v->luma_mv_base);
614 ff_intrax8_common_end(&v->x8);
615 return 0;
616}
617
618
/** Decode a VC-1/WMV3 frame
 * @todo Handle VC-1 IDUs (Transport level?)
 */
622static int vc1_decode_frame(AVCodecContext *avctx, void *data,
623 int *got_frame, AVPacket *avpkt)
624{
625 const uint8_t *buf = avpkt->data;
626 int buf_size = avpkt->size, n_slices = 0, i, ret;
627 VC1Context *v = avctx->priv_data;
628 MpegEncContext *s = &v->s;
629 AVFrame *pict = data;
630 uint8_t *buf2 = NULL;
631 const uint8_t *buf_start = buf, *buf_start_second_field = NULL;
632 int mb_height, n_slices1=-1;
633 struct {
634 uint8_t *buf;
635 GetBitContext gb;
636 int mby_start;
637 const uint8_t *rawbuf;
638 int raw_size;
639 } *slices = NULL, *tmp;
640
641 v->second_field = 0;
642
643 if(s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)
644 s->low_delay = 1;
645
646 /* no supplementary picture */
647 if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == VC1_CODE_ENDOFSEQ)) {
648 /* special case for last picture */
649 if (s->low_delay == 0 && s->next_picture_ptr) {
650 if ((ret = av_frame_ref(pict, s->next_picture_ptr->f)) < 0)
651 return ret;
652 s->next_picture_ptr = NULL;
653
654 *got_frame = 1;
655 }
656
657 return buf_size;
658 }
659
660#if FF_API_CAP_VDPAU
661 if (s->avctx->codec->capabilities&AV_CODEC_CAP_HWACCEL_VDPAU) {
662 if (v->profile < PROFILE_ADVANCED)
663 avctx->pix_fmt = AV_PIX_FMT_VDPAU_WMV3;
664 else
665 avctx->pix_fmt = AV_PIX_FMT_VDPAU_VC1;
666 }
667#endif
668
669 //for advanced profile we may need to parse and unescape data
670 if (avctx->codec_id == AV_CODEC_ID_VC1 || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
671 int buf_size2 = 0;
672 buf2 = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
673 if (!buf2)
674 return AVERROR(ENOMEM);
675
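        /* The packet may hold several start-code delimited units (frame, field,
         * entry point, slices): unescape the frame data into buf2 and the
         * field/slice payloads into slices[], remembering where each field or
         * slice starts so they can be decoded in order below. */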
676 if (IS_MARKER(AV_RB32(buf))) { /* frame starts with marker and needs to be parsed */
677 const uint8_t *start, *end, *next;
678 int size;
679
680 next = buf;
681 for (start = buf, end = buf + buf_size; next < end; start = next) {
682 next = find_next_marker(start + 4, end);
683 size = next - start - 4;
684 if (size <= 0) continue;
685 switch (AV_RB32(start)) {
686 case VC1_CODE_FRAME:
687 if (avctx->hwaccel
688#if FF_API_CAP_VDPAU
689 || s->avctx->codec->capabilities&AV_CODEC_CAP_HWACCEL_VDPAU
690#endif
691 )
692 buf_start = start;
693 buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
694 break;
695 case VC1_CODE_FIELD: {
696 int buf_size3;
697 if (avctx->hwaccel
698#if FF_API_CAP_VDPAU
699 || s->avctx->codec->capabilities&AV_CODEC_CAP_HWACCEL_VDPAU
700#endif
701 )
702 buf_start_second_field = start;
703 tmp = av_realloc_array(slices, sizeof(*slices), (n_slices+1));
704 if (!tmp) {
705 ret = AVERROR(ENOMEM);
706 goto err;
707 }
708 slices = tmp;
709 slices[n_slices].buf = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
710 if (!slices[n_slices].buf) {
711 ret = AVERROR(ENOMEM);
712 goto err;
713 }
714 buf_size3 = vc1_unescape_buffer(start + 4, size,
715 slices[n_slices].buf);
716 init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
717 buf_size3 << 3);
                /* Assume the field marker sits at the exact middle of the
                   picture; this is not guaranteed to be correct. */
720 slices[n_slices].mby_start = s->mb_height + 1 >> 1;
721 slices[n_slices].rawbuf = start;
722 slices[n_slices].raw_size = size + 4;
723 n_slices1 = n_slices - 1; // index of the last slice of the first field
724 n_slices++;
725 break;
726 }
727 case VC1_CODE_ENTRYPOINT: /* it should be before frame data */
728 buf_size2 = vc1_unescape_buffer(start + 4, size, buf2);
729 init_get_bits(&s->gb, buf2, buf_size2 * 8);
730 ff_vc1_decode_entry_point(avctx, v, &s->gb);
731 break;
732 case VC1_CODE_SLICE: {
733 int buf_size3;
734 tmp = av_realloc_array(slices, sizeof(*slices), (n_slices+1));
735 if (!tmp) {
736 ret = AVERROR(ENOMEM);
737 goto err;
738 }
739 slices = tmp;
740 slices[n_slices].buf = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
741 if (!slices[n_slices].buf) {
742 ret = AVERROR(ENOMEM);
743 goto err;
744 }
745 buf_size3 = vc1_unescape_buffer(start + 4, size,
746 slices[n_slices].buf);
747 init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
748 buf_size3 << 3);
749 slices[n_slices].mby_start = get_bits(&slices[n_slices].gb, 9);
750 slices[n_slices].rawbuf = start;
751 slices[n_slices].raw_size = size + 4;
752 n_slices++;
753 break;
754 }
755 }
756 }
757 } else if (v->interlace && ((buf[0] & 0xC0) == 0xC0)) { /* WVC1 interlaced stores both fields divided by marker */
758 const uint8_t *divider;
759 int buf_size3;
760
761 divider = find_next_marker(buf, buf + buf_size);
762 if ((divider == (buf + buf_size)) || AV_RB32(divider) != VC1_CODE_FIELD) {
763 av_log(avctx, AV_LOG_ERROR, "Error in WVC1 interlaced frame\n");
764 ret = AVERROR_INVALIDDATA;
765 goto err;
766 } else { // found field marker, unescape second field
767 if (avctx->hwaccel
768#if FF_API_CAP_VDPAU
769 || s->avctx->codec->capabilities&AV_CODEC_CAP_HWACCEL_VDPAU
770#endif
771 )
772 buf_start_second_field = divider;
773 tmp = av_realloc_array(slices, sizeof(*slices), (n_slices+1));
774 if (!tmp) {
775 ret = AVERROR(ENOMEM);
776 goto err;
777 }
778 slices = tmp;
779 slices[n_slices].buf = av_mallocz(buf_size + AV_INPUT_BUFFER_PADDING_SIZE);
780 if (!slices[n_slices].buf) {
781 ret = AVERROR(ENOMEM);
782 goto err;
783 }
784 buf_size3 = vc1_unescape_buffer(divider + 4, buf + buf_size - divider - 4, slices[n_slices].buf);
785 init_get_bits(&slices[n_slices].gb, slices[n_slices].buf,
786 buf_size3 << 3);
787 slices[n_slices].mby_start = s->mb_height + 1 >> 1;
788 slices[n_slices].rawbuf = divider;
789 slices[n_slices].raw_size = buf + buf_size - divider;
790 n_slices1 = n_slices - 1;
791 n_slices++;
792 }
793 buf_size2 = vc1_unescape_buffer(buf, divider - buf, buf2);
794 } else {
795 buf_size2 = vc1_unescape_buffer(buf, buf_size, buf2);
796 }
797 init_get_bits(&s->gb, buf2, buf_size2*8);
798 } else
799 init_get_bits(&s->gb, buf, buf_size*8);
800
801 if (v->res_sprite) {
802 v->new_sprite = !get_bits1(&s->gb);
803 v->two_sprites = get_bits1(&s->gb);
804 /* res_sprite means a Windows Media Image stream, AV_CODEC_ID_*IMAGE means
805 we're using the sprite compositor. These are intentionally kept separate
806 so you can get the raw sprites by using the wmv3 decoder for WMVP or
807 the vc1 one for WVP2 */
808 if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
809 if (v->new_sprite) {
810 // switch AVCodecContext parameters to those of the sprites
811 avctx->width = avctx->coded_width = v->sprite_width;
812 avctx->height = avctx->coded_height = v->sprite_height;
813 } else {
814 goto image;
815 }
816 }
817 }
818
819 if (s->context_initialized &&
820 (s->width != avctx->coded_width ||
821 s->height != avctx->coded_height)) {
822 ff_vc1_decode_end(avctx);
823 }
824
825 if (!s->context_initialized) {
826 if ((ret = ff_msmpeg4_decode_init(avctx)) < 0)
827 goto err;
828 if ((ret = ff_vc1_decode_init_alloc_tables(v)) < 0) {
829 ff_mpv_common_end(s);
830 goto err;
831 }
832
833 s->low_delay = !avctx->has_b_frames || v->res_sprite;
834
835 if (v->profile == PROFILE_ADVANCED) {
836 if(avctx->coded_width<=1 || avctx->coded_height<=1) {
837 ret = AVERROR_INVALIDDATA;
838 goto err;
839 }
840 s->h_edge_pos = avctx->coded_width;
841 s->v_edge_pos = avctx->coded_height;
842 }
843 }
844
845 // do parse frame header
846 v->pic_header_flag = 0;
847 v->first_pic_header_flag = 1;
848 if (v->profile < PROFILE_ADVANCED) {
849 if ((ret = ff_vc1_parse_frame_header(v, &s->gb)) < 0) {
850 goto err;
851 }
852 } else {
853 if ((ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
854 goto err;
855 }
856 }
857 v->first_pic_header_flag = 0;
858
859 if (avctx->debug & FF_DEBUG_PICT_INFO)
860 av_log(v->s.avctx, AV_LOG_DEBUG, "pict_type: %c\n", av_get_picture_type_char(s->pict_type));
861
862 if ((avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE)
863 && s->pict_type != AV_PICTURE_TYPE_I) {
864 av_log(v->s.avctx, AV_LOG_ERROR, "Sprite decoder: expected I-frame\n");
865 ret = AVERROR_INVALIDDATA;
866 goto err;
867 }
868
869 if ((s->mb_height >> v->field_mode) == 0) {
870 av_log(v->s.avctx, AV_LOG_ERROR, "image too short\n");
871 ret = AVERROR_INVALIDDATA;
872 goto err;
873 }
874
875 // for skipping the frame
876 s->current_picture.f->pict_type = s->pict_type;
877 s->current_picture.f->key_frame = s->pict_type == AV_PICTURE_TYPE_I;
878
879 /* skip B-frames if we don't have reference frames */
880 if (!s->last_picture_ptr && (s->pict_type == AV_PICTURE_TYPE_B || s->droppable)) {
881 av_log(v->s.avctx, AV_LOG_DEBUG, "Skipping B frame without reference frames\n");
882 goto end;
883 }
884 if ((avctx->skip_frame >= AVDISCARD_NONREF && s->pict_type == AV_PICTURE_TYPE_B) ||
885 (avctx->skip_frame >= AVDISCARD_NONKEY && s->pict_type != AV_PICTURE_TYPE_I) ||
886 avctx->skip_frame >= AVDISCARD_ALL) {
887 goto end;
888 }
889
890 if (s->next_p_frame_damaged) {
891 if (s->pict_type == AV_PICTURE_TYPE_B)
892 goto end;
893 else
894 s->next_p_frame_damaged = 0;
895 }
896
897 if ((ret = ff_mpv_frame_start(s, avctx)) < 0) {
898 goto err;
899 }
900
901 v->s.current_picture_ptr->field_picture = v->field_mode;
902 v->s.current_picture_ptr->f->interlaced_frame = (v->fcm != PROGRESSIVE);
903 v->s.current_picture_ptr->f->top_field_first = v->tff;
904
905 // process pulldown flags
906 s->current_picture_ptr->f->repeat_pict = 0;
    // Pulldown flags are only valid when 'broadcast' has been set,
    // in which case ticks_per_frame is 2.
909 if (v->rff) {
910 // repeat field
911 s->current_picture_ptr->f->repeat_pict = 1;
912 } else if (v->rptfrm) {
913 // repeat frames
914 s->current_picture_ptr->f->repeat_pict = v->rptfrm * 2;
915 }
916
917 s->me.qpel_put = s->qdsp.put_qpel_pixels_tab;
918 s->me.qpel_avg = s->qdsp.avg_qpel_pixels_tab;
919
920#if FF_API_CAP_VDPAU
921 if ((CONFIG_VC1_VDPAU_DECODER)
922 &&s->avctx->codec->capabilities&AV_CODEC_CAP_HWACCEL_VDPAU) {
923 if (v->field_mode && buf_start_second_field) {
924 ff_vdpau_vc1_decode_picture(s, buf_start, buf_start_second_field - buf_start);
925 ff_vdpau_vc1_decode_picture(s, buf_start_second_field, (buf + buf_size) - buf_start_second_field);
926 } else {
927 ff_vdpau_vc1_decode_picture(s, buf_start, (buf + buf_size) - buf_start);
928 }
929 } else
930#endif
931 if (avctx->hwaccel) {
932 s->mb_y = 0;
933 if (v->field_mode && buf_start_second_field) {
934 // decode first field
935 s->picture_structure = PICT_BOTTOM_FIELD - v->tff;
936 if ((ret = avctx->hwaccel->start_frame(avctx, buf_start, buf_start_second_field - buf_start)) < 0)
937 goto err;
938 if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, buf_start_second_field - buf_start)) < 0)
939 goto err;
940 if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
941 goto err;
942
943 // decode second field
944 s->gb = slices[n_slices1 + 1].gb;
945 s->picture_structure = PICT_TOP_FIELD + v->tff;
946 v->second_field = 1;
947 v->pic_header_flag = 0;
948 if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
                av_log(avctx, AV_LOG_ERROR, "parsing header for second field failed\n");
950 ret = AVERROR_INVALIDDATA;
951 goto err;
952 }
953 v->s.current_picture_ptr->f->pict_type = v->s.pict_type;
954
955 if ((ret = avctx->hwaccel->start_frame(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field)) < 0)
956 goto err;
957 if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start_second_field, (buf + buf_size) - buf_start_second_field)) < 0)
958 goto err;
959 if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
960 goto err;
961 } else {
962 s->picture_structure = PICT_FRAME;
963 if ((ret = avctx->hwaccel->start_frame(avctx, buf_start, (buf + buf_size) - buf_start)) < 0)
964 goto err;
965
966 if (n_slices == 0) {
967 // no slices, decode the frame as-is
968 if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, (buf + buf_size) - buf_start)) < 0)
969 goto err;
970 } else {
971 // decode the frame part as the first slice
972 if ((ret = avctx->hwaccel->decode_slice(avctx, buf_start, slices[0].rawbuf - buf_start)) < 0)
973 goto err;
974
975 // and process the slices as additional slices afterwards
976 for (i = 0 ; i < n_slices; i++) {
977 s->gb = slices[i].gb;
978 s->mb_y = slices[i].mby_start;
979
980 v->pic_header_flag = get_bits1(&s->gb);
981 if (v->pic_header_flag) {
982 if (ff_vc1_parse_frame_header_adv(v, &s->gb) < 0) {
983 av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
984 ret = AVERROR_INVALIDDATA;
985 if (avctx->err_recognition & AV_EF_EXPLODE)
986 goto err;
987 continue;
988 }
989 }
990
991 if ((ret = avctx->hwaccel->decode_slice(avctx, slices[i].rawbuf, slices[i].raw_size)) < 0)
992 goto err;
993 }
994 }
995 if ((ret = avctx->hwaccel->end_frame(avctx)) < 0)
996 goto err;
997 }
998 } else {
999 int header_ret = 0;
1000
1001 ff_mpeg_er_frame_start(s);
1002
1003 v->bits = buf_size * 8;
1004 v->end_mb_x = s->mb_width;
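        /* Field pictures are decoded at half height within the frame buffer,
         * so double the line sizes to step over the lines of the other field. */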
1005 if (v->field_mode) {
1006 s->current_picture.f->linesize[0] <<= 1;
1007 s->current_picture.f->linesize[1] <<= 1;
1008 s->current_picture.f->linesize[2] <<= 1;
1009 s->linesize <<= 1;
1010 s->uvlinesize <<= 1;
1011 }
1012 mb_height = s->mb_height >> v->field_mode;
1013
1014 av_assert0 (mb_height > 0);
1015
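        /* Decode the picture slice by slice: iteration 0 uses the bitstream
         * already in s->gb, later iterations switch to the field/slice data
         * collected above; start_mb_y/end_mb_y bound the macroblock rows that
         * each part covers. */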
1016 for (i = 0; i <= n_slices; i++) {
1017 if (i > 0 && slices[i - 1].mby_start >= mb_height) {
1018 if (v->field_mode <= 0) {
1019 av_log(v->s.avctx, AV_LOG_ERROR, "Slice %d starts beyond "
1020 "picture boundary (%d >= %d)\n", i,
1021 slices[i - 1].mby_start, mb_height);
1022 continue;
1023 }
1024 v->second_field = 1;
1025 av_assert0((s->mb_height & 1) == 0);
1026 v->blocks_off = s->b8_stride * (s->mb_height&~1);
1027 v->mb_off = s->mb_stride * s->mb_height >> 1;
1028 } else {
1029 v->second_field = 0;
1030 v->blocks_off = 0;
1031 v->mb_off = 0;
1032 }
1033 if (i) {
1034 v->pic_header_flag = 0;
1035 if (v->field_mode && i == n_slices1 + 2) {
1036 if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
1037 av_log(v->s.avctx, AV_LOG_ERROR, "Field header damaged\n");
1038 ret = AVERROR_INVALIDDATA;
1039 if (avctx->err_recognition & AV_EF_EXPLODE)
1040 goto err;
1041 continue;
1042 }
1043 } else if (get_bits1(&s->gb)) {
1044 v->pic_header_flag = 1;
1045 if ((header_ret = ff_vc1_parse_frame_header_adv(v, &s->gb)) < 0) {
1046 av_log(v->s.avctx, AV_LOG_ERROR, "Slice header damaged\n");
1047 ret = AVERROR_INVALIDDATA;
1048 if (avctx->err_recognition & AV_EF_EXPLODE)
1049 goto err;
1050 continue;
1051 }
1052 }
1053 }
1054 if (header_ret < 0)
1055 continue;
1056 s->start_mb_y = (i == 0) ? 0 : FFMAX(0, slices[i-1].mby_start % mb_height);
1057 if (!v->field_mode || v->second_field)
1058 s->end_mb_y = (i == n_slices ) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
1059 else {
1060 if (i >= n_slices) {
1061 av_log(v->s.avctx, AV_LOG_ERROR, "first field slice count too large\n");
1062 continue;
1063 }
1064 s->end_mb_y = (i <= n_slices1 + 1) ? mb_height : FFMIN(mb_height, slices[i].mby_start % mb_height);
1065 }
1066 if (s->end_mb_y <= s->start_mb_y) {
1067 av_log(v->s.avctx, AV_LOG_ERROR, "end mb y %d %d invalid\n", s->end_mb_y, s->start_mb_y);
1068 continue;
1069 }
1070 if (!v->p_frame_skipped && s->pict_type != AV_PICTURE_TYPE_I && !v->cbpcy_vlc) {
1071 av_log(v->s.avctx, AV_LOG_ERROR, "missing cbpcy_vlc\n");
1072 continue;
1073 }
1074 ff_vc1_decode_blocks(v);
1075 if (i != n_slices)
1076 s->gb = slices[i].gb;
1077 }
1078 if (v->field_mode) {
1079 v->second_field = 0;
1080 s->current_picture.f->linesize[0] >>= 1;
1081 s->current_picture.f->linesize[1] >>= 1;
1082 s->current_picture.f->linesize[2] >>= 1;
1083 s->linesize >>= 1;
1084 s->uvlinesize >>= 1;
1085 if (v->s.pict_type != AV_PICTURE_TYPE_BI && v->s.pict_type != AV_PICTURE_TYPE_B) {
1086 FFSWAP(uint8_t *, v->mv_f_next[0], v->mv_f[0]);
1087 FFSWAP(uint8_t *, v->mv_f_next[1], v->mv_f[1]);
1088 }
1089 }
1090 ff_dlog(s->avctx, "Consumed %i/%i bits\n",
1091 get_bits_count(&s->gb), s->gb.size_in_bits);
1092// if (get_bits_count(&s->gb) > buf_size * 8)
1093// return -1;
1094 if(s->er.error_occurred && s->pict_type == AV_PICTURE_TYPE_B) {
1095 ret = AVERROR_INVALIDDATA;
1096 goto err;
1097 }
1098 if (!v->field_mode)
1099 ff_er_frame_end(&s->er);
1100 }
1101
1102 ff_mpv_frame_end(s);
1103
1104 if (avctx->codec_id == AV_CODEC_ID_WMV3IMAGE || avctx->codec_id == AV_CODEC_ID_VC1IMAGE) {
1105image:
1106 avctx->width = avctx->coded_width = v->output_width;
1107 avctx->height = avctx->coded_height = v->output_height;
1108 if (avctx->skip_frame >= AVDISCARD_NONREF)
1109 goto end;
1110#if CONFIG_WMV3IMAGE_DECODER || CONFIG_VC1IMAGE_DECODER
1111 if ((ret = vc1_decode_sprites(v, &s->gb)) < 0)
1112 goto err;
1113#endif
1114 if ((ret = av_frame_ref(pict, v->sprite_output_frame)) < 0)
1115 goto err;
1116 *got_frame = 1;
1117 } else {
1118 if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
1119 if ((ret = av_frame_ref(pict, s->current_picture_ptr->f)) < 0)
1120 goto err;
1121 ff_print_debug_info(s, s->current_picture_ptr, pict);
1122 *got_frame = 1;
1123 } else if (s->last_picture_ptr) {
1124 if ((ret = av_frame_ref(pict, s->last_picture_ptr->f)) < 0)
1125 goto err;
1126 ff_print_debug_info(s, s->last_picture_ptr, pict);
1127 *got_frame = 1;
1128 }
1129 }
1130
1131end:
1132 av_free(buf2);
1133 for (i = 0; i < n_slices; i++)
1134 av_free(slices[i].buf);
1135 av_free(slices);
1136 return buf_size;
1137
1138err:
1139 av_free(buf2);
1140 for (i = 0; i < n_slices; i++)
1141 av_free(slices[i].buf);
1142 av_free(slices);
1143 return ret;
1144}
1145
1146
1147static const enum AVPixelFormat vc1_hwaccel_pixfmt_list_420[] = {
1148#if CONFIG_VC1_DXVA2_HWACCEL
1149 AV_PIX_FMT_DXVA2_VLD,
1150#endif
1151#if CONFIG_VC1_D3D11VA_HWACCEL
1152 AV_PIX_FMT_D3D11VA_VLD,
1153#endif
1154#if CONFIG_VC1_VAAPI_HWACCEL
1155 AV_PIX_FMT_VAAPI,
1156#endif
1157#if CONFIG_VC1_VDPAU_HWACCEL
1158 AV_PIX_FMT_VDPAU,
1159#endif
1160 AV_PIX_FMT_YUV420P,
1161 AV_PIX_FMT_NONE
1162};
1163
1164AVCodec ff_vc1_decoder = {
1165 .name = "vc1",
1166 .long_name = NULL_IF_CONFIG_SMALL("SMPTE VC-1"),
1167 .type = AVMEDIA_TYPE_VIDEO,
1168 .id = AV_CODEC_ID_VC1,
1169 .priv_data_size = sizeof(VC1Context),
1170 .init = vc1_decode_init,
1171 .close = ff_vc1_decode_end,
1172 .decode = vc1_decode_frame,
1173 .flush = ff_mpeg_flush,
1174 .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
1175 .pix_fmts = vc1_hwaccel_pixfmt_list_420,
1176 .profiles = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
1177};
1178
1179#if CONFIG_WMV3_DECODER
1180AVCodec ff_wmv3_decoder = {
1181 .name = "wmv3",
1182 .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9"),
1183 .type = AVMEDIA_TYPE_VIDEO,
1184 .id = AV_CODEC_ID_WMV3,
1185 .priv_data_size = sizeof(VC1Context),
1186 .init = vc1_decode_init,
1187 .close = ff_vc1_decode_end,
1188 .decode = vc1_decode_frame,
1189 .flush = ff_mpeg_flush,
1190 .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY,
1191 .pix_fmts = vc1_hwaccel_pixfmt_list_420,
1192 .profiles = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
1193};
1194#endif
1195
1196#if CONFIG_WMV3_VDPAU_DECODER && FF_API_VDPAU
1197AVCodec ff_wmv3_vdpau_decoder = {
1198 .name = "wmv3_vdpau",
1199 .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 VDPAU"),
1200 .type = AVMEDIA_TYPE_VIDEO,
1201 .id = AV_CODEC_ID_WMV3,
1202 .priv_data_size = sizeof(VC1Context),
1203 .init = vc1_decode_init,
1204 .close = ff_vc1_decode_end,
1205 .decode = vc1_decode_frame,
1206 .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY | AV_CODEC_CAP_HWACCEL_VDPAU,
1207 .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU_WMV3, AV_PIX_FMT_NONE },
1208 .profiles = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
1209};
1210#endif
1211
1212#if CONFIG_VC1_VDPAU_DECODER && FF_API_VDPAU
1213AVCodec ff_vc1_vdpau_decoder = {
1214 .name = "vc1_vdpau",
1215 .long_name = NULL_IF_CONFIG_SMALL("SMPTE VC-1 VDPAU"),
1216 .type = AVMEDIA_TYPE_VIDEO,
1217 .id = AV_CODEC_ID_VC1,
1218 .priv_data_size = sizeof(VC1Context),
1219 .init = vc1_decode_init,
1220 .close = ff_vc1_decode_end,
1221 .decode = vc1_decode_frame,
1222 .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY | AV_CODEC_CAP_HWACCEL_VDPAU,
1223 .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU_VC1, AV_PIX_FMT_NONE },
1224 .profiles = NULL_IF_CONFIG_SMALL(ff_vc1_profiles)
1225};
1226#endif
1227
1228#if CONFIG_WMV3IMAGE_DECODER
1229AVCodec ff_wmv3image_decoder = {
1230 .name = "wmv3image",
1231 .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image"),
1232 .type = AVMEDIA_TYPE_VIDEO,
1233 .id = AV_CODEC_ID_WMV3IMAGE,
1234 .priv_data_size = sizeof(VC1Context),
1235 .init = vc1_decode_init,
1236 .close = ff_vc1_decode_end,
1237 .decode = vc1_decode_frame,
1238 .capabilities = AV_CODEC_CAP_DR1,
1239 .flush = vc1_sprite_flush,
1240 .pix_fmts = (const enum AVPixelFormat[]) {
1241 AV_PIX_FMT_YUV420P,
1242 AV_PIX_FMT_NONE
1243 },
1244};
1245#endif
1246
1247#if CONFIG_VC1IMAGE_DECODER
1248AVCodec ff_vc1image_decoder = {
1249 .name = "vc1image",
1250 .long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 Image v2"),
1251 .type = AVMEDIA_TYPE_VIDEO,
1252 .id = AV_CODEC_ID_VC1IMAGE,
1253 .priv_data_size = sizeof(VC1Context),
1254 .init = vc1_decode_init,
1255 .close = ff_vc1_decode_end,
1256 .decode = vc1_decode_frame,
1257 .capabilities = AV_CODEC_CAP_DR1,
1258 .flush = vc1_sprite_flush,
1259 .pix_fmts = (const enum AVPixelFormat[]) {
1260 AV_PIX_FMT_YUV420P,
1261 AV_PIX_FMT_NONE
1262 },
1263};
1264#endif
1265