path: root/libavcodec/mediacodecdec_common.c
/*
 * Android MediaCodec decoder
 *
 * Copyright (c) 2015-2016 Matthieu Bouron <matthieu.bouron stupeflix.com>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>
#include <sys/types.h>

#include "libavutil/common.h"
#include "libavutil/mem.h"
#include "libavutil/log.h"
#include "libavutil/pixfmt.h"
#include "libavutil/time.h"
#include "libavutil/timestamp.h"

#include "avcodec.h"
#include "internal.h"

#include "mediacodec.h"
#include "mediacodec_surface.h"
#include "mediacodec_sw_buffer.h"
#include "mediacodec_wrapper.h"
#include "mediacodecdec_common.h"

/**
 * OMX.k3.video.decoder.avc, OMX.NVIDIA.*, OMX.SEC.avc.dec and OMX.google
 * codec workarounds used in various places are taken from the Gstreamer
 * project.
 *
 * Gstreamer references:
 * https://cgit.freedesktop.org/gstreamer/gst-plugins-bad/tree/sys/androidmedia/
 *
 * Gstreamer copyright notice:
 *
 * Copyright (C) 2012, Collabora Ltd.
 * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
 *
 * Copyright (C) 2012, Rafaël Carré <funman@videolanorg>
 *
 * Copyright (C) 2015, Sebastian Dröge <sebastian@centricular.com>
 *
 * Copyright (C) 2014-2015, Collabora Ltd.
 * Author: Matthieu Bouron <matthieu.bouron@gcollabora.com>
 *
 * Copyright (C) 2015, Edward Hervey
 * Author: Edward Hervey <bilboed@gmail.com>
 *
 * Copyright (C) 2015, Matthew Waters <matthew@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation
 * version 2.1 of the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

#define INPUT_DEQUEUE_TIMEOUT_US        8000
#define OUTPUT_DEQUEUE_TIMEOUT_US       8000
#define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US 1000000

enum {
    COLOR_FormatYUV420Planar                              = 0x13,
    COLOR_FormatYUV420SemiPlanar                          = 0x15,
    COLOR_FormatYCbYCr                                    = 0x19,
    COLOR_FormatAndroidOpaque                             = 0x7F000789,
    COLOR_QCOM_FormatYUV420SemiPlanar                     = 0x7fa30c00,
    COLOR_QCOM_FormatYUV420SemiPlanar32m                  = 0x7fa30c04,
    COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka = 0x7fa30c03,
    COLOR_TI_FormatYUV420PackedSemiPlanar                 = 0x7f000100,
    COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced       = 0x7f000001,
};

static const struct {

    int color_format;
    enum AVPixelFormat pix_fmt;

} color_formats[] = {

    { COLOR_FormatYUV420Planar,                              AV_PIX_FMT_YUV420P },
    { COLOR_FormatYUV420SemiPlanar,                          AV_PIX_FMT_NV12    },
    { COLOR_QCOM_FormatYUV420SemiPlanar,                     AV_PIX_FMT_NV12    },
    { COLOR_QCOM_FormatYUV420SemiPlanar32m,                  AV_PIX_FMT_NV12    },
    { COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka, AV_PIX_FMT_NV12    },
    { COLOR_TI_FormatYUV420PackedSemiPlanar,                 AV_PIX_FMT_NV12    },
    { COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced,       AV_PIX_FMT_NV12    },
    { 0 }
};

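/* Map a MediaCodec color format to the corresponding AVPixelFormat. When a
 * surface is attached, the mapping is bypassed and AV_PIX_FMT_MEDIACODEC is
 * returned so frames stay on the hardware rendering path. */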
static enum AVPixelFormat mcdec_map_color_format(AVCodecContext *avctx,
                                                 MediaCodecDecContext *s,
                                                 int color_format)
{
    int i;
    enum AVPixelFormat ret = AV_PIX_FMT_NONE;

    if (s->surface) {
        return AV_PIX_FMT_MEDIACODEC;
    }

    if (!strcmp(s->codec_name, "OMX.k3.video.decoder.avc") && color_format == COLOR_FormatYCbYCr) {
        s->color_format = color_format = COLOR_TI_FormatYUV420PackedSemiPlanar;
    }

    for (i = 0; i < FF_ARRAY_ELEMS(color_formats); i++) {
        if (color_formats[i].color_format == color_format) {
            return color_formats[i].pix_fmt;
        }
    }

    av_log(avctx, AV_LOG_ERROR, "Output color format 0x%x (value=%d) is not supported\n",
        color_format, color_format);

    return ret;
}

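/* The decoder context is reference counted: each wrapped hardware frame holds
 * a reference so the codec outlives frames still owned by the caller. The
 * last unref tears down the codec, the output format and the surface. */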
static void ff_mediacodec_dec_ref(MediaCodecDecContext *s)
{
    atomic_fetch_add(&s->refcount, 1);
}

static void ff_mediacodec_dec_unref(MediaCodecDecContext *s)
{
    if (!s)
        return;

    if (atomic_fetch_sub(&s->refcount, 1) == 1) {
        if (s->codec) {
            ff_AMediaCodec_delete(s->codec);
            s->codec = NULL;
        }

        if (s->format) {
            ff_AMediaFormat_delete(s->format);
            s->format = NULL;
        }

        if (s->surface) {
            ff_mediacodec_surface_unref(s->surface, NULL);
            s->surface = NULL;
        }

        av_freep(&s->codec_name);
        av_freep(&s);
    }
}

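/* AVBufferRef free callback for hardware frames: return the output buffer to
 * MediaCodec if the user has not rendered or released it already, then drop
 * the reference held on the decoder context. */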
static void mediacodec_buffer_release(void *opaque, uint8_t *data)
{
    AVMediaCodecBuffer *buffer = opaque;
    MediaCodecDecContext *ctx = buffer->ctx;
    int released = atomic_load(&buffer->released);

    if (!released) {
        ff_AMediaCodec_releaseOutputBuffer(ctx->codec, buffer->index, 0);
    }

    ff_mediacodec_dec_unref(ctx);
    av_freep(&buffer);
}

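/* Wrap a dequeued MediaCodec output buffer into an AVFrame without copying:
 * the frame carries an AVMediaCodecBuffer in data[3] and gives the underlying
 * buffer back to the codec when its AVBufferRef is freed. */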
static int mediacodec_wrap_hw_buffer(AVCodecContext *avctx,
                                     MediaCodecDecContext *s,
                                     ssize_t index,
                                     FFAMediaCodecBufferInfo *info,
                                     AVFrame *frame)
{
    int ret = 0;
    int status = 0;
    AVMediaCodecBuffer *buffer = NULL;

    frame->buf[0] = NULL;
    frame->width = avctx->width;
    frame->height = avctx->height;
    frame->format = avctx->pix_fmt;

    if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
        frame->pts = av_rescale_q(info->presentationTimeUs,
                                  av_make_q(1, 1000000),
                                  avctx->pkt_timebase);
    } else {
        frame->pts = info->presentationTimeUs;
    }
#if FF_API_PKT_PTS
FF_DISABLE_DEPRECATION_WARNINGS
    frame->pkt_pts = frame->pts;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
    frame->pkt_dts = AV_NOPTS_VALUE;

    buffer = av_mallocz(sizeof(AVMediaCodecBuffer));
    if (!buffer) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    atomic_init(&buffer->released, 0);

    frame->buf[0] = av_buffer_create(NULL,
                                     0,
                                     mediacodec_buffer_release,
                                     buffer,
                                     AV_BUFFER_FLAG_READONLY);

    if (!frame->buf[0]) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    buffer->ctx = s;
    ff_mediacodec_dec_ref(s);

    buffer->index = index;
    buffer->pts = info->presentationTimeUs;

    frame->data[3] = (uint8_t *)buffer;

    return 0;
fail:
    av_freep(&buffer);
    av_buffer_unref(&frame->buf[0]);
    status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
    if (status < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
        ret = AVERROR_EXTERNAL;
    }

    return ret;
}

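/* Copy a dequeued MediaCodec output buffer into a refcounted software frame,
 * converting from the codec's native color format. The MediaCodec buffer is
 * always released back to the codec before returning. */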
static int mediacodec_wrap_sw_buffer(AVCodecContext *avctx,
                                     MediaCodecDecContext *s,
                                     uint8_t *data,
                                     size_t size,
                                     ssize_t index,
                                     FFAMediaCodecBufferInfo *info,
                                     AVFrame *frame)
{
    int ret = 0;
    int status = 0;

    frame->width = avctx->width;
    frame->height = avctx->height;
    frame->format = avctx->pix_fmt;

    /* MediaCodec buffers need to be copied to our own refcounted buffers
     * because the flush command invalidates all input and output buffers.
     */
    if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) {
        av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer\n");
        goto done;
    }

    /* Override frame->pkt_pts as ff_get_buffer will override its value based
     * on the last avpacket received, which is not in sync with the frame:
     *   * N avpackets can be pushed before 1 frame is actually returned
     *   * 0-sized avpackets are pushed to flush remaining frames at EOS */
    frame->pts = info->presentationTimeUs;
#if FF_API_PKT_PTS
FF_DISABLE_DEPRECATION_WARNINGS
    frame->pkt_pts = info->presentationTimeUs;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
    frame->pkt_dts = AV_NOPTS_VALUE;

    av_log(avctx, AV_LOG_DEBUG,
            "Frame: width=%d stride=%d height=%d slice-height=%d "
            "crop-top=%d crop-bottom=%d crop-left=%d crop-right=%d encoder=%s\n"
            "destination linesizes=%d,%d,%d\n",
            avctx->width, s->stride, avctx->height, s->slice_height,
            s->crop_top, s->crop_bottom, s->crop_left, s->crop_right, s->codec_name,
            frame->linesize[0], frame->linesize[1], frame->linesize[2]);

    switch (s->color_format) {
    case COLOR_FormatYUV420Planar:
        ff_mediacodec_sw_buffer_copy_yuv420_planar(avctx, s, data, size, info, frame);
        break;
    case COLOR_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FormatYUV420SemiPlanar32m:
        ff_mediacodec_sw_buffer_copy_yuv420_semi_planar(avctx, s, data, size, info, frame);
        break;
    case COLOR_TI_FormatYUV420PackedSemiPlanar:
    case COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced:
        ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar(avctx, s, data, size, info, frame);
        break;
    case COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka:
        ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka(avctx, s, data, size, info, frame);
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unsupported color format 0x%x (value=%d)\n",
            s->color_format, s->color_format);
        ret = AVERROR(EINVAL);
        goto done;
    }

    ret = 0;
done:
    status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
    if (status < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
        ret = AVERROR_EXTERNAL;
    }

    return ret;
}

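/* Parse the codec's output MediaFormat: width, height, stride, slice-height,
 * color format and crop rectangle. Applies per-encoder workarounds (Nvidia,
 * Samsung) and updates the AVCodecContext dimensions from the crop window. */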
static int mediacodec_dec_parse_format(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    int width = 0;
    int height = 0;
    int32_t value = 0;
    char *format = NULL;

    if (!s->format) {
        av_log(avctx, AV_LOG_ERROR, "Output MediaFormat is not set\n");
        return AVERROR(EINVAL);
    }

    format = ff_AMediaFormat_toString(s->format);
    if (!format) {
        return AVERROR_EXTERNAL;
    }
    av_log(avctx, AV_LOG_DEBUG, "Parsing MediaFormat %s\n", format);
    av_freep(&format);

    /* Mandatory fields */
    if (!ff_AMediaFormat_getInt32(s->format, "width", &value)) {
        format = ff_AMediaFormat_toString(s->format);
        av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", "width", format);
        av_freep(&format);
        return AVERROR_EXTERNAL;
    }
    s->width = value;

    if (!ff_AMediaFormat_getInt32(s->format, "height", &value)) {
        format = ff_AMediaFormat_toString(s->format);
        av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", "height", format);
        av_freep(&format);
        return AVERROR_EXTERNAL;
    }
    s->height = value;

    if (!ff_AMediaFormat_getInt32(s->format, "stride", &value)) {
        format = ff_AMediaFormat_toString(s->format);
        av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", "stride", format);
        av_freep(&format);
        return AVERROR_EXTERNAL;
    }
    s->stride = value > 0 ? value : s->width;

    if (!ff_AMediaFormat_getInt32(s->format, "slice-height", &value)) {
        format = ff_AMediaFormat_toString(s->format);
        av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", "slice-height", format);
        av_freep(&format);
        return AVERROR_EXTERNAL;
    }
    s->slice_height = value > 0 ? value : s->height;

    if (strstr(s->codec_name, "OMX.Nvidia.")) {
        s->slice_height = FFALIGN(s->height, 16);
    } else if (strstr(s->codec_name, "OMX.SEC.avc.dec")) {
        s->slice_height = avctx->height;
        s->stride = avctx->width;
    }

    if (!ff_AMediaFormat_getInt32(s->format, "color-format", &value)) {
        format = ff_AMediaFormat_toString(s->format);
        av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", "color-format", format);
        av_freep(&format);
        return AVERROR_EXTERNAL;
    }
    s->color_format = value;

    s->pix_fmt = avctx->pix_fmt = mcdec_map_color_format(avctx, s, value);
    if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Output color format is not supported\n");
        return AVERROR(EINVAL);
    }

    /* Optional fields */
    if (ff_AMediaFormat_getInt32(s->format, "crop-top", &value))
        s->crop_top = value;

    if (ff_AMediaFormat_getInt32(s->format, "crop-bottom", &value))
        s->crop_bottom = value;

    if (ff_AMediaFormat_getInt32(s->format, "crop-left", &value))
        s->crop_left = value;

    if (ff_AMediaFormat_getInt32(s->format, "crop-right", &value))
        s->crop_right = value;

    width = s->crop_right + 1 - s->crop_left;
    height = s->crop_bottom + 1 - s->crop_top;

    av_log(avctx, AV_LOG_INFO,
        "Output crop parameters top=%d bottom=%d left=%d right=%d, "
        "resulting dimensions width=%d height=%d\n",
        s->crop_top, s->crop_bottom, s->crop_left, s->crop_right,
        width, height);

    return ff_set_dimensions(avctx, width, height);
}

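/* Reset the decoder state and flush the underlying MediaCodec, invalidating
 * all of its input and output buffers. */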
static int mediacodec_dec_flush_codec(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    FFAMediaCodec *codec = s->codec;
    int status;

    s->output_buffer_count = 0;

    s->draining = 0;
    s->flushing = 0;
    s->eos = 0;

    status = ff_AMediaCodec_flush(codec);
    if (status < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to flush codec\n");
        return AVERROR_EXTERNAL;
    }

    return 0;
}

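/* Initialize the decoder: negotiate the pixel format (and grab the user
 * supplied surface if AV_PIX_FMT_MEDIACODEC was selected), look up a codec by
 * MIME type and profile, then configure and start it. */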
int ff_mediacodec_dec_init(AVCodecContext *avctx, MediaCodecDecContext *s,
                           const char *mime, FFAMediaFormat *format)
{
    int ret = 0;
    int status;
    int profile;

    enum AVPixelFormat pix_fmt;
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_MEDIACODEC,
        AV_PIX_FMT_NONE,
    };

    atomic_init(&s->refcount, 1);

    pix_fmt = ff_get_format(avctx, pix_fmts);
    if (pix_fmt == AV_PIX_FMT_MEDIACODEC) {
        AVMediaCodecContext *user_ctx = avctx->hwaccel_context;

        if (user_ctx && user_ctx->surface) {
            s->surface = ff_mediacodec_surface_ref(user_ctx->surface, avctx);
            av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
        }
    }

    profile = ff_AMediaCodecProfile_getProfileFromAVCodecContext(avctx);
    if (profile < 0) {
        av_log(avctx, AV_LOG_WARNING, "Unsupported or unknown profile\n");
    }

    s->codec_name = ff_AMediaCodecList_getCodecNameByType(mime, profile, 0, avctx);
    if (!s->codec_name) {
        ret = AVERROR_EXTERNAL;
        goto fail;
    }

    av_log(avctx, AV_LOG_DEBUG, "Found decoder %s\n", s->codec_name);
    s->codec = ff_AMediaCodec_createCodecByName(s->codec_name);
    if (!s->codec) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create media decoder for type %s and name %s\n", mime, s->codec_name);
        ret = AVERROR_EXTERNAL;
        goto fail;
    }

    status = ff_AMediaCodec_configure(s->codec, format, s->surface, NULL, 0);
    if (status < 0) {
        char *desc = ff_AMediaFormat_toString(format);
        av_log(avctx, AV_LOG_ERROR,
            "Failed to configure codec (status = %d) with format %s\n",
            status, desc);
        av_freep(&desc);

        ret = AVERROR_EXTERNAL;
        goto fail;
    }

    status = ff_AMediaCodec_start(s->codec);
    if (status < 0) {
        char *desc = ff_AMediaFormat_toString(format);
        av_log(avctx, AV_LOG_ERROR,
            "Failed to start codec (status = %d) with format %s\n",
            status, desc);
        av_freep(&desc);
        ret = AVERROR_EXTERNAL;
        goto fail;
    }

    s->format = ff_AMediaCodec_getOutputFormat(s->codec);
    if (s->format) {
        if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
            av_log(avctx, AV_LOG_ERROR,
                "Failed to configure context\n");
            goto fail;
        }
    }

    av_log(avctx, AV_LOG_DEBUG, "MediaCodec %p started successfully\n", s->codec);

    return 0;

fail:
    av_log(avctx, AV_LOG_ERROR, "MediaCodec %p failed to start\n", s->codec);
    ff_mediacodec_dec_close(avctx, s);
    return ret;
}

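/* Feed one AVPacket to the codec and try to dequeue one decoded frame.
 * Queues as many input buffers as the codec will accept (an empty packet
 * queues an end-of-stream buffer and starts draining), then polls the output
 * side once. Returns the number of bytes consumed from the packet. */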
int ff_mediacodec_dec_decode(AVCodecContext *avctx, MediaCodecDecContext *s,
                             AVFrame *frame, int *got_frame,
                             AVPacket *pkt)
{
    int ret;
    int offset = 0;
    int need_draining = 0;
    uint8_t *data;
    ssize_t index;
    size_t size;
    FFAMediaCodec *codec = s->codec;
    FFAMediaCodecBufferInfo info = { 0 };

    int status;

    int64_t input_dequeue_timeout_us = INPUT_DEQUEUE_TIMEOUT_US;
    int64_t output_dequeue_timeout_us = OUTPUT_DEQUEUE_TIMEOUT_US;

    if (s->flushing) {
        av_log(avctx, AV_LOG_ERROR, "Decoder is flushing and cannot accept new buffer "
                                    "until all output buffers have been released\n");
        return AVERROR_EXTERNAL;
    }

    if (pkt->size == 0) {
        need_draining = 1;
    }

    if (s->draining && s->eos) {
        return 0;
    }

    while (offset < pkt->size || (need_draining && !s->draining)) {

        index = ff_AMediaCodec_dequeueInputBuffer(codec, input_dequeue_timeout_us);
        if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
            break;
        }

        if (index < 0) {
            av_log(avctx, AV_LOG_ERROR, "Failed to dequeue input buffer (status=%zd)\n", index);
            return AVERROR_EXTERNAL;
        }

        data = ff_AMediaCodec_getInputBuffer(codec, index, &size);
        if (!data) {
            av_log(avctx, AV_LOG_ERROR, "Failed to get input buffer\n");
            return AVERROR_EXTERNAL;
        }

        if (need_draining) {
            int64_t pts = pkt->pts;
            uint32_t flags = ff_AMediaCodec_getBufferFlagEndOfStream(codec);

            if (s->surface) {
                pts = av_rescale_q(pts, avctx->pkt_timebase, av_make_q(1, 1000000));
            }

            av_log(avctx, AV_LOG_DEBUG, "Sending End Of Stream signal\n");

            status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, 0, pts, flags);
            if (status < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to queue empty input buffer (status = %d)\n", status);
                return AVERROR_EXTERNAL;
            }

            s->draining = 1;
            break;
        } else {
            int64_t pts = pkt->pts;

            size = FFMIN(pkt->size - offset, size);

            memcpy(data, pkt->data + offset, size);
            offset += size;

            if (s->surface && avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
                pts = av_rescale_q(pts, avctx->pkt_timebase, av_make_q(1, 1000000));
            }

            status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, size, pts, 0);
            if (status < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to queue input buffer (status = %d)\n", status);
                return AVERROR_EXTERNAL;
            }
        }
    }

    if (need_draining || s->draining) {
        /* If the codec is flushing or needs to be flushed, block for a fair
         * amount of time to ensure we get a frame */
        output_dequeue_timeout_us = OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US;
    } else if (s->output_buffer_count == 0) {
        /* If the codec hasn't produced any frames, do not block so we
         * can push data to it as fast as possible, and get the first
         * frame */
        output_dequeue_timeout_us = 0;
    }

    index = ff_AMediaCodec_dequeueOutputBuffer(codec, &info, output_dequeue_timeout_us);
    if (index >= 0) {
        int ret;

        av_log(avctx, AV_LOG_DEBUG, "Got output buffer %zd"
                " offset=%" PRIi32 " size=%" PRIi32 " ts=%" PRIi64
                " flags=%" PRIu32 "\n", index, info.offset, info.size,
                info.presentationTimeUs, info.flags);

        if (info.flags & ff_AMediaCodec_getBufferFlagEndOfStream(codec)) {
            s->eos = 1;
        }

        if (info.size) {
            if (s->surface) {
                if ((ret = mediacodec_wrap_hw_buffer(avctx, s, index, &info, frame)) < 0) {
                    av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
                    return ret;
                }
            } else {
                data = ff_AMediaCodec_getOutputBuffer(codec, index, &size);
                if (!data) {
                    av_log(avctx, AV_LOG_ERROR, "Failed to get output buffer\n");
                    return AVERROR_EXTERNAL;
                }

                if ((ret = mediacodec_wrap_sw_buffer(avctx, s, data, size, index, &info, frame)) < 0) {
                    av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
                    return ret;
                }
            }

            *got_frame = 1;
            s->output_buffer_count++;
        } else {
            status = ff_AMediaCodec_releaseOutputBuffer(codec, index, 0);
            if (status < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
            }
        }

    } else if (ff_AMediaCodec_infoOutputFormatChanged(codec, index)) {
        char *format = NULL;

        if (s->format) {
            status = ff_AMediaFormat_delete(s->format);
            if (status < 0) {
                av_log(avctx, AV_LOG_ERROR, "Failed to delete MediaFormat %p\n", s->format);
            }
        }

        s->format = ff_AMediaCodec_getOutputFormat(codec);
        if (!s->format) {
            av_log(avctx, AV_LOG_ERROR, "Failed to get output format\n");
            return AVERROR_EXTERNAL;
        }

        format = ff_AMediaFormat_toString(s->format);
        if (!format) {
            return AVERROR_EXTERNAL;
        }
        av_log(avctx, AV_LOG_INFO, "Output MediaFormat changed to %s\n", format);
        av_freep(&format);

        if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
            return ret;
        }

    } else if (ff_AMediaCodec_infoOutputBuffersChanged(codec, index)) {
        ff_AMediaCodec_cleanOutputBuffers(codec);
    } else if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
        if (s->draining) {
            av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer within %" PRIi64 "ms "
                    "while draining remaining frames, output will probably lack frames\n",
                    output_dequeue_timeout_us / 1000);
        } else {
            av_log(avctx, AV_LOG_DEBUG, "No output buffer available, try again later\n");
        }
    } else {
        av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer (status=%zd)\n", index);
        return AVERROR_EXTERNAL;
    }

    return offset;
}

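/* Flush the decoder. When no hardware frames still reference codec buffers
 * (software output, or every wrapped frame already released), the codec is
 * flushed right away and 1 is returned; otherwise the context is marked as
 * flushing and 0 is returned until the remaining output buffers are released. */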
int ff_mediacodec_dec_flush(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    if (!s->surface || atomic_load(&s->refcount) == 1) {
        int ret;

        /* No frames (holding a reference to the codec) are retained by the
         * user, thus we can flush the codec and return accordingly */
        if ((ret = mediacodec_dec_flush_codec(avctx, s)) < 0) {
            return ret;
        }

        return 1;
    }

    s->flushing = 1;
    return 0;
}

int ff_mediacodec_dec_close(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    ff_mediacodec_dec_unref(s);

    return 0;
}

int ff_mediacodec_dec_is_flushing(AVCodecContext *avctx, MediaCodecDecContext *s)
{
    return s->flushing;
}

AVHWAccel ff_h264_mediacodec_hwaccel = {
    .name    = "mediacodec",
    .type    = AVMEDIA_TYPE_VIDEO,
    .id      = AV_CODEC_ID_H264,
    .pix_fmt = AV_PIX_FMT_MEDIACODEC,
};

AVHWAccel ff_hevc_mediacodec_hwaccel = {
    .name    = "mediacodec",
    .type    = AVMEDIA_TYPE_VIDEO,
    .id      = AV_CODEC_ID_HEVC,
    .pix_fmt = AV_PIX_FMT_MEDIACODEC,
};

AVHWAccel ff_mpeg4_mediacodec_hwaccel = {
    .name    = "mediacodec",
    .type    = AVMEDIA_TYPE_VIDEO,
    .id      = AV_CODEC_ID_MPEG4,
    .pix_fmt = AV_PIX_FMT_MEDIACODEC,
};

AVHWAccel ff_vp8_mediacodec_hwaccel = {
    .name    = "mediacodec",
    .type    = AVMEDIA_TYPE_VIDEO,
    .id      = AV_CODEC_ID_VP8,
    .pix_fmt = AV_PIX_FMT_MEDIACODEC,
};

AVHWAccel ff_vp9_mediacodec_hwaccel = {
    .name    = "mediacodec",
    .type    = AVMEDIA_TYPE_VIDEO,
    .id      = AV_CODEC_ID_VP9,
    .pix_fmt = AV_PIX_FMT_MEDIACODEC,
};