FFmpeg: libavcodec/mediacodecdec_common.c Source File

1 /*
2  * Android MediaCodec decoder
3  *
4  * Copyright (c) 2015-2016 Matthieu Bouron <matthieu.bouron stupeflix.com>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 #include <string.h>
24 #include <sys/types.h>
25 
26 #include "libavutil/avassert.h"
27 #include "libavutil/common.h"
28 #include "libavutil/hwcontext_mediacodec.h"
29 #include "libavutil/mem.h"
30 #include "libavutil/log.h"
31 #include "libavutil/pixfmt.h"
32 #include "libavutil/time.h"
33 #include "libavutil/timestamp.h"
34 #include "libavutil/channel_layout.h"
35 
36 #include "avcodec.h"
37 #include "decode.h"
38 
39 #include "mediacodec.h"
40 #include "mediacodec_surface.h"
41 #include "mediacodec_sw_buffer.h"
42 #include "mediacodec_wrapper.h"
43 #include "mediacodecdec_common.h"
44 
45 /**
46  * OMX.k3.video.decoder.avc, OMX.NVIDIA.*, OMX.SEC.avc.dec and OMX.google
47  * codec workarounds used in various places are taken from the Gstreamer
48  * project.
49  *
50  * Gstreamer references:
51  * https://cgit.freedesktop.org/gstreamer/gst-plugins-bad/tree/sys/androidmedia/
52  *
53  * Gstreamer copyright notice:
54  *
55  * Copyright (C) 2012, Collabora Ltd.
56  * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
57  *
58  * Copyright (C) 2012, Rafaël Carré <funman@videolanorg>
59  *
60  * Copyright (C) 2015, Sebastian Dröge <sebastian@centricular.com>
61  *
62  * Copyright (C) 2014-2015, Collabora Ltd.
63  * Author: Matthieu Bouron <matthieu.bouron@gcollabora.com>
64  *
65  * Copyright (C) 2015, Edward Hervey
66  * Author: Edward Hervey <bilboed@gmail.com>
67  *
68  * Copyright (C) 2015, Matthew Waters <matthew@centricular.com>
69  *
70  * This library is free software; you can redistribute it and/or
71  * modify it under the terms of the GNU Lesser General Public
72  * License as published by the Free Software Foundation
73  * version 2.1 of the License.
74  *
75  * This library is distributed in the hope that it will be useful,
76  * but WITHOUT ANY WARRANTY; without even the implied warranty of
77  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
78  * Lesser General Public License for more details.
79  *
80  * You should have received a copy of the GNU Lesser General Public
81  * License along with this library; if not, write to the Free Software
82  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
83  *
84  */
85 
86  #define INPUT_DEQUEUE_TIMEOUT_US 8000
87  #define OUTPUT_DEQUEUE_TIMEOUT_US 8000
88  #define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US 1000000
89 
90 enum {
91   ENCODING_PCM_16BIT = 0x00000002,
92   ENCODING_PCM_8BIT = 0x00000003,
93   ENCODING_PCM_FLOAT = 0x00000004,
94   ENCODING_PCM_24BIT_PACKED = 0x00000015,
95   ENCODING_PCM_32BIT = 0x00000016,
96 };
97 
98 static const struct {
99 
100   int pcm_format;
101   enum AVSampleFormat sample_format;
102 
103 } sample_formats[] = {
104 
105  { ENCODING_PCM_16BIT, AV_SAMPLE_FMT_S16 },
106  { ENCODING_PCM_8BIT, AV_SAMPLE_FMT_U8 },
107  { ENCODING_PCM_FLOAT, AV_SAMPLE_FMT_FLT },
108  { ENCODING_PCM_32BIT, AV_SAMPLE_FMT_S32 },
109  { 0 }
110 };
111 
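 /* Map an Android AudioFormat ENCODING_PCM_* constant to the corresponding
  * AVSampleFormat; returns AV_SAMPLE_FMT_NONE (and logs an error) when the
  * PCM encoding has no mapping. */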
112  static enum AVSampleFormat mcdec_map_pcm_format(AVCodecContext *avctx,
113  MediaCodecDecContext *s,
114  int pcm_format)
115 {
116  enum AVSampleFormat ret = AV_SAMPLE_FMT_NONE;
117 
118  for (int i = 0; i < FF_ARRAY_ELEMS(sample_formats); i++) {
119  if (sample_formats[i].pcm_format == pcm_format) {
120  return sample_formats[i].sample_format;
121  }
122  }
123 
124  av_log(avctx, AV_LOG_ERROR, "Output sample format 0x%x (value=%d) is not supported\n",
125  pcm_format, pcm_format);
126 
127  return ret;
128 }
129 
130 enum
131 {
132   CHANNEL_OUT_FRONT_LEFT = 0x4,
133   CHANNEL_OUT_FRONT_RIGHT = 0x8,
134   CHANNEL_OUT_FRONT_CENTER = 0x10,
135   CHANNEL_OUT_LOW_FREQUENCY = 0x20,
136   CHANNEL_OUT_BACK_LEFT = 0x40,
137   CHANNEL_OUT_BACK_RIGHT = 0x80,
138   CHANNEL_OUT_FRONT_LEFT_OF_CENTER = 0x100,
139   CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x200,
140   CHANNEL_OUT_BACK_CENTER = 0x400,
141   CHANNEL_OUT_SIDE_LEFT = 0x800,
142   CHANNEL_OUT_SIDE_RIGHT = 0x1000,
143   CHANNEL_OUT_TOP_CENTER = 0x2000,
144   CHANNEL_OUT_TOP_FRONT_LEFT = 0x4000,
145   CHANNEL_OUT_TOP_FRONT_CENTER = 0x8000,
146   CHANNEL_OUT_TOP_FRONT_RIGHT = 0x10000,
147   CHANNEL_OUT_TOP_BACK_LEFT = 0x20000,
148   CHANNEL_OUT_TOP_BACK_CENTER = 0x40000,
149   CHANNEL_OUT_TOP_BACK_RIGHT = 0x80000,
150 };
151 
152 static const struct {
153 
154   int mask;
155   uint64_t layout;
156 
157 } channel_masks[] = {
158  { CHANNEL_OUT_FRONT_LEFT, AV_CH_FRONT_LEFT },
159  { CHANNEL_OUT_FRONT_RIGHT, AV_CH_FRONT_RIGHT },
160  { CHANNEL_OUT_FRONT_CENTER, AV_CH_FRONT_CENTER },
161  { CHANNEL_OUT_LOW_FREQUENCY, AV_CH_LOW_FREQUENCY },
162  { CHANNEL_OUT_BACK_LEFT, AV_CH_BACK_LEFT },
163  { CHANNEL_OUT_BACK_RIGHT, AV_CH_BACK_RIGHT },
164  { CHANNEL_OUT_FRONT_LEFT_OF_CENTER, AV_CH_FRONT_LEFT_OF_CENTER },
165  { CHANNEL_OUT_FRONT_RIGHT_OF_CENTER, AV_CH_FRONT_RIGHT_OF_CENTER },
166  { CHANNEL_OUT_BACK_CENTER, AV_CH_BACK_CENTER },
167  { CHANNEL_OUT_SIDE_LEFT, AV_CH_SIDE_LEFT },
168  { CHANNEL_OUT_SIDE_RIGHT, AV_CH_SIDE_RIGHT },
169  { CHANNEL_OUT_TOP_CENTER, AV_CH_TOP_CENTER },
170  { CHANNEL_OUT_TOP_FRONT_LEFT, AV_CH_TOP_FRONT_LEFT },
171  { CHANNEL_OUT_TOP_FRONT_CENTER, AV_CH_TOP_FRONT_CENTER },
172  { CHANNEL_OUT_TOP_FRONT_RIGHT, AV_CH_TOP_FRONT_RIGHT },
173  { CHANNEL_OUT_TOP_BACK_LEFT, AV_CH_TOP_BACK_LEFT },
174  { CHANNEL_OUT_TOP_BACK_CENTER, AV_CH_TOP_BACK_CENTER },
175  { CHANNEL_OUT_TOP_BACK_RIGHT, AV_CH_TOP_BACK_RIGHT },
176 };
177 
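 /* Convert an Android CHANNEL_OUT_* bitmask into the equivalent AV_CH_*
  * channel layout mask; bits without a known mapping are ignored. */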
178  static uint64_t mcdec_map_channel_mask(AVCodecContext *avctx,
179  int channel_mask)
180 {
181  uint64_t channel_layout = 0;
182 
183  for (int i = 0; i < FF_ARRAY_ELEMS(channel_masks); i++) {
184  if (channel_mask & channel_masks[i].mask)
185  channel_layout |= channel_masks[i].layout;
186  }
187 
188  return channel_layout;
189 }
190 
191 enum {
192   COLOR_FormatYUV420Planar = 0x13,
193   COLOR_FormatYUV420SemiPlanar = 0x15,
194   COLOR_FormatYCbYCr = 0x19,
195   COLOR_FormatAndroidOpaque = 0x7F000789,
196   COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00,
197   COLOR_QCOM_FormatYUV420SemiPlanar32m = 0x7fa30c04,
198   COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka = 0x7fa30c03,
199   COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100,
200   COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced = 0x7f000001,
201 };
202 
203 static const struct {
204 
205   int color_format;
206   enum AVPixelFormat pix_fmt;
207 
208 } color_formats[] = {
209 
210  { COLOR_FormatYUV420Planar, AV_PIX_FMT_YUV420P },
211  { COLOR_FormatYUV420SemiPlanar, AV_PIX_FMT_NV12 },
212  { COLOR_QCOM_FormatYUV420SemiPlanar, AV_PIX_FMT_NV12 },
213  { COLOR_QCOM_FormatYUV420SemiPlanar32m, AV_PIX_FMT_NV12 },
214  { COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka, AV_PIX_FMT_NV12 },
215  { COLOR_TI_FormatYUV420PackedSemiPlanar, AV_PIX_FMT_NV12 },
216  { COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced, AV_PIX_FMT_NV12 },
217  { 0 }
218 };
219 
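 /* Map a MediaCodec color-format integer to an AVPixelFormat. When a surface
  * is attached the decoder outputs opaque AV_PIX_FMT_MEDIACODEC frames, so the
  * reported color format is irrelevant. The OMX.k3 decoder reports
  * COLOR_FormatYCbYCr for what is actually packed semi-planar data, hence the
  * workaround below. */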
220  static enum AVPixelFormat mcdec_map_color_format(AVCodecContext *avctx,
221  MediaCodecDecContext *s,
222  int color_format)
223 {
224  int i;
225  enum AVPixelFormat ret = AV_PIX_FMT_NONE;
226 
227  if (s->surface) {
228  return AV_PIX_FMT_MEDIACODEC;
229  }
230 
231  if (!strcmp(s->codec_name, "OMX.k3.video.decoder.avc") && color_format == COLOR_FormatYCbYCr) {
232  s->color_format = color_format = COLOR_TI_FormatYUV420PackedSemiPlanar;
233  }
234 
235  for (i = 0; i < FF_ARRAY_ELEMS(color_formats); i++) {
236  if (color_formats[i].color_format == color_format) {
237  return color_formats[i].pix_fmt;
238  }
239  }
240 
241  av_log(avctx, AV_LOG_ERROR, "Output color format 0x%x (value=%d) is not supported\n",
242  color_format, color_format);
243 
244  return ret;
245 }
246 
247  static void ff_mediacodec_dec_ref(MediaCodecDecContext *s)
248 {
249  atomic_fetch_add(&s->refcount, 1);
250 }
251 
252  static void ff_mediacodec_dec_unref(MediaCodecDecContext *s)
253 {
254  if (!s)
255  return;
256 
257  if (atomic_fetch_sub(&s->refcount, 1) == 1) {
258  if (s->codec) {
259  ff_AMediaCodec_delete(s->codec);
260  s->codec = NULL;
261  }
262 
263  if (s->format) {
264  ff_AMediaFormat_delete(s->format);
265  s->format = NULL;
266  }
267 
268  if (s->surface) {
269  ff_mediacodec_surface_unref(s->surface, NULL);
270  s->surface = NULL;
271  }
272 
273  av_freep(&s->codec_name);
274  av_freep(&s);
275  }
276 }
277 
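 /* AVBuffer free callback for hardware output frames: if the wrapped
  * MediaCodec output buffer has not already been released and is still valid
  * (delay_flush mode, or same serial as the codec), return it to the codec
  * without rendering, then drop the frame's reference on the decoder
  * context. */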
278  static void mediacodec_buffer_release(void *opaque, uint8_t *data)
279 {
280  AVMediaCodecBuffer *buffer = opaque;
281  MediaCodecDecContext *ctx = buffer->ctx;
282  int released = atomic_load(&buffer->released);
283 
284  if (!released && (ctx->delay_flush || buffer->serial == atomic_load(&ctx->serial))) {
285  atomic_fetch_sub(&ctx->hw_buffer_count, 1);
286  av_log(ctx->avctx, AV_LOG_DEBUG,
287  "Releasing output buffer %zd (%p) ts=%"PRId64" on free() [%d pending]\n",
288  buffer->index, buffer, buffer->pts, atomic_load(&ctx->hw_buffer_count));
289  ff_AMediaCodec_releaseOutputBuffer(ctx->codec, buffer->index, 0);
290  }
291 
292  ff_mediacodec_dec_unref(ctx);
293  av_freep(&buffer);
294 }
295 
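 /* Wrap a MediaCodec output buffer index into an AVFrame for the
  * AV_PIX_FMT_MEDIACODEC path: no pixel data is copied, frame->data[3] points
  * to an AVMediaCodecBuffer and a zero-sized AVBufferRef with a custom free
  * callback keeps the underlying codec buffer alive until the frame is freed
  * or rendered. */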
296  static int mediacodec_wrap_hw_buffer(AVCodecContext *avctx,
297  MediaCodecDecContext *s,
298  ssize_t index,
299  FFAMediaCodecBufferInfo *info,
300  AVFrame *frame)
301 {
302  int ret = 0;
303  int status = 0;
304  AVMediaCodecBuffer *buffer = NULL;
305 
306  frame->buf[0] = NULL;
307  frame->width = avctx->width;
308  frame->height = avctx->height;
309  frame->format = avctx->pix_fmt;
310  frame->sample_aspect_ratio = avctx->sample_aspect_ratio;
311 
312  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
313  frame->pts = av_rescale_q(info->presentationTimeUs,
314  AV_TIME_BASE_Q,
315  avctx->pkt_timebase);
316  } else {
317  frame->pts = info->presentationTimeUs;
318  }
319  frame->pkt_dts = AV_NOPTS_VALUE;
320  frame->color_range = avctx->color_range;
321  frame->color_primaries = avctx->color_primaries;
322  frame->color_trc = avctx->color_trc;
323  frame->colorspace = avctx->colorspace;
324 
325  buffer = av_mallocz(sizeof(AVMediaCodecBuffer));
326  if (!buffer) {
327  ret = AVERROR(ENOMEM);
328  goto fail;
329  }
330 
331  atomic_init(&buffer->released, 0);
332 
333  frame->buf[0] = av_buffer_create(NULL,
334  0,
335  mediacodec_buffer_release,
336  buffer,
337  AV_BUFFER_FLAG_READONLY);
338 
339  if (!frame->buf[0]) {
340  ret = AVERROR(ENOMEM);
341  goto fail;
342 
343  }
344 
345  buffer->ctx = s;
346  buffer->serial = atomic_load(&s->serial);
347  ff_mediacodec_dec_ref(s);
348 
349  buffer->index = index;
350  buffer->pts = info->presentationTimeUs;
351 
352  frame->data[3] = (uint8_t *)buffer;
353 
354  atomic_fetch_add(&s->hw_buffer_count, 1);
355  av_log(avctx, AV_LOG_DEBUG,
356  "Wrapping output buffer %zd (%p) ts=%"PRId64" [%d pending]\n",
357  buffer->index, buffer, buffer->pts, atomic_load(&s->hw_buffer_count));
358 
359  return 0;
360 fail:
361  av_freep(&buffer);
362  status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
363  if (status < 0) {
364  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
365  ret = AVERROR_EXTERNAL;
366  }
367 
368  return ret;
369 }
370 
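 /* Copy a decoded PCM buffer into a newly allocated, refcounted AVFrame and
  * release the MediaCodec output buffer in all cases, even on error. */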
371  static int mediacodec_wrap_sw_audio_buffer(AVCodecContext *avctx,
372  MediaCodecDecContext *s,
373  uint8_t *data,
374  size_t size,
375  ssize_t index,
376  FFAMediaCodecBufferInfo *info,
377  AVFrame *frame)
378 {
379  int ret = 0;
380  int status = 0;
381  const int sample_size = av_get_bytes_per_sample(avctx->sample_fmt);
382  if (!sample_size) {
383  av_log(avctx, AV_LOG_ERROR, "Could not get bytes per sample\n");
384  ret = AVERROR(ENOSYS);
385  goto done;
386  }
387 
388  frame->format = avctx->sample_fmt;
389  frame->sample_rate = avctx->sample_rate;
390  frame->nb_samples = info->size / (sample_size * avctx->ch_layout.nb_channels);
391 
392  ret = av_channel_layout_copy(&frame->ch_layout, &avctx->ch_layout);
393  if (ret < 0) {
394  av_log(avctx, AV_LOG_ERROR, "Could not copy channel layout\n");
395  goto done;
396  }
397 
398  /* MediaCodec buffers need to be copied to our own refcounted buffers
399  * because the flush command invalidates all input and output buffers.
400  */
401  ret = ff_get_buffer(avctx, frame, 0);
402  if (ret < 0) {
403  av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer\n");
404  goto done;
405  }
406 
407  /* Override frame->pts as ff_get_buffer will override its value based
408  * on the last avpacket received, which is not in sync with the frame:
409  * * N avpackets can be pushed before 1 frame is actually returned
410  * * 0-sized avpackets are pushed to flush remaining frames at EOS */
411  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
412  frame->pts = av_rescale_q(info->presentationTimeUs,
413  AV_TIME_BASE_Q,
414  avctx->pkt_timebase);
415  } else {
416  frame->pts = info->presentationTimeUs;
417  }
418  frame->pkt_dts = AV_NOPTS_VALUE;
419  frame->flags |= AV_FRAME_FLAG_KEY;
420 
421  av_log(avctx, AV_LOG_TRACE,
422  "Frame: format=%d channels=%d sample_rate=%d nb_samples=%d",
423  avctx->sample_fmt, avctx->ch_layout.nb_channels, avctx->sample_rate, frame->nb_samples);
424 
425  memcpy(frame->data[0], data, info->size);
426 
427  ret = 0;
428 done:
429  status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
430  if (status < 0) {
431  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
432  ret = AVERROR_EXTERNAL;
433  }
434 
435  return ret;
436 }
437 
438  static int mediacodec_wrap_sw_video_buffer(AVCodecContext *avctx,
439  MediaCodecDecContext *s,
440  uint8_t *data,
441  size_t size,
442  ssize_t index,
443  FFAMediaCodecBufferInfo *info,
444  AVFrame *frame)
445 {
446  int ret = 0;
447  int status = 0;
448 
449  frame->width = avctx->width;
450  frame->height = avctx->height;
451  frame->format = avctx->pix_fmt;
452 
453  /* MediaCodec buffers need to be copied to our own refcounted buffers
454  * because the flush command invalidates all input and output buffers.
455  */
456  if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) {
457  av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer\n");
458  goto done;
459  }
460 
461  /* Override frame->pts as ff_get_buffer will override its value based
462  * on the last avpacket received, which is not in sync with the frame:
463  * * N avpackets can be pushed before 1 frame is actually returned
464  * * 0-sized avpackets are pushed to flush remaining frames at EOS */
465  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
466  frame->pts = av_rescale_q(info->presentationTimeUs,
467  AV_TIME_BASE_Q,
468  avctx->pkt_timebase);
469  } else {
470  frame->pts = info->presentationTimeUs;
471  }
472  frame->pkt_dts = AV_NOPTS_VALUE;
473 
474  av_log(avctx, AV_LOG_TRACE,
475  "Frame: width=%d stride=%d height=%d slice-height=%d "
476  "crop-top=%d crop-bottom=%d crop-left=%d crop-right=%d encoder=%s "
477  "destination linesizes=%d,%d,%d\n" ,
478  avctx->width, s->stride, avctx->height, s->slice_height,
479  s->crop_top, s->crop_bottom, s->crop_left, s->crop_right, s->codec_name,
480  frame->linesize[0], frame->linesize[1], frame->linesize[2]);
481 
482  switch (s->color_format) {
483  case COLOR_FormatYUV420Planar:
484  ff_mediacodec_sw_buffer_copy_yuv420_planar(avctx, s, data, size, info, frame);
485  break;
486  case COLOR_FormatYUV420SemiPlanar:
487  case COLOR_QCOM_FormatYUV420SemiPlanar:
488  case COLOR_QCOM_FormatYUV420SemiPlanar32m:
489  ff_mediacodec_sw_buffer_copy_yuv420_semi_planar(avctx, s, data, size, info, frame);
490  break;
491  case COLOR_TI_FormatYUV420PackedSemiPlanar:
492  case COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced:
493  ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar(avctx, s, data, size, info, frame);
494  break;
495  case COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka:
496  ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka(avctx, s, data, size, info, frame);
497  break;
498  default:
499  av_log(avctx, AV_LOG_ERROR, "Unsupported color format 0x%x (value=%d)\n",
500  s->color_format, s->color_format);
501  ret = AVERROR(EINVAL);
502  goto done;
503  }
504 
505  ret = 0;
506 done:
507  status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
508  if (status < 0) {
509  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
510  ret = AVERROR_EXTERNAL;
511  }
512 
513  return ret;
514 }
515 
516  static int mediacodec_wrap_sw_buffer(AVCodecContext *avctx,
517  MediaCodecDecContext *s,
518  uint8_t *data,
519  size_t size,
520  ssize_t index,
521  FFAMediaCodecBufferInfo *info,
522  AVFrame *frame)
523 {
524  if (avctx->codec_type == AVMEDIA_TYPE_AUDIO)
525  return mediacodec_wrap_sw_audio_buffer(avctx, s, data, size, index, info, frame);
526  else if (avctx->codec_type == AVMEDIA_TYPE_VIDEO)
527  return mediacodec_wrap_sw_video_buffer(avctx, s, data, size, index, info, frame);
528  else
529  av_assert0(0);
530 }
531 
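 /* Helper to read an int32 field from the output MediaFormat (s->format).
  * When the field is marked mandatory and missing, it logs an error, sets ret
  * to AVERROR_EXTERNAL and jumps to the caller's fail label. */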
532  #define AMEDIAFORMAT_GET_INT32(name, key, mandatory) do { \
533  int32_t value = 0; \
534  if (ff_AMediaFormat_getInt32(s->format, key, &value)) { \
535  (name) = value; \
536  } else if (mandatory) { \
537  av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", key, format); \
538  ret = AVERROR_EXTERNAL; \
539  goto fail; \
540  } \
541 } while (0) \
542 
543  static int mediacodec_dec_parse_video_format(AVCodecContext *avctx, MediaCodecDecContext *s)
544 {
545  int ret = 0;
546  int width = 0;
547  int height = 0;
548  int color_range = 0;
549  int color_standard = 0;
550  int color_transfer = 0;
551  char *format = NULL;
552 
553  if (!s->format) {
554  av_log(avctx, AV_LOG_ERROR, "Output MediaFormat is not set\n");
555  return AVERROR(EINVAL);
556  }
557 
558  format = ff_AMediaFormat_toString(s->format);
559  if (!format) {
560  return AVERROR_EXTERNAL;
561  }
562  av_log(avctx, AV_LOG_DEBUG, "Parsing MediaFormat %s\n", format);
563 
564  /* Mandatory fields */
565  AMEDIAFORMAT_GET_INT32(s->width, "width", 1);
566  AMEDIAFORMAT_GET_INT32(s->height, "height", 1);
567 
568  AMEDIAFORMAT_GET_INT32(s->stride, "stride", 0);
569  s->stride = s->stride > 0 ? s->stride : s->width;
570 
571  AMEDIAFORMAT_GET_INT32(s->slice_height, "slice-height", 0);
572 
573  if (strstr(s->codec_name, "OMX.Nvidia.") && s->slice_height == 0) {
574  s->slice_height = FFALIGN(s->height, 16);
575  } else if (strstr(s->codec_name, "OMX.SEC.avc.dec")) {
576  s->slice_height = avctx->height;
577  s->stride = avctx->width;
578  } else if (s->slice_height == 0) {
579  s->slice_height = s->height;
580  }
581 
582  AMEDIAFORMAT_GET_INT32(s->color_format, "color-format", 1);
583  avctx->pix_fmt = mcdec_map_color_format(avctx, s, s->color_format);
584  if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
585  av_log(avctx, AV_LOG_ERROR, "Output color format is not supported\n");
586  ret = AVERROR(EINVAL);
587  goto fail;
588  }
589 
590  /* Optional fields */
591  AMEDIAFORMAT_GET_INT32(s->crop_top, "crop-top", 0);
592  AMEDIAFORMAT_GET_INT32(s->crop_bottom, "crop-bottom", 0);
593  AMEDIAFORMAT_GET_INT32(s->crop_left, "crop-left", 0);
594  AMEDIAFORMAT_GET_INT32(s->crop_right, "crop-right", 0);
595 
596  // Try "crop" for NDK
597  // Some MediaTek SoCs return a default value like Rect(0, 0, 318, 238)
598  if (!(s->crop_right && s->crop_bottom) && s->use_ndk_codec && !strstr(s->codec_name, ".mtk."))
599  ff_AMediaFormat_getRect(s->format, "crop", &s->crop_left, &s->crop_top, &s->crop_right, &s->crop_bottom);
600 
601  if (s->crop_right && s->crop_bottom) {
602  width = s->crop_right + 1 - s->crop_left;
603  height = s->crop_bottom + 1 - s->crop_top;
604  } else {
605  /* TODO: NDK MediaFormat should try getRect() first.
606  * Try crop-width/crop-height, it works on NVIDIA Shield.
607  */
608  AMEDIAFORMAT_GET_INT32(width, "crop-width", 0);
609  AMEDIAFORMAT_GET_INT32(height, "crop-height", 0);
610  }
611  if (!width || !height) {
612  width = s->width;
613  height = s->height;
614  }
615 
616  AMEDIAFORMAT_GET_INT32(s->display_width, "display-width", 0);
617  AMEDIAFORMAT_GET_INT32(s->display_height, "display-height", 0);
618 
619  if (s->display_width && s->display_height) {
620  AVRational sar = av_div_q(
621  (AVRational){ s->display_width, s->display_height },
622  (AVRational){ width, height });
623  ff_set_sar(avctx, sar);
624  }
625 
626  AMEDIAFORMAT_GET_INT32(color_range, "color-range", 0);
627  if (color_range)
628  avctx->color_range = ff_AMediaFormatColorRange_to_AVColorRange(color_range);
629 
630  AMEDIAFORMAT_GET_INT32(color_standard, "color-standard", 0);
631  if (color_standard) {
632  avctx->colorspace = ff_AMediaFormatColorStandard_to_AVColorSpace(color_standard);
633  avctx->color_primaries = ff_AMediaFormatColorStandard_to_AVColorPrimaries(color_standard);
634  }
635 
636  AMEDIAFORMAT_GET_INT32(color_transfer, "color-transfer", 0);
637  if (color_transfer)
638  avctx->color_trc = ff_AMediaFormatColorTransfer_to_AVColorTransfer(color_transfer);
639 
640  av_log(avctx, AV_LOG_INFO,
641  "Output crop parameters top=%d bottom=%d left=%d right=%d, "
642  "resulting dimensions width=%d height=%d\n",
643  s->crop_top, s->crop_bottom, s->crop_left, s->crop_right,
644  width, height);
645 
646  av_freep(&format);
647  return ff_set_dimensions(avctx, width, height);
648 fail:
649  av_freep(&format);
650  return ret;
651 }
652 
653  static int mediacodec_dec_parse_audio_format(AVCodecContext *avctx, MediaCodecDecContext *s)
654 {
655  int ret = 0;
656  int sample_rate = 0;
657  int channel_count = 0;
658  int channel_mask = 0;
659  int pcm_encoding = 0;
660  char *format = NULL;
661 
662  if (!s->format) {
663  av_log(avctx, AV_LOG_ERROR, "Output MediaFormat is not set\n");
664  return AVERROR(EINVAL);
665  }
666 
667  format = ff_AMediaFormat_toString(s->format);
668  if (!format) {
669  return AVERROR_EXTERNAL;
670  }
671  av_log(avctx, AV_LOG_DEBUG, "Parsing MediaFormat %s\n", format);
672 
673  /* Mandatory fields */
674  AMEDIAFORMAT_GET_INT32(channel_count, "channel-count", 1);
675  AMEDIAFORMAT_GET_INT32(sample_rate, "sample-rate", 1);
676 
677  AMEDIAFORMAT_GET_INT32(pcm_encoding, "pcm-encoding", 0);
678  if (pcm_encoding)
679  avctx->sample_fmt = mcdec_map_pcm_format(avctx, s, pcm_encoding);
680  else
681  avctx->sample_fmt = AV_SAMPLE_FMT_S16;
682 
683  avctx->sample_rate = sample_rate;
684 
685  AMEDIAFORMAT_GET_INT32(channel_mask, "channel-mask", 0);
686  if (channel_mask)
687  av_channel_layout_from_mask(&avctx->ch_layout, mcdec_map_channel_mask(avctx, channel_mask));
688  else
689  av_channel_layout_default(&avctx->ch_layout, channel_count);
690 
691  av_log(avctx, AV_LOG_INFO,
692  "Output parameters channel-count=%d channel-layout=%x sample-rate=%d\n",
693  channel_count, channel_mask, sample_rate);
694 
695 fail:
696  av_freep(&format);
697  return ret;
698 }
699 
700  static int mediacodec_dec_parse_format(AVCodecContext *avctx, MediaCodecDecContext *s)
701 {
702  if (avctx->codec_type == AVMEDIA_TYPE_AUDIO)
703  return mediacodec_dec_parse_audio_format(avctx, s);
704  else if (avctx->codec_type == AVMEDIA_TYPE_VIDEO)
705  return mediacodec_dec_parse_video_format(avctx, s);
706  else
707  av_assert0(0);
708 }
709 
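 /* Actually flush the underlying codec: reset the draining/flushing/EOS state,
  * bump the serial so that frames still wrapping pre-flush output buffers do
  * not try to release them after the flush has invalidated them, and discard
  * any pending input buffer. */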
710  static int mediacodec_dec_flush_codec(AVCodecContext *avctx, MediaCodecDecContext *s)
711 {
712  FFAMediaCodec *codec = s->codec;
713  int status;
714 
715  s->output_buffer_count = 0;
716 
717  s->draining = 0;
718  s->flushing = 0;
719  s->eos = 0;
720  atomic_fetch_add(&s->serial, 1);
721  atomic_init(&s->hw_buffer_count, 0);
722  s->current_input_buffer = -1;
723 
724  status = ff_AMediaCodec_flush(codec);
725  if (status < 0) {
726  av_log(avctx, AV_LOG_ERROR, "Failed to flush codec\n");
727  return AVERROR_EXTERNAL;
728  }
729 
730  return 0;
731 }
732 
733  static int mediacodec_dec_get_video_codec(AVCodecContext *avctx, MediaCodecDecContext *s,
734  const char *mime, FFAMediaFormat *format)
735 {
736  int profile;
737 
738  enum AVPixelFormat pix_fmt;
739  static const enum AVPixelFormat pix_fmts[] = {
740  AV_PIX_FMT_MEDIACODEC,
741  AV_PIX_FMT_NONE,
742  };
743 
744  pix_fmt = ff_get_format(avctx, pix_fmts);
745  if (pix_fmt == AV_PIX_FMT_MEDIACODEC) {
746  AVMediaCodecContext *user_ctx = avctx->hwaccel_context;
747 
748  if (avctx->hw_device_ctx) {
749  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)(avctx->hw_device_ctx->data);
750  if (device_ctx->type == AV_HWDEVICE_TYPE_MEDIACODEC) {
751  if (device_ctx->hwctx) {
752  AVMediaCodecDeviceContext *mediacodec_ctx = (AVMediaCodecDeviceContext *)device_ctx->hwctx;
753  s->surface = ff_mediacodec_surface_ref(mediacodec_ctx->surface, mediacodec_ctx->native_window, avctx);
754  av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
755  }
756  }
757  }
758 
759  if (!s->surface && user_ctx && user_ctx->surface) {
760  s->surface = ff_mediacodec_surface_ref(user_ctx->surface, NULL, avctx);
761  av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
762  }
763  }
764 
765  profile = ff_AMediaCodecProfile_getProfileFromAVCodecContext(avctx);
766  if (profile < 0) {
767  av_log(avctx, AV_LOG_WARNING, "Unsupported or unknown profile\n");
768  }
769 
770  s->codec_name = ff_AMediaCodecList_getCodecNameByType(mime, profile, 0, avctx);
771  if (!s->codec_name) {
772  // getCodecNameByType() can fail due to missing JVM, while NDK
773  // mediacodec can be used without JVM.
774  if (!s->use_ndk_codec) {
775  return AVERROR_EXTERNAL;
776  }
777  av_log(avctx, AV_LOG_INFO, "Failed to getCodecNameByType\n");
778  } else {
779  av_log(avctx, AV_LOG_DEBUG, "Found decoder %s\n", s->codec_name);
780  }
781 
782  if (s->codec_name)
783  s->codec = ff_AMediaCodec_createCodecByName(s->codec_name, s->use_ndk_codec);
784  else {
785  s->codec = ff_AMediaCodec_createDecoderByType(mime, s->use_ndk_codec);
786  if (s->codec) {
787  s->codec_name = ff_AMediaCodec_getName(s->codec);
788  if (!s->codec_name)
789  s->codec_name = av_strdup(mime);
790  }
791  }
792  if (!s->codec) {
793  av_log(avctx, AV_LOG_ERROR, "Failed to create media decoder for type %s and name %s\n", mime, s->codec_name);
794  return AVERROR_EXTERNAL;
795  }
796 
797  return 0;
798 }
799 
800  static int mediacodec_dec_get_audio_codec(AVCodecContext *avctx, MediaCodecDecContext *s,
801  const char *mime, FFAMediaFormat *format)
802 {
803  s->codec = ff_AMediaCodec_createDecoderByType(mime, s->use_ndk_codec);
804  if (!s->codec) {
805  av_log(avctx, AV_LOG_ERROR, "Failed to create media decoder for mime %s\n", mime);
806  return AVERROR_EXTERNAL;
807  }
808 
809  s->codec_name = ff_AMediaCodec_getName(s->codec);
810  if (!s->codec_name) {
811  s->codec_name = av_strdup(mime);
812  if (!s->codec_name)
813  return AVERROR(ENOMEM);
814  }
815 
816  return 0;
817 }
818 
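 /* Create, configure and start the MediaCodec instance for the given mime
  * type and input format; for video, the initial output format is parsed
  * right away when available. On failure the context is closed. */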
819  int ff_mediacodec_dec_init(AVCodecContext *avctx, MediaCodecDecContext *s,
820  const char *mime, FFAMediaFormat *format)
821 {
822  int ret;
823  int status;
824 
825  s->avctx = avctx;
826  atomic_init(&s->refcount, 1);
827  atomic_init(&s->hw_buffer_count, 0);
828  atomic_init(&s->serial, 1);
829  s->current_input_buffer = -1;
830 
831  if (avctx->codec_type == AVMEDIA_TYPE_AUDIO)
832  ret = mediacodec_dec_get_audio_codec(avctx, s, mime, format);
833  else if (avctx->codec_type == AVMEDIA_TYPE_VIDEO)
834  ret = mediacodec_dec_get_video_codec(avctx, s, mime, format);
835  else
836  av_assert0(0);
837  if (ret < 0)
838  goto fail;
839 
840  status = ff_AMediaCodec_configure(s->codec, format, s->surface, NULL, 0);
841  if (status < 0) {
842  char *desc = ff_AMediaFormat_toString(format);
843  av_log(avctx, AV_LOG_ERROR,
844  "Failed to configure codec %s (status = %d) with format %s\n",
845  s->codec_name, status, desc);
846  av_freep(&desc);
847 
848  ret = AVERROR_EXTERNAL;
849  goto fail;
850  }
851 
852  status = ff_AMediaCodec_start(s->codec);
853  if (status < 0) {
854  char *desc = ff_AMediaFormat_toString(format);
855  av_log(avctx, AV_LOG_ERROR,
856  "Failed to start codec %s (status = %d) with format %s\n",
857  s->codec_name, status, desc);
858  av_freep(&desc);
859  ret = AVERROR_EXTERNAL;
860  goto fail;
861  }
862 
863  if (avctx->codec_type == AVMEDIA_TYPE_VIDEO) {
864  s->format = ff_AMediaCodec_getOutputFormat(s->codec);
865  if (s->format) {
866  if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
867  av_log(avctx, AV_LOG_ERROR,
868  "Failed to configure context\n");
869  goto fail;
870  }
871  }
872  }
873 
874  av_log(avctx, AV_LOG_DEBUG, "MediaCodec %p started successfully\n", s->codec);
875 
876  return 0;
877 
878 fail:
879  av_log(avctx, AV_LOG_ERROR, "MediaCodec %p failed to start\n", s->codec);
880  ff_mediacodec_dec_close(avctx, s);
881  return ret;
882 }
883 
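 /* Feed packet data to the codec: dequeue input buffers (waiting up to
  * INPUT_DEQUEUE_TIMEOUT_US when wait is set), copy as much of the packet as
  * fits and queue them with the packet pts rescaled to microseconds. An empty
  * packet queues a buffer flagged END_OF_STREAM and switches the decoder to
  * draining. Returns the number of bytes consumed, or AVERROR(EAGAIN) if no
  * input buffer was available. */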
884  int ff_mediacodec_dec_send(AVCodecContext *avctx, MediaCodecDecContext *s,
885  AVPacket *pkt, bool wait)
886 {
887  int offset = 0;
888  int need_draining = 0;
889  uint8_t *data;
890  size_t size;
891  FFAMediaCodec *codec = s->codec;
892  int status;
893  int64_t input_dequeue_timeout_us = wait ? INPUT_DEQUEUE_TIMEOUT_US : 0;
894  int64_t pts;
895 
896  if (s->flushing) {
897  av_log(avctx, AV_LOG_ERROR, "Decoder is flushing and cannot accept new buffers "
898  "until all output buffers have been released\n");
899  return AVERROR_EXTERNAL;
900  }
901 
902  if (pkt->size == 0) {
903  need_draining = 1;
904  }
905 
906  if (s->draining && s->eos) {
907  return AVERROR_EOF;
908  }
909 
910  while (offset < pkt->size || (need_draining && !s->draining)) {
911  ssize_t index = s->current_input_buffer;
912  if (index < 0) {
913  index = ff_AMediaCodec_dequeueInputBuffer(codec, input_dequeue_timeout_us);
914  if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
915  av_log(avctx, AV_LOG_TRACE, "No input buffer available, try again later\n");
916  break;
917  }
918 
919  if (index < 0) {
920  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue input buffer (status=%zd)\n", index);
921  return AVERROR_EXTERNAL;
922  }
923  }
924  s->current_input_buffer = -1;
925 
926  data = ff_AMediaCodec_getInputBuffer(codec, index, &size);
927  if (!data) {
928  av_log(avctx, AV_LOG_ERROR, "Failed to get input buffer\n");
929  return AVERROR_EXTERNAL;
930  }
931 
932  pts = pkt->pts;
933  if (pts == AV_NOPTS_VALUE) {
934  av_log(avctx, AV_LOG_WARNING, "Input packet is missing PTS\n");
935  pts = 0;
936  }
937  if (pts && avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
938  pts = av_rescale_q(pts, avctx->pkt_timebase, AV_TIME_BASE_Q);
939  }
940 
941  if (need_draining) {
942  uint32_t flags = ff_AMediaCodec_getBufferFlagEndOfStream(codec);
943 
944  av_log(avctx, AV_LOG_DEBUG, "Sending End Of Stream signal\n");
945 
946  status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, 0, pts, flags);
947  if (status < 0) {
948  av_log(avctx, AV_LOG_ERROR, "Failed to queue input empty buffer (status = %d)\n", status);
949  return AVERROR_EXTERNAL;
950  }
951 
952  av_log(avctx, AV_LOG_TRACE,
953  "Queued empty EOS input buffer %zd with flags=%d\n", index, flags);
954 
955  s->draining = 1;
956  return 0;
957  }
958 
959  size = FFMIN(pkt->size - offset, size);
960  memcpy(data, pkt->data + offset, size);
961  offset += size;
962 
963  status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, size, pts, 0);
964  if (status < 0) {
965  av_log(avctx, AV_LOG_ERROR, "Failed to queue input buffer (status = %d)\n", status);
966  return AVERROR_EXTERNAL;
967  }
968 
969  av_log(avctx, AV_LOG_TRACE,
970  "Queued input buffer %zd size=%zd ts=%"PRIi64"\n", index, size, pts);
971  }
972 
973  if (offset == 0)
974  return AVERROR(EAGAIN);
975  return offset;
976 }
977 
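 /* Try to dequeue one decoded frame. Handles the MediaCodec "output format
  * changed" and "output buffers changed" events, wraps valid buffers through
  * the hardware or software paths above, and returns AVERROR(EAGAIN) when no
  * output is ready yet or AVERROR_EOF once the EOS buffer has been seen while
  * draining. */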
978  int ff_mediacodec_dec_receive(AVCodecContext *avctx, MediaCodecDecContext *s,
979  AVFrame *frame, bool wait)
980 {
981  int ret;
982  uint8_t *data;
983  ssize_t index;
984  size_t size;
985  FFAMediaCodec *codec = s->codec;
986  FFAMediaCodecBufferInfo info = { 0 };
987  int status;
988  int64_t output_dequeue_timeout_us = OUTPUT_DEQUEUE_TIMEOUT_US;
989 
990  if (s->draining && s->eos) {
991  return AVERROR_EOF;
992  }
993 
994  if (s->draining) {
995  /* If the codec is flushing or needs to be flushed, block for a fair
996  * amount of time to ensure we get a frame */
997  output_dequeue_timeout_us = OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US;
998  } else if (s->output_buffer_count == 0 || !wait) {
999  /* If the codec hasn't produced any frames, do not block so we
1000  * can push data to it as fast as possible, and get the first
1001  * frame */
1002  output_dequeue_timeout_us = 0;
1003  }
1004 
1005  index = ff_AMediaCodec_dequeueOutputBuffer(codec, &info, output_dequeue_timeout_us);
1006  if (index >= 0) {
1007  av_log(avctx, AV_LOG_TRACE, "Got output buffer %zd"
1008  " offset=%" PRIi32 " size=%" PRIi32 " ts=%" PRIi64
1009  " flags=%" PRIu32 "\n", index, info.offset, info.size,
1010  info.presentationTimeUs, info.flags);
1011 
1012  if (info.flags & ff_AMediaCodec_getBufferFlagEndOfStream(codec)) {
1013  s->eos = 1;
1014  }
1015 
1016  if (info.size) {
1017  if (s->surface) {
1018  if ((ret = mediacodec_wrap_hw_buffer(avctx, s, index, &info, frame)) < 0) {
1019  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
1020  return ret;
1021  }
1022  } else {
1023  data = ff_AMediaCodec_getOutputBuffer(codec, index, &size);
1024  if (!data) {
1025  av_log(avctx, AV_LOG_ERROR, "Failed to get output buffer\n");
1026  return AVERROR_EXTERNAL;
1027  }
1028 
1029  if ((ret = mediacodec_wrap_sw_buffer(avctx, s, data, size, index, &info, frame)) < 0) {
1030  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
1031  return ret;
1032  }
1033  }
1034 
1035  s->output_buffer_count++;
1036  return 0;
1037  } else {
1038  status = ff_AMediaCodec_releaseOutputBuffer(codec, index, 0);
1039  if (status < 0) {
1040  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
1041  }
1042  }
1043 
1044  } else if (ff_AMediaCodec_infoOutputFormatChanged(codec, index)) {
1045  char *format = NULL;
1046 
1047  if (s->format) {
1048  status = ff_AMediaFormat_delete(s->format);
1049  if (status < 0) {
1050  av_log(avctx, AV_LOG_ERROR, "Failed to delete MediaFormat %p\n", s->format);
1051  }
1052  }
1053 
1054  s->format = ff_AMediaCodec_getOutputFormat(codec);
1055  if (!s->format) {
1056  av_log(avctx, AV_LOG_ERROR, "Failed to get output format\n");
1057  return AVERROR_EXTERNAL;
1058  }
1059 
1060  format = ff_AMediaFormat_toString(s->format);
1061  if (!format) {
1062  return AVERROR_EXTERNAL;
1063  }
1064  av_log(avctx, AV_LOG_INFO, "Output MediaFormat changed to %s\n", format);
1065  av_freep(&format);
1066 
1067  if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
1068  return ret;
1069  }
1070 
1071  } else if (ff_AMediaCodec_infoOutputBuffersChanged(codec, index)) {
1072  ff_AMediaCodec_cleanOutputBuffers(codec);
1073  } else if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
1074  if (s->draining) {
1075  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer within %" PRIi64 "ms "
1076  "while draining remaining frames, output will probably lack frames\n",
1077  output_dequeue_timeout_us / 1000);
1078  } else {
1079  av_log(avctx, AV_LOG_TRACE, "No output buffer available, try again later\n");
1080  }
1081  } else {
1082  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer (status=%zd)\n", index);
1083  return AVERROR_EXTERNAL;
1084  }
1085 
1086  if (s->draining && s->eos)
1087  return AVERROR_EOF;
1088  return AVERROR(EAGAIN);
1089 }
1090 
1091 /*
1092 * ff_mediacodec_dec_flush returns 0 if the flush cannot be performed on
1093 * the codec (because the user retains frames). The codec stays in the
1094 * flushing state.
1095 *
1096 * ff_mediacodec_dec_flush returns 1 if the flush can actually be
1097 * performed on the codec. The codec leaves the flushing state and can
1098 * process packets again.
1099 *
1100 * ff_mediacodec_dec_flush returns a negative value if an error has
1101 * occurred.
1102 */
1103  int ff_mediacodec_dec_flush(AVCodecContext *avctx, MediaCodecDecContext *s)
1104 {
1105  if (!s->surface || !s->delay_flush || atomic_load(&s->refcount) == 1) {
1106  int ret;
1107 
1108  /* No frames (holding a reference to the codec) are retained by the
1109  * user, thus we can flush the codec and return accordingly */
1110  if ((ret = mediacodec_dec_flush_codec(avctx, s)) < 0) {
1111  return ret;
1112  }
1113 
1114  return 1;
1115  }
1116 
1117  s->flushing = 1;
1118  return 0;
1119 }
1120 
1121  int ff_mediacodec_dec_close(AVCodecContext *avctx, MediaCodecDecContext *s)
1122 {
1123  if (!s)
1124  return 0;
1125 
1126  if (s->codec) {
1127  if (atomic_load(&s->hw_buffer_count) == 0) {
1128  ff_AMediaCodec_stop(s->codec);
1129  av_log(avctx, AV_LOG_DEBUG, "MediaCodec %p stopped\n", s->codec);
1130  } else {
1131  av_log(avctx, AV_LOG_DEBUG, "Not stopping MediaCodec (there are buffers pending)\n");
1132  }
1133  }
1134 
1135  ff_mediacodec_dec_unref(s);
1136 
1137  return 0;
1138 }
1139 
1140  int ff_mediacodec_dec_is_flushing(AVCodecContext *avctx, MediaCodecDecContext *s)
1141 {
1142  return s->flushing;
1143 }
ff_AMediaCodec_getInputBuffer
static uint8_t * ff_AMediaCodec_getInputBuffer(FFAMediaCodec *codec, size_t idx, size_t *out_size)
Definition: mediacodec_wrapper.h:261
ENCODING_PCM_24BIT_PACKED
@ ENCODING_PCM_24BIT_PACKED
Definition: mediacodecdec_common.c:94
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:85
AVCodecContext::hwaccel_context
void * hwaccel_context
Legacy hardware accelerator context.
Definition: avcodec.h:1461
ff_AMediaCodecList_getCodecNameByType
char * ff_AMediaCodecList_getCodecNameByType(const char *mime, int profile, int encoder, void *log_ctx)
Definition: mediacodec_wrapper.c:470
COLOR_FormatYUV420Planar
@ COLOR_FormatYUV420Planar
Definition: mediacodecdec_common.c:192
ff_AMediaFormat_delete
static int ff_AMediaFormat_delete(FFAMediaFormat *format)
Definition: mediacodec_wrapper.h:92
MediaCodecDecContext
Definition: mediacodecdec_common.h:37
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:186
ff_AMediaCodec_delete
static int ff_AMediaCodec_delete(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:256
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
sample_formats
static const struct @172 sample_formats[]
AVERROR
Filter the word "frame" indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
mediacodec_dec_get_audio_codec
static int mediacodec_dec_get_audio_codec(AVCodecContext *avctx, MediaCodecDecContext *s, const char *mime, FFAMediaFormat *format)
Definition: mediacodecdec_common.c:800
mcdec_map_channel_mask
static uint64_t mcdec_map_channel_mask(AVCodecContext *avctx, int channel_mask)
Definition: mediacodecdec_common.c:178
AVCodecContext::colorspace
enum AVColorSpace colorspace
YUV colorspace type.
Definition: avcodec.h:691
ff_get_format
int ff_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
Select the (possibly hardware accelerated) pixel format.
Definition: decode.c:1272
AV_CH_TOP_FRONT_CENTER
#define AV_CH_TOP_FRONT_CENTER
Definition: channel_layout.h:185
ff_AMediaCodec_start
static int ff_AMediaCodec_start(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:241
AVCodecContext::sample_rate
int sample_rate
samples per second
Definition: avcodec.h:1056
atomic_fetch_add
#define atomic_fetch_add(object, operand)
Definition: stdatomic.h:137
mediacodec_surface.h
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:57
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
ff_AMediaFormatColorStandard_to_AVColorSpace
enum AVColorSpace ff_AMediaFormatColorStandard_to_AVColorSpace(int color_standard)
Map MediaFormat color standard to AVColorSpace.
Definition: mediacodec_wrapper.c:2517
av_div_q
AVRational av_div_q(AVRational b, AVRational c)
Divide one rational by another.
Definition: rational.c:88
AV_TIME_BASE_Q
#define AV_TIME_BASE_Q
Internal time base represented as fractional value.
Definition: avutil.h:264
int64_t
long long int64_t
Definition: coverity.c:34
mediacodec_wrap_sw_video_buffer
static int mediacodec_wrap_sw_video_buffer(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, ssize_t index, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodecdec_common.c:438
AVMediaCodecDeviceContext::surface
void * surface
android/view/Surface handle, to be filled by the user.
Definition: hwcontext_mediacodec.h:33
CHANNEL_OUT_SIDE_RIGHT
@ CHANNEL_OUT_SIDE_RIGHT
Definition: mediacodecdec_common.c:142
OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US
#define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US
Definition: mediacodecdec_common.c:88
mask
int mask
Definition: mediacodecdec_common.c:154
ff_mediacodec_dec_close
int ff_mediacodec_dec_close(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:1121
AV_CH_TOP_FRONT_RIGHT
#define AV_CH_TOP_FRONT_RIGHT
Definition: channel_layout.h:186
ff_AMediaFormat_getRect
static int ff_AMediaFormat_getRect(FFAMediaFormat *format, const char *name, int32_t *left, int32_t *top, int32_t *right, int32_t *bottom)
Definition: mediacodec_wrapper.h:127
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:389
AVFrame::pts
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:501
AVCodecContext::color_trc
enum AVColorTransferCharacteristic color_trc
Color Transfer Characteristic.
Definition: avcodec.h:684
ff_mediacodec_dec_receive
int ff_mediacodec_dec_receive(AVCodecContext *avctx, MediaCodecDecContext *s, AVFrame *frame, bool wait)
Definition: mediacodecdec_common.c:978
AVPacket::data
uint8_t * data
Definition: packet.h:539
CHANNEL_OUT_TOP_FRONT_RIGHT
@ CHANNEL_OUT_TOP_FRONT_RIGHT
Definition: mediacodecdec_common.c:146
ff_AMediaCodec_infoOutputFormatChanged
static int ff_AMediaCodec_infoOutputFormatChanged(FFAMediaCodec *codec, ssize_t idx)
Definition: mediacodec_wrapper.h:311
OUTPUT_DEQUEUE_TIMEOUT_US
#define OUTPUT_DEQUEUE_TIMEOUT_US
Definition: mediacodecdec_common.c:87
ff_AMediaCodec_infoOutputBuffersChanged
static int ff_AMediaCodec_infoOutputBuffersChanged(FFAMediaCodec *codec, ssize_t idx)
Definition: mediacodec_wrapper.h:306
data
const char data[16]
Definition: mxf.c:148
AV_HWDEVICE_TYPE_MEDIACODEC
@ AV_HWDEVICE_TYPE_MEDIACODEC
Definition: hwcontext.h:38
ff_AMediaCodec_queueInputBuffer
static int ff_AMediaCodec_queueInputBuffer(FFAMediaCodec *codec, size_t idx, off_t offset, size_t size, uint64_t time, uint32_t flags)
Definition: mediacodec_wrapper.h:276
ff_mediacodec_dec_is_flushing
int ff_mediacodec_dec_is_flushing(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:1140
AV_CH_TOP_FRONT_LEFT
#define AV_CH_TOP_FRONT_LEFT
Definition: channel_layout.h:184
atomic_fetch_sub
#define atomic_fetch_sub(object, operand)
Definition: stdatomic.h:140
CHANNEL_OUT_TOP_FRONT_CENTER
@ CHANNEL_OUT_TOP_FRONT_CENTER
Definition: mediacodecdec_common.c:145
COLOR_QCOM_FormatYUV420SemiPlanar
@ COLOR_QCOM_FormatYUV420SemiPlanar
Definition: mediacodecdec_common.c:196
AVChannelLayout::nb_channels
int nb_channels
Number of channels in this layout.
Definition: channel_layout.h:321
ff_set_dimensions
int ff_set_dimensions(AVCodecContext *s, int width, int height)
Check that the provided frame dimensions are valid and set them on the codec context.
Definition: utils.c:94
hwcontext_mediacodec.h
CHANNEL_OUT_TOP_BACK_CENTER
@ CHANNEL_OUT_TOP_BACK_CENTER
Definition: mediacodecdec_common.c:148
CHANNEL_OUT_BACK_RIGHT
@ CHANNEL_OUT_BACK_RIGHT
Definition: mediacodecdec_common.c:137
ENCODING_PCM_8BIT
@ ENCODING_PCM_8BIT
Definition: mediacodecdec_common.c:92
CHANNEL_OUT_FRONT_RIGHT
@ CHANNEL_OUT_FRONT_RIGHT
Definition: mediacodecdec_common.c:133
AV_CH_TOP_BACK_LEFT
#define AV_CH_TOP_BACK_LEFT
Definition: channel_layout.h:187
ff_AMediaCodec_configure
static int ff_AMediaCodec_configure(FFAMediaCodec *codec, const FFAMediaFormat *format, FFANativeWindow *surface, void *crypto, uint32_t flags)
Definition: mediacodec_wrapper.h:233
mediacodec_wrap_sw_audio_buffer
static int mediacodec_wrap_sw_audio_buffer(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, ssize_t index, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodecdec_common.c:371
ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka
void ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodec_sw_buffer.c:272
AV_CH_TOP_BACK_CENTER
#define AV_CH_TOP_BACK_CENTER
Definition: channel_layout.h:188
AVCodecContext::ch_layout
AVChannelLayout ch_layout
Audio channel layout.
Definition: avcodec.h:1071
fail
#define fail()
Definition: checkasm.h:188
COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced
@ COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced
Definition: mediacodecdec_common.c:200
AV_CH_BACK_LEFT
#define AV_CH_BACK_LEFT
Definition: channel_layout.h:176
CHANNEL_OUT_FRONT_LEFT_OF_CENTER
@ CHANNEL_OUT_FRONT_LEFT_OF_CENTER
Definition: mediacodecdec_common.c:138
ff_mediacodec_dec_flush
int ff_mediacodec_dec_flush(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:1103
pts
static int64_t pts
Definition: transcode_aac.c:644
AVRational::num
int num
Numerator.
Definition: rational.h:59
mediacodecdec_common.h
INPUT_DEQUEUE_TIMEOUT_US
#define INPUT_DEQUEUE_TIMEOUT_US
OMX.k3.video.decoder.avc, OMX.NVIDIA.
Definition: mediacodecdec_common.c:86
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:60
avassert.h
AVCodecContext::color_primaries
enum AVColorPrimaries color_primaries
Chromaticity coordinates of the source primaries.
Definition: avcodec.h:677
AV_LOG_TRACE
#define AV_LOG_TRACE
Extremely verbose debugging, useful for libav* development.
Definition: log.h:206
pkt
AVPacket * pkt
Definition: movenc.c:60
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:180
CHANNEL_OUT_SIDE_LEFT
@ CHANNEL_OUT_SIDE_LEFT
Definition: mediacodecdec_common.c:141
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
CHANNEL_OUT_BACK_CENTER
@ CHANNEL_OUT_BACK_CENTER
Definition: mediacodecdec_common.c:140
AV_FRAME_FLAG_KEY
#define AV_FRAME_FLAG_KEY
A flag to mark frames that are keyframes.
Definition: frame.h:640
AV_CH_LOW_FREQUENCY
#define AV_CH_LOW_FREQUENCY
Definition: channel_layout.h:175
mcdec_map_color_format
static enum AVPixelFormat mcdec_map_color_format(AVCodecContext *avctx, MediaCodecDecContext *s, int color_format)
Definition: mediacodecdec_common.c:220
ff_AMediaCodec_getName
static char * ff_AMediaCodec_getName(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:224
ff_AMediaCodec_getBufferFlagEndOfStream
static int ff_AMediaCodec_getBufferFlagEndOfStream(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:321
s
#define s(width, name)
Definition: cbs_vp9.c:198
AV_BUFFER_FLAG_READONLY
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
Definition: buffer.h:114
format
Filter the word "frame" indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample format(the sample packing is implied by the sample format) and sample rate. The lists are not just lists
CHANNEL_OUT_FRONT_RIGHT_OF_CENTER
@ CHANNEL_OUT_FRONT_RIGHT_OF_CENTER
Definition: mediacodecdec_common.c:139
AVMEDIA_TYPE_AUDIO
@ AVMEDIA_TYPE_AUDIO
Definition: avutil.h:202
av_channel_layout_from_mask
int av_channel_layout_from_mask(AVChannelLayout *channel_layout, uint64_t mask)
Initialize a native channel layout from a bitmask indicating which channels are present.
Definition: channel_layout.c:247
info
MIPS optimizations info
Definition: mips.txt:2
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
CHANNEL_OUT_TOP_CENTER
@ CHANNEL_OUT_TOP_CENTER
Definition: mediacodecdec_common.c:143
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:304
COLOR_FormatYCbYCr
@ COLOR_FormatYCbYCr
Definition: mediacodecdec_common.c:194
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:201
ctx
AVFormatContext * ctx
Definition: movenc.c:49
decode.h
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
CHANNEL_OUT_TOP_FRONT_LEFT
@ CHANNEL_OUT_TOP_FRONT_LEFT
Definition: mediacodecdec_common.c:144
CHANNEL_OUT_FRONT_LEFT
@ CHANNEL_OUT_FRONT_LEFT
Definition: mediacodecdec_common.c:132
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar
void ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodec_sw_buffer.c:181
color_range
color_range
Definition: vf_selectivecolor.c:43
atomic_load
#define atomic_load(object)
Definition: stdatomic.h:93
AV_PIX_FMT_MEDIACODEC
@ AV_PIX_FMT_MEDIACODEC
hardware decoding through MediaCodec
Definition: pixfmt.h:316
ff_AMediaCodec_getOutputFormat
static FFAMediaFormat * ff_AMediaCodec_getOutputFormat(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:286
ff_AMediaCodec_createCodecByName
FFAMediaCodec * ff_AMediaCodec_createCodecByName(const char *name, int ndk)
Definition: mediacodec_wrapper.c:2408
AV_CH_TOP_CENTER
#define AV_CH_TOP_CENTER
Definition: channel_layout.h:183
NULL
#define NULL
Definition: coverity.c:32
ff_AMediaCodec_flush
static int ff_AMediaCodec_flush(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:251
FFAMediaCodecBufferInfo
Definition: mediacodec_wrapper.h:172
AVCodecContext::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:701
COLOR_FormatYUV420SemiPlanar
@ COLOR_FormatYUV420SemiPlanar
Definition: mediacodecdec_common.c:193
COLOR_QCOM_FormatYUV420SemiPlanar32m
@ COLOR_QCOM_FormatYUV420SemiPlanar32m
Definition: mediacodecdec_common.c:197
mcdec_map_pcm_format
static enum AVSampleFormat mcdec_map_pcm_format(AVCodecContext *avctx, MediaCodecDecContext *s, int pcm_format)
Definition: mediacodecdec_common.c:112
AVMediaCodecContext
This structure holds a reference to a android/view/Surface object that will be used as output by the ...
Definition: mediacodec.h:33
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
layout
uint64_t layout
Definition: mediacodecdec_common.c:155
ff_set_sar
int ff_set_sar(AVCodecContext *avctx, AVRational sar)
Check that the provided sample aspect ratio is valid and set it on the codec context.
Definition: utils.c:109
ff_AMediaCodec_stop
static int ff_AMediaCodec_stop(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:246
mediacodec_sw_buffer.h
ff_mediacodec_surface_unref
int ff_mediacodec_surface_unref(FFANativeWindow *window, void *log_ctx)
Definition: mediacodec_surface.c:59
time.h
ff_mediacodec_dec_ref
static void ff_mediacodec_dec_ref(MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:247
CHANNEL_OUT_TOP_BACK_LEFT
@ CHANNEL_OUT_TOP_BACK_LEFT
Definition: mediacodecdec_common.c:147
AV_CH_FRONT_CENTER
#define AV_CH_FRONT_CENTER
Definition: channel_layout.h:174
AV_CH_FRONT_LEFT_OF_CENTER
#define AV_CH_FRONT_LEFT_OF_CENTER
Definition: channel_layout.h:178
index
int index
Definition: gxfenc.c:90
AVMediaCodecDeviceContext
MediaCodec details.
Definition: hwcontext_mediacodec.h:27
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
ff_AMediaFormat_toString
static char * ff_AMediaFormat_toString(FFAMediaFormat *format)
Definition: mediacodec_wrapper.h:97
AMEDIAFORMAT_GET_INT32
#define AMEDIAFORMAT_GET_INT32(name, key, mandatory)
Definition: mediacodecdec_common.c:532
ff_mediacodec_sw_buffer_copy_yuv420_semi_planar
void ff_mediacodec_sw_buffer_copy_yuv420_semi_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodec_sw_buffer.c:131
mediacodec_dec_parse_format
static int mediacodec_dec_parse_format(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:700
ff_mediacodec_sw_buffer_copy_yuv420_planar
void ff_mediacodec_sw_buffer_copy_yuv420_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
The code handling the various YUV color formats is taken from the GStreamer project.
Definition: mediacodec_sw_buffer.c:76
ff_get_buffer
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
Definition: decode.c:1719
ff_AMediaFormatColorStandard_to_AVColorPrimaries
enum AVColorPrimaries ff_AMediaFormatColorStandard_to_AVColorPrimaries(int color_standard)
Map MediaFormat color standard to AVColorPrimaries.
Definition: mediacodec_wrapper.c:2535
AVPacket::size
int size
Definition: packet.h:540
height
#define height
Definition: dsp.h:85
AVCodecContext::sample_fmt
enum AVSampleFormat sample_fmt
audio sample format
Definition: avcodec.h:1063
AVCodecContext::pkt_timebase
AVRational pkt_timebase
Timebase in which pkt_dts/pts and AVPacket.dts/pts are expressed.
Definition: avcodec.h:557
AV_SAMPLE_FMT_NONE
@ AV_SAMPLE_FMT_NONE
Definition: samplefmt.h:56
mediacodec_dec_get_video_codec
static int mediacodec_dec_get_video_codec(AVCodecContext *avctx, MediaCodecDecContext *s, const char *mime, FFAMediaFormat *format)
Definition: mediacodecdec_common.c:733
size
int size
Definition: twinvq_data.h:10344
CHANNEL_OUT_TOP_BACK_RIGHT
@ CHANNEL_OUT_TOP_BACK_RIGHT
Definition: mediacodecdec_common.c:149
AV_NOPTS_VALUE
#define AV_NOPTS_VALUE
Undefined timestamp value.
Definition: avutil.h:248
COLOR_FormatAndroidOpaque
@ COLOR_FormatAndroidOpaque
Definition: mediacodecdec_common.c:195
CHANNEL_OUT_FRONT_CENTER
@ CHANNEL_OUT_FRONT_CENTER
Definition: mediacodecdec_common.c:134
COLOR_TI_FormatYUV420PackedSemiPlanar
@ COLOR_TI_FormatYUV420PackedSemiPlanar
Definition: mediacodecdec_common.c:199
pix_fmt
enum AVPixelFormat pix_fmt
Definition: mediacodecdec_common.c:206
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
offset
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf offset
Definition: writing_filters.txt:86
ff_AMediaCodecProfile_getProfileFromAVCodecContext
int ff_AMediaCodecProfile_getProfileFromAVCodecContext(AVCodecContext *avctx)
The following API around MediaCodec and MediaFormat is based on the NDK one provided by Google since ...
Definition: mediacodec_wrapper.c:309
AV_CH_TOP_BACK_RIGHT
#define AV_CH_TOP_BACK_RIGHT
Definition: channel_layout.h:189
AV_CH_FRONT_RIGHT_OF_CENTER
#define AV_CH_FRONT_RIGHT_OF_CENTER
Definition: channel_layout.h:179
ff_AMediaCodec_createDecoderByType
FFAMediaCodec * ff_AMediaCodec_createDecoderByType(const char *mime_type, int ndk)
Definition: mediacodec_wrapper.c:2415
mediacodec_dec_flush_codec
static int mediacodec_dec_flush_codec(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:710
mediacodec_buffer_release
static void mediacodec_buffer_release(void *opaque, uint8_t *data)
Definition: mediacodecdec_common.c:278
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:191
av_channel_layout_default
void av_channel_layout_default(AVChannelLayout *ch_layout, int nb_channels)
Get the default channel layout for a given number of channels.
Definition: channel_layout.c:834
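As a hedged, standalone illustration (not part of this decoder), av_channel_layout_default() fills an AVChannelLayout with the canonical layout for a given channel count; av_channel_layout_describe() is used here only to print the result.

#include <stdio.h>
#include "libavutil/channel_layout.h"

int main(void)
{
    AVChannelLayout layout;
    char buf[128];

    /* The default layout for 6 channels is 5.1. */
    av_channel_layout_default(&layout, 6);

    if (av_channel_layout_describe(&layout, buf, sizeof(buf)) > 0)
        printf("6 channels -> %s\n", buf);

    av_channel_layout_uninit(&layout);
    return 0;
}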
mediacodec_wrap_sw_buffer
static int mediacodec_wrap_sw_buffer(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, ssize_t index, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodecdec_common.c:516
ff_mediacodec_dec_unref
static void ff_mediacodec_dec_unref(MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:252
ff_AMediaFormatColorTransfer_to_AVColorTransfer
enum AVColorTransferCharacteristic ff_AMediaFormatColorTransfer_to_AVColorTransfer(int color_transfer)
Map MediaFormat color transfer to AVColorTransferCharacteristic.
Definition: mediacodec_wrapper.c:2545
log.h
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
mediacodec_wrapper.h
AVPacket::pts
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:532
av_get_bytes_per_sample
int av_get_bytes_per_sample(enum AVSampleFormat sample_fmt)
Return number of bytes per sample.
Definition: samplefmt.c:108
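A small, self-contained sketch (values chosen arbitrarily) of how av_get_bytes_per_sample() is typically used to size an interleaved PCM buffer: bytes = samples x channels x bytes-per-sample.

#include <stdio.h>
#include "libavutil/samplefmt.h"

int main(void)
{
    enum AVSampleFormat fmt = AV_SAMPLE_FMT_S16; /* 2 bytes per sample */
    int nb_samples  = 1024;
    int nb_channels = 2;

    size_t bytes = (size_t)nb_samples * nb_channels * av_get_bytes_per_sample(fmt);
    printf("%s: %d samples x %d channels = %zu bytes\n",
           av_get_sample_fmt_name(fmt), nb_samples, nb_channels, bytes);
    return 0;
}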
FFAMediaCodec
Definition: mediacodec_wrapper.h:181
AV_SAMPLE_FMT_U8
@ AV_SAMPLE_FMT_U8
unsigned 8 bits
Definition: samplefmt.h:57
common.h
AVSampleFormat
AVSampleFormat
Audio sample formats.
Definition: samplefmt.h:55
mediacodec_dec_parse_audio_format
static int mediacodec_dec_parse_audio_format(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:653
AV_CH_BACK_CENTER
#define AV_CH_BACK_CENTER
Definition: channel_layout.h:180
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AV_CH_FRONT_LEFT
#define AV_CH_FRONT_LEFT
Definition: channel_layout.h:172
AV_SAMPLE_FMT_S16
@ AV_SAMPLE_FMT_S16
signed 16 bits
Definition: samplefmt.h:58
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:256
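A minimal sketch of the usual av_mallocz()/av_freep() pairing, using a purely hypothetical struct: the allocation comes back zero-initialized, and av_freep() both frees it and resets the pointer to NULL.

#include <stdio.h>
#include "libavutil/mem.h"

/* Hypothetical context, not an FFmpeg type. */
typedef struct ExampleContext {
    int width;
    int height;
} ExampleContext;

int main(void)
{
    ExampleContext *ctx = av_mallocz(sizeof(*ctx));
    if (!ctx)
        return 1;

    printf("zero-initialized: %dx%d\n", ctx->width, ctx->height);
    av_freep(&ctx);          /* frees the block and sets ctx to NULL */
    printf("after av_freep: %p\n", (void *)ctx);
    return 0;
}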
AVCodecContext::hw_device_ctx
AVBufferRef * hw_device_ctx
A reference to the AVHWDeviceContext describing the device which will be used by a hardware encoder/d...
Definition: avcodec.h:1507
AVMediaCodecContext::surface
void * surface
android/view/Surface object reference.
Definition: mediacodec.h:38
ff_mediacodec_surface_ref
FFANativeWindow * ff_mediacodec_surface_ref(void *surface, void *native_window, void *log_ctx)
Definition: mediacodec_surface.c:30
AV_CH_SIDE_RIGHT
#define AV_CH_SIDE_RIGHT
Definition: channel_layout.h:182
profile
int profile
Definition: mxfenc.c:2228
AVCodecContext::height
int height
Definition: avcodec.h:624
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:663
AVMediaCodecDeviceContext::native_window
void * native_window
Pointer to ANativeWindow.
Definition: hwcontext_mediacodec.h:45
ff_AMediaCodec_dequeueInputBuffer
static ssize_t ff_AMediaCodec_dequeueInputBuffer(FFAMediaCodec *codec, int64_t timeoutUs)
Definition: mediacodec_wrapper.h:271
avcodec.h
ff_AMediaFormatColorRange_to_AVColorRange
enum AVColorRange ff_AMediaFormatColorRange_to_AVColorRange(int color_range)
Map MediaFormat color range to AVColorRange.
Definition: mediacodec_wrapper.c:2500
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:72
pixfmt.h
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
frame
these buffered frames must be flushed immediately if a new input produces new frames; the filter must not call request_frame to get more, it must just process the frame or queue it. The task of requesting more frames is left to the filter's request_frame method or the application. If a filter has several inputs, it must be ready for frames arriving randomly on any input, so any filter with several inputs will most likely require some kind of queuing mechanism. It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced. request_frame: for filters that do not use the activate callback, this method is called when a frame is wanted on an output. For a source, it should directly call filter_frame on the corresponding output. For a filter, if there are queued frames already, one of these frames should be pushed; if not, the filter should request a frame on one of its inputs, repeatedly until at least one frame has been pushed. It should return, or at least make progress towards producing a frame.
Definition: filter_design.txt:264
CHANNEL_OUT_BACK_LEFT
@ CHANNEL_OUT_BACK_LEFT
Definition: mediacodecdec_common.c:136
channel_masks
static const struct @174 channel_masks[]
AVCodecContext
main external API structure.
Definition: avcodec.h:451
status
ov_status_e status
Definition: dnn_backend_openvino.c:100
channel_layout.h
buffer
the frame and frame reference mechanism is intended to avoid, as much as possible, expensive copies of that data while still allowing the filters to produce correct results. The data is stored in buffers represented by AVFrame structures. Several references can point to the same frame buffer.
Definition: filter_design.txt:49
AVRational::den
int den
Denominator.
Definition: rational.h:60
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
CHANNEL_OUT_LOW_FREQUENCY
@ CHANNEL_OUT_LOW_FREQUENCY
Definition: mediacodecdec_common.c:135
pcm_format
int pcm_format
Definition: mediacodecdec_common.c:100
color_format
int color_format
Definition: mediacodecdec_common.c:205
AV_CH_FRONT_RIGHT
#define AV_CH_FRONT_RIGHT
Definition: channel_layout.h:173
av_channel_layout_copy
int av_channel_layout_copy(AVChannelLayout *dst, const AVChannelLayout *src)
Make a copy of a channel layout.
Definition: channel_layout.c:444
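Another standalone illustration (unrelated to this file's own copies): av_channel_layout_copy() duplicates a source layout into a destination, and both layouts must later be uninitialized separately.

#include <stdio.h>
#include "libavutil/channel_layout.h"

int main(void)
{
    AVChannelLayout src = AV_CHANNEL_LAYOUT_STEREO;
    AVChannelLayout dst = { 0 };
    char buf[64];

    if (av_channel_layout_copy(&dst, &src) < 0)
        return 1;

    if (av_channel_layout_describe(&dst, buf, sizeof(buf)) > 0)
        printf("copied layout: %s\n", buf);

    av_channel_layout_uninit(&src);
    av_channel_layout_uninit(&dst);
    return 0;
}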
AVCodecContext::codec_type
enum AVMediaType codec_type
Definition: avcodec.h:459
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:272
desc
const char * desc
Definition: libsvtav1.c:79
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
mem.h
ff_AMediaCodec_getOutputBuffer
static uint8_t * ff_AMediaCodec_getOutputBuffer(FFAMediaCodec *codec, size_t idx, size_t *out_size)
Definition: mediacodec_wrapper.h:266
sample_format
enum AVSampleFormat sample_format
Definition: mediacodecdec_common.c:101
ENCODING_PCM_FLOAT
@ ENCODING_PCM_FLOAT
Definition: mediacodecdec_common.c:93
mediacodec_wrap_hw_buffer
static int mediacodec_wrap_hw_buffer(AVCodecContext *avctx, MediaCodecDecContext *s, ssize_t index, FFAMediaCodecBufferInfo *info, AVFrame *frame)
Definition: mediacodecdec_common.c:296
ff_AMediaCodec_cleanOutputBuffers
static int ff_AMediaCodec_cleanOutputBuffers(FFAMediaCodec *codec)
Definition: mediacodec_wrapper.h:336
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
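To make the two helper macros concrete (the numbers are illustrative, not taken from this file): FFALIGN(x, a) rounds x up to the next multiple of a, and FFMIN(a, b) picks the smaller value.

#include <stdio.h>
#include "libavutil/macros.h"

int main(void)
{
    /* 1080 rounded up to a multiple of 16 is 1088. */
    printf("FFALIGN(1080, 16) = %d\n", FFALIGN(1080, 16));
    /* Clamp a copy size to a destination capacity. */
    printf("FFMIN(4096, 1500) = %d\n", FFMIN(4096, 1500));
    return 0;
}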
AVPacket
This structure stores compressed data.
Definition: packet.h:516
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
ENCODING_PCM_32BIT
@ ENCODING_PCM_32BIT
Definition: mediacodecdec_common.c:95
COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka
@ COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka
Definition: mediacodecdec_common.c:198
ff_AMediaCodec_infoTryAgainLater
static int ff_AMediaCodec_infoTryAgainLater(FFAMediaCodec *codec, ssize_t idx)
Definition: mediacodec_wrapper.h:301
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:624
color_formats
static const struct @176 color_formats[]
ff_mediacodec_dec_send
int ff_mediacodec_dec_send(AVCodecContext *avctx, MediaCodecDecContext *s, AVPacket *pkt, bool wait)
Definition: mediacodecdec_common.c:884
timestamp.h
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:482
ff_AMediaCodec_releaseOutputBuffer
static int ff_AMediaCodec_releaseOutputBuffer(FFAMediaCodec *codec, size_t idx, int render)
Definition: mediacodec_wrapper.h:291
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
AV_CH_BACK_RIGHT
#define AV_CH_BACK_RIGHT
Definition: channel_layout.h:177
ENCODING_PCM_16BIT
@ ENCODING_PCM_16BIT
Definition: mediacodecdec_common.c:91
atomic_init
#define atomic_init(obj, value)
Definition: stdatomic.h:33
width
#define width
Definition: dsp.h:85
FFAMediaFormat
Definition: mediacodec_wrapper.h:63
mediacodec_dec_parse_video_format
static int mediacodec_dec_parse_video_format(AVCodecContext *avctx, MediaCodecDecContext *s)
Definition: mediacodecdec_common.c:543
AV_SAMPLE_FMT_S32
@ AV_SAMPLE_FMT_S32
signed 32 bits
Definition: samplefmt.h:59
ff_AMediaCodec_dequeueOutputBuffer
static ssize_t ff_AMediaCodec_dequeueOutputBuffer(FFAMediaCodec *codec, FFAMediaCodecBufferInfo *info, int64_t timeoutUs)
Definition: mediacodec_wrapper.h:281
AV_SAMPLE_FMT_FLT
@ AV_SAMPLE_FMT_FLT
float
Definition: samplefmt.h:60
AVCodecContext::sample_aspect_ratio
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown). That is the width of a pixel divided by the height of the pixel.
Definition: avcodec.h:648
AV_CH_SIDE_LEFT
#define AV_CH_SIDE_LEFT
Definition: channel_layout.h:181
mediacodec.h
ff_mediacodec_dec_init
int ff_mediacodec_dec_init(AVCodecContext *avctx, MediaCodecDecContext *s, const char *mime, FFAMediaFormat *format)
Definition: mediacodecdec_common.c:819

Generated on Fri Aug 22 2025 13:58:32 for FFmpeg by doxygen 1.8.17
