libstagefright.cpp
Go to the documentation of this file.
1 /*
2  * Interface to the Android Stagefright library for
3  * H/W accelerated H.264 decoding
4  *
5  * Copyright (C) 2011 Mohamed Naufal
6  * Copyright (C) 2011 Martin Storsjö
7  *
8  * This file is part of FFmpeg.
9  *
10  * FFmpeg is free software; you can redistribute it and/or
11  * modify it under the terms of the GNU Lesser General Public
12  * License as published by the Free Software Foundation; either
13  * version 2.1 of the License, or (at your option) any later version.
14  *
15  * FFmpeg is distributed in the hope that it will be useful,
16  * but WITHOUT ANY WARRANTY; without even the implied warranty of
17  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
18  * Lesser General Public License for more details.
19  *
20  * You should have received a copy of the GNU Lesser General Public
21  * License along with FFmpeg; if not, write to the Free Software
22  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
23  */
24 
25 #include <binder/ProcessState.h>
26 #include <media/stagefright/MetaData.h>
27 #include <media/stagefright/MediaBufferGroup.h>
28 #include <media/stagefright/MediaDebug.h>
29 #include <media/stagefright/MediaDefs.h>
30 #include <media/stagefright/OMXClient.h>
31 #include <media/stagefright/OMXCodec.h>
32 #include <utils/List.h>
33 #include <new>
34 #include <map>
35 
36 extern "C" {
37 #include "avcodec.h"
38 #include "libavutil/imgutils.h"
39 }
40 
41 #define OMX_QCOM_COLOR_FormatYVU420SemiPlanar 0x7FA30C00
42 
43 using namespace android;
44 
// One unit of work passed between the FFmpeg-facing code and the Stagefright
// decode thread via StagefrightContext's in_queue/out_queue.
// NOTE(review): two members appear to have been elided by the documentation
// extraction (source lines 50-51 are missing) -- by usage elsewhere in this
// file they are 'uint8_t *buffer;' and 'AVFrame *vframe;'. Confirm against
// the original file.
45 struct Frame {
46  status_t status;   // OK, ERROR_END_OF_STREAM, or an AVERROR code
47  size_t size;       // byte size of the compressed payload in 'buffer'
48  int64_t time;      // synthetic frame index used as the kKeyTime value
49  int key;           // non-zero for keyframes (mapped to kKeyIsSyncFrame)
52 };
53 
// Timestamp bookkeeping entry: maps the synthetic frame index fed into the
// OMX decoder back to the original packet timing.
// NOTE(review): a second member was elided by the extraction (source line 56
// missing) -- by usage it is 'int64_t reordered_opaque;'. Confirm against the
// original file.
54 struct TimeStamp {
55  int64_t pts;   // original AVPacket pts for this input frame
57 };
58 
59 class CustomSource;
60 
// Codec private context (field list only).
// NOTE(review): the opening 'struct StagefrightContext {' line and several
// members were elided by the extraction (source lines 61-65, 68-70, 72-73,
// 76 and 80-81 are missing) -- the cross references suggest they include
// avctx, bsfc, orig_extradata, dummy_buf, source_done, prev_frame, the
// in/out mutexes, the condition variable and decode_thread_id. Confirm
// against the original file.
66  sp<MediaSource> *source;             // holds the CustomSource feeding the decoder
67  List<Frame*> *in_queue, *out_queue;  // compressed input / decoded output queues
71 
// Flags shared between the FFmpeg caller thread and decode_thread().
74  volatile sig_atomic_t thread_started, thread_exited, stop_decode;
75 
77  std::map<int64_t, TimeStamp> *ts_map;  // frame index -> original pts (+opaque)
78  int64_t frame_index;                   // monotonically increasing synthetic timestamp
79 
82 
83  OMXClient *client;
84  sp<MediaSource> *decoder;              // the OMXCodec instance
85  const char *decoder_component;         // OMX component name (kKeyDecoderComponent)
86 };
87 
// MediaSource implementation that feeds compressed H.264 access units to the
// OMX decoder. read() blocks until Stagefright_decode_frame() pushes a Frame
// onto s->in_queue.
// NOTE(review): the remaining private members were elided by the extraction
// (source lines 148-149 missing) -- by usage they are 'int frame_size;' and
// 'StagefrightContext *s;'. Confirm against the original file.
88 class CustomSource : public MediaSource {
89 public:
90  CustomSource(AVCodecContext *avctx, sp<MetaData> meta) {
91  s = (StagefrightContext*)avctx->priv_data;
92  source_meta = meta;
// Worst-case 4:2:0 frame size, used for the single reusable input MediaBuffer.
93  frame_size = (avctx->width * avctx->height * 3) / 2;
94  buf_group.add_buffer(new MediaBuffer(frame_size));
95  }
96 
// Returns the MetaData built in Stagefright_init() (MIME type, size, avcC).
97  virtual sp<MetaData> getFormat() {
98  return source_meta;
99  }
100 
101  virtual status_t start(MetaData *params) {
102  return OK;
103  }
104 
105  virtual status_t stop() {
106  return OK;
107  }
108 
// Called by the OMX decoder to obtain the next access unit. Blocks on
// s->condition until a Frame is queued, then consumes and frees the Frame.
// Returns the Frame's status (OK, ERROR_END_OF_STREAM, or an error).
109  virtual status_t read(MediaBuffer **buffer,
110  const MediaSource::ReadOptions *options) {
111  Frame *frame;
112  status_t ret;
113 
114  if (s->thread_exited)
115  return ERROR_END_OF_STREAM;
116  pthread_mutex_lock(&s->in_mutex);
117 
118  while (s->in_queue->empty())
119  pthread_cond_wait(&s->condition, &s->in_mutex);
120 
121  frame = *s->in_queue->begin();
122  ret = frame->status;
123 
124  if (ret == OK) {
125  ret = buf_group.acquire_buffer(buffer);
126  if (ret == OK) {
// Copy the packet into the MediaBuffer and tag it with the keyframe
// flag and the synthetic timestamp (frame index).
127  memcpy((*buffer)->data(), frame->buffer, frame->size);
128  (*buffer)->set_range(0, frame->size);
129  (*buffer)->meta_data()->clear();
130  (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame,frame->key);
131  (*buffer)->meta_data()->setInt64(kKeyTime, frame->time);
132  } else {
133  av_log(s->avctx, AV_LOG_ERROR, "Failed to acquire MediaBuffer\n");
134  }
135  av_freep(&frame->buffer);
136  }
137 
// The Frame is consumed whether or not it succeeded.
138  s->in_queue->erase(s->in_queue->begin());
139  pthread_mutex_unlock(&s->in_mutex);
140 
141  av_freep(&frame);
142  return ret;
143  }
144 
145 private:
146  MediaBufferGroup buf_group;
147  sp<MetaData> source_meta;
150 };
151 
// Decoder worker thread: repeatedly pulls decoded buffers from the OMX
// decoder, wraps them in AVFrames and appends them to s->out_queue,
// throttled to at most 10 pending frames.
// NOTE(review): a few source lines were elided by the extraction: line 155
// (presumably 'StagefrightContext *s = (StagefrightContext*)avctx->priv_data;')
// and lines 238/240/245/247, which per the cross references look like the
// s->out_mutex lock/unlock calls protecting out_queue. Confirm against the
// original file.
152 void* decode_thread(void *arg)
153 {
154  AVCodecContext *avctx = (AVCodecContext*)arg;
156  const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(avctx->pix_fmt);
157  Frame* frame;
158  MediaBuffer *buffer;
159  int32_t w, h;
160  int decode_done = 0;
161  int ret;
162  int src_linesize[3];
163  const uint8_t *src_data[3];
164  int64_t out_frame_index = 0;
165 
166  do {
167  buffer = NULL;
168  frame = (Frame*)av_mallocz(sizeof(Frame));
169  if (!frame) {
// Allocation failed: fall back to the preallocated end_frame so an error
// status can still be delivered to the consumer.
170  frame = s->end_frame;
171  frame->status = AVERROR(ENOMEM);
172  decode_done = 1;
173  s->end_frame = NULL;
174  goto push_frame;
175  }
// Blocking read from the OMX decoder; returns OK, INFO_FORMAT_CHANGED,
// or an error/end-of-stream status.
176  frame->status = (*s->decoder)->read(&buffer);
177  if (frame->status == OK) {
178  sp<MetaData> outFormat = (*s->decoder)->getFormat();
179  outFormat->findInt32(kKeyWidth , &w);
180  outFormat->findInt32(kKeyHeight, &h);
181  frame->vframe = (AVFrame*)av_mallocz(sizeof(AVFrame));
182  if (!frame->vframe) {
183  frame->status = AVERROR(ENOMEM);
184  decode_done = 1;
185  buffer->release();
186  goto push_frame;
187  }
188  ret = ff_get_buffer(avctx, frame->vframe);
189  if (ret < 0) {
190  frame->status = ret;
191  decode_done = 1;
192  buffer->release();
193  goto push_frame;
194  }
195 
196  // The OMX.SEC decoder doesn't signal the modified width/height
// Round w/h up to multiples of 16 when the buffer size matches the
// padded 4:2:0 frame size.
197  if (s->decoder_component && !strncmp(s->decoder_component, "OMX.SEC", 7) &&
198  (w & 15 || h & 15)) {
199  if (((w + 15)&~15) * ((h + 15)&~15) * 3/2 == buffer->range_length()) {
200  w = (w + 15)&~15;
201  h = (h + 15)&~15;
202  }
203  }
204 
205  if (!avctx->width || !avctx->height || avctx->width > w || avctx->height > h) {
206  avctx->width = w;
207  avctx->height = h;
208  }
209 
210  src_linesize[0] = av_image_get_linesize(avctx->pix_fmt, w, 0);
211  src_linesize[1] = av_image_get_linesize(avctx->pix_fmt, w, 1);
212  src_linesize[2] = av_image_get_linesize(avctx->pix_fmt, w, 2);
213 
214  src_data[0] = (uint8_t*)buffer->data();
215  src_data[1] = src_data[0] + src_linesize[0] * h;
// -(-h >> n) is ceil(h / 2^n): chroma plane height rounded up.
216  src_data[2] = src_data[1] + src_linesize[1] * -(-h>>pix_desc->log2_chroma_h);
217  av_image_copy(frame->vframe->data, frame->vframe->linesize,
218  src_data, src_linesize,
219  avctx->pix_fmt, avctx->width, avctx->height);
220 
// kKeyTime carries the synthetic frame index set on the input path; use
// it to restore the original pts/reordered_opaque recorded in ts_map.
221  buffer->meta_data()->findInt64(kKeyTime, &out_frame_index);
222  if (out_frame_index && s->ts_map->count(out_frame_index) > 0) {
223  frame->vframe->pts = (*s->ts_map)[out_frame_index].pts;
224  frame->vframe->reordered_opaque = (*s->ts_map)[out_frame_index].reordered_opaque;
225  s->ts_map->erase(out_frame_index);
226  }
227  buffer->release();
228  } else if (frame->status == INFO_FORMAT_CHANGED) {
// Format change is handled transparently on the next read; drop the
// notification frame and retry.
229  if (buffer)
230  buffer->release();
231  av_free(frame);
232  continue;
233  } else {
234  decode_done = 1;
235  }
236 push_frame:
// Throttle: keep at most 10 decoded frames queued for the consumer.
237  while (true) {
239  if (s->out_queue->size() >= 10) {
241  usleep(10000);
242  continue;
243  }
244  break;
245  }
246  s->out_queue->push_back(frame);
248  } while (!decode_done && !s->stop_decode);
249 
250  s->thread_exited = true;
251 
252  return 0;
253 }
254 
// Decoder initialization: sets up the annexb bitstream filter, the
// Stagefright source/decoder objects, both queues, and maps the OMX output
// color format to an FFmpeg pixel format.
// NOTE(review): several source lines were elided by the extraction: 255 (the
// signature -- per the index 'static av_cold int Stagefright_init(AVCodecContext *avctx)'),
// 257 (presumably the 'StagefrightContext *s' declaration), 272-274 (the
// s->orig_extradata allocation the NULL check below refers to), 337
// (presumably duplicating decoder_component via av_strdup), 339-341
// (pthread mutex/cond initialization, per the cross references) and 345-346
// (part of the fail cleanup). Confirm against the original file.
256 {
258  sp<MetaData> meta, outFormat;
259  int32_t colorFormat = 0;
260  int ret;
261 
// Require avcC-style (mp4) extradata: the first byte of an avcC box is 1.
262  if (!avctx->extradata || !avctx->extradata_size || avctx->extradata[0] != 1)
263  return -1;
264 
265  s->avctx = avctx;
// The BSF converts mp4 (avcC + length-prefixed NALs) input to Annex B.
266  s->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
267  if (!s->bsfc) {
268  av_log(avctx, AV_LOG_ERROR, "Cannot open the h264_mp4toannexb BSF!\n");
269  return -1;
270  }
271 
// Keep a copy of the original extradata; Stagefright_close() restores it
// because the BSF rewrites avctx->extradata.
275  if (!s->orig_extradata) {
276  ret = AVERROR(ENOMEM);
277  goto fail;
278  }
279  memcpy(s->orig_extradata, avctx->extradata, avctx->extradata_size);
280 
281  meta = new MetaData;
282  if (meta == NULL) {
283  ret = AVERROR(ENOMEM);
284  goto fail;
285  }
286  meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
287  meta->setInt32(kKeyWidth, avctx->width);
288  meta->setInt32(kKeyHeight, avctx->height);
289  meta->setData(kKeyAVCC, kTypeAVCC, avctx->extradata, avctx->extradata_size);
290 
291  android::ProcessState::self()->startThreadPool();
292 
293  s->source = new sp<MediaSource>();
294  *s->source = new CustomSource(avctx, meta);
295  s->in_queue = new List<Frame*>;
296  s->out_queue = new List<Frame*>;
297  s->ts_map = new std::map<int64_t, TimeStamp>;
298  s->client = new OMXClient;
// end_frame is preallocated so EOS/OOM can still be signalled later even if
// allocation fails at that point (see decode_thread/Stagefright_close).
299  s->end_frame = (Frame*)av_mallocz(sizeof(Frame));
300  if (s->source == NULL || !s->in_queue || !s->out_queue || !s->client ||
301  !s->ts_map || !s->end_frame) {
302  ret = AVERROR(ENOMEM);
303  goto fail;
304  }
305 
306  if (s->client->connect() != OK) {
307  av_log(avctx, AV_LOG_ERROR, "Cannot connect OMX client\n");
308  ret = -1;
309  goto fail;
310  }
311 
312  s->decoder = new sp<MediaSource>();
313  *s->decoder = OMXCodec::Create(s->client->interface(), meta,
314  false, *s->source, NULL,
315  OMXCodec::kClientNeedsFramebuffer);
316  if ((*s->decoder)->start() != OK) {
317  av_log(avctx, AV_LOG_ERROR, "Cannot start decoder\n");
318  ret = -1;
319  s->client->disconnect();
320  goto fail;
321  }
322 
// Map the negotiated OMX color format onto an FFmpeg pixel format;
// default to planar YUV420 when the format is not recognized.
323  outFormat = (*s->decoder)->getFormat();
324  outFormat->findInt32(kKeyColorFormat, &colorFormat);
325  if (colorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar ||
326  colorFormat == OMX_COLOR_FormatYUV420SemiPlanar)
327  avctx->pix_fmt = AV_PIX_FMT_NV21;
328  else if (colorFormat == OMX_COLOR_FormatYCbYCr)
329  avctx->pix_fmt = AV_PIX_FMT_YUYV422;
330  else if (colorFormat == OMX_COLOR_FormatCbYCrY)
331  avctx->pix_fmt = AV_PIX_FMT_UYVY422;
332  else
333  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
334 
335  outFormat->findCString(kKeyDecoderComponent, &s->decoder_component);
336  if (s->decoder_component)
338 
342  return 0;
343 
344 fail:
347  av_freep(&s->end_frame);
348  delete s->in_queue;
349  delete s->out_queue;
350  delete s->ts_map;
351  delete s->client;
352  return ret;
353 }
354 
// Decode one packet: run it through the mp4->annexb BSF, queue it for the
// decode thread, then return a decoded frame if one is available.
// NOTE(review): elided source lines here: 355 (the first signature line --
// per the index 'static int Stagefright_decode_frame(AVCodecContext *avctx,
// void *data, int *got_frame, AVPacket *avpkt)'), 358 (presumably the
// 'StagefrightContext *s' declaration), 366 (presumably the pthread_create
// of decode_thread -- thread_started is set right after), 371 (the
// av_bitstream_filter_filter() call whose trailing arguments are visible
// below), and the in/out_mutex lock/signal/unlock lines (418/420/425-426/
// 431/433/444). Confirm against the original file.
356  int *got_frame, AVPacket *avpkt)
357 {
359  Frame *frame;
360  status_t status;
361  int orig_size = avpkt->size;
362  AVPacket pkt = *avpkt;
363  AVFrame *ret_frame;
364 
// Lazily start the decode thread on the first packet.
365  if (!s->thread_started) {
367  s->thread_started = true;
368  }
369 
// Filter the packet to Annex B (call line elided; these are its trailing
// arguments). On success pkt.data/pkt.size hold the filtered bitstream.
370  if (avpkt && avpkt->data) {
372  avpkt->data, avpkt->size, avpkt->flags & AV_PKT_FLAG_KEY);
373  avpkt = &pkt;
374  }
375 
376  if (!s->source_done) {
// Keep a copy of the first packet; Stagefright_close() feeds it back as a
// dummy frame so the OMX.SEC decoder terminates cleanly.
377  if(!s->dummy_buf) {
378  s->dummy_buf = (uint8_t*)av_malloc(avpkt->size);
379  if (!s->dummy_buf)
380  return AVERROR(ENOMEM);
381  s->dummy_bufsize = avpkt->size;
382  memcpy(s->dummy_buf, avpkt->data, avpkt->size);
383  }
384 
// NOTE(review): 'frame' is dereferenced below without a NULL check after
// av_mallocz().
385  frame = (Frame*)av_mallocz(sizeof(Frame));
386  if (avpkt->data) {
387  frame->status = OK;
388  frame->size = avpkt->size;
389  frame->key = avpkt->flags & AV_PKT_FLAG_KEY ? 1 : 0;
390  frame->buffer = (uint8_t*)av_malloc(avpkt->size);
391  if (!frame->buffer) {
392  av_freep(&frame);
393  return AVERROR(ENOMEM);
394  }
395  uint8_t *ptr = avpkt->data;
396  // The OMX.SEC decoder fails without this.
// If the BSF prepended the converted extradata, skip it and send only
// the original-sized payload.
397  if (avpkt->size == orig_size + avctx->extradata_size) {
398  ptr += avctx->extradata_size;
399  frame->size = orig_size;
400  }
401  memcpy(frame->buffer, ptr, orig_size);
402  if (avpkt == &pkt)
403  av_free(avpkt->data);
404 
// Use a monotonically increasing index as the Stagefright timestamp and
// remember the real pts/reordered_opaque for the output path.
405  frame->time = ++s->frame_index;
406  (*s->ts_map)[s->frame_index].pts = avpkt->pts;
407  (*s->ts_map)[s->frame_index].reordered_opaque = avctx->reordered_opaque;
408  } else {
// NULL/flush packet: signal end of stream to CustomSource::read().
409  frame->status = ERROR_END_OF_STREAM;
410  s->source_done = true;
411  }
412 
// Queue the input frame, throttled to 10 pending packets; bail out if the
// decode thread already exited.
413  while (true) {
414  if (s->thread_exited) {
415  s->source_done = true;
416  break;
417  }
419  if (s->in_queue->size() >= 10) {
421  usleep(10000);
422  continue;
423  }
424  s->in_queue->push_back(frame);
427  break;
428  }
429  }
// Wait for a decoded frame; while the source still has input to give,
// return immediately instead of blocking (decoder delay).
430  while (true) {
432  if (!s->out_queue->empty()) break;
434  if (s->source_done) {
435  usleep(10000);
436  continue;
437  } else {
438  return orig_size;
439  }
440  }
441 
442  frame = *s->out_queue->begin();
443  s->out_queue->erase(s->out_queue->begin());
445 
446  ret_frame = frame->vframe;
447  status = frame->status;
448  av_freep(&frame);
449 
450  if (status == ERROR_END_OF_STREAM)
451  return 0;
452  if (status != OK) {
453  if (status == AVERROR(ENOMEM))
454  return status;
455  av_log(avctx, AV_LOG_ERROR, "Decode failed: %x\n", status);
456  return -1;
457  }
458 
// Release the previously returned frame only now: *data aliases
// ret_frame's buffers until the caller has consumed it.
459  if (s->prev_frame) {
460  avctx->release_buffer(avctx, s->prev_frame);
461  av_freep(&s->prev_frame);
462  }
463  s->prev_frame = ret_frame;
464 
465  *got_frame = 1;
466  *(AVFrame*)data = *ret_frame;
467  return orig_size;
468 }
469 
// Decoder teardown: unblock and stop the decode thread, drain both queues,
// stop the OMX decoder and restore the original mp4 extradata.
// NOTE(review): elided source lines: 470 (the signature -- per the index
// 'static av_cold int Stagefright_close(AVCodecContext *avctx)'), 472
// (presumably the 'StagefrightContext *s' declaration), 515 (presumably
// pthread_join), 547 (presumably freeing decoder_component), 556 (presumably
// restoring avctx->extradata_size), 565-568 (mutex/cond destruction and
// av_bitstream_filter_close, per the cross references), plus the mutex
// lock/signal/unlock lines around the queue operations. Confirm against the
// original file.
471 {
473  Frame *frame;
474 
475  if (s->thread_started) {
476  if (!s->thread_exited) {
477  s->stop_decode = 1;
478 
479  // Make sure decode_thread() doesn't get stuck
// Drain out_queue so the thread's 10-entry throttle cannot block it.
481  while (!s->out_queue->empty()) {
482  frame = *s->out_queue->begin();
483  s->out_queue->erase(s->out_queue->begin());
484  if (frame->vframe) {
485  avctx->release_buffer(avctx, frame->vframe);
486  av_freep(&frame->vframe);
487  }
488  av_freep(&frame);
489  }
491 
492  // Feed a dummy frame prior to signalling EOF.
493  // This is required to terminate the decoder(OMX.SEC)
494  // when only one frame is read during stream info detection.
495  if (s->dummy_buf && (frame = (Frame*)av_mallocz(sizeof(Frame)))) {
496  frame->status = OK;
497  frame->size = s->dummy_bufsize;
498  frame->key = 1;
// Ownership of dummy_buf transfers to the queued frame.
499  frame->buffer = s->dummy_buf;
501  s->in_queue->push_back(frame);
504  s->dummy_buf = NULL;
505  }
506 
// Queue the preallocated end-of-stream marker.
508  s->end_frame->status = ERROR_END_OF_STREAM;
509  s->in_queue->push_back(s->end_frame);
512  s->end_frame = NULL;
513  }
514 
516 
517  if (s->prev_frame) {
518  avctx->release_buffer(avctx, s->prev_frame);
519  av_freep(&s->prev_frame);
520  }
521 
522  s->thread_started = false;
523  }
524 
// Free anything still queued in either direction.
525  while (!s->in_queue->empty()) {
526  frame = *s->in_queue->begin();
527  s->in_queue->erase(s->in_queue->begin());
528  if (frame->size)
529  av_freep(&frame->buffer);
530  av_freep(&frame);
531  }
532 
533  while (!s->out_queue->empty()) {
534  frame = *s->out_queue->begin();
535  s->out_queue->erase(s->out_queue->begin());
536  if (frame->vframe) {
537  avctx->release_buffer(avctx, frame->vframe);
538  av_freep(&frame->vframe);
539  }
540  av_freep(&frame);
541  }
542 
543  (*s->decoder)->stop();
544  s->client->disconnect();
545 
546  if (s->decoder_component)
548  av_freep(&s->dummy_buf);
549  av_freep(&s->end_frame);
550 
551  // Reset the extradata back to the original mp4 format, so that
552  // the next invocation (both when decoding and when called from
553  // av_find_stream_info) get the original mp4 format extradata.
554  av_freep(&avctx->extradata);
555  avctx->extradata = s->orig_extradata;
557 
558  delete s->in_queue;
559  delete s->out_queue;
560  delete s->ts_map;
561  delete s->client;
562  delete s->decoder;
563  delete s->source;
564 
569  return 0;
570 }
571 
// Positional AVCodec initializer for the libstagefright H.264 decoder.
// NOTE(review): the extraction elided source lines 572 (presumably
// 'AVCodec ff_libstagefright_h264_decoder = {'), 575-577, 592 and 595-596;
// by position these would hold the type/id/capability fields and the
// init/close/decode callbacks (the CODEC_CAP_DELAY cross reference suggests
// the capabilities value). Confirm against the original file.
573  "libstagefright_h264",
574  NULL_IF_CONFIG_SMALL("libstagefright H.264"),
578  NULL, //supported_framerates
579  NULL, //pix_fmts
580  NULL, //supported_samplerates
581  NULL, //sample_fmts
582  NULL, //channel_layouts
583  0, //max_lowres
584  NULL, //priv_class
585  NULL, //profiles
586  sizeof(StagefrightContext),
587  NULL, //next
588  NULL, //init_thread_copy
589  NULL, //update_thread_context
590  NULL, //defaults
591  NULL, //init_static_data
593  NULL, //encode
594  NULL, //encode2
597 };
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
Definition: pixfmt.h:85
void * av_mallocz(size_t size)
Allocate a block of size bytes with alignment suitable for all memory accesses (including vectors if ...
Definition: mem.c:205
int av_image_get_linesize(enum AVPixelFormat pix_fmt, int width, int plane)
Compute the size of an image line with format pix_fmt and width width for the plane plane...
Definition: imgutils.c:73
const char * s
Definition: avisynth_c.h:668
pthread_mutex_t in_mutex
static av_always_inline int pthread_mutex_destroy(pthread_mutex_t *mutex)
Definition: os2threads.h:90
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:1778
This structure describes decoded (raw) audio or video data.
Definition: frame.h:76
static av_always_inline int pthread_cond_wait(pthread_cond_t *cond, pthread_mutex_t *mutex)
Definition: os2threads.h:149
sp< MediaSource > * decoder
sp< MetaData > source_meta
misc image utilities
virtual status_t start(MetaData *params)
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
virtual status_t stop()
uint8_t * buffer
static av_always_inline int pthread_cond_destroy(pthread_cond_t *cond)
Definition: os2threads.h:120
pthread_cond_t condition
output residual component w
void av_freep(void *arg)
Free a memory block which has been allocated with av_malloc(z)() or av_realloc() and set the pointer ...
Definition: mem.c:198
initialize output if(nPeaks >3)%at least 3 peaks in spectrum for trying to find f0 nf0peaks
HMTX pthread_mutex_t
Definition: os2threads.h:38
uint8_t
#define av_cold
Definition: attributes.h:78
static AVPacket pkt
Definition: demuxing.c:56
#define OMX_QCOM_COLOR_FormatYVU420SemiPlanar
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:159
uint8_t * extradata
some codecs need / can use extradata like Huffman tables.
MediaBufferGroup buf_group
uint8_t * data
static int push_frame(AVFilterContext *ctx, unsigned in_no, AVFrame *buf)
Definition: avf_concat.c:159
static av_cold int Stagefright_init(AVCodecContext *avctx)
static av_always_inline int pthread_cond_signal(pthread_cond_t *cond)
Definition: os2threads.h:127
CustomSource(AVCodecContext *avctx, sp< MetaData > meta)
const OptionDef options[]
Definition: ffserver.c:4697
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
frame
Definition: stft.m:14
static const uint8_t frame_size[4]
Definition: g723_1_data.h:58
void av_free(void *ptr)
Free a memory block which has been allocated with av_malloc(z)() or av_realloc(). ...
Definition: mem.c:183
#define CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
const char * decoder_component
size_t size
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
Spectrum Plot time data
void av_bitstream_filter_close(AVBitStreamFilterContext *bsf)
void * decode_thread(void *arg)
const char * arg
static int Stagefright_decode_frame(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *avpkt)
void av_log(void *avcl, int level, const char *fmt,...)
Definition: log.c:246
status_t status
AVFrame * vframe
external API header
void av_image_copy(uint8_t *dst_data[4], int dst_linesizes[4], const uint8_t *src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
Definition: imgutils.c:257
int flags
A combination of AV_PKT_FLAG values.
#define FF_INPUT_BUFFER_PADDING_SIZE
Required number of additionally allocated bytes at the end of the input bitstream for decoding...
as above, but U and V bytes are swapped
Definition: pixfmt.h:94
volatile sig_atomic_t thread_started
ret
Definition: avfilter.c:821
int width
picture width / height.
AVBitStreamFilterContext * av_bitstream_filter_init(const char *name)
StagefrightContext * s
int32_t
static av_always_inline int pthread_join(pthread_t thread, void **value_ptr)
Definition: os2threads.h:76
static av_always_inline int pthread_mutex_init(pthread_mutex_t *mutex, const pthread_mutexattr_t *attr)
Definition: os2threads.h:83
AVBitStreamFilterContext * bsfc
int64_t reordered_opaque
opaque 64bit number (generally a PTS) that will be reordered and output in AVFrame.reordered_opaque
virtual status_t read(MediaBuffer **buffer, const MediaSource::ReadOptions *options)
volatile sig_atomic_t stop_decode
AVCodecContext * avctx
static av_always_inline int pthread_create(pthread_t *thread, const pthread_attr_t *attr, void *(*start_routine)(void *), void *arg)
Definition: os2threads.h:62
NULL
Definition: eval.c:55
int av_bitstream_filter_filter(AVBitStreamFilterContext *bsfc, AVCodecContext *avctx, const char *args, uint8_t **poutbuf, int *poutbuf_size, const uint8_t *buf, int buf_size, int keyframe)
char * av_strdup(const char *s)
Duplicate the string s.
Definition: mem.c:220
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:101
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:55
main external API structure.
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:148
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:69
void * av_malloc(size_t size)
Allocate a block of size bytes with alignment suitable for all memory accesses (including vectors if ...
Definition: mem.c:73
int64_t reordered_opaque
reordered opaque 64bit (generally an integer or a double precision float PTS but can be anything)...
Definition: frame.h:302
pthread_mutex_t out_mutex
pthread_t decode_thread_id
int64_t time
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFilterBuffer structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later.That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another.Buffer references ownership and permissions
List< Frame * > * out_queue
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:87
const char const char * params
Definition: avisynth_c.h:675
List< Frame * > * in_queue
std::map< int64_t, TimeStamp > * ts_map
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:68
the buffer and buffer reference mechanism is intended to as much as expensive copies of that data while still allowing the filters to produce correct results The data is stored in buffers represented by AVFilterBuffer structures They must not be accessed but through references stored in AVFilterBufferRef structures Several references can point to the same buffer
static av_always_inline int pthread_cond_init(pthread_cond_t *cond, const pthread_condattr_t *attr)
Definition: os2threads.h:111
AVCodec ff_libstagefright_h264_decoder
virtual sp< MetaData > getFormat()
int64_t reordered_opaque
static av_always_inline int pthread_mutex_unlock(pthread_mutex_t *mutex)
Definition: os2threads.h:104
static av_cold int Stagefright_close(AVCodecContext *avctx)
volatile sig_atomic_t thread_exited
static av_always_inline int pthread_mutex_lock(pthread_mutex_t *mutex)
Definition: os2threads.h:97
This structure stores compressed data.
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
sp< MediaSource > * source