v4l2.c
1 /*
2  * Copyright (c) 2000,2001 Fabrice Bellard
3  * Copyright (c) 2006 Luca Abeni
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Video4Linux2 grab interface
25  *
26  * Part of this file is based on the V4L2 video capture example
27  * (http://linuxtv.org/downloads/v4l-dvb-apis/capture-example.html)
28  *
29  * Thanks to Michael Niedermayer for providing the mapping between
30  * V4L2_PIX_FMT_* and AV_PIX_FMT_*
31  */
32 
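For orientation, here is a minimal sketch of how a caller typically drives this grab interface through the public libavformat/libavdevice API of the same era. The helper name grab_one_packet, the device path "/dev/video0" and the option values are illustrative assumptions, not part of this file; the API calls themselves (av_find_input_format, avformat_open_input, av_read_frame, ...) are standard public API.

#include <libavdevice/avdevice.h>
#include <libavformat/avformat.h>
#include <libavutil/dict.h>

/* Hypothetical helper: open a V4L2 capture device and grab a single packet. */
static int grab_one_packet(const char *path)
{
    AVFormatContext *fmt_ctx = NULL;
    AVInputFormat *ifmt;
    AVDictionary *opts = NULL;
    AVPacket pkt;
    int ret;

    av_register_all();
    avdevice_register_all();

    /* matches ff_v4l2_demuxer.name defined at the end of this file */
    ifmt = av_find_input_format("video4linux2");
    if (!ifmt)
        return AVERROR(EINVAL);

    /* these private options are declared in the options[] table below */
    av_dict_set(&opts, "video_size", "640x480", 0);
    av_dict_set(&opts, "framerate",  "25",      0);

    ret = avformat_open_input(&fmt_ctx, path, ifmt, &opts); /* -> v4l2_read_header() */
    av_dict_free(&opts);
    if (ret < 0)
        return ret;

    ret = av_read_frame(fmt_ctx, &pkt);                     /* -> v4l2_read_packet() */
    if (ret >= 0)
        av_free_packet(&pkt);

    avformat_close_input(&fmt_ctx);                         /* -> v4l2_read_close() */
    return ret;
}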
33 #undef __STRICT_ANSI__ //workaround due to broken kernel headers
34 #include "config.h"
35 #include "libavformat/internal.h"
36 #include <unistd.h>
37 #include <fcntl.h>
38 #include <sys/ioctl.h>
39 #include <sys/mman.h>
40 #include <sys/time.h>
41 #if HAVE_SYS_VIDEOIO_H
42 #include <sys/videoio.h>
43 #else
44 #if HAVE_ASM_TYPES_H
45 #include <asm/types.h>
46 #endif
47 #include <linux/videodev2.h>
48 #endif
49 #include "libavutil/atomic.h"
50 #include "libavutil/avassert.h"
51 #include "libavutil/imgutils.h"
52 #include "libavutil/log.h"
53 #include "libavutil/opt.h"
54 #include "avdevice.h"
55 #include "timefilter.h"
56 #include "libavutil/parseutils.h"
57 #include "libavutil/pixdesc.h"
58 #include "libavutil/time.h"
59 #include "libavutil/avstring.h"
60 
61 #if CONFIG_LIBV4L2
62 #include <libv4l2.h>
63 #else
64 #define v4l2_open open
65 #define v4l2_close close
66 #define v4l2_dup dup
67 #define v4l2_ioctl ioctl
68 #define v4l2_read read
69 #define v4l2_mmap mmap
70 #define v4l2_munmap munmap
71 #endif
72 
73 static const int desired_video_buffers = 256;
74 
75 #define V4L_ALLFORMATS 3
76 #define V4L_RAWFORMATS 1
77 #define V4L_COMPFORMATS 2
78 
79 /**
80  * Return timestamps to the user exactly as returned by the kernel
81  */
82 #define V4L_TS_DEFAULT 0
83 /**
84  * Autodetect the kind of timestamps returned by the kernel and convert to
85  * absolute (wall clock) timestamps.
86  */
87 #define V4L_TS_ABS 1
88 /**
89  * Assume kernel timestamps are from the monotonic clock and convert to
90  * absolute timestamps.
91  */
92 #define V4L_TS_MONO2ABS 2
93 
94 /**
95  * Once the kind of timestamps returned by the kernel have been detected,
96  * the value of the timefilter (NULL or not) determines whether a conversion
97  * takes place.
98  */
99 #define V4L_TS_CONVERT_READY V4L_TS_DEFAULT
100 
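A short, hedged example of how these timestamp modes are selected from the caller's side. It continues the sketch above (fmt_ctx, ifmt and ret are reused) and relies on the "timestamps"/"ts" private option declared in the options[] table at the end of this file.

/* Request conversion of kernel monotonic timestamps to wall-clock time.
 * Accepted string values: "default" (V4L_TS_DEFAULT), "abs" (V4L_TS_ABS),
 * "mono2abs" (V4L_TS_MONO2ABS). */
AVDictionary *opts = NULL;
av_dict_set(&opts, "timestamps", "mono2abs", 0);   /* "ts" is an alias */
ret = avformat_open_input(&fmt_ctx, "/dev/video0", ifmt, &opts);
av_dict_free(&opts);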
101 struct video_data {
102  AVClass *class;
103  int fd;
104  int frame_format; /* V4L2_PIX_FMT_* */
105  int width, height;
106  int frame_size;
107  int interlaced;
108  int top_field_first;
109  int ts_mode;
110  TimeFilter *timefilter;
111  int64_t last_time_m;
112 
113  int buffers;
114  volatile int buffers_queued;
115  void **buf_start;
116  unsigned int *buf_len;
117  char *standard;
118  v4l2_std_id std_id;
119  int channel;
120  char *pixel_format; /**< Set by a private option. */
121  int list_format; /**< Set by a private option. */
122  int list_standard; /**< Set by a private option. */
123  char *framerate; /**< Set by a private option. */
124 };
125 
126 struct buff_data {
127  struct video_data *s;
128  int index;
129 };
130 
131 struct fmt_map {
132  enum AVPixelFormat ff_fmt;
133  enum AVCodecID codec_id;
134  uint32_t v4l2_fmt;
135 };
136 
137 static struct fmt_map fmt_conversion_table[] = {
138  //ff_fmt codec_id v4l2_fmt
139  { AV_PIX_FMT_YUV420P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV420 },
140  { AV_PIX_FMT_YUV420P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YVU420 },
141  { AV_PIX_FMT_YUV422P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV422P },
142  { AV_PIX_FMT_YUYV422, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUYV },
143  { AV_PIX_FMT_UYVY422, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_UYVY },
144  { AV_PIX_FMT_YUV411P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV411P },
145  { AV_PIX_FMT_YUV410P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YUV410 },
146  { AV_PIX_FMT_YUV410P, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_YVU410 },
147  { AV_PIX_FMT_RGB555LE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB555 },
148  { AV_PIX_FMT_RGB555BE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB555X },
149  { AV_PIX_FMT_RGB565LE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB565 },
150  { AV_PIX_FMT_RGB565BE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB565X },
151  { AV_PIX_FMT_BGR24, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_BGR24 },
152  { AV_PIX_FMT_RGB24, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB24 },
153  { AV_PIX_FMT_BGR0, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_BGR32 },
154  { AV_PIX_FMT_0RGB, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_RGB32 },
155  { AV_PIX_FMT_GRAY8, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_GREY },
156 #ifdef V4L2_PIX_FMT_Y16
157  { AV_PIX_FMT_GRAY16LE,AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_Y16 },
158 #endif
159  { AV_PIX_FMT_NV12, AV_CODEC_ID_RAWVIDEO, V4L2_PIX_FMT_NV12 },
160  { AV_PIX_FMT_NONE, AV_CODEC_ID_MJPEG, V4L2_PIX_FMT_MJPEG },
161  { AV_PIX_FMT_NONE, AV_CODEC_ID_MJPEG, V4L2_PIX_FMT_JPEG },
162 #ifdef V4L2_PIX_FMT_H264
163  { AV_PIX_FMT_NONE, AV_CODEC_ID_H264, V4L2_PIX_FMT_H264 },
164 #endif
165 #ifdef V4L2_PIX_FMT_CPIA1
166  { AV_PIX_FMT_NONE, AV_CODEC_ID_CPIA, V4L2_PIX_FMT_CPIA1 },
167 #endif
168 };
169 
170 static int device_open(AVFormatContext *ctx)
171 {
172  struct v4l2_capability cap;
173  int fd;
174  int ret;
175  int flags = O_RDWR;
176 
177  if (ctx->flags & AVFMT_FLAG_NONBLOCK) {
178  flags |= O_NONBLOCK;
179  }
180 
181  fd = v4l2_open(ctx->filename, flags, 0);
182  if (fd < 0) {
183  ret = AVERROR(errno);
184  av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s: %s\n",
185  ctx->filename, av_err2str(ret));
186  return ret;
187  }
188 
189  if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
190  ret = AVERROR(errno);
191  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
192  av_err2str(ret));
193  goto fail;
194  }
195 
196  av_log(ctx, AV_LOG_VERBOSE, "fd:%d capabilities:%x\n",
197  fd, cap.capabilities);
198 
199  if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
200  av_log(ctx, AV_LOG_ERROR, "Not a video capture device.\n");
201  ret = AVERROR(ENODEV);
202  goto fail;
203  }
204 
205  if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
206  av_log(ctx, AV_LOG_ERROR,
207  "The device does not support the streaming I/O method.\n");
208  ret = AVERROR(ENOSYS);
209  goto fail;
210  }
211 
212  return fd;
213 
214 fail:
215  v4l2_close(fd);
216  return ret;
217 }
218 
219 static int device_init(AVFormatContext *ctx, int *width, int *height,
220  uint32_t pix_fmt)
221 {
222  struct video_data *s = ctx->priv_data;
223  int fd = s->fd;
224  struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
225  struct v4l2_pix_format *pix = &fmt.fmt.pix;
226 
227  int res = 0;
228 
229  pix->width = *width;
230  pix->height = *height;
231  pix->pixelformat = pix_fmt;
232  pix->field = V4L2_FIELD_ANY;
233 
234  if (v4l2_ioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
235  res = AVERROR(errno);
236 
237  if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
238  av_log(ctx, AV_LOG_INFO,
239  "The V4L2 driver changed the video from %dx%d to %dx%d\n",
240  *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
241  *width = fmt.fmt.pix.width;
242  *height = fmt.fmt.pix.height;
243  }
244 
245  if (pix_fmt != fmt.fmt.pix.pixelformat) {
246  av_log(ctx, AV_LOG_DEBUG,
247  "The V4L2 driver changed the pixel format "
248  "from 0x%08X to 0x%08X\n",
249  pix_fmt, fmt.fmt.pix.pixelformat);
250  res = AVERROR(EINVAL);
251  }
252 
253  if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED) {
254  av_log(ctx, AV_LOG_DEBUG,
255  "The V4L2 driver is using the interlaced mode\n");
256  s->interlaced = 1;
257  }
258 
259  return res;
260 }
261 
262 static int first_field(int fd)
263 {
264  int res;
265  v4l2_std_id std;
266 
267  res = v4l2_ioctl(fd, VIDIOC_G_STD, &std);
268  if (res < 0) {
269  return 0;
270  }
271  if (std & V4L2_STD_NTSC) {
272  return 0;
273  }
274 
275  return 1;
276 }
277 
278 static uint32_t fmt_ff2v4l(enum AVPixelFormat pix_fmt, enum AVCodecID codec_id)
279 {
280  int i;
281 
282  for (i = 0; i < FF_ARRAY_ELEMS(fmt_conversion_table); i++) {
283  if ((codec_id == AV_CODEC_ID_NONE ||
284  fmt_conversion_table[i].codec_id == codec_id) &&
285  (pix_fmt == AV_PIX_FMT_NONE ||
286  fmt_conversion_table[i].ff_fmt == pix_fmt)) {
287  return fmt_conversion_table[i].v4l2_fmt;
288  }
289  }
290 
291  return 0;
292 }
293 
294 static enum AVPixelFormat fmt_v4l2ff(uint32_t v4l2_fmt, enum AVCodecID codec_id)
295 {
296  int i;
297 
298  for (i = 0; i < FF_ARRAY_ELEMS(fmt_conversion_table); i++) {
299  if (fmt_conversion_table[i].v4l2_fmt == v4l2_fmt &&
300  fmt_conversion_table[i].codec_id == codec_id) {
301  return fmt_conversion_table[i].ff_fmt;
302  }
303  }
304 
305  return AV_PIX_FMT_NONE;
306 }
307 
308 static enum AVCodecID fmt_v4l2codec(uint32_t v4l2_fmt)
309 {
310  int i;
311 
312  for (i = 0; i < FF_ARRAY_ELEMS(fmt_conversion_table); i++) {
313  if (fmt_conversion_table[i].v4l2_fmt == v4l2_fmt) {
314  return fmt_conversion_table[i].codec_id;
315  }
316  }
317 
318  return AV_CODEC_ID_NONE;
319 }
320 
321 #if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
322 static void list_framesizes(AVFormatContext *ctx, int fd, uint32_t pixelformat)
323 {
324  struct v4l2_frmsizeenum vfse = { .pixel_format = pixelformat };
325 
326  while(!v4l2_ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) {
327  switch (vfse.type) {
328  case V4L2_FRMSIZE_TYPE_DISCRETE:
329  av_log(ctx, AV_LOG_INFO, " %ux%u",
330  vfse.discrete.width, vfse.discrete.height);
331  break;
332  case V4L2_FRMSIZE_TYPE_CONTINUOUS:
333  case V4L2_FRMSIZE_TYPE_STEPWISE:
334  av_log(ctx, AV_LOG_INFO, " {%u-%u, %u}x{%u-%u, %u}",
335  vfse.stepwise.min_width,
336  vfse.stepwise.max_width,
337  vfse.stepwise.step_width,
338  vfse.stepwise.min_height,
339  vfse.stepwise.max_height,
340  vfse.stepwise.step_height);
341  }
342  vfse.index++;
343  }
344 }
345 #endif
346 
347 static void list_formats(AVFormatContext *ctx, int fd, int type)
348 {
349  struct v4l2_fmtdesc vfd = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
350 
351  while(!v4l2_ioctl(fd, VIDIOC_ENUM_FMT, &vfd)) {
352  enum AVCodecID codec_id = fmt_v4l2codec(vfd.pixelformat);
353  enum AVPixelFormat pix_fmt = fmt_v4l2ff(vfd.pixelformat, codec_id);
354 
355  vfd.index++;
356 
357  if (!(vfd.flags & V4L2_FMT_FLAG_COMPRESSED) &&
358  type & V4L_RAWFORMATS) {
359  const char *fmt_name = av_get_pix_fmt_name(pix_fmt);
360  av_log(ctx, AV_LOG_INFO, "Raw : %9s : %20s :",
361  fmt_name ? fmt_name : "Unsupported",
362  vfd.description);
363  } else if (vfd.flags & V4L2_FMT_FLAG_COMPRESSED &&
364  type & V4L_COMPFORMATS) {
365  AVCodec *codec = avcodec_find_decoder(codec_id);
366  av_log(ctx, AV_LOG_INFO, "Compressed: %9s : %20s :",
367  codec ? codec->name : "Unsupported",
368  vfd.description);
369  } else {
370  continue;
371  }
372 
373 #ifdef V4L2_FMT_FLAG_EMULATED
374  if (vfd.flags & V4L2_FMT_FLAG_EMULATED)
375  av_log(ctx, AV_LOG_INFO, " Emulated :");
376 #endif
377 #if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
378  list_framesizes(ctx, fd, vfd.pixelformat);
379 #endif
380  av_log(ctx, AV_LOG_INFO, "\n");
381  }
382 }
383 
384 static void list_standards(AVFormatContext *ctx)
385 {
386  int ret;
387  struct video_data *s = ctx->priv_data;
388  struct v4l2_standard standard;
389 
390  if (s->std_id == 0)
391  return;
392 
393  for (standard.index = 0; ; standard.index++) {
394  if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
395  ret = AVERROR(errno);
396  if (ret == AVERROR(EINVAL)) {
397  break;
398  } else {
399  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
400  return;
401  }
402  }
403  av_log(ctx, AV_LOG_INFO, "%2d, %16llx, %s\n",
404  standard.index, standard.id, standard.name);
405  }
406 }
407 
408 static int mmap_init(AVFormatContext *ctx)
409 {
410  int i, res;
411  struct video_data *s = ctx->priv_data;
412  struct v4l2_requestbuffers req = {
413  .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
414  .count = desired_video_buffers,
415  .memory = V4L2_MEMORY_MMAP
416  };
417 
418  if (v4l2_ioctl(s->fd, VIDIOC_REQBUFS, &req) < 0) {
419  res = AVERROR(errno);
420  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS): %s\n", av_err2str(res));
421  return res;
422  }
423 
424  if (req.count < 2) {
425  av_log(ctx, AV_LOG_ERROR, "Insufficient buffer memory\n");
426  return AVERROR(ENOMEM);
427  }
428  s->buffers = req.count;
429  s->buf_start = av_malloc(sizeof(void *) * s->buffers);
430  if (s->buf_start == NULL) {
431  av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");
432  return AVERROR(ENOMEM);
433  }
434  s->buf_len = av_malloc(sizeof(unsigned int) * s->buffers);
435  if (s->buf_len == NULL) {
436  av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
437  av_free(s->buf_start);
438  return AVERROR(ENOMEM);
439  }
440 
441  for (i = 0; i < req.count; i++) {
442  struct v4l2_buffer buf = {
443  .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
444  .index = i,
445  .memory = V4L2_MEMORY_MMAP
446  };
447  if (v4l2_ioctl(s->fd, VIDIOC_QUERYBUF, &buf) < 0) {
448  res = AVERROR(errno);
449  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF): %s\n", av_err2str(res));
450  return res;
451  }
452 
453  s->buf_len[i] = buf.length;
454  if (s->frame_size > 0 && s->buf_len[i] < s->frame_size) {
455  av_log(ctx, AV_LOG_ERROR,
456  "buf_len[%d] = %d < expected frame size %d\n",
457  i, s->buf_len[i], s->frame_size);
458  return AVERROR(ENOMEM);
459  }
460  s->buf_start[i] = v4l2_mmap(NULL, buf.length,
461  PROT_READ | PROT_WRITE, MAP_SHARED,
462  s->fd, buf.m.offset);
463 
464  if (s->buf_start[i] == MAP_FAILED) {
465  res = AVERROR(errno);
466  av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", av_err2str(res));
467  return res;
468  }
469  }
470 
471  return 0;
472 }
473 
474 #if FF_API_DESTRUCT_PACKET
475 static void dummy_release_buffer(AVPacket *pkt)
476 {
477  av_assert0(0);
478 }
479 #endif
480 
481 static void mmap_release_buffer(void *opaque, uint8_t *data)
482 {
483  struct v4l2_buffer buf = { 0 };
484  int res;
485  struct buff_data *buf_descriptor = opaque;
486  struct video_data *s = buf_descriptor->s;
487 
488  buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
489  buf.memory = V4L2_MEMORY_MMAP;
490  buf.index = buf_descriptor->index;
491  av_free(buf_descriptor);
492 
493  if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) {
494  res = AVERROR(errno);
495  av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n",
496  av_err2str(res));
497  }
498 
499  avpriv_atomic_int_add_and_fetch(&s->buffers_queued, 1);
500 }
501 
502 #if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
503 static int64_t av_gettime_monotonic(void)
504 {
505  struct timespec tv;
506 
507  clock_gettime(CLOCK_MONOTONIC, &tv);
508  return (int64_t)tv.tv_sec * 1000000 + tv.tv_nsec / 1000;
509 }
510 #endif
511 
512 static int init_convert_timestamp(AVFormatContext *ctx, int64_t ts)
513 {
514  struct video_data *s = ctx->priv_data;
515  int64_t now;
516 
517  now = av_gettime();
518  if (s->ts_mode == V4L_TS_ABS &&
519  ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE) {
520  av_log(ctx, AV_LOG_INFO, "Detected absolute timestamps\n");
521  s->ts_mode = V4L_TS_CONVERT_READY;
522  return 0;
523  }
524 #if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
525  now = av_gettime_monotonic();
526  if (s->ts_mode == V4L_TS_MONO2ABS ||
527  (ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE)) {
528  AVRational tb = {AV_TIME_BASE, 1};
529  int64_t period = av_rescale_q(1, tb, ctx->streams[0]->avg_frame_rate);
530  av_log(ctx, AV_LOG_INFO, "Detected monotonic timestamps, converting\n");
531  /* microseconds instead of seconds, MHz instead of Hz */
532  s->timefilter = ff_timefilter_new(1, period, 1.0E-6);
533  s->ts_mode = V4L_TS_CONVERT_READY;
534  return 0;
535  }
536 #endif
537  av_log(ctx, AV_LOG_ERROR, "Unknown timestamps\n");
538  return AVERROR(EIO);
539 }
540 
541 static int convert_timestamp(AVFormatContext *ctx, int64_t *ts)
542 {
543  struct video_data *s = ctx->priv_data;
544 
545  if (s->ts_mode) {
546  int r = init_convert_timestamp(ctx, *ts);
547  if (r < 0)
548  return r;
549  }
550 #if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
551  if (s->timefilter) {
552  int64_t nowa = av_gettime();
553  int64_t nowm = av_gettime_monotonic();
554  ff_timefilter_update(s->timefilter, nowa, nowm - s->last_time_m);
555  s->last_time_m = nowm;
556  *ts = ff_timefilter_eval(s->timefilter, *ts - nowm);
557  }
558 #endif
559  return 0;
560 }
561 
562 static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
563 {
564  struct video_data *s = ctx->priv_data;
565  struct v4l2_buffer buf = {
566  .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
567  .memory = V4L2_MEMORY_MMAP
568  };
569  int res;
570 
571  /* FIXME: Some special treatment might be needed in case of loss of signal... */
572  while ((res = v4l2_ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));
573  if (res < 0) {
574  if (errno == EAGAIN) {
575  pkt->size = 0;
576  return AVERROR(EAGAIN);
577  }
578  res = AVERROR(errno);
579  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n", av_err2str(res));
580  return res;
581  }
582 
583  if (buf.index >= s->buffers) {
584  av_log(ctx, AV_LOG_ERROR, "Invalid buffer index received.\n");
585  return AVERROR(EINVAL);
586  }
587  avpriv_atomic_int_add_and_fetch(&s->buffers_queued, -1);
588  // always keep at least one buffer queued
589  av_assert0(avpriv_atomic_int_get(&s->buffers_queued) >= 1);
590 
591  /* CPIA is a compressed format and we don't know the exact number of bytes
592  * used by a frame, so set it here as the driver announces it.
593  */
594  if (ctx->video_codec_id == AV_CODEC_ID_CPIA)
595  s->frame_size = buf.bytesused;
596 
597  if (s->frame_size > 0 && buf.bytesused != s->frame_size) {
598  av_log(ctx, AV_LOG_ERROR,
599  "The v4l2 frame is %d bytes, but %d bytes are expected\n",
600  buf.bytesused, s->frame_size);
601  return AVERROR_INVALIDDATA;
602  }
603 
604  /* Image is at s->buff_start[buf.index] */
605  if (avpriv_atomic_int_get(&s->buffers_queued) == FFMAX(s->buffers / 8, 1)) {
606  /* when we start getting low on queued buffers, fallback to copying data */
607  res = av_new_packet(pkt, buf.bytesused);
608  if (res < 0) {
609  av_log(ctx, AV_LOG_ERROR, "Error allocating a packet.\n");
610  if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) == 0)
611  avpriv_atomic_int_add_and_fetch(&s->buffers_queued, 1);
612  return res;
613  }
614  memcpy(pkt->data, s->buf_start[buf.index], buf.bytesused);
615 
616  if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) {
617  res = AVERROR(errno);
618  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", av_err2str(res));
619  av_free_packet(pkt);
620  return res;
621  }
622  avpriv_atomic_int_add_and_fetch(&s->buffers_queued, 1);
623  } else {
624  struct buff_data *buf_descriptor;
625 
626  pkt->data = s->buf_start[buf.index];
627  pkt->size = buf.bytesused;
628 #if FF_API_DESTRUCT_PACKET
629  pkt->destruct = dummy_release_buffer;
630 #endif
631 
632  buf_descriptor = av_malloc(sizeof(struct buff_data));
633  if (buf_descriptor == NULL) {
634  /* Something went wrong... Since av_malloc() failed, we cannot even
635  * allocate a buffer for memcpying into it
636  */
637  av_log(ctx, AV_LOG_ERROR, "Failed to allocate a buffer descriptor\n");
638  if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) == 0)
639  avpriv_atomic_int_add_and_fetch(&s->buffers_queued, 1);
640 
641  return AVERROR(ENOMEM);
642  }
643  buf_descriptor->index = buf.index;
644  buf_descriptor->s = s;
645 
646  pkt->buf = av_buffer_create(pkt->data, pkt->size, mmap_release_buffer,
647  buf_descriptor, 0);
648  if (!pkt->buf) {
649  av_log(ctx, AV_LOG_ERROR, "Failed to create a buffer\n");
650  if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) == 0)
651  avpriv_atomic_int_add_and_fetch(&s->buffers_queued, 1);
652  av_freep(&buf_descriptor);
653  return AVERROR(ENOMEM);
654  }
655  }
656  pkt->pts = buf.timestamp.tv_sec * INT64_C(1000000) + buf.timestamp.tv_usec;
657  convert_timestamp(ctx, &pkt->pts);
658 
659  return s->buf_len[buf.index];
660 }
661 
662 static int mmap_start(AVFormatContext *ctx)
663 {
664  struct video_data *s = ctx->priv_data;
665  enum v4l2_buf_type type;
666  int i, res;
667 
668  for (i = 0; i < s->buffers; i++) {
669  struct v4l2_buffer buf = {
670  .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
671  .index = i,
672  .memory = V4L2_MEMORY_MMAP
673  };
674 
675  if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) {
676  res = AVERROR(errno);
677  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", av_err2str(res));
678  return res;
679  }
680  }
681  s->buffers_queued = s->buffers;
682 
683  type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
684  if (v4l2_ioctl(s->fd, VIDIOC_STREAMON, &type) < 0) {
685  res = AVERROR(errno);
686  av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n", av_err2str(res));
687  return res;
688  }
689 
690  return 0;
691 }
692 
693 static void mmap_close(struct video_data *s)
694 {
695  enum v4l2_buf_type type;
696  int i;
697 
698  type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
699  /* We do not check for the result, because we could
700  * not do anything about it anyway...
701  */
702  v4l2_ioctl(s->fd, VIDIOC_STREAMOFF, &type);
703  for (i = 0; i < s->buffers; i++) {
704  v4l2_munmap(s->buf_start[i], s->buf_len[i]);
705  }
706  av_free(s->buf_start);
707  av_free(s->buf_len);
708 }
709 
710 static int v4l2_set_parameters(AVFormatContext *s1)
711 {
712  struct video_data *s = s1->priv_data;
713  struct v4l2_standard standard = { 0 };
714  struct v4l2_streamparm streamparm = { 0 };
715  struct v4l2_fract *tpf;
716  AVRational framerate_q = { 0 };
717  int i, ret;
718 
719  if (s->framerate &&
720  (ret = av_parse_video_rate(&framerate_q, s->framerate)) < 0) {
721  av_log(s1, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
722  s->framerate);
723  return ret;
724  }
725 
726  if (s->standard) {
727  if (s->std_id) {
728  ret = 0;
729  av_log(s1, AV_LOG_DEBUG, "Setting standard: %s\n", s->standard);
730  /* set tv standard */
731  for (i = 0; ; i++) {
732  standard.index = i;
733  if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
734  ret = AVERROR(errno);
735  break;
736  }
737  if (!av_strcasecmp(standard.name, s->standard))
738  break;
739  }
740  if (ret < 0) {
741  av_log(s1, AV_LOG_ERROR, "Unknown or unsupported standard '%s'\n", s->standard);
742  return ret;
743  }
744 
745  if (v4l2_ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) {
746  ret = AVERROR(errno);
747  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_S_STD): %s\n", av_err2str(ret));
748  return ret;
749  }
750  } else {
751  av_log(s1, AV_LOG_WARNING,
752  "This device does not support any standard\n");
753  }
754  }
755 
756  /* get standard */
757  if (v4l2_ioctl(s->fd, VIDIOC_G_STD, &s->std_id) == 0) {
758  tpf = &standard.frameperiod;
759  for (i = 0; ; i++) {
760  standard.index = i;
761  if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
762  ret = AVERROR(errno);
763  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
764  return ret;
765  }
766  if (standard.id == s->std_id) {
767  av_log(s1, AV_LOG_DEBUG,
768  "Current standard: %s, id: %"PRIu64", frameperiod: %d/%d\n",
769  standard.name, (uint64_t)standard.id, tpf->numerator, tpf->denominator);
770  break;
771  }
772  }
773  } else {
774  tpf = &streamparm.parm.capture.timeperframe;
775  }
776 
777  streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
778  if (v4l2_ioctl(s->fd, VIDIOC_G_PARM, &streamparm) < 0) {
779  ret = AVERROR(errno);
780  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_PARM): %s\n", av_err2str(ret));
781  return ret;
782  }
783 
784  if (framerate_q.num && framerate_q.den) {
785  if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
786  tpf = &streamparm.parm.capture.timeperframe;
787 
788  av_log(s1, AV_LOG_DEBUG, "Setting time per frame to %d/%d\n",
789  framerate_q.den, framerate_q.num);
790  tpf->numerator = framerate_q.den;
791  tpf->denominator = framerate_q.num;
792 
793  if (v4l2_ioctl(s->fd, VIDIOC_S_PARM, &streamparm) < 0) {
794  ret = AVERROR(errno);
795  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_S_PARM): %s\n", av_err2str(ret));
796  return ret;
797  }
798 
799  if (framerate_q.num != tpf->denominator ||
800  framerate_q.den != tpf->numerator) {
801  av_log(s1, AV_LOG_INFO,
802  "The driver changed the time per frame from "
803  "%d/%d to %d/%d\n",
804  framerate_q.den, framerate_q.num,
805  tpf->numerator, tpf->denominator);
806  }
807  } else {
808  av_log(s1, AV_LOG_WARNING,
809  "The driver does not allow to change time per frame\n");
810  }
811  }
812  s1->streams[0]->avg_frame_rate.num = tpf->denominator;
813  s1->streams[0]->avg_frame_rate.den = tpf->numerator;
814  s1->streams[0]->r_frame_rate = s1->streams[0]->avg_frame_rate;
815 
816  return 0;
817 }
818 
819 static int device_try_init(AVFormatContext *s1,
820  enum AVPixelFormat pix_fmt,
821  int *width,
822  int *height,
823  uint32_t *desired_format,
824  enum AVCodecID *codec_id)
825 {
826  int ret, i;
827 
828  *desired_format = fmt_ff2v4l(pix_fmt, s1->video_codec_id);
829 
830  if (*desired_format) {
831  ret = device_init(s1, width, height, *desired_format);
832  if (ret < 0) {
833  *desired_format = 0;
834  if (ret != AVERROR(EINVAL))
835  return ret;
836  }
837  }
838 
839  if (!*desired_format) {
840  for (i = 0; i<FF_ARRAY_ELEMS(fmt_conversion_table); i++) {
841  if (s1->video_codec_id == AV_CODEC_ID_NONE ||
842  fmt_conversion_table[i].codec_id == s1->video_codec_id) {
843  av_log(s1, AV_LOG_DEBUG, "Trying to set codec:%s pix_fmt:%s\n",
844  avcodec_get_name(fmt_conversion_table[i].codec_id),
845  (char *)av_x_if_null(av_get_pix_fmt_name(fmt_conversion_table[i].ff_fmt), "none"));
846 
847  *desired_format = fmt_conversion_table[i].v4l2_fmt;
848  ret = device_init(s1, width, height, *desired_format);
849  if (ret >= 0)
850  break;
851  else if (ret != AVERROR(EINVAL))
852  return ret;
853  *desired_format = 0;
854  }
855  }
856 
857  if (*desired_format == 0) {
858  av_log(s1, AV_LOG_ERROR, "Cannot find a proper format for "
859  "codec '%s' (id %d), pixel format '%s' (id %d)\n",
860  avcodec_get_name(s1->video_codec_id), s1->video_codec_id,
861  (char *)av_x_if_null(av_get_pix_fmt_name(pix_fmt), "none"), pix_fmt);
862  ret = AVERROR(EINVAL);
863  }
864  }
865 
866  *codec_id = fmt_v4l2codec(*desired_format);
867  av_assert0(*codec_id != AV_CODEC_ID_NONE);
868  return ret;
869 }
870 
871 static int v4l2_read_header(AVFormatContext *s1)
872 {
873  struct video_data *s = s1->priv_data;
874  AVStream *st;
875  int res = 0;
876  uint32_t desired_format;
877  enum AVCodecID codec_id = AV_CODEC_ID_NONE;
878  enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;
879  struct v4l2_input input = { 0 };
880 
881  st = avformat_new_stream(s1, NULL);
882  if (!st)
883  return AVERROR(ENOMEM);
884 
885 #if CONFIG_LIBV4L2
886  /* silence libv4l2 logging. if fopen() fails v4l2_log_file will be NULL
887  and errors will get sent to stderr */
888  v4l2_log_file = fopen("/dev/null", "w");
889 #endif
890 
891  s->fd = device_open(s1);
892  if (s->fd < 0)
893  return s->fd;
894 
895  if (s->channel != -1) {
896  /* set video input */
897  av_log(s1, AV_LOG_DEBUG, "Selecting input_channel: %d\n", s->channel);
898  if (v4l2_ioctl(s->fd, VIDIOC_S_INPUT, &s->channel) < 0) {
899  res = AVERROR(errno);
900  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_S_INPUT): %s\n", av_err2str(res));
901  return res;
902  }
903  } else {
904  /* get current video input */
905  if (v4l2_ioctl(s->fd, VIDIOC_G_INPUT, &s->channel) < 0) {
906  res = AVERROR(errno);
907  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_INPUT): %s\n", av_err2str(res));
908  return res;
909  }
910  }
911 
912  /* enum input */
913  input.index = s->channel;
914  if (v4l2_ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) {
915  res = AVERROR(errno);
916  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMINPUT): %s\n", av_err2str(res));
917  return res;
918  }
919  s->std_id = input.std;
920  av_log(s1, AV_LOG_DEBUG, "Current input_channel: %d, input_name: %s\n",
921  s->channel, input.name);
922 
923  if (s->list_format) {
924  list_formats(s1, s->fd, s->list_format);
925  return AVERROR_EXIT;
926  }
927 
928  if (s->list_standard) {
929  list_standards(s1);
930  return AVERROR_EXIT;
931  }
932 
933  avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */
934 
935  if (s->pixel_format) {
936  AVCodec *codec = avcodec_find_decoder_by_name(s->pixel_format);
937 
938  if (codec)
939  s1->video_codec_id = codec->id;
940 
941  pix_fmt = av_get_pix_fmt(s->pixel_format);
942 
943  if (pix_fmt == AV_PIX_FMT_NONE && !codec) {
944  av_log(s1, AV_LOG_ERROR, "No such input format: %s.\n",
945  s->pixel_format);
946 
947  return AVERROR(EINVAL);
948  }
949  }
950 
951  if (!s->width && !s->height) {
952  struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
953 
954  av_log(s1, AV_LOG_VERBOSE,
955  "Querying the device for the current frame size\n");
956  if (v4l2_ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) {
957  res = AVERROR(errno);
958  av_log(s1, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n", av_err2str(res));
959  return res;
960  }
961 
962  s->width = fmt.fmt.pix.width;
963  s->height = fmt.fmt.pix.height;
964  av_log(s1, AV_LOG_VERBOSE,
965  "Setting frame size to %dx%d\n", s->width, s->height);
966  }
967 
968  res = device_try_init(s1, pix_fmt, &s->width, &s->height, &desired_format, &codec_id);
969  if (res < 0) {
970  v4l2_close(s->fd);
971  return res;
972  }
973 
974  /* If no pixel_format was specified, the codec_id was not known up
975  * until now. Set video_codec_id in the context, as codec_id will
976  * not be available outside this function
977  */
978  if (codec_id != AV_CODEC_ID_NONE && s1->video_codec_id == AV_CODEC_ID_NONE)
979  s1->video_codec_id = codec_id;
980 
981  if ((res = av_image_check_size(s->width, s->height, 0, s1)) < 0)
982  return res;
983 
984  s->frame_format = desired_format;
985 
986  if ((res = v4l2_set_parameters(s1)) < 0)
987  return res;
988 
989  st->codec->pix_fmt = fmt_v4l2ff(desired_format, codec_id);
990  s->frame_size =
991  avpicture_get_size(st->codec->pix_fmt, s->width, s->height);
992 
993  if ((res = mmap_init(s1)) ||
994  (res = mmap_start(s1)) < 0) {
995  v4l2_close(s->fd);
996  return res;
997  }
998 
999  s->top_field_first = first_field(s->fd);
1000 
1001  st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
1002  st->codec->codec_id = codec_id;
1003  if (codec_id == AV_CODEC_ID_RAWVIDEO)
1004  st->codec->codec_tag =
1005  avcodec_pix_fmt_to_codec_tag(st->codec->pix_fmt);
1006  if (desired_format == V4L2_PIX_FMT_YVU420)
1007  st->codec->codec_tag = MKTAG('Y', 'V', '1', '2');
1008  else if (desired_format == V4L2_PIX_FMT_YVU410)
1009  st->codec->codec_tag = MKTAG('Y', 'V', 'U', '9');
1010  st->codec->width = s->width;
1011  st->codec->height = s->height;
1012  st->codec->bit_rate = s->frame_size * av_q2d(st->avg_frame_rate) * 8;
1013 
1014  return 0;
1015 }
1016 
1017 static int v4l2_read_packet(AVFormatContext *s1, AVPacket *pkt)
1018 {
1019  struct video_data *s = s1->priv_data;
1020  AVFrame *frame = s1->streams[0]->codec->coded_frame;
1021  int res;
1022 
1023  av_init_packet(pkt);
1024  if ((res = mmap_read_frame(s1, pkt)) < 0) {
1025  return res;
1026  }
1027 
1028  if (frame && s->interlaced) {
1029  frame->interlaced_frame = 1;
1030  frame->top_field_first = s->top_field_first;
1031  }
1032 
1033  return pkt->size;
1034 }
1035 
1036 static int v4l2_read_close(AVFormatContext *s1)
1037 {
1038  struct video_data *s = s1->priv_data;
1039 
1040  if (avpriv_atomic_int_get(&s->buffers_queued) != s->buffers)
1041  av_log(s1, AV_LOG_WARNING, "Some buffers are still owned by the caller on "
1042  "close.\n");
1043 
1044  mmap_close(s);
1045 
1046  v4l2_close(s->fd);
1047  return 0;
1048 }
1049 
1050 #define OFFSET(x) offsetof(struct video_data, x)
1051 #define DEC AV_OPT_FLAG_DECODING_PARAM
1052 
1053 static const AVOption options[] = {
1054  { "standard", "set TV standard, used only by analog frame grabber", OFFSET(standard), AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0, DEC },
1055  { "channel", "set TV channel, used only by frame grabber", OFFSET(channel), AV_OPT_TYPE_INT, {.i64 = -1 }, -1, INT_MAX, DEC },
1056  { "video_size", "set frame size", OFFSET(width), AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL}, 0, 0, DEC },
1057  { "pixel_format", "set preferred pixel format", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
1058  { "input_format", "set preferred pixel format (for raw video) or codec name", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
1059  { "framerate", "set frame rate", OFFSET(framerate), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, DEC },
1060 
1061  { "list_formats", "list available formats and exit", OFFSET(list_format), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, INT_MAX, DEC, "list_formats" },
1062  { "all", "show all available formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_ALLFORMATS }, 0, INT_MAX, DEC, "list_formats" },
1063  { "raw", "show only non-compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_RAWFORMATS }, 0, INT_MAX, DEC, "list_formats" },
1064  { "compressed", "show only compressed formats", OFFSET(list_format), AV_OPT_TYPE_CONST, {.i64 = V4L_COMPFORMATS }, 0, INT_MAX, DEC, "list_formats" },
1065 
1066  { "list_standards", "list supported standards and exit", OFFSET(list_standard), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 1, DEC, "list_standards" },
1067  { "all", "show all supported standards", OFFSET(list_standard), AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, DEC, "list_standards" },
1068 
1069  { "timestamps", "set type of timestamps for grabbed frames", OFFSET(ts_mode), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 2, DEC, "timestamps" },
1070  { "ts", "set type of timestamps for grabbed frames", OFFSET(ts_mode), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 2, DEC, "timestamps" },
1071  { "default", "use timestamps from the kernel", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_DEFAULT }, 0, 2, DEC, "timestamps" },
1072  { "abs", "use absolute timestamps (wall clock)", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_ABS }, 0, 2, DEC, "timestamps" },
1073  { "mono2abs", "force conversion from monotonic to absolute timestamps", OFFSET(ts_mode), AV_OPT_TYPE_CONST, {.i64 = V4L_TS_MONO2ABS }, 0, 2, DEC, "timestamps" },
1074 
1075  { NULL },
1076 };
1077 
1078 static const AVClass v4l2_class = {
1079  .class_name = "V4L2 indev",
1080  .item_name = av_default_item_name,
1081  .option = options,
1082  .version = LIBAVUTIL_VERSION_INT,
1083 };
1084 
1085 AVInputFormat ff_v4l2_demuxer = {
1086  .name = "video4linux2,v4l2",
1087  .long_name = NULL_IF_CONFIG_SMALL("Video4Linux2 device grab"),
1088  .priv_data_size = sizeof(struct video_data),
1089  .read_header = v4l2_read_header,
1090  .read_packet = v4l2_read_packet,
1091  .read_close = v4l2_read_close,
1092  .flags = AVFMT_NOFILE,
1093  .priv_class = &v4l2_class,
1094 };