rtpdec_jpeg.c
/*
 * RTP JPEG-compressed Video Depacketizer, RFC 2435
 * Copyright (c) 2012 Samuel Pitoiset
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "avformat.h"
#include "rtpdec.h"
#include "rtpdec_formats.h"
#include "libavutil/intreadwrite.h"
#include "libavcodec/mjpeg.h"
#include "libavcodec/bytestream.h"

/**
 * RTP/JPEG specific private data.
 */
struct PayloadContext {
    AVIOContext *frame;         ///< current frame buffer
    uint32_t    timestamp;      ///< current frame timestamp
    int         hdr_size;       ///< size of the current frame header
    uint8_t     qtables[128][128];  ///< quantization tables cached from in-band data, indexed by Q - 128
    uint8_t     qtables_len[128];   ///< lengths of the cached quantization tables
};
39 
40 static const uint8_t default_quantizers[128] = {
41  /* luma table */
42  16, 11, 12, 14, 12, 10, 16, 14,
43  13, 14, 18, 17, 16, 19, 24, 40,
44  26, 24, 22, 22, 24, 49, 35, 37,
45  29, 40, 58, 51, 61, 60, 57, 51,
46  56, 55, 64, 72, 92, 78, 64, 68,
47  87, 69, 55, 56, 80, 109, 81, 87,
48  95, 98, 103, 104, 103, 62, 77, 113,
49  121, 112, 100, 120, 92, 101, 103, 99,
50 
51  /* chroma table */
52  17, 18, 18, 24, 21, 24, 47, 26,
53  26, 47, 99, 66, 56, 66, 99, 99,
54  99, 99, 99, 99, 99, 99, 99, 99,
55  99, 99, 99, 99, 99, 99, 99, 99,
56  99, 99, 99, 99, 99, 99, 99, 99,
57  99, 99, 99, 99, 99, 99, 99, 99,
58  99, 99, 99, 99, 99, 99, 99, 99,
59  99, 99, 99, 99, 99, 99, 99, 99
60 };
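/* Note: the default quantizers above are stored in zig-zag scan order, which
 * is the order jpeg_create_header() copies them into the DQT segment; they
 * appear to correspond to the example luminance and chrominance tables of the
 * JPEG specification (also used by RFC 2435, Appendix A). */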

static PayloadContext *jpeg_new_context(void)
{
    return av_mallocz(sizeof(PayloadContext));
}

static inline void free_frame_if_needed(PayloadContext *jpeg)
{
    if (jpeg->frame) {
        uint8_t *p;
        avio_close_dyn_buf(jpeg->frame, &p);
        av_free(p);
        jpeg->frame = NULL;
    }
}

static void jpeg_free_context(PayloadContext *jpeg)
{
    free_frame_if_needed(jpeg);
    av_free(jpeg);
}

static int jpeg_create_huffman_table(PutByteContext *p, int table_class,
                                     int table_id, const uint8_t *bits_table,
                                     const uint8_t *value_table)
{
    int i, n = 0;

    bytestream2_put_byte(p, table_class << 4 | table_id);

    for (i = 1; i <= 16; i++) {
        n += bits_table[i];
        bytestream2_put_byte(p, bits_table[i]);
    }

    for (i = 0; i < n; i++) {
        bytestream2_put_byte(p, value_table[i]);
    }
    return n + 17;
}
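/* For reference, the bytes written above form the body of a DHT segment: one
 * byte combining the table class (0 = DC, 1 = AC) and table id, sixteen bytes
 * giving the number of codes of each length 1..16, and then the
 * n = L1 + ... + L16 symbol values, hence the n + 17 bytes reported to the
 * caller (the two-byte segment length itself is written by the caller). */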

static void jpeg_put_marker(PutByteContext *pbc, int code)
{
    bytestream2_put_byte(pbc, 0xff);
    bytestream2_put_byte(pbc, code);
}

static int jpeg_create_header(uint8_t *buf, int size, uint32_t type, uint32_t w,
                              uint32_t h, const uint8_t *qtable, int nb_qtable)
{
    PutByteContext pbc;
    uint8_t *dht_size_ptr;
    int dht_size, i;

    bytestream2_init_writer(&pbc, buf, size);

    /* Convert from blocks to pixels. */
    w <<= 3;
    h <<= 3;

    /* SOI */
    jpeg_put_marker(&pbc, SOI);

    /* JFIF header */
    jpeg_put_marker(&pbc, APP0);
    bytestream2_put_be16(&pbc, 16);           /* segment length */
    bytestream2_put_buffer(&pbc, "JFIF", 5);  /* identifier, including the trailing '\0' */
    bytestream2_put_be16(&pbc, 0x0201);       /* version */
    bytestream2_put_byte(&pbc, 0);            /* density units: none */
    bytestream2_put_be16(&pbc, 1);            /* X density */
    bytestream2_put_be16(&pbc, 1);            /* Y density */
    bytestream2_put_byte(&pbc, 0);            /* thumbnail width */
    bytestream2_put_byte(&pbc, 0);            /* thumbnail height */

    /* DQT */
    jpeg_put_marker(&pbc, DQT);
    bytestream2_put_be16(&pbc, 2 + nb_qtable * (1 + 64));

    for (i = 0; i < nb_qtable; i++) {
        bytestream2_put_byte(&pbc, i);

        /* Each table is an array of 64 values given in zig-zag
         * order, identical to the format used in a JFIF DQT
         * marker segment. */
        bytestream2_put_buffer(&pbc, qtable + 64 * i, 64);
    }

    /* DHT */
    jpeg_put_marker(&pbc, DHT);
    dht_size_ptr = pbc.buffer;
    bytestream2_put_be16(&pbc, 0);

    dht_size  = 2;
    dht_size += jpeg_create_huffman_table(&pbc, 0, 0, avpriv_mjpeg_bits_dc_luminance,
                                          avpriv_mjpeg_val_dc);
    dht_size += jpeg_create_huffman_table(&pbc, 0, 1, avpriv_mjpeg_bits_dc_chrominance,
                                          avpriv_mjpeg_val_dc);
    dht_size += jpeg_create_huffman_table(&pbc, 1, 0, avpriv_mjpeg_bits_ac_luminance,
                                          avpriv_mjpeg_val_ac_luminance);
    dht_size += jpeg_create_huffman_table(&pbc, 1, 1, avpriv_mjpeg_bits_ac_chrominance,
                                          avpriv_mjpeg_val_ac_chrominance);
    AV_WB16(dht_size_ptr, dht_size);

    /* SOF0 */
    jpeg_put_marker(&pbc, SOF0);
    bytestream2_put_be16(&pbc, 17); /* size */
    bytestream2_put_byte(&pbc, 8);  /* bits per component */
    bytestream2_put_be16(&pbc, h);
    bytestream2_put_be16(&pbc, w);
    bytestream2_put_byte(&pbc, 3);  /* number of components */
    bytestream2_put_byte(&pbc, 1);  /* component number */
    bytestream2_put_byte(&pbc, (2 << 4) | (type ? 2 : 1)); /* hsample/vsample */
    bytestream2_put_byte(&pbc, 0);  /* matrix number */
    bytestream2_put_byte(&pbc, 2);  /* component number */
    bytestream2_put_byte(&pbc, 1 << 4 | 1); /* hsample/vsample */
    bytestream2_put_byte(&pbc, nb_qtable == 2 ? 1 : 0); /* matrix number */
    bytestream2_put_byte(&pbc, 3);  /* component number */
    bytestream2_put_byte(&pbc, 1 << 4 | 1); /* hsample/vsample */
    bytestream2_put_byte(&pbc, nb_qtable == 2 ? 1 : 0); /* matrix number */

    /* SOS */
    jpeg_put_marker(&pbc, SOS);
    bytestream2_put_be16(&pbc, 12); /* size */
    bytestream2_put_byte(&pbc, 3);  /* number of components */
    bytestream2_put_byte(&pbc, 1);  /* component number */
    bytestream2_put_byte(&pbc, 0);  /* DC/AC huffman table selectors */
    bytestream2_put_byte(&pbc, 2);  /* component number */
    bytestream2_put_byte(&pbc, 17); /* DC/AC huffman table selectors */
    bytestream2_put_byte(&pbc, 3);  /* component number */
    bytestream2_put_byte(&pbc, 17); /* DC/AC huffman table selectors */
    bytestream2_put_byte(&pbc, 0);  /* start of spectral selection */
    bytestream2_put_byte(&pbc, 63); /* end of spectral selection */
    bytestream2_put_byte(&pbc, 0);  /* successive approximation */

    /* Return the length in bytes of the JPEG header. */
    return bytestream2_tell_p(&pbc);
}
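/* Worked example, derived from the code above: a main header with type = 1,
 * width = 80 and height = 60 (both in 8-pixel blocks) produces a 640x480
 * SOF0 with 2x2 (4:2:0) chroma subsampling, while type = 0 selects 2x1
 * (4:2:2); when two quantization tables are present the chroma components
 * reference table 1, otherwise all three components share table 0. */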

static void create_default_qtables(uint8_t *qtables, uint8_t q)
{
    int factor = q;
    int i;

    factor = av_clip(q, 1, 99);

    if (q < 50)
        q = 5000 / factor;
    else
        q = 200 - factor * 2;

    for (i = 0; i < 128; i++) {
        int val = (default_quantizers[i] * q + 50) / 100;

        /* Limit the quantizers to 1 <= q <= 255. */
        val = av_clip(val, 1, 255);
        qtables[i] = val;
    }
}
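/* Worked example of the scaling above: q = 25 gives a scale of 5000 / 25 = 200,
 * doubling every default quantizer (16 becomes (16 * 200 + 50) / 100 = 32);
 * q = 75 gives 200 - 150 = 50, roughly halving them (16 becomes 8); and
 * q = 50 gives a scale of 100, leaving the default tables unchanged. */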

static int jpeg_parse_packet(AVFormatContext *ctx, PayloadContext *jpeg,
                             AVStream *st, AVPacket *pkt, uint32_t *timestamp,
                             const uint8_t *buf, int len, uint16_t seq,
                             int flags)
{
    uint8_t type, q, width, height;
    const uint8_t *qtables = NULL;
    uint16_t qtable_len;
    uint32_t off;
    int ret;

    if (len < 8) {
        av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
        return AVERROR_INVALIDDATA;
    }

    /* Parse the main JPEG header. */
    off    = AV_RB24(buf + 1);  /* fragment byte offset */
    type   = AV_RB8(buf + 4);   /* id of jpeg decoder params */
    q      = AV_RB8(buf + 5);   /* quantization factor (or table id) */
    width  = AV_RB8(buf + 6);   /* frame width in 8 pixel blocks */
    height = AV_RB8(buf + 7);   /* frame height in 8 pixel blocks */
    buf += 8;
    len -= 8;
244 
245  /* Parse the restart marker header. */
246  if (type > 63) {
247  av_log(ctx, AV_LOG_ERROR,
248  "Unimplemented RTP/JPEG restart marker header.\n");
249  return AVERROR_PATCHWELCOME;
250  }
251  if (type > 1) {
252  av_log(ctx, AV_LOG_ERROR, "Unimplemented RTP/JPEG type %d\n", type);
253  return AVERROR_PATCHWELCOME;
254  }
255 
256  /* Parse the quantization table header. */
257  if (off == 0) {
258  /* Start of JPEG data packet. */
259  uint8_t new_qtables[128];
260  uint8_t hdr[1024];
261 
262  if (q > 127) {
263  uint8_t precision;
264  if (len < 4) {
265  av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
266  return AVERROR_INVALIDDATA;
267  }
268 
269  /* The first byte is reserved for future use. */
270  precision = AV_RB8(buf + 1); /* size of coefficients */
271  qtable_len = AV_RB16(buf + 2); /* length in bytes */
272  buf += 4;
273  len -= 4;
274 
275  if (precision)
276  av_log(ctx, AV_LOG_WARNING, "Only 8-bit precision is supported.\n");
277 
278  if (qtable_len > 0) {
279  if (len < qtable_len) {
280  av_log(ctx, AV_LOG_ERROR, "Too short RTP/JPEG packet.\n");
281  return AVERROR_INVALIDDATA;
282  }
283  qtables = buf;
284  buf += qtable_len;
285  len -= qtable_len;
286  if (q < 255) {
287  if (jpeg->qtables_len[q - 128] &&
288  (jpeg->qtables_len[q - 128] != qtable_len ||
289  memcmp(qtables, &jpeg->qtables[q - 128][0], qtable_len))) {
290  av_log(ctx, AV_LOG_WARNING,
291  "Quantization tables for q=%d changed\n", q);
292  } else if (!jpeg->qtables_len[q - 128] && qtable_len <= 128) {
293  memcpy(&jpeg->qtables[q - 128][0], qtables,
294  qtable_len);
295  jpeg->qtables_len[q - 128] = qtable_len;
296  }
297  }
298  } else {
299  if (q == 255) {
300  av_log(ctx, AV_LOG_ERROR,
301  "Invalid RTP/JPEG packet. Quantization tables not found.\n");
302  return AVERROR_INVALIDDATA;
303  }
304  if (!jpeg->qtables_len[q - 128]) {
305  av_log(ctx, AV_LOG_ERROR,
306  "No quantization tables known for q=%d yet.\n", q);
307  return AVERROR_INVALIDDATA;
308  }
309  qtables = &jpeg->qtables[q - 128][0];
310  qtable_len = jpeg->qtables_len[q - 128];
311  }
312  } else { /* q <= 127 */
313  if (q == 0 || q > 99) {
314  av_log(ctx, AV_LOG_ERROR, "Reserved q value %d\n", q);
315  return AVERROR_INVALIDDATA;
316  }
317  create_default_qtables(new_qtables, q);
318  qtables = new_qtables;
319  qtable_len = sizeof(new_qtables);
320  }
321 
        /* Skip the current frame if the end packet
         * has been lost somewhere. */
        free_frame_if_needed(jpeg);

        if ((ret = avio_open_dyn_buf(&jpeg->frame)) < 0)
            return ret;
        jpeg->timestamp = *timestamp;

        /* Generate a frame and scan headers that can be prepended to the
         * RTP/JPEG data payload to produce a JPEG compressed image in
         * interchange format. */
        jpeg->hdr_size = jpeg_create_header(hdr, sizeof(hdr), type, width,
                                            height, qtables,
                                            qtable_len / 64);

        /* Copy JPEG header to frame buffer. */
        avio_write(jpeg->frame, hdr, jpeg->hdr_size);
    }

    if (!jpeg->frame) {
        av_log(ctx, AV_LOG_ERROR,
               "Received packet without a start chunk; dropping frame.\n");
        return AVERROR(EAGAIN);
    }

    if (jpeg->timestamp != *timestamp) {
        /* Skip the current frame if timestamp is incorrect.
         * A start packet has been lost somewhere. */
        free_frame_if_needed(jpeg);
        av_log(ctx, AV_LOG_ERROR, "RTP timestamps don't match.\n");
        return AVERROR_INVALIDDATA;
    }

    if (off != avio_tell(jpeg->frame) - jpeg->hdr_size) {
        av_log(ctx, AV_LOG_ERROR,
               "Missing packets; dropping frame.\n");
        return AVERROR(EAGAIN);
    }

    /* Copy data to frame buffer. */
    avio_write(jpeg->frame, buf, len);

    if (flags & RTP_FLAG_MARKER) {
        /* End of JPEG data packet. */
        uint8_t buf[2] = { 0xff, EOI };

        /* Put EOI marker. */
        avio_write(jpeg->frame, buf, sizeof(buf));

        /* Prepare the JPEG packet. */
        if ((ret = ff_rtp_finalize_packet(pkt, &jpeg->frame, st->index)) < 0) {
            av_log(ctx, AV_LOG_ERROR,
                   "Error occurred when getting frame buffer.\n");
            return ret;
        }

        return 0;
    }

    return AVERROR(EAGAIN);
}

RTPDynamicProtocolHandler ff_jpeg_dynamic_handler = {
    .enc_name          = "JPEG",
    .codec_type        = AVMEDIA_TYPE_VIDEO,
    .codec_id          = AV_CODEC_ID_MJPEG,
    .alloc             = jpeg_new_context,
    .free              = jpeg_free_context,
    .parse_packet      = jpeg_parse_packet,
    .static_payload_id = 26,
};
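
For context, applications do not call this depacketizer directly: the handler above is registered with libavformat's RTP demuxer and is selected either by the static RTP payload type 26 or by an SDP rtpmap entry naming "JPEG". Below is a minimal, hypothetical sketch of how it is typically exercised through the public libavformat API; the SDP file name is illustrative, error handling is kept to a minimum, the packet-freeing call depends on the libavformat version, and newer builds may additionally require a protocol whitelist option when opening SDP input.

#include <libavformat/avformat.h>

/* Sketch: read an RTP/MJPEG session described by an SDP file. Each packet
 * returned by av_read_frame() is one complete JPEG image, i.e. the header
 * built by jpeg_create_header() followed by the reassembled scan data. */
int read_rtp_mjpeg(const char *sdp_path)
{
    AVFormatContext *fmt = NULL;
    AVPacket pkt;
    int ret;

    av_register_all();
    avformat_network_init();

    if ((ret = avformat_open_input(&fmt, sdp_path, NULL, NULL)) < 0)
        return ret;
    if ((ret = avformat_find_stream_info(fmt, NULL)) < 0)
        goto end;

    while ((ret = av_read_frame(fmt, &pkt)) >= 0) {
        /* ... hand pkt to an MJPEG decoder here ... */
        av_free_packet(&pkt); /* av_packet_unref() on newer libavformat */
    }

end:
    avformat_close_input(&fmt);
    return (ret < 0 && ret != AVERROR_EOF) ? ret : 0;
}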