truemotion2.c
/*
 * Duck/ON2 TrueMotion 2 Decoder
 * Copyright (c) 2005 Konstantin Shishkov
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Duck TrueMotion2 decoder.
 */

#include "avcodec.h"
#include "bytestream.h"
#include "get_bits.h"
#include "dsputil.h"
#include "internal.h"

#define TM2_ESCAPE 0x80000000
#define TM2_DELTAS 64

/* Huffman-coded streams of different types of blocks */
enum TM2_STREAMS {
    TM2_C_HI = 0,
    TM2_C_LO,
    TM2_L_HI,
    TM2_L_LO,
    TM2_UPD,
    TM2_MOT,
    TM2_TYPE,
    TM2_NUM_STREAMS
};

/* Block types */
enum TM2_BLOCKS {
    TM2_HI_RES = 0,
    TM2_MED_RES,
    TM2_LOW_RES,
    TM2_NULL_RES,
    TM2_UPDATE,
    TM2_STILL,
    TM2_MOTION
};

typedef struct TM2Context {
    AVCodecContext *avctx;
    AVFrame pic;

    GetBitContext gb;
    DSPContext dsp;

    uint8_t *buffer;
    int buffer_size;

    /* TM2 streams */
    int *tokens[TM2_NUM_STREAMS];
    int tok_lens[TM2_NUM_STREAMS];
    int tok_ptrs[TM2_NUM_STREAMS];
    int deltas[TM2_NUM_STREAMS][TM2_DELTAS];
    /* for blocks decoding */
    int D[4];
    int CD[4];
    int *last;
    int *clast;

    /* data for current and previous frame */
    int *Y1_base, *U1_base, *V1_base, *Y2_base, *U2_base, *V2_base;
    int *Y1, *U1, *V1, *Y2, *U2, *V2;
    int y_stride, uv_stride;
    int cur;
} TM2Context;

/**
 * Huffman codes for each of the streams
 */
typedef struct TM2Codes {
    VLC vlc;     ///< table for FFmpeg bitstream reader
    int bits;
    int *recode; ///< table for converting from code indexes to values
    int length;
} TM2Codes;

/**
 * structure for gathering Huffman codes information
 */
typedef struct TM2Huff {
    int val_bits;   ///< length of literal
    int max_bits;   ///< maximum length of code
    int min_bits;   ///< minimum length of code
    int nodes;      ///< total number of nodes in tree
    int num;        ///< current number filled
    int max_num;    ///< total number of codes
    int *nums;      ///< literals
    uint32_t *bits; ///< codes
    int *lens;      ///< codelengths
} TM2Huff;

static int tm2_read_tree(TM2Context *ctx, uint32_t prefix, int length, TM2Huff *huff)
{
    int ret;
    if (length > huff->max_bits) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Tree exceeded its given depth (%i)\n",
               huff->max_bits);
        return AVERROR_INVALIDDATA;
    }

    if (!get_bits1(&ctx->gb)) { /* literal */
        if (length == 0) {
            length = 1;
        }
        if (huff->num >= huff->max_num) {
            av_log(ctx->avctx, AV_LOG_DEBUG, "Too many literals\n");
            return AVERROR_INVALIDDATA;
        }
        huff->nums[huff->num] = get_bits_long(&ctx->gb, huff->val_bits);
        huff->bits[huff->num] = prefix;
        huff->lens[huff->num] = length;
        huff->num++;
        return 0;
    } else { /* non-terminal node */
        if ((ret = tm2_read_tree(ctx, prefix << 1, length + 1, huff)) < 0)
            return ret;
        if ((ret = tm2_read_tree(ctx, (prefix << 1) | 1, length + 1, huff)) < 0)
            return ret;
    }
    return 0;
}

static int tm2_build_huff_table(TM2Context *ctx, TM2Codes *code)
{
    TM2Huff huff;
    int res = 0;

    huff.val_bits = get_bits(&ctx->gb, 5);
    huff.max_bits = get_bits(&ctx->gb, 5);
    huff.min_bits = get_bits(&ctx->gb, 5);
    huff.nodes    = get_bits_long(&ctx->gb, 17);
    huff.num      = 0;

    /* check for correct codes parameters */
    if ((huff.val_bits < 1) || (huff.val_bits > 32) ||
        (huff.max_bits < 0) || (huff.max_bits > 25)) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Incorrect tree parameters - literal "
               "length: %i, max code length: %i\n", huff.val_bits, huff.max_bits);
        return AVERROR_INVALIDDATA;
    }
    if ((huff.nodes <= 0) || (huff.nodes > 0x10000)) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Incorrect number of Huffman tree "
               "nodes: %i\n", huff.nodes);
        return AVERROR_INVALIDDATA;
    }
    /* one-node tree */
    if (huff.max_bits == 0)
        huff.max_bits = 1;

    /* allocate space for codes - it is exactly ceil(nodes / 2) entries */
    huff.max_num = (huff.nodes + 1) >> 1;
    huff.nums    = av_mallocz(huff.max_num * sizeof(int));
    huff.bits    = av_mallocz(huff.max_num * sizeof(uint32_t));
    huff.lens    = av_mallocz(huff.max_num * sizeof(int));

    res = tm2_read_tree(ctx, 0, 0, &huff);

    if (huff.num != huff.max_num) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Got less codes than expected: %i of %i\n",
               huff.num, huff.max_num);
        res = AVERROR_INVALIDDATA;
    }

    /* convert codes to vlc_table */
    if (res >= 0) {
        int i;

        res = init_vlc(&code->vlc, huff.max_bits, huff.max_num,
                       huff.lens, sizeof(int), sizeof(int),
                       huff.bits, sizeof(uint32_t), sizeof(uint32_t), 0);
        if (res < 0)
            av_log(ctx->avctx, AV_LOG_ERROR, "Cannot build VLC table\n");
        else {
            code->bits   = huff.max_bits;
            code->length = huff.max_num;
            code->recode = av_malloc(code->length * sizeof(int));
            for (i = 0; i < code->length; i++)
                code->recode[i] = huff.nums[i];
        }
    }
    /* free allocated memory */
    av_free(huff.nums);
    av_free(huff.bits);
    av_free(huff.lens);

    return res;
}

static void tm2_free_codes(TM2Codes *code)
{
    av_free(code->recode);
    if (code->vlc.table)
        ff_free_vlc(&code->vlc);
}

static inline int tm2_get_token(GetBitContext *gb, TM2Codes *code)
{
    int val;
    val = get_vlc2(gb, code->vlc.table, code->bits, 1);
    if (val < 0)
        return -1;
    return code->recode[val];
}

#define TM2_OLD_HEADER_MAGIC 0x00000100
#define TM2_NEW_HEADER_MAGIC 0x00000101

static inline int tm2_read_header(TM2Context *ctx, const uint8_t *buf)
{
    uint32_t magic = AV_RL32(buf);

    switch (magic) {
    case TM2_OLD_HEADER_MAGIC:
        avpriv_request_sample(ctx->avctx, "Old TM2 header");
        return 0;
    case TM2_NEW_HEADER_MAGIC:
        return 0;
    default:
        av_log(ctx->avctx, AV_LOG_ERROR, "Not a TM2 header: 0x%08X\n", magic);
        return AVERROR_INVALIDDATA;
    }
}

static int tm2_read_deltas(TM2Context *ctx, int stream_id)
{
    int d, mb;
    int i, v;

    d  = get_bits(&ctx->gb, 9);
    mb = get_bits(&ctx->gb, 5);

    if ((d < 1) || (d > TM2_DELTAS) || (mb < 1) || (mb > 32)) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Incorrect delta table: %i deltas x %i bits\n", d, mb);
        return AVERROR_INVALIDDATA;
    }

    for (i = 0; i < d; i++) {
        v = get_bits_long(&ctx->gb, mb);
        if (v & (1 << (mb - 1)))
            ctx->deltas[stream_id][i] = v - (1 << mb);
        else
            ctx->deltas[stream_id][i] = v;
    }
    for (; i < TM2_DELTAS; i++)
        ctx->deltas[stream_id][i] = 0;

    return 0;
}

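/* Each of the seven coded streams appears to be stored as a dword-aligned
 * chunk: a length in dwords, a token count (whose low bit flags an optional
 * signed delta table), a field unused by the decoder, a Huffman tree
 * description and finally the Huffman-coded tokens themselves.
 * tm2_read_stream() parses one such chunk and returns the number of bytes
 * consumed, or a negative error code. */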
static int tm2_read_stream(TM2Context *ctx, const uint8_t *buf, int stream_id, int buf_size)
{
    int i, ret;
    int skip = 0;
    int len, toks, pos;
    TM2Codes codes;
    GetByteContext gb;

    if (buf_size < 4) {
        av_log(ctx->avctx, AV_LOG_ERROR, "not enough space for len left\n");
        return AVERROR_INVALIDDATA;
    }

    /* get stream length in dwords */
    bytestream2_init(&gb, buf, buf_size);
    len  = bytestream2_get_be32(&gb);
    skip = len * 4 + 4;

    if (len == 0)
        return 4;

    if (len >= INT_MAX / 4 - 1 || len < 0 || skip > buf_size) {
        av_log(ctx->avctx, AV_LOG_ERROR, "invalid stream size\n");
        return AVERROR_INVALIDDATA;
    }

    toks = bytestream2_get_be32(&gb);
    if (toks & 1) {
        len = bytestream2_get_be32(&gb);
        if (len == TM2_ESCAPE) {
            len = bytestream2_get_be32(&gb);
        }
        if (len > 0) {
            pos = bytestream2_tell(&gb);
            if (skip <= pos)
                return AVERROR_INVALIDDATA;
            init_get_bits(&ctx->gb, buf + pos, (skip - pos) * 8);
            if ((ret = tm2_read_deltas(ctx, stream_id)) < 0)
                return ret;
            bytestream2_skip(&gb, ((get_bits_count(&ctx->gb) + 31) >> 5) << 2);
        }
    }
    /* skip unused fields */
    len = bytestream2_get_be32(&gb);
    if (len == TM2_ESCAPE) { /* some unknown length - could be escaped too */
        bytestream2_skip(&gb, 8); /* unused by decoder */
    } else {
        bytestream2_skip(&gb, 4); /* unused by decoder */
    }

    pos = bytestream2_tell(&gb);
    if (skip <= pos)
        return AVERROR_INVALIDDATA;
    init_get_bits(&ctx->gb, buf + pos, (skip - pos) * 8);
    if ((ret = tm2_build_huff_table(ctx, &codes)) < 0)
        return ret;
    bytestream2_skip(&gb, ((get_bits_count(&ctx->gb) + 31) >> 5) << 2);

    toks >>= 1;
    /* check if we have sane number of tokens */
    if ((toks < 0) || (toks > 0xFFFFFF)) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Incorrect number of tokens: %i\n", toks);
        tm2_free_codes(&codes);
        return AVERROR_INVALIDDATA;
    }
    ctx->tokens[stream_id] = av_realloc(ctx->tokens[stream_id], toks * sizeof(int));
    ctx->tok_lens[stream_id] = toks;
    len = bytestream2_get_be32(&gb);
    if (len > 0) {
        pos = bytestream2_tell(&gb);
        if (skip <= pos)
            return AVERROR_INVALIDDATA;
        init_get_bits(&ctx->gb, buf + pos, (skip - pos) * 8);
        for (i = 0; i < toks; i++) {
            if (get_bits_left(&ctx->gb) <= 0) {
                av_log(ctx->avctx, AV_LOG_ERROR, "Incorrect number of tokens: %i\n", toks);
                return AVERROR_INVALIDDATA;
            }
            ctx->tokens[stream_id][i] = tm2_get_token(&ctx->gb, &codes);
            if ((stream_id <= TM2_MOT && ctx->tokens[stream_id][i] >= TM2_DELTAS) ||
                ctx->tokens[stream_id][i] < 0) {
                av_log(ctx->avctx, AV_LOG_ERROR, "Invalid delta token index %d for type %d, n=%d\n",
                       ctx->tokens[stream_id][i], stream_id, i);
                return AVERROR_INVALIDDATA;
            }
        }
    } else {
        for (i = 0; i < toks; i++) {
            ctx->tokens[stream_id][i] = codes.recode[0];
            if (stream_id <= TM2_MOT && ctx->tokens[stream_id][i] >= TM2_DELTAS) {
                av_log(ctx->avctx, AV_LOG_ERROR, "Invalid delta token index %d for type %d, n=%d\n",
                       ctx->tokens[stream_id][i], stream_id, i);
                return AVERROR_INVALIDDATA;
            }
        }
    }
    tm2_free_codes(&codes);

    return skip;
}

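/* Fetch the next token from one of the decoded streams.  For the delta-coded
 * streams (TM2_C_HI .. TM2_MOT) a token is an index into the per-stream delta
 * table, so the delta value is returned; for the remaining streams the raw
 * token value itself is returned. */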
static inline int GET_TOK(TM2Context *ctx, int type)
{
    if (ctx->tok_ptrs[type] >= ctx->tok_lens[type]) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Read token from stream %i out of bounds (%i>=%i)\n", type, ctx->tok_ptrs[type], ctx->tok_lens[type]);
        return 0;
    }
    if (type <= TM2_MOT) {
        if (ctx->tokens[type][ctx->tok_ptrs[type]] >= TM2_DELTAS) {
            av_log(ctx->avctx, AV_LOG_ERROR, "token %d is too large\n", ctx->tokens[type][ctx->tok_ptrs[type]]);
            return 0;
        }
        return ctx->deltas[type][ctx->tokens[type][ctx->tok_ptrs[type]++]];
    }
    return ctx->tokens[type][ctx->tok_ptrs[type]++];
}

/* blocks decoding routines */

/* common Y, U, V pointers initialisation */
#define TM2_INIT_POINTERS() \
    int *last, *clast; \
    int *Y, *U, *V;\
    int Ystride, Ustride, Vstride;\
\
    Ystride = ctx->y_stride;\
    Vstride = ctx->uv_stride;\
    Ustride = ctx->uv_stride;\
    Y = (ctx->cur?ctx->Y2:ctx->Y1) + by * 4 * Ystride + bx * 4;\
    V = (ctx->cur?ctx->V2:ctx->V1) + by * 2 * Vstride + bx * 2;\
    U = (ctx->cur?ctx->U2:ctx->U1) + by * 2 * Ustride + bx * 2;\
    last = ctx->last + bx * 4;\
    clast = ctx->clast + bx * 4;

#define TM2_INIT_POINTERS_2() \
    int *Yo, *Uo, *Vo;\
    int oYstride, oUstride, oVstride;\
\
    TM2_INIT_POINTERS();\
    oYstride = Ystride;\
    oVstride = Vstride;\
    oUstride = Ustride;\
    Yo = (ctx->cur?ctx->Y1:ctx->Y2) + by * 4 * oYstride + bx * 4;\
    Vo = (ctx->cur?ctx->V1:ctx->V2) + by * 2 * oVstride + bx * 2;\
    Uo = (ctx->cur?ctx->U1:ctx->U2) + by * 2 * oUstride + bx * 2;

/* recalculate last and delta values for next blocks */
#define TM2_RECALC_BLOCK(CHR, stride, last, CD) {\
    CD[0] = CHR[1] - last[1];\
    CD[1] = (int)CHR[stride + 1] - (int)CHR[1];\
    last[0] = (int)CHR[stride + 0];\
    last[1] = (int)CHR[stride + 1];}

/* common operations - add deltas to 4x4 block of luma or 2x2 blocks of chroma */
static inline void tm2_apply_deltas(TM2Context *ctx, int* Y, int stride, int *deltas, int *last)
{
    int ct, d;
    int i, j;

    for (j = 0; j < 4; j++) {
        ct = ctx->D[j];
        for (i = 0; i < 4; i++) {
            d = deltas[i + j * 4];
            ct += d;
            last[i] += ct;
            Y[i] = av_clip_uint8(last[i]);
        }
        Y += stride;
        ctx->D[j] = ct;
    }
}

static inline void tm2_high_chroma(int *data, int stride, int *last, int *CD, int *deltas)
{
    int i, j;
    for (j = 0; j < 2; j++) {
        for (i = 0; i < 2; i++) {
            CD[j] += deltas[i + j * 2];
            last[i] += CD[j];
            data[i] = last[i];
        }
        data += stride;
    }
}

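/* Low-resolution chroma codes a single delta per 2x2 block; the code below
 * appears to even out the two accumulated column deltas (CD) and to predict
 * the left "last" value from the neighbouring block before reusing the
 * high-resolution routine above. */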
static inline void tm2_low_chroma(int *data, int stride, int *clast, int *CD, int *deltas, int bx)
{
    int t;
    int l;
    int prev;

    if (bx > 0)
        prev = clast[-3];
    else
        prev = 0;
    t = (CD[0] + CD[1]) >> 1;
    l = (prev - CD[0] - CD[1] + clast[1]) >> 1;
    CD[1] = CD[0] + CD[1] - t;
    CD[0] = t;
    clast[0] = l;

    tm2_high_chroma(data, stride, clast, CD, deltas);
}

static inline void tm2_hi_res_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i;
    int deltas[16];
    TM2_INIT_POINTERS();

    /* hi-res chroma */
    for (i = 0; i < 4; i++) {
        deltas[i] = GET_TOK(ctx, TM2_C_HI);
        deltas[i + 4] = GET_TOK(ctx, TM2_C_HI);
    }
    tm2_high_chroma(U, Ustride, clast, ctx->CD, deltas);
    tm2_high_chroma(V, Vstride, clast + 2, ctx->CD + 2, deltas + 4);

    /* hi-res luma */
    for (i = 0; i < 16; i++)
        deltas[i] = GET_TOK(ctx, TM2_L_HI);

    tm2_apply_deltas(ctx, Y, Ystride, deltas, last);
}

static inline void tm2_med_res_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i;
    int deltas[16];
    TM2_INIT_POINTERS();

    /* low-res chroma */
    deltas[0] = GET_TOK(ctx, TM2_C_LO);
    deltas[1] = deltas[2] = deltas[3] = 0;
    tm2_low_chroma(U, Ustride, clast, ctx->CD, deltas, bx);

    deltas[0] = GET_TOK(ctx, TM2_C_LO);
    deltas[1] = deltas[2] = deltas[3] = 0;
    tm2_low_chroma(V, Vstride, clast + 2, ctx->CD + 2, deltas, bx);

    /* hi-res luma */
    for (i = 0; i < 16; i++)
        deltas[i] = GET_TOK(ctx, TM2_L_HI);

    tm2_apply_deltas(ctx, Y, Ystride, deltas, last);
}

static inline void tm2_low_res_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i;
    int t1, t2;
    int deltas[16];
    TM2_INIT_POINTERS();

    /* low-res chroma */
    deltas[0] = GET_TOK(ctx, TM2_C_LO);
    deltas[1] = deltas[2] = deltas[3] = 0;
    tm2_low_chroma(U, Ustride, clast, ctx->CD, deltas, bx);

    deltas[0] = GET_TOK(ctx, TM2_C_LO);
    deltas[1] = deltas[2] = deltas[3] = 0;
    tm2_low_chroma(V, Vstride, clast + 2, ctx->CD + 2, deltas, bx);

    /* low-res luma */
    for (i = 0; i < 16; i++)
        deltas[i] = 0;

    deltas[ 0] = GET_TOK(ctx, TM2_L_LO);
    deltas[ 2] = GET_TOK(ctx, TM2_L_LO);
    deltas[ 8] = GET_TOK(ctx, TM2_L_LO);
    deltas[10] = GET_TOK(ctx, TM2_L_LO);

    if (bx > 0)
        last[0] = (last[-1] - ctx->D[0] - ctx->D[1] - ctx->D[2] - ctx->D[3] + last[1]) >> 1;
    else
        last[0] = (last[1] - ctx->D[0] - ctx->D[1] - ctx->D[2] - ctx->D[3]) >> 1;
    last[2] = (last[1] + last[3]) >> 1;

    t1 = ctx->D[0] + ctx->D[1];
    ctx->D[0] = t1 >> 1;
    ctx->D[1] = t1 - (t1 >> 1);
    t2 = ctx->D[2] + ctx->D[3];
    ctx->D[2] = t2 >> 1;
    ctx->D[3] = t2 - (t2 >> 1);

    tm2_apply_deltas(ctx, Y, Ystride, deltas, last);
}

static inline void tm2_null_res_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i;
    int ct;
    int left, right, diff;
    int deltas[16];
    TM2_INIT_POINTERS();

    /* null chroma */
    deltas[0] = deltas[1] = deltas[2] = deltas[3] = 0;
    tm2_low_chroma(U, Ustride, clast, ctx->CD, deltas, bx);

    deltas[0] = deltas[1] = deltas[2] = deltas[3] = 0;
    tm2_low_chroma(V, Vstride, clast + 2, ctx->CD + 2, deltas, bx);

    /* null luma */
    for (i = 0; i < 16; i++)
        deltas[i] = 0;

    ct = ctx->D[0] + ctx->D[1] + ctx->D[2] + ctx->D[3];

    if (bx > 0)
        left = last[-1] - ct;
    else
        left = 0;

    right = last[3];
    diff = right - left;
    last[0] = left + (diff >> 2);
    last[1] = left + (diff >> 1);
    last[2] = right - (diff >> 2);
    last[3] = right;
    {
        int tp = left;

        ctx->D[0] = (tp + (ct >> 2)) - left;
        left += ctx->D[0];
        ctx->D[1] = (tp + (ct >> 1)) - left;
        left += ctx->D[1];
        ctx->D[2] = ((tp + ct) - (ct >> 2)) - left;
        left += ctx->D[2];
        ctx->D[3] = (tp + ct) - left;
    }
    tm2_apply_deltas(ctx, Y, Ystride, deltas, last);
}

static inline void tm2_still_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i, j;
    TM2_INIT_POINTERS_2();

    /* update chroma */
    for (j = 0; j < 2; j++) {
        for (i = 0; i < 2; i++) {
            U[i] = Uo[i];
            V[i] = Vo[i];
        }
        U += Ustride; V += Vstride;
        Uo += oUstride; Vo += oVstride;
    }
    U -= Ustride * 2;
    V -= Vstride * 2;
    TM2_RECALC_BLOCK(U, Ustride, clast, ctx->CD);
    TM2_RECALC_BLOCK(V, Vstride, (clast + 2), (ctx->CD + 2));

    /* update deltas */
    ctx->D[0] = Yo[3] - last[3];
    ctx->D[1] = Yo[3 + oYstride] - Yo[3];
    ctx->D[2] = Yo[3 + oYstride * 2] - Yo[3 + oYstride];
    ctx->D[3] = Yo[3 + oYstride * 3] - Yo[3 + oYstride * 2];

    for (j = 0; j < 4; j++) {
        for (i = 0; i < 4; i++) {
            Y[i] = Yo[i];
            last[i] = Yo[i];
        }
        Y += Ystride;
        Yo += oYstride;
    }
}

static inline void tm2_update_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i, j;
    int d;
    TM2_INIT_POINTERS_2();

    /* update chroma */
    for (j = 0; j < 2; j++) {
        for (i = 0; i < 2; i++) {
            U[i] = Uo[i] + GET_TOK(ctx, TM2_UPD);
            V[i] = Vo[i] + GET_TOK(ctx, TM2_UPD);
        }
        U += Ustride;
        V += Vstride;
        Uo += oUstride;
        Vo += oVstride;
    }
    U -= Ustride * 2;
    V -= Vstride * 2;
    TM2_RECALC_BLOCK(U, Ustride, clast, ctx->CD);
    TM2_RECALC_BLOCK(V, Vstride, (clast + 2), (ctx->CD + 2));

    /* update deltas */
    ctx->D[0] = Yo[3] - last[3];
    ctx->D[1] = Yo[3 + oYstride] - Yo[3];
    ctx->D[2] = Yo[3 + oYstride * 2] - Yo[3 + oYstride];
    ctx->D[3] = Yo[3 + oYstride * 3] - Yo[3 + oYstride * 2];

    for (j = 0; j < 4; j++) {
        d = last[3];
        for (i = 0; i < 4; i++) {
            Y[i] = Yo[i] + GET_TOK(ctx, TM2_UPD);
            last[i] = Y[i];
        }
        ctx->D[j] = last[3] - d;
        Y += Ystride;
        Yo += oYstride;
    }
}

static inline void tm2_motion_block(TM2Context *ctx, AVFrame *pic, int bx, int by)
{
    int i, j;
    int mx, my;
    TM2_INIT_POINTERS_2();

    mx = GET_TOK(ctx, TM2_MOT);
    my = GET_TOK(ctx, TM2_MOT);
    mx = av_clip(mx, -(bx * 4 + 4), ctx->avctx->width - bx * 4);
    my = av_clip(my, -(by * 4 + 4), ctx->avctx->height - by * 4);

    if (4 * bx + mx < 0 || 4 * by + my < 0 ||
        4 * bx + mx + 4 > ctx->avctx->width ||
        4 * by + my + 4 > ctx->avctx->height) {
        av_log(ctx->avctx, AV_LOG_ERROR, "MV out of picture\n");
        return;
    }

    Yo += my * oYstride + mx;
    Uo += (my >> 1) * oUstride + (mx >> 1);
    Vo += (my >> 1) * oVstride + (mx >> 1);

    /* copy chroma */
    for (j = 0; j < 2; j++) {
        for (i = 0; i < 2; i++) {
            U[i] = Uo[i];
            V[i] = Vo[i];
        }
        U += Ustride;
        V += Vstride;
        Uo += oUstride;
        Vo += oVstride;
    }
    U -= Ustride * 2;
    V -= Vstride * 2;
    TM2_RECALC_BLOCK(U, Ustride, clast, ctx->CD);
    TM2_RECALC_BLOCK(V, Vstride, (clast + 2), (ctx->CD + 2));

    /* copy luma */
    for (j = 0; j < 4; j++) {
        for (i = 0; i < 4; i++) {
            Y[i] = Yo[i];
        }
        Y += Ystride;
        Yo += oYstride;
    }
    /* calculate deltas */
    Y -= Ystride * 4;
    ctx->D[0] = Y[3] - last[3];
    ctx->D[1] = Y[3 + Ystride] - Y[3];
    ctx->D[2] = Y[3 + Ystride * 2] - Y[3 + Ystride];
    ctx->D[3] = Y[3 + Ystride * 3] - Y[3 + Ystride * 2];
    for (i = 0; i < 4; i++)
        last[i] = Y[i + Ystride * 3];
}

static int tm2_decode_blocks(TM2Context *ctx, AVFrame *p)
{
    int i, j;
    int w = ctx->avctx->width, h = ctx->avctx->height, bw = w >> 2, bh = h >> 2, cw = w >> 1;
    int type;
    int keyframe = 1;
    int *Y, *U, *V;
    uint8_t *dst;

    for (i = 0; i < TM2_NUM_STREAMS; i++)
        ctx->tok_ptrs[i] = 0;

    if (ctx->tok_lens[TM2_TYPE] < bw * bh) {
        av_log(ctx->avctx, AV_LOG_ERROR, "Got %i tokens for %i blocks\n", ctx->tok_lens[TM2_TYPE], bw * bh);
        return AVERROR_INVALIDDATA;
    }

    memset(ctx->last, 0, 4 * bw * sizeof(int));
    memset(ctx->clast, 0, 4 * bw * sizeof(int));

    for (j = 0; j < bh; j++) {
        memset(ctx->D, 0, 4 * sizeof(int));
        memset(ctx->CD, 0, 4 * sizeof(int));
        for (i = 0; i < bw; i++) {
            type = GET_TOK(ctx, TM2_TYPE);
            switch (type) {
            case TM2_HI_RES:
                tm2_hi_res_block(ctx, p, i, j);
                break;
            case TM2_MED_RES:
                tm2_med_res_block(ctx, p, i, j);
                break;
            case TM2_LOW_RES:
                tm2_low_res_block(ctx, p, i, j);
                break;
            case TM2_NULL_RES:
                tm2_null_res_block(ctx, p, i, j);
                break;
            case TM2_UPDATE:
                tm2_update_block(ctx, p, i, j);
                keyframe = 0;
                break;
            case TM2_STILL:
                tm2_still_block(ctx, p, i, j);
                keyframe = 0;
                break;
            case TM2_MOTION:
                tm2_motion_block(ctx, p, i, j);
                keyframe = 0;
                break;
            default:
                av_log(ctx->avctx, AV_LOG_ERROR, "Skipping unknown block type %i\n", type);
            }
        }
    }

    /* copy data from our buffer to AVFrame */
    Y = (ctx->cur?ctx->Y2:ctx->Y1);
    U = (ctx->cur?ctx->U2:ctx->U1);
    V = (ctx->cur?ctx->V2:ctx->V1);
    dst = p->data[0];
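    /* The decoder's internal Y/U/V planes are not conventional YUV: the
     * conversion below yields B = y + v, G = y and R = y + u for the
     * AV_PIX_FMT_BGR24 output, with u and v shared by horizontal pixel
     * pairs. */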
    for (j = 0; j < h; j++) {
        for (i = 0; i < w; i++) {
            int y = Y[i], u = U[i >> 1], v = V[i >> 1];
            dst[3*i+0] = av_clip_uint8(y + v);
            dst[3*i+1] = av_clip_uint8(y);
            dst[3*i+2] = av_clip_uint8(y + u);
        }

        /* horizontal edge extension */
        Y[-4] = Y[-3] = Y[-2] = Y[-1] = Y[0];
        Y[w + 3] = Y[w + 2] = Y[w + 1] = Y[w] = Y[w - 1];

        /* vertical edge extension */
        if (j == 0) {
            memcpy(Y - 4 - 1 * ctx->y_stride, Y - 4, ctx->y_stride);
            memcpy(Y - 4 - 2 * ctx->y_stride, Y - 4, ctx->y_stride);
            memcpy(Y - 4 - 3 * ctx->y_stride, Y - 4, ctx->y_stride);
            memcpy(Y - 4 - 4 * ctx->y_stride, Y - 4, ctx->y_stride);
        } else if (j == h - 1) {
            memcpy(Y - 4 + 1 * ctx->y_stride, Y - 4, ctx->y_stride);
            memcpy(Y - 4 + 2 * ctx->y_stride, Y - 4, ctx->y_stride);
            memcpy(Y - 4 + 3 * ctx->y_stride, Y - 4, ctx->y_stride);
            memcpy(Y - 4 + 4 * ctx->y_stride, Y - 4, ctx->y_stride);
        }

        Y += ctx->y_stride;
        if (j & 1) {
            /* horizontal edge extension */
            U[-2] = U[-1] = U[0];
            V[-2] = V[-1] = V[0];
            U[cw + 1] = U[cw] = U[cw - 1];
            V[cw + 1] = V[cw] = V[cw - 1];

            /* vertical edge extension */
            if (j == 1) {
                memcpy(U - 2 - 1 * ctx->uv_stride, U - 2, ctx->uv_stride);
                memcpy(V - 2 - 1 * ctx->uv_stride, V - 2, ctx->uv_stride);
                memcpy(U - 2 - 2 * ctx->uv_stride, U - 2, ctx->uv_stride);
                memcpy(V - 2 - 2 * ctx->uv_stride, V - 2, ctx->uv_stride);
            } else if (j == h - 1) {
                memcpy(U - 2 + 1 * ctx->uv_stride, U - 2, ctx->uv_stride);
                memcpy(V - 2 + 1 * ctx->uv_stride, V - 2, ctx->uv_stride);
                memcpy(U - 2 + 2 * ctx->uv_stride, U - 2, ctx->uv_stride);
                memcpy(V - 2 + 2 * ctx->uv_stride, V - 2, ctx->uv_stride);
            }

            U += ctx->uv_stride;
            V += ctx->uv_stride;
        }
        dst += p->linesize[0];
    }

    return keyframe;
}

static const int tm2_stream_order[TM2_NUM_STREAMS] = {
    TM2_C_HI, TM2_C_LO, TM2_L_HI, TM2_L_LO, TM2_UPD, TM2_MOT, TM2_TYPE
};

#define TM2_HEADER_SIZE 40

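/* A TM2 frame consists of a 40-byte header followed by the seven coded
 * streams in the order given by tm2_stream_order[]; decode_frame() below
 * byte-swaps the packet 32 bits at a time into a scratch buffer before
 * handing each stream to tm2_read_stream(). */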
static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
                        AVPacket *avpkt)
{
    TM2Context * const l = avctx->priv_data;
    const uint8_t *buf = avpkt->data;
    int buf_size = avpkt->size & ~3;
    AVFrame * const p = &l->pic;
    int offset = TM2_HEADER_SIZE;
    int i, t, ret;

    av_fast_padded_malloc(&l->buffer, &l->buffer_size, buf_size);
    if (!l->buffer) {
        av_log(avctx, AV_LOG_ERROR, "Cannot allocate temporary buffer\n");
        return AVERROR(ENOMEM);
    }

    if ((ret = ff_reget_buffer(avctx, p)) < 0)
        return ret;

    l->dsp.bswap_buf((uint32_t*)l->buffer, (const uint32_t*)buf, buf_size >> 2);

    if ((ret = tm2_read_header(l, l->buffer)) < 0) {
        return ret;
    }

    for (i = 0; i < TM2_NUM_STREAMS; i++) {
        if (offset >= buf_size) {
            av_log(avctx, AV_LOG_ERROR, "no space for tm2_read_stream\n");
            return AVERROR_INVALIDDATA;
        }

        t = tm2_read_stream(l, l->buffer + offset, tm2_stream_order[i],
                            buf_size - offset);
        if (t < 0) {
            int j = tm2_stream_order[i];
            memset(l->tokens[j], 0, sizeof(**l->tokens) * l->tok_lens[j]);
            return t;
        }
        offset += t;
    }
    p->key_frame = tm2_decode_blocks(l, p);
    if (p->key_frame)
        p->pict_type = AV_PICTURE_TYPE_I;
    else
        p->pict_type = AV_PICTURE_TYPE_P;

    l->cur = !l->cur;
    *got_frame = 1;
    ret = av_frame_ref(data, &l->pic);

    return (ret < 0) ? ret : buf_size;
}

static av_cold int decode_init(AVCodecContext *avctx)
{
    TM2Context * const l = avctx->priv_data;
    int i, w = avctx->width, h = avctx->height;

    if ((avctx->width & 3) || (avctx->height & 3)) {
        av_log(avctx, AV_LOG_ERROR, "Width and height must be multiple of 4\n");
        return AVERROR(EINVAL);
    }

    l->avctx = avctx;
    avcodec_get_frame_defaults(&l->pic);
    avctx->pix_fmt = AV_PIX_FMT_BGR24;

    ff_dsputil_init(&l->dsp, avctx);

    l->last  = av_malloc(4 * sizeof(*l->last)  * (w >> 2));
    l->clast = av_malloc(4 * sizeof(*l->clast) * (w >> 2));

    for (i = 0; i < TM2_NUM_STREAMS; i++) {
        l->tokens[i] = NULL;
        l->tok_lens[i] = 0;
    }

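    /* internal planes get a 4-sample luma (2-sample chroma) border on every
     * side so the edge extension in tm2_decode_blocks() and the clipped
     * motion vectors have room to work with */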
    w += 8;
    h += 8;
    l->Y1_base = av_mallocz(sizeof(*l->Y1_base) * w * h);
    l->Y2_base = av_mallocz(sizeof(*l->Y2_base) * w * h);
    l->y_stride = w;
    w = (w + 1) >> 1;
    h = (h + 1) >> 1;
    l->U1_base = av_mallocz(sizeof(*l->U1_base) * w * h);
    l->V1_base = av_mallocz(sizeof(*l->V1_base) * w * h);
    l->U2_base = av_mallocz(sizeof(*l->U2_base) * w * h);
    l->V2_base = av_mallocz(sizeof(*l->V2_base) * w * h);
    l->uv_stride = w;
    l->cur = 0;
    if (!l->Y1_base || !l->Y2_base || !l->U1_base ||
        !l->V1_base || !l->U2_base || !l->V2_base ||
        !l->last || !l->clast) {
        av_freep(&l->Y1_base);
        av_freep(&l->Y2_base);
        av_freep(&l->U1_base);
        av_freep(&l->U2_base);
        av_freep(&l->V1_base);
        av_freep(&l->V2_base);
        av_freep(&l->last);
        av_freep(&l->clast);
        return AVERROR(ENOMEM);
    }
    l->Y1 = l->Y1_base + l->y_stride * 4 + 4;
    l->Y2 = l->Y2_base + l->y_stride * 4 + 4;
    l->U1 = l->U1_base + l->uv_stride * 2 + 2;
    l->U2 = l->U2_base + l->uv_stride * 2 + 2;
    l->V1 = l->V1_base + l->uv_stride * 2 + 2;
    l->V2 = l->V2_base + l->uv_stride * 2 + 2;

    return 0;
}

static av_cold int decode_end(AVCodecContext *avctx)
{
    TM2Context * const l = avctx->priv_data;
    AVFrame *pic = &l->pic;
    int i;

    av_free(l->last);
    av_free(l->clast);
    for (i = 0; i < TM2_NUM_STREAMS; i++)
        av_free(l->tokens[i]);
    if (l->Y1) {
        av_free(l->Y1_base);
        av_free(l->U1_base);
        av_free(l->V1_base);
        av_free(l->Y2_base);
        av_free(l->U2_base);
        av_free(l->V2_base);
    }
    av_freep(&l->buffer);
    l->buffer_size = 0;

    av_frame_unref(pic);

    return 0;
}

995  .name = "truemotion2",
996  .type = AVMEDIA_TYPE_VIDEO,
998  .priv_data_size = sizeof(TM2Context),
999  .init = decode_init,
1000  .close = decode_end,
1001  .decode = decode_frame,
1002  .capabilities = CODEC_CAP_DR1,
1003  .long_name = NULL_IF_CONFIG_SMALL("Duck TrueMotion 2.0"),
1004 };