vf_yadif.c
Go to the documentation of this file.
1 /*
2  * Copyright (C) 2006-2011 Michael Niedermayer <michaelni@gmx.at>
3  * 2010 James Darnley <james.darnley@gmail.com>
4  *
5  * FFmpeg is free software; you can redistribute it and/or modify
6  * it under the terms of the GNU General Public License as published by
7  * the Free Software Foundation; either version 2 of the License, or
8  * (at your option) any later version.
9  *
10  * FFmpeg is distributed in the hope that it will be useful,
11  * but WITHOUT ANY WARRANTY; without even the implied warranty of
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13  * GNU General Public License for more details.
14  *
15  * You should have received a copy of the GNU General Public License along
16  * with FFmpeg; if not, write to the Free Software Foundation, Inc.,
17  * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
18  */
19 
20 #include "libavutil/avassert.h"
21 #include "libavutil/cpu.h"
22 #include "libavutil/common.h"
23 #include "libavutil/opt.h"
24 #include "libavutil/pixdesc.h"
25 #include "avfilter.h"
26 #include "formats.h"
27 #include "internal.h"
28 #include "video.h"
29 #include "yadif.h"
30 
31 #undef NDEBUG
32 #include <assert.h>
33 
/* Score the diagonal at offset (j): the sum of absolute differences of
 * three pixel pairs mirrored across the missing line.  A lower score means
 * the diagonal matches better; it then becomes the new spatial prediction.
 *
 * NOTE: the braces are intentionally left unbalanced here -- the matching
 * "}} }}" closers are supplied after the CHECK() invocations inside the
 * FILTER macro below. */
#define CHECK(j)\
    {   int score = FFABS(cur[mrefs - 1 + (j)] - cur[prefs - 1 - (j)])\
                  + FFABS(cur[mrefs  +(j)] - cur[prefs  -(j)])\
                  + FFABS(cur[mrefs + 1 + (j)] - cur[prefs + 1 - (j)]);\
        if (score < spatial_score) {\
            spatial_score= score;\
            spatial_pred= (cur[mrefs  +(j)] + cur[prefs  -(j)])>>1;\

/* Core yadif kernel shared by the 8- and 16-bit line filters.  For each
 * pixel x in [start, end) it computes:
 *   - a temporal prediction d (average of the two reference fields),
 *   - a spatial prediction along the best-matching diagonal (via CHECK),
 *   - when mode < 2, a clamp range derived from the lines two rows away,
 * then stores the spatial prediction clamped to d +/- diff and advances
 * all line pointers by one sample.
 *
 * The is_not_edge argument here controls when the code will enter a branch
 * which reads up to and including x-3 and x+3. */

#define FILTER(start, end, is_not_edge) \
    for (x = start;  x < end; x++) { \
        int c = cur[mrefs]; \
        int d = (prev2[0] + next2[0])>>1; \
        int e = cur[prefs]; \
        int temporal_diff0 = FFABS(prev2[0] - next2[0]); \
        int temporal_diff1 =(FFABS(prev[mrefs] - c) + FFABS(prev[prefs] - e) )>>1; \
        int temporal_diff2 =(FFABS(next[mrefs] - c) + FFABS(next[prefs] - e) )>>1; \
        int diff = FFMAX3(temporal_diff0 >> 1, temporal_diff1, temporal_diff2); \
        int spatial_pred = (c+e) >> 1; \
 \
        if (is_not_edge) {\
            int spatial_score = FFABS(cur[mrefs - 1] - cur[prefs - 1]) + FFABS(c-e) \
                              + FFABS(cur[mrefs + 1] - cur[prefs + 1]) - 1; \
            CHECK(-1) CHECK(-2) }} }} \
            CHECK( 1) CHECK( 2) }} }} \
        }\
 \
        if (mode < 2) { \
            int b = (prev2[2 * mrefs] + next2[2 * mrefs])>>1; \
            int f = (prev2[2 * prefs] + next2[2 * prefs])>>1; \
            int max = FFMAX3(d - e, d - c, FFMIN(b - c, f - e)); \
            int min = FFMIN3(d - e, d - c, FFMAX(b - c, f - e)); \
 \
            diff = FFMAX3(diff, min, -max); \
        } \
 \
        if (spatial_pred > d + diff) \
            spatial_pred = d + diff; \
        else if (spatial_pred < d - diff) \
            spatial_pred = d - diff; \
 \
        dst[0] = spatial_pred; \
 \
        dst++; \
        cur++; \
        prev++; \
        next++; \
        prev2++; \
        next2++; \
    }
86 
/* Filter the interior of one 8-bit line.
 *
 * dst1/prev1/cur1/next1: destination line and the three temporally
 * adjacent source lines; w: number of pixels to process; prefs/mrefs:
 * byte offsets to the line below/above within the plane; parity selects
 * which neighbour doubles as the second reference field; mode < 2
 * enables the temporal stability clamp inside FILTER. */
static void filter_line_c(void *dst1,
                          void *prev1, void *cur1, void *next1,
                          int w, int prefs, int mrefs, int parity, int mode)
{
    uint8_t *dst  = dst1;
    uint8_t *prev = prev1;
    uint8_t *cur  = cur1;
    uint8_t *next = next1;
    int x;
    /* prev2/next2 form the field used for the temporal prediction */
    uint8_t *prev2 = parity ? prev : cur ;
    uint8_t *next2 = parity ? cur  : next;

    /* The function is called with the pointers already pointing to data[3] and
     * with 6 subtracted from the width. This allows the FILTER macro to be
     * called so that it processes all the pixels normally. A constant value of
     * true for is_not_edge lets the compiler ignore the if statement. */
    FILTER(0, w, 1)
}
105 
/* Filter the 3-pixel left and right borders of an 8-bit line, where the
 * diagonal checks inside FILTER would otherwise read out of bounds.
 * Parameters are as in filter_line_c, but the pointers address the start
 * of the full-width line. */
static void filter_edges(void *dst1, void *prev1, void *cur1, void *next1,
                         int w, int prefs, int mrefs, int parity, int mode)
{
    uint8_t *dst  = dst1;
    uint8_t *prev = prev1;
    uint8_t *cur  = cur1;
    uint8_t *next = next1;
    int x;
    uint8_t *prev2 = parity ? prev : cur ;
    uint8_t *next2 = parity ? cur  : next;

    /* Only edge pixels need to be processed here. A constant value of false
     * for is_not_edge should let the compiler ignore the whole branch. */
    FILTER(0, 3, 0)

    /* re-aim all pointers at the right-hand 3-pixel border */
    dst  = (uint8_t*)dst1  + w - 3;
    prev = (uint8_t*)prev1 + w - 3;
    cur  = (uint8_t*)cur1  + w - 3;
    next = (uint8_t*)next1 + w - 3;
    prev2 = (uint8_t*)(parity ? prev : cur);
    next2 = (uint8_t*)(parity ? cur  : next);

    FILTER(w - 3, w, 0)
}
130 
131 
/* 16-bit-per-sample variant of filter_line_c (pixel formats deeper than
 * 8 bits).  prefs/mrefs arrive as byte offsets and are halved so they
 * index uint16_t elements instead. */
static void filter_line_c_16bit(void *dst1,
                                void *prev1, void *cur1, void *next1,
                                int w, int prefs, int mrefs, int parity,
                                int mode)
{
    uint16_t *dst  = dst1;
    uint16_t *prev = prev1;
    uint16_t *cur  = cur1;
    uint16_t *next = next1;
    int x;
    uint16_t *prev2 = parity ? prev : cur ;
    uint16_t *next2 = parity ? cur  : next;
    /* convert byte strides to uint16_t element strides */
    mrefs /= 2;
    prefs /= 2;

    FILTER(0, w, 1)
}
149 
/* 16-bit-per-sample variant of filter_edges; see filter_edges for the
 * border logic and filter_line_c_16bit for the stride conversion. */
static void filter_edges_16bit(void *dst1, void *prev1, void *cur1, void *next1,
                               int w, int prefs, int mrefs, int parity, int mode)
{
    uint16_t *dst  = dst1;
    uint16_t *prev = prev1;
    uint16_t *cur  = cur1;
    uint16_t *next = next1;
    int x;
    uint16_t *prev2 = parity ? prev : cur ;
    uint16_t *next2 = parity ? cur  : next;
    /* convert byte strides to uint16_t element strides */
    mrefs /= 2;
    prefs /= 2;

    FILTER(0, 3, 0)

    /* re-aim all pointers at the right-hand 3-pixel border */
    dst  = (uint16_t*)dst1  + w - 3;
    prev = (uint16_t*)prev1 + w - 3;
    cur  = (uint16_t*)cur1  + w - 3;
    next = (uint16_t*)next1 + w - 3;
    prev2 = (uint16_t*)(parity ? prev : cur);
    next2 = (uint16_t*)(parity ? cur  : next);

    FILTER(w - 3, w, 0)
}
174 
175 static void filter(AVFilterContext *ctx, AVFrame *dstpic,
176  int parity, int tff)
177 {
178  YADIFContext *yadif = ctx->priv;
179  int y, i;
180 
181  for (i = 0; i < yadif->csp->nb_components; i++) {
182  int w = dstpic->width;
183  int h = dstpic->height;
184  int refs = yadif->cur->linesize[i];
185  int df = (yadif->csp->comp[i].depth_minus1 + 8) / 8;
186  int pix_3 = 3 * df;
187 
188  if (i == 1 || i == 2) {
189  /* Why is this not part of the per-plane description thing? */
190  w >>= yadif->csp->log2_chroma_w;
191  h >>= yadif->csp->log2_chroma_h;
192  }
193 
194  /* filtering reads 3 pixels to the left/right; to avoid invalid reads,
195  * we need to call the c variant which avoids this for border pixels
196  */
197 
198  for (y = 0; y < h; y++) {
199  if ((y ^ parity) & 1) {
200  uint8_t *prev = &yadif->prev->data[i][y * refs];
201  uint8_t *cur = &yadif->cur ->data[i][y * refs];
202  uint8_t *next = &yadif->next->data[i][y * refs];
203  uint8_t *dst = &dstpic->data[i][y * dstpic->linesize[i]];
204  int mode = y == 1 || y + 2 == h ? 2 : yadif->mode;
205  yadif->filter_line(dst + pix_3, prev + pix_3, cur + pix_3,
206  next + pix_3, w - 6,
207  y + 1 < h ? refs : -refs,
208  y ? -refs : refs,
209  parity ^ tff, mode);
210  yadif->filter_edges(dst, prev, cur, next, w,
211  y + 1 < h ? refs : -refs,
212  y ? -refs : refs,
213  parity ^ tff, mode);
214  } else {
215  memcpy(&dstpic->data[i][y * dstpic->linesize[i]],
216  &yadif->cur->data[i][y * refs], w * df);
217  }
218  }
219  }
220 
221  emms_c();
222 }
223 
224 static int return_frame(AVFilterContext *ctx, int is_second)
225 {
226  YADIFContext *yadif = ctx->priv;
227  AVFilterLink *link = ctx->outputs[0];
228  int tff, ret;
229 
230  if (yadif->parity == -1) {
231  tff = yadif->cur->interlaced_frame ?
232  yadif->cur->top_field_first : 1;
233  } else {
234  tff = yadif->parity ^ 1;
235  }
236 
237  if (is_second) {
238  yadif->out = ff_get_video_buffer(link, link->w, link->h);
239  if (!yadif->out)
240  return AVERROR(ENOMEM);
241 
242  av_frame_copy_props(yadif->out, yadif->cur);
243  yadif->out->interlaced_frame = 0;
244  }
245 
246  filter(ctx, yadif->out, tff ^ !is_second, tff);
247 
248  if (is_second) {
249  int64_t cur_pts = yadif->cur->pts;
250  int64_t next_pts = yadif->next->pts;
251 
252  if (next_pts != AV_NOPTS_VALUE && cur_pts != AV_NOPTS_VALUE) {
253  yadif->out->pts = cur_pts + next_pts;
254  } else {
255  yadif->out->pts = AV_NOPTS_VALUE;
256  }
257  }
258  ret = ff_filter_frame(ctx->outputs[0], yadif->out);
259 
260  yadif->frame_pending = (yadif->mode&1) && !is_second;
261  return ret;
262 }
263 
{
    AVFilterContext *ctx = link->dst;
    YADIFContext *yadif = ctx->priv;

    av_assert0(frame);

    /* the previous frame still owes its second field; flush it first */
    if (yadif->frame_pending)
        return_frame(ctx, 1);

    /* shift the three-frame window: prev <- cur <- next <- new frame */
    if (yadif->prev)
        av_frame_free(&yadif->prev);
    yadif->prev = yadif->cur;
    yadif->cur  = yadif->next;
    yadif->next = frame;

    /* need at least two buffered frames before producing any output */
    if (!yadif->cur)
        return 0;

    /* deint=interlaced and this frame is not flagged interlaced: pass it
     * through untouched (pts doubled to match the output time base) */
    if (yadif->deint && !yadif->cur->interlaced_frame) {
        yadif->out  = av_frame_clone(yadif->cur);
        if (!yadif->out)
            return AVERROR(ENOMEM);

        av_frame_free(&yadif->prev);
        if (yadif->out->pts != AV_NOPTS_VALUE)
            yadif->out->pts *= 2;
        return ff_filter_frame(ctx->outputs[0], yadif->out);
    }

    /* very first frame: duplicate cur as prev so filtering has history */
    if (!yadif->prev &&
        !(yadif->prev = av_frame_clone(yadif->cur)))
        return AVERROR(ENOMEM);

    yadif->out = ff_get_video_buffer(ctx->outputs[0], link->w, link->h);
    if (!yadif->out)
        return AVERROR(ENOMEM);

    av_frame_copy_props(yadif->out, yadif->cur);
    yadif->out->interlaced_frame = 0;

    /* the output time base has twice the input's resolution */
    if (yadif->out->pts != AV_NOPTS_VALUE)
        yadif->out->pts *= 2;

    return return_frame(ctx, 0);
}
310 
{
    AVFilterContext *ctx = link->src;
    YADIFContext *yadif = ctx->priv;

    /* a second field from the previous frame is still owed: emit it now
     * instead of pulling new input */
    if (yadif->frame_pending) {
        return_frame(ctx, 1);
        return 0;
    }

    /* pull input until filter_frame has populated the cur slot */
    do {
        int ret;

        if (yadif->eof)
            return AVERROR_EOF;

        ret = ff_request_frame(link->src->inputs[0]);

        if (ret == AVERROR_EOF && yadif->cur) {
            /* input ended: fabricate one trailing frame by cloning the
             * last one and extrapolating its pts, so the final field
             * pair can still be deinterlaced */
            AVFrame *next = av_frame_clone(yadif->next);

            if (!next)
                return AVERROR(ENOMEM);

            next->pts = yadif->next->pts * 2 - yadif->cur->pts;

            filter_frame(link->src->inputs[0], next);
            yadif->eof = 1;
        } else if (ret < 0) {
            return ret;
        }
    } while (!yadif->cur);

    return 0;
}
346 
347 static av_cold void uninit(AVFilterContext *ctx)
348 {
349  YADIFContext *yadif = ctx->priv;
350 
351  av_frame_free(&yadif->prev);
352  av_frame_free(&yadif->cur );
353  av_frame_free(&yadif->next);
354 }
355 
357 {
358  static const enum AVPixelFormat pix_fmts[] = {
390  };
391 
393 
394  return 0;
395 }
396 
398 {
399  AVFilterContext *ctx = link->src;
400  YADIFContext *s = link->src->priv;
401 
402  link->time_base.num = link->src->inputs[0]->time_base.num;
403  link->time_base.den = link->src->inputs[0]->time_base.den * 2;
404  link->w = link->src->inputs[0]->w;
405  link->h = link->src->inputs[0]->h;
406 
407  if(s->mode&1)
408  link->frame_rate = av_mul_q(link->src->inputs[0]->frame_rate, (AVRational){2,1});
409 
410  if (link->w < 3 || link->h < 3) {
411  av_log(ctx, AV_LOG_ERROR, "Video of less than 3 columns or lines is not supported\n");
412  return AVERROR(EINVAL);
413  }
414 
415  s->csp = av_pix_fmt_desc_get(link->format);
416  if (s->csp->comp[0].depth_minus1 / 8 == 1) {
419  } else {
422  }
423 
424  if (ARCH_X86)
426 
427  return 0;
428 }
429 
430 
/* Shorthands for the AVOption table below. */
#define OFFSET(x) offsetof(YADIFContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

/* Named constant belonging to an option "unit"; min/max are unused here. */
#define CONST(name, help, val, unit) { name, help, 0, AV_OPT_TYPE_CONST, {.i64=val}, INT_MIN, INT_MAX, FLAGS, unit }
435 
/* User-visible options: interlacing mode, assumed field parity, and which
 * frames to deinterlace. */
static const AVOption yadif_options[] = {
    { "mode",   "specify the interlacing mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=YADIF_MODE_SEND_FRAME}, 0, 3, FLAGS, "mode"},
    CONST("send_frame",           "send one frame for each frame",                                     YADIF_MODE_SEND_FRAME,           "mode"),
    CONST("send_field",           "send one frame for each field",                                     YADIF_MODE_SEND_FIELD,           "mode"),
    CONST("send_frame_nospatial", "send one frame for each frame, but skip spatial interlacing check", YADIF_MODE_SEND_FRAME_NOSPATIAL, "mode"),
    CONST("send_field_nospatial", "send one frame for each field, but skip spatial interlacing check", YADIF_MODE_SEND_FIELD_NOSPATIAL, "mode"),

    { "parity", "specify the assumed picture field parity", OFFSET(parity), AV_OPT_TYPE_INT, {.i64=YADIF_PARITY_AUTO}, -1, 1, FLAGS, "parity" },
    CONST("tff",  "assume top field first",    YADIF_PARITY_TFF,  "parity"),
    CONST("bff",  "assume bottom field first", YADIF_PARITY_BFF,  "parity"),
    CONST("auto", "auto detect parity",        YADIF_PARITY_AUTO, "parity"),

    { "deint", "specify which frames to deinterlace", OFFSET(deint), AV_OPT_TYPE_INT, {.i64=YADIF_DEINT_ALL}, 0, 1, FLAGS, "deint" },
    CONST("all",        "deinterlace all frames",                       YADIF_DEINT_ALL,        "deint"),
    CONST("interlaced", "only deinterlace frames marked as interlaced", YADIF_DEINT_INTERLACED, "deint"),

    {NULL},
};
454 
455 AVFILTER_DEFINE_CLASS(yadif);
456 
458  {
459  .name = "default",
460  .type = AVMEDIA_TYPE_VIDEO,
461  .filter_frame = filter_frame,
462  },
463  { NULL }
464 };
465 
467  {
468  .name = "default",
469  .type = AVMEDIA_TYPE_VIDEO,
470  .request_frame = request_frame,
471  .config_props = config_props,
472  },
473  { NULL }
474 };
475 
477  .name = "yadif",
478  .description = NULL_IF_CONFIG_SMALL("Deinterlace the input image."),
479 
480  .priv_size = sizeof(YADIFContext),
481  .priv_class = &yadif_class,
482  .uninit = uninit,
484 
485  .inputs = avfilter_vf_yadif_inputs,
486  .outputs = avfilter_vf_yadif_outputs,
487 };
planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
Definition: pixfmt.h:158
static const AVFilterPad avfilter_vf_yadif_inputs[]
Definition: vf_yadif.c:457
const char * s
Definition: avisynth_c.h:668
planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
Definition: pixfmt.h:231
planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
Definition: pixfmt.h:224
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:424
const AVPixFmtDescriptor * av_pix_fmt_desc_get(enum AVPixelFormat pix_fmt)
Definition: pixdesc.c:1778
This structure describes decoded (raw) audio or video data.
Definition: frame.h:76
AVOption.
Definition: opt.h:251
planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
Definition: pixfmt.h:228
planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
Definition: pixfmt.h:151
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:73
static const AVFilterPad outputs[]
Definition: af_ashowinfo.c:117
external API header
planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
Definition: pixfmt.h:154
planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
Definition: pixfmt.h:229
int num
numerator
Definition: rational.h:44
send 1 frame for each frame but skips spatial interlacing check
Definition: yadif.h:28
int frame_pending
Definition: yadif.h:50
static void filter_line_c(void *dst1, void *prev1, void *cur1, void *next1, int w, int prefs, int mrefs, int parity, int mode)
Definition: vf_yadif.c:87
planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
Definition: pixfmt.h:148
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:143
planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
Definition: pixfmt.h:128
uint8_t log2_chroma_w
Amount to shift the luma width right to find the chroma width.
Definition: pixdesc.h:66
output residual component w
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:308
const char * name
Pad name.
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:532
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
static int request_frame(AVFilterLink *link)
Definition: vf_yadif.c:311
AVFrame * cur
Definition: yadif.h:52
planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
Definition: pixfmt.h:105
AVComponentDescriptor comp[4]
Parameters that describe how pixels are packed.
Definition: pixdesc.h:86
uint8_t
it can be given away to ff_start_frame *A reference passed to ff_filter_frame(or the deprecated ff_start_frame) is given away and must no longer be used.*A reference created with avfilter_ref_buffer belongs to the code that created it.*A reference obtained with ff_get_video_buffer or ff_get_audio_buffer belongs to the code that requested it.*A reference given as return value by the get_video_buffer or get_audio_buffer method is given away and must no longer be used.Link reference fields---------------------The AVFilterLink structure has a few AVFilterBufferRef fields.The cur_buf and out_buf were used with the deprecated start_frame/draw_slice/end_frame API and should no longer be used.src_buf
#define av_cold
Definition: attributes.h:78
enum YADIFParity parity
Definition: yadif.h:47
mode
Definition: f_perms.c:27
AVOptions.
#define AV_NE(be, le)
Definition: common.h:44
planar YUV 4:4:4,36bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
Definition: pixfmt.h:230
#define emms_c()
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:159
AVFrame * next
Definition: yadif.h:53
bottom field first
Definition: yadif.h:34
planar YUV 4:4:0 full scale (JPEG), deprecated in favor of PIX_FMT_YUV440P and setting color_range ...
Definition: pixfmt.h:104
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV422P and setting color_...
Definition: pixfmt.h:81
#define AVERROR_EOF
End of file.
Definition: error.h:55
static void filter(AVFilterContext *ctx, AVFrame *dstpic, int parity, int tff)
Definition: vf_yadif.c:175
static void filter_edges(void *dst1, void *prev1, void *cur1, void *next1, int w, int prefs, int mrefs, int parity, int mode)
Definition: vf_yadif.c:106
int interlaced_frame
The content of the picture is interlaced.
Definition: frame.h:270
send 1 frame for each field
Definition: yadif.h:27
void ff_set_common_formats(AVFilterContext *ctx, AVFilterFormats *formats)
A helper for query_formats() which sets all links to the same list of formats.
Definition: formats.c:545
AVFrame * prev
Definition: yadif.h:54
frame
Definition: stft.m:14
A filter pad used for either input or output.
planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
Definition: pixfmt.h:149
Discrete Time axis x
auto detection
Definition: yadif.h:35
planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
Definition: pixfmt.h:157
planar YUV 4:2:2 24bpp, (1 Cr & Cb sample per 2x1 Y & A samples)
Definition: pixfmt.h:219
uint16_t depth_minus1
number of bits in the component minus 1
Definition: pixdesc.h:43
int width
width and height of the video frame
Definition: frame.h:122
enum YADIFMode mode
Definition: yadif.h:46
uint8_t log2_chroma_h
Amount to shift the luma height right to find the chroma height.
Definition: pixdesc.h:75
#define ARCH_X86
Definition: config.h:35
planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
Definition: pixfmt.h:133
frequency sampling df
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
void * priv
private data for use by the filter
Definition: avfilter.h:545
planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
Definition: pixfmt.h:153
simple assert() macros that are a bit more flexible than ISO C assert().
planar YUV 4:2:2,28bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
Definition: pixfmt.h:227
void av_log(void *avcl, int level, const char *fmt,...)
Definition: log.c:246
planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
Definition: pixfmt.h:131
static int query_formats(AVFilterContext *ctx)
Definition: vf_yadif.c:356
#define OFFSET(x)
Definition: vf_yadif.c:431
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFilterBuffer structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:72
void(* filter_edges)(void *dst, void *prev, void *cur, void *next, int w, int prefs, int mrefs, int parity, int mode)
Definition: yadif.h:63
static const AVFilterPad avfilter_vf_yadif_outputs[]
Definition: vf_yadif.c:466
uint8_t nb_components
The number of components each pixel has, (1-4)
Definition: pixdesc.h:57
planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV420P and setting color_...
Definition: pixfmt.h:80
ret
Definition: avfilter.c:821
enum YADIFDeint deint
Definition: yadif.h:48
static int config_props(AVFilterLink *link)
Definition: vf_yadif.c:397
planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
Definition: pixfmt.h:222
planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
Definition: pixfmt.h:150
int eof
Definition: yadif.h:67
planar YUV 4:2:2, 18bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
Definition: pixfmt.h:159
AVFrame * av_frame_clone(AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:317
NULL
Definition: eval.c:55
planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
Definition: pixfmt.h:129
static const AVOption yadif_options[]
Definition: vf_yadif.c:436
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:101
planar YUV 4:4:4 32bpp, (1 Cr & Cb sample per 1x1 Y & A samples)
Definition: pixfmt.h:218
AVFrame * out
Definition: yadif.h:55
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:148
planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
Definition: pixfmt.h:74
Filter definition.
Definition: avfilter.h:436
static int filter_frame(AVFilterLink *link, AVFrame *frame)
Definition: vf_yadif.c:264
Y , 16bpp, big-endian.
Definition: pixfmt.h:101
synthesis window for stochastic i
rational number numerator/denominator
Definition: rational.h:43
AVFilter avfilter_vf_yadif
Definition: vf_yadif.c:476
planar YUV 4:2:0,21bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
Definition: pixfmt.h:223
send 1 frame for each field but skips spatial interlacing check
Definition: yadif.h:29
const char * name
filter name
Definition: avfilter.h:437
AVFILTER_DEFINE_CLASS(yadif)
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFilterBuffer structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later.That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another.Buffer references ownership and permissions
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:539
void(* filter_line)(void *dst, void *prev, void *cur, void *next, int w, int prefs, int mrefs, int parity, int mode)
Required alignment for filter_line.
Definition: yadif.h:60
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:87
planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
Definition: pixfmt.h:155
planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
Definition: pixfmt.h:132
const AVPixFmtDescriptor * csp
Definition: yadif.h:66
#define FILTER(start, end, is_not_edge)
Definition: vf_yadif.c:45
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:68
planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
Definition: pixfmt.h:226
Y , 8bpp.
Definition: pixfmt.h:76
common internal and external API header
#define FLAGS
Definition: vf_yadif.c:432
static void filter_edges_16bit(void *dst1, void *prev1, void *cur1, void *next1, int w, int prefs, int mrefs, int parity, int mode)
Definition: vf_yadif.c:150
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:108
planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
Definition: pixfmt.h:130
planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV444P and setting color_...
Definition: pixfmt.h:82
planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
Definition: pixfmt.h:75
int den
denominator
Definition: rational.h:45
function y
Definition: D.m:1
deinterlace all frames
Definition: yadif.h:39
static av_cold void uninit(AVFilterContext *ctx)
Definition: vf_yadif.c:347
int top_field_first
If the content is interlaced, is top field displayed first.
Definition: frame.h:275
planar YUV 4:4:4,42bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
Definition: pixfmt.h:232
av_cold void ff_yadif_init_x86(YADIFContext *yadif)
Definition: vf_yadif_init.c:62
else dst[i][x+y *dst_stride[i]]
Definition: vf_mcdeint.c:160
Y , 16bpp, little-endian.
Definition: pixfmt.h:102
send 1 frame for each frame
Definition: yadif.h:26
#define CONST(name, help, val, unit)
Definition: vf_yadif.c:434
An instance of a filter.
Definition: avfilter.h:524
struct YADIFContext YADIFContext
int height
Definition: frame.h:122
p parity
Definition: vf_mcdeint.c:178
planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
Definition: pixfmt.h:103
static int return_frame(AVFilterContext *ctx, int is_second)
Definition: vf_yadif.c:224
int ff_request_frame(AVFilterLink *link)
Request an input frame from the filter at the other end of the link.
Definition: avfilter.c:319
internal API functions
only deinterlace frames marked as interlaced
Definition: yadif.h:40
AVPixelFormat
Pixel format.
Definition: pixfmt.h:66
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several inputs
top field first
Definition: yadif.h:33
planar YUV 4:2:0,18bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
Definition: pixfmt.h:221
planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
Definition: pixfmt.h:156
planar YUV 4:2:2,24bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
Definition: pixfmt.h:225
#define AV_NOPTS_VALUE
Undefined timestamp value.
Definition: avutil.h:190
static void filter_line_c_16bit(void *dst1, void *prev1, void *cur1, void *next1, int w, int prefs, int mrefs, int parity, int mode)
Definition: vf_yadif.c:132
planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
Definition: pixfmt.h:152