/*
 * Copyright (C) 2006-2011 Michael Niedermayer <michaelni@gmx.at>
 *               2010      James Darnley <james.darnley@gmail.com>
 *
 * FFmpeg is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with FFmpeg; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 */

#include "libavutil/avassert.h"
#include "libavutil/cpu.h"
#include "libavutil/common.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
#include "yadif.h"

#undef NDEBUG
#include <assert.h>

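/* CHECK(j) scores one candidate edge direction by summing absolute
 * differences between the line above (cur[mrefs]) and the line below
 * (cur[prefs]), sampled with opposite horizontal offsets of j; when the
 * score improves on spatial_score, the spatial prediction is taken along
 * that direction.  The macro deliberately leaves two braces open: FILTER
 * closes them with the "}} }}" sequences below, which also makes each
 * CHECK(+-2) conditional on the preceding CHECK(+-1) having won. */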
#define CHECK(j)\
    {   int score = FFABS(cur[mrefs - 1 + (j)] - cur[prefs - 1 - (j)])\
                  + FFABS(cur[mrefs  +(j)] - cur[prefs  -(j)])\
                  + FFABS(cur[mrefs + 1 + (j)] - cur[prefs + 1 - (j)]);\
        if (score < spatial_score) {\
            spatial_score= score;\
            spatial_pred= (cur[mrefs  +(j)] + cur[prefs  -(j)])>>1;\

/* The is_not_edge argument here controls when the code will enter a branch
 * which reads up to and including x-3 and x+3. */

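/* FILTER computes one output pixel per iteration: d is the temporal average
 * of the previous and next frames at this position, spatial_pred starts as
 * the average of the pixels directly above and below, and diff estimates how
 * much the pixel may legitimately change over time.  With is_not_edge set,
 * the CHECK calls refine spatial_pred along diagonal edges; when mode < 2,
 * diff is additionally refined using the rows two lines away in the adjacent
 * frames.  The spatial prediction is finally clamped to d +/- diff. */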
#define FILTER(start, end, is_not_edge) \
    for (x = start; x < end; x++) { \
        int c = cur[mrefs]; \
        int d = (prev2[0] + next2[0])>>1; \
        int e = cur[prefs]; \
        int temporal_diff0 = FFABS(prev2[0] - next2[0]); \
        int temporal_diff1 =(FFABS(prev[mrefs] - c) + FFABS(prev[prefs] - e) )>>1; \
        int temporal_diff2 =(FFABS(next[mrefs] - c) + FFABS(next[prefs] - e) )>>1; \
        int diff = FFMAX3(temporal_diff0 >> 1, temporal_diff1, temporal_diff2); \
        int spatial_pred = (c+e) >> 1; \
 \
        if (is_not_edge) {\
            int spatial_score = FFABS(cur[mrefs - 1] - cur[prefs - 1]) + FFABS(c-e) \
                              + FFABS(cur[mrefs + 1] - cur[prefs + 1]) - 1; \
            CHECK(-1) CHECK(-2) }} }} \
            CHECK( 1) CHECK( 2) }} }} \
        }\
 \
        if (mode < 2) { \
            int b = (prev2[2 * mrefs] + next2[2 * mrefs])>>1; \
            int f = (prev2[2 * prefs] + next2[2 * prefs])>>1; \
            int max = FFMAX3(d - e, d - c, FFMIN(b - c, f - e)); \
            int min = FFMIN3(d - e, d - c, FFMAX(b - c, f - e)); \
 \
            diff = FFMAX3(diff, min, -max); \
        } \
 \
        if (spatial_pred > d + diff) \
            spatial_pred = d + diff; \
        else if (spatial_pred < d - diff) \
            spatial_pred = d - diff; \
 \
        dst[0] = spatial_pred; \
 \
        dst++; \
        cur++; \
        prev++; \
        next++; \
        prev2++; \
        next2++; \
    }

static void filter_line_c(void *dst1,
                          void *prev1, void *cur1, void *next1,
                          int w, int prefs, int mrefs, int parity, int mode)
{
    uint8_t *dst   = dst1;
    uint8_t *prev  = prev1;
    uint8_t *cur   = cur1;
    uint8_t *next  = next1;
    int x;
    uint8_t *prev2 = parity ? prev : cur ;
    uint8_t *next2 = parity ? cur  : next;

    /* The function is called with the pointers already pointing to data[3] and
     * with 6 subtracted from the width. This allows the FILTER macro to be
     * called so that it processes all the pixels normally. A constant value of
     * true for is_not_edge lets the compiler ignore the if statement. */
    FILTER(0, w, 1)
}

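/* Like filter_line_c(), but for the three leftmost and three rightmost
 * pixels of a line, where the edge-direction check would read outside the
 * line; is_not_edge is therefore passed as 0. */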
static void filter_edges(void *dst1, void *prev1, void *cur1, void *next1,
                         int w, int prefs, int mrefs, int parity, int mode)
{
    uint8_t *dst   = dst1;
    uint8_t *prev  = prev1;
    uint8_t *cur   = cur1;
    uint8_t *next  = next1;
    int x;
    uint8_t *prev2 = parity ? prev : cur ;
    uint8_t *next2 = parity ? cur  : next;

    /* Only edge pixels need to be processed here. A constant value of false
     * for is_not_edge should let the compiler ignore the whole branch. */
    FILTER(0, 3, 0)

    dst   = (uint8_t*)dst1  + w - 3;
    prev  = (uint8_t*)prev1 + w - 3;
    cur   = (uint8_t*)cur1  + w - 3;
    next  = (uint8_t*)next1 + w - 3;
    prev2 = (uint8_t*)(parity ? prev : cur);
    next2 = (uint8_t*)(parity ? cur  : next);

    FILTER(w - 3, w, 0)
}

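/* 16-bit variants of the two functions above.  prefs/mrefs arrive as byte
 * strides, so they are halved here to become uint16_t element offsets
 * before FILTER indexes with them. */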
static void filter_line_c_16bit(void *dst1,
                                void *prev1, void *cur1, void *next1,
                                int w, int prefs, int mrefs, int parity,
                                int mode)
{
    uint16_t *dst   = dst1;
    uint16_t *prev  = prev1;
    uint16_t *cur   = cur1;
    uint16_t *next  = next1;
    int x;
    uint16_t *prev2 = parity ? prev : cur ;
    uint16_t *next2 = parity ? cur  : next;
    mrefs /= 2;
    prefs /= 2;

    FILTER(0, w, 1)
}

static void filter_edges_16bit(void *dst1, void *prev1, void *cur1, void *next1,
                               int w, int prefs, int mrefs, int parity, int mode)
{
    uint16_t *dst   = dst1;
    uint16_t *prev  = prev1;
    uint16_t *cur   = cur1;
    uint16_t *next  = next1;
    int x;
    uint16_t *prev2 = parity ? prev : cur ;
    uint16_t *next2 = parity ? cur  : next;
    mrefs /= 2;
    prefs /= 2;

    FILTER(0, 3, 0)

    dst   = (uint16_t*)dst1  + w - 3;
    prev  = (uint16_t*)prev1 + w - 3;
    cur   = (uint16_t*)cur1  + w - 3;
    next  = (uint16_t*)next1 + w - 3;
    prev2 = (uint16_t*)(parity ? prev : cur);
    next2 = (uint16_t*)(parity ? cur  : next);

    FILTER(w - 3, w, 0)
}

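/* Deinterlace one output picture.  For each plane, lines belonging to the
 * field being reconstructed ((y ^ parity) & 1) are interpolated with
 * filter_line/filter_edges, while lines of the kept field are copied
 * verbatim from the current frame.  Lines 1 and h-2 are forced to mode 2
 * because the mode < 2 path reads two lines above/below, which do not
 * exist there. */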
static void filter(AVFilterContext *ctx, AVFrame *dstpic,
                   int parity, int tff)
{
    YADIFContext *yadif = ctx->priv;
    int y, i;

    for (i = 0; i < yadif->csp->nb_components; i++) {
        int w = dstpic->width;
        int h = dstpic->height;
        int refs = yadif->cur->linesize[i];
        int df = (yadif->csp->comp[i].depth_minus1 + 8) / 8;
        int pix_3 = 3 * df;

        if (i == 1 || i == 2) {
            /* Why is this not part of the per-plane description thing? */
            w >>= yadif->csp->log2_chroma_w;
            h >>= yadif->csp->log2_chroma_h;
        }

        /* filtering reads 3 pixels to the left/right; to avoid invalid reads,
         * we need to call the c variant which avoids this for border pixels
         */

        for (y = 0; y < h; y++) {
            if ((y ^ parity) & 1) {
                uint8_t *prev = &yadif->prev->data[i][y * refs];
                uint8_t *cur  = &yadif->cur ->data[i][y * refs];
                uint8_t *next = &yadif->next->data[i][y * refs];
                uint8_t *dst  = &dstpic->data[i][y * dstpic->linesize[i]];
                int     mode  = y == 1 || y + 2 == h ? 2 : yadif->mode;
                yadif->filter_line(dst + pix_3, prev + pix_3, cur + pix_3,
                                   next + pix_3, w - 6,
                                   y + 1 < h ? refs : -refs,
                                   y ? -refs : refs,
                                   parity ^ tff, mode);
                yadif->filter_edges(dst, prev, cur, next, w,
                                    y + 1 < h ? refs : -refs,
                                    y ? -refs : refs,
                                    parity ^ tff, mode);
            } else {
                memcpy(&dstpic->data[i][y * dstpic->linesize[i]],
                       &yadif->cur->data[i][y * refs], w * df);
            }
        }
    }

    emms_c();
}

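/* Emit one deinterlaced frame downstream.  is_second marks the second
 * output of a frame pair in the send_field modes; in that case a fresh
 * buffer is allocated here and its pts is set to cur_pts + next_pts, i.e.
 * the midpoint between the two input frames expressed in the doubled
 * output time base configured in config_props.  frame_pending is raised
 * after the first field of a pair so the second one is emitted next. */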
static int return_frame(AVFilterContext *ctx, int is_second)
{
    YADIFContext *yadif = ctx->priv;
    AVFilterLink *link  = ctx->outputs[0];
    int tff, ret;

    if (yadif->parity == -1) {
        tff = yadif->cur->interlaced_frame ?
              yadif->cur->top_field_first : 1;
    } else {
        tff = yadif->parity ^ 1;
    }

    if (is_second) {
        yadif->out = ff_get_video_buffer(link, link->w, link->h);
        if (!yadif->out)
            return AVERROR(ENOMEM);

        av_frame_copy_props(yadif->out, yadif->cur);
        yadif->out->interlaced_frame = 0;
    }

    filter(ctx, yadif->out, tff ^ !is_second, tff);

    if (is_second) {
        int64_t cur_pts  = yadif->cur->pts;
        int64_t next_pts = yadif->next->pts;

        if (next_pts != AV_NOPTS_VALUE && cur_pts != AV_NOPTS_VALUE) {
            yadif->out->pts = cur_pts + next_pts;
        } else {
            yadif->out->pts = AV_NOPTS_VALUE;
        }
    }
    ret = ff_filter_frame(ctx->outputs[0], yadif->out);

    yadif->frame_pending = (yadif->mode&1) && !is_second;
    return ret;
}

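/* Input callback: rotate the prev/cur/next frame window, then either pass
 * progressive frames straight through (when deint=interlaced is selected)
 * or allocate an output buffer and deinterlace the current frame.  Output
 * pts values are doubled to match the halved output time base. */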
static int filter_frame(AVFilterLink *link, AVFrame *frame)
{
    AVFilterContext *ctx = link->dst;
    YADIFContext *yadif = ctx->priv;

    av_assert0(frame);

    if (yadif->frame_pending)
        return_frame(ctx, 1);

    if (yadif->prev)
        av_frame_free(&yadif->prev);
    yadif->prev = yadif->cur;
    yadif->cur  = yadif->next;
    yadif->next = frame;

    if (!yadif->cur)
        return 0;

    if (yadif->deint && !yadif->cur->interlaced_frame) {
        yadif->out = av_frame_clone(yadif->cur);
        if (!yadif->out)
            return AVERROR(ENOMEM);

        av_frame_free(&yadif->prev);
        if (yadif->out->pts != AV_NOPTS_VALUE)
            yadif->out->pts *= 2;
        return ff_filter_frame(ctx->outputs[0], yadif->out);
    }

    if (!yadif->prev &&
        !(yadif->prev = av_frame_clone(yadif->cur)))
        return AVERROR(ENOMEM);

    yadif->out = ff_get_video_buffer(ctx->outputs[0], link->w, link->h);
    if (!yadif->out)
        return AVERROR(ENOMEM);

    av_frame_copy_props(yadif->out, yadif->cur);
    yadif->out->interlaced_frame = 0;

    if (yadif->out->pts != AV_NOPTS_VALUE)
        yadif->out->pts *= 2;

    return return_frame(ctx, 0);
}

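/* Output-side pull: emit a pending second field if there is one, otherwise
 * request input until a frame has been produced.  On EOF the last frame is
 * pushed through once more with an extrapolated pts, so the final input
 * frame is not lost (the filter always lags one frame behind its input). */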
static int request_frame(AVFilterLink *link)
{
    AVFilterContext *ctx = link->src;
    YADIFContext *yadif = ctx->priv;

    if (yadif->frame_pending) {
        return_frame(ctx, 1);
        return 0;
    }

    do {
        int ret;

        if (yadif->eof)
            return AVERROR_EOF;

        ret = ff_request_frame(link->src->inputs[0]);

        if (ret == AVERROR_EOF && yadif->cur) {
            AVFrame *next = av_frame_clone(yadif->next);

            if (!next)
                return AVERROR(ENOMEM);

            next->pts = yadif->next->pts * 2 - yadif->cur->pts;

            filter_frame(link->src->inputs[0], next);
            yadif->eof = 1;
        } else if (ret < 0) {
            return ret;
        }
    } while (!yadif->cur);

    return 0;
}

static av_cold void uninit(AVFilterContext *ctx)
{
    YADIFContext *yadif = ctx->priv;

    av_frame_free(&yadif->prev);
    av_frame_free(&yadif->cur );
    av_frame_free(&yadif->next);
}

static int query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUV420P,
        AV_PIX_FMT_YUV422P,
        AV_PIX_FMT_YUV444P,
        AV_PIX_FMT_YUV410P,
        AV_PIX_FMT_YUV411P,
        AV_PIX_FMT_GRAY8,
        AV_PIX_FMT_YUVJ420P,
        AV_PIX_FMT_YUVJ422P,
        AV_PIX_FMT_YUVJ444P,
        AV_NE( AV_PIX_FMT_GRAY16BE, AV_PIX_FMT_GRAY16LE ),
        AV_PIX_FMT_YUV440P,
        AV_PIX_FMT_YUVJ440P,
        AV_NE( AV_PIX_FMT_YUV420P9BE,  AV_PIX_FMT_YUV420P9LE ),
        AV_NE( AV_PIX_FMT_YUV422P9BE,  AV_PIX_FMT_YUV422P9LE ),
        AV_NE( AV_PIX_FMT_YUV444P9BE,  AV_PIX_FMT_YUV444P9LE ),
        AV_NE( AV_PIX_FMT_YUV420P10BE, AV_PIX_FMT_YUV420P10LE ),
        AV_NE( AV_PIX_FMT_YUV422P10BE, AV_PIX_FMT_YUV422P10LE ),
        AV_NE( AV_PIX_FMT_YUV444P10BE, AV_PIX_FMT_YUV444P10LE ),
        AV_NE( AV_PIX_FMT_YUV420P12BE, AV_PIX_FMT_YUV420P12LE ),
        AV_NE( AV_PIX_FMT_YUV422P12BE, AV_PIX_FMT_YUV422P12LE ),
        AV_NE( AV_PIX_FMT_YUV444P12BE, AV_PIX_FMT_YUV444P12LE ),
        AV_NE( AV_PIX_FMT_YUV420P14BE, AV_PIX_FMT_YUV420P14LE ),
        AV_NE( AV_PIX_FMT_YUV422P14BE, AV_PIX_FMT_YUV422P14LE ),
        AV_NE( AV_PIX_FMT_YUV444P14BE, AV_PIX_FMT_YUV444P14LE ),
        AV_NE( AV_PIX_FMT_YUV420P16BE, AV_PIX_FMT_YUV420P16LE ),
        AV_NE( AV_PIX_FMT_YUV422P16BE, AV_PIX_FMT_YUV422P16LE ),
        AV_NE( AV_PIX_FMT_YUV444P16BE, AV_PIX_FMT_YUV444P16LE ),
        AV_PIX_FMT_YUVA420P,
        AV_PIX_FMT_YUVA422P,
        AV_PIX_FMT_YUVA444P,
        AV_PIX_FMT_NONE
    };

    ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));

    return 0;
}

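/* Configure the output link: same dimensions as the input, but with the
 * time base denominator doubled so field-rate output can be timestamped
 * exactly; the advertised frame rate is doubled in the send_field modes.
 * The line filters are picked by bit depth, and x86 SIMD implementations
 * are plugged in on top when available. */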
static int config_props(AVFilterLink *link)
{
    AVFilterContext *ctx = link->src;
    YADIFContext *s = link->src->priv;

    link->time_base.num = link->src->inputs[0]->time_base.num;
    link->time_base.den = link->src->inputs[0]->time_base.den * 2;
    link->w             = link->src->inputs[0]->w;
    link->h             = link->src->inputs[0]->h;

    if (s->mode & 1)
        link->frame_rate = av_mul_q(link->src->inputs[0]->frame_rate,
                                    (AVRational){2, 1});

    if (link->w < 3 || link->h < 3) {
        av_log(ctx, AV_LOG_ERROR, "Video of less than 3 columns or lines is not supported\n");
        return AVERROR(EINVAL);
    }

    s->csp = av_pix_fmt_desc_get(link->format);
    if (s->csp->comp[0].depth_minus1 / 8 == 1) {
        s->filter_line  = filter_line_c_16bit;
        s->filter_edges = filter_edges_16bit;
    } else {
        s->filter_line  = filter_line_c;
        s->filter_edges = filter_edges;
    }

    if (ARCH_X86)
        ff_yadif_init_x86(s);

    return 0;
}


#define OFFSET(x) offsetof(YADIFContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

#define CONST(name, help, val, unit) { name, help, 0, AV_OPT_TYPE_CONST, {.i64=val}, INT_MIN, INT_MAX, FLAGS, unit }

static const AVOption yadif_options[] = {
    { "mode", "specify the interlacing mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=YADIF_MODE_SEND_FRAME}, 0, 3, FLAGS, "mode"},
    CONST("send_frame",           "send one frame for each frame",                                     YADIF_MODE_SEND_FRAME,           "mode"),
    CONST("send_field",           "send one frame for each field",                                     YADIF_MODE_SEND_FIELD,           "mode"),
    CONST("send_frame_nospatial", "send one frame for each frame, but skip spatial interlacing check", YADIF_MODE_SEND_FRAME_NOSPATIAL, "mode"),
    CONST("send_field_nospatial", "send one frame for each field, but skip spatial interlacing check", YADIF_MODE_SEND_FIELD_NOSPATIAL, "mode"),

    { "parity", "specify the assumed picture field parity", OFFSET(parity), AV_OPT_TYPE_INT, {.i64=YADIF_PARITY_AUTO}, -1, 1, FLAGS, "parity" },
    CONST("tff",  "assume top field first",    YADIF_PARITY_TFF,  "parity"),
    CONST("bff",  "assume bottom field first", YADIF_PARITY_BFF,  "parity"),
    CONST("auto", "auto detect parity",        YADIF_PARITY_AUTO, "parity"),

    { "deint", "specify which frames to deinterlace", OFFSET(deint), AV_OPT_TYPE_INT, {.i64=YADIF_DEINT_ALL}, 0, 1, FLAGS, "deint" },
    CONST("all",        "deinterlace all frames",                       YADIF_DEINT_ALL,        "deint"),
    CONST("interlaced", "only deinterlace frames marked as interlaced", YADIF_DEINT_INTERLACED, "deint"),

    {NULL},
};
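
/* For reference, these options map to libavfilter graph syntax such as
 * "yadif=mode=send_field:parity=auto:deint=interlaced"; plain "yadif" uses
 * the defaults above (send_frame, auto parity, deinterlace all frames). */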

AVFILTER_DEFINE_CLASS(yadif);

static const AVFilterPad avfilter_vf_yadif_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
    },
    { NULL }
};

static const AVFilterPad avfilter_vf_yadif_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_props,
    },
    { NULL }
};

AVFilter avfilter_vf_yadif = {
    .name          = "yadif",
    .description   = NULL_IF_CONFIG_SMALL("Deinterlace the input image."),

    .priv_size     = sizeof(YADIFContext),
    .priv_class    = &yadif_class,
    .uninit        = uninit,
    .query_formats = query_formats,

    .inputs        = avfilter_vf_yadif_inputs,
    .outputs       = avfilter_vf_yadif_outputs,
};