yading@11: /* yading@11: * Copyright (c) 2011 Stefano Sabatini yading@11: * Copyright (c) 2010 Baptiste Coudurier yading@11: * Copyright (c) 2003 Michael Zucchi yading@11: * yading@11: * This file is part of FFmpeg. yading@11: * yading@11: * FFmpeg is free software; you can redistribute it and/or modify yading@11: * it under the terms of the GNU General Public License as published by yading@11: * the Free Software Foundation; either version 2 of the License, or yading@11: * (at your option) any later version. yading@11: * yading@11: * FFmpeg is distributed in the hope that it will be useful, yading@11: * but WITHOUT ANY WARRANTY; without even the implied warranty of yading@11: * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the yading@11: * GNU General Public License for more details. yading@11: * yading@11: * You should have received a copy of the GNU General Public License along yading@11: * with FFmpeg if not, write to the Free Software Foundation, Inc., yading@11: * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. yading@11: */ yading@11: yading@11: /** yading@11: * @file yading@11: * temporal field interlace filter, ported from MPlayer/libmpcodecs yading@11: */ yading@11: yading@11: #include "libavutil/opt.h" yading@11: #include "libavutil/imgutils.h" yading@11: #include "libavutil/avassert.h" yading@11: #include "avfilter.h" yading@11: #include "internal.h" yading@11: yading@11: enum TInterlaceMode { yading@11: MODE_MERGE = 0, yading@11: MODE_DROP_EVEN, yading@11: MODE_DROP_ODD, yading@11: MODE_PAD, yading@11: MODE_INTERLEAVE_TOP, yading@11: MODE_INTERLEAVE_BOTTOM, yading@11: MODE_INTERLACEX2, yading@11: MODE_NB, yading@11: }; yading@11: yading@11: typedef struct { yading@11: const AVClass *class; yading@11: enum TInterlaceMode mode; ///< interlace mode selected yading@11: int flags; ///< flags affecting interlacing algorithm yading@11: int frame; ///< number of the output frame yading@11: int vsub; ///< chroma vertical subsampling yading@11: AVFrame *cur; yading@11: AVFrame *next; yading@11: uint8_t *black_data[4]; ///< buffer used to fill padded lines yading@11: int black_linesize[4]; yading@11: } TInterlaceContext; yading@11: yading@11: #define OFFSET(x) offsetof(TInterlaceContext, x) yading@11: #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM yading@11: #define TINTERLACE_FLAG_VLPF 01 yading@11: yading@11: static const AVOption tinterlace_options[] = { yading@11: {"mode", "select interlace mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=MODE_MERGE}, 0, MODE_NB-1, FLAGS, "mode"}, yading@11: {"merge", "merge fields", 0, AV_OPT_TYPE_CONST, {.i64=MODE_MERGE}, INT_MIN, INT_MAX, FLAGS, "mode"}, yading@11: {"drop_even", "drop even fields", 0, AV_OPT_TYPE_CONST, {.i64=MODE_DROP_EVEN}, INT_MIN, INT_MAX, FLAGS, "mode"}, yading@11: {"drop_odd", "drop odd fields", 0, AV_OPT_TYPE_CONST, {.i64=MODE_DROP_ODD}, INT_MIN, INT_MAX, FLAGS, "mode"}, yading@11: {"pad", "pad alternate lines with black", 0, AV_OPT_TYPE_CONST, {.i64=MODE_PAD}, INT_MIN, INT_MAX, FLAGS, "mode"}, yading@11: {"interleave_top", "interleave top and bottom fields", 0, AV_OPT_TYPE_CONST, {.i64=MODE_INTERLEAVE_TOP}, INT_MIN, INT_MAX, FLAGS, "mode"}, yading@11: {"interleave_bottom", "interleave bottom and top fields", 0, AV_OPT_TYPE_CONST, {.i64=MODE_INTERLEAVE_BOTTOM}, INT_MIN, INT_MAX, FLAGS, "mode"}, yading@11: {"interlacex2", "interlace fields from two consecutive frames", 0, AV_OPT_TYPE_CONST, {.i64=MODE_INTERLACEX2}, INT_MIN, INT_MAX, FLAGS, "mode"}, yading@11: yading@11: {"flags", "set flags", OFFSET(flags), AV_OPT_TYPE_FLAGS, {.i64 = 0}, 0, INT_MAX, 0, "flags" }, yading@11: {"low_pass_filter", "enable vertical low-pass filter", 0, AV_OPT_TYPE_CONST, {.i64 = TINTERLACE_FLAG_VLPF}, INT_MIN, INT_MAX, FLAGS, "flags" }, yading@11: {"vlpf", "enable vertical low-pass filter", 0, AV_OPT_TYPE_CONST, {.i64 = TINTERLACE_FLAG_VLPF}, INT_MIN, INT_MAX, FLAGS, "flags" }, yading@11: yading@11: {NULL} yading@11: }; yading@11: yading@11: AVFILTER_DEFINE_CLASS(tinterlace); yading@11: yading@11: #define FULL_SCALE_YUVJ_FORMATS \ yading@11: AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ440P yading@11: yading@11: static enum AVPixelFormat full_scale_yuvj_pix_fmts[] = { yading@11: FULL_SCALE_YUVJ_FORMATS, AV_PIX_FMT_NONE yading@11: }; yading@11: yading@11: static int query_formats(AVFilterContext *ctx) yading@11: { yading@11: static const enum AVPixelFormat pix_fmts[] = { yading@11: AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, yading@11: AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUVA420P, yading@11: AV_PIX_FMT_GRAY8, FULL_SCALE_YUVJ_FORMATS, yading@11: AV_PIX_FMT_NONE yading@11: }; yading@11: yading@11: ff_set_common_formats(ctx, ff_make_format_list(pix_fmts)); yading@11: return 0; yading@11: } yading@11: yading@11: static av_cold void uninit(AVFilterContext *ctx) yading@11: { yading@11: TInterlaceContext *tinterlace = ctx->priv; yading@11: yading@11: av_frame_free(&tinterlace->cur ); yading@11: av_frame_free(&tinterlace->next); yading@11: av_freep(&tinterlace->black_data[0]); yading@11: } yading@11: yading@11: static int config_out_props(AVFilterLink *outlink) yading@11: { yading@11: AVFilterContext *ctx = outlink->src; yading@11: AVFilterLink *inlink = outlink->src->inputs[0]; yading@11: const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format); yading@11: TInterlaceContext *tinterlace = ctx->priv; yading@11: yading@11: tinterlace->vsub = desc->log2_chroma_h; yading@11: outlink->w = inlink->w; yading@11: outlink->h = tinterlace->mode == MODE_MERGE || tinterlace->mode == MODE_PAD ? yading@11: inlink->h*2 : inlink->h; yading@11: yading@11: if (tinterlace->mode == MODE_PAD) { yading@11: uint8_t black[4] = { 16, 128, 128, 16 }; yading@11: int i, ret; yading@11: if (ff_fmt_is_in(outlink->format, full_scale_yuvj_pix_fmts)) yading@11: black[0] = black[3] = 0; yading@11: ret = av_image_alloc(tinterlace->black_data, tinterlace->black_linesize, yading@11: outlink->w, outlink->h, outlink->format, 1); yading@11: if (ret < 0) yading@11: return ret; yading@11: yading@11: /* fill black picture with black */ yading@11: for (i = 0; i < 4 && tinterlace->black_data[i]; i++) { yading@11: int h = i == 1 || i == 2 ? outlink->h >> desc->log2_chroma_h : outlink->h; yading@11: memset(tinterlace->black_data[i], black[i], yading@11: tinterlace->black_linesize[i] * h); yading@11: } yading@11: } yading@11: if ((tinterlace->flags & TINTERLACE_FLAG_VLPF) yading@11: && !(tinterlace->mode == MODE_INTERLEAVE_TOP yading@11: || tinterlace->mode == MODE_INTERLEAVE_BOTTOM)) { yading@11: av_log(ctx, AV_LOG_WARNING, "low_pass_filter flag ignored with mode %d\n", yading@11: tinterlace->mode); yading@11: tinterlace->flags &= ~TINTERLACE_FLAG_VLPF; yading@11: } yading@11: av_log(ctx, AV_LOG_VERBOSE, "mode:%d filter:%s h:%d -> h:%d\n", yading@11: tinterlace->mode, (tinterlace->flags & TINTERLACE_FLAG_VLPF) ? "on" : "off", yading@11: inlink->h, outlink->h); yading@11: yading@11: return 0; yading@11: } yading@11: yading@11: #define FIELD_UPPER 0 yading@11: #define FIELD_LOWER 1 yading@11: #define FIELD_UPPER_AND_LOWER 2 yading@11: yading@11: /** yading@11: * Copy picture field from src to dst. yading@11: * yading@11: * @param src_field copy from upper, lower field or both yading@11: * @param interleave leave a padding line between each copied line yading@11: * @param dst_field copy to upper or lower field, yading@11: * only meaningful when interleave is selected yading@11: * @param flags context flags yading@11: */ yading@11: static inline yading@11: void copy_picture_field(uint8_t *dst[4], int dst_linesize[4], yading@11: const uint8_t *src[4], int src_linesize[4], yading@11: enum AVPixelFormat format, int w, int src_h, yading@11: int src_field, int interleave, int dst_field, yading@11: int flags) yading@11: { yading@11: const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(format); yading@11: int plane, vsub = desc->log2_chroma_h; yading@11: int k = src_field == FIELD_UPPER_AND_LOWER ? 1 : 2; yading@11: int h, i; yading@11: yading@11: for (plane = 0; plane < desc->nb_components; plane++) { yading@11: int lines = plane == 1 || plane == 2 ? src_h >> vsub : src_h; yading@11: int linesize = av_image_get_linesize(format, w, plane); yading@11: uint8_t *dstp = dst[plane]; yading@11: const uint8_t *srcp = src[plane]; yading@11: yading@11: if (linesize < 0) yading@11: return; yading@11: yading@11: lines /= k; yading@11: if (src_field == FIELD_LOWER) yading@11: srcp += src_linesize[plane]; yading@11: if (interleave && dst_field == FIELD_LOWER) yading@11: dstp += dst_linesize[plane]; yading@11: if (flags & TINTERLACE_FLAG_VLPF) { yading@11: // Low-pass filtering is required when creating an interlaced destination from yading@11: // a progressive source which contains high-frequency vertical detail. yading@11: // Filtering will reduce interlace 'twitter' and Moire patterning. yading@11: int srcp_linesize = src_linesize[plane] * k; yading@11: int dstp_linesize = dst_linesize[plane] * (interleave ? 2 : 1); yading@11: for (h = lines; h > 0; h--) { yading@11: const uint8_t *srcp_above = srcp - src_linesize[plane]; yading@11: const uint8_t *srcp_below = srcp + src_linesize[plane]; yading@11: if (h == lines) srcp_above = srcp; // there is no line above yading@11: if (h == 1) srcp_below = srcp; // there is no line below yading@11: for (i = 0; i < linesize; i++) { yading@11: // this calculation is an integer representation of yading@11: // '0.5 * current + 0.25 * above + 0.25 + below' yading@11: // '1 +' is for rounding. */ yading@11: dstp[i] = (1 + srcp[i] + srcp[i] + srcp_above[i] + srcp_below[i]) >> 2; yading@11: } yading@11: dstp += dstp_linesize; yading@11: srcp += srcp_linesize; yading@11: } yading@11: } else { yading@11: av_image_copy_plane(dstp, dst_linesize[plane] * (interleave ? 2 : 1), yading@11: srcp, src_linesize[plane]*k, linesize, lines); yading@11: } yading@11: } yading@11: } yading@11: yading@11: static int filter_frame(AVFilterLink *inlink, AVFrame *picref) yading@11: { yading@11: AVFilterContext *ctx = inlink->dst; yading@11: AVFilterLink *outlink = ctx->outputs[0]; yading@11: TInterlaceContext *tinterlace = ctx->priv; yading@11: AVFrame *cur, *next, *out; yading@11: int field, tff, ret; yading@11: yading@11: av_frame_free(&tinterlace->cur); yading@11: tinterlace->cur = tinterlace->next; yading@11: tinterlace->next = picref; yading@11: yading@11: cur = tinterlace->cur; yading@11: next = tinterlace->next; yading@11: /* we need at least two frames */ yading@11: if (!tinterlace->cur) yading@11: return 0; yading@11: yading@11: switch (tinterlace->mode) { yading@11: case MODE_MERGE: /* move the odd frame into the upper field of the new image, even into yading@11: * the lower field, generating a double-height video at half framerate */ yading@11: out = ff_get_video_buffer(outlink, outlink->w, outlink->h); yading@11: if (!out) yading@11: return AVERROR(ENOMEM); yading@11: av_frame_copy_props(out, cur); yading@11: out->height = outlink->h; yading@11: out->interlaced_frame = 1; yading@11: out->top_field_first = 1; yading@11: yading@11: /* write odd frame lines into the upper field of the new frame */ yading@11: copy_picture_field(out->data, out->linesize, yading@11: (const uint8_t **)cur->data, cur->linesize, yading@11: inlink->format, inlink->w, inlink->h, yading@11: FIELD_UPPER_AND_LOWER, 1, FIELD_UPPER, tinterlace->flags); yading@11: /* write even frame lines into the lower field of the new frame */ yading@11: copy_picture_field(out->data, out->linesize, yading@11: (const uint8_t **)next->data, next->linesize, yading@11: inlink->format, inlink->w, inlink->h, yading@11: FIELD_UPPER_AND_LOWER, 1, FIELD_LOWER, tinterlace->flags); yading@11: av_frame_free(&tinterlace->next); yading@11: break; yading@11: yading@11: case MODE_DROP_ODD: /* only output even frames, odd frames are dropped; height unchanged, half framerate */ yading@11: case MODE_DROP_EVEN: /* only output odd frames, even frames are dropped; height unchanged, half framerate */ yading@11: out = av_frame_clone(tinterlace->mode == MODE_DROP_EVEN ? cur : next); yading@11: av_frame_free(&tinterlace->next); yading@11: break; yading@11: yading@11: case MODE_PAD: /* expand each frame to double height, but pad alternate yading@11: * lines with black; framerate unchanged */ yading@11: out = ff_get_video_buffer(outlink, outlink->w, outlink->h); yading@11: if (!out) yading@11: return AVERROR(ENOMEM); yading@11: av_frame_copy_props(out, cur); yading@11: out->height = outlink->h; yading@11: yading@11: field = (1 + tinterlace->frame) & 1 ? FIELD_UPPER : FIELD_LOWER; yading@11: /* copy upper and lower fields */ yading@11: copy_picture_field(out->data, out->linesize, yading@11: (const uint8_t **)cur->data, cur->linesize, yading@11: inlink->format, inlink->w, inlink->h, yading@11: FIELD_UPPER_AND_LOWER, 1, field, tinterlace->flags); yading@11: /* pad with black the other field */ yading@11: copy_picture_field(out->data, out->linesize, yading@11: (const uint8_t **)tinterlace->black_data, tinterlace->black_linesize, yading@11: inlink->format, inlink->w, inlink->h, yading@11: FIELD_UPPER_AND_LOWER, 1, !field, tinterlace->flags); yading@11: break; yading@11: yading@11: /* interleave upper/lower lines from odd frames with lower/upper lines from even frames, yading@11: * halving the frame rate and preserving image height */ yading@11: case MODE_INTERLEAVE_TOP: /* top field first */ yading@11: case MODE_INTERLEAVE_BOTTOM: /* bottom field first */ yading@11: tff = tinterlace->mode == MODE_INTERLEAVE_TOP; yading@11: out = ff_get_video_buffer(outlink, outlink->w, outlink->h); yading@11: if (!out) yading@11: return AVERROR(ENOMEM); yading@11: av_frame_copy_props(out, cur); yading@11: out->interlaced_frame = 1; yading@11: out->top_field_first = tff; yading@11: yading@11: /* copy upper/lower field from cur */ yading@11: copy_picture_field(out->data, out->linesize, yading@11: (const uint8_t **)cur->data, cur->linesize, yading@11: inlink->format, inlink->w, inlink->h, yading@11: tff ? FIELD_UPPER : FIELD_LOWER, 1, tff ? FIELD_UPPER : FIELD_LOWER, yading@11: tinterlace->flags); yading@11: /* copy lower/upper field from next */ yading@11: copy_picture_field(out->data, out->linesize, yading@11: (const uint8_t **)next->data, next->linesize, yading@11: inlink->format, inlink->w, inlink->h, yading@11: tff ? FIELD_LOWER : FIELD_UPPER, 1, tff ? FIELD_LOWER : FIELD_UPPER, yading@11: tinterlace->flags); yading@11: av_frame_free(&tinterlace->next); yading@11: break; yading@11: case MODE_INTERLACEX2: /* re-interlace preserving image height, double frame rate */ yading@11: /* output current frame first */ yading@11: out = av_frame_clone(cur); yading@11: if (!out) yading@11: return AVERROR(ENOMEM); yading@11: out->interlaced_frame = 1; yading@11: yading@11: if ((ret = ff_filter_frame(outlink, out)) < 0) yading@11: return ret; yading@11: yading@11: /* output mix of current and next frame */ yading@11: tff = next->top_field_first; yading@11: out = ff_get_video_buffer(outlink, outlink->w, outlink->h); yading@11: if (!out) yading@11: return AVERROR(ENOMEM); yading@11: av_frame_copy_props(out, next); yading@11: out->interlaced_frame = 1; yading@11: yading@11: /* write current frame second field lines into the second field of the new frame */ yading@11: copy_picture_field(out->data, out->linesize, yading@11: (const uint8_t **)cur->data, cur->linesize, yading@11: inlink->format, inlink->w, inlink->h, yading@11: tff ? FIELD_LOWER : FIELD_UPPER, 1, tff ? FIELD_LOWER : FIELD_UPPER, yading@11: tinterlace->flags); yading@11: /* write next frame first field lines into the first field of the new frame */ yading@11: copy_picture_field(out->data, out->linesize, yading@11: (const uint8_t **)next->data, next->linesize, yading@11: inlink->format, inlink->w, inlink->h, yading@11: tff ? FIELD_UPPER : FIELD_LOWER, 1, tff ? FIELD_UPPER : FIELD_LOWER, yading@11: tinterlace->flags); yading@11: break; yading@11: default: yading@11: av_assert0(0); yading@11: } yading@11: yading@11: ret = ff_filter_frame(outlink, out); yading@11: tinterlace->frame++; yading@11: yading@11: return ret; yading@11: } yading@11: yading@11: static int request_frame(AVFilterLink *outlink) yading@11: { yading@11: TInterlaceContext *tinterlace = outlink->src->priv; yading@11: AVFilterLink *inlink = outlink->src->inputs[0]; yading@11: yading@11: do { yading@11: int ret; yading@11: yading@11: if ((ret = ff_request_frame(inlink)) < 0) yading@11: return ret; yading@11: } while (!tinterlace->cur); yading@11: yading@11: return 0; yading@11: } yading@11: yading@11: static const AVFilterPad tinterlace_inputs[] = { yading@11: { yading@11: .name = "default", yading@11: .type = AVMEDIA_TYPE_VIDEO, yading@11: .filter_frame = filter_frame, yading@11: }, yading@11: { NULL } yading@11: }; yading@11: yading@11: static const AVFilterPad tinterlace_outputs[] = { yading@11: { yading@11: .name = "default", yading@11: .type = AVMEDIA_TYPE_VIDEO, yading@11: .config_props = config_out_props, yading@11: .request_frame = request_frame, yading@11: }, yading@11: { NULL } yading@11: }; yading@11: yading@11: AVFilter avfilter_vf_tinterlace = { yading@11: .name = "tinterlace", yading@11: .description = NULL_IF_CONFIG_SMALL("Perform temporal field interlacing."), yading@11: .priv_size = sizeof(TInterlaceContext), yading@11: .uninit = uninit, yading@11: .query_formats = query_formats, yading@11: .inputs = tinterlace_inputs, yading@11: .outputs = tinterlace_outputs, yading@11: .priv_class = &tinterlace_class, yading@11: };