/*
 * Copyright (c) 2010 Niel van der Westhuizen <nielkie@gmail.com>
 * Copyright (c) 2002 A'rpi
 * Copyright (c) 1997-2001 ZSNES Team ( zsknight@zsnes.com / _demo_@zsnes.com )
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with FFmpeg; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 */

/**
 * @file
 * Super 2xSaI video filter
 * Ported from MPlayer libmpcodecs/vf_2xsai.c.
 */

#include "libavutil/pixdesc.h"
#include "libavutil/intreadwrite.h"
#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"

typedef struct {
    /* masks used for two pixels interpolation */
    uint32_t hi_pixel_mask;
    uint32_t lo_pixel_mask;

    /* masks used for four pixels interpolation */
    uint32_t q_hi_pixel_mask;
    uint32_t q_lo_pixel_mask;

    int bpp; ///< bytes per pixel, pixel stride for each (packed) pixel
    int is_be;
} Super2xSaIContext;

#define GET_RESULT(A, B, C, D) ((A != C || A != D) - (B != C || B != D))

#define INTERPOLATE(A, B) (((A & hi_pixel_mask) >> 1) + ((B & hi_pixel_mask) >> 1) + (A & B & lo_pixel_mask))

#define Q_INTERPOLATE(A, B, C, D) ((A & q_hi_pixel_mask) >> 2) + ((B & q_hi_pixel_mask) >> 2) + ((C & q_hi_pixel_mask) >> 2) + ((D & q_hi_pixel_mask) >> 2) \
        + ((((A & q_lo_pixel_mask) + (B & q_lo_pixel_mask) + (C & q_lo_pixel_mask) + (D & q_lo_pixel_mask)) >> 2) & q_lo_pixel_mask)

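/*
 * Note on the interpolation macros: the hi/lo mask pairs let whole packed
 * pixels be averaged per colour field without the fields bleeding into each
 * other.  (A & hi_pixel_mask) >> 1 halves every field after dropping its
 * lowest bit, and (A & B & lo_pixel_mask) restores the carry when both
 * lowest bits are set.  For example, with an 8-bit field (hi = 0xFE,
 * lo = 0x01), A = 0x03 and B = 0x05 give (0x02 >> 1) + (0x04 >> 1) + 0x01 =
 * 0x04, the exact average.  Q_INTERPOLATE does the same for four pixels,
 * using the two lowest bits of each field.
 *
 * super2xsai() maps each source pixel onto a 2x2 block of the output:
 * product1a/product1b form the top row and product2a/product2b the bottom
 * row, selected from a sliding 4x4 window of source pixels (color[4][4]).
 */
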
static void super2xsai(AVFilterContext *ctx,
                       uint8_t *src, int src_linesize,
                       uint8_t *dst, int dst_linesize,
                       int width, int height)
{
    Super2xSaIContext *sai = ctx->priv;
    unsigned int x, y;
    uint32_t color[4][4];
    unsigned char *src_line[4];
    const int bpp = sai->bpp;
    const uint32_t hi_pixel_mask = sai->hi_pixel_mask;
    const uint32_t lo_pixel_mask = sai->lo_pixel_mask;
    const uint32_t q_hi_pixel_mask = sai->q_hi_pixel_mask;
    const uint32_t q_lo_pixel_mask = sai->q_lo_pixel_mask;

    /* Point to the first 4 lines, first line is duplicated */
    src_line[0] = src;
    src_line[1] = src;
    src_line[2] = src + src_linesize*FFMIN(1, height-1);
    src_line[3] = src + src_linesize*FFMIN(2, height-1);

#define READ_COLOR4(dst, src_line, off) dst = *((const uint32_t *)src_line + off)
#define READ_COLOR3(dst, src_line, off) dst = AV_RL24(src_line + 3*off)
#define READ_COLOR2(dst, src_line, off) dst = sai->is_be ? AV_RB16(src_line + 2 * off) : AV_RL16(src_line + 2 * off)

    for (y = 0; y < height; y++) {
        uint8_t *dst_line[2];

        dst_line[0] = dst + dst_linesize*2*y;
        dst_line[1] = dst + dst_linesize*(2*y+1);

        switch (bpp) {
        case 4:
            READ_COLOR4(color[0][0], src_line[0], 0); color[0][1] = color[0][0]; READ_COLOR4(color[0][2], src_line[0], 1); READ_COLOR4(color[0][3], src_line[0], 2);
            READ_COLOR4(color[1][0], src_line[1], 0); color[1][1] = color[1][0]; READ_COLOR4(color[1][2], src_line[1], 1); READ_COLOR4(color[1][3], src_line[1], 2);
            READ_COLOR4(color[2][0], src_line[2], 0); color[2][1] = color[2][0]; READ_COLOR4(color[2][2], src_line[2], 1); READ_COLOR4(color[2][3], src_line[2], 2);
            READ_COLOR4(color[3][0], src_line[3], 0); color[3][1] = color[3][0]; READ_COLOR4(color[3][2], src_line[3], 1); READ_COLOR4(color[3][3], src_line[3], 2);
            break;
        case 3:
            READ_COLOR3(color[0][0], src_line[0], 0); color[0][1] = color[0][0]; READ_COLOR3(color[0][2], src_line[0], 1); READ_COLOR3(color[0][3], src_line[0], 2);
            READ_COLOR3(color[1][0], src_line[1], 0); color[1][1] = color[1][0]; READ_COLOR3(color[1][2], src_line[1], 1); READ_COLOR3(color[1][3], src_line[1], 2);
            READ_COLOR3(color[2][0], src_line[2], 0); color[2][1] = color[2][0]; READ_COLOR3(color[2][2], src_line[2], 1); READ_COLOR3(color[2][3], src_line[2], 2);
            READ_COLOR3(color[3][0], src_line[3], 0); color[3][1] = color[3][0]; READ_COLOR3(color[3][2], src_line[3], 1); READ_COLOR3(color[3][3], src_line[3], 2);
            break;
        default:
            READ_COLOR2(color[0][0], src_line[0], 0); color[0][1] = color[0][0]; READ_COLOR2(color[0][2], src_line[0], 1); READ_COLOR2(color[0][3], src_line[0], 2);
            READ_COLOR2(color[1][0], src_line[1], 0); color[1][1] = color[1][0]; READ_COLOR2(color[1][2], src_line[1], 1); READ_COLOR2(color[1][3], src_line[1], 2);
            READ_COLOR2(color[2][0], src_line[2], 0); color[2][1] = color[2][0]; READ_COLOR2(color[2][2], src_line[2], 1); READ_COLOR2(color[2][3], src_line[2], 2);
            READ_COLOR2(color[3][0], src_line[3], 0); color[3][1] = color[3][0]; READ_COLOR2(color[3][2], src_line[3], 1); READ_COLOR2(color[3][3], src_line[3], 2);
        }

        for (x = 0; x < width; x++) {
            uint32_t product1a, product1b, product2a, product2b;

//---------------------------------------  B0 B1 B2 B3     0  1  2  3
//                                          4  5* 6  S2 ->  4  5* 6  7
//                                          1  2  3  S1     8  9 10 11
//                                         A0 A1 A2 A3     12 13 14 15
//--------------------------------------
            if (color[2][1] == color[1][2] && color[1][1] != color[2][2]) {
                product2b = color[2][1];
                product1b = product2b;
            } else if (color[1][1] == color[2][2] && color[2][1] != color[1][2]) {
                product2b = color[1][1];
                product1b = product2b;
            } else if (color[1][1] == color[2][2] && color[2][1] == color[1][2]) {
                int r = 0;

                r += GET_RESULT(color[1][2], color[1][1], color[1][0], color[3][1]);
                r += GET_RESULT(color[1][2], color[1][1], color[2][0], color[0][1]);
                r += GET_RESULT(color[1][2], color[1][1], color[3][2], color[2][3]);
                r += GET_RESULT(color[1][2], color[1][1], color[0][2], color[1][3]);

                if (r > 0)
                    product1b = color[1][2];
                else if (r < 0)
                    product1b = color[1][1];
                else
                    product1b = INTERPOLATE(color[1][1], color[1][2]);

                product2b = product1b;
            } else {
                if (color[1][2] == color[2][2] && color[2][2] == color[3][1] && color[2][1] != color[3][2] && color[2][2] != color[3][0])
                    product2b = Q_INTERPOLATE(color[2][2], color[2][2], color[2][2], color[2][1]);
                else if (color[1][1] == color[2][1] && color[2][1] == color[3][2] && color[3][1] != color[2][2] && color[2][1] != color[3][3])
                    product2b = Q_INTERPOLATE(color[2][1], color[2][1], color[2][1], color[2][2]);
                else
                    product2b = INTERPOLATE(color[2][1], color[2][2]);

                if (color[1][2] == color[2][2] && color[1][2] == color[0][1] && color[1][1] != color[0][2] && color[1][2] != color[0][0])
                    product1b = Q_INTERPOLATE(color[1][2], color[1][2], color[1][2], color[1][1]);
                else if (color[1][1] == color[2][1] && color[1][1] == color[0][2] && color[0][1] != color[1][2] && color[1][1] != color[0][3])
                    product1b = Q_INTERPOLATE(color[1][2], color[1][1], color[1][1], color[1][1]);
                else
                    product1b = INTERPOLATE(color[1][1], color[1][2]);
            }

            if (color[1][1] == color[2][2] && color[2][1] != color[1][2] && color[1][0] == color[1][1] && color[1][1] != color[3][2])
                product2a = INTERPOLATE(color[2][1], color[1][1]);
            else if (color[1][1] == color[2][0] && color[1][2] == color[1][1] && color[1][0] != color[2][1] && color[1][1] != color[3][0])
                product2a = INTERPOLATE(color[2][1], color[1][1]);
            else
                product2a = color[2][1];

            if (color[2][1] == color[1][2] && color[1][1] != color[2][2] && color[2][0] == color[2][1] && color[2][1] != color[0][2])
                product1a = INTERPOLATE(color[2][1], color[1][1]);
            else if (color[1][0] == color[2][1] && color[2][2] == color[2][1] && color[2][0] != color[1][1] && color[2][1] != color[0][0])
                product1a = INTERPOLATE(color[2][1], color[1][1]);
            else
                product1a = color[1][1];

            /* Set the calculated pixels */
            switch (bpp) {
            case 4:
                AV_WN32A(dst_line[0] + x * 8,     product1a);
                AV_WN32A(dst_line[0] + x * 8 + 4, product1b);
                AV_WN32A(dst_line[1] + x * 8,     product2a);
                AV_WN32A(dst_line[1] + x * 8 + 4, product2b);
                break;
            case 3:
                AV_WL24(dst_line[0] + x * 6,     product1a);
                AV_WL24(dst_line[0] + x * 6 + 3, product1b);
                AV_WL24(dst_line[1] + x * 6,     product2a);
                AV_WL24(dst_line[1] + x * 6 + 3, product2b);
                break;
            default: // bpp = 2
                if (sai->is_be) {
                    AV_WB32(dst_line[0] + x * 4, product1a | (product1b << 16));
                    AV_WB32(dst_line[1] + x * 4, product2a | (product2b << 16));
                } else {
                    AV_WL32(dst_line[0] + x * 4, product1a | (product1b << 16));
                    AV_WL32(dst_line[1] + x * 4, product2a | (product2b << 16));
                }
            }

            /* Move color matrix forward */
            color[0][0] = color[0][1]; color[0][1] = color[0][2]; color[0][2] = color[0][3];
            color[1][0] = color[1][1]; color[1][1] = color[1][2]; color[1][2] = color[1][3];
            color[2][0] = color[2][1]; color[2][1] = color[2][2]; color[2][2] = color[2][3];
            color[3][0] = color[3][1]; color[3][1] = color[3][2]; color[3][2] = color[3][3];

            if (x < width - 3) {
                x += 3;
                switch (bpp) {
                case 4:
                    READ_COLOR4(color[0][3], src_line[0], x);
                    READ_COLOR4(color[1][3], src_line[1], x);
                    READ_COLOR4(color[2][3], src_line[2], x);
                    READ_COLOR4(color[3][3], src_line[3], x);
                    break;
                case 3:
                    READ_COLOR3(color[0][3], src_line[0], x);
                    READ_COLOR3(color[1][3], src_line[1], x);
                    READ_COLOR3(color[2][3], src_line[2], x);
                    READ_COLOR3(color[3][3], src_line[3], x);
                    break;
                default: /* case 2 */
                    READ_COLOR2(color[0][3], src_line[0], x);
                    READ_COLOR2(color[1][3], src_line[1], x);
                    READ_COLOR2(color[2][3], src_line[2], x);
                    READ_COLOR2(color[3][3], src_line[3], x);
                }
                x -= 3;
            }
        }

        /* We're done with one line, so we shift the source lines up */
        src_line[0] = src_line[1];
        src_line[1] = src_line[2];
        src_line[2] = src_line[3];

        /* Read next line */
        src_line[3] = src_line[2];
        if (y < height - 3)
            src_line[3] += src_linesize;
    } // y loop
}

static int query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGBA, AV_PIX_FMT_BGRA, AV_PIX_FMT_ARGB, AV_PIX_FMT_ABGR,
        AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24,
        AV_PIX_FMT_RGB565BE, AV_PIX_FMT_BGR565BE, AV_PIX_FMT_RGB555BE, AV_PIX_FMT_BGR555BE,
        AV_PIX_FMT_RGB565LE, AV_PIX_FMT_BGR565LE, AV_PIX_FMT_RGB555LE, AV_PIX_FMT_BGR555LE,
        AV_PIX_FMT_NONE
    };

    ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));
    return 0;
}

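/*
 * config_input() picks the per-format mask constants used by the
 * interpolation macros.  The hi masks keep every bit of each colour field
 * except the lowest one and the lo masks keep just those lowest bits;
 * q_hi/q_lo do the same for the two lowest bits of each field:
 *   - 8-bit channels: 0xFE/0x01 and 0xFC/0x03, replicated over the 32-bit pixel;
 *   - RGB/BGR565:     0xF7DE/0x0821 and 0xE79C/0x1863 for the 5-6-5 fields;
 *   - RGB/BGR555:     0x7BDE/0x0421 and 0x739C/0x0C63 for the 1-5-5-5 layout;
 * the 16-bit masks are duplicated into both halves of the 32-bit constants.
 */
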
static int config_input(AVFilterLink *inlink)
{
    Super2xSaIContext *sai = inlink->dst->priv;

    sai->hi_pixel_mask   = 0xFEFEFEFE;
    sai->lo_pixel_mask   = 0x01010101;
    sai->q_hi_pixel_mask = 0xFCFCFCFC;
    sai->q_lo_pixel_mask = 0x03030303;
    sai->bpp = 4;

    switch (inlink->format) {
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_BGR24:
        sai->bpp = 3;
        break;

    case AV_PIX_FMT_RGB565BE:
    case AV_PIX_FMT_BGR565BE:
        sai->is_be = 1;
    case AV_PIX_FMT_RGB565LE:
    case AV_PIX_FMT_BGR565LE:
        sai->hi_pixel_mask   = 0xF7DEF7DE;
        sai->lo_pixel_mask   = 0x08210821;
        sai->q_hi_pixel_mask = 0xE79CE79C;
        sai->q_lo_pixel_mask = 0x18631863;
        sai->bpp = 2;
        break;

    case AV_PIX_FMT_BGR555BE:
    case AV_PIX_FMT_RGB555BE:
        sai->is_be = 1;
    case AV_PIX_FMT_BGR555LE:
    case AV_PIX_FMT_RGB555LE:
        sai->hi_pixel_mask   = 0x7BDE7BDE;
        sai->lo_pixel_mask   = 0x04210421;
        sai->q_hi_pixel_mask = 0x739C739C;
        sai->q_lo_pixel_mask = 0x0C630C63;
        sai->bpp = 2;
        break;
    }

    return 0;
}

static int config_output(AVFilterLink *outlink)
{
    AVFilterLink *inlink = outlink->src->inputs[0];

    outlink->w = inlink->w*2;
    outlink->h = inlink->h*2;

    av_log(inlink->dst, AV_LOG_VERBOSE, "fmt:%s size:%dx%d -> size:%dx%d\n",
           av_get_pix_fmt_name(inlink->format),
           inlink->w, inlink->h, outlink->w, outlink->h);

    return 0;
}

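/*
 * filter_frame() requests a destination buffer at the doubled output
 * dimensions, copies the frame properties, runs the scaler on the single
 * packed plane and forwards the result downstream; the input frame is
 * freed on both the error and the success path.
 */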
static int filter_frame(AVFilterLink *inlink, AVFrame *inpicref)
{
    AVFilterLink *outlink = inlink->dst->outputs[0];
    AVFrame *outpicref = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!outpicref) {
        av_frame_free(&inpicref);
        return AVERROR(ENOMEM);
    }
    av_frame_copy_props(outpicref, inpicref);
    outpicref->width  = outlink->w;
    outpicref->height = outlink->h;

    super2xsai(inlink->dst, inpicref->data[0], inpicref->linesize[0],
               outpicref->data[0], outpicref->linesize[0],
               inlink->w, inlink->h);

    av_frame_free(&inpicref);
    return ff_filter_frame(outlink, outpicref);
}

static const AVFilterPad super2xsai_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_input,
        .filter_frame = filter_frame,
    },
    { NULL }
};

static const AVFilterPad super2xsai_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
    },
    { NULL }
};

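/*
 * Example invocation (the filter takes no options), assuming an ffmpeg
 * build with this filter enabled:
 *   ffmpeg -i input.png -vf super2xsai output.png
 */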
AVFilter avfilter_vf_super2xsai = {
    .name          = "super2xsai",
    .description   = NULL_IF_CONFIG_SMALL("Scale the input by 2x using the Super2xSaI pixel art algorithm."),
    .priv_size     = sizeof(Super2xSaIContext),
    .query_formats = query_formats,
    .inputs        = super2xsai_inputs,
    .outputs       = super2xsai_outputs,
};