vsrc_mandelbrot.c
/*
 * Copyright (c) 2011 Michael Niedermayer
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 * The vsrc_color filter from Stefano Sabatini was used as a template to
 * create this filter.
 */

/**
 * @file
 * Mandelbrot fractal renderer
 */

#include "avfilter.h"
#include "formats.h"
#include "video.h"
#include "internal.h"
#include "libavutil/imgutils.h"
#include "libavutil/opt.h"
#include "libavutil/parseutils.h"
#include <float.h>
#include <math.h>

#define SQR(a) ((a)*(a))

enum Outer{
    ITERATION_COUNT,
    NORMALIZED_ITERATION_COUNT,
    WHITE,
    OUTZ,
};

enum Inner{
    BLACK,
    PERIOD,
    CONVTIME,
    MINCOL,
};

typedef struct Point {
    double p[2];
    uint32_t val;
} Point;

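/*
 * Filter state. Besides the user options, the context keeps two per-frame
 * caches of already computed points: point_cache holds the points of the
 * previous frame and next_cache collects those of the frame currently being
 * rendered (the two are swapped at the end of draw_mandelbrot). The zyklus
 * array stores the orbit of the point being iterated so that periodic,
 * non-escaping orbits can be detected before maxiter is reached.
 */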
typedef struct {
    const AVClass *class;
    int w, h;
    AVRational frame_rate;
    uint64_t pts;
    int maxiter;
    double start_x;
    double start_y;
    double start_scale;
    double end_scale;
    double end_pts;
    double bailout;
    enum Outer outer;
    enum Inner inner;
    int cache_allocated;
    int cache_used;
    Point *point_cache;
    Point *next_cache;
    double (*zyklus)[2];
    uint32_t dither;

    double morphxf;
    double morphyf;
    double morphamp;
} MBContext;

#define OFFSET(x) offsetof(MBContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

static const AVOption mandelbrot_options[] = {
    {"size", "set frame size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str="640x480"}, CHAR_MIN, CHAR_MAX, FLAGS },
    {"s", "set frame size", OFFSET(w), AV_OPT_TYPE_IMAGE_SIZE, {.str="640x480"}, CHAR_MIN, CHAR_MAX, FLAGS },
    {"rate", "set frame rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str="25"}, CHAR_MIN, CHAR_MAX, FLAGS },
    {"r", "set frame rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str="25"}, CHAR_MIN, CHAR_MAX, FLAGS },
    {"maxiter", "set max iterations number", OFFSET(maxiter), AV_OPT_TYPE_INT, {.i64=7189}, 1, INT_MAX, FLAGS },
    {"start_x", "set the initial x position", OFFSET(start_x), AV_OPT_TYPE_DOUBLE, {.dbl=-0.743643887037158704752191506114774}, -100, 100, FLAGS },
    {"start_y", "set the initial y position", OFFSET(start_y), AV_OPT_TYPE_DOUBLE, {.dbl=-0.131825904205311970493132056385139}, -100, 100, FLAGS },
    {"start_scale", "set the initial scale value", OFFSET(start_scale), AV_OPT_TYPE_DOUBLE, {.dbl=3.0}, 0, FLT_MAX, FLAGS },
    {"end_scale", "set the terminal scale value", OFFSET(end_scale), AV_OPT_TYPE_DOUBLE, {.dbl=0.3}, 0, FLT_MAX, FLAGS },
    {"end_pts", "set the terminal pts value", OFFSET(end_pts), AV_OPT_TYPE_DOUBLE, {.dbl=400}, 0, INT64_MAX, FLAGS },
    {"bailout", "set the bailout value", OFFSET(bailout), AV_OPT_TYPE_DOUBLE, {.dbl=10}, 0, FLT_MAX, FLAGS },
    {"morphxf", "set morph x frequency", OFFSET(morphxf), AV_OPT_TYPE_DOUBLE, {.dbl=0.01}, -FLT_MAX, FLT_MAX, FLAGS },
    {"morphyf", "set morph y frequency", OFFSET(morphyf), AV_OPT_TYPE_DOUBLE, {.dbl=0.0123}, -FLT_MAX, FLT_MAX, FLAGS },
    {"morphamp", "set morph amplitude", OFFSET(morphamp), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -FLT_MAX, FLT_MAX, FLAGS },

    {"outer", "set outer coloring mode", OFFSET(outer), AV_OPT_TYPE_INT, {.i64=NORMALIZED_ITERATION_COUNT}, 0, INT_MAX, FLAGS, "outer" },
    {"iteration_count", "set iteration count mode", 0, AV_OPT_TYPE_CONST, {.i64=ITERATION_COUNT}, INT_MIN, INT_MAX, FLAGS, "outer" },
    {"normalized_iteration_count", "set normalized iteration count mode", 0, AV_OPT_TYPE_CONST, {.i64=NORMALIZED_ITERATION_COUNT}, INT_MIN, INT_MAX, FLAGS, "outer" },
    {"white", "set white mode", 0, AV_OPT_TYPE_CONST, {.i64=WHITE}, INT_MIN, INT_MAX, FLAGS, "outer" },
    {"outz", "set outz mode", 0, AV_OPT_TYPE_CONST, {.i64=OUTZ}, INT_MIN, INT_MAX, FLAGS, "outer" },

    {"inner", "set inner coloring mode", OFFSET(inner), AV_OPT_TYPE_INT, {.i64=MINCOL}, 0, INT_MAX, FLAGS, "inner" },
    {"black", "set black mode", 0, AV_OPT_TYPE_CONST, {.i64=BLACK}, INT_MIN, INT_MAX, FLAGS, "inner"},
    {"period", "set period mode", 0, AV_OPT_TYPE_CONST, {.i64=PERIOD}, INT_MIN, INT_MAX, FLAGS, "inner"},
    {"convergence", "show time until convergence", 0, AV_OPT_TYPE_CONST, {.i64=CONVTIME}, INT_MIN, INT_MAX, FLAGS, "inner"},
    {"mincol", "color based on point closest to the origin of the iterations", 0, AV_OPT_TYPE_CONST, {.i64=MINCOL}, INT_MIN, INT_MAX, FLAGS, "inner"},

    {NULL},
};

AVFILTER_DEFINE_CLASS(mandelbrot);
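
/*
 * Usage sketch (illustrative, not part of the original file): the options
 * above are passed as a filtergraph string, typically through the lavfi
 * input device, e.g.
 *
 *     ffmpeg -f lavfi -i "mandelbrot=s=1280x720:r=30:maxiter=2048:outer=normalized_iteration_count" -t 10 out.mkv
 *
 * Every key corresponds to an AVOption declared in mandelbrot_options; the
 * output file and duration here are arbitrary.
 */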

static av_cold int init(AVFilterContext *ctx)
{
    MBContext *mb = ctx->priv;

    mb->bailout *= mb->bailout;     /* compared against |z|^2, so square it once here */

    mb->start_scale /=mb->h;        /* scale values are kept per pixel of image height */
    mb->end_scale /=mb->h;

    mb->cache_allocated = mb->w * mb->h * 3;    /* room for roughly 3 cached points per pixel */
    mb->cache_used = 0;
    mb->point_cache= av_malloc(sizeof(*mb->point_cache)*mb->cache_allocated);
    mb-> next_cache= av_malloc(sizeof(*mb-> next_cache)*mb->cache_allocated);
    mb-> zyklus    = av_malloc(sizeof(*mb->zyklus) * (mb->maxiter+16));

    return 0;
}

static av_cold void uninit(AVFilterContext *ctx)
{
    MBContext *mb = ctx->priv;

    av_freep(&mb->point_cache);
    av_freep(&mb-> next_cache);
    av_freep(&mb->zyklus);
}

static int query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_BGR32,
        AV_PIX_FMT_NONE
    };

    ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));
    return 0;
}

static int config_props(AVFilterLink *inlink)
{
    AVFilterContext *ctx = inlink->src;
    MBContext *mb = ctx->priv;

    if (av_image_check_size(mb->w, mb->h, 0, ctx) < 0)
        return AVERROR(EINVAL);

    inlink->w = mb->w;
    inlink->h = mb->h;
    inlink->time_base = av_inv_q(mb->frame_rate);

    return 0;
}

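/*
 * Replay points from the previous frame's point_cache: entries are consumed
 * in y order up to the given py, mapped to an x position at the current
 * scale and, when they fall inside the frame, written into the color line
 * and/or carried over into next_cache. Because consecutive frames differ
 * only by a small change of scale, most points do not have to be iterated
 * again. The cache is bypassed while morphing, since the same c then no
 * longer yields the same color from one frame to the next.
 */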
static void fill_from_cache(AVFilterContext *ctx, uint32_t *color, int *in_cidx, int *out_cidx, double py, double scale){
    MBContext *mb = ctx->priv;
    if(mb->morphamp)
        return;
    for(; *in_cidx < mb->cache_used; (*in_cidx)++){
        Point *p= &mb->point_cache[*in_cidx];
        int x;
        if(p->p[1] > py)
            break;
        x= round((p->p[0] - mb->start_x) / scale + mb->w/2);
        if(x<0 || x >= mb->w)
            continue;
        if(color) color[x] = p->val;
        if(out_cidx && *out_cidx < mb->cache_allocated)
            mb->next_cache[(*out_cidx)++]= *p;
    }
}

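/*
 * Try to guess the color of pixel (x, y) from neighbours that are already
 * known instead of iterating it. Depending on which neighbours hold a valid
 * (nonzero) color, two pairs are averaged per channel; the guess is rejected
 * when the pairs disagree too much, and it is only attempted sufficiently
 * far from the image center, where less fine detail is expected. Returns 1
 * if the pixel was filled in, 0 if it still has to be computed.
 */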
static int interpol(MBContext *mb, uint32_t *color, int x, int y, int linesize)
{
    uint32_t a,b,c,d, i;
    uint32_t ipol=0xFF000000;
    int dist;

    if(!x || !y || x+1==mb->w || y+1==mb->h)
        return 0;

    dist= FFMAX(FFABS(x-(mb->w>>1))*mb->h, FFABS(y-(mb->h>>1))*mb->w);

    if(dist<(mb->w*mb->h>>3))
        return 0;

    a=color[(x+1) + (y+0)*linesize];
    b=color[(x-1) + (y+1)*linesize];
    c=color[(x+0) + (y+1)*linesize];
    d=color[(x+1) + (y+1)*linesize];

    if(a&&c){
        b= color[(x-1) + (y+0)*linesize];
        d= color[(x+0) + (y-1)*linesize];
    }else if(b&&d){
        a= color[(x+1) + (y-1)*linesize];
        c= color[(x-1) + (y-1)*linesize];
    }else if(c){
        d= color[(x+0) + (y-1)*linesize];
        a= color[(x-1) + (y+0)*linesize];
        b= color[(x+1) + (y-1)*linesize];
    }else if(d){
        c= color[(x-1) + (y-1)*linesize];
        a= color[(x-1) + (y+0)*linesize];
        b= color[(x+1) + (y-1)*linesize];
    }else
        return 0;

    for(i=0; i<3; i++){
        int s= 8*i;
        uint8_t ac= a>>s;
        uint8_t bc= b>>s;
        uint8_t cc= c>>s;
        uint8_t dc= d>>s;
        int ipolab= (ac + bc);
        int ipolcd= (cc + dc);
        if(FFABS(ipolab - ipolcd) > 5)
            return 0;
        if(FFABS(ac-bc)+FFABS(cc-dc) > 20)
            return 0;
        ipol |= ((ipolab + ipolcd + 2)/4)<<s;
    }
    color[x + y*linesize]= ipol;
    return 1;
}

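/*
 * Render one frame into color[]. The zoom follows an exponential schedule,
 *
 *     scale(pts) = start_scale * (end_scale/start_scale)^(pts/end_pts)
 *
 * so scale(0) == start_scale, scale(end_pts) == end_scale and halfway through
 * the animation the scale is the geometric mean of the two. For every line,
 * points remembered from the previous frame are replayed via fill_from_cache;
 * the remaining pixels are either guessed with interpol() or computed with
 * the escape-time iteration below, and stored into next_cache for the next
 * frame.
 */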
static void draw_mandelbrot(AVFilterContext *ctx, uint32_t *color, int linesize, int64_t pts)
{
    MBContext *mb = ctx->priv;
    int x,y,i, in_cidx=0, next_cidx=0, tmp_cidx;
    double scale= mb->start_scale*pow(mb->end_scale/mb->start_scale, pts/mb->end_pts);
    int use_zyklus=0;
    fill_from_cache(ctx, NULL, &in_cidx, NULL, mb->start_y+scale*(-mb->h/2-0.5), scale);
    tmp_cidx= in_cidx;
    memset(color, 0, sizeof(*color)*mb->w);
    for(y=0; y<mb->h; y++){
        int y1= y+1;
        const double ci=mb->start_y+scale*(y-mb->h/2);
        fill_from_cache(ctx, NULL, &in_cidx, &next_cidx, ci, scale);
        if(y1<mb->h){
            memset(color+linesize*y1, 0, sizeof(*color)*mb->w);
            fill_from_cache(ctx, color+linesize*y1, &tmp_cidx, NULL, ci + 3*scale/2, scale);
        }

        for(x=0; x<mb->w; x++){
            float av_uninit(epsilon);
            const double cr=mb->start_x+scale*(x-mb->w/2);
            double zr=cr;
            double zi=ci;
            uint32_t c=0;
            double dv= mb->dither / (double)(1LL<<32);
            mb->dither= mb->dither*1664525+1013904223;

            if(color[x + y*linesize] & 0xFF000000)
                continue;
            if(!mb->morphamp){
                if(interpol(mb, color, x, y, linesize)){
                    if(next_cidx < mb->cache_allocated){
                        mb->next_cache[next_cidx  ].p[0]= cr;
                        mb->next_cache[next_cidx  ].p[1]= ci;
                        mb->next_cache[next_cidx++].val = color[x + y*linesize];
                    }
                    continue;
                }
            }else{
                zr += cos(pts * mb->morphxf) * mb->morphamp;
                zi += sin(pts * mb->morphyf) * mb->morphamp;
            }

            use_zyklus= (x==0 || mb->inner!=BLACK ||color[x-1 + y*linesize] == 0xFF000000);
            if(use_zyklus)
                epsilon= scale*1*sqrt(SQR(x-mb->w/2) + SQR(y-mb->h/2))/mb->w;

#define Z_Z2_C(outr,outi,inr,ini)\
            outr= inr*inr - ini*ini + cr;\
            outi= 2*inr*ini + ci;

#define Z_Z2_C_ZYKLUS(outr,outi,inr,ini, Z)\
            Z_Z2_C(outr,outi,inr,ini)\
            if(use_zyklus){\
                if(Z && fabs(mb->zyklus[i>>1][0]-outr)+fabs(mb->zyklus[i>>1][1]-outi) <= epsilon)\
                    break;\
            }\
            mb->zyklus[i][0]= outr;\
            mb->zyklus[i][1]= outi;

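            /*
             * Escape-time iteration z -> z*z + c, unrolled eight steps per
             * loop turn. Each Z_Z2_C_ZYKLUS stores z in mb->zyklus[i]; on
             * every second step it also compares the new z with the value
             * recorded half an orbit earlier (zyklus[i>>1]) and bails out
             * when the two are within epsilon, i.e. the orbit has become
             * (nearly) periodic and will never escape. Once |z|^2 exceeds
             * the bailout radius, the recorded orbit is rescanned from the
             * start of the unrolled block to find the exact escape iteration
             * used for coloring.
             */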
            for(i=0; i<mb->maxiter-8; i++){
                double t;
                Z_Z2_C_ZYKLUS(t, zi, zr, zi, 0)
                i++;
                Z_Z2_C_ZYKLUS(zr, zi, t, zi, 1)
                i++;
                Z_Z2_C_ZYKLUS(t, zi, zr, zi, 0)
                i++;
                Z_Z2_C_ZYKLUS(zr, zi, t, zi, 1)
                i++;
                Z_Z2_C_ZYKLUS(t, zi, zr, zi, 0)
                i++;
                Z_Z2_C_ZYKLUS(zr, zi, t, zi, 1)
                i++;
                Z_Z2_C_ZYKLUS(t, zi, zr, zi, 0)
                i++;
                Z_Z2_C_ZYKLUS(zr, zi, t, zi, 1)
                if(zr*zr + zi*zi > mb->bailout){
                    i-= FFMIN(7, i);
                    for(; i<mb->maxiter; i++){
                        zr= mb->zyklus[i][0];
                        zi= mb->zyklus[i][1];
                        if(zr*zr + zi*zi > mb->bailout){
                            switch(mb->outer){
                            case ITERATION_COUNT:
                                zr = i;
                                c = lrintf((sin(zr)+1)*127) + lrintf((sin(zr/1.234)+1)*127)*256*256 + lrintf((sin(zr/100)+1)*127)*256;
                                break;
                            case NORMALIZED_ITERATION_COUNT:
                                zr = i + log2(log(mb->bailout) / log(zr*zr + zi*zi));
                                c = lrintf((sin(zr)+1)*127) + lrintf((sin(zr/1.234)+1)*127)*256*256 + lrintf((sin(zr/100)+1)*127)*256;
                                break;
                            case WHITE:
                                c = 0xFFFFFF;
                                break;
                            case OUTZ:
                                zr /= mb->bailout;
                                zi /= mb->bailout;
                                c = (((int)(zr*128+128))&0xFF)*256 + (((int)(zi*128+128))&0xFF);
                            }
                            break;
                        }
                    }
                    break;
                }
            }
            if(!c){
                if(mb->inner==PERIOD){
                    int j;
                    for(j=i-1; j; j--)
                        if(SQR(mb->zyklus[j][0]-zr) + SQR(mb->zyklus[j][1]-zi) < epsilon*epsilon*10)
                            break;
                    if(j){
                        c= i-j;
                        c= ((c<<5)&0xE0) + ((c<<10)&0xE000) + ((c<<15)&0xE00000);
                    }
                }else if(mb->inner==CONVTIME){
                    c= floor(i*255.0/mb->maxiter+dv)*0x010101;
                } else if(mb->inner==MINCOL){
                    int j;
                    double closest=9999;
                    int closest_index=0;
                    for(j=i-1; j>=0; j--)
                        if(SQR(mb->zyklus[j][0]) + SQR(mb->zyklus[j][1]) < closest){
                            closest= SQR(mb->zyklus[j][0]) + SQR(mb->zyklus[j][1]);
                            closest_index= j;
                        }
                    closest = sqrt(closest);
                    c= lrintf((mb->zyklus[closest_index][0]/closest+1)*127+dv) + lrintf((mb->zyklus[closest_index][1]/closest+1)*127+dv)*256;
                }
            }
            c |= 0xFF000000;
            color[x + y*linesize]= c;
            if(next_cidx < mb->cache_allocated){
                mb->next_cache[next_cidx  ].p[0]= cr;
                mb->next_cache[next_cidx  ].p[1]= ci;
                mb->next_cache[next_cidx++].val = c;
            }
        }
        fill_from_cache(ctx, NULL, &in_cidx, &next_cidx, ci + scale/2, scale);
    }
    FFSWAP(void*, mb->next_cache, mb->point_cache);
    mb->cache_used = next_cidx;
    if(mb->cache_used == mb->cache_allocated)
        av_log(ctx, AV_LOG_INFO, "Mandelbrot cache is too small!\n");
}

static int request_frame(AVFilterLink *link)
{
    MBContext *mb = link->src->priv;
    AVFrame *picref = ff_get_video_buffer(link, mb->w, mb->h);
    if (!picref)
        return AVERROR(ENOMEM);

    picref->sample_aspect_ratio = (AVRational) {1, 1};
    picref->pts = mb->pts++;

    draw_mandelbrot(link->src, (uint32_t*)picref->data[0], picref->linesize[0]/4, picref->pts);
    return ff_filter_frame(link, picref);
}

static const AVFilterPad mandelbrot_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = request_frame,
        .config_props  = config_props,
    },
    { NULL },
};

AVFilter avfilter_vsrc_mandelbrot = {
    .name          = "mandelbrot",
    .description   = NULL_IF_CONFIG_SMALL("Render a Mandelbrot fractal."),

    .priv_size     = sizeof(MBContext),
    .init          = init,
    .uninit        = uninit,

    .query_formats = query_formats,
    .inputs        = NULL,
    .outputs       = mandelbrot_outputs,
    .priv_class    = &mandelbrot_class,
};