FFmpeg  4.4.5
f_graphmonitor.c
/*
 * Copyright (c) 2018 Paul B Mahol
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <float.h>

#include "libavutil/pixdesc.h"
#include "libavutil/eval.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"
#include "libavutil/timestamp.h"
#include "libavutil/xga_font_data.h"
#include "avfilter.h"
#include "filters.h"
#include "formats.h"
#include "internal.h"
#include "video.h"

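/*
 * Per-instance state: output geometry, drawing colors (RGBA),
 * the user-selected mode/flags and the timestamps used to pace output frames.
 */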
typedef struct GraphMonitorContext {
    const AVClass *class;

    int w, h;
    float opacity;
    int mode;
    int flags;
    AVRational frame_rate;

    int64_t pts;
    int64_t next_pts;
    uint8_t white[4];
    uint8_t yellow[4];
    uint8_t red[4];
    uint8_t green[4];
    uint8_t blue[4];
    uint8_t bg[4];
} GraphMonitorContext;

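/* Bits of the "flags" option: which per-link statistics are drawn. */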
enum {
    MODE_QUEUE = 1 << 0,
    MODE_FCIN  = 1 << 1,
    MODE_FCOUT = 1 << 2,
    MODE_PTS   = 1 << 3,
    MODE_TIME  = 1 << 4,
    MODE_TB    = 1 << 5,
    MODE_FMT   = 1 << 6,
    MODE_SIZE  = 1 << 7,
    MODE_RATE  = 1 << 8,
    MODE_EOF   = 1 << 9,
};

#define OFFSET(x) offsetof(GraphMonitorContext, x)
#define VF AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM

static const AVOption graphmonitor_options[] = {
    { "size",    "set monitor size",  OFFSET(w),       AV_OPT_TYPE_IMAGE_SIZE, {.str="hd720"}, 0, 0, VF },
    { "s",       "set monitor size",  OFFSET(w),       AV_OPT_TYPE_IMAGE_SIZE, {.str="hd720"}, 0, 0, VF },
    { "opacity", "set video opacity", OFFSET(opacity), AV_OPT_TYPE_FLOAT,      {.dbl=.9},      0, 1, VF },
    { "o",       "set video opacity", OFFSET(opacity), AV_OPT_TYPE_FLOAT,      {.dbl=.9},      0, 1, VF },
    { "mode", "set mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, VF, "mode" },
    { "m",    "set mode", OFFSET(mode), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, VF, "mode" },
    { "full",    NULL, 0, AV_OPT_TYPE_CONST, {.i64=0}, 0, 0, VF, "mode" },
    { "compact", NULL, 0, AV_OPT_TYPE_CONST, {.i64=1}, 0, 0, VF, "mode" },
    { "flags", "set flags", OFFSET(flags), AV_OPT_TYPE_FLAGS, {.i64=MODE_QUEUE}, 0, INT_MAX, VF, "flags" },
    { "f",     "set flags", OFFSET(flags), AV_OPT_TYPE_FLAGS, {.i64=MODE_QUEUE}, 0, INT_MAX, VF, "flags" },
    { "queue",           NULL, 0, AV_OPT_TYPE_CONST, {.i64=MODE_QUEUE}, 0, 0, VF, "flags" },
    { "frame_count_in",  NULL, 0, AV_OPT_TYPE_CONST, {.i64=MODE_FCIN},  0, 0, VF, "flags" },
    { "frame_count_out", NULL, 0, AV_OPT_TYPE_CONST, {.i64=MODE_FCOUT}, 0, 0, VF, "flags" },
    { "pts",             NULL, 0, AV_OPT_TYPE_CONST, {.i64=MODE_PTS},   0, 0, VF, "flags" },
    { "time",            NULL, 0, AV_OPT_TYPE_CONST, {.i64=MODE_TIME},  0, 0, VF, "flags" },
    { "timebase",        NULL, 0, AV_OPT_TYPE_CONST, {.i64=MODE_TB},    0, 0, VF, "flags" },
    { "format",          NULL, 0, AV_OPT_TYPE_CONST, {.i64=MODE_FMT},   0, 0, VF, "flags" },
    { "size",            NULL, 0, AV_OPT_TYPE_CONST, {.i64=MODE_SIZE},  0, 0, VF, "flags" },
    { "rate",            NULL, 0, AV_OPT_TYPE_CONST, {.i64=MODE_RATE},  0, 0, VF, "flags" },
    { "eof",             NULL, 0, AV_OPT_TYPE_CONST, {.i64=MODE_EOF},   0, 0, VF, "flags" },
    { "rate", "set video rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, VF },
    { "r",    "set video rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str = "25"}, 0, INT_MAX, VF },
    { NULL }
};
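
/*
 * Usage sketch (illustrative, not part of the original file): the filter is
 * typically appended to a filtergraph on the command line, with flag names
 * joined by '+', e.g.
 *
 *   ffplay -f lavfi "testsrc=size=hd720,graphmonitor=flags=queue+pts+time:rate=25"
 */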

static int query_formats(AVFilterContext *ctx)
{
    AVFilterLink *outlink = ctx->outputs[0];
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_RGBA,
        AV_PIX_FMT_NONE
    };
    int ret;

    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
    if ((ret = ff_formats_ref(fmts_list, &outlink->incfg.formats)) < 0)
        return ret;

    return 0;
}

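/* Fill the whole RGBA output frame with the background color; bg[3] carries the opacity. */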
static void clear_image(GraphMonitorContext *s, AVFrame *out, AVFilterLink *outlink)
{
    int bg = AV_RN32(s->bg);

    for (int i = 0; i < out->height; i++)
        for (int j = 0; j < out->width; j++)
            AV_WN32(out->data[0] + i * out->linesize[0] + j * 4, bg);
}

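/*
 * Draw txt at pixel position (x, y) using the built-in 8x8 CGA bitmap font,
 * writing only the RGB channels of the RGBA frame.
 */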
static void drawtext(AVFrame *pic, int x, int y, const char *txt, uint8_t *color)
{
    const uint8_t *font;
    int font_height;
    int i;

    font = avpriv_cga_font, font_height = 8;

    if (y + 8 >= pic->height ||
        x + strlen(txt) * 8 >= pic->width)
        return;

    for (i = 0; txt[i]; i++) {
        int char_y, mask;

        uint8_t *p = pic->data[0] + y*pic->linesize[0] + (x + i*8)*4;
        for (char_y = 0; char_y < font_height; char_y++) {
            for (mask = 0x80; mask; mask >>= 1) {
                if (font[txt[i] * font_height + char_y] & mask) {
                    p[0] = color[0];
                    p[1] = color[1];
                    p[2] = color[2];
                }
                p += 4;
            }
            p += pic->linesize[0] - 8 * 4;
        }
    }
}

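/* Return 1 if any input or output link of this filter has frames queued. */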
static int filter_have_queued(AVFilterContext *filter)
{
    for (int j = 0; j < filter->nb_inputs; j++) {
        AVFilterLink *l = filter->inputs[j];
        size_t frames = ff_inlink_queued_frames(l);

        if (frames)
            return 1;
    }

    for (int j = 0; j < filter->nb_outputs; j++) {
        AVFilterLink *l = filter->outputs[j];
        size_t frames = ff_inlink_queued_frames(l);

        if (frames)
            return 1;
    }

    return 0;
}

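/*
 * Append the statistics selected via the flags option for one link, starting
 * at (xpos, ypos); the queue count is color coded by how large it is.
 */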
static void draw_items(AVFilterContext *ctx, AVFrame *out,
                       int xpos, int ypos,
                       AVFilterLink *l,
                       size_t frames)
{
    GraphMonitorContext *s = ctx->priv;
    char buffer[1024] = { 0 };

    if (s->flags & MODE_FMT) {
        if (l->type == AVMEDIA_TYPE_VIDEO) {
            snprintf(buffer, sizeof(buffer)-1, " | format: %s",
                     av_get_pix_fmt_name(l->format));
        } else if (l->type == AVMEDIA_TYPE_AUDIO) {
            snprintf(buffer, sizeof(buffer)-1, " | format: %s",
                     av_get_sample_fmt_name(l->format));
        }
        drawtext(out, xpos, ypos, buffer, s->white);
        xpos += strlen(buffer) * 8;
    }
    if (s->flags & MODE_SIZE) {
        if (l->type == AVMEDIA_TYPE_VIDEO) {
            snprintf(buffer, sizeof(buffer)-1, " | size: %dx%d", l->w, l->h);
        } else if (l->type == AVMEDIA_TYPE_AUDIO) {
            snprintf(buffer, sizeof(buffer)-1, " | channels: %d", l->channels);
        }
        drawtext(out, xpos, ypos, buffer, s->white);
        xpos += strlen(buffer) * 8;
    }
    if (s->flags & MODE_RATE) {
        if (l->type == AVMEDIA_TYPE_VIDEO) {
            snprintf(buffer, sizeof(buffer)-1, " | fps: %d/%d", l->frame_rate.num, l->frame_rate.den);
        } else if (l->type == AVMEDIA_TYPE_AUDIO) {
            snprintf(buffer, sizeof(buffer)-1, " | samplerate: %d", l->sample_rate);
        }
        drawtext(out, xpos, ypos, buffer, s->white);
        xpos += strlen(buffer) * 8;
    }
    if (s->flags & MODE_TB) {
        snprintf(buffer, sizeof(buffer)-1, " | tb: %d/%d", l->time_base.num, l->time_base.den);
        drawtext(out, xpos, ypos, buffer, s->white);
        xpos += strlen(buffer) * 8;
    }
    if (s->flags & MODE_QUEUE) {
        snprintf(buffer, sizeof(buffer)-1, " | queue: ");
        drawtext(out, xpos, ypos, buffer, s->white);
        xpos += strlen(buffer) * 8;
        snprintf(buffer, sizeof(buffer)-1, "%"SIZE_SPECIFIER, frames);
        drawtext(out, xpos, ypos, buffer, frames > 0 ? frames >= 10 ? frames >= 50 ? s->red : s->yellow : s->green : s->white);
        xpos += strlen(buffer) * 8;
    }
    if (s->flags & MODE_FCIN) {
        snprintf(buffer, sizeof(buffer)-1, " | in: %"PRId64, l->frame_count_in);
        drawtext(out, xpos, ypos, buffer, s->white);
        xpos += strlen(buffer) * 8;
    }
    if (s->flags & MODE_FCOUT) {
        snprintf(buffer, sizeof(buffer)-1, " | out: %"PRId64, l->frame_count_out);
        drawtext(out, xpos, ypos, buffer, s->white);
        xpos += strlen(buffer) * 8;
    }
    if (s->flags & MODE_PTS) {
        snprintf(buffer, sizeof(buffer)-1, " | pts: %s", av_ts2str(l->current_pts_us));
        drawtext(out, xpos, ypos, buffer, s->white);
        xpos += strlen(buffer) * 8;
    }
    if (s->flags & MODE_TIME) {
        snprintf(buffer, sizeof(buffer)-1, " | time: %s", av_ts2timestr(l->current_pts_us, &AV_TIME_BASE_Q));
        drawtext(out, xpos, ypos, buffer, s->white);
        xpos += strlen(buffer) * 8;
    }
    if (s->flags & MODE_EOF && ff_outlink_get_status(l)) {
        snprintf(buffer, sizeof(buffer)-1, " | eof");
        drawtext(out, xpos, ypos, buffer, s->blue);
        xpos += strlen(buffer) * 8;
    }
}

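/*
 * Render one monitor frame: walk every filter in the graph, draw its instance
 * and filter name plus per-link statistics, then send the frame downstream.
 */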
static int create_frame(AVFilterContext *ctx, int64_t pts)
{
    GraphMonitorContext *s = ctx->priv;
    AVFilterLink *outlink = ctx->outputs[0];
    AVFrame *out;
    int xpos, ypos = 0;

    out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!out)
        return AVERROR(ENOMEM);

    clear_image(s, out, outlink);

    for (int i = 0; i < ctx->graph->nb_filters; i++) {
        AVFilterContext *filter = ctx->graph->filters[i];
        char buffer[1024] = { 0 };

        if (s->mode && !filter_have_queued(filter))
            continue;

        xpos = 0;
        drawtext(out, xpos, ypos, filter->name, s->white);
        xpos += strlen(filter->name) * 8 + 10;
        drawtext(out, xpos, ypos, filter->filter->name, s->white);
        ypos += 10;
        for (int j = 0; j < filter->nb_inputs; j++) {
            AVFilterLink *l = filter->inputs[j];
            size_t frames = ff_inlink_queued_frames(l);

            if (s->mode && !frames)
                continue;

            xpos = 10;
            snprintf(buffer, sizeof(buffer)-1, "in%d: ", j);
            drawtext(out, xpos, ypos, buffer, s->white);
            xpos += strlen(buffer) * 8;
            drawtext(out, xpos, ypos, l->src->name, s->white);
            xpos += strlen(l->src->name) * 8 + 10;
            draw_items(ctx, out, xpos, ypos, l, frames);
            ypos += 10;
        }

        ypos += 2;
        for (int j = 0; j < filter->nb_outputs; j++) {
            AVFilterLink *l = filter->outputs[j];
            size_t frames = ff_inlink_queued_frames(l);

            if (s->mode && !frames)
                continue;

            xpos = 10;
            snprintf(buffer, sizeof(buffer)-1, "out%d: ", j);
            drawtext(out, xpos, ypos, buffer, s->white);
            xpos += strlen(buffer) * 8;
            drawtext(out, xpos, ypos, l->dst->name, s->white);
            xpos += strlen(l->dst->name) * 8 + 10;
            draw_items(ctx, out, xpos, ypos, l, frames);
            ypos += 10;
        }
        ypos += 5;
    }

    out->pts = pts;
    s->pts = pts + 1;
    return ff_filter_frame(outlink, out);
}

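/*
 * activate(): consume one queued input frame (if any) to advance the clock,
 * and emit a monitor frame whenever the output side wants one.
 */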
static int activate(AVFilterContext *ctx)
{
    GraphMonitorContext *s = ctx->priv;
    AVFilterLink *inlink = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];
    int64_t pts = AV_NOPTS_VALUE;

    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);

    if (ff_inlink_queued_frames(inlink)) {
        AVFrame *frame = NULL;
        int ret;

        ret = ff_inlink_consume_frame(inlink, &frame);
        if (ret < 0)
            return ret;
        if (ret > 0) {
            pts = frame->pts;
            av_frame_free(&frame);
        }
    }

    if (pts != AV_NOPTS_VALUE) {
        pts = av_rescale_q(pts, inlink->time_base, outlink->time_base);
        if (s->pts == AV_NOPTS_VALUE)
            s->pts = pts;
        s->next_pts = pts;
    }

    if (s->pts < s->next_pts && ff_outlink_frame_wanted(outlink))
        return create_frame(ctx, s->pts);

    FF_FILTER_FORWARD_STATUS(inlink, outlink);
    FF_FILTER_FORWARD_WANTED(outlink, inlink);

    return FFERROR_NOT_READY;
}

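/* Set up the drawing colors, reset the timestamps and configure the output link from the options. */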
static int config_output(AVFilterLink *outlink)
{
    GraphMonitorContext *s = outlink->src->priv;

    s->bg[3] = 255 * s->opacity;
    s->white[0] = s->white[1] = s->white[2] = 255;
    s->yellow[0] = s->yellow[1] = 255;
    s->red[0] = 255;
    s->green[1] = 255;
    s->blue[2] = 255;
    s->pts = AV_NOPTS_VALUE;
    s->next_pts = AV_NOPTS_VALUE;
    outlink->w = s->w;
    outlink->h = s->h;
    outlink->sample_aspect_ratio = (AVRational){1,1};
    outlink->frame_rate = s->frame_rate;
    outlink->time_base = av_inv_q(s->frame_rate);

    return 0;
}

#if CONFIG_GRAPHMONITOR_FILTER

AVFILTER_DEFINE_CLASS(graphmonitor);

static const AVFilterPad graphmonitor_inputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
    },
    { NULL }
};

static const AVFilterPad graphmonitor_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
    },
    { NULL }
};

AVFilter ff_vf_graphmonitor = {
    .name = "graphmonitor",
    .description = NULL_IF_CONFIG_SMALL("Show various filtergraph stats."),
    .priv_size = sizeof(GraphMonitorContext),
    .priv_class = &graphmonitor_class,
    .query_formats = query_formats,
    .activate = activate,
    .inputs = graphmonitor_inputs,
    .outputs = graphmonitor_outputs,
};

#endif // CONFIG_GRAPHMONITOR_FILTER

#if CONFIG_AGRAPHMONITOR_FILTER

#define agraphmonitor_options graphmonitor_options
AVFILTER_DEFINE_CLASS(agraphmonitor);

static const AVFilterPad agraphmonitor_inputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_AUDIO,
    },
    { NULL }
};

static const AVFilterPad agraphmonitor_outputs[] = {
    {
        .name = "default",
        .type = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
    },
    { NULL }
};

AVFilter ff_avf_agraphmonitor = {
    .name = "agraphmonitor",
    .description = NULL_IF_CONFIG_SMALL("Show various filtergraph stats."),
    .priv_size = sizeof(GraphMonitorContext),
    .priv_class = &agraphmonitor_class,
    .query_formats = query_formats,
    .activate = activate,
    .inputs = agraphmonitor_inputs,
    .outputs = agraphmonitor_outputs,
};
#endif // CONFIG_AGRAPHMONITOR_FILTER
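
/*
 * Usage sketch (illustrative, not part of the original file): agraphmonitor
 * accepts audio input but still produces RGBA video, e.g.
 *
 *   ffplay -f lavfi "sine=frequency=440,agraphmonitor=size=hd720:flags=queue+time"
 */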