FFmpeg
cmdutils.c
1 /*
2  * Various utilities for command line tools
3  * Copyright (c) 2000-2003 Fabrice Bellard
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 #include <string.h>
23 #include <stdint.h>
24 #include <stdlib.h>
25 #include <errno.h>
26 #include <math.h>
27 
28 /* Include only the enabled headers since some compilers (namely, Sun
29  Studio) will not omit unused inline functions and create undefined
30  references to libraries that are not being built. */
31 
32 #include "config.h"
33 #include "compat/va_copy.h"
34 #include "libavformat/avformat.h"
35 #include "libavfilter/avfilter.h"
36 #include "libavdevice/avdevice.h"
37 #include "libavresample/avresample.h"
38 #include "libswscale/swscale.h"
39 #include "libswresample/swresample.h"
40 #include "libpostproc/postprocess.h"
41 #include "libavutil/attributes.h"
42 #include "libavutil/avassert.h"
43 #include "libavutil/avstring.h"
44 #include "libavutil/bprint.h"
45 #include "libavutil/display.h"
46 #include "libavutil/mathematics.h"
47 #include "libavutil/imgutils.h"
48 #include "libavutil/libm.h"
49 #include "libavutil/parseutils.h"
50 #include "libavutil/pixdesc.h"
51 #include "libavutil/eval.h"
52 #include "libavutil/dict.h"
53 #include "libavutil/opt.h"
54 #include "libavutil/cpu.h"
55 #include "libavutil/ffversion.h"
56 #include "libavutil/version.h"
57 #include "cmdutils.h"
58 #if HAVE_SYS_RESOURCE_H
59 #include <sys/time.h>
60 #include <sys/resource.h>
61 #endif
62 #ifdef _WIN32
63 #include <windows.h>
64 #endif
65 
66 static int init_report(const char *env);
67 
68 AVDictionary *sws_dict;
69 AVDictionary *swr_opts;
70 AVDictionary *format_opts, *codec_opts, *resample_opts;
71 
72 static FILE *report_file;
73 static int report_file_level = AV_LOG_DEBUG;
74 int hide_banner = 0;
75 
76 enum show_muxdemuxers {
77  SHOW_DEFAULT,
78  SHOW_DEMUXERS,
79  SHOW_MUXERS,
80 };
81 
82 void init_opts(void)
83 {
84  av_dict_set(&sws_dict, "flags", "bicubic", 0);
85 }
86 
87 void uninit_opts(void)
88 {
89  av_dict_free(&swr_opts);
90  av_dict_free(&sws_dict);
91  av_dict_free(&format_opts);
92  av_dict_free(&codec_opts);
93  av_dict_free(&resample_opts);
94 }
95 
96 void log_callback_help(void *ptr, int level, const char *fmt, va_list vl)
97 {
98  vfprintf(stdout, fmt, vl);
99 }
100 
101 static void log_callback_report(void *ptr, int level, const char *fmt, va_list vl)
102 {
103  va_list vl2;
104  char line[1024];
105  static int print_prefix = 1;
106 
107  va_copy(vl2, vl);
108  av_log_default_callback(ptr, level, fmt, vl);
109  av_log_format_line(ptr, level, fmt, vl2, line, sizeof(line), &print_prefix);
110  va_end(vl2);
111  if (report_file_level >= level) {
112  fputs(line, report_file);
113  fflush(report_file);
114  }
115 }
116 
117 void init_dynload(void)
118 {
119 #if HAVE_SETDLLDIRECTORY && defined(_WIN32)
120  /* Calling SetDllDirectory with the empty string (but not NULL) removes the
121  * current working directory from the DLL search path as a security precaution. */
122  SetDllDirectory("");
123 #endif
124 }
125 
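/* Optional per-tool cleanup callback; exit_program() invokes it (when registered
 * via register_exit) before terminating the process. */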
126 static void (*program_exit)(int ret);
127 
128 void register_exit(void (*cb)(int ret))
129 {
130  program_exit = cb;
131 }
132 
133 void exit_program(int ret)
134 {
135  if (program_exit)
136  program_exit(ret);
137 
138  exit(ret);
139 }
140 
141 double parse_number_or_die(const char *context, const char *numstr, int type,
142  double min, double max)
143 {
144  char *tail;
145  const char *error;
146  double d = av_strtod(numstr, &tail);
147  if (*tail)
148  error = "Expected number for %s but found: %s\n";
149  else if (d < min || d > max)
150  error = "The value for %s was %s which is not within %f - %f\n";
151  else if (type == OPT_INT64 && (int64_t)d != d)
152  error = "Expected int64 for %s but found %s\n";
153  else if (type == OPT_INT && (int)d != d)
154  error = "Expected int for %s but found %s\n";
155  else
156  return d;
157  av_log(NULL, AV_LOG_FATAL, error, context, numstr, min, max);
158  exit_program(1);
159  return 0;
160 }
161 
162 int64_t parse_time_or_die(const char *context, const char *timestr,
163  int is_duration)
164 {
165  int64_t us;
166  if (av_parse_time(&us, timestr, is_duration) < 0) {
167  av_log(NULL, AV_LOG_FATAL, "Invalid %s specification for %s: %s\n",
168  is_duration ? "duration" : "date", context, timestr);
169  exit_program(1);
170  }
171  return us;
172 }
173 
174 void show_help_options(const OptionDef *options, const char *msg, int req_flags,
175  int rej_flags, int alt_flags)
176 {
177  const OptionDef *po;
178  int first;
179 
180  first = 1;
181  for (po = options; po->name; po++) {
182  char buf[128];
183 
184  if (((po->flags & req_flags) != req_flags) ||
185  (alt_flags && !(po->flags & alt_flags)) ||
186  (po->flags & rej_flags))
187  continue;
188 
189  if (first) {
190  printf("%s\n", msg);
191  first = 0;
192  }
193  av_strlcpy(buf, po->name, sizeof(buf));
194  if (po->argname) {
195  av_strlcat(buf, " ", sizeof(buf));
196  av_strlcat(buf, po->argname, sizeof(buf));
197  }
198  printf("-%-17s %s\n", buf, po->help);
199  }
200  printf("\n");
201 }
202 
203 void show_help_children(const AVClass *class, int flags)
204 {
205  const AVClass *child = NULL;
206  if (class->option) {
207  av_opt_show2(&class, NULL, flags, 0);
208  printf("\n");
209  }
210 
211  while (child = av_opt_child_class_next(class, child))
212  show_help_children(child, flags);
213 }
214 
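/* Look up 'name' in the option table; the name is matched only up to an optional
 * ':' specifier (so "b:v" matches the "b" entry). Returns the table's terminating
 * entry when nothing matches. */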
215 static const OptionDef *find_option(const OptionDef *po, const char *name)
216 {
217  const char *p = strchr(name, ':');
218  int len = p ? p - name : strlen(name);
219 
220  while (po->name) {
221  if (!strncmp(name, po->name, len) && strlen(po->name) == len)
222  break;
223  po++;
224  }
225  return po;
226 }
227 
228 /* _WIN32 means using the windows libc - cygwin doesn't define that
229  * by default. HAVE_COMMANDLINETOARGVW is true on cygwin, while
230  * it doesn't provide the actual command line via GetCommandLineW(). */
231 #if HAVE_COMMANDLINETOARGVW && defined(_WIN32)
232 #include <shellapi.h>
233 /* Will be leaked on exit */
234 static char** win32_argv_utf8 = NULL;
235 static int win32_argc = 0;
236 
237 /**
238  * Prepare command line arguments for executable.
239  * For Windows - perform wide-char to UTF-8 conversion.
240  * Input arguments should be main() function arguments.
241  * @param argc_ptr Arguments number (including executable)
242  * @param argv_ptr Arguments list.
243  */
244 static void prepare_app_arguments(int *argc_ptr, char ***argv_ptr)
245 {
246  char *argstr_flat;
247  wchar_t **argv_w;
248  int i, buffsize = 0, offset = 0;
249 
250  if (win32_argv_utf8) {
251  *argc_ptr = win32_argc;
252  *argv_ptr = win32_argv_utf8;
253  return;
254  }
255 
256  win32_argc = 0;
257  argv_w = CommandLineToArgvW(GetCommandLineW(), &win32_argc);
258  if (win32_argc <= 0 || !argv_w)
259  return;
260 
261  /* determine the UTF-8 buffer size (including NULL-termination symbols) */
262  for (i = 0; i < win32_argc; i++)
263  buffsize += WideCharToMultiByte(CP_UTF8, 0, argv_w[i], -1,
264  NULL, 0, NULL, NULL);
265 
266  win32_argv_utf8 = av_mallocz(sizeof(char *) * (win32_argc + 1) + buffsize);
267  argstr_flat = (char *)win32_argv_utf8 + sizeof(char *) * (win32_argc + 1);
268  if (!win32_argv_utf8) {
269  LocalFree(argv_w);
270  return;
271  }
272 
273  for (i = 0; i < win32_argc; i++) {
274  win32_argv_utf8[i] = &argstr_flat[offset];
275  offset += WideCharToMultiByte(CP_UTF8, 0, argv_w[i], -1,
276  &argstr_flat[offset],
277  buffsize - offset, NULL, NULL);
278  }
279  win32_argv_utf8[i] = NULL;
280  LocalFree(argv_w);
281 
282  *argc_ptr = win32_argc;
283  *argv_ptr = win32_argv_utf8;
284 }
285 #else
286 static inline void prepare_app_arguments(int *argc_ptr, char ***argv_ptr)
287 {
288  /* nothing to do */
289 }
290 #endif /* HAVE_COMMANDLINETOARGVW */
291 
292 static int write_option(void *optctx, const OptionDef *po, const char *opt,
293  const char *arg)
294 {
295  /* new-style options contain an offset into optctx, old-style address of
296  * a global var*/
297  void *dst = po->flags & (OPT_OFFSET | OPT_SPEC) ?
298  (uint8_t *)optctx + po->u.off : po->u.dst_ptr;
299  int *dstcount;
300 
301  if (po->flags & OPT_SPEC) {
302  SpecifierOpt **so = dst;
303  char *p = strchr(opt, ':');
304  char *str;
305 
306  dstcount = (int *)(so + 1);
307  *so = grow_array(*so, sizeof(**so), dstcount, *dstcount + 1);
308  str = av_strdup(p ? p + 1 : "");
309  if (!str)
310  return AVERROR(ENOMEM);
311  (*so)[*dstcount - 1].specifier = str;
312  dst = &(*so)[*dstcount - 1].u;
313  }
314 
315  if (po->flags & OPT_STRING) {
316  char *str;
317  str = av_strdup(arg);
318  av_freep(dst);
319  if (!str)
320  return AVERROR(ENOMEM);
321  *(char **)dst = str;
322  } else if (po->flags & OPT_BOOL || po->flags & OPT_INT) {
323  *(int *)dst = parse_number_or_die(opt, arg, OPT_INT64, INT_MIN, INT_MAX);
324  } else if (po->flags & OPT_INT64) {
325  *(int64_t *)dst = parse_number_or_die(opt, arg, OPT_INT64, INT64_MIN, INT64_MAX);
326  } else if (po->flags & OPT_TIME) {
327  *(int64_t *)dst = parse_time_or_die(opt, arg, 1);
328  } else if (po->flags & OPT_FLOAT) {
329  *(float *)dst = parse_number_or_die(opt, arg, OPT_FLOAT, -INFINITY, INFINITY);
330  } else if (po->flags & OPT_DOUBLE) {
331  *(double *)dst = parse_number_or_die(opt, arg, OPT_DOUBLE, -INFINITY, INFINITY);
332  } else if (po->u.func_arg) {
333  int ret = po->u.func_arg(optctx, opt, arg);
334  if (ret < 0) {
335  av_log(NULL, AV_LOG_ERROR,
336  "Failed to set value '%s' for option '%s': %s\n",
337  arg, opt, av_err2str(ret));
338  return ret;
339  }
340  }
341  if (po->flags & OPT_EXIT)
342  exit_program(0);
343 
344  return 0;
345 }
346 
347 int parse_option(void *optctx, const char *opt, const char *arg,
348  const OptionDef *options)
349 {
350  const OptionDef *po;
351  int ret;
352 
353  po = find_option(options, opt);
354  if (!po->name && opt[0] == 'n' && opt[1] == 'o') {
355  /* handle 'no' bool option */
356  po = find_option(options, opt + 2);
357  if ((po->name && (po->flags & OPT_BOOL)))
358  arg = "0";
359  } else if (po->flags & OPT_BOOL)
360  arg = "1";
361 
362  if (!po->name)
363  po = find_option(options, "default");
364  if (!po->name) {
365  av_log(NULL, AV_LOG_ERROR, "Unrecognized option '%s'\n", opt);
366  return AVERROR(EINVAL);
367  }
368  if (po->flags & HAS_ARG && !arg) {
369  av_log(NULL, AV_LOG_ERROR, "Missing argument for option '%s'\n", opt);
370  return AVERROR(EINVAL);
371  }
372 
373  ret = write_option(optctx, po, opt, arg);
374  if (ret < 0)
375  return ret;
376 
377  return !!(po->flags & HAS_ARG);
378 }
379 
380 void parse_options(void *optctx, int argc, char **argv, const OptionDef *options,
381  void (*parse_arg_function)(void *, const char*))
382 {
383  const char *opt;
384  int optindex, handleoptions = 1, ret;
385 
386  /* perform system-dependent conversions for arguments list */
387  prepare_app_arguments(&argc, &argv);
388 
389  /* parse options */
390  optindex = 1;
391  while (optindex < argc) {
392  opt = argv[optindex++];
393 
394  if (handleoptions && opt[0] == '-' && opt[1] != '\0') {
395  if (opt[1] == '-' && opt[2] == '\0') {
396  handleoptions = 0;
397  continue;
398  }
399  opt++;
400 
401  if ((ret = parse_option(optctx, opt, argv[optindex], options)) < 0)
402  exit_program(1);
403  optindex += ret;
404  } else {
405  if (parse_arg_function)
406  parse_arg_function(optctx, opt);
407  }
408  }
409 }
410 
411 int parse_optgroup(void *optctx, OptionGroup *g)
412 {
413  int i, ret;
414 
415  av_log(NULL, AV_LOG_DEBUG, "Parsing a group of options: %s %s.\n",
416  g->group_def->name, g->arg);
417 
418  for (i = 0; i < g->nb_opts; i++) {
419  Option *o = &g->opts[i];
420 
421  if (g->group_def->flags &&
422  !(g->group_def->flags & o->opt->flags)) {
423  av_log(NULL, AV_LOG_ERROR, "Option %s (%s) cannot be applied to "
424  "%s %s -- you are trying to apply an input option to an "
425  "output file or vice versa. Move this option before the "
426  "file it belongs to.\n", o->key, o->opt->help,
427  g->group_def->name, g->arg);
428  return AVERROR(EINVAL);
429  }
430 
431  av_log(NULL, AV_LOG_DEBUG, "Applying option %s (%s) with argument %s.\n",
432  o->key, o->opt->help, o->val);
433 
434  ret = write_option(optctx, o->opt, o->key, o->val);
435  if (ret < 0)
436  return ret;
437  }
438 
439  av_log(NULL, AV_LOG_DEBUG, "Successfully parsed a group of options.\n");
440 
441  return 0;
442 }
443 
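/* Return the index in argv at which option 'optname' occurs, or 0 if it is not
 * present; the argument of an option that takes a value is skipped. */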
444 int locate_option(int argc, char **argv, const OptionDef *options,
445  const char *optname)
446 {
447  const OptionDef *po;
448  int i;
449 
450  for (i = 1; i < argc; i++) {
451  const char *cur_opt = argv[i];
452 
453  if (*cur_opt++ != '-')
454  continue;
455 
456  po = find_option(options, cur_opt);
457  if (!po->name && cur_opt[0] == 'n' && cur_opt[1] == 'o')
458  po = find_option(options, cur_opt + 2);
459 
460  if ((!po->name && !strcmp(cur_opt, optname)) ||
461  (po->name && !strcmp(optname, po->name)))
462  return i;
463 
464  if (!po->name || po->flags & HAS_ARG)
465  i++;
466  }
467  return 0;
468 }
469 
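/* Write one command-line argument to the report file, adding shell-style quoting
 * and escaping where needed so the logged command line stays unambiguous. */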
470 static void dump_argument(const char *a)
471 {
472  const unsigned char *p;
473 
474  for (p = a; *p; p++)
475  if (!((*p >= '+' && *p <= ':') || (*p >= '@' && *p <= 'Z') ||
476  *p == '_' || (*p >= 'a' && *p <= 'z')))
477  break;
478  if (!*p) {
479  fputs(a, report_file);
480  return;
481  }
482  fputc('"', report_file);
483  for (p = a; *p; p++) {
484  if (*p == '\\' || *p == '"' || *p == '$' || *p == '`')
485  fprintf(report_file, "\\%c", *p);
486  else if (*p < ' ' || *p > '~')
487  fprintf(report_file, "\\x%02x", *p);
488  else
489  fputc(*p, report_file);
490  }
491  fputc('"', report_file);
492 }
493 
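/* Sanity-check the option table: every per-file option must also be marked as an
 * input and/or output option. */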
494 static void check_options(const OptionDef *po)
495 {
496  while (po->name) {
497  if (po->flags & OPT_PERFILE)
498  av_assert0(po->flags & (OPT_INPUT | OPT_OUTPUT));
499  po++;
500  }
501 }
502 
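/* Scan the command line early for -loglevel/-v, -report (or the FFREPORT
 * environment variable) and -hide_banner, so logging is configured before regular
 * option parsing starts. */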
503 void parse_loglevel(int argc, char **argv, const OptionDef *options)
504 {
505  int idx = locate_option(argc, argv, options, "loglevel");
506  const char *env;
507 
508  check_options(options);
509 
510  if (!idx)
511  idx = locate_option(argc, argv, options, "v");
512  if (idx && argv[idx + 1])
513  opt_loglevel(NULL, "loglevel", argv[idx + 1]);
514  idx = locate_option(argc, argv, options, "report");
515  if ((env = getenv("FFREPORT")) || idx) {
516  init_report(env);
517  if (report_file) {
518  int i;
519  fprintf(report_file, "Command line:\n");
520  for (i = 0; i < argc; i++) {
521  dump_argument(argv[i]);
522  fputc(i < argc - 1 ? ' ' : '\n', report_file);
523  }
524  fflush(report_file);
525  }
526  }
527  idx = locate_option(argc, argv, options, "hide_banner");
528  if (idx)
529  hide_banner = 1;
530 }
531 
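/* av_opt_find() wrapper that skips AVOptions whose flags are 0, so such options
 * are not matched here. */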
532 static const AVOption *opt_find(void *obj, const char *name, const char *unit,
533  int opt_flags, int search_flags)
534 {
535  const AVOption *o = av_opt_find(obj, name, unit, opt_flags, search_flags);
536  if(o && !o->flags)
537  return NULL;
538  return o;
539 }
540 
541 #define FLAGS (o->type == AV_OPT_TYPE_FLAGS && (arg[0]=='-' || arg[0]=='+')) ? AV_DICT_APPEND : 0
542 int opt_default(void *optctx, const char *opt, const char *arg)
543 {
544  const AVOption *o;
545  int consumed = 0;
546  char opt_stripped[128];
547  const char *p;
548  const AVClass *cc = avcodec_get_class(), *fc = avformat_get_class();
549 #if CONFIG_AVRESAMPLE
550  const AVClass *rc = avresample_get_class();
551 #endif
552 #if CONFIG_SWSCALE
553  const AVClass *sc = sws_get_class();
554 #endif
555 #if CONFIG_SWRESAMPLE
556  const AVClass *swr_class = swr_get_class();
557 #endif
558 
559  if (!strcmp(opt, "debug") || !strcmp(opt, "fdebug"))
560  av_log_set_level(AV_LOG_DEBUG);
561 
562  if (!(p = strchr(opt, ':')))
563  p = opt + strlen(opt);
564  av_strlcpy(opt_stripped, opt, FFMIN(sizeof(opt_stripped), p - opt + 1));
565 
566  if ((o = opt_find(&cc, opt_stripped, NULL, 0,
567  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ)) ||
568  ((opt[0] == 'v' || opt[0] == 'a' || opt[0] == 's') &&
569  (o = opt_find(&cc, opt + 1, NULL, 0, AV_OPT_SEARCH_FAKE_OBJ)))) {
570  av_dict_set(&codec_opts, opt, arg, FLAGS);
571  consumed = 1;
572  }
573  if ((o = opt_find(&fc, opt, NULL, 0,
574  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ))) {
575  av_dict_set(&format_opts, opt, arg, FLAGS);
576  if (consumed)
577  av_log(NULL, AV_LOG_VERBOSE, "Routing option %s to both codec and muxer layer\n", opt);
578  consumed = 1;
579  }
580 #if CONFIG_SWSCALE
581  if (!consumed && (o = opt_find(&sc, opt, NULL, 0,
582  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ))) {
583  struct SwsContext *sws = sws_alloc_context();
584  int ret = av_opt_set(sws, opt, arg, 0);
585  sws_freeContext(sws);
586  if (!strcmp(opt, "srcw") || !strcmp(opt, "srch") ||
587  !strcmp(opt, "dstw") || !strcmp(opt, "dsth") ||
588  !strcmp(opt, "src_format") || !strcmp(opt, "dst_format")) {
589  av_log(NULL, AV_LOG_ERROR, "Directly using swscale dimensions/format options is not supported, please use the -s or -pix_fmt options\n");
590  return AVERROR(EINVAL);
591  }
592  if (ret < 0) {
593  av_log(NULL, AV_LOG_ERROR, "Error setting option %s.\n", opt);
594  return ret;
595  }
596 
597  av_dict_set(&sws_dict, opt, arg, FLAGS);
598 
599  consumed = 1;
600  }
601 #else
602  if (!consumed && !strcmp(opt, "sws_flags")) {
603  av_log(NULL, AV_LOG_WARNING, "Ignoring %s %s, due to disabled swscale\n", opt, arg);
604  consumed = 1;
605  }
606 #endif
607 #if CONFIG_SWRESAMPLE
608  if (!consumed && (o=opt_find(&swr_class, opt, NULL, 0,
609  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ))) {
610  struct SwrContext *swr = swr_alloc();
611  int ret = av_opt_set(swr, opt, arg, 0);
612  swr_free(&swr);
613  if (ret < 0) {
614  av_log(NULL, AV_LOG_ERROR, "Error setting option %s.\n", opt);
615  return ret;
616  }
617  av_dict_set(&swr_opts, opt, arg, FLAGS);
618  consumed = 1;
619  }
620 #endif
621 #if CONFIG_AVRESAMPLE
622  if ((o=opt_find(&rc, opt, NULL, 0,
623  AV_OPT_SEARCH_CHILDREN | AV_OPT_SEARCH_FAKE_OBJ))) {
624  av_dict_set(&resample_opts, opt, arg, FLAGS);
625  consumed = 1;
626  }
627 #endif
628 
629  if (consumed)
630  return 0;
631  return AVERROR_OPTION_NOT_FOUND;
632 }
633 
634 /*
635  * Check whether given option is a group separator.
636  *
637  * @return index of the group definition that matched or -1 if none
638  */
639 static int match_group_separator(const OptionGroupDef *groups, int nb_groups,
640  const char *opt)
641 {
642  int i;
643 
644  for (i = 0; i < nb_groups; i++) {
645  const OptionGroupDef *p = &groups[i];
646  if (p->sep && !strcmp(p->sep, opt))
647  return i;
648  }
649 
650  return -1;
651 }
652 
653 /*
654  * Finish parsing an option group.
655  *
656  * @param group_idx which group definition should this group belong to
657  * @param arg argument of the group delimiting option
658  */
659 static void finish_group(OptionParseContext *octx, int group_idx,
660  const char *arg)
661 {
662  OptionGroupList *l = &octx->groups[group_idx];
663  OptionGroup *g;
664 
665  GROW_ARRAY(l->groups, l->nb_groups);
666  g = &l->groups[l->nb_groups - 1];
667 
668  *g = octx->cur_group;
669  g->arg = arg;
670  g->group_def = l->group_def;
671  g->sws_dict = sws_dict;
672  g->swr_opts = swr_opts;
673  g->codec_opts = codec_opts;
674  g->format_opts = format_opts;
675  g->resample_opts = resample_opts;
676 
677  codec_opts = NULL;
678  format_opts = NULL;
679  resample_opts = NULL;
680  sws_dict = NULL;
681  swr_opts = NULL;
682  init_opts();
683 
684  memset(&octx->cur_group, 0, sizeof(octx->cur_group));
685 }
686 
687 /*
688  * Add an option instance to currently parsed group.
689  */
690 static void add_opt(OptionParseContext *octx, const OptionDef *opt,
691  const char *key, const char *val)
692 {
693  int global = !(opt->flags & (OPT_PERFILE | OPT_SPEC | OPT_OFFSET));
694  OptionGroup *g = global ? &octx->global_opts : &octx->cur_group;
695 
696  GROW_ARRAY(g->opts, g->nb_opts);
697  g->opts[g->nb_opts - 1].opt = opt;
698  g->opts[g->nb_opts - 1].key = key;
699  g->opts[g->nb_opts - 1].val = val;
700 }
701 
702 void init_parse_context(OptionParseContext *octx,
703  const OptionGroupDef *groups, int nb_groups)
704 {
705  static const OptionGroupDef global_group = { "global" };
706  int i;
707 
708  memset(octx, 0, sizeof(*octx));
709 
710  octx->nb_groups = nb_groups;
711  octx->groups = av_mallocz_array(octx->nb_groups, sizeof(*octx->groups));
712  if (!octx->groups)
713  exit_program(1);
714 
715  for (i = 0; i < octx->nb_groups; i++)
716  octx->groups[i].group_def = &groups[i];
717 
718  octx->global_opts.group_def = &global_group;
719  octx->global_opts.arg = "";
720 
721  init_opts();
722 }
723 
724 void uninit_parse_context(OptionParseContext *octx)
725 {
726  int i, j;
727 
728  for (i = 0; i < octx->nb_groups; i++) {
729  OptionGroupList *l = &octx->groups[i];
730 
731  for (j = 0; j < l->nb_groups; j++) {
732  av_freep(&l->groups[j].opts);
733  av_dict_free(&l->groups[j].codec_opts);
734  av_dict_free(&l->groups[j].format_opts);
735  av_dict_free(&l->groups[j].resample_opts);
736 
737  av_dict_free(&l->groups[j].sws_dict);
738  av_dict_free(&l->groups[j].swr_opts);
739  }
740  av_freep(&l->groups);
741  }
742  av_freep(&octx->groups);
743 
744  av_freep(&octx->cur_group.opts);
745  av_freep(&octx->global_opts.opts);
746 
747  uninit_opts();
748 }
749 
750 int split_commandline(OptionParseContext *octx, int argc, char *argv[],
751  const OptionDef *options,
752  const OptionGroupDef *groups, int nb_groups)
753 {
754  int optindex = 1;
755  int dashdash = -2;
756 
757  /* perform system-dependent conversions for arguments list */
758  prepare_app_arguments(&argc, &argv);
759 
760  init_parse_context(octx, groups, nb_groups);
761  av_log(NULL, AV_LOG_DEBUG, "Splitting the commandline.\n");
762 
763  while (optindex < argc) {
764  const char *opt = argv[optindex++], *arg;
765  const OptionDef *po;
766  int ret;
767 
768  av_log(NULL, AV_LOG_DEBUG, "Reading option '%s' ...", opt);
769 
770  if (opt[0] == '-' && opt[1] == '-' && !opt[2]) {
771  dashdash = optindex;
772  continue;
773  }
774  /* unnamed group separators, e.g. output filename */
775  if (opt[0] != '-' || !opt[1] || dashdash+1 == optindex) {
776  finish_group(octx, 0, opt);
777  av_log(NULL, AV_LOG_DEBUG, " matched as %s.\n", groups[0].name);
778  continue;
779  }
780  opt++;
781 
782 #define GET_ARG(arg) \
783 do { \
784  arg = argv[optindex++]; \
785  if (!arg) { \
786  av_log(NULL, AV_LOG_ERROR, "Missing argument for option '%s'.\n", opt);\
787  return AVERROR(EINVAL); \
788  } \
789 } while (0)
790 
791  /* named group separators, e.g. -i */
792  if ((ret = match_group_separator(groups, nb_groups, opt)) >= 0) {
793  GET_ARG(arg);
794  finish_group(octx, ret, arg);
795  av_log(NULL, AV_LOG_DEBUG, " matched as %s with argument '%s'.\n",
796  groups[ret].name, arg);
797  continue;
798  }
799 
800  /* normal options */
801  po = find_option(options, opt);
802  if (po->name) {
803  if (po->flags & OPT_EXIT) {
804  /* optional argument, e.g. -h */
805  arg = argv[optindex++];
806  } else if (po->flags & HAS_ARG) {
807  GET_ARG(arg);
808  } else {
809  arg = "1";
810  }
811 
812  add_opt(octx, po, opt, arg);
813  av_log(NULL, AV_LOG_DEBUG, " matched as option '%s' (%s) with "
814  "argument '%s'.\n", po->name, po->help, arg);
815  continue;
816  }
817 
818  /* AVOptions */
819  if (argv[optindex]) {
820  ret = opt_default(NULL, opt, argv[optindex]);
821  if (ret >= 0) {
822  av_log(NULL, AV_LOG_DEBUG, " matched as AVOption '%s' with "
823  "argument '%s'.\n", opt, argv[optindex]);
824  optindex++;
825  continue;
826  } else if (ret != AVERROR_OPTION_NOT_FOUND) {
827  av_log(NULL, AV_LOG_ERROR, "Error parsing option '%s' "
828  "with argument '%s'.\n", opt, argv[optindex]);
829  return ret;
830  }
831  }
832 
833  /* boolean -nofoo options */
834  if (opt[0] == 'n' && opt[1] == 'o' &&
835  (po = find_option(options, opt + 2)) &&
836  po->name && po->flags & OPT_BOOL) {
837  add_opt(octx, po, opt, "0");
838  av_log(NULL, AV_LOG_DEBUG, " matched as option '%s' (%s) with "
839  "argument 0.\n", po->name, po->help);
840  continue;
841  }
842 
843  av_log(NULL, AV_LOG_ERROR, "Unrecognized option '%s'.\n", opt);
844  return AVERROR_OPTION_NOT_FOUND;
845  }
846 
847  if (octx->cur_group.nb_opts || codec_opts || format_opts || resample_opts)
848  av_log(NULL, AV_LOG_WARNING, "Trailing option(s) found in the "
849  "command: may be ignored.\n");
850 
851  av_log(NULL, AV_LOG_DEBUG, "Finished splitting the commandline.\n");
852 
853  return 0;
854 }
855 
856 int opt_cpuflags(void *optctx, const char *opt, const char *arg)
857 {
858  int ret;
859  unsigned flags = av_get_cpu_flags();
860 
861  if ((ret = av_parse_cpu_caps(&flags, arg)) < 0)
862  return ret;
863 
864  av_force_cpu_flags(flags);
865  return 0;
866 }
867 
868 int opt_loglevel(void *optctx, const char *opt, const char *arg)
869 {
870  const struct { const char *name; int level; } log_levels[] = {
871  { "quiet" , AV_LOG_QUIET },
872  { "panic" , AV_LOG_PANIC },
873  { "fatal" , AV_LOG_FATAL },
874  { "error" , AV_LOG_ERROR },
875  { "warning", AV_LOG_WARNING },
876  { "info" , AV_LOG_INFO },
877  { "verbose", AV_LOG_VERBOSE },
878  { "debug" , AV_LOG_DEBUG },
879  { "trace" , AV_LOG_TRACE },
880  };
881  const char *token;
882  char *tail;
883  int flags = av_log_get_flags();
884  int level = av_log_get_level();
885  int cmd, i = 0;
886 
887  av_assert0(arg);
888  while (*arg) {
889  token = arg;
890  if (*token == '+' || *token == '-') {
891  cmd = *token++;
892  } else {
893  cmd = 0;
894  }
895  if (!i && !cmd) {
896  flags = 0; /* missing relative prefix, build absolute value */
897  }
898  if (!strncmp(token, "repeat", 6)) {
899  if (cmd == '-') {
900  flags |= AV_LOG_SKIP_REPEATED;
901  } else {
902  flags &= ~AV_LOG_SKIP_REPEATED;
903  }
904  arg = token + 6;
905  } else if (!strncmp(token, "level", 5)) {
906  if (cmd == '-') {
907  flags &= ~AV_LOG_PRINT_LEVEL;
908  } else {
909  flags |= AV_LOG_PRINT_LEVEL;
910  }
911  arg = token + 5;
912  } else {
913  break;
914  }
915  i++;
916  }
917  if (!*arg) {
918  goto end;
919  } else if (*arg == '+') {
920  arg++;
921  } else if (!i) {
922  flags = av_log_get_flags(); /* level value without prefix, reset flags */
923  }
924 
925  for (i = 0; i < FF_ARRAY_ELEMS(log_levels); i++) {
926  if (!strcmp(log_levels[i].name, arg)) {
927  level = log_levels[i].level;
928  goto end;
929  }
930  }
931 
932  level = strtol(arg, &tail, 10);
933  if (*tail) {
934  av_log(NULL, AV_LOG_FATAL, "Invalid loglevel \"%s\". "
935  "Possible levels are numbers or:\n", arg);
936  for (i = 0; i < FF_ARRAY_ELEMS(log_levels); i++)
937  av_log(NULL, AV_LOG_FATAL, "\"%s\"\n", log_levels[i].name);
938  exit_program(1);
939  }
940 
941 end:
942  av_log_set_flags(flags);
943  av_log_set_level(level);
944  return 0;
945 }
946 
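/* Expand the report file name template: %p -> program name, %t -> timestamp
 * (YYYYMMDD-HHMMSS), %% -> a literal '%'. */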
947 static void expand_filename_template(AVBPrint *bp, const char *template,
948  struct tm *tm)
949 {
950  int c;
951 
952  while ((c = *(template++))) {
953  if (c == '%') {
954  if (!(c = *(template++)))
955  break;
956  switch (c) {
957  case 'p':
958  av_bprintf(bp, "%s", program_name);
959  break;
960  case 't':
961  av_bprintf(bp, "%04d%02d%02d-%02d%02d%02d",
962  tm->tm_year + 1900, tm->tm_mon + 1, tm->tm_mday,
963  tm->tm_hour, tm->tm_min, tm->tm_sec);
964  break;
965  case '%':
966  av_bprint_chars(bp, c, 1);
967  break;
968  }
969  } else {
970  av_bprint_chars(bp, c, 1);
971  }
972  }
973 }
974 
975 static int init_report(const char *env)
976 {
977  char *filename_template = NULL;
978  char *key, *val;
979  int ret, count = 0;
980  int prog_loglevel, envlevel = 0;
981  time_t now;
982  struct tm *tm;
983  AVBPrint filename;
984 
985  if (report_file) /* already opened */
986  return 0;
987  time(&now);
988  tm = localtime(&now);
989 
990  while (env && *env) {
991  if ((ret = av_opt_get_key_value(&env, "=", ":", 0, &key, &val)) < 0) {
992  if (count)
993  av_log(NULL, AV_LOG_ERROR,
994  "Failed to parse FFREPORT environment variable: %s\n",
995  av_err2str(ret));
996  break;
997  }
998  if (*env)
999  env++;
1000  count++;
1001  if (!strcmp(key, "file")) {
1002  av_free(filename_template);
1003  filename_template = val;
1004  val = NULL;
1005  } else if (!strcmp(key, "level")) {
1006  char *tail;
1007  report_file_level = strtol(val, &tail, 10);
1008  if (*tail) {
1009  av_log(NULL, AV_LOG_FATAL, "Invalid report file level\n");
1010  exit_program(1);
1011  }
1012  envlevel = 1;
1013  } else {
1014  av_log(NULL, AV_LOG_ERROR, "Unknown key '%s' in FFREPORT\n", key);
1015  }
1016  av_free(val);
1017  av_free(key);
1018  }
1019 
1020  av_bprint_init(&filename, 0, AV_BPRINT_SIZE_AUTOMATIC);
1021  expand_filename_template(&filename,
1022  av_x_if_null(filename_template, "%p-%t.log"), tm);
1023  av_free(filename_template);
1024  if (!av_bprint_is_complete(&filename)) {
1025  av_log(NULL, AV_LOG_ERROR, "Out of memory building report file name\n");
1026  return AVERROR(ENOMEM);
1027  }
1028 
1029  prog_loglevel = av_log_get_level();
1030  if (!envlevel)
1031  report_file_level = FFMAX(report_file_level, prog_loglevel);
1032 
1033  report_file = fopen(filename.str, "w");
1034  if (!report_file) {
1035  int ret = AVERROR(errno);
1036  av_log(NULL, AV_LOG_ERROR, "Failed to open report \"%s\": %s\n",
1037  filename.str, strerror(errno));
1038  return ret;
1039  }
1040 
1041  fprintf(report_file,
1042  "%s started on %04d-%02d-%02d at %02d:%02d:%02d\n"
1043  "Report written to \"%s\"\n"
1044  "Log level: %d\n",
1045  program_name,
1046  tm->tm_year + 1900, tm->tm_mon + 1, tm->tm_mday,
1047  tm->tm_hour, tm->tm_min, tm->tm_sec,
1048  filename.str, report_file_level);
1049  av_bprint_finalize(&filename, NULL);
1050  return 0;
1051 }
1052 
1053 int opt_report(void *optctx, const char *opt, const char *arg)
1054 {
1055  return init_report(NULL);
1056 }
1057 
1058 int opt_max_alloc(void *optctx, const char *opt, const char *arg)
1059 {
1060  char *tail;
1061  size_t max;
1062 
1063  max = strtol(arg, &tail, 10);
1064  if (*tail) {
1065  av_log(NULL, AV_LOG_FATAL, "Invalid max_alloc \"%s\".\n", arg);
1066  exit_program(1);
1067  }
1068  av_max_alloc(max);
1069  return 0;
1070 }
1071 
1072 int opt_timelimit(void *optctx, const char *opt, const char *arg)
1073 {
1074 #if HAVE_SETRLIMIT
1075  int lim = parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
1076  struct rlimit rl = { lim, lim + 1 };
1077  if (setrlimit(RLIMIT_CPU, &rl))
1078  perror("setrlimit");
1079 #else
1080  av_log(NULL, AV_LOG_WARNING, "-%s not implemented on this OS\n", opt);
1081 #endif
1082  return 0;
1083 }
1084 
1085 void print_error(const char *filename, int err)
1086 {
1087  char errbuf[128];
1088  const char *errbuf_ptr = errbuf;
1089 
1090  if (av_strerror(err, errbuf, sizeof(errbuf)) < 0)
1091  errbuf_ptr = strerror(AVUNERROR(err));
1092  av_log(NULL, AV_LOG_ERROR, "%s: %s\n", filename, errbuf_ptr);
1093 }
1094 
1095 static int warned_cfg = 0;
1096 
1097 #define INDENT 1
1098 #define SHOW_VERSION 2
1099 #define SHOW_CONFIG 4
1100 #define SHOW_COPYRIGHT 8
1101 
1102 #define PRINT_LIB_INFO(libname, LIBNAME, flags, level) \
1103  if (CONFIG_##LIBNAME) { \
1104  const char *indent = flags & INDENT? " " : ""; \
1105  if (flags & SHOW_VERSION) { \
1106  unsigned int version = libname##_version(); \
1107  av_log(NULL, level, \
1108  "%slib%-11s %2d.%3d.%3d / %2d.%3d.%3d\n", \
1109  indent, #libname, \
1110  LIB##LIBNAME##_VERSION_MAJOR, \
1111  LIB##LIBNAME##_VERSION_MINOR, \
1112  LIB##LIBNAME##_VERSION_MICRO, \
1113  AV_VERSION_MAJOR(version), AV_VERSION_MINOR(version),\
1114  AV_VERSION_MICRO(version)); \
1115  } \
1116  if (flags & SHOW_CONFIG) { \
1117  const char *cfg = libname##_configuration(); \
1118  if (strcmp(FFMPEG_CONFIGURATION, cfg)) { \
1119  if (!warned_cfg) { \
1120  av_log(NULL, level, \
1121  "%sWARNING: library configuration mismatch\n", \
1122  indent); \
1123  warned_cfg = 1; \
1124  } \
1125  av_log(NULL, level, "%s%-11s configuration: %s\n", \
1126  indent, #libname, cfg); \
1127  } \
1128  } \
1129  } \
1130 
1131 static void print_all_libs_info(int flags, int level)
1132 {
1133  PRINT_LIB_INFO(avutil, AVUTIL, flags, level);
1134  PRINT_LIB_INFO(avcodec, AVCODEC, flags, level);
1135  PRINT_LIB_INFO(avformat, AVFORMAT, flags, level);
1136  PRINT_LIB_INFO(avdevice, AVDEVICE, flags, level);
1137  PRINT_LIB_INFO(avfilter, AVFILTER, flags, level);
1138  PRINT_LIB_INFO(avresample, AVRESAMPLE, flags, level);
1139  PRINT_LIB_INFO(swscale, SWSCALE, flags, level);
1140  PRINT_LIB_INFO(swresample, SWRESAMPLE, flags, level);
1141  PRINT_LIB_INFO(postproc, POSTPROC, flags, level);
1142 }
1143 
1144 static void print_program_info(int flags, int level)
1145 {
1146  const char *indent = flags & INDENT? " " : "";
1147 
1148  av_log(NULL, level, "%s version " FFMPEG_VERSION, program_name);
1149  if (flags & SHOW_COPYRIGHT)
1150  av_log(NULL, level, " Copyright (c) %d-%d the FFmpeg developers",
1151  program_birth_year, CONFIG_THIS_YEAR);
1152  av_log(NULL, level, "\n");
1153  av_log(NULL, level, "%sbuilt with %s\n", indent, CC_IDENT);
1154 
1155  av_log(NULL, level, "%sconfiguration: " FFMPEG_CONFIGURATION "\n", indent);
1156 }
1157 
1158 static void print_buildconf(int flags, int level)
1159 {
1160  const char *indent = flags & INDENT ? " " : "";
1161  char str[] = { FFMPEG_CONFIGURATION };
1162  char *conflist, *remove_tilde, *splitconf;
1163 
1164  // Change all the ' --' strings to '~--' so that
1165  // they can be identified as tokens.
1166  while ((conflist = strstr(str, " --")) != NULL) {
1167  strncpy(conflist, "~--", 3);
1168  }
1169 
1170  // Compensate for the weirdness this would cause
1171  // when passing 'pkg-config --static'.
1172  while ((remove_tilde = strstr(str, "pkg-config~")) != NULL) {
1173  strncpy(remove_tilde, "pkg-config ", 11);
1174  }
1175 
1176  splitconf = strtok(str, "~");
1177  av_log(NULL, level, "\n%sconfiguration:\n", indent);
1178  while (splitconf != NULL) {
1179  av_log(NULL, level, "%s%s%s\n", indent, indent, splitconf);
1180  splitconf = strtok(NULL, "~");
1181  }
1182 }
1183 
1184 void show_banner(int argc, char **argv, const OptionDef *options)
1185 {
1186  int idx = locate_option(argc, argv, options, "version");
1187  if (hide_banner || idx)
1188  return;
1189 
1190  print_program_info (INDENT|SHOW_COPYRIGHT, AV_LOG_INFO);
1191  print_all_libs_info(INDENT|SHOW_CONFIG, AV_LOG_INFO);
1192  print_all_libs_info(INDENT|SHOW_VERSION, AV_LOG_INFO);
1193 }
1194 
1195 int show_version(void *optctx, const char *opt, const char *arg)
1196 {
1197  av_log_set_callback(log_callback_help);
1198  print_program_info (SHOW_COPYRIGHT, AV_LOG_INFO);
1199  print_all_libs_info(SHOW_VERSION, AV_LOG_INFO);
1200 
1201  return 0;
1202 }
1203 
1204 int show_buildconf(void *optctx, const char *opt, const char *arg)
1205 {
1206  av_log_set_callback(log_callback_help);
1207  print_buildconf(INDENT|0, AV_LOG_INFO);
1208 
1209  return 0;
1210 }
1211 
1212 int show_license(void *optctx, const char *opt, const char *arg)
1213 {
1214 #if CONFIG_NONFREE
1215  printf(
1216  "This version of %s has nonfree parts compiled in.\n"
1217  "Therefore it is not legally redistributable.\n",
1218  program_name );
1219 #elif CONFIG_GPLV3
1220  printf(
1221  "%s is free software; you can redistribute it and/or modify\n"
1222  "it under the terms of the GNU General Public License as published by\n"
1223  "the Free Software Foundation; either version 3 of the License, or\n"
1224  "(at your option) any later version.\n"
1225  "\n"
1226  "%s is distributed in the hope that it will be useful,\n"
1227  "but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
1228  "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n"
1229  "GNU General Public License for more details.\n"
1230  "\n"
1231  "You should have received a copy of the GNU General Public License\n"
1232  "along with %s. If not, see <http://www.gnu.org/licenses/>.\n",
1233  program_name, program_name, program_name);
1234 #elif CONFIG_GPL
1235  printf(
1236  "%s is free software; you can redistribute it and/or modify\n"
1237  "it under the terms of the GNU General Public License as published by\n"
1238  "the Free Software Foundation; either version 2 of the License, or\n"
1239  "(at your option) any later version.\n"
1240  "\n"
1241  "%s is distributed in the hope that it will be useful,\n"
1242  "but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
1243  "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n"
1244  "GNU General Public License for more details.\n"
1245  "\n"
1246  "You should have received a copy of the GNU General Public License\n"
1247  "along with %s; if not, write to the Free Software\n"
1248  "Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n",
1249  program_name, program_name, program_name);
1250 #elif CONFIG_LGPLV3
1251  printf(
1252  "%s is free software; you can redistribute it and/or modify\n"
1253  "it under the terms of the GNU Lesser General Public License as published by\n"
1254  "the Free Software Foundation; either version 3 of the License, or\n"
1255  "(at your option) any later version.\n"
1256  "\n"
1257  "%s is distributed in the hope that it will be useful,\n"
1258  "but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
1259  "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n"
1260  "GNU Lesser General Public License for more details.\n"
1261  "\n"
1262  "You should have received a copy of the GNU Lesser General Public License\n"
1263  "along with %s. If not, see <http://www.gnu.org/licenses/>.\n",
1264  program_name, program_name, program_name);
1265 #else
1266  printf(
1267  "%s is free software; you can redistribute it and/or\n"
1268  "modify it under the terms of the GNU Lesser General Public\n"
1269  "License as published by the Free Software Foundation; either\n"
1270  "version 2.1 of the License, or (at your option) any later version.\n"
1271  "\n"
1272  "%s is distributed in the hope that it will be useful,\n"
1273  "but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
1274  "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n"
1275  "Lesser General Public License for more details.\n"
1276  "\n"
1277  "You should have received a copy of the GNU Lesser General Public\n"
1278  "License along with %s; if not, write to the Free Software\n"
1279  "Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n",
1280  program_name, program_name, program_name);
1281 #endif
1282 
1283  return 0;
1284 }
1285 
1286 static int is_device(const AVClass *avclass)
1287 {
1288  if (!avclass)
1289  return 0;
1290  return AV_IS_INPUT_DEVICE(avclass->category) || AV_IS_OUTPUT_DEVICE(avclass->category);
1291 }
1292 
1293 static int show_formats_devices(void *optctx, const char *opt, const char *arg, int device_only, int muxdemuxers)
1294 {
1295  void *ifmt_opaque = NULL;
1296  const AVInputFormat *ifmt = NULL;
1297  void *ofmt_opaque = NULL;
1298  const AVOutputFormat *ofmt = NULL;
1299  const char *last_name;
1300  int is_dev;
1301 
1302  printf("%s\n"
1303  " D. = Demuxing supported\n"
1304  " .E = Muxing supported\n"
1305  " --\n", device_only ? "Devices:" : "File formats:");
1306  last_name = "000";
1307  for (;;) {
1308  int decode = 0;
1309  int encode = 0;
1310  const char *name = NULL;
1311  const char *long_name = NULL;
1312 
1313  if (muxdemuxers !=SHOW_DEMUXERS) {
1314  ofmt_opaque = NULL;
1315  while ((ofmt = av_muxer_iterate(&ofmt_opaque))) {
1316  is_dev = is_device(ofmt->priv_class);
1317  if (!is_dev && device_only)
1318  continue;
1319  if ((!name || strcmp(ofmt->name, name) < 0) &&
1320  strcmp(ofmt->name, last_name) > 0) {
1321  name = ofmt->name;
1322  long_name = ofmt->long_name;
1323  encode = 1;
1324  }
1325  }
1326  }
1327  if (muxdemuxers != SHOW_MUXERS) {
1328  ifmt_opaque = NULL;
1329  while ((ifmt = av_demuxer_iterate(&ifmt_opaque))) {
1330  is_dev = is_device(ifmt->priv_class);
1331  if (!is_dev && device_only)
1332  continue;
1333  if ((!name || strcmp(ifmt->name, name) < 0) &&
1334  strcmp(ifmt->name, last_name) > 0) {
1335  name = ifmt->name;
1336  long_name = ifmt->long_name;
1337  encode = 0;
1338  }
1339  if (name && strcmp(ifmt->name, name) == 0)
1340  decode = 1;
1341  }
1342  }
1343  if (!name)
1344  break;
1345  last_name = name;
1346 
1347  printf(" %s%s %-15s %s\n",
1348  decode ? "D" : " ",
1349  encode ? "E" : " ",
1350  name,
1351  long_name ? long_name:" ");
1352  }
1353  return 0;
1354 }
1355 
1356 int show_formats(void *optctx, const char *opt, const char *arg)
1357 {
1358  return show_formats_devices(optctx, opt, arg, 0, SHOW_DEFAULT);
1359 }
1360 
1361 int show_muxers(void *optctx, const char *opt, const char *arg)
1362 {
1363  return show_formats_devices(optctx, opt, arg, 0, SHOW_MUXERS);
1364 }
1365 
1366 int show_demuxers(void *optctx, const char *opt, const char *arg)
1367 {
1368  return show_formats_devices(optctx, opt, arg, 0, SHOW_DEMUXERS);
1369 }
1370 
1371 int show_devices(void *optctx, const char *opt, const char *arg)
1372 {
1373  return show_formats_devices(optctx, opt, arg, 1, SHOW_DEFAULT);
1374 }
1375 
1376 #define PRINT_CODEC_SUPPORTED(codec, field, type, list_name, term, get_name) \
1377  if (codec->field) { \
1378  const type *p = codec->field; \
1379  \
1380  printf(" Supported " list_name ":"); \
1381  while (*p != term) { \
1382  get_name(*p); \
1383  printf(" %s", name); \
1384  p++; \
1385  } \
1386  printf("\n"); \
1387  } \
1388 
1389 static void print_codec(const AVCodec *c)
1390 {
1391  int encoder = av_codec_is_encoder(c);
1392 
1393  printf("%s %s [%s]:\n", encoder ? "Encoder" : "Decoder", c->name,
1394  c->long_name ? c->long_name : "");
1395 
1396  printf(" General capabilities: ");
1397  if (c->capabilities & AV_CODEC_CAP_DRAW_HORIZ_BAND)
1398  printf("horizband ");
1399  if (c->capabilities & AV_CODEC_CAP_DR1)
1400  printf("dr1 ");
1401  if (c->capabilities & AV_CODEC_CAP_TRUNCATED)
1402  printf("trunc ");
1403  if (c->capabilities & AV_CODEC_CAP_DELAY)
1404  printf("delay ");
1405  if (c->capabilities & AV_CODEC_CAP_SMALL_LAST_FRAME)
1406  printf("small ");
1407  if (c->capabilities & AV_CODEC_CAP_SUBFRAMES)
1408  printf("subframes ");
1409  if (c->capabilities & AV_CODEC_CAP_EXPERIMENTAL)
1410  printf("exp ");
1411  if (c->capabilities & AV_CODEC_CAP_CHANNEL_CONF)
1412  printf("chconf ");
1413  if (c->capabilities & AV_CODEC_CAP_PARAM_CHANGE)
1414  printf("paramchange ");
1415  if (c->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE)
1416  printf("variable ");
1417  if (c->capabilities & (AV_CODEC_CAP_FRAME_THREADS |
1418  AV_CODEC_CAP_SLICE_THREADS |
1419  AV_CODEC_CAP_AUTO_THREADS))
1420  printf("threads ");
1421  if (c->capabilities & AV_CODEC_CAP_AVOID_PROBING)
1422  printf("avoidprobe ");
1423  if (c->capabilities & AV_CODEC_CAP_HARDWARE)
1424  printf("hardware ");
1425  if (c->capabilities & AV_CODEC_CAP_HYBRID)
1426  printf("hybrid ");
1427  if (!c->capabilities)
1428  printf("none");
1429  printf("\n");
1430 
1431  if (c->type == AVMEDIA_TYPE_VIDEO ||
1432  c->type == AVMEDIA_TYPE_AUDIO) {
1433  printf(" Threading capabilities: ");
1434  switch (c->capabilities & (AV_CODEC_CAP_FRAME_THREADS |
1435  AV_CODEC_CAP_SLICE_THREADS |
1436  AV_CODEC_CAP_AUTO_THREADS)) {
1437  case AV_CODEC_CAP_FRAME_THREADS |
1438  AV_CODEC_CAP_SLICE_THREADS: printf("frame and slice"); break;
1439  case AV_CODEC_CAP_FRAME_THREADS: printf("frame"); break;
1440  case AV_CODEC_CAP_SLICE_THREADS: printf("slice"); break;
1441  case AV_CODEC_CAP_AUTO_THREADS : printf("auto"); break;
1442  default: printf("none"); break;
1443  }
1444  printf("\n");
1445  }
1446 
1447  if (avcodec_get_hw_config(c, 0)) {
1448  printf(" Supported hardware devices: ");
1449  for (int i = 0;; i++) {
1450  const AVCodecHWConfig *config = avcodec_get_hw_config(c, i);
1451  if (!config)
1452  break;
1453  printf("%s ", av_hwdevice_get_type_name(config->device_type));
1454  }
1455  printf("\n");
1456  }
1457 
1458  if (c->supported_framerates) {
1459  const AVRational *fps = c->supported_framerates;
1460 
1461  printf(" Supported framerates:");
1462  while (fps->num) {
1463  printf(" %d/%d", fps->num, fps->den);
1464  fps++;
1465  }
1466  printf("\n");
1467  }
1468  PRINT_CODEC_SUPPORTED(c, pix_fmts, enum AVPixelFormat, "pixel formats",
1469  AV_PIX_FMT_NONE, GET_PIX_FMT_NAME);
1470  PRINT_CODEC_SUPPORTED(c, supported_samplerates, int, "sample rates", 0,
1471  GET_SAMPLE_RATE_NAME);
1472  PRINT_CODEC_SUPPORTED(c, sample_fmts, enum AVSampleFormat, "sample formats",
1473  AV_SAMPLE_FMT_NONE, GET_SAMPLE_FMT_NAME);
1474  PRINT_CODEC_SUPPORTED(c, channel_layouts, uint64_t, "channel layouts",
1475  0, GET_CH_LAYOUT_DESC);
1476 
1477  if (c->priv_class) {
1478  show_help_children(c->priv_class,
1479  AV_OPT_FLAG_ENCODING_PARAM |
1480  AV_OPT_FLAG_DECODING_PARAM);
1481  }
1482 }
1483 
1484 static char get_media_type_char(enum AVMediaType type)
1485 {
1486  switch (type) {
1487  case AVMEDIA_TYPE_VIDEO: return 'V';
1488  case AVMEDIA_TYPE_AUDIO: return 'A';
1489  case AVMEDIA_TYPE_DATA: return 'D';
1490  case AVMEDIA_TYPE_SUBTITLE: return 'S';
1491  case AVMEDIA_TYPE_ATTACHMENT:return 'T';
1492  default: return '?';
1493  }
1494 }
1495 
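/* Iterate over all registered codecs, returning the next encoder (or decoder)
 * that implements the given codec ID. */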
1496 static const AVCodec *next_codec_for_id(enum AVCodecID id, void **iter,
1497  int encoder)
1498 {
1499  const AVCodec *c;
1500  while ((c = av_codec_iterate(iter))) {
1501  if (c->id == id &&
1502  (encoder ? av_codec_is_encoder(c) : av_codec_is_decoder(c)))
1503  return c;
1504  }
1505  return NULL;
1506 }
1507 
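/* qsort() comparator: order codec descriptors by media type first, then by name. */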
1508 static int compare_codec_desc(const void *a, const void *b)
1509 {
1510  const AVCodecDescriptor * const *da = a;
1511  const AVCodecDescriptor * const *db = b;
1512 
1513  return (*da)->type != (*db)->type ? FFDIFFSIGN((*da)->type, (*db)->type) :
1514  strcmp((*da)->name, (*db)->name);
1515 }
1516 
1517 static unsigned get_codecs_sorted(const AVCodecDescriptor ***rcodecs)
1518 {
1519  const AVCodecDescriptor *desc = NULL;
1520  const AVCodecDescriptor **codecs;
1521  unsigned nb_codecs = 0, i = 0;
1522 
1523  while ((desc = avcodec_descriptor_next(desc)))
1524  nb_codecs++;
1525  if (!(codecs = av_calloc(nb_codecs, sizeof(*codecs)))) {
1526  av_log(NULL, AV_LOG_ERROR, "Out of memory\n");
1527  exit_program(1);
1528  }
1529  desc = NULL;
1530  while ((desc = avcodec_descriptor_next(desc)))
1531  codecs[i++] = desc;
1532  av_assert0(i == nb_codecs);
1533  qsort(codecs, nb_codecs, sizeof(*codecs), compare_codec_desc);
1534  *rcodecs = codecs;
1535  return nb_codecs;
1536 }
1537 
1538 static void print_codecs_for_id(enum AVCodecID id, int encoder)
1539 {
1540  void *iter = NULL;
1541  const AVCodec *codec;
1542 
1543  printf(" (%s: ", encoder ? "encoders" : "decoders");
1544 
1545  while ((codec = next_codec_for_id(id, &iter, encoder)))
1546  printf("%s ", codec->name);
1547 
1548  printf(")");
1549 }
1550 
1551 int show_codecs(void *optctx, const char *opt, const char *arg)
1552 {
1553  const AVCodecDescriptor **codecs;
1554  unsigned i, nb_codecs = get_codecs_sorted(&codecs);
1555 
1556  printf("Codecs:\n"
1557  " D..... = Decoding supported\n"
1558  " .E.... = Encoding supported\n"
1559  " ..V... = Video codec\n"
1560  " ..A... = Audio codec\n"
1561  " ..S... = Subtitle codec\n"
1562  " ...I.. = Intra frame-only codec\n"
1563  " ....L. = Lossy compression\n"
1564  " .....S = Lossless compression\n"
1565  " -------\n");
1566  for (i = 0; i < nb_codecs; i++) {
1567  const AVCodecDescriptor *desc = codecs[i];
1568  const AVCodec *codec;
1569  void *iter = NULL;
1570 
1571  if (strstr(desc->name, "_deprecated"))
1572  continue;
1573 
1574  printf(" ");
1575  printf(avcodec_find_decoder(desc->id) ? "D" : ".");
1576  printf(avcodec_find_encoder(desc->id) ? "E" : ".");
1577 
1578  printf("%c", get_media_type_char(desc->type));
1579  printf((desc->props & AV_CODEC_PROP_INTRA_ONLY) ? "I" : ".");
1580  printf((desc->props & AV_CODEC_PROP_LOSSY) ? "L" : ".");
1581  printf((desc->props & AV_CODEC_PROP_LOSSLESS) ? "S" : ".");
1582 
1583  printf(" %-20s %s", desc->name, desc->long_name ? desc->long_name : "");
1584 
1585  /* print decoders/encoders when there's more than one or their
1586  * names are different from codec name */
1587  while ((codec = next_codec_for_id(desc->id, &iter, 0))) {
1588  if (strcmp(codec->name, desc->name)) {
1589  print_codecs_for_id(desc->id, 0);
1590  break;
1591  }
1592  }
1593  iter = NULL;
1594  while ((codec = next_codec_for_id(desc->id, &iter, 1))) {
1595  if (strcmp(codec->name, desc->name)) {
1596  print_codecs_for_id(desc->id, 1);
1597  break;
1598  }
1599  }
1600 
1601  printf("\n");
1602  }
1603  av_free(codecs);
1604  return 0;
1605 }
1606 
1607 static void print_codecs(int encoder)
1608 {
1609  const AVCodecDescriptor **codecs;
1610  unsigned i, nb_codecs = get_codecs_sorted(&codecs);
1611 
1612  printf("%s:\n"
1613  " V..... = Video\n"
1614  " A..... = Audio\n"
1615  " S..... = Subtitle\n"
1616  " .F.... = Frame-level multithreading\n"
1617  " ..S... = Slice-level multithreading\n"
1618  " ...X.. = Codec is experimental\n"
1619  " ....B. = Supports draw_horiz_band\n"
1620  " .....D = Supports direct rendering method 1\n"
1621  " ------\n",
1622  encoder ? "Encoders" : "Decoders");
1623  for (i = 0; i < nb_codecs; i++) {
1624  const AVCodecDescriptor *desc = codecs[i];
1625  const AVCodec *codec;
1626  void *iter = NULL;
1627 
1628  while ((codec = next_codec_for_id(desc->id, &iter, encoder))) {
1629  printf(" %c", get_media_type_char(desc->type));
1630  printf((codec->capabilities & AV_CODEC_CAP_FRAME_THREADS) ? "F" : ".");
1631  printf((codec->capabilities & AV_CODEC_CAP_SLICE_THREADS) ? "S" : ".");
1632  printf((codec->capabilities & AV_CODEC_CAP_EXPERIMENTAL) ? "X" : ".");
1633  printf((codec->capabilities & AV_CODEC_CAP_DRAW_HORIZ_BAND)?"B" : ".");
1634  printf((codec->capabilities & AV_CODEC_CAP_DR1) ? "D" : ".");
1635 
1636  printf(" %-20s %s", codec->name, codec->long_name ? codec->long_name : "");
1637  if (strcmp(codec->name, desc->name))
1638  printf(" (codec %s)", desc->name);
1639 
1640  printf("\n");
1641  }
1642  }
1643  av_free(codecs);
1644 }
1645 
1646 int show_decoders(void *optctx, const char *opt, const char *arg)
1647 {
1648  print_codecs(0);
1649  return 0;
1650 }
1651 
1652 int show_encoders(void *optctx, const char *opt, const char *arg)
1653 {
1654  print_codecs(1);
1655  return 0;
1656 }
1657 
1658 int show_bsfs(void *optctx, const char *opt, const char *arg)
1659 {
1660  const AVBitStreamFilter *bsf = NULL;
1661  void *opaque = NULL;
1662 
1663  printf("Bitstream filters:\n");
1664  while ((bsf = av_bsf_iterate(&opaque)))
1665  printf("%s\n", bsf->name);
1666  printf("\n");
1667  return 0;
1668 }
1669 
1670 int show_protocols(void *optctx, const char *opt, const char *arg)
1671 {
1672  void *opaque = NULL;
1673  const char *name;
1674 
1675  printf("Supported file protocols:\n"
1676  "Input:\n");
1677  while ((name = avio_enum_protocols(&opaque, 0)))
1678  printf(" %s\n", name);
1679  printf("Output:\n");
1680  while ((name = avio_enum_protocols(&opaque, 1)))
1681  printf(" %s\n", name);
1682  return 0;
1683 }
1684 
1685 int show_filters(void *optctx, const char *opt, const char *arg)
1686 {
1687 #if CONFIG_AVFILTER
1688  const AVFilter *filter = NULL;
1689  char descr[64], *descr_cur;
1690  void *opaque = NULL;
1691  int i, j;
1692  const AVFilterPad *pad;
1693 
1694  printf("Filters:\n"
1695  " T.. = Timeline support\n"
1696  " .S. = Slice threading\n"
1697  " ..C = Command support\n"
1698  " A = Audio input/output\n"
1699  " V = Video input/output\n"
1700  " N = Dynamic number and/or type of input/output\n"
1701  " | = Source or sink filter\n");
1702  while ((filter = av_filter_iterate(&opaque))) {
1703  descr_cur = descr;
1704  for (i = 0; i < 2; i++) {
1705  if (i) {
1706  *(descr_cur++) = '-';
1707  *(descr_cur++) = '>';
1708  }
1709  pad = i ? filter->outputs : filter->inputs;
1710  for (j = 0; pad && avfilter_pad_get_name(pad, j); j++) {
1711  if (descr_cur >= descr + sizeof(descr) - 4)
1712  break;
1713  *(descr_cur++) = get_media_type_char(avfilter_pad_get_type(pad, j));
1714  }
1715  if (!j)
1716  *(descr_cur++) = ((!i && (filter->flags & AVFILTER_FLAG_DYNAMIC_INPUTS)) ||
1717  ( i && (filter->flags & AVFILTER_FLAG_DYNAMIC_OUTPUTS))) ? 'N' : '|';
1718  }
1719  *descr_cur = 0;
1720  printf(" %c%c%c %-17s %-10s %s\n",
1721  filter->flags & AVFILTER_FLAG_SUPPORT_TIMELINE ? 'T' : '.',
1722  filter->flags & AVFILTER_FLAG_SLICE_THREADS ? 'S' : '.',
1723  filter->process_command ? 'C' : '.',
1724  filter->name, descr, filter->description);
1725  }
1726 #else
1727  printf("No filters available: libavfilter disabled\n");
1728 #endif
1729  return 0;
1730 }
1731 
1732 int show_colors(void *optctx, const char *opt, const char *arg)
1733 {
1734  const char *name;
1735  const uint8_t *rgb;
1736  int i;
1737 
1738  printf("%-32s #RRGGBB\n", "name");
1739 
1740  for (i = 0; name = av_get_known_color_name(i, &rgb); i++)
1741  printf("%-32s #%02x%02x%02x\n", name, rgb[0], rgb[1], rgb[2]);
1742 
1743  return 0;
1744 }
1745 
1746 int show_pix_fmts(void *optctx, const char *opt, const char *arg)
1747 {
1748  const AVPixFmtDescriptor *pix_desc = NULL;
1749 
1750  printf("Pixel formats:\n"
1751  "I.... = Supported Input format for conversion\n"
1752  ".O... = Supported Output format for conversion\n"
1753  "..H.. = Hardware accelerated format\n"
1754  "...P. = Paletted format\n"
1755  "....B = Bitstream format\n"
1756  "FLAGS NAME NB_COMPONENTS BITS_PER_PIXEL\n"
1757  "-----\n");
1758 
1759 #if !CONFIG_SWSCALE
1760 # define sws_isSupportedInput(x) 0
1761 # define sws_isSupportedOutput(x) 0
1762 #endif
1763 
1764  while ((pix_desc = av_pix_fmt_desc_next(pix_desc))) {
1765  enum AVPixelFormat pix_fmt = av_pix_fmt_desc_get_id(pix_desc);
1766  printf("%c%c%c%c%c %-16s %d %2d\n",
1767  sws_isSupportedInput (pix_fmt) ? 'I' : '.',
1768  sws_isSupportedOutput(pix_fmt) ? 'O' : '.',
1769  pix_desc->flags & AV_PIX_FMT_FLAG_HWACCEL ? 'H' : '.',
1770  pix_desc->flags & AV_PIX_FMT_FLAG_PAL ? 'P' : '.',
1771  pix_desc->flags & AV_PIX_FMT_FLAG_BITSTREAM ? 'B' : '.',
1772  pix_desc->name,
1773  pix_desc->nb_components,
1774  av_get_bits_per_pixel(pix_desc));
1775  }
1776  return 0;
1777 }
1778 
1779 int show_layouts(void *optctx, const char *opt, const char *arg)
1780 {
1781  int i = 0;
1782  uint64_t layout, j;
1783  const char *name, *descr;
1784 
1785  printf("Individual channels:\n"
1786  "NAME DESCRIPTION\n");
1787  for (i = 0; i < 63; i++) {
1788  name = av_get_channel_name((uint64_t)1 << i);
1789  if (!name)
1790  continue;
1791  descr = av_get_channel_description((uint64_t)1 << i);
1792  printf("%-14s %s\n", name, descr);
1793  }
1794  printf("\nStandard channel layouts:\n"
1795  "NAME DECOMPOSITION\n");
1796  for (i = 0; !av_get_standard_channel_layout(i, &layout, &name); i++) {
1797  if (name) {
1798  printf("%-14s ", name);
1799  for (j = 1; j; j <<= 1)
1800  if ((layout & j))
1801  printf("%s%s", (layout & (j - 1)) ? "+" : "", av_get_channel_name(j));
1802  printf("\n");
1803  }
1804  }
1805  return 0;
1806 }
1807 
1808 int show_sample_fmts(void *optctx, const char *opt, const char *arg)
1809 {
1810  int i;
1811  char fmt_str[128];
1812  for (i = -1; i < AV_SAMPLE_FMT_NB; i++)
1813  printf("%s\n", av_get_sample_fmt_string(fmt_str, sizeof(fmt_str), i));
1814  return 0;
1815 }
1816 
1817 static void show_help_codec(const char *name, int encoder)
1818 {
1819  const AVCodecDescriptor *desc;
1820  const AVCodec *codec;
1821 
1822  if (!name) {
1823  av_log(NULL, AV_LOG_ERROR, "No codec name specified.\n");
1824  return;
1825  }
1826 
1827  codec = encoder ? avcodec_find_encoder_by_name(name) :
1828  avcodec_find_decoder_by_name(name);
1829 
1830  if (codec)
1831  print_codec(codec);
1832  else if ((desc = avcodec_descriptor_get_by_name(name))) {
1833  void *iter = NULL;
1834  int printed = 0;
1835 
1836  while ((codec = next_codec_for_id(desc->id, &iter, encoder))) {
1837  printed = 1;
1838  print_codec(codec);
1839  }
1840 
1841  if (!printed) {
1842  av_log(NULL, AV_LOG_ERROR, "Codec '%s' is known to FFmpeg, "
1843  "but no %s for it are available. FFmpeg might need to be "
1844  "recompiled with additional external libraries.\n",
1845  name, encoder ? "encoders" : "decoders");
1846  }
1847  } else {
1848  av_log(NULL, AV_LOG_ERROR, "Codec '%s' is not recognized by FFmpeg.\n",
1849  name);
1850  }
1851 }
1852 
1853 static void show_help_demuxer(const char *name)
1854 {
1855  const AVInputFormat *fmt = av_find_input_format(name);
1856 
1857  if (!fmt) {
1858  av_log(NULL, AV_LOG_ERROR, "Unknown format '%s'.\n", name);
1859  return;
1860  }
1861 
1862  printf("Demuxer %s [%s]:\n", fmt->name, fmt->long_name);
1863 
1864  if (fmt->extensions)
1865  printf(" Common extensions: %s.\n", fmt->extensions);
1866 
1867  if (fmt->priv_class)
1868  show_help_children(fmt->priv_class, AV_OPT_FLAG_DECODING_PARAM);
1869 }
1870 
1871 static void show_help_protocol(const char *name)
1872 {
1873  const AVClass *proto_class;
1874 
1875  if (!name) {
1876  av_log(NULL, AV_LOG_ERROR, "No protocol name specified.\n");
1877  return;
1878  }
1879 
1880  proto_class = avio_protocol_get_class(name);
1881  if (!proto_class) {
1882  av_log(NULL, AV_LOG_ERROR, "Unknown protocol '%s'.\n", name);
1883  return;
1884  }
1885 
1886  show_help_children(proto_class, AV_OPT_FLAG_DECODING_PARAM | AV_OPT_FLAG_ENCODING_PARAM);
1887 }
1888 
1889 static void show_help_muxer(const char *name)
1890 {
1891  const AVCodecDescriptor *desc;
1892  const AVOutputFormat *fmt = av_guess_format(name, NULL, NULL);
1893 
1894  if (!fmt) {
1895  av_log(NULL, AV_LOG_ERROR, "Unknown format '%s'.\n", name);
1896  return;
1897  }
1898 
1899  printf("Muxer %s [%s]:\n", fmt->name, fmt->long_name);
1900 
1901  if (fmt->extensions)
1902  printf(" Common extensions: %s.\n", fmt->extensions);
1903  if (fmt->mime_type)
1904  printf(" Mime type: %s.\n", fmt->mime_type);
1905  if (fmt->video_codec != AV_CODEC_ID_NONE &&
1906  (desc = avcodec_descriptor_get(fmt->video_codec))) {
1907  printf(" Default video codec: %s.\n", desc->name);
1908  }
1909  if (fmt->audio_codec != AV_CODEC_ID_NONE &&
1910  (desc = avcodec_descriptor_get(fmt->audio_codec))) {
1911  printf(" Default audio codec: %s.\n", desc->name);
1912  }
1913  if (fmt->subtitle_codec != AV_CODEC_ID_NONE &&
1914  (desc = avcodec_descriptor_get(fmt->subtitle_codec))) {
1915  printf(" Default subtitle codec: %s.\n", desc->name);
1916  }
1917 
1918  if (fmt->priv_class)
1919  show_help_children(fmt->priv_class, AV_OPT_FLAG_ENCODING_PARAM);
1920 }
1921 
1922 #if CONFIG_AVFILTER
1923 static void show_help_filter(const char *name)
1924 {
1925 #if CONFIG_AVFILTER
1926  const AVFilter *f = avfilter_get_by_name(name);
1927  int i, count;
1928 
1929  if (!name) {
1930  av_log(NULL, AV_LOG_ERROR, "No filter name specified.\n");
1931  return;
1932  } else if (!f) {
1933  av_log(NULL, AV_LOG_ERROR, "Unknown filter '%s'.\n", name);
1934  return;
1935  }
1936 
1937  printf("Filter %s\n", f->name);
1938  if (f->description)
1939  printf(" %s\n", f->description);
1940 
1941  if (f->flags & AVFILTER_FLAG_SLICE_THREADS)
1942  printf(" slice threading supported\n");
1943 
1944  printf(" Inputs:\n");
1945  count = avfilter_pad_count(f->inputs);
1946  for (i = 0; i < count; i++) {
1947  printf(" #%d: %s (%s)\n", i, avfilter_pad_get_name(f->inputs, i),
1948  media_type_string(avfilter_pad_get_type(f->inputs, i)));
1949  }
1950  if (f->flags & AVFILTER_FLAG_DYNAMIC_INPUTS)
1951  printf(" dynamic (depending on the options)\n");
1952  else if (!count)
1953  printf(" none (source filter)\n");
1954 
1955  printf(" Outputs:\n");
1956  count = avfilter_pad_count(f->outputs);
1957  for (i = 0; i < count; i++) {
1958  printf(" #%d: %s (%s)\n", i, avfilter_pad_get_name(f->outputs, i),
1959  media_type_string(avfilter_pad_get_type(f->outputs, i)));
1960  }
1961  if (f->flags & AVFILTER_FLAG_DYNAMIC_OUTPUTS)
1962  printf(" dynamic (depending on the options)\n");
1963  else if (!count)
1964  printf(" none (sink filter)\n");
1965 
1966  if (f->priv_class)
1967  show_help_children(f->priv_class, AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM |
1968  AV_OPT_FLAG_AUDIO_PARAM);
1969  if (f->flags & AVFILTER_FLAG_SUPPORT_TIMELINE)
1970  printf("This filter has support for timeline through the 'enable' option.\n");
1971 #else
1972  av_log(NULL, AV_LOG_ERROR, "Built without libavfilter; "
1973  "cannot satisfy this request\n");
1974 #endif
1975 }
1976 #endif
1977 
1978 static void show_help_bsf(const char *name)
1979 {
1980  const AVBitStreamFilter *bsf = av_bsf_get_by_name(name);
1981 
1982  if (!name) {
1983  av_log(NULL, AV_LOG_ERROR, "No bitstream filter name specified.\n");
1984  return;
1985  } else if (!bsf) {
1986  av_log(NULL, AV_LOG_ERROR, "Unknown bit stream filter '%s'.\n", name);
1987  return;
1988  }
1989 
1990  printf("Bit stream filter %s\n", bsf->name);
1991  PRINT_CODEC_SUPPORTED(bsf, codec_ids, enum AVCodecID, "codecs",
1992  AV_CODEC_ID_NONE, GET_CODEC_NAME);
1993  if (bsf->priv_class)
1994  show_help_children(bsf->priv_class, AV_OPT_FLAG_BSF_PARAM);
1995 }
1996 
1997 int show_help(void *optctx, const char *opt, const char *arg)
1998 {
1999  char *topic, *par;
2000  av_log_set_callback(log_callback_help);
2001 
2002  topic = av_strdup(arg ? arg : "");
2003  if (!topic)
2004  return AVERROR(ENOMEM);
2005  par = strchr(topic, '=');
2006  if (par)
2007  *par++ = 0;
2008 
2009  if (!*topic) {
2010  show_help_default(topic, par);
2011  } else if (!strcmp(topic, "decoder")) {
2012  show_help_codec(par, 0);
2013  } else if (!strcmp(topic, "encoder")) {
2014  show_help_codec(par, 1);
2015  } else if (!strcmp(topic, "demuxer")) {
2016  show_help_demuxer(par);
2017  } else if (!strcmp(topic, "muxer")) {
2018  show_help_muxer(par);
2019  } else if (!strcmp(topic, "protocol")) {
2020  show_help_protocol(par);
2021 #if CONFIG_AVFILTER
2022  } else if (!strcmp(topic, "filter")) {
2023  show_help_filter(par);
2024 #endif
2025  } else if (!strcmp(topic, "bsf")) {
2026  show_help_bsf(par);
2027  } else {
2028  show_help_default(topic, par);
2029  }
2030 
2031  av_freep(&topic);
2032  return 0;
2033 }
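/* Usage sketch (illustrative, not part of the listing above): the fftools
 * register show_help() as the handler for the "-h"/"-help" option, so a
 * command line such as "ffmpeg -h encoder=libx264" ends up calling roughly
 *
 *     show_help(NULL, "h", "encoder=libx264");
 *
 * which splits the argument at '=' and forwards "libx264" to
 * show_help_codec(par, 1). "libx264" is only an example encoder name here.
 */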
2034 
2035 int read_yesno(void)
2036 {
2037  int c = getchar();
2038  int yesno = (av_toupper(c) == 'Y');
2039 
2040  while (c != '\n' && c != EOF)
2041  c = getchar();
2042 
2043  return yesno;
2044 }
2045 
2046 FILE *get_preset_file(char *filename, size_t filename_size,
2047  const char *preset_name, int is_path,
2048  const char *codec_name)
2049 {
2050  FILE *f = NULL;
2051  int i;
2052  const char *base[3] = { getenv("FFMPEG_DATADIR"),
2053  getenv("HOME"),
2054  FFMPEG_DATADIR, };
2055 
2056  if (is_path) {
2057  av_strlcpy(filename, preset_name, filename_size);
2058  f = fopen(filename, "r");
2059  } else {
2060 #if HAVE_GETMODULEHANDLE && defined(_WIN32)
2061  char datadir[MAX_PATH], *ls;
2062  base[2] = NULL;
2063 
2064  if (GetModuleFileNameA(GetModuleHandleA(NULL), datadir, sizeof(datadir) - 1))
2065  {
2066  for (ls = datadir; ls < datadir + strlen(datadir); ls++)
2067  if (*ls == '\\') *ls = '/';
2068 
2069  if (ls = strrchr(datadir, '/'))
2070  {
2071  *ls = 0;
2072  strncat(datadir, "/ffpresets", sizeof(datadir) - 1 - strlen(datadir));
2073  base[2] = datadir;
2074  }
2075  }
2076 #endif
2077  for (i = 0; i < 3 && !f; i++) {
2078  if (!base[i])
2079  continue;
2080  snprintf(filename, filename_size, "%s%s/%s.ffpreset", base[i],
2081  i != 1 ? "" : "/.ffmpeg", preset_name);
2082  f = fopen(filename, "r");
2083  if (!f && codec_name) {
2084  snprintf(filename, filename_size,
2085  "%s%s/%s-%s.ffpreset",
2086  base[i], i != 1 ? "" : "/.ffmpeg", codec_name,
2087  preset_name);
2088  f = fopen(filename, "r");
2089  }
2090  }
2091  }
2092 
2093  return f;
2094 }
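/* Usage sketch (illustrative): resolving a preset such as "-vpre fast" for
 * libx264 boils down to something like the following, where "fast" and
 * "libx264" are example names:
 *
 *     char path[1024];
 *     FILE *f = get_preset_file(path, sizeof(path), "fast", 0, "libx264");
 *     if (f) {
 *         ... parse key=value lines, then fclose(f) ...
 *     }
 *
 * Per the search loop above, $FFMPEG_DATADIR, $HOME/.ffmpeg and the compiled
 * FFMPEG_DATADIR are probed for fast.ffpreset and then libx264-fast.ffpreset.
 */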
2095 
2096 int check_stream_specifier(AVFormatContext *s, AVStream *st, const char *spec)
2097 {
2098  int ret = avformat_match_stream_specifier(s, st, spec);
2099  if (ret < 0)
2100  av_log(s, AV_LOG_ERROR, "Invalid stream specifier: %s.\n", spec);
2101  return ret;
2102 }
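/* Usage sketch (illustrative): a specifier like "a:0" selects the first audio
 * stream, so per-stream option handling typically looks like
 *
 *     if (check_stream_specifier(fmt_ctx, fmt_ctx->streams[i], "a:0") > 0)
 *         ... the option applies to stream i ...
 *
 * A return value of 0 means "no match" and a negative value means the
 * specifier itself was invalid (already logged above). fmt_ctx is an assumed
 * caller-side AVFormatContext.
 */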
2103 
2104 AVDictionary *filter_codec_opts(AVDictionary *opts, enum AVCodecID codec_id,
2105  AVFormatContext *s, AVStream *st, AVCodec *codec)
2106 {
2107  AVDictionary *ret = NULL;
2108  AVDictionaryEntry *t = NULL;
2109  int flags = s->oformat ? AV_OPT_FLAG_ENCODING_PARAM
2110  : AV_OPT_FLAG_DECODING_PARAM;
2111  char prefix = 0;
2112  const AVClass *cc = avcodec_get_class();
2113 
2114  if (!codec)
2115  codec = s->oformat ? avcodec_find_encoder(codec_id)
2116  : avcodec_find_decoder(codec_id);
2117 
2118  switch (st->codecpar->codec_type) {
2119  case AVMEDIA_TYPE_VIDEO:
2120  prefix = 'v';
2121  flags |= AV_OPT_FLAG_VIDEO_PARAM;
2122  break;
2123  case AVMEDIA_TYPE_AUDIO:
2124  prefix = 'a';
2125  flags |= AV_OPT_FLAG_AUDIO_PARAM;
2126  break;
2127  case AVMEDIA_TYPE_SUBTITLE:
2128  prefix = 's';
2129  flags |= AV_OPT_FLAG_SUBTITLE_PARAM;
2130  break;
2131  }
2132 
2133  while (t = av_dict_get(opts, "", t, AV_DICT_IGNORE_SUFFIX)) {
2134  char *p = strchr(t->key, ':');
2135 
2136  /* check stream specification in opt name */
2137  if (p)
2138  switch (check_stream_specifier(s, st, p + 1)) {
2139  case 1: *p = 0; break;
2140  case 0: continue;
2141  default: exit_program(1);
2142  }
2143 
2144  if (av_opt_find(&cc, t->key, NULL, flags, AV_OPT_SEARCH_FAKE_OBJ) ||
2145  !codec ||
2146  (codec->priv_class &&
2147  av_opt_find(&codec->priv_class, t->key, NULL, flags,
2148  AV_OPT_SEARCH_FAKE_OBJ)))
2149  av_dict_set(&ret, t->key, t->value, 0);
2150  else if (t->key[0] == prefix &&
2151  av_opt_find(&cc, t->key + 1, NULL, flags,
2152  AV_OPT_SEARCH_FAKE_OBJ))
2153  av_dict_set(&ret, t->key + 1, t->value, 0);
2154 
2155  if (p)
2156  *p = ':';
2157  }
2158  return ret;
2159 }
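/* Usage sketch (illustrative): given per-codec options collected from the
 * command line (e.g. "-b:v 1M" stored under the key "b:v"), the dictionary
 * for one stream is typically built and consumed like this; codec_opts,
 * fmt_ctx, st, dec_ctx and dec are assumed caller-side variables:
 *
 *     AVDictionary *o = filter_codec_opts(codec_opts,
 *                                         st->codecpar->codec_id,
 *                                         fmt_ctx, st, NULL);
 *     ret = avcodec_open2(dec_ctx, dec, &o);
 *     av_dict_free(&o);
 */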
2160 
2161 AVDictionary **setup_find_stream_info_opts(AVFormatContext *s,
2162  AVDictionary *codec_opts)
2163 {
2164  int i;
2165  AVDictionary **opts;
2166 
2167  if (!s->nb_streams)
2168  return NULL;
2169  opts = av_mallocz_array(s->nb_streams, sizeof(*opts));
2170  if (!opts) {
2171  av_log(NULL, AV_LOG_ERROR,
2172  "Could not alloc memory for stream options.\n");
2173  return NULL;
2174  }
2175  for (i = 0; i < s->nb_streams; i++)
2176  opts[i] = filter_codec_opts(codec_opts, s->streams[i]->codecpar->codec_id,
2177  s, s->streams[i], NULL);
2178  return opts;
2179 }
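/* Usage sketch (illustrative): the usual pattern around
 * avformat_find_stream_info() is roughly
 *
 *     AVDictionary **opts = setup_find_stream_info_opts(ic, codec_opts);
 *     avformat_find_stream_info(ic, opts);
 *     for (i = 0; i < ic->nb_streams; i++)
 *         av_dict_free(&opts[i]);
 *     av_freep(&opts);
 *
 * where ic is an opened AVFormatContext and codec_opts the global codec
 * option dictionary.
 */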
2180 
2181 void *grow_array(void *array, int elem_size, int *size, int new_size)
2182 {
2183  if (new_size >= INT_MAX / elem_size) {
2184  av_log(NULL, AV_LOG_ERROR, "Array too big.\n");
2185  exit_program(1);
2186  }
2187  if (*size < new_size) {
2188  uint8_t *tmp = av_realloc_array(array, new_size, elem_size);
2189  if (!tmp) {
2190  av_log(NULL, AV_LOG_ERROR, "Could not alloc buffer.\n");
2191  exit_program(1);
2192  }
2193  memset(tmp + *size*elem_size, 0, (new_size-*size) * elem_size);
2194  *size = new_size;
2195  return tmp;
2196  }
2197  return array;
2198 }
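/* Usage sketch (illustrative): callers normally go through the GROW_ARRAY()
 * macro from cmdutils.h, which expands to roughly
 *
 *     output_streams = grow_array(output_streams, sizeof(*output_streams),
 *                                 &nb_output_streams, nb_output_streams + 1);
 *
 * i.e. the array grows by one zero-initialized element at a time.
 */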
2199 
2200 double get_rotation(AVStream *st)
2201 {
2202  uint8_t* displaymatrix = av_stream_get_side_data(st,
2203  AV_PKT_DATA_DISPLAYMATRIX, NULL);
2204  double theta = 0;
2205  if (displaymatrix)
2206  theta = -av_display_rotation_get((int32_t*) displaymatrix);
2207 
2208  theta -= 360*floor(theta/360 + 0.9/360);
2209 
2210  if (fabs(theta - 90*round(theta/90)) > 2)
2211  av_log(NULL, AV_LOG_WARNING, "Odd rotation angle.\n"
2212  "If you want to help, upload a sample "
2213  "of this file to https://streams.videolan.org/upload/ "
2214  "and contact the ffmpeg-devel mailing list. (ffmpeg-devel@ffmpeg.org)");
2215 
2216  return theta;
2217 }
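/* Usage sketch (illustrative, simplified): ffmpeg and ffplay use the returned
 * angle to pick an autorotation filter, along the lines of
 *
 *     double theta = get_rotation(ist->st);
 *     if (fabs(theta - 90) < 1.0)
 *         ... insert a "transpose=clock" filter ...
 *     else if (fabs(theta - 180) < 1.0)
 *         ... insert "hflip" and "vflip" filters ...
 */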
2218 
2219 #if CONFIG_AVDEVICE
2220 static int print_device_sources(AVInputFormat *fmt, AVDictionary *opts)
2221 {
2222  int ret, i;
2223  AVDeviceInfoList *device_list = NULL;
2224 
2225  if (!fmt || !fmt->priv_class || !AV_IS_INPUT_DEVICE(fmt->priv_class->category))
2226  return AVERROR(EINVAL);
2227 
2228  printf("Auto-detected sources for %s:\n", fmt->name);
2229  if (!fmt->get_device_list) {
2230  ret = AVERROR(ENOSYS);
2231  printf("Cannot list sources. Not implemented.\n");
2232  goto fail;
2233  }
2234 
2235  if ((ret = avdevice_list_input_sources(fmt, NULL, opts, &device_list)) < 0) {
2236  printf("Cannot list sources.\n");
2237  goto fail;
2238  }
2239 
2240  for (i = 0; i < device_list->nb_devices; i++) {
2241  printf("%s %s [%s]\n", device_list->default_device == i ? "*" : " ",
2242  device_list->devices[i]->device_name, device_list->devices[i]->device_description);
2243  }
2244 
2245  fail:
2246  avdevice_free_list_devices(&device_list);
2247  return ret;
2248 }
2249 
2250 static int print_device_sinks(AVOutputFormat *fmt, AVDictionary *opts)
2251 {
2252  int ret, i;
2253  AVDeviceInfoList *device_list = NULL;
2254 
2255  if (!fmt || !fmt->priv_class || !AV_IS_OUTPUT_DEVICE(fmt->priv_class->category))
2256  return AVERROR(EINVAL);
2257 
2258  printf("Auto-detected sinks for %s:\n", fmt->name);
2259  if (!fmt->get_device_list) {
2260  ret = AVERROR(ENOSYS);
2261  printf("Cannot list sinks. Not implemented.\n");
2262  goto fail;
2263  }
2264 
2265  if ((ret = avdevice_list_output_sinks(fmt, NULL, opts, &device_list)) < 0) {
2266  printf("Cannot list sinks.\n");
2267  goto fail;
2268  }
2269 
2270  for (i = 0; i < device_list->nb_devices; i++) {
2271  printf("%s %s [%s]\n", device_list->default_device == i ? "*" : " ",
2272  device_list->devices[i]->device_name, device_list->devices[i]->device_description);
2273  }
2274 
2275  fail:
2276  avdevice_free_list_devices(&device_list);
2277  return ret;
2278 }
2279 
2280 static int show_sinks_sources_parse_arg(const char *arg, char **dev, AVDictionary **opts)
2281 {
2282  int ret;
2283  if (arg) {
2284  char *opts_str = NULL;
2285  av_assert0(dev && opts);
2286  *dev = av_strdup(arg);
2287  if (!*dev)
2288  return AVERROR(ENOMEM);
2289  if ((opts_str = strchr(*dev, ','))) {
2290  *(opts_str++) = '\0';
2291  if (opts_str[0] && ((ret = av_dict_parse_string(opts, opts_str, "=", ":", 0)) < 0)) {
2292  av_freep(dev);
2293  return ret;
2294  }
2295  }
2296  } else
2297  printf("\nDevice name is not provided.\n"
2298  "You can pass devicename[,opt1=val1[,opt2=val2...]] as an argument.\n\n");
2299  return 0;
2300 }
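/* Usage sketch (illustrative): for "ffmpeg -sources pulse,server=192.168.0.4"
 * this helper receives arg = "pulse,server=192.168.0.4" and splits it into
 * dev = "pulse" plus an AVDictionary containing server=192.168.0.4, which the
 * callers below pass on to avdevice_list_input_sources() /
 * avdevice_list_output_sinks(). The device and option values are examples only.
 */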
2301 
2302 int show_sources(void *optctx, const char *opt, const char *arg)
2303 {
2304  AVInputFormat *fmt = NULL;
2305  char *dev = NULL;
2306  AVDictionary *opts = NULL;
2307  int ret = 0;
2308  int error_level = av_log_get_level();
2309 
2310  av_log_set_level(AV_LOG_WARNING);
2311 
2312  if ((ret = show_sinks_sources_parse_arg(arg, &dev, &opts)) < 0)
2313  goto fail;
2314 
2315  do {
2316  fmt = av_input_audio_device_next(fmt);
2317  if (fmt) {
2318  if (!strcmp(fmt->name, "lavfi"))
2319  continue; //it's pointless to probe lavfi
2320  if (dev && !av_match_name(dev, fmt->name))
2321  continue;
2322  print_device_sources(fmt, opts);
2323  }
2324  } while (fmt);
2325  do {
2326  fmt = av_input_video_device_next(fmt);
2327  if (fmt) {
2328  if (dev && !av_match_name(dev, fmt->name))
2329  continue;
2330  print_device_sources(fmt, opts);
2331  }
2332  } while (fmt);
2333  fail:
2334  av_dict_free(&opts);
2335  av_free(dev);
2336  av_log_set_level(error_level);
2337  return ret;
2338 }
2339 
2340 int show_sinks(void *optctx, const char *opt, const char *arg)
2341 {
2342  AVOutputFormat *fmt = NULL;
2343  char *dev = NULL;
2344  AVDictionary *opts = NULL;
2345  int ret = 0;
2346  int error_level = av_log_get_level();
2347 
2348  av_log_set_level(AV_LOG_WARNING);
2349 
2350  if ((ret = show_sinks_sources_parse_arg(arg, &dev, &opts)) < 0)
2351  goto fail;
2352 
2353  do {
2354  fmt = av_output_audio_device_next(fmt);
2355  if (fmt) {
2356  if (dev && !av_match_name(dev, fmt->name))
2357  continue;
2358  print_device_sinks(fmt, opts);
2359  }
2360  } while (fmt);
2361  do {
2362  fmt = av_output_video_device_next(fmt);
2363  if (fmt) {
2364  if (dev && !av_match_name(dev, fmt->name))
2365  continue;
2366  print_device_sinks(fmt, opts);
2367  }
2368  } while (fmt);
2369  fail:
2370  av_dict_free(&opts);
2371  av_free(dev);
2372  av_log_set_level(error_level);
2373  return ret;
2374 }
2375 
2376 #endif
error
static void error(const char *err)
Definition: target_bsf_fuzzer.c:29
OPT_FLOAT
#define OPT_FLOAT
Definition: cmdutils.h:168
add_bytes
static void add_bytes(HYuvContext *s, uint8_t *dst, uint8_t *src, int w)
Definition: huffyuvdec.c:859
formats
formats
Definition: signature.h:48
ff_get_video_buffer
AVFrame * ff_get_video_buffer(AVFilterLink *link, int w, int h)
Request a picture buffer with a specific set of permissions.
Definition: video.c:99
GET_ARG
#define GET_ARG(arg)
OPT_EXIT
#define OPT_EXIT
Definition: cmdutils.h:171
ff_get_audio_buffer
AVFrame * ff_get_audio_buffer(AVFilterLink *link, int nb_samples)
Request an audio samples buffer with a specific set of permissions.
Definition: audio.c:86
be
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it be(in the first position) for now. Options ------- Then comes the options array. This is what will define the user accessible options. For example
av_force_cpu_flags
void av_force_cpu_flags(int arg)
Disables cpu detection and forces the specified flags.
Definition: cpu.c:65
AVCodec
AVCodec.
Definition: codec.h:190
print_codecs_for_id
static void print_codecs_for_id(enum AVCodecID id, int encoder)
Definition: cmdutils.c:1538
L1
F H1 F F H1 F F F F H1<-F-------F-------F v v v H2 H3 H2 ^ ^ ^ F-------F-------F-> H1<-F-------F-------F|||||||||F H1 F|||||||||F H1 Funavailable fullpel samples(outside the picture for example) shall be equalto the closest available fullpel sampleSmaller pel interpolation:--------------------------if diag_mc is set then points which lie on a line between 2 vertically, horizontally or diagonally adjacent halfpel points shall be interpolatedlinearly with rounding to nearest and halfway values rounded up.points which lie on 2 diagonals at the same time should only use the onediagonal not containing the fullpel point F--> O q O<--h1-> O q O<--F v \/v \/v O O O O O O O|/|\|q q q q q|/|\|O O O O O O O ^/\ ^/\ ^ h2--> O q O<--h3-> O q O<--h2 v \/v \/v O O O O O O O|\|/|q q q q q|\|/|O O O O O O O ^/\ ^/\ ^ F--> O q O<--h1-> O q O<--Fthe remaining points shall be bilinearly interpolated from theup to 4 surrounding halfpel and fullpel points, again rounding should be tonearest and halfway values rounded upcompliant Snow decoders MUST support 1-1/8 pel luma and 1/2-1/16 pel chromainterpolation at leastOverlapped block motion compensation:-------------------------------------FIXMELL band prediction:===================Each sample in the LL0 subband is predicted by the median of the left, top andleft+top-topleft samples, samples outside the subband shall be considered tobe 0. To reverse this prediction in the decoder apply the following.for(y=0;y< height;y++){ for(x=0;x< width;x++){ sample[y][x]+=median(sample[y-1][x], sample[y][x-1], sample[y-1][x]+sample[y][x-1]-sample[y-1][x-1]);}}sample[-1][ *]=sample[ *][-1]=0;width, height here are the width and height of the LL0 subband not of the finalvideoDequantization:===============FIXMEWavelet Transform:==================Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integertransform and an integer approximation of the symmetric biorthogonal 9/7daubechies wavelet.2D IDWT(inverse discrete wavelet transform) --------------------------------------------The 2D IDWT applies a 2D filter recursively, each time combining the4 lowest frequency subbands into a single subband until only 1 subbandremains.The 2D filter is done by first applying a 1D filter in the vertical directionand then applying it in the horizontal one. --------------- --------------- --------------- ---------------|LL0|HL0|||||||||||||---+---|HL1||L0|H0|HL1||LL1|HL1|||||LH0|HH0|||||||||||||-------+-------|-> L1 H1 LH1 HH1 LH1 HH1 LH1 HH1 L1
Definition: snow.txt:554
OptionGroup::group_def
const OptionGroupDef * group_def
Definition: cmdutils.h:309
show_help_default
void show_help_default(const char *opt, const char *arg)
Per-fftool specific help handler.
Definition: ffmpeg_opt.c:3189
stride
int stride
Definition: mace.c:144
AVMEDIA_TYPE_SUBTITLE
@ AVMEDIA_TYPE_SUBTITLE
Definition: avutil.h:204
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:182
direct
static void direct(const float *in, const FFTComplex *ir, int len, float *out)
Definition: af_afir.c:60
process
static void process(NormalizeContext *s, AVFrame *in, AVFrame *out)
Definition: vf_normalize.c:156
draw_horiz_band
static void draw_horiz_band(AVCodecContext *ctx, const AVFrame *fr, int offset[4], int slice_position, int type, int height)
Definition: api-band-test.c:36
sws_isSupportedOutput
#define sws_isSupportedOutput(x)
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
status
they must not be accessed directly The fifo field contains the frames that are queued in the input for processing by the filter The status_in and status_out fields contains the queued status(EOF or error) of the link
L2
F H1 F F H1 F F F F H1<-F-------F-------F v v v H2 H3 H2 ^ ^ ^ F-------F-------F-> H1<-F-------F-------F|||||||||F H1 F|||||||||F H1 Funavailable fullpel samples(outside the picture for example) shall be equalto the closest available fullpel sampleSmaller pel interpolation:--------------------------if diag_mc is set then points which lie on a line between 2 vertically, horizontally or diagonally adjacent halfpel points shall be interpolatedlinearly with rounding to nearest and halfway values rounded up.points which lie on 2 diagonals at the same time should only use the onediagonal not containing the fullpel point F--> O q O<--h1-> O q O<--F v \/v \/v O O O O O O O|/|\|q q q q q|/|\|O O O O O O O ^/\ ^/\ ^ h2--> O q O<--h3-> O q O<--h2 v \/v \/v O O O O O O O|\|/|q q q q q|\|/|O O O O O O O ^/\ ^/\ ^ F--> O q O<--h1-> O q O<--Fthe remaining points shall be bilinearly interpolated from theup to 4 surrounding halfpel and fullpel points, again rounding should be tonearest and halfway values rounded upcompliant Snow decoders MUST support 1-1/8 pel luma and 1/2-1/16 pel chromainterpolation at leastOverlapped block motion compensation:-------------------------------------FIXMELL band prediction:===================Each sample in the LL0 subband is predicted by the median of the left, top andleft+top-topleft samples, samples outside the subband shall be considered tobe 0. To reverse this prediction in the decoder apply the following.for(y=0;y< height;y++){ for(x=0;x< width;x++){ sample[y][x]+=median(sample[y-1][x], sample[y][x-1], sample[y-1][x]+sample[y][x-1]-sample[y-1][x-1]);}}sample[-1][ *]=sample[ *][-1]=0;width, height here are the width and height of the LL0 subband not of the finalvideoDequantization:===============FIXMEWavelet Transform:==================Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integertransform and an integer approximation of the symmetric biorthogonal 9/7daubechies wavelet.2D IDWT(inverse discrete wavelet transform) --------------------------------------------The 2D IDWT applies a 2D filter recursively, each time combining the4 lowest frequency subbands into a single subband until only 1 subbandremains.The 2D filter is done by first applying a 1D filter in the vertical directionand then applying it in the horizontal one. --------------- --------------- --------------- ---------------|LL0|HL0|||||||||||||---+---|HL1||L0|H0|HL1||LL1|HL1|||||LH0|HH0|||||||||||||-------+-------|-> L1 H1 LH1 HH1 LH1 HH1 LH1 HH1 L2
Definition: snow.txt:554
level
uint8_t level
Definition: svq3.c:210
program
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C program
Definition: undefined.txt:6
INFINITY
#define INFINITY
Definition: mathematics.h:67
cast
The reader does not expect b to be semantically here and if the code is changed by maybe adding a cast
Definition: undefined.txt:36
avdevice_list_input_sources
int avdevice_list_input_sources(AVInputFormat *device, const char *device_name, AVDictionary *device_options, AVDeviceInfoList **device_list)
List devices.
Definition: avdevice.c:228
sws_isSupportedInput
#define sws_isSupportedInput(x)
AVOutputFormat::extensions
const char * extensions
comma-separated filename extensions
Definition: avformat.h:499
init
static av_cold int init(AVCodecContext *avctx)
Definition: avrndec.c:35
mix
static int mix(int c0, int c1)
Definition: 4xm.c:714
AVOutputFormat::name
const char * name
Definition: avformat.h:491
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
nb_input_files
int nb_input_files
Definition: ffmpeg.c:150
avio_protocol_get_class
const AVClass * avio_protocol_get_class(const char *name)
Get AVClass by names of available protocols.
Definition: protocols.c:111
opt.h
AV_OPT_FLAG_VIDEO_PARAM
#define AV_OPT_FLAG_VIDEO_PARAM
Definition: opt.h:279
GET_SAMPLE_RATE_NAME
#define GET_SAMPLE_RATE_NAME(rate)
Definition: cmdutils.h:631
AVCodecParameters::codec_type
enum AVMediaType codec_type
General type of the encoded data.
Definition: codec_par.h:56
space
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated space
Definition: undefined.txt:4
AV_IS_INPUT_DEVICE
#define AV_IS_INPUT_DEVICE(category)
Definition: log.h:50
avfilter_pad_get_name
const char * avfilter_pad_get_name(const AVFilterPad *pads, int pad_idx)
Get the name of an AVFilterPad.
Definition: avfilter.c:1029
OptionDef::off
size_t off
Definition: cmdutils.h:183
Then
status_out is the status that have been taken into it is final when it is not The typical task of an activate callback is to first check the backward status of output and if relevant forward it to the corresponding input Then
Definition: filter_design.txt:165
AVCodec::long_name
const char * long_name
Descriptive name for the codec, meant to be more human readable than name.
Definition: codec.h:202
libm.h
report_file
static FILE * report_file
Definition: cmdutils.c:72
show_formats
int show_formats(void *optctx, const char *opt, const char *arg)
Print a listing containing all the formats supported by the program (including devices).
Definition: cmdutils.c:1356
av_bprint_finalize
int av_bprint_finalize(AVBPrint *buf, char **ret_str)
Finalize a print buffer.
Definition: bprint.c:235
out
FILE * out
Definition: movenc.c:54
av_frame_get_buffer
int av_frame_get_buffer(AVFrame *frame, int align)
Allocate new buffer(s) for audio or video data.
Definition: frame.c:325
AV_CODEC_PROP_LOSSY
#define AV_CODEC_PROP_LOSSY
Codec supports lossy compression.
Definition: codec_desc.h:78
elements
static const ElemCat * elements[ELEMENT_COUNT]
Definition: signature.h:566
scheduling
===============The purpose of these rules is to ensure that frames flow in the filter graph without getting stuck and accumulating somewhere. Simple filters that output one frame for each input frame should not have to worry about it. There are two design for filters:one using the filter_frame() and request_frame() callbacks and the other using the activate() callback. The design using filter_frame() and request_frame() is legacy, but it is suitable for filters that have a single input and process one frame at a time. New filters with several inputs, that treat several frames at a time or that require a special treatment at EOF should probably use the design using activate(). activate -------- This method is called when something must be done in a filter scheduling
Definition: filter_design.txt:142
opt_report
int opt_report(void *optctx, const char *opt, const char *arg)
Definition: cmdutils.c:1053
MID_STATE
#define MID_STATE
Definition: snow.h:40
av_get_sample_fmt_string
char * av_get_sample_fmt_string(char *buf, int buf_size, enum AVSampleFormat sample_fmt)
Generate a string corresponding to the sample format with sample_fmt, or a header if sample_fmt is ne...
Definition: samplefmt.c:93
av_bprint_init
void av_bprint_init(AVBPrint *buf, unsigned size_init, unsigned size_max)
Definition: bprint.c:69
show_layouts
int show_layouts(void *optctx, const char *opt, const char *arg)
Print a listing containing all the standard channel layouts supported by the program.
Definition: cmdutils.c:1779
cb
static double cb(void *priv, double x, double y)
Definition: vf_geq.c:215
nothing
static void nothing(void *foo)
Definition: dshow_capture.h:52
is
The official guide to swscale for confused that is
Definition: swscale.txt:28
playlist
Definition: hls.c:93
u
#define u(width, name, range_min, range_max)
Definition: cbs_h2645.c:262
AV_LOG_QUIET
#define AV_LOG_QUIET
Print no output.
Definition: log.h:158
AVCodec::priv_class
const AVClass * priv_class
AVClass for the private context.
Definition: codec.h:216
init_parse_context
static void init_parse_context(OptionParseContext *octx, const OptionGroupDef *groups, int nb_groups)
Definition: cmdutils.c:702
developers
The official guide to swscale for confused developers
Definition: swscale.txt:2
SHOW_DEFAULT
@ SHOW_DEFAULT
Definition: cmdutils.c:77
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1075
log_callback_report
static void log_callback_report(void *ptr, int level, const char *fmt, va_list vl)
Definition: cmdutils.c:101
sample_fmts
static enum AVSampleFormat sample_fmts[]
Definition: adpcmenc.c:716
va_copy.h
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:55
AV_CODEC_CAP_HARDWARE
#define AV_CODEC_CAP_HARDWARE
Codec is backed by a hardware implementation.
Definition: codec.h:150
FFERROR_NOT_READY
return FFERROR_NOT_READY
Definition: filter_design.txt:204
filters
static const struct PPFilter filters[]
Definition: postprocess.c:134
AV_LOG_PANIC
#define AV_LOG_PANIC
Something went really wrong and we will crash now.
Definition: log.h:163
edgedetect
This document is a tutorial initiation for writing simple filters in libavfilter libavfilter is which means that it is highly recommended that you submit your filters to the FFmpeg development mailing list and make sure that they are applied your filters are likely to have a very short lifetime due to more or less regular internal API and a limited and testing changes the pixels in whatever fashion you and outputs the modified frame The most simple way of doing this is to take a similar filter We ll pick edgedetect
Definition: writing_filters.txt:16
AVDeviceInfo::device_name
char * device_name
device name, format depends on device
Definition: avdevice.h:453
sws_dict
AVDictionary * sws_dict
Definition: cmdutils.c:68
show_help_codec
static void show_help_codec(const char *name, int encoder)
Definition: cmdutils.c:1817
get_media_type_char
static char get_media_type_char(enum AVMediaType type)
Definition: cmdutils.c:1484
AVBitStreamFilter::name
const char * name
Definition: bsf.h:99
mv
static const int8_t mv[256][2]
Definition: 4xm.c:77
output
filter_frame For filters that do not use the this method is called when a frame is pushed to the filter s input It can be called at any time except in a reentrant way If the input frame is enough to produce output
Definition: filter_design.txt:225
codecs
static struct codec_string codecs[]
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
show_version
int show_version(void *optctx, const char *opt, const char *arg)
Print the version of the program to stdout.
Definition: cmdutils.c:1195
basis
static int16_t basis[64][64]
Definition: mpegvideo_enc.c:4270
avformat_get_class
const AVClass * avformat_get_class(void)
Get the AVClass for AVFormatContext.
Definition: options.c:170
program_name
const char program_name[]
program name, defined by the program for show_version().
Definition: ffmpeg.c:109
design
Filter design
Definition: filter_design.txt:2
av_unused
#define av_unused
Definition: attributes.h:131
AVDeviceInfoList::nb_devices
int nb_devices
number of autodetected devices
Definition: avdevice.h:462
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:203
GET_PIX_FMT_NAME
#define GET_PIX_FMT_NAME(pix_fmt)
Definition: cmdutils.h:622
end
static av_cold int end(AVCodecContext *avctx)
Definition: avrndec.c:92
callbacks
static const OMX_CALLBACKTYPE callbacks
Definition: omx.c:332
AV_CODEC_CAP_TRUNCATED
#define AV_CODEC_CAP_TRUNCATED
Definition: codec.h:51
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:300
tmp
static uint8_t tmp[11]
Definition: aes_ctr.c:26
av_frame_make_writable
int av_frame_make_writable(AVFrame *frame)
Ensure that the frame data is writable, avoiding data copy if possible.
Definition: frame.c:612
pixdesc.h
step
trying all byte sequences megabyte in length and selecting the best looking sequence will yield cases to try But a word about which is also called distortion Distortion can be quantified by almost any quality measurement one chooses the sum of squared differences is used but more complex methods that consider psychovisual effects can be used as well It makes no difference in this discussion First step
Definition: rate_distortion.txt:58
print_codec
static void print_codec(const AVCodec *c)
Definition: cmdutils.c:1389
AVCodec::capabilities
int capabilities
Codec capabilities.
Definition: codec.h:209
w
uint8_t w
Definition: llviddspenc.c:38
Rate
Rate
G723.1 rate values.
Definition: g723_1.h:72
OPT_INPUT
#define OPT_INPUT
Definition: cmdutils.h:178
even
Tag MUST be even
Definition: snow.txt:206
sources
Note except for filters that can have queued frames and sources
Definition: filter_design.txt:285
AVPixFmtDescriptor::name
const char * name
Definition: pixdesc.h:82
AVOption
AVOption.
Definition: opt.h:246
HAS_ARG
#define HAS_ARG
Definition: cmdutils.h:161
OptionGroupList::groups
OptionGroup * groups
Definition: cmdutils.h:329
b
#define b
Definition: input.c:41
chroma
static av_always_inline void chroma(WaveformContext *s, AVFrame *in, AVFrame *out, int component, int intensity, int offset_y, int offset_x, int column, int mirror, int jobnr, int nb_jobs)
Definition: vf_waveform.c:1631
table
static const uint16_t table[]
Definition: prosumer.c:206
likely
#define likely(x)
Definition: asm.h:33
OptionDef::dst_ptr
void * dst_ptr
Definition: cmdutils.h:181
OptionGroupList::nb_groups
int nb_groups
Definition: cmdutils.h:330
data
const char data[16]
Definition: mxf.c:91
linear
static int linear(InterplayACMContext *s, unsigned ind, unsigned col)
Definition: interplayacm.c:121
av_pix_fmt_desc_next
const AVPixFmtDescriptor * av_pix_fmt_desc_next(const AVPixFmtDescriptor *prev)
Iterate over all pixel format descriptors known to libavutil.
Definition: pixdesc.c:2556
format_opts
AVDictionary * format_opts
Definition: cmdutils.c:70
avio_enum_protocols
const char * avio_enum_protocols(void **opaque, int output)
Iterate through names of available protocols.
Definition: protocols.c:96
ff_request_frame
int ff_request_frame(AVFilterLink *link)
Request an input frame from the filter at the other end of the link.
Definition: avfilter.c:407
integer
int integer
Definition: swresample_internal.h:37
convert
Definition: convert.py:1
AV_DICT_IGNORE_SUFFIX
#define AV_DICT_IGNORE_SUFFIX
Return first entry in a dictionary whose first part corresponds to the search key,...
Definition: dict.h:70
possible
the frame and frame reference mechanism is intended to as much as possible
Definition: filter_design.txt:45
av_mallocz_array
void * av_mallocz_array(size_t nmemb, size_t size)
Definition: mem.c:190
FLAGS
#define FLAGS
Definition: cmdutils.c:541
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:192
F
#define F(x)
base
uint8_t base
Definition: vp3data.h:202
fc
#define fc(width, name, range_min, range_max)
Definition: cbs_av1.c:555
avresample.h
OptionGroup::swr_opts
AVDictionary * swr_opts
Definition: cmdutils.h:319
allocate
#define allocate(name, size)
Definition: cbs_h2645.c:423
show_help_children
void show_help_children(const AVClass *class, int flags)
Show help for all options with given flags in class and all its children.
Definition: cmdutils.c:203
AVOption::flags
int flags
Definition: opt.h:275
av_get_bits_per_pixel
int av_get_bits_per_pixel(const AVPixFmtDescriptor *pixdesc)
Return the number of bits per pixel used by the pixel format described by pixdesc.
Definition: pixdesc.c:2501
SHOW_COPYRIGHT
#define SHOW_COPYRIGHT
Definition: cmdutils.c:1100
max
#define max(a, b)
Definition: cuda_runtime.h:33
mathematics.h
filter
filter_frame For filters that do not use the this method is called when a frame is pushed to the filter s input It can be called at any time except in a reentrant way If the input frame is enough to produce then the filter should push the output frames on the output link immediately As an exception to the previous rule if the input frame is enough to produce several output frames then the filter needs output only at least one per link The additional frames can be left buffered in the filter
Definition: filter_design.txt:228
av_bsf_iterate
const AVBitStreamFilter * av_bsf_iterate(void **opaque)
Iterate over all registered bitstream filters.
Definition: bitstream_filters.c:66
AVDictionary
Definition: dict.c:30
Frame
Definition: ffplay.c:155
av_get_cpu_flags
int av_get_cpu_flags(void)
Return the flags which specify extensions supported by the CPU.
Definition: cpu.c:93
subbands
subbands
Definition: aptx.h:39
processed
status_in is a status change that must be taken into account after all frames in fifo have been processed
Definition: filter_design.txt:159
hide_banner
int hide_banner
Definition: cmdutils.c:74
config_props
static int config_props(AVFilterLink *outlink)
Definition: aeval.c:223
reverse
static uint32_t reverse(uint32_t num, int bits)
Definition: speedhq.c:565
put_pixel
static void put_pixel(uint16_t *dst, ptrdiff_t linesize, const int16_t *in, int bits_per_raw_sample)
Add bias value, clamp and output pixels of a slice.
Definition: proresdsp.c:41
though
though
Definition: snow.txt:1
AV_OPT_FLAG_FILTERING_PARAM
#define AV_OPT_FLAG_FILTERING_PARAM
a generic parameter which can be set by the user for filtering
Definition: opt.h:292
Makefile
s EdgeDetect Foobar g libavfilter vf_edgedetect c libavfilter vf_foobar c edit libavfilter Makefile
Definition: writing_filters.txt:20
FF_FILTER_FORWARD_STATUS_BACK
#define FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink)
Forward the status on an output link to an input link.
Definition: filters.h:199
H0
F H1 F F H1 F F F F H1<-F-------F-------F v v v H2 H3 H2 ^ ^ ^ F-------F-------F-> H1<-F-------F-------F|||||||||F H1 F|||||||||F H1 Funavailable fullpel samples(outside the picture for example) shall be equalto the closest available fullpel sampleSmaller pel interpolation:--------------------------if diag_mc is set then points which lie on a line between 2 vertically, horizontally or diagonally adjacent halfpel points shall be interpolatedlinearly with rounding to nearest and halfway values rounded up.points which lie on 2 diagonals at the same time should only use the onediagonal not containing the fullpel point F--> O q O<--h1-> O q O<--F v \/v \/v O O O O O O O|/|\|q q q q q|/|\|O O O O O O O ^/\ ^/\ ^ h2--> O q O<--h3-> O q O<--h2 v \/v \/v O O O O O O O|\|/|q q q q q|\|/|O O O O O O O ^/\ ^/\ ^ F--> O q O<--h1-> O q O<--Fthe remaining points shall be bilinearly interpolated from theup to 4 surrounding halfpel and fullpel points, again rounding should be tonearest and halfway values rounded upcompliant Snow decoders MUST support 1-1/8 pel luma and 1/2-1/16 pel chromainterpolation at leastOverlapped block motion compensation:-------------------------------------FIXMELL band prediction:===================Each sample in the LL0 subband is predicted by the median of the left, top andleft+top-topleft samples, samples outside the subband shall be considered tobe 0. To reverse this prediction in the decoder apply the following.for(y=0;y< height;y++){ for(x=0;x< width;x++){ sample[y][x]+=median(sample[y-1][x], sample[y][x-1], sample[y-1][x]+sample[y][x-1]-sample[y-1][x-1]);}}sample[-1][ *]=sample[ *][-1]=0;width, height here are the width and height of the LL0 subband not of the finalvideoDequantization:===============FIXMEWavelet Transform:==================Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integertransform and an integer approximation of the symmetric biorthogonal 9/7daubechies wavelet.2D IDWT(inverse discrete wavelet transform) --------------------------------------------The 2D IDWT applies a 2D filter recursively, each time combining the4 lowest frequency subbands into a single subband until only 1 subbandremains.The 2D filter is done by first applying a 1D filter in the vertical directionand then applying it in the horizontal one. --------------- --------------- --------------- ---------------|LL0|HL0|||||||||||||---+---|HL1||L0|H0|HL1||LL1|HL1|||||LH0|HH0|||||||||||||-------+-------|-> L1 H1 LH1 HH1 LH1 HH1 LH1 HH1 H0
Definition: snow.txt:554
ff_thread_await_progress
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before ff_thread_await_progress() has been called on them. reget_buffer() and buffer age optimizations no longer work. *The contents of buffers must not be written to after ff_thread_report_progress() has been called on them. This includes draw_edges(). Porting codecs to frame threading
quality
trying all byte sequences megabyte in length and selecting the best looking sequence will yield cases to try But a word about quality
Definition: rate_distortion.txt:12
AVOutputFormat::subtitle_codec
enum AVCodecID subtitle_codec
default subtitle codec
Definition: avformat.h:503
D
D(D(float, sse)
Definition: rematrix_init.c:28
OptionDef
Definition: cmdutils.h:158
AVUNERROR
#define AVUNERROR(e)
Definition: error.h:44
av_bsf_get_by_name
const AVBitStreamFilter * av_bsf_get_by_name(const char *name)
Definition: bitstream_filters.c:83
AVInputFormat::long_name
const char * long_name
Descriptive name for the format, meant to be more human-readable than name.
Definition: avformat.h:648
bit
#define bit(string, value)
Definition: cbs_mpeg2.c:58
A
#define A(x)
Definition: vp56_arith.h:28
exit_program
void exit_program(int ret)
Wraps exit with a program-specific cleanup routine.
Definition: cmdutils.c:133
InputStream
Definition: ffmpeg.h:294
Filter
F H1 F F H1 F F F F H1<-F-------F-------F v v v H2 H3 H2 ^ ^ ^ F-------F-------F-> H1<-F-------F-------F|||||||||F H1 F|||||||||F H1 Funavailable fullpel samples(outside the picture for example) shall be equalto the closest available fullpel sampleSmaller pel interpolation:--------------------------if diag_mc is set then points which lie on a line between 2 vertically, horizontally or diagonally adjacent halfpel points shall be interpolatedlinearly with rounding to nearest and halfway values rounded up.points which lie on 2 diagonals at the same time should only use the onediagonal not containing the fullpel point F--> O q O<--h1-> O q O<--F v \/v \/v O O O O O O O|/|\|q q q q q|/|\|O O O O O O O ^/\ ^/\ ^ h2--> O q O<--h3-> O q O<--h2 v \/v \/v O O O O O O O|\|/|q q q q q|\|/|O O O O O O O ^/\ ^/\ ^ F--> O q O<--h1-> O q O<--Fthe remaining points shall be bilinearly interpolated from theup to 4 surrounding halfpel and fullpel points, again rounding should be tonearest and halfway values rounded upcompliant Snow decoders MUST support 1-1/8 pel luma and 1/2-1/16 pel chromainterpolation at leastOverlapped block motion compensation:-------------------------------------FIXMELL band prediction:===================Each sample in the LL0 subband is predicted by the median of the left, top andleft+top-topleft samples, samples outside the subband shall be considered tobe 0. To reverse this prediction in the decoder apply the following.for(y=0;y< height;y++){ for(x=0;x< width;x++){ sample[y][x]+=median(sample[y-1][x], sample[y][x-1], sample[y-1][x]+sample[y][x-1]-sample[y-1][x-1]);}}sample[-1][ *]=sample[ *][-1]=0;width, height here are the width and height of the LL0 subband not of the finalvideoDequantization:===============FIXMEWavelet Transform:==================Snow supports 2 wavelet transforms, the symmetric biorthogonal 5/3 integertransform and an integer approximation of the symmetric biorthogonal 9/7daubechies wavelet.2D IDWT(inverse discrete wavelet transform) --------------------------------------------The 2D IDWT applies a 2D filter recursively, each time combining the4 lowest frequency subbands into a single subband until only 1 subbandremains.The 2D filter is done by first applying a 1D filter in the vertical directionand then applying it in the horizontal one. --------------- --------------- --------------- ---------------|LL0|HL0|||||||||||||---+---|HL1||L0|H0|HL1||LL1|HL1|||||LH0|HH0|||||||||||||-------+-------|-> L1 H1 LH1 HH1 LH1 HH1 LH1 HH1 Filter
Definition: snow.txt:554
ff_inlink_consume_frame
int ff_inlink_consume_frame(AVFilterLink *link, AVFrame **rframe)
Take a frame from the link's FIFO and update the link's stats.
Definition: avfilter.c:1476
av_max_alloc
void av_max_alloc(size_t max)
Set the maximum size that may be allocated in one block.
Definition: mem.c:73
parse_number_or_die
double parse_number_or_die(const char *context, const char *numstr, int type, double min, double max)
Parse a string and return its corresponding value as a double.
Definition: cmdutils.c:141
return
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a it should return
Definition: filter_design.txt:264
av_guess_format
ff_const59 AVOutputFormat * av_guess_format(const char *short_name, const char *filename, const char *mime_type)
Return the output format in the list of registered output formats which best matches the provided par...
Definition: format.c:51
avcodec_find_decoder_by_name
AVCodec * avcodec_find_decoder_by_name(const char *name)
Find a registered decoder with the specified name.
Definition: allcodecs.c:947
print_error
void print_error(const char *filename, int err)
Print an error message to stderr, indicating filename and a human readable description of the error c...
Definition: cmdutils.c:1085
some
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that some(invalid) inputs can trigger overflows(undefined behavior). In these cases
decoder
static const chunk_decoder decoder[8]
Definition: dfa.c:330
OptionGroupList
A list of option groups that all have the same group type (e.g.
Definition: cmdutils.h:326
reasons
if it could not for temporary reasons
Definition: filter_design.txt:265
fail
#define fail()
Definition: checkasm.h:123
resolution
The official guide to swscale for confused that consecutive non overlapping rectangles of slice_bottom special converter These generally are unscaled converters of common like for each output line the vertical scaler pulls lines from a ring buffer When the ring buffer does not contain the wanted then it is pulled from the input slice through the input converter and horizontal scaler The result is also stored in the ring buffer to serve future vertical scaler requests When no more output can be generated because lines from a future slice would be then all remaining lines in the current slice are horizontally scaled and put in the ring buffer[This is done for luma and chroma, each with possibly different numbers of lines per picture.] Input to YUV Converter When the input to the main path is not planar bits per component YUV or bit it is converted to planar bit YUV Two sets of converters exist for this the other leaves the full chroma resolution
Definition: swscale.txt:54
av_strerror
int av_strerror(int errnum, char *errbuf, size_t errbuf_size)
Put a description of the AVERROR code errnum in errbuf.
Definition: error.c:105
show_decoders
int show_decoders(void *optctx, const char *opt, const char *arg)
Print a listing containing all the decoders supported by the program.
Definition: cmdutils.c:1646
av_output_video_device_next
AVOutputFormat * av_output_video_device_next(AVOutputFormat *d)
Video output devices iterator.
Definition: avdevice.c:121
AV_PIX_FMT_FLAG_HWACCEL
#define AV_PIX_FMT_FLAG_HWACCEL
Pixel format is an HW accelerated format.
Definition: pixdesc.h:140
frames
if it could not because there are no more frames
Definition: filter_design.txt:266
relevant
status_out is the status that have been taken into it is final when it is not The typical task of an activate callback is to first check the backward status of output and if relevant forward it to the corresponding input if relevant
Definition: filter_design.txt:165
SHOW_CONFIG
#define SHOW_CONFIG
Definition: cmdutils.c:1099
av_filter_iterate
const AVFilter * av_filter_iterate(void **opaque)
Iterate over all registered filters.
Definition: allfilters.c:509
ff_thread_get_buffer
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before as well as code calling up to before the decode process starts Call have so the codec calls ff_thread_report set FF_CODEC_CAP_ALLOCATE_PROGRESS in AVCodec caps_internal and use ff_thread_get_buffer() to allocate frames. The frames must then be freed with ff_thread_release_buffer(). Otherwise decode directly into the user-supplied frames. Call ff_thread_report_progress() after some part of the current picture has decoded. A good place to put this is where draw_horiz_band() is called - add this if it isn 't called anywhere
IA
#define IA(x)
Definition: cast5.c:26
av_parse_cpu_caps
int av_parse_cpu_caps(unsigned *flags, const char *s)
Parse CPU caps from a string and update the given AV_CPU_* flags based on that.
Definition: cpu.c:191
tables
Writing a table generator This documentation is preliminary Parts of the API are not good and should be changed Basic concepts A table generator consists of two *_tablegen c and *_tablegen h The h file will provide the variable declarations and initialization code for the tables
Definition: tablegen.txt:10
OptionParseContext
Definition: cmdutils.h:333
future
FFmpeg s bug feature request tracker new issues and changes to existing issues can be done through a web interface Issues can be different kinds of things we want to keep track of but that do not belong into the source tree itself This includes bug feature requests and license violations We might add more items to this list in the future
Definition: issue_tracker.txt:13
avcodec_find_encoder
AVCodec * avcodec_find_encoder(enum AVCodecID id)
Find a registered encoder with a matching codec ID.
Definition: allcodecs.c:914
AVERROR_OPTION_NOT_FOUND
#define AVERROR_OPTION_NOT_FOUND
Option not found.
Definition: error.h:61
AV_BPRINT_SIZE_AUTOMATIC
#define AV_BPRINT_SIZE_AUTOMATIC
Option
An option extracted from the commandline.
Definition: cmdutils.h:287
variant
Definition: hls.c:180
val
static double val(void *priv, double ch)
Definition: aeval.c:76
type
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf type
Definition: writing_filters.txt:86
update
static av_always_inline void update(SilenceDetectContext *s, AVFrame *insamples, int is_silence, int current_sample, int64_t nb_samples_notify, AVRational time_base)
Definition: af_silencedetect.c:78
pts
static int64_t pts
Definition: transcode_aac.c:647
account
status_out is the status that have been taken into account
Definition: filter_design.txt:160
sws_get_class
const AVClass * sws_get_class(void)
Get the AVClass for swsContext.
Definition: options.c:95
us
#define us(width, name, range_min, range_max, subs,...)
Definition: cbs_h2645.c:276
AV_PKT_DATA_DISPLAYMATRIX
@ AV_PKT_DATA_DISPLAYMATRIX
This side data contains a 3x3 transformation matrix describing an affine transformation that needs to...
Definition: packet.h:108
OptionGroup::nb_opts
int nb_opts
Definition: cmdutils.h:313
av_opt_set
int av_opt_set(void *obj, const char *name, const char *val, int search_flags)
Definition: opt.c:465
show_muxdemuxers
show_muxdemuxers
Definition: cmdutils.c:76
OPT_STRING
#define OPT_STRING
Definition: cmdutils.h:164
OptionGroupList::group_def
const OptionGroupDef * group_def
Definition: cmdutils.h:327
AVFILTER_FLAG_DYNAMIC_INPUTS
#define AVFILTER_FLAG_DYNAMIC_INPUTS
The number of the filter inputs is not determined just by AVFilter.inputs.
Definition: avfilter.h:105
fast
static int fast
Definition: ffplay.c:334
OptionDef::help
const char * help
Definition: cmdutils.h:185
AVRational::num
int num
Numerator.
Definition: rational.h:59
idct
static void idct(int16_t block[64])
Definition: 4xm.c:163
InputFile
Definition: ffmpeg.h:393
AVFilterPad
A filter pad used for either input or output.
Definition: internal.h:54
show_help_bsf
static void show_help_bsf(const char *name)
Definition: cmdutils.c:1978
OptionGroupDef
Definition: cmdutils.h:293
resample
static int resample(ResampleContext *c, void *dst, const void *src, int *consumed, int src_size, int dst_size, int update_ctx, int nearest_neighbour)
Definition: resample.c:259
qlogs
spatial_decomposition_type s header_state qlog s header_state mv_scale s header_state qbias s header_state block_max_depth s header_state qlogs
Definition: snow.txt:85
LH
#define LH(psrc)
Definition: generic_macros_msa.h:93
AVDeviceInfoList::devices
AVDeviceInfo ** devices
list of autodetected devices
Definition: avdevice.h:461
aligned
static int aligned(int val)
Definition: dashdec.c:167
C
s EdgeDetect Foobar g libavfilter vf_edgedetect c libavfilter vf_foobar c edit libavfilter and add an entry for foobar following the pattern of the other filters edit libavfilter allfilters and add an entry for foobar following the pattern of the other filters configure make j< whatever > ffmpeg ffmpeg i you should get a foobar png with Lena edge detected That s your new playground is ready Some little details about what s going which in turn will define variables for the build system and the C
Definition: writing_filters.txt:58
check_stream_specifier
int check_stream_specifier(AVFormatContext *s, AVStream *st, const char *spec)
Check if the given stream matches a stream specifier.
Definition: cmdutils.c:2096
SHOW_VERSION
#define SHOW_VERSION
Definition: cmdutils.c:1098
first
trying all byte sequences megabyte in length and selecting the best looking sequence will yield cases to try But first
Definition: rate_distortion.txt:12
avassert.h
variables
FFmpeg currently uses a custom build this text attempts to document some of its obscure features and options Makefile variables
Definition: build_system.txt:7
description
Tag description
Definition: snow.txt:206
AV_LOG_TRACE
#define AV_LOG_TRACE
Extremely verbose debugging, useful for libav* development.
Definition: log.h:202
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:176
print_buildconf
static void print_buildconf(int flags, int level)
Definition: cmdutils.c:1158
initFilter
static av_cold int initFilter(int16_t **outFilter, int32_t **filterPos, int *outFilterSize, int xInc, int srcW, int dstW, int filterAlign, int one, int flags, int cpu_flags, SwsVector *srcFilter, SwsVector *dstFilter, double param[2], int srcPos, int dstPos)
Definition: utils.c:336
AV_CODEC_CAP_EXPERIMENTAL
#define AV_CODEC_CAP_EXPERIMENTAL
Codec is experimental and is thus avoided in favor of non experimental encoders.
Definition: codec.h:98
AVInputFormat
Definition: avformat.h:636
AVInputFormat::extensions
const char * extensions
If extensions are defined, then no probe is done.
Definition: avformat.h:662
OptionGroup::codec_opts
AVDictionary * codec_opts
Definition: cmdutils.h:315
ff_set_common_formats
int ff_set_common_formats(AVFilterContext *ctx, AVFilterFormats *formats)
A helper for query_formats() which sets all links to the same list of formats.
Definition: formats.c:605
set
static void set(uint8_t *a[], int ch, int index, int ch_count, enum AVSampleFormat f, double v)
Definition: swresample.c:59
expand_filename_template
static void expand_filename_template(AVBPrint *bp, const char *template, struct tm *tm)
Definition: cmdutils.c:947
check_options
static void check_options(const OptionDef *po)
Definition: cmdutils.c:494
media_type_string
#define media_type_string
Definition: cmdutils.h:617
await_progress
the pkt_dts and pkt_pts fields in AVFrame will work as usual Restrictions on codec whose streams don t reset across will not work because their bitstreams cannot be decoded in parallel *The contents of buffers must not be read before as well as code calling up to before the decode process starts Call have so the codec calls ff_thread_report await_progress()
ff_thread_report_progress
void ff_thread_report_progress(ThreadFrame *f, int n, int field)
Notify later decoding threads when part of their reference picture is ready.
Definition: pthread_frame.c:568
check
#define check(x, y, S, v)
Definition: motion_est_template.c:404
YUV
The official guide to swscale for confused that consecutive non overlapping rectangles of slice_bottom special converter These generally are unscaled converters of common like YUV
Definition: swscale.txt:38
av_dict_get
AVDictionaryEntry * av_dict_get(const AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags)
Get a dictionary entry with matching key.
Definition: dict.c:40
postprocess.h
class
#define class
Definition: math.h:25
av_log_format_line
void av_log_format_line(void *ptr, int level, const char *fmt, va_list vl, char *line, int line_size, int *print_prefix)
Format a line of log the same way as the default callback.
Definition: log.c:328
decode
static void decode(AVCodecContext *dec_ctx, AVPacket *pkt, AVFrame *frame, FILE *outfile)
Definition: decode_audio.c:71
ff_outlink_set_status
static void ff_outlink_set_status(AVFilterLink *link, int status, int64_t pts)
Set the status field of a link from the source filter.
Definition: filters.h:189
filter_codec_opts
AVDictionary * filter_codec_opts(AVDictionary *opts, enum AVCodecID codec_id, AVFormatContext *s, AVStream *st, AVCodec *codec)
Filter out options for given codec.
Definition: cmdutils.c:2104
OPT_INT
#define OPT_INT
Definition: cmdutils.h:167
ff_inlink_request_frame
void ff_inlink_request_frame(AVFilterLink *link)
Mark that a frame is wanted on the link.
Definition: avfilter.c:1602
input_streams
InputStream ** input_streams
Definition: ffmpeg.c:147
width
#define width
MC
#define MC(PEL, DIR, WIDTH)
Definition: hevcdsp_mips.h:26
AVCodecDescriptor
This struct describes the properties of a single codec described by an AVCodecID.
Definition: codec_desc.h:38
matter
Excerpt from undefined.txt (undefined behavior, signed integer overflow and the SUINT workaround).
s
#define s(width, name)
Definition: cbs_vp9.c:257
OptionDef::argname
const char * argname
Definition: cmdutils.h:186
split_commandline
int split_commandline(OptionParseContext *octx, int argc, char *argv[], const OptionDef *options, const OptionGroupDef *groups, int nb_groups)
Split the commandline into an intermediate form convenient for further processing.
Definition: cmdutils.c:750
AV_OPT_FLAG_ENCODING_PARAM
#define AV_OPT_FLAG_ENCODING_PARAM
a generic parameter which can be set by the user for muxing or encoding
Definition: opt.h:276
resample_opts
AVDictionary * resample_opts
Definition: cmdutils.c:70
av_realloc_array
void * av_realloc_array(void *ptr, size_t nmemb, size_t size)
Definition: mem.c:198
format
Excerpt from filter_design.txt: each input and output carries a list of supported formats (pixel formats for video; channel layouts, sample formats and sample rates for audio).
AVInputFormat::name
const char * name
A comma separated list of short names for the format.
Definition: avformat.h:641
SWS_FULL_CHR_H_INP
#define SWS_FULL_CHR_H_INP
Definition: swscale.h:81
g
const char * g
Definition: vf_curves.c:115
changes
Excerpt from writing_filters.txt: out-of-tree filters are likely to have a very short lifetime due to regular internal API changes.
Definition: writing_filters.txt:8
AVDictionaryEntry::key
char * key
Definition: dict.h:82
Option::key
const char * key
Definition: cmdutils.h:289
AVMEDIA_TYPE_AUDIO
@ AVMEDIA_TYPE_AUDIO
Definition: avutil.h:202
avfilter_pad_count
int avfilter_pad_count(const AVFilterPad *pads)
Get the number of elements in a NULL-terminated array of AVFilterPads (e.g. AVFilter.inputs or AVFilter.outputs).
Definition: avfilter.c:560
sse
static int sse(MpegEncContext *s, uint8_t *src1, uint8_t *src2, int w, int h, int stride)
Definition: mpegvideo_enc.c:2704
info
MIPS optimizations info
Definition: mips.txt:2
swr_alloc
av_cold struct SwrContext * swr_alloc(void)
Allocate SwrContext.
Definition: options.c:149
bits
uint8_t bits
Definition: vp3data.h:202
from
const char * from
Definition: jacosubdec.c:65
to
const char * to
Definition: webvttdec.c:34
avresample_get_class
const attribute_deprecated AVClass * avresample_get_class(void)
Definition: options.c:110
form
This is the more generic form
Definition: tablegen.txt:34
AVOutputFormat::audio_codec
enum AVCodecID audio_codec
default audio codec
Definition: avformat.h:501
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:37
get
static void get(uint8_t *pixels, int stride, int16_t *block)
Definition: proresenc_anatoliy.c:304
reaction
Excerpt from filter_design.txt: except for filters that can have queued frames, request_frame does not push frames itself; frames arrive as a reaction via filter_frame.
Definition: filter_design.txt:287
outputs
static const AVFilterPad outputs[]
Definition: af_acontrast.c:203
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:275
data
Excerpt from the buffer reference documentation: a buffer is deallocated once all references to it are destroyed; the data characteristics (resolution, sample rate, etc.) are stored in the reference.
av_get_channel_name
const char * av_get_channel_name(uint64_t channel)
Get the name of a given channel.
Definition: channel_layout.c:243
AVFilter::flags
int flags
A combination of AVFILTER_FLAG_*.
Definition: avfilter.h:187
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:197
ctx
AVFormatContext * ctx
Definition: movenc.c:48
pointers
Excerpt from undefined.txt: examples of undefined behavior include signed integer overflow and dereferencing freed pointers.
Definition: undefined.txt:4
dump_argument
static void dump_argument(const char *a)
Definition: cmdutils.c:470
report_file_level
static int report_file_level
Definition: cmdutils.c:73
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demuxing_decoding.c:40
Slice
Definition: magicyuv.c:36
on
Excerpt from writing_filters.txt (walkthrough for creating a vf_foobar filter starting from vf_edgedetect).
Definition: writing_filters.txt:34
field
Excerpt from writing_filters.txt (notes on the private context field of a filter).
Definition: writing_filters.txt:78
write_fileheader
write_fileheader() adds some minor things like a "this is a generated file" comment and some standard includes (excerpt from tablegen.txt).
Definition: tablegen.txt:39
av_hwdevice_get_type_name
const char * av_hwdevice_get_type_name(enum AVHWDeviceType type)
Get the string name of an AVHWDeviceType.
Definition: hwcontext.c:92
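For orientation, a small standalone sketch (assuming the companion av_hwdevice_iterate_types() iterator from libavutil/hwcontext.h is available in this tree) that prints every known hardware device type name:

#include <stdio.h>
#include <libavutil/hwcontext.h>

int main(void)
{
    enum AVHWDeviceType t = AV_HWDEVICE_TYPE_NONE;

    /* Iterate all device types known to libavutil and print their names. */
    while ((t = av_hwdevice_iterate_types(t)) != AV_HWDEVICE_TYPE_NONE)
        printf("%s\n", av_hwdevice_get_type_name(t));
    return 0;
}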
export
static int export(AVFilterContext *ctx, StreamContext *sc, int input)
Definition: vf_signature.c:570
codec_id
enum AVCodecID codec_id
Definition: vaapi_decode.c:369
parse_options
void parse_options(void *optctx, int argc, char **argv, const OptionDef *options, void(*parse_arg_function)(void *, const char *))
Definition: cmdutils.c:380
AV_OPT_FLAG_BSF_PARAM
#define AV_OPT_FLAG_BSF_PARAM
a generic parameter which can be set by the user for bit stream filtering
Definition: opt.h:290
key
const char * key
Definition: hwcontext_opencl.c:168
SwrContext
The libswresample context.
Definition: swresample_internal.h:95
AVMEDIA_TYPE_DATA
@ AVMEDIA_TYPE_DATA
Opaque data information usually continuous.
Definition: avutil.h:203
XMM_CLOBBERS
#define XMM_CLOBBERS(...)
Definition: asm.h:98
f
#define f(width, name)
Definition: cbs_vp9.c:255
pass
#define pass
Definition: fft_template.c:609
command
static int command(AVFilterContext *ctx, const char *cmd, const char *arg, char *res, int res_len, int flags)
Definition: vf_drawtext.c:871
link
Excerpt from filter_design.txt: format lists are references to shared objects; negotiation computes the intersection of the formats supported at each end of a link.
Definition: filter_design.txt:23
AV_OPT_FLAG_AUDIO_PARAM
#define AV_OPT_FLAG_AUDIO_PARAM
Definition: opt.h:278
compare_codec_desc
static int compare_codec_desc(const void *a, const void *b)
Definition: cmdutils.c:1508
int32_t
int32_t
Definition: audio_convert.c:194
ff_inlink_make_frame_writable
int ff_inlink_make_frame_writable(AVFilterLink *link, AVFrame **rframe)
Make sure a frame is writable.
Definition: avfilter.c:1520
av_opt_find
const AVOption * av_opt_find(void *obj, const char *name, const char *unit, int opt_flags, int search_flags)
Look for an option in an object.
Definition: opt.c:1660
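A minimal sketch of the AV_OPT_SEARCH_FAKE_OBJ pattern that cmdutils-style option handling relies on: probing a class (here the AVCodecContext class from avcodec_get_class()) for an option name without allocating any context.

#include <stdio.h>
#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

int main(void)
{
    const AVClass  *cc = avcodec_get_class();
    /* The "fake object" is just a pointer to the class pointer. */
    const AVOption *o  = av_opt_find(&cc, "threads", NULL, 0,
                                     AV_OPT_SEARCH_FAKE_OBJ);

    printf("option 'threads' %s\n", o ? "exists" : "does not exist");
    return 0;
}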
arg
const char * arg
Definition: jacosubdec.c:66
callback
static void callback(void *priv_data, int index, uint8_t *buf, int buf_size, int64_t time, enum dshowDeviceType devtype)
Definition: dshow.c:161
included
Excerpt from tablegen.txt: the generated *_tables.h file should be included, with the build directory in the include path.
Definition: tablegen.txt:59
fields
Excerpt from filter_design.txt: the activate callback examines the link status stored in the status_in/status_out and fifo fields and proceeds accordingly.
Definition: filter_design.txt:155
if
if(ret)
Definition: filter_design.txt:179
OPT_SPEC
#define OPT_SPEC
Definition: cmdutils.h:175
finish_group
static void finish_group(OptionParseContext *octx, int group_idx, const char *arg)
Definition: cmdutils.c:659
output_streams
OutputStream ** output_streams
Definition: ffmpeg.c:152
H2
vertical halfpel samples are found by H2[y][x]
Definition: snow.txt:421
AV_IS_OUTPUT_DEVICE
#define AV_IS_OUTPUT_DEVICE(category)
Definition: log.h:55
context
Excerpt from writing_filters.txt (AVOption table entries: name, description, offset of the field in the private context, default/minimum/maximum, flags).
Definition: writing_filters.txt:91
AV_CODEC_CAP_FRAME_THREADS
#define AV_CODEC_CAP_FRAME_THREADS
Codec supports frame-level multithreading.
Definition: codec.h:106
AVFormatContext
Format I/O context.
Definition: avformat.h:1335
negotiation
Excerpt from filter_design.txt (introduction to format negotiation).
Definition: filter_design.txt:12
av_log_get_level
int av_log_get_level(void)
Get the current log level.
Definition: log.c:435
need
Excerpt from tablegen.txt (printing tables whose element type has no standard print function).
Definition: tablegen.txt:45
show_buildconf
int show_buildconf(void *optctx, const char *opt, const char *arg)
Print the build configuration of the program to stdout.
Definition: cmdutils.c:1204
AV_CODEC_PROP_INTRA_ONLY
#define AV_CODEC_PROP_INTRA_ONLY
Codec uses only intra compression.
Definition: codec_desc.h:72
avfilter_get_by_name
const AVFilter * avfilter_get_by_name(const char *name)
Get a filter definition matching the given name.
Definition: allfilters.c:520
quant_table
static const int16_t quant_table[64]
Definition: intrax8.c:556
init_dynload
void init_dynload(void)
Initialize dynamic library loading.
Definition: cmdutils.c:117
opts
AVDictionary * opts
Definition: movenc.c:50
OptionGroup::format_opts
AVDictionary * format_opts
Definition: cmdutils.h:316
AVStream::codecpar
AVCodecParameters * codecpar
Codec parameters associated with this stream.
Definition: avformat.h:1012
main
int main(int argc, char *argv[])
Definition: avio_list_dir.c:112
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:67
avcodec_get_class
const AVClass * avcodec_get_class(void)
Get the AVClass for AVCodecContext.
Definition: options.c:295
result
Excerpt from filter_design.txt: forward the result (frame or status change) to the corresponding input.
NULL
#define NULL
Definition: coverity.c:32
flush
static void flush(AVCodecContext *avctx)
Definition: aacdec_template.c:500
frames
Excerpt from multithreading.txt (restrictions on codecs whose streams do not reset across frames).
Definition: multithreading.txt:37
OptionParseContext::global_opts
OptionGroup global_opts
Definition: cmdutils.h:334
Option::opt
const OptionDef * opt
Definition: cmdutils.h:288
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:659
run
uint8_t run
Definition: svq3.c:209
prepare_app_arguments
static void prepare_app_arguments(int *argc_ptr, char ***argv_ptr)
Definition: cmdutils.c:286
AVPixFmtDescriptor::nb_components
uint8_t nb_components
The number of components each pixel has (1-4).
Definition: pixdesc.h:83
push
static void push(HysteresisContext *s, int x, int y, int w)
Definition: vf_hysteresis.c:145
pixel
uint8_t pixel
Definition: tiny_ssim.c:42
anything
Excerpt from tablegen.txt: with hardcoded tables, the initialization functions should not do anything.
Definition: tablegen.txt:56
swr_get_class
const AVClass * swr_get_class(void)
Get the AVClass for SwrContext.
Definition: options.c:144
frame_wanted_out
Excerpt from filter_design.txt: the status of output links is stored in the frame_wanted_out field.
Definition: filter_design.txt:148
LIBAVFILTER_VERSION_MICRO
#define LIBAVFILTER_VERSION_MICRO
Definition: version.h:34
contain
Excerpt from tablegen.txt (what the .h file of a table generator should contain).
Definition: tablegen.txt:55
transform
static const int8_t transform[32][32]
Definition: hevcdsp.c:27
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
period
Excerpt from writing_filters.txt: option names are simple and lowercase, and descriptions are written without a trailing period.
Definition: writing_filters.txt:89
AVOutputFormat::get_device_list
int(* get_device_list)(struct AVFormatContext *s, struct AVDeviceInfoList *device_list)
Returns a device list with its properties.
Definition: avformat.h:590
coefficients
static double coefficients[8 *8]
Definition: dctref.c:35
next_codec_for_id
static const AVCodec * next_codec_for_id(enum AVCodecID id, void **iter, int encoder)
Definition: cmdutils.c:1496
greater
static int greater(MetadataContext *s, const char *value1, const char *value2)
Definition: f_metadata.c:158
AVOutputFormat::long_name
const char * long_name
Descriptive name for the format, meant to be more human-readable than name.
Definition: avformat.h:497
show_formats_devices
static int show_formats_devices(void *optctx, const char *opt, const char *arg, int device_only, int muxdemuxers)
Definition: cmdutils.c:1293
activate
filter_frame For filters that do not use the activate() callback
H
Excerpt from snow.txt (halfpel interpolation diagram, LL-band prediction and wavelet transform notes).
Definition: snow.txt:555
system
FFmpeg currently uses a custom build system
Definition: build_system.txt:1
GET_CODEC_NAME
#define GET_CODEC_NAME(id)
Definition: cmdutils.h:625
warned_cfg
static int warned_cfg
Definition: cmdutils.c:1095
av_log_set_flags
void av_log_set_flags(int arg)
Definition: log.c:445
work
Excerpt from tablegen.txt (Makefile changes needed to make automatic table creation work).
Definition: tablegen.txt:66
src
#define src
Definition: vp8dsp.c:254
parseutils.h
INDENT
#define INDENT
Definition: cmdutils.c:1097
sws_alloc_context
struct SwsContext * sws_alloc_context(void)
Allocate an empty SwsContext.
Definition: utils.c:1084
show_muxers
int show_muxers(void *optctx, const char *opt, const char *arg)
Print a listing containing all the muxers supported by the program (including devices).
Definition: cmdutils.c:1361
L0
#define L0
Definition: hevcdec.h:59
init_opts
void init_opts(void)
Initialize the cmdutils option system, in particular allocate the *_opts contexts.
Definition: cmdutils.c:82
list
Excerpt from filter_design.txt: when a single format is eventually chosen for a link, it is picked amongst the remaining list.
Definition: filter_design.txt:25
not
Excerpt from filter_design.txt: in request_frame, if there are already queued frames one should be pushed; if not, the filter should request a frame on one of its inputs.
Definition: filter_design.txt:259
AVBitStreamFilter::priv_class
const AVClass * priv_class
A class for the private data, used to declare bitstream filter private AVOptions.
Definition: bsf.h:117
OPT_INT64
#define OPT_INT64
Definition: cmdutils.h:170
Prediction
Prediction
Definition: magicyuv.c:41
particular
Excerpt from filter_design.txt: different references to the same buffer can show different characteristics.
Definition: filter_design.txt:55
AV_CODEC_CAP_VARIABLE_FRAME_SIZE
#define AV_CODEC_CAP_VARIABLE_FRAME_SIZE
Audio encoder supports receiving a different number of samples in each call.
Definition: codec.h:122
av_cpu_max_align
size_t av_cpu_max_align(void)
Get the maximum data alignment that may be required by FFmpeg.
Definition: cpu.c:309
av_parse_time
int av_parse_time(int64_t *timeval, const char *timestr, int duration)
Parse timestr and return in *time a corresponding number of microseconds.
Definition: parseutils.c:587
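A minimal sketch of the duration form (nonzero third argument), which is what OPT_TIME options are parsed with: "1:30.5" means one minute and 30.5 seconds.

#include <stdio.h>
#include <inttypes.h>
#include <libavutil/parseutils.h>

int main(void)
{
    int64_t us;

    if (av_parse_time(&us, "1:30.5", 1) < 0) {   /* 1 = parse as duration */
        fprintf(stderr, "invalid duration\n");
        return 1;
    }
    printf("%" PRId64 " microseconds\n", us);    /* prints 90500000 */
    return 0;
}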
abs
#define abs(x)
Definition: cuda_runtime.h:35
AVOutputFormat::priv_class
const AVClass * priv_class
AVClass for the private context.
Definition: avformat.h:519
inputs
Excerpt from filter_design.txt: a filter with several inputs must accept frames arriving randomly on any input and will most likely need some queuing mechanism.
Definition: filter_design.txt:243
nb_input_streams
int nb_input_streams
Definition: ffmpeg.c:148
write_option
static int write_option(void *optctx, const OptionDef *po, const char *opt, const char *arg)
Definition: cmdutils.c:292
av_get_standard_channel_layout
int av_get_standard_channel_layout(unsigned index, uint64_t *layout, const char **name)
Get the value and name of a standard channel layout.
Definition: channel_layout.c:279
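A sketch of the index-based iteration used by layout listings; the loop relies on the function returning 0 for valid indices and a negative error once the table is exhausted.

#include <stdio.h>
#include <inttypes.h>
#include <libavutil/channel_layout.h>

int main(void)
{
    uint64_t layout;
    const char *name;

    /* Walk the table of standard channel layouts. */
    for (unsigned i = 0; !av_get_standard_channel_layout(i, &layout, &name); i++)
        printf("%-16s 0x%" PRIx64 "\n", name, layout);
    return 0;
}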
OptionGroup::opts
Option * opts
Definition: cmdutils.h:312
AVPixFmtDescriptor::flags
uint64_t flags
Combination of AV_PIX_FMT_FLAG_...
Definition: pixdesc.h:106
OptionGroup
Definition: cmdutils.h:308
ff_inlink_acknowledge_status
int ff_inlink_acknowledge_status(AVFilterLink *link, int *rstatus, int64_t *rpts)
Test and acknowledge the change of status on the link.
Definition: avfilter.c:1431
av_output_audio_device_next
AVOutputFormat * av_output_audio_device_next(AVOutputFormat *d)
Audio output devices iterator.
Definition: avdevice.c:115
diff_bytes
static void diff_bytes(HYuvContext *s, uint8_t *dst, const uint8_t *src0, const uint8_t *src1, int w)
Definition: huffyuvenc.c:41
Range
Definition: vf_colorbalance.c:38
swresample.h
index
int index
Definition: gxfenc.c:89
c
Excerpt from undefined.txt (signed integer overflow in speed-critical code).
Definition: undefined.txt:32
converted
Excerpt from swscale.txt: when no more output can be generated, all remaining lines of the current slice are converted.
Definition: swscale.txt:46
H1
Excerpt from snow.txt (halfpel interpolation diagram, LL-band prediction and wavelet transform notes).
Definition: snow.txt:554
input_files
InputFile ** input_files
Definition: ffmpeg.c:149
AV_OPT_SEARCH_FAKE_OBJ
#define AV_OPT_SEARCH_FAKE_OBJ
The obj passed to av_opt_find() is fake – only a double pointer to AVClass instead of a required pointer to a struct containing an AVClass.
Definition: opt.h:566
av_bprint_is_complete
static int av_bprint_is_complete(const AVBPrint *buf)
Test if the print buffer is complete (not truncated).
Definition: bprint.h:185
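A small sketch of the AVBPrint pattern used when expanding templates: grow a string incrementally, check av_bprint_is_complete() for truncation, then finalize.

#include <stdio.h>
#include <libavutil/bprint.h>

int main(void)
{
    AVBPrint bp;

    av_bprint_init(&bp, 0, AV_BPRINT_SIZE_UNLIMITED);
    av_bprintf(&bp, "%s-%s.log", "ffmpeg", "20230101");

    if (av_bprint_is_complete(&bp))          /* false only if truncated */
        printf("%s\n", bp.str);

    av_bprint_finalize(&bp, NULL);           /* free the buffer */
    return 0;
}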
AVCodecID
AVCodecID
Identify the syntax and semantics of the bitstream.
Definition: codec_id.h:46
av_input_video_device_next
AVInputFormat * av_input_video_device_next(AVInputFormat *d)
Video input devices iterator.
Definition: avdevice.c:109
for
for(j=16;j >0;--j)
Definition: h264pred_template.c:469
AVFILTER_FLAG_DYNAMIC_OUTPUTS
#define AVFILTER_FLAG_DYNAMIC_OUTPUTS
The number of the filter outputs is not determined just by AVFilter.outputs.
Definition: avfilter.h:111
methods
FFmpeg multithreading methods
Definition: multithreading.txt:2
source
Excerpt from filter_design.txt: request_frame is called when a frame is wanted on an output; for a source it should directly produce one.
Definition: filter_design.txt:255
AV_CODEC_CAP_CHANNEL_CONF
#define AV_CODEC_CAP_CHANNEL_CONF
Codec should fill in channel configuration and samplerate instead of container.
Definition: codec.h:102
http
Excerpt from writing_filters.txt (filter tutorial: building and testing the new filter with an ffmpeg command line).
Definition: writing_filters.txt:29
ff_thread_release_buffer
void ff_thread_release_buffer(AVCodecContext *avctx, ThreadFrame *f)
Wrapper around release_buffer() for frame-multithreaded codecs.
Definition: pthread_frame.c:1006
locate_option
int locate_option(int argc, char **argv, const OptionDef *options, const char *optname)
Return index of option opt in argv or 0 if not found.
Definition: cmdutils.c:444
av_codec_is_decoder
int av_codec_is_decoder(const AVCodec *codec)
Definition: utils.c:99
interleave
static void interleave(uint8_t *dst, uint8_t *src, int w, int h, int dst_linesize, int src_linesize, enum FilterMode mode, int swap)
Definition: vf_il.c:117
FF_FILTER_FORWARD_STATUS_ALL
FF_FILTER_FORWARD_STATUS_ALL(outlink, filter)
codec_opts
AVDictionary * codec_opts
Definition: cmdutils.c:70
options
const OptionDef options[]
eval.h
show_help_demuxer
static void show_help_demuxer(const char *name)
Definition: cmdutils.c:1853
AV_CODEC_CAP_AUTO_THREADS
#define AV_CODEC_CAP_AUTO_THREADS
Codec supports avctx->thread_count == 0 (auto).
Definition: codec.h:118
desc
const char * desc
Definition: nvenc.c:79
blur
static void blur(uint8_t *dst, int dst_step, const uint8_t *src, int src_step, int len, int radius, int pixsize)
Definition: vf_boxblur.c:160
H3
vertical horizontal halfpel samples are found by H3[y][x]
Definition: snow.txt:427
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:50
get_audio_buffer
static AVFrame * get_audio_buffer(AVFilterLink *inlink, int nb_samples)
Definition: avf_concat.c:208
AV_SAMPLE_FMT_NB
@ AV_SAMPLE_FMT_NB
Number of sample formats. DO NOT USE if linking dynamically.
Definition: samplefmt.h:74
show_help
int show_help(void *optctx, const char *opt, const char *arg)
Generic -h handler common to all fftools.
Definition: cmdutils.c:1997
AVMediaType
AVMediaType
Definition: avutil.h:199
av_log_set_callback
void av_log_set_callback(void(*callback)(void *, int, const char *, va_list))
Set the logging callback.
Definition: log.c:455
ff_inlink_set_status
void ff_inlink_set_status(AVFilterLink *link, int status)
Set the status on an input link.
Definition: avfilter.c:1610
avformat_match_stream_specifier
int avformat_match_stream_specifier(AVFormatContext *s, AVStream *st, const char *spec)
Check if the stream st contained in s is matched by the stream specifier spec.
Definition: utils.c:5329
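A hedged sketch (assumes 'ic' is an opened AVFormatContext) showing how a stream specifier such as "a" or "v:0" can be applied: a positive return means the stream matches, zero means it does not, and a negative value means the specifier is invalid.

#include <stdio.h>
#include <libavformat/avformat.h>

static void list_matching_streams(AVFormatContext *ic, const char *spec)
{
    for (unsigned i = 0; i < ic->nb_streams; i++) {
        int ret = avformat_match_stream_specifier(ic, ic->streams[i], spec);
        if (ret < 0) {
            fprintf(stderr, "invalid stream specifier: %s\n", spec);
            return;
        }
        if (ret > 0)
            printf("stream #%u matches '%s'\n", i, spec);
    }
}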
scroll
static void scroll(AVFilterContext *ctx, AVFrame *in, AVFrame *out)
Definition: vf_scroll.c:111
copy
static void copy(const float *p1, float *p2, const int length)
Definition: vf_vaguedenoiser.c:194
Header
@ Header
Definition: mxfdec.c:63
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:444
av_stream_get_side_data
uint8_t * av_stream_get_side_data(const AVStream *stream, enum AVPacketSideDataType type, int *size)
Get side information from stream.
Definition: utils.c:5508
AVClass::category
AVClassCategory category
Category used for visualization (like color). This is only set if the category is equal for all objects using this class.
Definition: log.h:130
output_files
OutputFile ** output_files
Definition: ffmpeg.c:154
cpu.h
av_err2str
#define av_err2str(errnum)
Convenience macro; the return value should be used only directly in function arguments but never stand-alone.
Definition: error.h:119
FFMAX
#define FFMAX(a, b)
Definition: common.h:94
get_preset_file
FILE * get_preset_file(char *filename, size_t filename_size, const char *preset_name, int is_path, const char *codec_name)
Get a file corresponding to a preset file.
Definition: cmdutils.c:2046
PRINT_CODEC_SUPPORTED
#define PRINT_CODEC_SUPPORTED(codec, field, type, list_name, term, get_name)
Definition: cmdutils.c:1376
AV_SAMPLE_FMT_NONE
@ AV_SAMPLE_FMT_NONE
Definition: samplefmt.h:59
sample
#define sample
Definition: flacdsp_template.c:44
uninit_opts
void uninit_opts(void)
Uninitialize the cmdutils option system, in particular free the *_opts contexts and their contents.
Definition: cmdutils.c:87
size
int size
Definition: twinvq_data.h:11134
state
static struct @314 state
print_codecs
static void print_codecs(int encoder)
Definition: cmdutils.c:1607
section
Definition: ffprobe.c:140
swr_free
av_cold void swr_free(SwrContext **ss)
Free the given SwrContext and set the pointer to NULL.
Definition: swresample.c:137
AV_PIX_FMT_FLAG_BITSTREAM
#define AV_PIX_FMT_FLAG_BITSTREAM
All values of a component are bit-wise packed end to end.
Definition: pixdesc.h:136
av_frame_is_writable
int av_frame_is_writable(AVFrame *frame)
Check if the frame data is writable.
Definition: frame.c:595
FFDIFFSIGN
#define FFDIFFSIGN(x, y)
Comparator.
Definition: common.h:92
does
Excerpt from writing_filters.txt (the Doxygen header comment of the new filter file).
Definition: writing_filters.txt:66
setup_find_stream_info_opts
AVDictionary ** setup_find_stream_info_opts(AVFormatContext *s, AVDictionary *codec_opts)
Setup AVCodecContext options for avformat_find_stream_info().
Definition: cmdutils.c:2161
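A hedged sketch of the usual fftools pattern around this helper (assumes 'ic' is an opened AVFormatContext, 'codec_opts' is the global option dictionary, and stdio plus the cmdutils/avformat headers are already in scope): one AVDictionary per stream is returned, and the caller frees them after avformat_find_stream_info().

AVDictionary **opts = setup_find_stream_info_opts(ic, codec_opts);
int err = avformat_find_stream_info(ic, opts);

/* The per-stream dictionaries are owned by the caller. */
for (unsigned i = 0; i < ic->nb_streams; i++)
    av_dict_free(&opts[i]);
av_freep(&opts);

if (err < 0)
    fprintf(stderr, "could not read stream information\n");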
GET_SAMPLE_FMT_NAME
#define GET_SAMPLE_FMT_NAME(sample_fmt)
Definition: cmdutils.h:628
swscale
static int swscale(SwsContext *c, const uint8_t *src[], int srcStride[], int srcSliceY, int srcSliceH, uint8_t *dst[], int dstStride[])
Definition: swscale.c:237
av_demuxer_iterate
const AVInputFormat * av_demuxer_iterate(void **opaque)
Iterate over all registered demuxers.
Definition: allformats.c:533
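A minimal standalone sketch of the iterator, in the spirit of the show_formats listings: print the short name of every registered demuxer.

#include <stdio.h>
#include <libavformat/avformat.h>

int main(void)
{
    void *opaque = NULL;
    const AVInputFormat *ifmt;

    while ((ifmt = av_demuxer_iterate(&opaque)))
        printf("%s\n", ifmt->name);
    return 0;
}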
printf
printf("static const uint8_t my_array[100] = {\n")
gray
Excerpt from swscale.txt: input to the main path that is not planar YUV or gray is first converted to planar YUV.
Definition: swscale.txt:52
show_protocols
int show_protocols(void *optctx, const char *opt, const char *arg)
Print a listing containing all the protocols supported by the program.
Definition: cmdutils.c:1670
av_log_get_flags
int av_log_get_flags(void)
Definition: log.c:450
avdevice.h
AVFilter::description
const char * description
A description of the filter.
Definition: avfilter.h:155
avdevice_free_list_devices
void avdevice_free_list_devices(AVDeviceInfoList **device_list)
Convenient function to free result of avdevice_list_devices().
Definition: avdevice.c:250
header
static const uint8_t header[24]
Definition: sdr2.c:67
AV_OPT_SEARCH_CHILDREN
#define AV_OPT_SEARCH_CHILDREN
Search in possible children of the given object first.
Definition: opt.h:558
split
static char * split(char *message, char delim)
Definition: af_channelmap.c:81
CONFIG_FOOBAR_FILTER
#define CONFIG_FOOBAR_FILTER
av_input_audio_device_next
AVInputFormat * av_input_audio_device_next(AVInputFormat *d)
Audio input devices iterator.
Definition: avdevice.c:103
encode
static void encode(AVCodecContext *ctx, AVFrame *frame, AVPacket *pkt, FILE *output)
Definition: encode_audio.c:95
height
#define height
FFMIN
#define FFMIN(a, b)
Definition: common.h:96
a
Excerpt from undefined.txt: SUINT is the C unsigned type, but it semantically holds a signed int.
Definition: undefined.txt:41
H
#define H
Definition: pixlet.c:39
AV_CODEC_CAP_SLICE_THREADS
#define AV_CODEC_CAP_SLICE_THREADS
Codec supports slice-based (or partition-based) multithreading.
Definition: codec.h:110
offset
Excerpt from writing_filters.txt (the offset member of an AVOption entry).
Definition: writing_filters.txt:86
line
Definition: graph2dot.c:48
FF_FILTER_FORWARD_WANTED
FF_FILTER_FORWARD_WANTED(outlink, inlink)
attributes.h
av_pix_fmt_desc_get_id
enum AVPixelFormat av_pix_fmt_desc_get_id(const AVPixFmtDescriptor *desc)
Definition: pixdesc.c:2568
av_dict_free
void av_dict_free(AVDictionary **pm)
Free all the memory allocated for an AVDictionary struct and all keys and values.
Definition: dict.c:203
show_devices
int show_devices(void *optctx, const char *opt, const char *arg)
Print a listing containing all the devices supported by the program.
Definition: cmdutils.c:1371
that
Excerpt from filter_design.txt: request_frame should return AVERROR_EOF when no more frames can be produced; the typical multi-input implementation looks like this.
Definition: filter_design.txt:273
N
#define N
Definition: af_mcompand.c:54
va_copy
#define va_copy(dst, src)
Definition: va_copy.h:31
version
version
Definition: libkvazaar.c:292
AVDeviceInfo::device_description
char * device_description
human friendly name
Definition: avdevice.h:454
avdevice_list_output_sinks
int avdevice_list_output_sinks(AVOutputFormat *device, const char *device_name, AVDictionary *device_options, AVDeviceInfoList **device_list)
Definition: avdevice.c:239
show_pix_fmts
int show_pix_fmts(void *optctx, const char *opt, const char *arg)
Print a listing containing all the pixel formats supported by the program.
Definition: cmdutils.c:1746
input
Excerpt from filter_design.txt: test the status of the outputs, forward it to the corresponding inputs, and return FFERROR_NOT_READY if nothing can be done.
Definition: filter_design.txt:172
interpolation
static int interpolation(DeclickChannel *c, const double *src, int ar_order, double *acoefficients, int *index, int nb_errors, double *auxiliary, double *interpolated)
Definition: af_adeclick.c:357
Y
#define Y
Definition: boxblur.h:38
help
static void help(void)
Definition: dct.c:450
introduced
Excerpt from undefined.txt: the SUINT type was introduced to avoid confusion about signedness.
Definition: undefined.txt:38
AVOutputFormat::mime_type
const char * mime_type
Definition: avformat.h:498
XMM_CLOBBERS_ONLY
#define XMM_CLOBBERS_ONLY(...)
Definition: asm.h:99
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:187
implementations
Excerpt from multithreading.txt (restrictions on codec implementations under frame threading).
Definition: multithreading.txt:30
distribution
Excerpt from writing_filters.txt: filters kept out of the FFmpeg tree also get a limited distribution.
Definition: writing_filters.txt:8
show_sample_fmts
int show_sample_fmts(void *optctx, const char *opt, const char *arg)
Print a listing containing all the sample formats supported by the program.
Definition: cmdutils.c:1808
avcodec_descriptor_next
const AVCodecDescriptor * avcodec_descriptor_next(const AVCodecDescriptor *prev)
Iterate over all codec descriptors known to libavcodec.
Definition: codec_desc.c:3400
avcodec_find_encoder_by_name
AVCodec * avcodec_find_encoder_by_name(const char *name)
Find a registered encoder with the specified name.
Definition: allcodecs.c:942
AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC
#define AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC
Some filters support a generic "enable" expression option that can be used to enable or disable a filter at runtime.
Definition: avfilter.h:125
b
Excerpt from undefined.txt (signed integer overflow in speed-critical code).
Definition: undefined.txt:32
show_banner
void show_banner(int argc, char **argv, const OptionDef *options)
Print the program banner to stderr.
Definition: cmdutils.c:1184
av_codec_is_encoder
int av_codec_is_encoder(const AVCodec *codec)
Definition: utils.c:94
layout
Excerpt from filter_design.txt: for audio, format negotiation covers the channel layout.
Definition: filter_design.txt:18
program_exit
static void(* program_exit)(int ret)
Definition: cmdutils.c:126
flag
#define flag(name)
Definition: cbs_av1.c:557
register_exit
void register_exit(void(*cb)(int ret))
Register a program-specific cleanup routine.
Definition: cmdutils.c:128
GET_CH_LAYOUT_DESC
#define GET_CH_LAYOUT_DESC(ch_layout)
Definition: cmdutils.h:639
in
Excerpt from libavresample audio_convert.c (CONV_FUNC_GROUP sample-format conversion macros, flattened by the documentation generator).
Definition: audio_convert.c:326
AV_CODEC_PROP_LOSSLESS
#define AV_CODEC_PROP_LOSSLESS
Codec supports lossless compression.
Definition: codec_desc.h:82
av_find_input_format
ff_const59 AVInputFormat * av_find_input_format(const char *short_name)
Find AVInputFormat based on the short name of the input format.
Definition: format.c:118
tests
const TestCase tests[]
Definition: fifo_muxer.c:245
less
static int less(MetadataContext *s, const char *value1, const char *value2)
Definition: f_metadata.c:148
av_log_set_level
void av_log_set_level(int level)
Set the log level.
Definition: log.c:440
Type
Type
Definition: vf_idet.h:29
bprint.h
AV_CODEC_ID_NONE
@ AV_CODEC_ID_NONE
Definition: codec_id.h:47
AVOutputFormat
Definition: avformat.h:490
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:269
code
Excerpt from filter_design.txt: frames stored internally can be considered part of the FIFO, delaying acknowledgement of a status change; example code follows in that document.
Definition: filter_design.txt:178
print_all_libs_info
static void print_all_libs_info(int flags, int level)
Definition: cmdutils.c:1131
round
static av_always_inline av_const double round(double x)
Definition: libm.h:444
OPT_TIME
#define OPT_TIME
Definition: cmdutils.h:176
swr_opts
AVDictionary * swr_opts
Definition: cmdutils.c:69
LIBAVFILTER_VERSION_MINOR
#define LIBAVFILTER_VERSION_MINOR
Definition: version.h:33
available
if no frame is available
Definition: filter_design.txt:166
Code
One code in hash table.
Definition: lzwenc.c:42
filter_frame
static int filter_frame(DBEContext *s, AVFrame *frame)
Definition: dolby_e.c:565
display.h
needed
Excerpt from filter_design.txt: macros are available to factor the inlink/outlink forwarding when no extra processing is needed.
Definition: filter_design.txt:212
AVSampleFormat
AVSampleFormat
Audio sample formats.
Definition: samplefmt.h:58
delta
float delta
Definition: vorbis_enc_data.h:457
draw_edges
Excerpt from multithreading.txt: draw_edges() needs to be called before reporting progress.
ilog2
From snow.txt: ilog2(x) is the rounded-down base-2 logarithm of x, with ilog2(0) = 0.
Definition: snow.txt:23
value
Excerpt from writing_filters.txt (the default value member of an AVOption entry).
Definition: writing_filters.txt:86
av_toupper
static av_const int av_toupper(int c)
Locale-independent conversion of ASCII characters to uppercase.
Definition: avstring.h:231
AVMEDIA_TYPE_ATTACHMENT
@ AVMEDIA_TYPE_ATTACHMENT
Opaque data information usually sparse.
Definition: avutil.h:205
AV_OPT_FLAG_DECODING_PARAM
#define AV_OPT_FLAG_DECODING_PARAM
a generic parameter which can be set by the user for demuxing or decoding
Definition: opt.h:277
CONFIG_HARDCODED_TABLES
#define CONFIG_HARDCODED_TABLES
Definition: aacps_tablegen_template.c:24
description
Excerpt from writing_filters.txt (the description member of an AVOption entry).
Definition: writing_filters.txt:86
SUINT
#define SUINT
Definition: dct32_template.c:30
uint8_t
uint8_t
Definition: audio_convert.c:194
SHOW_MUXERS
@ SHOW_MUXERS
Definition: cmdutils.c:79
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if available on the CPU) and zero all the bytes of the block.
Definition: mem.c:237
get_codecs_sorted
static unsigned get_codecs_sorted(const AVCodecDescriptor ***rcodecs)
Definition: cmdutils.c:1517
filter
Excerpt from writing_filters.txt (build-system entries for the new vf_foobar filter).
Definition: writing_filters.txt:60
pix_sum
static int pix_sum(uint8_t *pix, int line_size, int w, int h)
Definition: snowenc.c:165
OPT_OUTPUT
#define OPT_OUTPUT
Definition: cmdutils.h:179
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:197
update_thread_context
Excerpt from multithreading.txt: have update_thread_context() run the remaining setup in the next thread and add AV_CODEC_CAP_FRAME_THREADS to the codec capabilities.
CONTEXT
#define CONTEXT
Definition: af_asetrate.c:31
len
int len
Definition: vorbis_enc_data.h:452
opt_timelimit
int opt_timelimit(void *optctx, const char *opt, const char *arg)
Limit the execution time.
Definition: cmdutils.c:1072
OPT_OFFSET
#define OPT_OFFSET
Definition: cmdutils.h:174
mv_scale
static av_always_inline void mv_scale(Mv *dst, Mv *src, int td, int tb)
Definition: hevc_mvs.c:115
headroom
static int headroom(int *la)
Definition: nellymoser.c:104
plain
static const uint8_t plain[]
Definition: aes_ctr.c:23
opt_max_alloc
int opt_max_alloc(void *optctx, const char *opt, const char *arg)
Definition: cmdutils.c:1058
nb_output_files
int nb_output_files
Definition: ffmpeg.c:155
OptionParseContext::groups
OptionGroupList * groups
Definition: cmdutils.h:336
av_codec_iterate
const AVCodec * av_codec_iterate(void **opaque)
Iterate over all registered codecs.
Definition: allcodecs.c:832
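A minimal sketch combining the iterator with av_codec_is_encoder()/av_codec_is_decoder() (both listed above) to count the registered codecs:

#include <stdio.h>
#include <libavcodec/avcodec.h>

int main(void)
{
    void *it = NULL;
    const AVCodec *c;
    int encoders = 0, decoders = 0;

    while ((c = av_codec_iterate(&it))) {
        encoders += av_codec_is_encoder(c);
        decoders += av_codec_is_decoder(c);
    }
    printf("%d encoders, %d decoders\n", encoders, decoders);
    return 0;
}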
log2
#define log2(x)
Definition: libm.h:404
needed
Excerpt from swscale.txt: output stops when lines from a future slice would be needed.
Definition: swscale.txt:45
parse_optgroup
int parse_optgroup(void *optctx, OptionGroup *g)
Parse an options group and write results into optctx.
Definition: cmdutils.c:411
OptionDef::u
union OptionDef::@1 u
parse_loglevel
void parse_loglevel(int argc, char **argv, const OptionDef *options)
Find the '-loglevel' option in the command line args and apply it.
Definition: cmdutils.c:503
AVInputFormat::get_device_list
int(* get_device_list)(struct AVFormatContext *s, struct AVDeviceInfoList *device_list)
Returns a device list with its properties.
Definition: avformat.h:766
AVFilter
Filter definition.
Definition: avfilter.h:144
version.h
OptionGroup::sws_dict
AVDictionary * sws_dict
Definition: cmdutils.h:318
directory
Excerpt from build_system.txt: DBG preprocesses x86 external assembler files to a .dbg.asm file in the object directory.
Definition: build_system.txt:12
language
Undefined Behavior In the C language
Definition: undefined.txt:3
SpecifierOpt
Definition: cmdutils.h:146
OptionGroup::resample_opts
AVDictionary * resample_opts
Definition: cmdutils.h:317
array
static int array[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:106
G
#define G
Definition: huffyuvdsp.h:33
files
Excerpt from tablegen.txt: a table generator consists of two files.
Definition: tablegen.txt:8
ret
ret
Definition: filter_design.txt:187
AVStream
Stream structure.
Definition: avformat.h:865
AV_LOG_FATAL
#define AV_LOG_FATAL
Something went wrong and recovery is not possible.
Definition: log.h:170
pixfmt
enum AVPixelFormat pixfmt
Definition: kmsgrab.c:202
pred
static const float pred[4]
Definition: siprdata.h:259
currently
Excerpt from swscale.txt: two sets of input-to-planar-YUV converters currently exist.
Definition: swscale.txt:54
read_yesno
int read_yesno(void)
Return a positive value if a line read from standard input starts with [yY], otherwise return 0.
Definition: cmdutils.c:2035
links
Excerpt from filter_design.txt on frames and the formats negotiated for each input and each output link.
Definition: filter_design.txt:14
av_strtod
double av_strtod(const char *numstr, char **tail)
Parse the string in numstr and return its value as a double.
Definition: eval.c:106
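A small sketch (hypothetical helper, not from this file) showing that av_strtod() accepts SI postfixes and reports where parsing stopped:

#include <stdio.h>
#include "libavutil/eval.h"

static void strtod_demo(void)
{
    char *tail;
    double v = av_strtod("1.5M", &tail);   /* "M" postfix: v becomes 1500000.0 */
    printf("%g, rest: \"%s\"\n", v, tail);
}
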
frame
Excerpt from filter_design.txt describing frame queuing and the request_frame() method of filters.
Definition: filter_design.txt:264
comment
static int FUNC() comment(CodedBitstreamContext *ctx, RWContext *rw, JPEGRawComment *current)
Definition: cbs_jpeg_syntax_template.c:174
av_strlcat
size_t av_strlcat(char *dst, const char *src, size_t size)
Append the string src to the string dst, but to a total length of no more than size - 1 bytes,...
Definition: avstring.c:93
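A short sketch (helper name is illustrative) combining av_strlcpy() and av_strlcat(); both always 0-terminate and never write more than size bytes:

#include <stdio.h>
#include "libavutil/avstring.h"

static void strlcat_demo(void)
{
    char buf[32];
    av_strlcpy(buf, "out", sizeof(buf));
    av_strlcat(buf, ".mkv", sizeof(buf));   /* appends, truncating if needed */
    printf("%s\n", buf);                    /* prints "out.mkv" */
}
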
OptionGroup::arg
const char * arg
Definition: cmdutils.h:310
AVDeviceInfoList
List of devices.
Definition: avdevice.h:460
avcodec_find_decoder
AVCodec * avcodec_find_decoder(enum AVCodecID id)
Find a registered decoder with a matching codec ID.
Definition: allcodecs.c:919
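For illustration (hypothetical helper), looking up a decoder by codec ID; a NULL return means no such decoder is available in this build:

#include <stdio.h>
#include "libavcodec/avcodec.h"

static void find_decoder_demo(void)
{
    AVCodec *dec = avcodec_find_decoder(AV_CODEC_ID_H264);
    if (dec)
        printf("decoder: %s\n", dec->name);
}
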
uninit_parse_context
void uninit_parse_context(OptionParseContext *octx)
Free all allocated memory in an OptionParseContext.
Definition: cmdutils.c:724
log_callback_help
void log_callback_help(void *ptr, int level, const char *fmt, va_list vl)
Trivial log callback.
Definition: cmdutils.c:96
OPT_PERFILE
#define OPT_PERFILE
Definition: cmdutils.h:173
av_opt_get_key_value
int av_opt_get_key_value(const char **ropts, const char *key_val_sep, const char *pairs_sep, unsigned flags, char **rkey, char **rval)
Extract a key-value pair from the beginning of a string.
Definition: opt.c:1536
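A sketch (the option string and variable names are made up) extracting one key/value pair; the returned strings are allocated and must be freed by the caller:

#include <stdio.h>
#include "libavutil/opt.h"
#include "libavutil/mem.h"

static void key_value_demo(void)
{
    const char *s = "video_size=1280x720";
    char *key, *val;
    if (av_opt_get_key_value(&s, "=", ":", 0, &key, &val) >= 0) {
        printf("%s -> %s\n", key, val);
        av_free(key);
        av_free(val);
    }
}
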
avformat.h
av_bprintf
void av_bprintf(AVBPrint *buf, const char *fmt,...)
Definition: bprint.c:94
dict.h
ff_thread_finish_setup
Excerpt from multithreading.txt: context setup code must run before the decode process starts; call ff_thread_finish_setup() afterwards.
av_opt_child_class_next
const AVClass * av_opt_child_class_next(const AVClass *parent, const AVClass *prev)
Iterate over potential AVOptions-enabled children of parent.
Definition: opt.c:1718
av_get_channel_description
const char * av_get_channel_description(uint64_t channel)
Get the description of a given channel.
Definition: channel_layout.c:254
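A one-line usage sketch (assuming the AV_CH_FRONT_LEFT mask from channel_layout.h):

#include <stdio.h>
#include "libavutil/channel_layout.h"

static void channel_demo(void)
{
    printf("%s\n", av_get_channel_description(AV_CH_FRONT_LEFT));  /* e.g. "front left" */
}
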
AV_LOG_SKIP_REPEATED
#define AV_LOG_SKIP_REPEATED
Skip repeated messages, this requires the user app to use av_log() instead of (f)printf as the 2 woul...
Definition: log.h:366
left
Excerpt from the Snow codec description (snow.txt) covering the interpolation filter coefficients, the high-level bitstream structure, the range coder state transition table and motion vector prediction.
Definition: snow.txt:386
clients
Excerpt from multithreading.txt: frame threading adds one frame of delay for every thread beyond the first, and clients must be able to handle this.
Definition: multithreading.txt:26
add_opt
static void add_opt(OptionParseContext *octx, const OptionDef *opt, const char *key, const char *val)
Definition: cmdutils.c:690
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen_template.c:38
show_codecs
int show_codecs(void *optctx, const char *opt, const char *arg)
Print a listing containing all the codecs supported by the program.
Definition: cmdutils.c:1551
init_report
static int init_report(const char *env)
Definition: cmdutils.c:975
transforms
static const struct @78 transforms[18]
avfilter_pad_get_type
enum AVMediaType avfilter_pad_get_type(const AVFilterPad *pads, int pad_idx)
Get the type of an AVFilterPad.
Definition: avfilter.c:1034
L
#define L(x)
Definition: vp56_arith.h:36
AVCodecContext
main external API structure.
Definition: avcodec.h:526
compare
static float compare(const AVFrame *haystack, const AVFrame *obj, int offx, int offy)
Definition: vf_find_rect.c:104
av_muxer_iterate
const AVOutputFormat * av_muxer_iterate(void **opaque)
Iterate over all registered muxers.
Definition: allformats.c:516
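Analogous to the codec iteration sketch above (helper name is hypothetical), listing every registered muxer:

#include <stdio.h>
#include "libavformat/avformat.h"

static void list_muxers(void)
{
    void *iter = NULL;
    const AVOutputFormat *ofmt;
    while ((ofmt = av_muxer_iterate(&iter)))
        printf("%s\n", ofmt->name);
}
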
parse_option
int parse_option(void *optctx, const char *opt, const char *arg, const OptionDef *options)
Parse one given option.
Definition: cmdutils.c:347
get_rotation
double get_rotation(AVStream *st)
Definition: cmdutils.c:2200
av_calloc
void * av_calloc(size_t nmemb, size_t size)
Non-inlined equivalent of av_mallocz_array().
Definition: mem.c:245
AVDeviceInfoList::default_device
int default_device
index of default device or -1 if no default
Definition: avdevice.h:463
opt_cpuflags
int opt_cpuflags(void *optctx, const char *opt, const char *arg)
Override the cpuflags.
Definition: cmdutils.c:856
again
Excerpt from filter_design.txt on format negotiation: references to the supported-format lists are replaced with a reference to their intersection.
Definition: filter_design.txt:25
if
if(!keyframe)
Definition: snow.txt:61
sws_freeContext
void sws_freeContext(struct SwsContext *swsContext)
Free the swscaler context swsContext.
Definition: utils.c:2319
AVBitStreamFilter
Definition: bsf.h:98
buffer
Excerpt from filter_design.txt: frame data is stored in buffers represented by AVFrame structures, and several references can point to the same frame buffer.
Definition: filter_design.txt:49
SHOW_DEMUXERS
@ SHOW_DEMUXERS
Definition: cmdutils.c:78
get_buffer
Excerpt from multithreading.txt on frame-threading restrictions around get_buffer() and buffer contents.
AVRational::den
int den
Denominator.
Definition: rational.h:60
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
Compensation
Motion Compensation
Definition: snow.txt:418
Transform
Definition: deshake.h:47
headers
Excerpt from build_system.txt listing build and test targets (fate, install, etc.).
Definition: build_system.txt:34
avfilter.h
av_match_name
int av_match_name(const char *name, const char *names)
Match instances of a name in a comma-separated list of names.
Definition: avstring.c:350
video
A Quick Description Of Rate Distortion Theory: we want to encode a video.
Definition: rate_distortion.txt:3
test
static void test(const char *pattern, const char *host)
Definition: noproxy.c:23
av_dict_parse_string
int av_dict_parse_string(AVDictionary **pm, const char *str, const char *key_val_sep, const char *pairs_sep, int flags)
Parse the key/value pairs list and add the parsed entries to a dictionary.
Definition: dict.c:180
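A sketch (the option string and helper name are made up) parsing a key=value list into a dictionary and iterating the result:

#include <stdio.h>
#include "libavutil/dict.h"

static void parse_opts_demo(void)
{
    AVDictionary *opts = NULL;
    AVDictionaryEntry *e = NULL;

    if (av_dict_parse_string(&opts, "threads=4:preset=fast", "=", ":", 0) >= 0)
        while ((e = av_dict_get(opts, "", e, AV_DICT_IGNORE_SUFFIX)))
            printf("%s = %s\n", e->key, e->value);
    av_dict_free(&opts);
}
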
values
Excerpt from filter_design.txt describing the return values of the request_frame() method.
Definition: filter_design.txt:263
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:107
AVOutputFormat::video_codec
enum AVCodecID video_codec
default video codec
Definition: avformat.h:502
AV_CODEC_CAP_DELAY
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and cor...
Definition: codec.h:75
samples
Filter the word “frame” indicates either a video frame or a group of audio samples
Definition: filter_design.txt:8
Option::val
const char * val
Definition: cmdutils.h:290
note
Excerpt from the Snow codec description (snow.txt) covering sub-pel interpolation, LL band prediction and the 2D inverse wavelet transform.
Definition: snow.txt:555
GROW_ARRAY
#define GROW_ARRAY(array, nb_elems)
Definition: cmdutils.h:619
IDCT
#define IDCT(H)
Definition: hevcdsp_template.c:240
avcodec_get_hw_config
const AVCodecHWConfig * avcodec_get_hw_config(const AVCodec *codec, int index)
Retrieve supported hardware configurations for a codec.
Definition: utils.c:1848
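A sketch (hypothetical helper) walking the hardware configurations of a given codec until the index runs past the last one:

#include <stdio.h>
#include "libavcodec/avcodec.h"
#include "libavutil/hwcontext.h"

static void list_hw_configs(const AVCodec *codec)
{
    for (int i = 0;; i++) {
        const AVCodecHWConfig *cfg = avcodec_get_hw_config(codec, i);
        const char *name;
        if (!cfg)
            break;                                   /* no more configurations */
        name = av_hwdevice_get_type_name(cfg->device_type);
        printf("hw config %d: %s\n", i, name ? name : "none");
    }
}
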
ff_outlink_get_status
int ff_outlink_get_status(AVFilterLink *link)
Get the status on an output link.
Definition: avfilter.c:1625
AVFilterContext
An instance of a filter.
Definition: avfilter.h:338
need
Excerpt from writing_filters.txt: the filter context is where you put all the global information that you need.
Definition: writing_filters.txt:75
factor
static const int factor[16]
Definition: vf_pp7.c:75
AV_CODEC_CAP_PARAM_CHANGE
#define AV_CODEC_CAP_PARAM_CHANGE
Codec supports changed parameters at any point.
Definition: codec.h:114
timeline
Definition: dashdec.c:46
https
Excerpt from writing_filters.txt on adding a new filter (Doxygen file header and references).
Definition: writing_filters.txt:66
bad
static int bad(InterplayACMContext *s, unsigned ind, unsigned col)
Definition: interplayacm.c:116
print_program_info
static void print_program_info(int flags, int level)
Definition: cmdutils.c:1144
shift
static int shift(int a, int b)
Definition: sonic.c:82
AVFILTER_FLAG_SLICE_THREADS
#define AVFILTER_FLAG_SLICE_THREADS
The filter supports multithreading by splitting frames into multiple parts and processing them concur...
Definition: avfilter.h:116
opt_default
int opt_default(void *optctx, const char *opt, const char *arg)
Fallback for options that are not explicitly handled, these will be parsed through AVOptions.
Definition: cmdutils.c:542
av_strdup
char * av_strdup(const char *s)
Duplicate a string.
Definition: mem.c:253
review
Excerpt from writing_filters.txt: filters not submitted upstream are likely to have a very short lifetime due to regular internal API changes and limited review.
Definition: writing_filters.txt:8
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:201
av_log_default_callback
void av_log_default_callback(void *ptr, int level, const char *fmt, va_list vl)
Default logging callback.
Definition: log.c:346
AV_CODEC_CAP_SUBFRAMES
#define AV_CODEC_CAP_SUBFRAMES
Codec can output multiple frames per AVPacket. Normally demuxers return one frame at a time,...
Definition: codec.h:93
AV_OPT_FLAG_SUBTITLE_PARAM
#define AV_OPT_FLAG_SUBTITLE_PARAM
Definition: opt.h:280
overflow
Undefined behavior in the C language: some operations, like signed integer overflow, are undefined.
Definition: undefined.txt:3
show_colors
int show_colors(void *optctx, const char *opt, const char *arg)
Print a listing containing all the color names and values recognized by the program.
Definition: cmdutils.c:1732
codec_ids
static enum AVCodecID codec_ids[]
Definition: aac_adtstoasc_bsf.c:148
FF_CODEC_CAP_ALLOCATE_PROGRESS
#define FF_CODEC_CAP_ALLOCATE_PROGRESS
Definition: internal.h:75
L3
Excerpt from the Snow codec description (snow.txt) covering the wavelet transform and subband layout.
Definition: snow.txt:554
it
Excerpt from writing_filters.txt walking through registering, building and testing a new filter.
Definition: writing_filters.txt:31
AV_CODEC_CAP_HYBRID
#define AV_CODEC_CAP_HYBRID
Codec is potentially backed by a hardware implementation, but not necessarily.
Definition: codec.h:157
av_get_known_color_name
const char * av_get_known_color_name(int color_idx, const uint8_t **rgbp)
Get the name of a color from the internal table of hard-coded named colors.
Definition: parseutils.c:434
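A sketch in the spirit of show_colors() (helper name is illustrative), dumping the hard-coded color table as hex triplets:

#include <stdio.h>
#include <stdint.h>
#include "libavutil/parseutils.h"

static void list_colors(void)
{
    const char *name;
    const uint8_t *rgb;
    for (int i = 0; (name = av_get_known_color_name(i, &rgb)); i++)
        printf("%-24s #%02x%02x%02x\n", name, rgb[0], rgb[1], rgb[2]);
}
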
AVPixFmtDescriptor
Descriptor that unambiguously describes how the bits of a pixel are stored in the up to 4 data planes...
Definition: pixdesc.h:81
OptionDef::name
const char * name
Definition: cmdutils.h:159
show_filters
int show_filters(void *optctx, const char *opt, const char *arg)
Print a listing containing all the filters supported by the program.
Definition: cmdutils.c:1685
show_encoders
int show_encoders(void *optctx, const char *opt, const char *arg)
Print a listing containing all the encoders supported by the program.
Definition: cmdutils.c:1652
functions
static const struct drawtext_function functions[]
av_free
#define av_free(p)
Definition: tableprint_vlc.h:34
AVDictionaryEntry
Definition: dict.h:81
opt_loglevel
int opt_loglevel(void *optctx, const char *opt, const char *arg)
Set the libav* libraries log level.
Definition: cmdutils.c:868
show_help_protocol
static void show_help_protocol(const char *name)
Definition: cmdutils.c:1871
FF_FILTER_FORWARD_STATUS
FF_FILTER_FORWARD_STATUS(inlink, outlink)
equal
static int equal(MetadataContext *s, const char *value1, const char *value2)
Definition: f_metadata.c:138
cr
static double cr(void *priv, double x, double y)
Definition: vf_geq.c:216
OptionGroupDef::sep
const char * sep
Option to be used as group separator.
Definition: cmdutils.h:300
channel_layouts
static const uint16_t channel_layouts[7]
Definition: dca_lbr.c:113
status_in
Excerpt from filter_design.txt on the status_in and status_out fields of filter links.
Definition: filter_design.txt:154
AVFILTER_FLAG_SUPPORT_TIMELINE
#define AVFILTER_FLAG_SUPPORT_TIMELINE
Handy mask to test whether the filter supports the timeline feature (internally or generically)...
Definition: avfilter.h:138
avoid
Excerpt from filter_design.txt: the frame and frame reference mechanism is intended to avoid expensive copies of the frame data.
Definition: filter_design.txt:45
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:70
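A small sketch (keys and values are arbitrary) showing that setting an existing key overwrites it by default:

#include <stdio.h>
#include "libavutil/dict.h"

static void dict_demo(void)
{
    AVDictionary *d = NULL;
    AVDictionaryEntry *e;

    av_dict_set(&d, "flags", "bicubic", 0);
    av_dict_set(&d, "flags", "lanczos", 0);         /* replaces the previous value */
    if ((e = av_dict_get(d, "flags", NULL, 0)))
        printf("flags = %s\n", e->value);           /* "lanczos" */
    av_dict_free(&d);
}
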
cmdutils.h
ready
Excerpt from filter_design.txt: in request_frame(), if frames are already queued and ready, one of them should be pushed.
Definition: filter_design.txt:258
htaps
static const double htaps[HTAPS]
The 2nd half (48 coeffs) of a 96-tap symmetric lowpass filter.
Definition: dsd_tablegen.h:55
OPT_BOOL
#define OPT_BOOL
Definition: cmdutils.h:162
once
static pthread_once_t once
Definition: ffjni.c:36
inverse
static uint32_t inverse(uint32_t v)
Find the multiplicative inverse modulo 2^32.
Definition: asfcrypt.c:35
query_formats
static int query_formats(AVFilterContext *ctx)
Definition: aeval.c:244
convert_header.str
string str
Definition: convert_header.py:20
parse_time_or_die
int64_t parse_time_or_die(const char *context, const char *timestr, int is_duration)
Parse a string specifying a time and return its corresponding value as a number of microseconds.
Definition: cmdutils.c:162
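A usage sketch (the "-ss" context string and helper are illustrative); note that the function exits the program on malformed input rather than returning an error:

#include <stdint.h>
#include "cmdutils.h"

static int64_t start_time_us(void)
{
    /* "00:01:30.5" as a duration parses to 90500000 microseconds */
    return parse_time_or_die("-ss", "00:01:30.5", 1);
}
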
grow_array
void * grow_array(void *array, int elem_size, int *size, int new_size)
Realloc array to hold new_size elements of elem_size.
Definition: cmdutils.c:2181
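A sketch of the usual pattern (variable names are made up), normally written through the GROW_ARRAY() wrapper from cmdutils.h, which appends one element and bumps the counter:

#include "libavutil/mem.h"
#include "cmdutils.h"

static void grow_demo(void)
{
    int *values    = NULL;
    int  nb_values = 0;

    GROW_ARRAY(values, nb_values);      /* reallocates; nb_values becomes 1 */
    values[nb_values - 1] = 42;
    av_freep(&values);
}
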
imgutils.h
OutputStream
Definition: muxing.c:53
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:565
AV_CODEC_CAP_DRAW_HORIZ_BAND
#define AV_CODEC_CAP_DRAW_HORIZ_BAND
Decoder can use draw_horiz_band callback.
Definition: codec.h:44
av_strlcpy
size_t av_strlcpy(char *dst, const char *src, size_t size)
Copy the string src to dst, but no more than size - 1 bytes, and null-terminate dst.
Definition: avstring.c:83
coeff
static const double coeff[2][5]
Definition: vf_owdenoise.c:72
block
The exact code depends on how similar the blocks are and how related they are to the block
Definition: filter_design.txt:207
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:28
OptionParseContext::nb_groups
int nb_groups
Definition: cmdutils.h:337
AV_CODEC_CAP_AVOID_PROBING
#define AV_CODEC_CAP_AVOID_PROBING
Decoder is not a preferred choice for probing.
Definition: codec.h:132
pix_norm1
static int pix_norm1(uint8_t *pix, int line_size, int w)
Definition: snowenc.c:181
find_option
static const OptionDef * find_option(const OptionDef *po, const char *name)
Definition: cmdutils.c:215
AVCodecHWConfig
Definition: codec.h:425
uninit
static av_cold int uninit(AVCodecContext *avctx)
Definition: crystalhd.c:279
h
h
Definition: vp9dsp_template.c:2038
ff_outlink_frame_wanted
Excerpt from filter_design.txt: the activate callback must examine the status of the filter's links; ff_outlink_frame_wanted() tests whether a frame is wanted on an output link.
avcodec_descriptor_get
const AVCodecDescriptor * avcodec_descriptor_get(enum AVCodecID id)
Definition: codec_desc.c:3394
Sequence
@ Sequence
Definition: mxf.h:37
AVDictionaryEntry::value
char * value
Definition: dict.h:83
avstring.h
dimension
Excerpt from the swscale documentation (swscale.txt): slices are consecutive non-overlapping rectangles of dimension (0, slice_top) - (picture_width, slice_bottom).
project
static float project(float origin_x, float origin_y, float dest_x, float dest_y, int point_x, int point_y)
Definition: vsrc_gradients.c:150
show_help_options
void show_help_options(const OptionDef *options, const char *msg, int req_flags, int rej_flags, int alt_flags)
Print help for all options matching specified flags.
Definition: cmdutils.c:174
show_bsfs
int show_bsfs(void *optctx, const char *opt, const char *arg)
Print a listing containing all the bit stream filters supported by the program.
Definition: cmdutils.c:1658
Otherwise
Excerpt from writing_filters.txt: it is highly recommended to submit your filters to the FFmpeg development mailing list and make sure they are applied.
Definition: writing_filters.txt:6
show_license
int show_license(void *optctx, const char *opt, const char *arg)
Print the license of the program to stdout.
Definition: cmdutils.c:1212
PRINT_LIB_INFO
#define PRINT_LIB_INFO(libname, LIBNAME, flags, level)
Definition: cmdutils.c:1102
monolithic
This document is a tutorial/initiation for writing simple filters in libavfilter; libavfilter is monolithic.
Definition: writing_filters.txt:4
avcodec_descriptor_get_by_name
const AVCodecDescriptor * avcodec_descriptor_get_by_name(const char *name)
Definition: codec_desc.c:3409
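A sketch (hypothetical helper) resolving a codec name to its descriptor without requiring an encoder or decoder to be present:

#include <stdio.h>
#include "libavcodec/avcodec.h"

static void descriptor_demo(void)
{
    const AVCodecDescriptor *desc = avcodec_descriptor_get_by_name("h264");
    if (desc)
        printf("%s: %s\n", desc->name, desc->long_name);
}
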
planar
Excerpt from libswresample's audioconvert.c: the sample format conversion tables (CONV_FUNC/FMT_PAIR_FUNC) and the swri_audio_convert helpers.
Definition: audioconvert.c:56
AV_CODEC_CAP_SMALL_LAST_FRAME
#define AV_CODEC_CAP_SMALL_LAST_FRAME
Codec can be fed a final frame with a smaller size.
Definition: codec.h:80
SwsContext
Definition: swscale_internal.h:280
av_opt_show2
int av_opt_show2(void *obj, void *av_log_obj, int req_flags, int rej_flags)
Show the obj options.
Definition: opt.c:1345
show_help_muxer
static void show_help_muxer(const char *name)
Definition: cmdutils.c:1889
AV_PIX_FMT_FLAG_PAL
#define AV_PIX_FMT_FLAG_PAL
Pixel format has a palette in data[1], values are indexes in this palette.
Definition: pixdesc.h:132
MpegEncContext
MpegEncContext.
Definition: mpegvideo.h:81
snprintf
#define snprintf
Definition: snprintf.h:34
filter
Excerpt from the Snow codec description (snow.txt) covering the integer lifting filter used by the wavelet transform.
Definition: snow.txt:562
sanitize
static void sanitize(uint8_t *line)
Definition: log.c:245
do
Excerpt from writing_filters.txt describing the filter's AVOption table (name, description, default, minimum, maximum, flags).
Definition: writing_filters.txt:90
OptionParseContext::cur_group
OptionGroup cur_group
Definition: cmdutils.h:340
passed
static int passed(HysteresisContext *s, int x, int y, int w)
Definition: vf_hysteresis.c:140
AV_LOG_PRINT_LEVEL
#define AV_LOG_PRINT_LEVEL
Include the log severity in messages originating from codecs.
Definition: log.h:374
AVCodecHWConfig::device_type
enum AVHWDeviceType device_type
The device type associated with the configuration.
Definition: codec.h:446
get_video_buffer
static AVFrame * get_video_buffer(AVFilterLink *inlink, int w, int h)
Definition: avf_concat.c:199
Therefore
Excerpt from filter_design.txt: a filter with several inputs must be ready for frames arriving randomly on any input.
Definition: filter_design.txt:244
undefined
Undefined behavior: in the C language some operations are undefined.
Definition: undefined.txt:3
is_device
static int is_device(const AVClass *avclass)
Definition: cmdutils.c:1286
av_bprint_chars
void av_bprint_chars(AVBPrint *buf, char c, unsigned n)
Append char c n times to a print buffer.
Definition: bprint.c:140
show_demuxers
int show_demuxers(void *optctx, const char *opt, const char *arg)
Print a listing containing all the demuxers supported by the program (including devices).
Definition: cmdutils.c:1366
channel
channel
Definition: ebur128.h:39
swscale.h
request_frame
static int request_frame(AVFilterLink *outlink)
Definition: aeval.c:274
match_group_separator
static int match_group_separator(const OptionGroupDef *groups, int nb_groups, const char *opt)
Definition: cmdutils.c:639
AVInputFormat::priv_class
const AVClass * priv_class
AVClass for the private context.
Definition: avformat.h:666
foobar
Excerpt from writing_filters.txt (the example vf_foobar filter and its context).
Definition: writing_filters.txt:84
OptionDef::func_arg
int(* func_arg)(void *, const char *, const char *)
Definition: cmdutils.h:182
opt_find
static const AVOption * opt_find(void *obj, const char *name, const char *unit, int opt_flags, int search_flags)
Definition: cmdutils.c:532
av_x_if_null
static void * av_x_if_null(const void *p, const void *x)
Return the default pointer x in case p is NULL.
Definition: avutil.h:308
nb_output_streams
int nb_output_streams
Definition: ffmpeg.c:153
av_display_rotation_get
double av_display_rotation_get(const int32_t matrix[9])
Extract the rotation component of the transformation matrix.
Definition: display.c:34
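A sketch in the spirit of get_rotation() above (helper name is illustrative): read the display matrix side data attached to a stream and turn it into an angle in degrees:

#include "libavformat/avformat.h"
#include "libavutil/display.h"

static double stream_rotation(AVStream *st)
{
    int32_t *matrix =
        (int32_t *)av_stream_get_side_data(st, AV_PKT_DATA_DISPLAYMATRIX, NULL);
    return matrix ? av_display_rotation_get(matrix) : 0.0;
}
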
OutputFile
Definition: ffmpeg.h:554
re
float re
Definition: fft.c:82
GEN
#define GEN(table)
program_birth_year
const int program_birth_year
program birth year, defined by the program for show_banner()
Definition: ffmpeg.c:110
min
float min
Definition: vorbis_enc_data.h:456
OptionDef::flags
int flags
Definition: cmdutils.h:160
OPT_DOUBLE
#define OPT_DOUBLE
Definition: cmdutils.h:177
dither
static const uint8_t dither[8][8]
Definition: vf_fspp.c:57