vf_overlay_vaapi.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
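
/**
 * @file
 * Overlay one VAAPI surface on top of another: the second ("overlay") input
 * is blended onto the first ("main") input with a global alpha value, using
 * the VA-API video processing pipeline (VA_BLEND_GLOBAL_ALPHA).
 */
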
#include <string.h>

#include "libavutil/avassert.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "framesync.h"
#include "formats.h"
#include "internal.h"
#include "vaapi_vpp.h"

typedef struct OverlayVAAPIContext {
    VAAPIVPPContext  vpp_ctx; /**< must be the first field */
    FFFrameSync      fs;
    int              overlay_ox;
    int              overlay_oy;
    int              overlay_ow;
    int              overlay_oh;
    float            alpha;
} OverlayVAAPIContext;

static int overlay_vaapi_query_formats(AVFilterContext *ctx)
{
    int ret;
    enum {
        MAIN    = 0,
        OVERLAY = 1,
    };

    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_VAAPI,
        AV_PIX_FMT_NONE
    };

    ret = ff_formats_ref(ff_make_format_list(pix_fmts), &ctx->inputs[MAIN]->outcfg.formats);
    if (ret < 0)
        return ret;

    ret = ff_formats_ref(ff_make_format_list(pix_fmts), &ctx->inputs[OVERLAY]->outcfg.formats);
    if (ret < 0)
        return ret;

    ret = ff_formats_ref(ff_make_format_list(pix_fmts), &ctx->outputs[0]->incfg.formats);
    if (ret < 0)
        return ret;

    return 0;
}

/* Query the driver's VPP blend capabilities and require global-alpha blending. */
static int overlay_vaapi_build_filter_params(AVFilterContext *avctx)
{
    VAAPIVPPContext *vpp_ctx = avctx->priv;
    VAStatus vas;
    int support_flag;
    VAProcPipelineCaps pipeline_caps;

    memset(&pipeline_caps, 0, sizeof(pipeline_caps));
    vas = vaQueryVideoProcPipelineCaps(vpp_ctx->hwctx->display,
                                       vpp_ctx->va_context,
                                       NULL, 0,
                                       &pipeline_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    if (!pipeline_caps.blend_flags) {
        av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support overlay\n");
        return AVERROR(EINVAL);
    }

    support_flag = pipeline_caps.blend_flags & VA_BLEND_GLOBAL_ALPHA;
    if (!support_flag) {
        av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support global alpha blending\n");
        return AVERROR(EINVAL);
    }

    return 0;
}

/* Submit the main and overlay pipeline parameter buffers and run the VPP job
 * on the output surface. */
static int overlay_vaapi_render_picture(AVFilterContext *avctx,
                                        VAProcPipelineParameterBuffer *params,
                                        VAProcPipelineParameterBuffer *subpic_params,
                                        AVFrame *output_frame)
{
    VAAPIVPPContext *ctx = avctx->priv;
    VASurfaceID output_surface;
    VABufferID params_id;
    VABufferID subpic_params_id;
    VAStatus vas;
    int err = 0;

    output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];

    vas = vaBeginPicture(ctx->hwctx->display,
                         ctx->va_context, output_surface);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcPipelineParameterBufferType,
                         sizeof(*params), 1, params, &params_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }
    av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
           params_id);

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcPipelineParameterBufferType,
                         sizeof(*subpic_params), 1, subpic_params, &subpic_params_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create subpicture parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }
    av_log(avctx, AV_LOG_DEBUG, "Pipeline subpic parameter buffer is %#x.\n",
           subpic_params_id);

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                          &params_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                          &subpic_params_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to render subpic parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }

    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_render;
    }

    if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
        vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            // And ignore.
        }
    }

    return 0;

    // We want to make sure that if vaBeginPicture has been called, we also
    // call vaRenderPicture and vaEndPicture.  These calls may well fail or
    // do something else nasty, but once we're in this failure case there
    // isn't much else we can do.
fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    return err;
}

/* Framesync callback: blend the overlay input onto the main input into a
 * newly allocated output frame. */
static int overlay_vaapi_blend(FFFrameSync *fs)
{
    AVFilterContext      *avctx = fs->parent;
    AVFilterLink       *outlink = avctx->outputs[0];
    OverlayVAAPIContext    *ctx = avctx->priv;
    VAAPIVPPContext    *vpp_ctx = avctx->priv;
    AVFrame *input_main, *input_overlay;
    AVFrame *output;
    VAProcPipelineParameterBuffer params, subpic_params;
    VABlendState blend_state; /**< Blend State */
    VARectangle overlay_region, output_region;
    int err;

    err = overlay_vaapi_build_filter_params(avctx);
    if (err < 0)
        return err;

    err = ff_framesync_get_frame(fs, 0, &input_main, 0);
    if (err < 0)
        return err;
    err = ff_framesync_get_frame(fs, 1, &input_overlay, 0);
    if (err < 0)
        return err;

    av_log(avctx, AV_LOG_DEBUG, "Filter main: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_main->format),
           input_main->width, input_main->height, input_main->pts);

    av_log(avctx, AV_LOG_DEBUG, "Filter overlay: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_overlay->format),
           input_overlay->width, input_overlay->height, input_overlay->pts);

    if (vpp_ctx->va_context == VA_INVALID_ID)
        return AVERROR(EINVAL);

    output = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!output) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    err = av_frame_copy_props(output, input_main);
    if (err < 0)
        goto fail;

    err = ff_vaapi_vpp_init_params(avctx, &params,
                                   input_main, output);
    if (err < 0)
        goto fail;

    overlay_region = (VARectangle) {
        .x      = ctx->overlay_ox,
        .y      = ctx->overlay_oy,
        .width  = ctx->overlay_ow ? ctx->overlay_ow : input_overlay->width,
        .height = ctx->overlay_oh ? ctx->overlay_oh : input_overlay->height,
    };

    output_region = (VARectangle) {
        .x      = 0,
        .y      = 0,
        .width  = output->width,
        .height = output->height,
    };

    if (overlay_region.x + overlay_region.width > input_main->width ||
        overlay_region.y + overlay_region.height > input_main->height) {
        av_log(avctx, AV_LOG_WARNING,
               "The overlay image exceeds the bounds of the main image; "
               "it will be cropped to fit the main image.\n");
    }

    params.filters     = &vpp_ctx->filter_buffers[0];
    params.num_filters = vpp_ctx->nb_filter_buffers;

    params.output_region = &output_region;
    params.output_background_color = VAAPI_VPP_BACKGROUND_BLACK;

    memcpy(&subpic_params, &params, sizeof(subpic_params));

    blend_state.flags        = VA_BLEND_GLOBAL_ALPHA;
    blend_state.global_alpha = ctx->alpha;
    subpic_params.blend_state = &blend_state;

    subpic_params.surface       = (VASurfaceID)(uintptr_t)input_overlay->data[3];
    subpic_params.output_region = &overlay_region;

    err = overlay_vaapi_render_picture(avctx, &params, &subpic_params, output);
    if (err < 0)
        goto fail;

    av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(output->format),
           output->width, output->height, output->pts);

    return ff_filter_frame(outlink, output);

fail:
    av_frame_free(&output);
    return err;
}

static int overlay_vaapi_init_framesync(AVFilterContext *avctx)
{
    OverlayVAAPIContext *ctx = avctx->priv;
    int ret, i;

    ctx->fs.on_event = overlay_vaapi_blend;
    ctx->fs.opaque   = ctx;
    ret = ff_framesync_init(&ctx->fs, avctx, avctx->nb_inputs);
    if (ret < 0)
        return ret;

    for (i = 0; i < avctx->nb_inputs; i++) {
        FFFrameSyncIn *in = &ctx->fs.in[i];
        in->before    = EXT_STOP;
        in->after     = EXT_INFINITY;
        in->sync      = i ? 1 : 2;
        in->time_base = avctx->inputs[i]->time_base;
    }

    return ff_framesync_configure(&ctx->fs);
}

static int overlay_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterContext      *avctx = outlink->src;
    OverlayVAAPIContext    *ctx = avctx->priv;
    VAAPIVPPContext    *vpp_ctx = avctx->priv;
    int err;

    err = overlay_vaapi_init_framesync(avctx);
    if (err < 0)
        return err;

    vpp_ctx->output_width  = avctx->inputs[0]->w;
    vpp_ctx->output_height = avctx->inputs[0]->h;

    err = ff_vaapi_vpp_config_output(outlink);
    if (err < 0)
        return err;

    err = ff_framesync_init_dualinput(&ctx->fs, avctx);
    if (err < 0)
        return err;

    return ff_framesync_configure(&ctx->fs);
}

static av_cold int overlay_vaapi_init(AVFilterContext *avctx)
{
    VAAPIVPPContext *vpp_ctx = avctx->priv;

    ff_vaapi_vpp_ctx_init(avctx);
    vpp_ctx->output_format = AV_PIX_FMT_NONE;

    return 0;
}

static int overlay_vaapi_activate(AVFilterContext *avctx)
{
    OverlayVAAPIContext *ctx = avctx->priv;

    return ff_framesync_activate(&ctx->fs);
}

static av_cold void overlay_vaapi_uninit(AVFilterContext *avctx)
{
    OverlayVAAPIContext *ctx = avctx->priv;

    ff_framesync_uninit(&ctx->fs);
}

#define OFFSET(x) offsetof(OverlayVAAPIContext, x)
#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
static const AVOption overlay_vaapi_options[] = {
    { "x", "Overlay x position",
      OFFSET(overlay_ox), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
    { "y", "Overlay y position",
      OFFSET(overlay_oy), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
    { "w", "Overlay width",
      OFFSET(overlay_ow), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
    { "h", "Overlay height",
      OFFSET(overlay_oh), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
    { "alpha", "Overlay global alpha",
      OFFSET(alpha), AV_OPT_TYPE_FLOAT, { .dbl = 0.0 }, 0.0, 1.0, .flags = FLAGS },
    { NULL },
};

AVFILTER_DEFINE_CLASS(overlay_vaapi);
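
/*
 * Illustrative command-line usage (a sketch; file names, the DRM device path
 * and option values are placeholders, not taken from this file).  Both inputs
 * must already be VAAPI surfaces, so a software overlay image is typically
 * converted and uploaded with format/hwupload first:
 *
 *   ffmpeg -init_hw_device vaapi=va:/dev/dri/renderD128 -filter_hw_device va \
 *          -hwaccel vaapi -hwaccel_output_format vaapi -i main.mp4 -i logo.png \
 *          -filter_complex \
 *          "[1:v]format=nv12,hwupload[ovr];[0:v][ovr]overlay_vaapi=x=64:y=64:alpha=0.5[out]" \
 *          -map "[out]" -c:v h264_vaapi output.mp4
 */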

static const AVFilterPad overlay_vaapi_inputs[] = {
    {
        .name             = "main",
        .type             = AVMEDIA_TYPE_VIDEO,
        .get_buffer.video = ff_default_get_video_buffer,
        .config_props     = &ff_vaapi_vpp_config_input,
    },
    {
        .name             = "overlay",
        .type             = AVMEDIA_TYPE_VIDEO,
        .get_buffer.video = ff_default_get_video_buffer,
    },
};

static const AVFilterPad overlay_vaapi_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = &overlay_vaapi_config_output,
    },
};

const AVFilter ff_vf_overlay_vaapi = {
    .name            = "overlay_vaapi",
    .description     = NULL_IF_CONFIG_SMALL("Overlay one video on top of another"),
    .priv_size       = sizeof(OverlayVAAPIContext),
    .priv_class      = &overlay_vaapi_class,
    .init            = &overlay_vaapi_init,
    .uninit          = &overlay_vaapi_uninit,
    .activate        = &overlay_vaapi_activate,
    FILTER_INPUTS(overlay_vaapi_inputs),
    FILTER_OUTPUTS(overlay_vaapi_outputs),
    FILTER_QUERY_FUNC(overlay_vaapi_query_formats),
    .flags_internal  = FF_FILTER_FLAG_HWFRAME_AWARE,
};