FFmpeg
vf_amf_common.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
#include "vf_amf_common.h"

#include "libavutil/avassert.h"
#include "avfilter.h"
#include "avfilter_internal.h"
#include "formats.h"
#include "libavutil/mem.h"
#include "libavutil/imgutils.h"

#include "libavutil/hwcontext_amf.h"
#include "libavutil/hwcontext_amf_internal.h"
#include "AMF/components/ColorSpace.h"
#include "scale_eval.h"
33 #if CONFIG_DXVA2
34 #include <d3d9.h>
35 #endif
36 
37 #if CONFIG_D3D11VA
38 #include <d3d11.h>
39 #endif
40 
42 {
43  AMFFilterContext *ctx = avctx->priv;
44 
45  if (!strcmp(ctx->format_str, "same")) {
46  ctx->format = AV_PIX_FMT_NONE;
47  } else {
48  ctx->format = av_get_pix_fmt(ctx->format_str);
49  if (ctx->format == AV_PIX_FMT_NONE) {
50  av_log(avctx, AV_LOG_ERROR, "Unrecognized pixel format: %s\n", ctx->format_str);
51  return AVERROR(EINVAL);
52  }
53  }
54 
55  return 0;
56 }
57 
59 {
60  AMFFilterContext *ctx = avctx->priv;
61 
62  if (ctx->component) {
63  ctx->component->pVtbl->Terminate(ctx->component);
64  ctx->component->pVtbl->Release(ctx->component);
65  ctx->component = NULL;
66  }
67 
68  av_buffer_unref(&ctx->amf_device_ref);
69  av_buffer_unref(&ctx->hwdevice_ref);
70  av_buffer_unref(&ctx->hwframes_in_ref);
71  av_buffer_unref(&ctx->hwframes_out_ref);
72 }
73 
75 {
76  AVFilterContext *avctx = inlink->dst;
77  AMFFilterContext *ctx = avctx->priv;
78  AVFilterLink *outlink = avctx->outputs[0];
79  AMF_RESULT res;
80  AMFSurface *surface_in;
81  AMFSurface *surface_out;
82  AMFData *data_out = NULL;
83  enum AVColorSpace out_colorspace;
84  enum AVColorRange out_color_range;
85 
86  AVFrame *out = NULL;
87  int ret = 0;
88 
89  if (!ctx->component)
90  return AVERROR(EINVAL);
91 
92  ret = amf_avframe_to_amfsurface(avctx, in, &surface_in);
93  if (ret < 0)
94  goto fail;
95 
96  res = ctx->component->pVtbl->SubmitInput(ctx->component, (AMFData*)surface_in);
97  surface_in->pVtbl->Release(surface_in); // release surface after use
98  AMF_GOTO_FAIL_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
99  res = ctx->component->pVtbl->QueryOutput(ctx->component, &data_out);
100  AMF_GOTO_FAIL_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "QueryOutput() failed with error %d\n", res);
101 
102  if (data_out) {
103  AMFGuid guid = IID_AMFSurface();
104  data_out->pVtbl->QueryInterface(data_out, &guid, (void**)&surface_out); // query for buffer interface
105  data_out->pVtbl->Release(data_out);
106  }
107 
108  out = amf_amfsurface_to_avframe(avctx, surface_out);
109 
110  ret = av_frame_copy_props(out, in);
111  av_frame_unref(in);
112 
113  out_colorspace = AVCOL_SPC_UNSPECIFIED;
114 
115  if (ctx->color_profile != AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN) {
116  switch(ctx->color_profile) {
117  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_601:
118  out_colorspace = AVCOL_SPC_SMPTE170M;
119  break;
120  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_709:
121  out_colorspace = AVCOL_SPC_BT709;
122  break;
123  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020:
124  out_colorspace = AVCOL_SPC_BT2020_NCL;
125  break;
126  case AMF_VIDEO_CONVERTER_COLOR_PROFILE_JPEG:
127  out_colorspace = AVCOL_SPC_RGB;
128  break;
129  default:
130  out_colorspace = AVCOL_SPC_UNSPECIFIED;
131  break;
132  }
133  out->colorspace = out_colorspace;
134  }
135 
136  out_color_range = AVCOL_RANGE_UNSPECIFIED;
137  if (ctx->color_range == AMF_COLOR_RANGE_FULL)
138  out_color_range = AVCOL_RANGE_JPEG;
139  else if (ctx->color_range == AMF_COLOR_RANGE_STUDIO)
140  out_color_range = AVCOL_RANGE_MPEG;
141 
142  if (ctx->color_range != AMF_COLOR_RANGE_UNDEFINED)
143  out->color_range = out_color_range;
144 
145  if (ctx->primaries != AMF_COLOR_PRIMARIES_UNDEFINED)
146  out->color_primaries = ctx->primaries;
147 
148  if (ctx->trc != AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED)
149  out->color_trc = ctx->trc;
150 
151 
152  if (ret < 0)
153  goto fail;
154 
155  out->hw_frames_ctx = av_buffer_ref(ctx->hwframes_out_ref);
156  if (!out->hw_frames_ctx) {
157  ret = AVERROR(ENOMEM);
158  goto fail;
159  }
160 
161  av_frame_free(&in);
162  return ff_filter_frame(outlink, out);
163 fail:
164  av_frame_free(&in);
165  av_frame_free(&out);
166  return ret;
167 }
168 
169 
170 
172  const enum AVPixelFormat *input_pix_fmts,
173  const enum AVPixelFormat *output_pix_fmts)
174 {
175  int err;
176  AVFilterFormats *input_formats;
177  AVFilterFormats *output_formats;
178 
179  //in case if hw_device_ctx is set to DXVA2 we change order of pixel formats to set DXVA2 be choosen by default
180  //The order is ignored if hw_frames_ctx is not NULL on the config_output stage
181  if (avctx->hw_device_ctx) {
182  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
183 
184  switch (device_ctx->type) {
185  #if CONFIG_D3D11VA
187  {
188  static const enum AVPixelFormat output_pix_fmts_d3d11[] = {
191  };
192  output_pix_fmts = output_pix_fmts_d3d11;
193  }
194  break;
195  #endif
196  #if CONFIG_DXVA2
198  {
199  static const enum AVPixelFormat output_pix_fmts_dxva2[] = {
202  };
203  output_pix_fmts = output_pix_fmts_dxva2;
204  }
205  break;
206  #endif
208  break;
209  default:
210  {
211  av_log(avctx, AV_LOG_ERROR, "Unsupported device : %s\n", av_hwdevice_get_type_name(device_ctx->type));
212  return AVERROR(EINVAL);
213  }
214  break;
215  }
216  }
217 
218  input_formats = ff_make_format_list(output_pix_fmts);
219  if (!input_formats) {
220  return AVERROR(ENOMEM);
221  }
222  output_formats = ff_make_format_list(output_pix_fmts);
223  if (!output_formats) {
224  return AVERROR(ENOMEM);
225  }
226 
227  if ((err = ff_formats_ref(input_formats, &avctx->inputs[0]->outcfg.formats)) < 0)
228  return err;
229 
230  if ((err = ff_formats_ref(output_formats, &avctx->outputs[0]->incfg.formats)) < 0)
231  return err;
232  return 0;
233 }
234 
/**
 * Copy the pixel data of a software @p frame into an AMF @p surface.
 *
 * Queries the surface's plane pointers and pitches, then performs a plain
 * av_image_copy(); the frame and surface are assumed to have matching
 * format and dimensions.
 *
 * @return 0 (always succeeds once the surface planes are valid).
 */
int amf_copy_surface(AVFilterContext *avctx, const AVFrame *frame,
    AMFSurface* surface)
{
    AMFPlane *plane;
    uint8_t *dst_data[4];
    int dst_linesize[4];
    int planes;
    int i;

    planes = (int)surface->pVtbl->GetPlanesCount(surface);
    // NOTE(review): '<' rejects 4-plane surfaces even though dst_data has 4
    // entries — confirm whether '<=' was intended.
    av_assert0(planes < FF_ARRAY_ELEMS(dst_data));

    for (i = 0; i < planes; i++) {
        plane = surface->pVtbl->GetPlaneAt(surface, i);
        dst_data[i] = plane->pVtbl->GetNative(plane);
        dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
    }
    av_image_copy(dst_data, dst_linesize,
                  (const uint8_t**)frame->data, frame->linesize, frame->format,
                  frame->width, frame->height);

    return 0;
}
258 
259 int amf_init_filter_config(AVFilterLink *outlink, enum AVPixelFormat *in_format)
260 {
261  int err;
262  AMF_RESULT res;
263  AVFilterContext *avctx = outlink->src;
264  AVFilterLink *inlink = avctx->inputs[0];
265  AMFFilterContext *ctx = avctx->priv;
266  AVHWFramesContext *hwframes_out;
267  AVHWDeviceContext *hwdev_ctx;
268  enum AVPixelFormat in_sw_format = inlink->format;
269  enum AVPixelFormat out_sw_format = ctx->format;
271  FilterLink *outl = ff_filter_link(outlink);
272  double w_adj = 1.0;
273 
274  if ((err = ff_scale_eval_dimensions(avctx,
275  ctx->w_expr, ctx->h_expr,
276  inlink, outlink,
277  &ctx->width, &ctx->height)) < 0)
278  return err;
279 
280  if (ctx->reset_sar && inlink->sample_aspect_ratio.num)
281  w_adj = (double) inlink->sample_aspect_ratio.num / inlink->sample_aspect_ratio.den;
282 
283  ff_scale_adjust_dimensions(inlink, &ctx->width, &ctx->height,
284  ctx->force_original_aspect_ratio, ctx->force_divisible_by, w_adj);
285 
286  av_buffer_unref(&ctx->amf_device_ref);
287  av_buffer_unref(&ctx->hwframes_in_ref);
288  av_buffer_unref(&ctx->hwframes_out_ref);
289  ctx->local_context = 0;
290  if (inl->hw_frames_ctx) {
292  if (av_av_to_amf_format(frames_ctx->sw_format) == AMF_SURFACE_UNKNOWN) {
293  av_log(avctx, AV_LOG_ERROR, "Format of input frames context (%s) is not supported by AMF.\n",
294  av_get_pix_fmt_name(frames_ctx->sw_format));
295  return AVERROR(EINVAL);
296  }
297 
298  err = av_hwdevice_ctx_create_derived(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, frames_ctx->device_ref, 0);
299  if (err < 0)
300  return err;
301 
302  ctx->hwframes_in_ref = av_buffer_ref(inl->hw_frames_ctx);
303  if (!ctx->hwframes_in_ref)
304  return AVERROR(ENOMEM);
305 
306  in_sw_format = frames_ctx->sw_format;
307  } else if (avctx->hw_device_ctx) {
308  err = av_hwdevice_ctx_create_derived(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, avctx->hw_device_ctx, 0);
309  if (err < 0)
310  return err;
311  ctx->hwdevice_ref = av_buffer_ref(avctx->hw_device_ctx);
312  if (!ctx->hwdevice_ref)
313  return AVERROR(ENOMEM);
314  } else {
315  res = av_hwdevice_ctx_create(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, NULL, NULL, 0);
316  AMF_RETURN_IF_FALSE(avctx, res == 0, res, "Failed to create hardware device context (AMF) : %s\n", av_err2str(res));
317 
318  }
319  if(out_sw_format == AV_PIX_FMT_NONE){
320  if(outlink->format == AV_PIX_FMT_AMF_SURFACE)
321  out_sw_format = in_sw_format;
322  else
323  out_sw_format = outlink->format;
324  }
325  ctx->hwframes_out_ref = av_hwframe_ctx_alloc(ctx->amf_device_ref);
326  if (!ctx->hwframes_out_ref)
327  return AVERROR(ENOMEM);
328  hwframes_out = (AVHWFramesContext*)ctx->hwframes_out_ref->data;
329  hwdev_ctx = (AVHWDeviceContext*)ctx->amf_device_ref->data;
330  if (hwdev_ctx->type == AV_HWDEVICE_TYPE_AMF)
331  {
332  ctx->amf_device_ctx = hwdev_ctx->hwctx;
333  }
334  hwframes_out->format = AV_PIX_FMT_AMF_SURFACE;
335  hwframes_out->sw_format = out_sw_format;
336 
337  if (inlink->format == AV_PIX_FMT_AMF_SURFACE) {
338  *in_format = in_sw_format;
339  } else {
340  *in_format = inlink->format;
341  }
342  outlink->w = ctx->width;
343  outlink->h = ctx->height;
344 
345  if (ctx->reset_sar)
346  outlink->sample_aspect_ratio = (AVRational){1, 1};
347  else if (inlink->sample_aspect_ratio.num) {
348  outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h * inlink->w, outlink->w * inlink->h}, inlink->sample_aspect_ratio);
349  } else
350  outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
351 
352  hwframes_out->width = outlink->w;
353  hwframes_out->height = outlink->h;
354 
355  err = av_hwframe_ctx_init(ctx->hwframes_out_ref);
356  if (err < 0)
357  return err;
358 
359  outl->hw_frames_ctx = av_buffer_ref(ctx->hwframes_out_ref);
360  if (!outl->hw_frames_ctx) {
361  return AVERROR(ENOMEM);
362  }
363  return 0;
364 }
365 
/**
 * AVBuffer free callback: drop one reference on the wrapped AMFSurface.
 *
 * @param opaque unused
 * @param data   the AMFSurface pointer stored as the buffer's data
 */
void amf_free_amfsurface(void *opaque, uint8_t *data)
{
    AMFSurface *surf = (AMFSurface*)data;
    surf->pVtbl->Release(surf);
}
371 
/**
 * Wrap an AMFSurface in a newly allocated AVFrame.
 *
 * With an output hwframes context of format AV_PIX_FMT_AMF_SURFACE the
 * surface pointer is stored in data[0] and its reference is owned by buf[1].
 * Without one, the native D3D11/D3D9 handle is exposed per FFmpeg hw-frame
 * conventions and the surface reference is owned by buf[0].
 *
 * Ownership of the caller's surface reference transfers to the returned
 * frame on success. Returns NULL on failure.
 */
AVFrame *amf_amfsurface_to_avframe(AVFilterContext *avctx, AMFSurface* pSurface)
{
    AVFrame *frame = av_frame_alloc();
    AMFFilterContext *ctx = avctx->priv;

    if (!frame)
        return NULL;

    if (ctx->hwframes_out_ref) {
        AVHWFramesContext *hwframes_out = (AVHWFramesContext *)ctx->hwframes_out_ref->data;
        if (hwframes_out->format == AV_PIX_FMT_AMF_SURFACE) {
            int ret = av_hwframe_get_buffer(ctx->hwframes_out_ref, frame, 0);
            if (ret < 0) {
                av_log(avctx, AV_LOG_ERROR, "Get hw frame failed.\n");
                av_frame_free(&frame);
                return NULL;
            }
            frame->data[0] = (uint8_t *)pSurface;
            // buf[1] owns the AMF surface reference; amf_free_amfsurface releases it
            frame->buf[1] = av_buffer_create((uint8_t *)pSurface, sizeof(AMFSurface),
                                             amf_free_amfsurface,
                                             (void*)avctx,
                                             AV_BUFFER_FLAG_READONLY);
        } else { // FIXME: add processing of other hw formats
            av_log(avctx, AV_LOG_ERROR, "Unknown pixel format\n"); // fix: log on avctx, not the priv struct
            av_frame_free(&frame); // fix: don't leak the frame on the error path
            return NULL;
        }
    } else {

        switch (pSurface->pVtbl->GetMemoryType(pSurface))
        {
#if CONFIG_D3D11VA
        case AMF_MEMORY_DX11:
        {
            AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
            frame->data[0] = plane0->pVtbl->GetNative(plane0);
            frame->data[1] = (uint8_t*)(intptr_t)0; // texture array slice index

            // fix: pass pSurface as buffer data — amf_free_amfsurface releases
            // (AMFSurface*)data, so data must not be NULL
            frame->buf[0] = av_buffer_create((uint8_t *)pSurface,
                                             0,
                                             amf_free_amfsurface,
                                             pSurface,
                                             AV_BUFFER_FLAG_READONLY);
        }
        break;
#endif
#if CONFIG_DXVA2
        case AMF_MEMORY_DX9:
        {
            AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
            frame->data[3] = plane0->pVtbl->GetNative(plane0); // DXVA2 convention: data[3] = IDirect3DSurface9*

            // fix: pass pSurface as buffer data (see DX11 branch)
            frame->buf[0] = av_buffer_create((uint8_t *)pSurface,
                                             0,
                                             amf_free_amfsurface,
                                             pSurface,
                                             AV_BUFFER_FLAG_READONLY);
        }
        break;
#endif
        default:
        {
            av_log(avctx, AV_LOG_ERROR, "Unsupported memory type : %d\n", pSurface->pVtbl->GetMemoryType(pSurface));
            av_frame_free(&frame); // fix: don't leak the frame on the error path
            return NULL;
        }
        }
    }

    return frame;
}
441 
/**
 * Convert an AVFrame into an AMFSurface.
 *
 * Hardware frames (D3D11, AMF, DXVA2) are wrapped without copying; software
 * frames are copied into a newly allocated host-memory surface. Crop
 * metadata is applied via SetCrop(), or by duplicating the surface when the
 * crop matches the output size exactly.
 *
 * On success *ppSurface holds one reference owned by the caller.
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
int amf_avframe_to_amfsurface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface** ppSurface)
{
    AMFFilterContext *ctx = avctx->priv;
    AMFSurface *surface;
    AMF_RESULT res;
    int hw_surface = 0;

    switch (frame->format) {
#if CONFIG_D3D11VA
    case AV_PIX_FMT_D3D11:
        {
            static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
            ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
            int index = (intptr_t)frame->data[1]; // index is a slice in texture array is - set to tell AMF which slice to use
            texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);

            res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
            AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
            hw_surface = 1;
        }
        break;
#endif
    case AV_PIX_FMT_AMF_SURFACE:
        {
            surface = (AMFSurface*)frame->data[0]; // actual surface
            surface->pVtbl->Acquire(surface); // returned surface has to be ref++'d for the caller
            hw_surface = 1;
        }
        break;

#if CONFIG_DXVA2
    case AV_PIX_FMT_DXVA2_VLD:
        {
            IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture

            res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX9Native(ctx->amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
            AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
            hw_surface = 1;
        }
        break;
#endif
    default:
        {
            AMF_SURFACE_FORMAT amf_fmt = av_av_to_amf_format(frame->format);
            res = ctx->amf_device_ctx->context->pVtbl->AllocSurface(ctx->amf_device_ctx->context, AMF_MEMORY_HOST, amf_fmt, frame->width, frame->height, &surface);
            AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
            amf_copy_surface(avctx, frame, surface);
        }
        break;
    }

    if (frame->crop_left || frame->crop_right || frame->crop_top || frame->crop_bottom) {
        size_t crop_x = frame->crop_left;
        size_t crop_y = frame->crop_top;
        size_t crop_w = frame->width - (frame->crop_left + frame->crop_right);
        size_t crop_h = frame->height - (frame->crop_top + frame->crop_bottom);
        AVFilterLink *outlink = avctx->outputs[0];
        if (crop_x || crop_y) {
            if (crop_w == outlink->w && crop_h == outlink->h) {
                AMFData *cropped_buffer = NULL;
                res = surface->pVtbl->Duplicate(surface, surface->pVtbl->GetMemoryType(surface), &cropped_buffer);
                AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "Duplicate() failed with error %d\n", res);
                surface->pVtbl->Release(surface);
                surface = (AMFSurface*)cropped_buffer;
            }
            else
                surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
        }
        else
            surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
    }
    else if (hw_surface) {
        // input HW surfaces can be vertically aligned by 16; tell AMF the real size
        surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
    }

    surface->pVtbl->SetPts(surface, frame->pts);
    *ppSurface = surface;
    return 0;
}
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:86
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
amf_avframe_to_amfsurface
int amf_avframe_to_amfsurface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface **ppSurface)
Definition: vf_amf_common.c:442
AVERROR
During format negotiation, the format lists at both ends of a link are replaced by their intersection, and once a single format is chosen all references to the list are updated; a filter that requires identical input and output formats can simply reference the same list on both links. query_formats may leave some formats unset and return AVERROR(EAGAIN) to have the negotiation mechanism try again later, letting filters with complex requirements use the format negotiated on one link to set the formats supported on another.
ff_make_format_list
AVFilterFormats * ff_make_format_list(const int *fmts)
Create a list of supported formats.
Definition: formats.c:435
out
FILE * out
Definition: movenc.c:55
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1078
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
planes
static const struct @475 planes[]
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:198
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:163
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:326
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:410
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:733
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:252
data
const char data[16]
Definition: mxf.c:149
AVCOL_SPC_RGB
@ AVCOL_SPC_RGB
order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
Definition: pixfmt.h:657
ff_scale_eval_dimensions
int ff_scale_eval_dimensions(void *log_ctx, const char *w_expr, const char *h_expr, AVFilterLink *inlink, AVFilterLink *outlink, int *ret_w, int *ret_h)
Parse and evaluate string expressions for width and height.
Definition: scale_eval.c:57
AVFilterContext::hw_device_ctx
AVBufferRef * hw_device_ctx
For filters which will create hardware frames, sets the device the filter should create them in.
Definition: avfilter.h:339
amf_setup_input_output_formats
int amf_setup_input_output_formats(AVFilterContext *avctx, const enum AVPixelFormat *input_pix_fmts, const enum AVPixelFormat *output_pix_fmts)
Definition: vf_amf_common.c:171
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AMF_RETURN_IF_FALSE
#define AMF_RETURN_IF_FALSE(avctx, exp, ret_value,...)
Error handling helper.
Definition: amfenc.h:162
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:218
AV_PIX_FMT_AMF_SURFACE
@ AV_PIX_FMT_AMF_SURFACE
HW acceleration through AMF.
Definition: pixfmt.h:477
AVFilterFormats
A list of supported formats for one end of a filter link.
Definition: formats.h:64
formats.h
amf_free_amfsurface
void amf_free_amfsurface(void *opaque, uint8_t *data)
Definition: vf_amf_common.c:366
AVFilterContext::priv
void * priv
private data for use by the filter
Definition: avfilter.h:272
fail
#define fail()
Definition: checkasm.h:193
AV_HWDEVICE_TYPE_D3D11VA
@ AV_HWDEVICE_TYPE_D3D11VA
Definition: hwcontext.h:35
av_av_to_amf_format
enum AMF_SURFACE_FORMAT av_av_to_amf_format(enum AVPixelFormat fmt)
Definition: hwcontext_amf.c:116
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:151
avassert.h
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:209
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVHWFramesContext::height
int height
Definition: hwcontext.h:218
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
AV_BUFFER_FLAG_READONLY
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
Definition: buffer.h:114
AVCOL_SPC_SMPTE170M
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
Definition: pixfmt.h:663
ff_formats_ref
int ff_formats_ref(AVFilterFormats *f, AVFilterFormats **ref)
Add *ref as a new reference to formats.
Definition: formats.c:678
vf_amf_common.h
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
AV_HWDEVICE_TYPE_AMF
@ AV_HWDEVICE_TYPE_AMF
Definition: hwcontext.h:41
AMFFilterContext
Definition: vf_amf_common.h:28
ctx
AVFormatContext * ctx
Definition: movenc.c:49
AMF_GOTO_FAIL_IF_FALSE
#define AMF_GOTO_FAIL_IF_FALSE(avctx, exp, ret_value,...)
Definition: hwcontext_amf_internal.h:34
hwcontext_amf.h
av_hwdevice_get_type_name
const char * av_hwdevice_get_type_name(enum AVHWDeviceType type)
Get the string name of an AVHWDeviceType.
Definition: hwcontext.c:116
if
if(ret)
Definition: filter_design.txt:179
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:211
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:726
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AV_HWDEVICE_TYPE_DXVA2
@ AV_HWDEVICE_TYPE_DXVA2
Definition: hwcontext.h:32
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:127
AVFilterContext::inputs
AVFilterLink ** inputs
array of pointers to input links
Definition: avfilter.h:265
double
double
Definition: af_crystalizer.c:132
avfilter_internal.h
AVCOL_RANGE_UNSPECIFIED
@ AVCOL_RANGE_UNSPECIFIED
Definition: pixfmt.h:699
index
int index
Definition: gxfenc.c:90
ff_filter_link
static FilterLink * ff_filter_link(AVFilterLink *link)
Definition: filters.h:197
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:122
scale_eval.h
amf_copy_surface
int amf_copy_surface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface *surface)
Definition: vf_amf_common.c:235
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
amf_filter_filter_frame
int amf_filter_filter_frame(AVFilterLink *inlink, AVFrame *in)
Definition: vf_amf_common.c:74
amf_filter_uninit
void amf_filter_uninit(AVFilterContext *avctx)
Definition: vf_amf_common.c:58
av_hwdevice_ctx_create_derived
int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr, enum AVHWDeviceType type, AVBufferRef *src_ref, int flags)
Create a new device of the specified type from an existing device.
Definition: hwcontext.c:707
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
AVCOL_SPC_BT2020_NCL
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:667
AVColorSpace
AVColorSpace
YUV colorspace type.
Definition: pixfmt.h:656
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:623
AVCOL_SPC_UNSPECIFIED
@ AVCOL_SPC_UNSPECIFIED
Definition: pixfmt.h:659
amf_filter_init
int amf_filter_init(AVFilterContext *avctx)
Definition: vf_amf_common.c:41
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:716
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:116
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:73
frame
Buffered frames must be flushed immediately if a new input produces new output; the filter must not call request_frame to get more input, but simply process or queue the frame. Filters with several inputs must be ready for frames arriving randomly on any input and will most likely need a queuing mechanism (a limited queue that drops frames when inputs are too unbalanced is acceptable). For filters that do not use the activate callback, request_frame is called when a frame is wanted on an output: a source should call filter_frame directly on that output; a filter with queued frames should push one, or request a frame on one of its inputs repeatedly until at least one frame has been pushed, returning once it has at least made progress towards producing a frame.
Definition: filter_design.txt:264
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:604
av_get_pix_fmt
enum AVPixelFormat av_get_pix_fmt(const char *name)
Return the pixel format corresponding to name.
Definition: pixdesc.c:3180
hwcontext_amf_internal.h
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
avfilter.h
amf_init_filter_config
int amf_init_filter_config(AVFilterLink *outlink, enum AVPixelFormat *in_format)
Definition: vf_amf_common.c:259
av_mul_q
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
AVFilterContext
An instance of a filter.
Definition: avfilter.h:257
mem.h
AVFilterFormatsConfig::formats
AVFilterFormats * formats
List of supported formats (pixel or sample).
Definition: avfilter.h:114
amf_amfsurface_to_avframe
AVFrame * amf_amfsurface_to_avframe(AVFilterContext *avctx, AMFSurface *pSurface)
Definition: vf_amf_common.c:372
imgutils.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
av_image_copy
void av_image_copy(uint8_t *const dst_data[4], const int dst_linesizes[4], const uint8_t *const src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Copy image in src_data to dst_data.
Definition: imgutils.c:422
AVCOL_SPC_BT709
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
Definition: pixfmt.h:658
AVColorRange
AVColorRange
Visual content value range.
Definition: pixfmt.h:698
av_hwframe_get_buffer
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
Definition: hwcontext.c:495
ff_scale_adjust_dimensions
int ff_scale_adjust_dimensions(AVFilterLink *inlink, int *ret_w, int *ret_h, int force_original_aspect_ratio, int force_divisible_by, double w_adj)
Transform evaluated width and height obtained from ff_scale_eval_dimensions into actual target width ...
Definition: scale_eval.c:113
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3168
AVFilterContext::outputs
AVFilterLink ** outputs
array of pointers to output links
Definition: avfilter.h:269