FFmpeg
hwcontext_qsv.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include <stdatomic.h>
20 #include <stdint.h>
21 #include <string.h>
22 
23 #include <mfxvideo.h>
24 
25 #include "config.h"
26 
27 #if HAVE_PTHREADS
28 #include <pthread.h>
29 #endif
30 
31 #define COBJMACROS
32 #if CONFIG_VAAPI
33 #include "hwcontext_vaapi.h"
34 #endif
35 #if CONFIG_D3D11VA
36 #include "hwcontext_d3d11va.h"
37 #endif
38 #if CONFIG_DXVA2
39 #include "hwcontext_dxva2.h"
40 #endif
41 
42 #include "buffer.h"
43 #include "common.h"
44 #include "hwcontext.h"
45 #include "hwcontext_internal.h"
46 #include "hwcontext_qsv.h"
47 #include "mem.h"
48 #include "pixfmt.h"
49 #include "pixdesc.h"
50 #include "time.h"
51 #include "imgutils.h"
52 #include "avassert.h"
53 
54 #define QSV_VERSION_ATLEAST(MAJOR, MINOR) \
55  (MFX_VERSION_MAJOR > (MAJOR) || \
56  MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR))
57 
58 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
59 #define QSV_ONEVPL QSV_VERSION_ATLEAST(2, 0)
60 #define QSV_HAVE_OPAQUE !QSV_ONEVPL
61 
62 #if QSV_ONEVPL
63 #include <mfxdispatcher.h>
64 #else
65 #define MFXUnload(a) do { } while(0)
66 #endif
67 
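/*
 * QSV_ONEVPL selects the oneVPL (API >= 2.0) code paths below, where sessions
 * are created through the mfxLoader dispatcher from <mfxdispatcher.h>. Opaque
 * surface allocation only exists in the legacy Media SDK API, hence
 * QSV_HAVE_OPAQUE is simply the negation of QSV_ONEVPL.
 */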
68 typedef struct QSVDevicePriv {
69  AVBufferRef *child_device_ctx;
70 } QSVDevicePriv;
71 
72 typedef struct QSVDeviceContext {
73  mfxHDL handle;
74  mfxHandleType handle_type;
75  mfxVersion ver;
76  mfxIMPL impl;
77 
78  enum AVHWDeviceType child_device_type;
79  enum AVPixelFormat child_pix_fmt;
80 } QSVDeviceContext;
81 
82 typedef struct QSVFramesContext {
83  mfxSession session_download;
84  atomic_int session_download_init;
85  mfxSession session_upload;
86  atomic_int session_upload_init;
87 #if HAVE_PTHREADS
88  pthread_mutex_t session_lock;
89 #endif
90 
91  AVBufferRef *child_frames_ref;
92  mfxFrameSurface1 *surfaces_internal;
93  mfxHDLPair *handle_pairs_internal;
94  int nb_surfaces_used;
95 
96  // used in the frame allocator for non-opaque surfaces
97  mfxMemId *mem_ids;
98 #if QSV_HAVE_OPAQUE
99  // used in the opaque alloc request for opaque surfaces
100  mfxFrameSurface1 **surface_ptrs;
101 
102  mfxExtOpaqueSurfaceAlloc opaque_alloc;
103  mfxExtBuffer *ext_buffers[1];
104 #endif
105  AVFrame realigned_upload_frame;
106  AVFrame realigned_download_frame;
107 } QSVFramesContext;
108 
109 static const struct {
110  enum AVPixelFormat pix_fmt;
111  uint32_t fourcc;
112  uint16_t mfx_shift;
113 } supported_pixel_formats[] = {
114  { AV_PIX_FMT_NV12, MFX_FOURCC_NV12, 0 },
115  { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4, 0 },
116  { AV_PIX_FMT_P010, MFX_FOURCC_P010, 1 },
117  { AV_PIX_FMT_PAL8, MFX_FOURCC_P8, 0 },
118 #if CONFIG_VAAPI
119  { AV_PIX_FMT_YUYV422,
120  MFX_FOURCC_YUY2, 0 },
121  { AV_PIX_FMT_UYVY422,
122  MFX_FOURCC_UYVY, 0 },
123  { AV_PIX_FMT_Y210,
124  MFX_FOURCC_Y210, 1 },
125  // VUYX is used for VAAPI child device,
126  // the SDK only declares support for AYUV
127  { AV_PIX_FMT_VUYX,
128  MFX_FOURCC_AYUV, 0 },
129  // XV30 is used for VAAPI child device,
130  // the SDK only declares support for Y410
131  { AV_PIX_FMT_XV30,
132  MFX_FOURCC_Y410, 0 },
133 #if QSV_VERSION_ATLEAST(1, 31)
134  // P012 is used for VAAPI child device,
135  // the SDK only declares support for P016
136  { AV_PIX_FMT_P012,
137  MFX_FOURCC_P016, 1 },
138  // Y212 is used for VAAPI child device,
139  // the SDK only declares support for Y216
140  { AV_PIX_FMT_Y212,
141  MFX_FOURCC_Y216, 1 },
142  // XV36 is used for VAAPI child device,
143  // the SDK only declares support for Y416
144  { AV_PIX_FMT_XV36,
145  MFX_FOURCC_Y416, 1 },
146 #endif
147 #endif
148 };
149 
150 extern int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
151  enum AVHWDeviceType base_dev_type,
152  void **base_handle);
153 
154 /**
155  * Caller needs to allocate enough space for base_handle pointer.
156  **/
157 int ff_qsv_get_surface_base_handle(mfxFrameSurface1 *surf,
158  enum AVHWDeviceType base_dev_type,
159  void **base_handle)
160 {
161  mfxHDLPair *handle_pair;
162  handle_pair = surf->Data.MemId;
163  switch (base_dev_type) {
164 #if CONFIG_VAAPI
165  case AV_HWDEVICE_TYPE_VAAPI:
166  base_handle[0] = handle_pair->first;
167  return 0;
168 #endif
169 #if CONFIG_D3D11VA
170  case AV_HWDEVICE_TYPE_D3D11VA:
171  base_handle[0] = handle_pair->first;
172  base_handle[1] = handle_pair->second;
173  return 0;
174 #endif
175 #if CONFIG_DXVA2
176  case AV_HWDEVICE_TYPE_DXVA2:
177  base_handle[0] = handle_pair->first;
178  return 0;
179 #endif
180  }
181  return AVERROR(EINVAL);
182 }
183 
184 static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
185 {
186  int i;
187  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
188  if (supported_pixel_formats[i].pix_fmt == pix_fmt)
189  return supported_pixel_formats[i].fourcc;
190  }
191  return 0;
192 }
193 
194 static uint16_t qsv_shift_from_pix_fmt(enum AVPixelFormat pix_fmt)
195 {
196  for (int i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
197  if (supported_pixel_formats[i].pix_fmt == pix_fmt)
198  return supported_pixel_formats[i].mfx_shift;
199  }
200 
201  return 0;
202 }
203 
204 #if CONFIG_D3D11VA
205 static uint32_t qsv_get_d3d11va_bind_flags(int mem_type)
206 {
207  uint32_t bind_flags = 0;
208 
209  if ((mem_type & MFX_MEMTYPE_VIDEO_MEMORY_ENCODER_TARGET) && (mem_type & MFX_MEMTYPE_INTERNAL_FRAME))
210  bind_flags = D3D11_BIND_DECODER | D3D11_BIND_VIDEO_ENCODER;
211  else
212  bind_flags = D3D11_BIND_DECODER;
213 
214  if ((MFX_MEMTYPE_FROM_VPPOUT & mem_type) || (MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET & mem_type))
215  bind_flags = D3D11_BIND_RENDER_TARGET;
216 
217  return bind_flags;
218 }
219 #endif
220 
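/*
 * qsv_fill_border(): dst is a copy of src that has been padded out to the
 * 16-aligned dimensions libmfx expects. Replicate the last source pixel of
 * each row into the right padding and the last source row into the bottom
 * padding, so the upload path never hands uninitialized bytes to the VPP.
 */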
221 static int qsv_fill_border(AVFrame *dst, const AVFrame *src)
222 {
223  const AVPixFmtDescriptor *desc;
224  int i, planes_nb = 0;
225  if (dst->format != src->format)
226  return AVERROR(EINVAL);
227 
228  desc = av_pix_fmt_desc_get(dst->format);
229 
230  for (i = 0; i < desc->nb_components; i++)
231  planes_nb = FFMAX(planes_nb, desc->comp[i].plane + 1);
232 
233  for (i = 0; i < planes_nb; i++) {
234  int sheight, dheight, y;
235  ptrdiff_t swidth = av_image_get_linesize(src->format,
236  src->width,
237  i);
238  ptrdiff_t dwidth = av_image_get_linesize(dst->format,
239  dst->width,
240  i);
241  const AVComponentDescriptor comp = desc->comp[i];
242  if (swidth < 0 || dwidth < 0) {
243  av_log(NULL, AV_LOG_ERROR, "av_image_get_linesize failed\n");
244  return AVERROR(EINVAL);
245  }
246  sheight = src->height;
247  dheight = dst->height;
248  if (i) {
249  sheight = AV_CEIL_RSHIFT(src->height, desc->log2_chroma_h);
250  dheight = AV_CEIL_RSHIFT(dst->height, desc->log2_chroma_h);
251  }
252  //fill right padding
253  for (y = 0; y < sheight; y++) {
254  void *line_ptr = dst->data[i] + y*dst->linesize[i] + swidth;
255  av_memcpy_backptr(line_ptr,
256  comp.depth > 8 ? 2 : 1,
257  dwidth - swidth);
258  }
259  //fill bottom padding
260  for (y = sheight; y < dheight; y++) {
261  memcpy(dst->data[i]+y*dst->linesize[i],
262  dst->data[i]+(sheight-1)*dst->linesize[i],
263  dwidth);
264  }
265  }
266  return 0;
267 }
268 
269 static int qsv_device_init(AVHWDeviceContext *ctx)
270 {
271  AVQSVDeviceContext *hwctx = ctx->hwctx;
272  QSVDeviceContext *s = ctx->internal->priv;
273  int hw_handle_supported = 0;
274  mfxHandleType handle_type;
275  enum AVHWDeviceType device_type;
276  enum AVPixelFormat pix_fmt;
277  mfxStatus err;
278 
279  err = MFXQueryIMPL(hwctx->session, &s->impl);
280  if (err == MFX_ERR_NONE)
281  err = MFXQueryVersion(hwctx->session, &s->ver);
282  if (err != MFX_ERR_NONE) {
283  av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
284  return AVERROR_UNKNOWN;
285  }
286 
287  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(s->impl)) {
288 #if CONFIG_VAAPI
289  handle_type = MFX_HANDLE_VA_DISPLAY;
290  device_type = AV_HWDEVICE_TYPE_VAAPI;
291  pix_fmt = AV_PIX_FMT_VAAPI;
292  hw_handle_supported = 1;
293 #endif
294  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(s->impl)) {
295 #if CONFIG_D3D11VA
296  handle_type = MFX_HANDLE_D3D11_DEVICE;
297  device_type = AV_HWDEVICE_TYPE_D3D11VA;
298  pix_fmt = AV_PIX_FMT_D3D11;
299  hw_handle_supported = 1;
300 #endif
301  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(s->impl)) {
302 #if CONFIG_DXVA2
303  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
304  device_type = AV_HWDEVICE_TYPE_DXVA2;
305  pix_fmt = AV_PIX_FMT_DXVA2_VLD;
306  hw_handle_supported = 1;
307 #endif
308  }
309 
310  if (hw_handle_supported) {
311  err = MFXVideoCORE_GetHandle(hwctx->session, handle_type, &s->handle);
312  if (err == MFX_ERR_NONE) {
313  s->handle_type = handle_type;
314  s->child_device_type = device_type;
315  s->child_pix_fmt = pix_fmt;
316  }
317  }
318  if (!s->handle) {
319  av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
320  "from the session\n");
321  }
322  return 0;
323 }
324 
325 static void qsv_frames_uninit(AVHWFramesContext *ctx)
326 {
327  QSVFramesContext *s = ctx->internal->priv;
328 
329  if (s->session_download) {
330  MFXVideoVPP_Close(s->session_download);
331  MFXClose(s->session_download);
332  }
333  s->session_download = NULL;
334  s->session_download_init = 0;
335 
336  if (s->session_upload) {
337  MFXVideoVPP_Close(s->session_upload);
338  MFXClose(s->session_upload);
339  }
340  s->session_upload = NULL;
341  s->session_upload_init = 0;
342 
343 #if HAVE_PTHREADS
344  pthread_mutex_destroy(&s->session_lock);
345 #endif
346 
347  av_freep(&s->mem_ids);
348 #if QSV_HAVE_OPAQUE
349  av_freep(&s->surface_ptrs);
350 #endif
351  av_freep(&s->surfaces_internal);
352  av_freep(&s->handle_pairs_internal);
353  av_frame_unref(&s->realigned_upload_frame);
354  av_frame_unref(&s->realigned_download_frame);
355  av_buffer_unref(&s->child_frames_ref);
356 }
357 
358 static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
359 {
360 }
361 
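/*
 * The internal buffer pool hands out pointers into the pre-allocated
 * surfaces_internal array. The buffers use a no-op free callback
 * (qsv_pool_release_dummy above), so the surfaces remain owned by the
 * frames context and the pool never grows beyond nb_surfaces.
 */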
362 static AVBufferRef *qsv_pool_alloc(void *opaque, size_t size)
363 {
364  AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
365  QSVFramesContext *s = ctx->internal->priv;
366  AVQSVFramesContext *hwctx = ctx->hwctx;
367 
368  if (s->nb_surfaces_used < hwctx->nb_surfaces) {
369  s->nb_surfaces_used++;
370  return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
371  sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
372  }
373 
374  return NULL;
375 }
376 
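/*
 * Build a "child" frames context (VAAPI, D3D11VA or DXVA2, depending on the
 * handle the session exposes) that actually owns the video memory, then wrap
 * each child surface in an mfxHDLPair and an mfxFrameSurface1 so libmfx can
 * address it. hwctx->frame_type is derived from the child allocation.
 */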
377 static int qsv_init_child_ctx(AVHWFramesContext *ctx)
378 {
379  AVQSVFramesContext *hwctx = ctx->hwctx;
380  QSVFramesContext *s = ctx->internal->priv;
381  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
382 
383  AVBufferRef *child_device_ref = NULL;
384  AVBufferRef *child_frames_ref = NULL;
385 
386  AVHWDeviceContext *child_device_ctx;
387  AVHWFramesContext *child_frames_ctx;
388 
389  int i, ret = 0;
390 
391  if (!device_priv->handle) {
392  av_log(ctx, AV_LOG_ERROR,
393  "Cannot create a non-opaque internal surface pool without "
394  "a hardware handle\n");
395  return AVERROR(EINVAL);
396  }
397 
398  child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
399  if (!child_device_ref)
400  return AVERROR(ENOMEM);
401  child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
402 
403 #if CONFIG_VAAPI
404  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
405  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
406  child_device_hwctx->display = (VADisplay)device_priv->handle;
407  }
408 #endif
409 #if CONFIG_D3D11VA
410  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
411  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
412  ID3D11Device_AddRef((ID3D11Device*)device_priv->handle);
413  child_device_hwctx->device = (ID3D11Device*)device_priv->handle;
414  }
415 #endif
416 #if CONFIG_DXVA2
417  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
418  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
419  child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
420  }
421 #endif
422 
423  ret = av_hwdevice_ctx_init(child_device_ref);
424  if (ret < 0) {
425  av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
426  goto fail;
427  }
428 
429  child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
430  if (!child_frames_ref) {
431  ret = AVERROR(ENOMEM);
432  goto fail;
433  }
434  child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
435 
436  child_frames_ctx->format = device_priv->child_pix_fmt;
437  child_frames_ctx->sw_format = ctx->sw_format;
438  child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
439  child_frames_ctx->width = FFALIGN(ctx->width, 16);
440  child_frames_ctx->height = FFALIGN(ctx->height, 16);
441 
442 #if CONFIG_D3D11VA
443  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
444  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
445  if (hwctx->frame_type == 0)
446  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
447  if (hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
448  child_frames_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
449  child_frames_hwctx->BindFlags = qsv_get_d3d11va_bind_flags(hwctx->frame_type);
450  }
451 #endif
452 #if CONFIG_DXVA2
453  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
454  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
455  if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
456  child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
457  else
458  child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
459  }
460 #endif
461 
462  ret = av_hwframe_ctx_init(child_frames_ref);
463  if (ret < 0) {
464  av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
465  goto fail;
466  }
467 
468 #if CONFIG_VAAPI
469  if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
470  AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
471  for (i = 0; i < ctx->initial_pool_size; i++) {
472  s->handle_pairs_internal[i].first = child_frames_hwctx->surface_ids + i;
473  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
474  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
475  }
476  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
477  }
478 #endif
479 #if CONFIG_D3D11VA
480  if (child_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
481  AVD3D11VAFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
482  for (i = 0; i < ctx->initial_pool_size; i++) {
483  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->texture_infos[i].texture;
484  if(child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
485  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
486  } else {
487  s->handle_pairs_internal[i].second = (mfxMemId)child_frames_hwctx->texture_infos[i].index;
488  }
489  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
490  }
491  if (child_frames_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
492  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
493  } else {
494  hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
495  }
496  }
497 #endif
498 #if CONFIG_DXVA2
499  if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
500  AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
501  for (i = 0; i < ctx->initial_pool_size; i++) {
502  s->handle_pairs_internal[i].first = (mfxMemId)child_frames_hwctx->surfaces[i];
503  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
504  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
505  }
506  if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
507  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
508  else
509  hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
510  }
511 #endif
512 
513  s->child_frames_ref = child_frames_ref;
514  child_frames_ref = NULL;
515 
516 fail:
517  av_buffer_unref(&child_device_ref);
518  av_buffer_unref(&child_frames_ref);
519  return ret;
520 }
521 
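/*
 * Fill an mfxFrameSurface1.Info from the frames context: bit depth, shift and
 * chroma format come from the sw_format descriptor, Width/Height are aligned
 * to 16 with CropW/CropH holding the real dimensions, and a nominal 25/1
 * frame rate is set because libmfx requires one.
 */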
522 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
523 {
524  const AVPixFmtDescriptor *desc;
525  uint32_t fourcc;
526 
527  desc = av_pix_fmt_desc_get(ctx->sw_format);
528  if (!desc)
529  return AVERROR(EINVAL);
530 
531  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
532  if (!fourcc)
533  return AVERROR(EINVAL);
534 
535  surf->Info.BitDepthLuma = desc->comp[0].depth;
536  surf->Info.BitDepthChroma = desc->comp[0].depth;
537  surf->Info.Shift = qsv_shift_from_pix_fmt(ctx->sw_format);
538 
539  if (desc->log2_chroma_w && desc->log2_chroma_h)
540  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
541  else if (desc->log2_chroma_w)
542  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
543  else
544  surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
545 
546  surf->Info.FourCC = fourcc;
547  surf->Info.Width = FFALIGN(ctx->width, 16);
548  surf->Info.CropW = ctx->width;
549  surf->Info.Height = FFALIGN(ctx->height, 16);
550  surf->Info.CropH = ctx->height;
551  surf->Info.FrameRateExtN = 25;
552  surf->Info.FrameRateExtD = 1;
553  surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
554 
555  return 0;
556 }
557 
558 static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
559 {
560  QSVFramesContext *s = ctx->internal->priv;
561  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
562 
563  int i, ret = 0;
564 
565  if (ctx->initial_pool_size <= 0) {
566  av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
567  return AVERROR(EINVAL);
568  }
569 
570  s->handle_pairs_internal = av_calloc(ctx->initial_pool_size,
571  sizeof(*s->handle_pairs_internal));
572  if (!s->handle_pairs_internal)
573  return AVERROR(ENOMEM);
574 
575  s->surfaces_internal = av_calloc(ctx->initial_pool_size,
576  sizeof(*s->surfaces_internal));
577  if (!s->surfaces_internal)
578  return AVERROR(ENOMEM);
579 
580  for (i = 0; i < ctx->initial_pool_size; i++) {
581  ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
582  if (ret < 0)
583  return ret;
584  }
585 
586 #if QSV_HAVE_OPAQUE
587  if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
588  ret = qsv_init_child_ctx(ctx);
589  if (ret < 0)
590  return ret;
591  }
592 #else
593  ret = qsv_init_child_ctx(ctx);
594  if (ret < 0)
595  return ret;
596 #endif
597 
598  ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
599  ctx, qsv_pool_alloc, NULL);
600  if (!ctx->internal->pool_internal)
601  return AVERROR(ENOMEM);
602 
603  frames_hwctx->surfaces = s->surfaces_internal;
604  frames_hwctx->nb_surfaces = ctx->initial_pool_size;
605 
606  return 0;
607 }
608 
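/*
 * mfxFrameAllocator callbacks used by the internal upload/download VPP
 * sessions. Alloc only accepts external VPP requests that match the frames
 * context and simply hands back the pre-built mem_ids array; Lock/Unlock are
 * unsupported (the surfaces live in video memory); GetHDL returns the stored
 * mfxHDLPair for a given MemId.
 */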
609 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
610  mfxFrameAllocResponse *resp)
611 {
612  AVHWFramesContext *ctx = pthis;
613  QSVFramesContext *s = ctx->internal->priv;
614  AVQSVFramesContext *hwctx = ctx->hwctx;
615  mfxFrameInfo *i = &req->Info;
616  mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
617 
618  if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
619  !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
620  !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
621  return MFX_ERR_UNSUPPORTED;
622  if (i->Width > i1->Width || i->Height > i1->Height ||
623  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
624  av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
625  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
626  i->Width, i->Height, i->FourCC, i->ChromaFormat,
627  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
628  return MFX_ERR_UNSUPPORTED;
629  }
630 
631  resp->mids = s->mem_ids;
632  resp->NumFrameActual = hwctx->nb_surfaces;
633 
634  return MFX_ERR_NONE;
635 }
636 
637 static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
638 {
639  return MFX_ERR_NONE;
640 }
641 
642 static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
643 {
644  return MFX_ERR_UNSUPPORTED;
645 }
646 
647 static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
648 {
649  return MFX_ERR_UNSUPPORTED;
650 }
651 
652 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
653 {
654  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
655  mfxHDLPair *pair_src = (mfxHDLPair*)mid;
656 
657  pair_dst->first = pair_src->first;
658 
659  if (pair_src->second != (mfxMemId)MFX_INFINITE)
660  pair_dst->second = pair_src->second;
661  return MFX_ERR_NONE;
662 }
663 
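/*
 * Session creation. On oneVPL an mfxLoader is configured with filter
 * properties (implementation type, API version, Intel vendor ID, and the
 * DeviceID/LUID taken from the underlying VAAPI/D3D9/D3D11 device) so the
 * dispatcher selects an implementation matching the device already in use;
 * on legacy Media SDK a session is created directly with MFXInit().
 */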
664 #if QSV_ONEVPL
665 
666 static int qsv_d3d11_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
667 {
668 #if CONFIG_D3D11VA
669  mfxStatus sts;
670  IDXGIAdapter *pDXGIAdapter;
671  DXGI_ADAPTER_DESC adapterDesc;
672  IDXGIDevice *pDXGIDevice = NULL;
673  HRESULT hr;
674  ID3D11Device *device = handle;
675  mfxVariant impl_value;
676 
677  hr = ID3D11Device_QueryInterface(device, &IID_IDXGIDevice, (void**)&pDXGIDevice);
678  if (SUCCEEDED(hr)) {
679  hr = IDXGIDevice_GetAdapter(pDXGIDevice, &pDXGIAdapter);
680  if (FAILED(hr)) {
681  av_log(ctx, AV_LOG_ERROR, "Error IDXGIDevice_GetAdapter %d\n", hr);
682  goto fail;
683  }
684 
685  hr = IDXGIAdapter_GetDesc(pDXGIAdapter, &adapterDesc);
686  if (FAILED(hr)) {
687  av_log(ctx, AV_LOG_ERROR, "Error IDXGIAdapter_GetDesc %d\n", hr);
688  goto fail;
689  }
690  } else {
691  av_log(ctx, AV_LOG_ERROR, "Error ID3D11Device_QueryInterface %d\n", hr);
692  goto fail;
693  }
694 
695  impl_value.Type = MFX_VARIANT_TYPE_U16;
696  impl_value.Data.U16 = adapterDesc.DeviceId;
697  sts = MFXSetConfigFilterProperty(cfg,
698  (const mfxU8 *)"mfxExtendedDeviceId.DeviceID", impl_value);
699  if (sts != MFX_ERR_NONE) {
700  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
701  "DeviceID property: %d.\n", sts);
702  goto fail;
703  }
704 
705  impl_value.Type = MFX_VARIANT_TYPE_PTR;
706  impl_value.Data.Ptr = &adapterDesc.AdapterLuid;
707  sts = MFXSetConfigFilterProperty(cfg,
708  (const mfxU8 *)"mfxExtendedDeviceId.DeviceLUID", impl_value);
709  if (sts != MFX_ERR_NONE) {
710  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
711  "DeviceLUID property: %d.\n", sts);
712  goto fail;
713  }
714 
715  impl_value.Type = MFX_VARIANT_TYPE_U32;
716  impl_value.Data.U32 = 0x0001;
717  sts = MFXSetConfigFilterProperty(cfg,
718  (const mfxU8 *)"mfxExtendedDeviceId.LUIDDeviceNodeMask", impl_value);
719  if (sts != MFX_ERR_NONE) {
720  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
721  "LUIDDeviceNodeMask property: %d.\n", sts);
722  goto fail;
723  }
724 
725  return 0;
726 
727 fail:
728 #endif
729  return AVERROR_UNKNOWN;
730 }
731 
732 static int qsv_d3d9_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
733 {
734  int ret = AVERROR_UNKNOWN;
735 #if CONFIG_DXVA2
736  mfxStatus sts;
737  IDirect3DDeviceManager9* devmgr = handle;
738  IDirect3DDevice9Ex *device = NULL;
739  HANDLE device_handle = 0;
740  IDirect3D9Ex *d3d9ex = NULL;
741  LUID luid;
742  D3DDEVICE_CREATION_PARAMETERS params;
743  HRESULT hr;
744  mfxVariant impl_value;
745 
746  hr = IDirect3DDeviceManager9_OpenDeviceHandle(devmgr, &device_handle);
747  if (FAILED(hr)) {
748  av_log(ctx, AV_LOG_ERROR, "Error OpenDeviceHandle %d\n", hr);
749  goto fail;
750  }
751 
752  hr = IDirect3DDeviceManager9_LockDevice(devmgr, device_handle, &device, TRUE);
753  if (FAILED(hr)) {
754  av_log(ctx, AV_LOG_ERROR, "Error LockDevice %d\n", hr);
755  goto fail;
756  }
757 
758  hr = IDirect3DDevice9Ex_GetCreationParameters(device, &params);
759  if (FAILED(hr)) {
760  av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9_GetCreationParameters %d\n", hr);
761  goto unlock;
762  }
763 
764  hr = IDirect3DDevice9Ex_GetDirect3D(device, &d3d9ex);
765  if (FAILED(hr)) {
766  av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9Ex_GetAdapterLUID %d\n", hr);
767  goto unlock;
768  }
769 
770  hr = IDirect3D9Ex_GetAdapterLUID(d3d9ex, params.AdapterOrdinal, &luid);
771  if (FAILED(hr)) {
772  av_log(ctx, AV_LOG_ERROR, "Error IDirect3DDevice9Ex_GetAdapterLUID %d\n", hr);
773  goto unlock;
774  }
775 
776  impl_value.Type = MFX_VARIANT_TYPE_PTR;
777  impl_value.Data.Ptr = &luid;
778  sts = MFXSetConfigFilterProperty(cfg,
779  (const mfxU8 *)"mfxExtendedDeviceId.DeviceLUID", impl_value);
780  if (sts != MFX_ERR_NONE) {
781  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
782  "DeviceLUID property: %d.\n", sts);
783  goto unlock;
784  }
785 
786  ret = 0;
787 
788 unlock:
789  IDirect3DDeviceManager9_UnlockDevice(devmgr, device_handle, FALSE);
790 fail:
791 #endif
792  return ret;
793 }
794 
795 static int qsv_va_update_config(void *ctx, mfxHDL handle, mfxConfig cfg)
796 {
797 #if CONFIG_VAAPI
798 #if VA_CHECK_VERSION(1, 15, 0)
799  mfxStatus sts;
800  VADisplay dpy = handle;
801  VAStatus vas;
802  VADisplayAttribute attr = {
803  .type = VADisplayPCIID,
804  };
805  mfxVariant impl_value;
806 
807  vas = vaGetDisplayAttributes(dpy, &attr, 1);
808  if (vas == VA_STATUS_SUCCESS && attr.flags != VA_DISPLAY_ATTRIB_NOT_SUPPORTED) {
809  impl_value.Type = MFX_VARIANT_TYPE_U16;
810  impl_value.Data.U16 = (attr.value & 0xFFFF);
811  sts = MFXSetConfigFilterProperty(cfg,
812  (const mfxU8 *)"mfxExtendedDeviceId.DeviceID", impl_value);
813  if (sts != MFX_ERR_NONE) {
814  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
815  "DeviceID property: %d.\n", sts);
816  goto fail;
817  }
818  } else {
819  av_log(ctx, AV_LOG_ERROR, "libva: Failed to get device id from the driver. Please "
820  "consider to upgrade the driver to support VA-API 1.15.0\n");
821  goto fail;
822  }
823 
824  return 0;
825 
826 fail:
827 #else
828  av_log(ctx, AV_LOG_ERROR, "libva: This version of libva doesn't support retrieving "
829  "the device information from the driver. Please consider to upgrade libva to "
830  "support VA-API 1.15.0\n");
831 #endif
832 #endif
833  return AVERROR_UNKNOWN;
834 }
835 
836 static int qsv_new_mfx_loader(void *ctx,
837  mfxHDL handle,
838  mfxHandleType handle_type,
839  mfxIMPL implementation,
840  mfxVersion *pver,
841  void **ploader)
842 {
843  mfxStatus sts;
844  mfxLoader loader = NULL;
845  mfxConfig cfg;
846  mfxVariant impl_value;
847 
848  *ploader = NULL;
849  loader = MFXLoad();
850  if (!loader) {
851  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX loader\n");
852  goto fail;
853  }
854 
855  /* Create configurations for implementation */
856  cfg = MFXCreateConfig(loader);
857  if (!cfg) {
858  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX configuration\n");
859  goto fail;
860  }
861 
862  impl_value.Type = MFX_VARIANT_TYPE_U32;
863  impl_value.Data.U32 = (implementation == MFX_IMPL_SOFTWARE) ?
864  MFX_IMPL_TYPE_SOFTWARE : MFX_IMPL_TYPE_HARDWARE;
865  sts = MFXSetConfigFilterProperty(cfg,
866  (const mfxU8 *)"mfxImplDescription.Impl", impl_value);
867  if (sts != MFX_ERR_NONE) {
868  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
869  "property: %d.\n", sts);
870  goto fail;
871  }
872 
873  impl_value.Type = MFX_VARIANT_TYPE_U32;
874  impl_value.Data.U32 = pver->Version;
875  sts = MFXSetConfigFilterProperty(cfg,
876  (const mfxU8 *)"mfxImplDescription.ApiVersion.Version",
877  impl_value);
878  if (sts != MFX_ERR_NONE) {
879  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration "
880  "property: %d.\n", sts);
881  goto fail;
882  }
883 
884  impl_value.Type = MFX_VARIANT_TYPE_U16;
885  impl_value.Data.U16 = 0x8086; // Intel device only
886  sts = MFXSetConfigFilterProperty(cfg,
887  (const mfxU8 *)"mfxExtendedDeviceId.VendorID", impl_value);
888  if (sts != MFX_ERR_NONE) {
889  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
890  "VendorID property: %d.\n", sts);
891  goto fail;
892  }
893 
894  if (MFX_HANDLE_VA_DISPLAY == handle_type) {
895  if (handle && qsv_va_update_config(ctx, handle, cfg))
896  goto fail;
897 
898  impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_VAAPI;
899  } else if (MFX_HANDLE_D3D9_DEVICE_MANAGER == handle_type) {
900  if (handle && qsv_d3d9_update_config(ctx, handle, cfg))
901  goto fail;
902 
903  impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_D3D9;
904  } else {
905  if (handle && qsv_d3d11_update_config(ctx, handle, cfg))
906  goto fail;
907 
908  impl_value.Data.U32 = MFX_ACCEL_MODE_VIA_D3D11;
909  }
910 
911  impl_value.Type = MFX_VARIANT_TYPE_U32;
912  sts = MFXSetConfigFilterProperty(cfg,
913  (const mfxU8 *)"mfxImplDescription.AccelerationMode", impl_value);
914  if (sts != MFX_ERR_NONE) {
915  av_log(ctx, AV_LOG_ERROR, "Error adding a MFX configuration"
916  "AccelerationMode property: %d.\n", sts);
917  goto fail;
918  }
919 
920  *ploader = loader;
921 
922  return 0;
923 
924 fail:
925  if (loader)
926  MFXUnload(loader);
927 
928  return AVERROR_UNKNOWN;
929 }
930 
931 static int qsv_create_mfx_session_from_loader(void *ctx, mfxLoader loader, mfxSession *psession)
932 {
933  mfxStatus sts;
934  mfxSession session = NULL;
935  uint32_t impl_idx = 0;
936  mfxVersion ver;
937 
938  while (1) {
939  /* Enumerate all implementations */
940  mfxImplDescription *impl_desc;
941 
942  sts = MFXEnumImplementations(loader, impl_idx,
943  MFX_IMPLCAPS_IMPLDESCSTRUCTURE,
944  (mfxHDL *)&impl_desc);
945  /* Failed to find an available implementation */
946  if (sts == MFX_ERR_NOT_FOUND)
947  break;
948  else if (sts != MFX_ERR_NONE) {
949  impl_idx++;
950  continue;
951  }
952 
953  sts = MFXCreateSession(loader, impl_idx, &session);
954  MFXDispReleaseImplDescription(loader, impl_desc);
955  if (sts == MFX_ERR_NONE)
956  break;
957 
958  impl_idx++;
959  }
960 
961  if (sts != MFX_ERR_NONE) {
962  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX session: %d.\n", sts);
963  goto fail;
964  }
965 
966  sts = MFXQueryVersion(session, &ver);
967  if (sts != MFX_ERR_NONE) {
968  av_log(ctx, AV_LOG_ERROR, "Error querying a MFX session: %d.\n", sts);
969  goto fail;
970  }
971 
972  av_log(ctx, AV_LOG_VERBOSE, "Initialize MFX session: implementation "
973  "version is %d.%d\n", ver.Major, ver.Minor);
974 
975  *psession = session;
976 
977  return 0;
978 
979 fail:
980  if (session)
981  MFXClose(session);
982 
983  return AVERROR_UNKNOWN;
984 }
985 
986 static int qsv_create_mfx_session(void *ctx,
987  mfxHDL handle,
988  mfxHandleType handle_type,
989  mfxIMPL implementation,
990  mfxVersion *pver,
991  mfxSession *psession,
992  void **ploader)
993 {
994  mfxLoader loader = NULL;
995 
996  av_log(ctx, AV_LOG_VERBOSE,
997  "Use Intel(R) oneVPL to create MFX session, API version is "
998  "%d.%d, the required implementation version is %d.%d\n",
999  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, pver->Major, pver->Minor);
1000 
1001  if (handle_type != MFX_HANDLE_VA_DISPLAY &&
1002  handle_type != MFX_HANDLE_D3D9_DEVICE_MANAGER &&
1003  handle_type != MFX_HANDLE_D3D11_DEVICE) {
1004  av_log(ctx, AV_LOG_ERROR,
1005  "Invalid MFX device handle type\n");
1006  return AVERROR(EXDEV);
1007  }
1008 
1009  *psession = NULL;
1010 
1011  if (!*ploader) {
1012  if (qsv_new_mfx_loader(ctx, handle, handle_type, implementation, pver, (void **)&loader))
1013  goto fail;
1014 
1015  av_assert0(loader);
1016  } else
1017  loader = *ploader; // Use the input mfxLoader to create mfx session
1018 
1019  if (qsv_create_mfx_session_from_loader(ctx, loader, psession))
1020  goto fail;
1021 
1022  if (!*ploader)
1023  *ploader = loader;
1024 
1025  return 0;
1026 
1027 fail:
1028  if (!*ploader && loader)
1029  MFXUnload(loader);
1030 
1031  return AVERROR_UNKNOWN;
1032 }
1033 
1034 #else
1035 
1036 static int qsv_create_mfx_session(void *ctx,
1037  mfxHDL handle,
1038  mfxHandleType handle_type,
1039  mfxIMPL implementation,
1040  mfxVersion *pver,
1041  mfxSession *psession,
1042  void **ploader)
1043 {
1044  mfxVersion ver;
1045  mfxStatus sts;
1046  mfxSession session = NULL;
1047 
1048  av_log(ctx, AV_LOG_VERBOSE,
1049  "Use Intel(R) Media SDK to create MFX session, API version is "
1050  "%d.%d, the required implementation version is %d.%d\n",
1051  MFX_VERSION_MAJOR, MFX_VERSION_MINOR, pver->Major, pver->Minor);
1052 
1053  *ploader = NULL;
1054  *psession = NULL;
1055  ver = *pver;
1056  sts = MFXInit(implementation, &ver, &session);
1057  if (sts != MFX_ERR_NONE) {
1058  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1059  "%d.\n", sts);
1060  goto fail;
1061  }
1062 
1063  sts = MFXQueryVersion(session, &ver);
1064  if (sts != MFX_ERR_NONE) {
1065  av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: "
1066  "%d.\n", sts);
1067  goto fail;
1068  }
1069 
1070  av_log(ctx, AV_LOG_VERBOSE, "Initialize MFX session: implementation "
1071  "version is %d.%d\n", ver.Major, ver.Minor);
1072 
1073  MFXClose(session);
1074 
1075  sts = MFXInit(implementation, &ver, &session);
1076  if (sts != MFX_ERR_NONE) {
1077  av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1078  "%d.\n", sts);
1079  goto fail;
1080  }
1081 
1082  *psession = session;
1083 
1084  return 0;
1085 
1086 fail:
1087  if (session)
1088  MFXClose(session);
1089 
1090  return AVERROR_UNKNOWN;
1091 }
1092 
1093 #endif
1094 
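/*
 * The internal sessions below exist only to move data between system memory
 * and the hardware surfaces: a one-in/one-out VPP with system memory on one
 * side and video (or opaque) memory on the other, created lazily on the
 * first upload or download.
 */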
1095 static int qsv_init_internal_session(AVHWFramesContext *ctx,
1096  mfxSession *session, int upload)
1097 {
1098  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
1099  QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
1100  int opaque = 0;
1101 
1102  mfxFrameAllocator frame_allocator = {
1103  .pthis = ctx,
1104  .Alloc = frame_alloc,
1105  .Lock = frame_lock,
1106  .Unlock = frame_unlock,
1107  .GetHDL = frame_get_hdl,
1108  .Free = frame_free,
1109  };
1110 
1111  mfxVideoParam par;
1112  mfxStatus err;
1113  int ret = AVERROR_UNKNOWN;
1114  AVQSVDeviceContext *hwctx = ctx->device_ctx->hwctx;
1115  /* hwctx->loader is non-NULL for oneVPL user and NULL for non-oneVPL user */
1116  void **loader = &hwctx->loader;
1117 
1118 #if QSV_HAVE_OPAQUE
1119  QSVFramesContext *s = ctx->internal->priv;
1120  opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
1121 #endif
1122 
1123  ret = qsv_create_mfx_session(ctx, device_priv->handle, device_priv->handle_type,
1124  device_priv->impl, &device_priv->ver, session, loader);
1125  if (ret)
1126  goto fail;
1127 
1128  if (device_priv->handle) {
1129  err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
1130  device_priv->handle);
1131  if (err != MFX_ERR_NONE) {
1132  ret = AVERROR_UNKNOWN;
1133  goto fail;
1134  }
1135  }
1136 
1137  if (!opaque) {
1138  err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
1139  if (err != MFX_ERR_NONE) {
1140  ret = AVERROR_UNKNOWN;
1141  goto fail;
1142  }
1143  }
1144 
1145  memset(&par, 0, sizeof(par));
1146 
1147  if (!opaque) {
1148  par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
1149  MFX_IOPATTERN_IN_VIDEO_MEMORY;
1150  }
1151 #if QSV_HAVE_OPAQUE
1152  else {
1153  par.ExtParam = s->ext_buffers;
1154  par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
1155  par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
1156  MFX_IOPATTERN_IN_OPAQUE_MEMORY;
1157  }
1158 #endif
1159 
1160  par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
1161  MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
1162  par.AsyncDepth = 1;
1163 
1164  par.vpp.In = frames_hwctx->surfaces[0].Info;
1165 
1166  /* Apparently VPP requires the frame rate to be set to some value, otherwise
1167  * init will fail (probably for the framerate conversion filter). Since we
1168  * are only doing data upload/download here, we just invent an arbitrary
1169  * value */
1170  par.vpp.In.FrameRateExtN = 25;
1171  par.vpp.In.FrameRateExtD = 1;
1172  par.vpp.Out = par.vpp.In;
1173 
1174  err = MFXVideoVPP_Init(*session, &par);
1175  if (err != MFX_ERR_NONE) {
1176  av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session."
1177  "Surface upload/download will not be possible\n");
1178 
1179  ret = AVERROR_UNKNOWN;
1180  goto fail;
1181  }
1182 
1183  return 0;
1184 
1185 fail:
1186  if (*session)
1187  MFXClose(*session);
1188 
1189  *session = NULL;
1190 
1191  return ret;
1192 }
1193 
1194 static int qsv_frames_init(AVHWFramesContext *ctx)
1195 {
1196  QSVFramesContext *s = ctx->internal->priv;
1197  AVQSVFramesContext *frames_hwctx = ctx->hwctx;
1198 
1199  int opaque = 0;
1200 
1201  uint32_t fourcc;
1202  int i, ret;
1203 
1204 #if QSV_HAVE_OPAQUE
1205  opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
1206 #endif
1207 
1208  fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
1209  if (!fourcc) {
1210  av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
1211  return AVERROR(ENOSYS);
1212  }
1213 
1214  if (!ctx->pool) {
1215  ret = qsv_init_pool(ctx, fourcc);
1216  if (ret < 0) {
1217  av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
1218  return ret;
1219  }
1220  }
1221 
1222  if (!opaque) {
1223  s->mem_ids = av_calloc(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
1224  if (!s->mem_ids)
1225  return AVERROR(ENOMEM);
1226 
1227  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
1228  s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
1229  }
1230 #if QSV_HAVE_OPAQUE
1231  else {
1232  s->surface_ptrs = av_calloc(frames_hwctx->nb_surfaces,
1233  sizeof(*s->surface_ptrs));
1234  if (!s->surface_ptrs)
1235  return AVERROR(ENOMEM);
1236 
1237  for (i = 0; i < frames_hwctx->nb_surfaces; i++)
1238  s->surface_ptrs[i] = frames_hwctx->surfaces + i;
1239 
1240  s->opaque_alloc.In.Surfaces = s->surface_ptrs;
1241  s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
1242  s->opaque_alloc.In.Type = frames_hwctx->frame_type;
1243 
1244  s->opaque_alloc.Out = s->opaque_alloc.In;
1245 
1246  s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
1247  s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
1248 
1249  s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
1250  }
1251 #endif
1252 
1253  s->session_download = NULL;
1254  s->session_upload = NULL;
1255 
1256  s->session_download_init = 0;
1257  s->session_upload_init = 0;
1258 
1259 #if HAVE_PTHREADS
1260  pthread_mutex_init(&s->session_lock, NULL);
1261 #endif
1262 
1263  return 0;
1264 }
1265 
1266 static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
1267 {
1268  frame->buf[0] = av_buffer_pool_get(ctx->pool);
1269  if (!frame->buf[0])
1270  return AVERROR(ENOMEM);
1271 
1272  frame->data[3] = frame->buf[0]->data;
1273  frame->format = AV_PIX_FMT_QSV;
1274  frame->width = ctx->width;
1275  frame->height = ctx->height;
1276 
1277  return 0;
1278 }
1279 
1280 static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
1281  enum AVHWFrameTransferDirection dir,
1282  enum AVPixelFormat **formats)
1283 {
1284  enum AVPixelFormat *fmts;
1285 
1286  fmts = av_malloc_array(2, sizeof(*fmts));
1287  if (!fmts)
1288  return AVERROR(ENOMEM);
1289 
1290  fmts[0] = ctx->sw_format;
1291  fmts[1] = AV_PIX_FMT_NONE;
1292 
1293  *formats = fmts;
1294 
1295  return 0;
1296 }
1297 
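/*
 * Deriving a child (VAAPI/D3D11VA/DXVA2) frames context from a QSV one just
 * re-exposes the native handles already stored in each surface's mfxHDLPair:
 * the VASurfaceID, the ID3D11Texture2D plus array slice, or the
 * IDirect3DSurface9 pointer.
 */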
1298 static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
1299  AVHWFramesContext *src_ctx, int flags)
1300 {
1301  AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
1302  int i;
1303 
1304  switch (dst_ctx->device_ctx->type) {
1305 #if CONFIG_VAAPI
1306  case AV_HWDEVICE_TYPE_VAAPI:
1307  {
1308  AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
1309  dst_hwctx->surface_ids = av_calloc(src_hwctx->nb_surfaces,
1310  sizeof(*dst_hwctx->surface_ids));
1311  if (!dst_hwctx->surface_ids)
1312  return AVERROR(ENOMEM);
1313  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1314  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1315  dst_hwctx->surface_ids[i] = *(VASurfaceID*)pair->first;
1316  }
1317  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1318  }
1319  break;
1320 #endif
1321 #if CONFIG_D3D11VA
1322  case AV_HWDEVICE_TYPE_D3D11VA:
1323  {
1324  D3D11_TEXTURE2D_DESC texDesc;
1325  dst_ctx->initial_pool_size = src_ctx->initial_pool_size;
1326  AVD3D11VAFramesContext *dst_hwctx = dst_ctx->hwctx;
1327  dst_hwctx->texture_infos = av_calloc(src_hwctx->nb_surfaces,
1328  sizeof(*dst_hwctx->texture_infos));
1329  if (!dst_hwctx->texture_infos)
1330  return AVERROR(ENOMEM);
1331  if (src_hwctx->frame_type & MFX_MEMTYPE_SHARED_RESOURCE)
1332  dst_hwctx->MiscFlags = D3D11_RESOURCE_MISC_SHARED;
1333  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1334  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1335  dst_hwctx->texture_infos[i].texture = (ID3D11Texture2D*)pair->first;
1336  dst_hwctx->texture_infos[i].index = pair->second == (mfxMemId)MFX_INFINITE ? (intptr_t)0 : (intptr_t)pair->second;
1337  }
1338  ID3D11Texture2D_GetDesc(dst_hwctx->texture_infos[0].texture, &texDesc);
1339  dst_hwctx->BindFlags = texDesc.BindFlags;
1340  }
1341  break;
1342 #endif
1343 #if CONFIG_DXVA2
1344  case AV_HWDEVICE_TYPE_DXVA2:
1345  {
1346  AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
1347  dst_hwctx->surfaces = av_calloc(src_hwctx->nb_surfaces,
1348  sizeof(*dst_hwctx->surfaces));
1349  if (!dst_hwctx->surfaces)
1350  return AVERROR(ENOMEM);
1351  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1352  mfxHDLPair *pair = (mfxHDLPair*)src_hwctx->surfaces[i].Data.MemId;
1353  dst_hwctx->surfaces[i] = (IDirect3DSurface9*)pair->first;
1354  }
1355  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1356  if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
1357  dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
1358  else
1359  dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
1360  }
1361  break;
1362 #endif
1363  default:
1364  return AVERROR(ENOSYS);
1365  }
1366 
1367  return 0;
1368 }
1369 
1370 static int qsv_map_from(AVHWFramesContext *ctx,
1371  AVFrame *dst, const AVFrame *src, int flags)
1372 {
1373  QSVFramesContext *s = ctx->internal->priv;
1374  mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
1375  AVHWFramesContext *child_frames_ctx;
1376  const AVPixFmtDescriptor *desc;
1377  uint8_t *child_data;
1378  AVFrame *dummy;
1379  int ret = 0;
1380 
1381  if (!s->child_frames_ref)
1382  return AVERROR(ENOSYS);
1383  child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
1384 
1385  switch (child_frames_ctx->device_ctx->type) {
1386 #if CONFIG_VAAPI
1387  case AV_HWDEVICE_TYPE_VAAPI:
1388  {
1389  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1390  /* pair->first is *VASurfaceID while data[3] in vaapi frame is VASurfaceID, so
1391  * we need this casting for vaapi.
1392  * Add intptr_t to force cast from VASurfaceID(uint) type to pointer(long) type
1393  * to avoid compile warning */
1394  child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)pair->first;
1395  break;
1396  }
1397 #endif
1398 #if CONFIG_D3D11VA
1399  case AV_HWDEVICE_TYPE_D3D11VA:
1400  {
1401  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1402  child_data = pair->first;
1403  break;
1404  }
1405 #endif
1406 #if CONFIG_DXVA2
1407  case AV_HWDEVICE_TYPE_DXVA2:
1408  {
1409  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1410  child_data = pair->first;
1411  break;
1412  }
1413 #endif
1414  default:
1415  return AVERROR(ENOSYS);
1416  }
1417 
1418  if (dst->format == child_frames_ctx->format) {
1419  ret = ff_hwframe_map_create(s->child_frames_ref,
1420  dst, src, NULL, NULL);
1421  if (ret < 0)
1422  return ret;
1423 
1424  dst->width = src->width;
1425  dst->height = src->height;
1426 
1427  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
1428  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1429  dst->data[0] = pair->first;
1430  dst->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
1431  } else {
1432  dst->data[3] = child_data;
1433  }
1434 
1435  return 0;
1436  }
1437 
1438  desc = av_pix_fmt_desc_get(dst->format);
1439  if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
1440  // This only supports mapping to software.
1441  return AVERROR(ENOSYS);
1442  }
1443 
1444  dummy = av_frame_alloc();
1445  if (!dummy)
1446  return AVERROR(ENOMEM);
1447 
1448  dummy->buf[0] = av_buffer_ref(src->buf[0]);
1449  dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
1450  if (!dummy->buf[0] || !dummy->hw_frames_ctx)
1451  goto fail;
1452 
1453  dummy->format = child_frames_ctx->format;
1454  dummy->width = src->width;
1455  dummy->height = src->height;
1456 
1457  if (child_frames_ctx->device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
1458  mfxHDLPair *pair = (mfxHDLPair*)surf->Data.MemId;
1459  dummy->data[0] = pair->first;
1460  dummy->data[1] = pair->second == (mfxMemId)MFX_INFINITE ? (uint8_t *)0 : pair->second;
1461  } else {
1462  dummy->data[3] = child_data;
1463  }
1464 
1465  ret = av_hwframe_map(dst, dummy, flags);
1466 
1467 fail:
1468  av_frame_free(&dummy);
1469 
1470  return ret;
1471 }
1472 
1473 static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
1474  const AVFrame *src)
1475 {
1476  QSVFramesContext *s = ctx->internal->priv;
1477  AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
1478  int download = !!src->hw_frames_ctx;
1479  mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);
1480 
1481  AVFrame *dummy;
1482  int ret;
1483 
1484  dummy = av_frame_alloc();
1485  if (!dummy)
1486  return AVERROR(ENOMEM);
1487 
1488  dummy->format = child_frames_ctx->format;
1489  dummy->width = src->width;
1490  dummy->height = src->height;
1491  dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
1492  dummy->data[3] = surf->Data.MemId;
1493  dummy->hw_frames_ctx = s->child_frames_ref;
1494 
1495  ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
1496  av_hwframe_transfer_data(dummy, src, 0);
1497 
1498  dummy->buf[0] = NULL;
1499  dummy->data[3] = NULL;
1500  dummy->hw_frames_ctx = NULL;
1501 
1502  av_frame_free(&dummy);
1503 
1504  return ret;
1505 }
1506 
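/*
 * Point the mfxFrameData plane pointers at the AVFrame's data. Packed formats
 * are addressed through offsets into data[0]; Pitch is taken from
 * linesize[0] and the timestamp is passed through from frame->pts.
 */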
1507 static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
1508 {
1509  switch (frame->format) {
1510  case AV_PIX_FMT_NV12:
1511  case AV_PIX_FMT_P010:
1512  case AV_PIX_FMT_P012:
1513  surface->Data.Y = frame->data[0];
1514  surface->Data.UV = frame->data[1];
1515  break;
1516 
1517  case AV_PIX_FMT_YUV420P:
1518  surface->Data.Y = frame->data[0];
1519  surface->Data.U = frame->data[1];
1520  surface->Data.V = frame->data[2];
1521  break;
1522 
1523  case AV_PIX_FMT_BGRA:
1524  surface->Data.B = frame->data[0];
1525  surface->Data.G = frame->data[0] + 1;
1526  surface->Data.R = frame->data[0] + 2;
1527  surface->Data.A = frame->data[0] + 3;
1528  break;
1529 #if CONFIG_VAAPI
1530  case AV_PIX_FMT_YUYV422:
1531  surface->Data.Y = frame->data[0];
1532  surface->Data.U = frame->data[0] + 1;
1533  surface->Data.V = frame->data[0] + 3;
1534  break;
1535 
1536  case AV_PIX_FMT_Y210:
1537  case AV_PIX_FMT_Y212:
1538  surface->Data.Y16 = (mfxU16 *)frame->data[0];
1539  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
1540  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
1541  break;
1542  case AV_PIX_FMT_VUYX:
1543  surface->Data.V = frame->data[0];
1544  surface->Data.U = frame->data[0] + 1;
1545  surface->Data.Y = frame->data[0] + 2;
1546  // Only set Data.A to a valid address, the SDK doesn't
1547  // use the value from the frame.
1548  surface->Data.A = frame->data[0] + 3;
1549  break;
1550  case AV_PIX_FMT_XV30:
1551  surface->Data.U = frame->data[0];
1552  break;
1553  case AV_PIX_FMT_XV36:
1554  surface->Data.U = frame->data[0];
1555  surface->Data.Y = frame->data[0] + 2;
1556  surface->Data.V = frame->data[0] + 4;
1557  // Only set Data.A to a valid address, the SDK doesn't
1558  // use the value from the frame.
1559  surface->Data.A = frame->data[0] + 6;
1560  break;
1561  case AV_PIX_FMT_UYVY422:
1562  surface->Data.Y = frame->data[0] + 1;
1563  surface->Data.U = frame->data[0];
1564  surface->Data.V = frame->data[0] + 2;
1565  break;
1566 #endif
1567  default:
1568  return MFX_ERR_UNSUPPORTED;
1569  }
1570  surface->Data.Pitch = frame->linesize[0];
1571  surface->Data.TimeStamp = frame->pts;
1572 
1573  return 0;
1574 }
1575 
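/*
 * Lazily create the upload or download session on first use. The fast path
 * is a single atomic load; initialization itself is serialized with
 * session_lock and re-checked under the lock so only one thread creates the
 * session.
 */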
1576 static int qsv_internal_session_check_init(AVHWFramesContext *ctx, int upload)
1577 {
1578  QSVFramesContext *s = ctx->internal->priv;
1579  atomic_int *inited = upload ? &s->session_upload_init : &s->session_download_init;
1580  mfxSession *session = upload ? &s->session_upload : &s->session_download;
1581  int ret = 0;
1582 
1583  if (atomic_load(inited))
1584  return 0;
1585 
1586 #if HAVE_PTHREADS
1587  pthread_mutex_lock(&s->session_lock);
1588 #endif
1589 
1590  if (!atomic_load(inited)) {
1591  ret = qsv_init_internal_session(ctx, session, upload);
1592  atomic_store(inited, 1);
1593  }
1594 
1595 #if HAVE_PTHREADS
1596  pthread_mutex_unlock(&s->session_lock);
1597 #endif
1598 
1599  return ret;
1600 }
1601 
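/*
 * Download/upload between system memory and QSV surfaces. libmfx requires
 * 16-aligned dimensions, so frames that are not sufficiently padded are
 * staged through the cached realigned_download_frame/realigned_upload_frame
 * temporaries before or after the VPP run.
 */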
1602 static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
1603  const AVFrame *src)
1604 {
1605  QSVFramesContext *s = ctx->internal->priv;
1606  mfxFrameSurface1 out = {{ 0 }};
1607  mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
1608 
1609  mfxSyncPoint sync = NULL;
1610  mfxStatus err;
1611  int ret = 0;
1612  /* download to temp frame if the output is not padded as libmfx requires */
1613  AVFrame *tmp_frame = &s->realigned_download_frame;
1614  AVFrame *dst_frame;
1615  int realigned = 0;
1616 
1617  ret = qsv_internal_session_check_init(ctx, 0);
1618  if (ret < 0)
1619  return ret;
1620 
1621  /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
1622  * Height must be a multiple of 16 for progressive frame sequence and a
1623  * multiple of 32 otherwise.", so align all frames to 16 before downloading. */
1624  if (dst->height & 15 || dst->linesize[0] & 15) {
1625  realigned = 1;
1626  if (tmp_frame->format != dst->format ||
1627  tmp_frame->width != FFALIGN(dst->linesize[0], 16) ||
1628  tmp_frame->height != FFALIGN(dst->height, 16)) {
1629  av_frame_unref(tmp_frame);
1630 
1631  tmp_frame->format = dst->format;
1632  tmp_frame->width = FFALIGN(dst->linesize[0], 16);
1633  tmp_frame->height = FFALIGN(dst->height, 16);
1634  ret = av_frame_get_buffer(tmp_frame, 0);
1635  if (ret < 0)
1636  return ret;
1637  }
1638  }
1639 
1640  dst_frame = realigned ? tmp_frame : dst;
1641 
1642  if (!s->session_download) {
1643  if (s->child_frames_ref)
1644  return qsv_transfer_data_child(ctx, dst_frame, src);
1645 
1646  av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
1647  return AVERROR(ENOSYS);
1648  }
1649 
1650  out.Info = in->Info;
1651  map_frame_to_surface(dst_frame, &out);
1652 
1653  do {
1654  err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
1655  if (err == MFX_WRN_DEVICE_BUSY)
1656  av_usleep(1);
1657  } while (err == MFX_WRN_DEVICE_BUSY);
1658 
1659  if (err < 0 || !sync) {
1660  av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
1661  return AVERROR_UNKNOWN;
1662  }
1663 
1664  do {
1665  err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
1666  } while (err == MFX_WRN_IN_EXECUTION);
1667  if (err < 0) {
1668  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
1669  return AVERROR_UNKNOWN;
1670  }
1671 
1672  if (realigned) {
1673  tmp_frame->width = dst->width;
1674  tmp_frame->height = dst->height;
1675  ret = av_frame_copy(dst, tmp_frame);
1676  tmp_frame->width = FFALIGN(dst->linesize[0], 16);
1677  tmp_frame->height = FFALIGN(dst->height, 16);
1678  if (ret < 0)
1679  return ret;
1680  }
1681 
1682  return 0;
1683 }
1684 
1685 static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
1686  const AVFrame *src)
1687 {
1688  QSVFramesContext *s = ctx->internal->priv;
1689  mfxFrameSurface1 in = {{ 0 }};
1690  mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
1691  mfxFrameInfo tmp_info;
1692 
1693  mfxSyncPoint sync = NULL;
1694  mfxStatus err;
1695  int ret = 0;
1696  /* make a copy if the input is not padded as libmfx requires */
1697  AVFrame *tmp_frame = &s->realigned_upload_frame;
1698  const AVFrame *src_frame;
1699  int realigned = 0;
1700 
1701  ret = qsv_internal_session_check_init(ctx, 1);
1702  if (ret < 0)
1703  return ret;
1704 
1705  /* According to MSDK spec for mfxframeinfo, "Width must be a multiple of 16.
1706  * Height must be a multiple of 16 for progressive frame sequence and a
1707  * multiple of 32 otherwise.", so align all frames to 16 before uploading. */
1708  if (src->height & 15 || src->linesize[0] & 15) {
1709  realigned = 1;
1710  if (tmp_frame->format != src->format ||
1711  tmp_frame->width != FFALIGN(src->width, 16) ||
1712  tmp_frame->height != FFALIGN(src->height, 16)) {
1713  av_frame_unref(tmp_frame);
1714 
1715  tmp_frame->format = src->format;
1716  tmp_frame->width = FFALIGN(src->width, 16);
1717  tmp_frame->height = FFALIGN(src->height, 16);
1718  ret = av_frame_get_buffer(tmp_frame, 0);
1719  if (ret < 0)
1720  return ret;
1721  }
1722  ret = av_frame_copy(tmp_frame, src);
1723  if (ret < 0) {
1724  av_frame_unref(tmp_frame);
1725  return ret;
1726  }
1727  ret = qsv_fill_border(tmp_frame, src);
1728  if (ret < 0) {
1729  av_frame_unref(tmp_frame);
1730  return ret;
1731  }
1732 
1733  tmp_info = out->Info;
1734  out->Info.CropW = FFMIN(out->Info.Width, tmp_frame->width);
1735  out->Info.CropH = FFMIN(out->Info.Height, tmp_frame->height);
1736  }
1737 
1738  src_frame = realigned ? tmp_frame : src;
1739 
1740  if (!s->session_upload) {
1741  if (s->child_frames_ref)
1742  return qsv_transfer_data_child(ctx, dst, src_frame);
1743 
1744  av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
1745  return AVERROR(ENOSYS);
1746  }
1747 
1748  in.Info = out->Info;
1749  map_frame_to_surface(src_frame, &in);
1750 
1751  do {
1752  err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
1753  if (err == MFX_WRN_DEVICE_BUSY)
1754  av_usleep(1);
1755  } while (err == MFX_WRN_DEVICE_BUSY);
1756 
1757  if (err < 0 || !sync) {
1758  av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
1759  return AVERROR_UNKNOWN;
1760  }
1761 
1762  do {
1763  err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
1764  } while (err == MFX_WRN_IN_EXECUTION);
1765  if (err < 0) {
1766  av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
1767  return AVERROR_UNKNOWN;
1768  }
1769 
1770  if (realigned) {
1771  out->Info.CropW = tmp_info.CropW;
1772  out->Info.CropH = tmp_info.CropH;
1773  }
1774 
1775  return 0;
1776 }
1777 
1778 static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
1779  AVHWFramesContext *src_ctx, int flags)
1780 {
1781  QSVFramesContext *s = dst_ctx->internal->priv;
1782  AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
1783  int i;
1784 
1785  if (src_ctx->initial_pool_size == 0) {
1786  av_log(dst_ctx, AV_LOG_ERROR, "Only fixed-size pools can be "
1787  "mapped to QSV frames.\n");
1788  return AVERROR(EINVAL);
1789  }
1790 
1791  switch (src_ctx->device_ctx->type) {
1792 #if CONFIG_VAAPI
1793  case AV_HWDEVICE_TYPE_VAAPI:
1794  {
1795  AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
1796  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1797  sizeof(*s->handle_pairs_internal));
1798  if (!s->handle_pairs_internal)
1799  return AVERROR(ENOMEM);
1800  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1801  sizeof(*s->surfaces_internal));
1802  if (!s->surfaces_internal)
1803  return AVERROR(ENOMEM);
1804  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1805  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1806  s->handle_pairs_internal[i].first = src_hwctx->surface_ids + i;
1807  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1808  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1809  }
1810  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1811  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1812  }
1813  break;
1814 #endif
1815 #if CONFIG_D3D11VA
1816  case AV_HWDEVICE_TYPE_D3D11VA:
1817  {
1818  AVD3D11VAFramesContext *src_hwctx = src_ctx->hwctx;
1819  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1820  sizeof(*s->handle_pairs_internal));
1821  if (!s->handle_pairs_internal)
1822  return AVERROR(ENOMEM);
1823  s->surfaces_internal = av_calloc(src_ctx->initial_pool_size,
1824  sizeof(*s->surfaces_internal));
1825  if (!s->surfaces_internal)
1826  return AVERROR(ENOMEM);
1827  for (i = 0; i < src_ctx->initial_pool_size; i++) {
1828  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1829  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->texture_infos[i].texture;
1830  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1831  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1832  } else {
1833  s->handle_pairs_internal[i].second = (mfxMemId)src_hwctx->texture_infos[i].index;
1834  }
1835  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1836  }
1837  dst_hwctx->nb_surfaces = src_ctx->initial_pool_size;
1838  if (src_hwctx->BindFlags & D3D11_BIND_RENDER_TARGET) {
1839  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1840  } else {
1841  dst_hwctx->frame_type |= MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1842  }
1843  }
1844  break;
1845 #endif
1846 #if CONFIG_DXVA2
1847  case AV_HWDEVICE_TYPE_DXVA2:
1848  {
1849  AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
1850  s->handle_pairs_internal = av_calloc(src_ctx->initial_pool_size,
1851  sizeof(*s->handle_pairs_internal));
1852  if (!s->handle_pairs_internal)
1853  return AVERROR(ENOMEM);
1854  s->surfaces_internal = av_calloc(src_hwctx->nb_surfaces,
1855  sizeof(*s->surfaces_internal));
1856  if (!s->surfaces_internal)
1857  return AVERROR(ENOMEM);
1858  for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1859  qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1860  s->handle_pairs_internal[i].first = (mfxMemId)src_hwctx->surfaces[i];
1861  s->handle_pairs_internal[i].second = (mfxMemId)MFX_INFINITE;
1862  s->surfaces_internal[i].Data.MemId = (mfxMemId)&s->handle_pairs_internal[i];
1863  }
1864  dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1865  if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
1866  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1867  else
1868  dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1869  }
1870  break;
1871 #endif
1872  default:
1873  return AVERROR(ENOSYS);
1874  }
1875 
1876  dst_hwctx->surfaces = s->surfaces_internal;
1877 
1878  return 0;
1879 }
1880 
1881 static int qsv_map_to(AVHWFramesContext *dst_ctx,
1882  AVFrame *dst, const AVFrame *src, int flags)
1883 {
1884  AVQSVFramesContext *hwctx = dst_ctx->hwctx;
1885  int i, err, index = -1;
1886 
1887  for (i = 0; i < hwctx->nb_surfaces && index < 0; i++) {
1888  switch(src->format) {
1889 #if CONFIG_VAAPI
1890  case AV_PIX_FMT_VAAPI:
1891  {
1892  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1893  if (*(VASurfaceID*)pair->first == (VASurfaceID)src->data[3]) {
1894  index = i;
1895  break;
1896  }
1897  }
1898 #endif
1899 #if CONFIG_D3D11VA
1900  case AV_PIX_FMT_D3D11:
1901  {
1902  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1903  if (pair->first == src->data[0]
1904  && (pair->second == src->data[1]
1905  || (pair->second == (mfxMemId)MFX_INFINITE && src->data[1] == (uint8_t *)0))) {
1906  index = i;
1907  break;
1908  }
1909  }
1910 #endif
1911 #if CONFIG_DXVA2
1912  case AV_PIX_FMT_DXVA2_VLD:
1913  {
1914  mfxHDLPair *pair = (mfxHDLPair*)hwctx->surfaces[i].Data.MemId;
1915  if (pair->first == src->data[3]) {
1916  index = i;
1917  break;
1918  }
1919  }
1920 #endif
1921  }
1922  }
1923  if (index < 0) {
1924  av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
1925  "is not in the mapped frames context.\n");
1926  return AVERROR(EINVAL);
1927  }
1928 
1929  err = ff_hwframe_map_create(dst->hw_frames_ctx,
1930  dst, src, NULL, NULL);
1931  if (err)
1932  return err;
1933 
1934  dst->width = src->width;
1935  dst->height = src->height;
1936  dst->data[3] = (uint8_t*)&hwctx->surfaces[index];
1937 
1938  return 0;
1939 }
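From the caller's side, the surface lookup above is reached through av_hwframe_map() on a destination frame whose hw_frames_ctx refers to a QSV frames context derived from the pool that owns the source surface. A minimal sketch under that assumption (helper name is illustrative):

    /* Hypothetical helper: map one child-device frame (e.g. VAAPI) to a
     * QSV frame. qsv_frames_ref is assumed to be derived from the frames
     * context owning src, so the search above can find the surface. */
    static int map_child_frame_to_qsv(AVBufferRef *qsv_frames_ref,
                                      const AVFrame *src, AVFrame *dst)
    {
        int ret;

        dst->format        = AV_PIX_FMT_QSV;
        dst->hw_frames_ctx = av_buffer_ref(qsv_frames_ref);
        if (!dst->hw_frames_ctx)
            return AVERROR(ENOMEM);

        ret = av_hwframe_map(dst, src, AV_HWFRAME_MAP_DIRECT);
        if (ret < 0)
            av_frame_unref(dst);
        return ret;
    }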
1940 
1941 static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
1942  const void *hwconfig,
1943  AVHWFramesConstraints *constraints)
1944 {
1945  int i;
1946 
1947  constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
1948  sizeof(*constraints->valid_sw_formats));
1949  if (!constraints->valid_sw_formats)
1950  return AVERROR(ENOMEM);
1951 
1952  for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
1953  constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
1954  constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
1955 
1956  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
1957  if (!constraints->valid_hw_formats)
1958  return AVERROR(ENOMEM);
1959 
1960  constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
1961  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
1962 
1963  return 0;
1964 }
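Applications usually query these limits through av_hwdevice_get_hwframe_constraints() rather than inspecting supported_pixel_formats directly. A minimal sketch (helper name is illustrative), printing the software formats reported for a QSV device:

    #include <libavutil/hwcontext.h>
    #include <libavutil/log.h>
    #include <libavutil/pixdesc.h>

    /* Hypothetical helper: list usable sw_format values for a QSV device.
     * device_ref is assumed to be a valid AV_HWDEVICE_TYPE_QSV reference. */
    static void print_qsv_sw_formats(AVBufferRef *device_ref)
    {
        AVHWFramesConstraints *cst =
            av_hwdevice_get_hwframe_constraints(device_ref, NULL);
        if (!cst)
            return;
        for (int i = 0; cst->valid_sw_formats &&
                        cst->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++)
            av_log(NULL, AV_LOG_INFO, "%s\n",
                   av_get_pix_fmt_name(cst->valid_sw_formats[i]));
        av_hwframe_constraints_free(&cst);
    }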
1965 
1966 static void qsv_device_free(AVHWDeviceContext *ctx)
1967 {
1968  AVQSVDeviceContext *hwctx = ctx->hwctx;
1969  QSVDevicePriv *priv = ctx->user_opaque;
1970 
1971  if (hwctx->session)
1972  MFXClose(hwctx->session);
1973 
1974  if (hwctx->loader)
1975  MFXUnload(hwctx->loader);
1976  av_buffer_unref(&priv->child_device_ctx);
1977  av_freep(&priv);
1978 }
1979 
1980 static mfxIMPL choose_implementation(const char *device, enum AVHWDeviceType child_device_type)
1981 {
1982  static const struct {
1983  const char *name;
1984  mfxIMPL impl;
1985  } impl_map[] = {
1986  { "auto", MFX_IMPL_AUTO },
1987  { "sw", MFX_IMPL_SOFTWARE },
1988  { "hw", MFX_IMPL_HARDWARE },
1989  { "auto_any", MFX_IMPL_AUTO_ANY },
1990  { "hw_any", MFX_IMPL_HARDWARE_ANY },
1991  { "hw2", MFX_IMPL_HARDWARE2 },
1992  { "hw3", MFX_IMPL_HARDWARE3 },
1993  { "hw4", MFX_IMPL_HARDWARE4 },
1994  };
1995 
1996  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
1997  int i;
1998 
1999  if (device) {
2000  for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
2001  if (!strcmp(device, impl_map[i].name)) {
2002  impl = impl_map[i].impl;
2003  break;
2004  }
2005  if (i == FF_ARRAY_ELEMS(impl_map))
2006  impl = strtol(device, NULL, 0);
2007  }
2008 
2009  if (impl != MFX_IMPL_SOFTWARE) {
2010  if (child_device_type == AV_HWDEVICE_TYPE_D3D11VA)
2011  impl |= MFX_IMPL_VIA_D3D11;
2012  else if (child_device_type == AV_HWDEVICE_TYPE_DXVA2)
2013  impl |= MFX_IMPL_VIA_D3D9;
2014  }
2015 
2016  return impl;
2017 }
2018 
2019 static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
2020  mfxIMPL implementation,
2021  AVHWDeviceContext *child_device_ctx,
2022  int flags)
2023 {
2024  AVQSVDeviceContext *hwctx = ctx->hwctx;
2025 
2026  mfxVersion ver = { { 3, 1 } };
2027  mfxHDL handle;
2028  mfxHandleType handle_type;
2029  mfxStatus err;
2030  int ret;
2031 
2032  switch (child_device_ctx->type) {
2033 #if CONFIG_VAAPI
2034  case AV_HWDEVICE_TYPE_VAAPI:
2035  {
2036  AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
2037  handle_type = MFX_HANDLE_VA_DISPLAY;
2038  handle = (mfxHDL)child_device_hwctx->display;
2039  }
2040  break;
2041 #endif
2042 #if CONFIG_D3D11VA
2043  case AV_HWDEVICE_TYPE_D3D11VA:
2044  {
2045  AVD3D11VADeviceContext *child_device_hwctx = child_device_ctx->hwctx;
2046  handle_type = MFX_HANDLE_D3D11_DEVICE;
2047  handle = (mfxHDL)child_device_hwctx->device;
2048  }
2049  break;
2050 #endif
2051 #if CONFIG_DXVA2
2052  case AV_HWDEVICE_TYPE_DXVA2:
2053  {
2054  AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
2055  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
2056  handle = (mfxHDL)child_device_hwctx->devmgr;
2057  }
2058  break;
2059 #endif
2060  default:
2061  ret = AVERROR(ENOSYS);
2062  goto fail;
2063  }
2064 
2065  ret = qsv_create_mfx_session(ctx, handle, handle_type, implementation, &ver,
2066  &hwctx->session, &hwctx->loader);
2067  if (ret)
2068  goto fail;
2069 
2070  err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
2071  if (err != MFX_ERR_NONE) {
2072  av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
2073  "%d\n", err);
2074  ret = AVERROR_UNKNOWN;
2075  goto fail;
2076  }
2077 
2078  return 0;
2079 
2080 fail:
2081  if (hwctx->session)
2082  MFXClose(hwctx->session);
2083 
2084  if (hwctx->loader)
2085  MFXUnload(hwctx->loader);
2086 
2087  hwctx->session = NULL;
2088  hwctx->loader = NULL;
2089  return ret;
2090 }
2091 
2092 static int qsv_device_derive(AVHWDeviceContext *ctx,
2093  AVHWDeviceContext *child_device_ctx,
2094  AVDictionary *opts, int flags)
2095 {
2096  mfxIMPL impl;
2097  impl = choose_implementation("hw_any", child_device_ctx->type);
2098  return qsv_device_derive_from_child(ctx, impl,
2099  child_device_ctx, flags);
2100 }
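This entry point is what av_hwdevice_ctx_create_derived() calls when asked to build an AV_HWDEVICE_TYPE_QSV device on top of an existing child device. A minimal sketch, assuming the application already holds an opened VAAPI device reference (helper name is illustrative):

    /* Hypothetical helper: derive a QSV device from an existing VAAPI one. */
    static int derive_qsv_device(AVBufferRef *vaapi_device_ref,
                                 AVBufferRef **qsv_device_ref)
    {
        return av_hwdevice_ctx_create_derived(qsv_device_ref,
                                              AV_HWDEVICE_TYPE_QSV,
                                              vaapi_device_ref, 0);
    }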
2101 
2102 static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
2103  AVDictionary *opts, int flags)
2104 {
2105  QSVDevicePriv *priv;
2106  enum AVHWDeviceType child_device_type;
2107  AVHWDeviceContext *child_device;
2108  AVDictionary *child_device_opts;
2109  AVDictionaryEntry *e;
2110 
2111  mfxIMPL impl;
2112  int ret;
2113 
2114  priv = av_mallocz(sizeof(*priv));
2115  if (!priv)
2116  return AVERROR(ENOMEM);
2117 
2118  ctx->user_opaque = priv;
2119  ctx->free = qsv_device_free;
2120 
2121  e = av_dict_get(opts, "child_device_type", NULL, 0);
2122  if (e) {
2123  child_device_type = av_hwdevice_find_type_by_name(e->value);
2124  if (child_device_type == AV_HWDEVICE_TYPE_NONE) {
2125  av_log(ctx, AV_LOG_ERROR, "Unknown child device type "
2126  "\"%s\".\n", e->value);
2127  return AVERROR(EINVAL);
2128  }
2129  } else if (CONFIG_VAAPI) {
2130  child_device_type = AV_HWDEVICE_TYPE_VAAPI;
2131 #if QSV_ONEVPL
2132  } else if (CONFIG_D3D11VA) { // Use D3D11 by default if d3d11va is enabled
2133  av_log(ctx, AV_LOG_VERBOSE,
2134  "Defaulting child_device_type to AV_HWDEVICE_TYPE_D3D11VA for oneVPL."
2135  "Please explicitly set child device type via \"-init_hw_device\" "
2136  "option if needed.\n");
2137  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
2138  } else if (CONFIG_DXVA2) {
2139  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
2140 #else
2141  } else if (CONFIG_DXVA2) {
2142  av_log(ctx, AV_LOG_WARNING,
2143  "WARNING: defaulting child_device_type to AV_HWDEVICE_TYPE_DXVA2 for compatibility "
2144  "with old commandlines. This behaviour will be removed "
2145  "in the future. Please explicitly set device type via \"-init_hw_device\" option.\n");
2146  child_device_type = AV_HWDEVICE_TYPE_DXVA2;
2147  } else if (CONFIG_D3D11VA) {
2148  child_device_type = AV_HWDEVICE_TYPE_D3D11VA;
2149 #endif
2150  } else {
2151  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
2152  return AVERROR(ENOSYS);
2153  }
2154 
2155  child_device_opts = NULL;
2156  switch (child_device_type) {
2157 #if CONFIG_VAAPI
2158  case AV_HWDEVICE_TYPE_VAAPI:
2159  {
2160  // libmfx does not actually implement VAAPI properly, rather it
2161  // depends on the specific behaviour of a matching iHD driver when
2162  // used on recent Intel hardware. Set options to the VAAPI device
2163  // creation so that we should pick a usable setup by default if
2164  // possible, even when multiple devices and drivers are available.
2165  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
2166  av_dict_set(&child_device_opts, "driver", "iHD", 0);
2167  }
2168  break;
2169 #endif
2170 #if CONFIG_D3D11VA
2171  case AV_HWDEVICE_TYPE_D3D11VA:
2172  break;
2173 #endif
2174 #if CONFIG_DXVA2
2175  case AV_HWDEVICE_TYPE_DXVA2:
2176 #if QSV_ONEVPL
2177  {
2178  av_log(ctx, AV_LOG_WARNING,
2179  "d3d11va is not available or child device type is set to dxva2 "
2180  "explicitly for oneVPL.\n");
2181  }
2182 #endif
2183  break;
2184 #endif
2185  default:
2186  {
2187  av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
2188  return AVERROR(ENOSYS);
2189  }
2190  break;
2191  }
2192 
2193  e = av_dict_get(opts, "child_device", NULL, 0);
2194  ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
2195  e ? e->value : NULL, child_device_opts, 0);
2196 
2197  av_dict_free(&child_device_opts);
2198  if (ret < 0)
2199  return ret;
2200 
2201  child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
2202 
2203  impl = choose_implementation(device, child_device_type);
2204 
2205  return qsv_device_derive_from_child(ctx, impl, child_device, 0);
2206 }
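For standalone creation, the options parsed above are supplied through av_hwdevice_ctx_create(): the device string selects the mfxIMPL (for example "hw_any"), while "child_device_type" and "child_device" steer the child context. A minimal sketch; the render node path is purely illustrative:

    #include <libavutil/dict.h>
    #include <libavutil/hwcontext.h>

    /* Hypothetical helper: open a QSV device with an explicitly requested
     * VAAPI child device, mirroring the option handling above. */
    static int open_qsv_device(AVBufferRef **qsv_device_ref)
    {
        AVDictionary *opts = NULL;
        int ret;

        av_dict_set(&opts, "child_device_type", "vaapi", 0);
        av_dict_set(&opts, "child_device", "/dev/dri/renderD128", 0);
        ret = av_hwdevice_ctx_create(qsv_device_ref, AV_HWDEVICE_TYPE_QSV,
                                     "hw_any", opts, 0);
        av_dict_free(&opts);
        return ret;
    }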
2207 
2208 const HWContextType ff_hwcontext_type_qsv = {
2209  .type = AV_HWDEVICE_TYPE_QSV,
2210  .name = "QSV",
2211 
2212  .device_hwctx_size = sizeof(AVQSVDeviceContext),
2213  .device_priv_size = sizeof(QSVDeviceContext),
2214  .frames_hwctx_size = sizeof(AVQSVFramesContext),
2215  .frames_priv_size = sizeof(QSVFramesContext),
2216 
2217  .device_create = qsv_device_create,
2218  .device_derive = qsv_device_derive,
2219  .device_init = qsv_device_init,
2220  .frames_get_constraints = qsv_frames_get_constraints,
2221  .frames_init = qsv_frames_init,
2222  .frames_uninit = qsv_frames_uninit,
2223  .frames_get_buffer = qsv_get_buffer,
2224  .transfer_get_formats = qsv_transfer_get_formats,
2225  .transfer_data_to = qsv_transfer_data_to,
2226  .transfer_data_from = qsv_transfer_data_from,
2227  .map_to = qsv_map_to,
2228  .map_from = qsv_map_from,
2229  .frames_derive_to = qsv_frames_derive_to,
2230  .frames_derive_from = qsv_frames_derive_from,
2231 
2232  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
2233 };