FFmpeg
qsv.c
1 /*
2  * Intel MediaSDK QSV encoder/decoder shared code
3  *
4  * This file is part of FFmpeg.
5  *
6  * FFmpeg is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * FFmpeg is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with FFmpeg; if not, write to the Free Software
18  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19  */
20 
21 #include <mfxvideo.h>
22 #include <mfxjpeg.h>
23 
24 #include <stdio.h>
25 #include <string.h>
26 
27 #include "libavutil/avstring.h"
28 #include "libavutil/common.h"
29 #include "libavutil/error.h"
30 #include "libavutil/hwcontext.h"
31 #include "libavutil/hwcontext_qsv.h"
32 #include "libavutil/imgutils.h"
33 #include "libavutil/avassert.h"
34 
35 #include "avcodec.h"
36 #include "qsv_internal.h"
37 
38 #define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))
39 #define QSV_HAVE_USER_PLUGIN !QSV_ONEVPL
40 #define QSV_HAVE_AUDIO !QSV_ONEVPL
41 
42 #include "mfxvp8.h"
43 
44 #if QSV_HAVE_USER_PLUGIN
45 #include <mfxplugin.h>
46 #endif
47 
48 #if QSV_ONEVPL
49 #include <mfxdispatcher.h>
50 #else
51 #define MFXUnload(a) do { } while(0)
52 #endif
53 
54 int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id)
55 {
56  switch (codec_id) {
57  case AV_CODEC_ID_H264:
58  return MFX_CODEC_AVC;
59  case AV_CODEC_ID_HEVC:
60  return MFX_CODEC_HEVC;
61  case AV_CODEC_ID_MPEG1VIDEO:
62  case AV_CODEC_ID_MPEG2VIDEO:
63  return MFX_CODEC_MPEG2;
64  case AV_CODEC_ID_VC1:
65  return MFX_CODEC_VC1;
66  case AV_CODEC_ID_VP8:
67  return MFX_CODEC_VP8;
68  case AV_CODEC_ID_MJPEG:
69  return MFX_CODEC_JPEG;
70  case AV_CODEC_ID_VP9:
71  return MFX_CODEC_VP9;
72 #if QSV_VERSION_ATLEAST(1, 34)
73  case AV_CODEC_ID_AV1:
74  return MFX_CODEC_AV1;
75 #endif
76 
77  default:
78  break;
79  }
80 
81  return AVERROR(ENOSYS);
82 }
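/*
 * Illustrative sketch (not part of the original file): a QSV decoder or
 * encoder would typically translate the AVCodecID once at init time and
 * store the result in its mfxVideoParam, e.g.
 *
 *     int codec = ff_qsv_codec_id_to_mfx(avctx->codec_id);
 *     if (codec < 0)
 *         return codec;            // AVERROR(ENOSYS) for unsupported codecs
 *     param.mfx.CodecId = codec;   // "param" is an mfxVideoParam, assumed here
 */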
83 
84 static const struct {
85  int mfx_iopattern;
86  const char *desc;
87 } qsv_iopatterns[] = {
88  {MFX_IOPATTERN_IN_VIDEO_MEMORY, "input is video memory surface" },
89  {MFX_IOPATTERN_IN_SYSTEM_MEMORY, "input is system memory surface" },
90 #if QSV_HAVE_OPAQUE
91  {MFX_IOPATTERN_IN_OPAQUE_MEMORY, "input is opaque memory surface" },
92 #endif
93  {MFX_IOPATTERN_OUT_VIDEO_MEMORY, "output is video memory surface" },
94  {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
95 #if QSV_HAVE_OPAQUE
96  {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
97 #endif
98 };
99 
100 int ff_qsv_print_iopattern(void *log_ctx, int mfx_iopattern,
101  const char *extra_string)
102 {
103  const char *desc = NULL;
104 
105  for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
106  if (qsv_iopatterns[i].mfx_iopattern == mfx_iopattern) {
107  desc = qsv_iopatterns[i].desc;
108  }
109  }
110  if (!desc)
111  desc = "unknown iopattern";
112 
113  av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
114  return 0;
115 }
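/*
 * Illustrative sketch (not part of the original file): this helper is meant
 * for verbose logging of an mfxVideoParam's IOPattern, e.g.
 *
 *     ff_qsv_print_iopattern(avctx, param.IOPattern, "Decoder IOPattern");
 *
 * IOPattern is a bitmask, so when several bits are set only one of the
 * matching descriptions is printed.
 */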
116 
117 static const struct {
118  mfxStatus mfxerr;
119  int averr;
120  const char *desc;
121 } qsv_errors[] = {
122  { MFX_ERR_NONE, 0, "success" },
123  { MFX_ERR_UNKNOWN, AVERROR_UNKNOWN, "unknown error" },
124  { MFX_ERR_NULL_PTR, AVERROR(EINVAL), "NULL pointer" },
125  { MFX_ERR_UNSUPPORTED, AVERROR(ENOSYS), "unsupported" },
126  { MFX_ERR_MEMORY_ALLOC, AVERROR(ENOMEM), "failed to allocate memory" },
127  { MFX_ERR_NOT_ENOUGH_BUFFER, AVERROR(ENOMEM), "insufficient input/output buffer" },
128  { MFX_ERR_INVALID_HANDLE, AVERROR(EINVAL), "invalid handle" },
129  { MFX_ERR_LOCK_MEMORY, AVERROR(EIO), "failed to lock the memory block" },
130  { MFX_ERR_NOT_INITIALIZED, AVERROR_BUG, "not initialized" },
131  { MFX_ERR_NOT_FOUND, AVERROR(ENOSYS), "specified object was not found" },
132  /* the following 3 errors should always be handled explicitly, so those "mappings"
133  * are for completeness only */
134  { MFX_ERR_MORE_DATA, AVERROR_UNKNOWN, "expect more data at input" },
135  { MFX_ERR_MORE_SURFACE, AVERROR_UNKNOWN, "expect more surface at output" },
136  { MFX_ERR_MORE_BITSTREAM, AVERROR_UNKNOWN, "expect more bitstream at output" },
137  { MFX_ERR_ABORTED, AVERROR_UNKNOWN, "operation aborted" },
138  { MFX_ERR_DEVICE_LOST, AVERROR(EIO), "device lost" },
139  { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters" },
140  { MFX_ERR_INVALID_VIDEO_PARAM, AVERROR(EINVAL), "invalid video parameters" },
141  { MFX_ERR_UNDEFINED_BEHAVIOR, AVERROR_BUG, "undefined behavior" },
142  { MFX_ERR_DEVICE_FAILED, AVERROR(EIO), "device failed" },
143 #if QSV_HAVE_AUDIO
144  { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters" },
145  { MFX_ERR_INVALID_AUDIO_PARAM, AVERROR(EINVAL), "invalid audio parameters" },
146 #endif
147  { MFX_ERR_GPU_HANG, AVERROR(EIO), "GPU Hang" },
148  { MFX_ERR_REALLOC_SURFACE, AVERROR_UNKNOWN, "need bigger surface for output" },
149 
150  { MFX_WRN_IN_EXECUTION, 0, "operation in execution" },
151  { MFX_WRN_DEVICE_BUSY, 0, "device busy" },
152  { MFX_WRN_VIDEO_PARAM_CHANGED, 0, "video parameters changed" },
153  { MFX_WRN_PARTIAL_ACCELERATION, 0, "partial acceleration" },
154  { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0, "incompatible video parameters" },
155  { MFX_WRN_VALUE_NOT_CHANGED, 0, "value is saturated" },
156  { MFX_WRN_OUT_OF_RANGE, 0, "value out of range" },
157  { MFX_WRN_FILTER_SKIPPED, 0, "filter skipped" },
158 #if QSV_HAVE_AUDIO
159  { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0, "incompatible audio parameters" },
160 #endif
161 
162 #if QSV_VERSION_ATLEAST(1, 31)
163  { MFX_ERR_NONE_PARTIAL_OUTPUT, 0, "partial output" },
164 #endif
165 };
166 
167 /**
168  * Convert a libmfx error code into an FFmpeg error code.
169  */
170 static int qsv_map_error(mfxStatus mfx_err, const char **desc)
171 {
172  int i;
173  for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
174  if (qsv_errors[i].mfxerr == mfx_err) {
175  if (desc)
176  *desc = qsv_errors[i].desc;
177  return qsv_errors[i].averr;
178  }
179  }
180  if (desc)
181  *desc = "unknown error";
182  return AVERROR_UNKNOWN;
183 }
184 
185 int ff_qsv_print_error(void *log_ctx, mfxStatus err,
186  const char *error_string)
187 {
188  const char *desc;
189  int ret = qsv_map_error(err, &desc);
190  av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
191  return ret;
192 }
193 
194 int ff_qsv_print_warning(void *log_ctx, mfxStatus err,
195  const char *warning_string)
196 {
197  const char *desc;
198  int ret = qsv_map_error(err, &desc);
199  av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
200  return ret;
201 }
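/*
 * Illustrative sketch (not part of the original file): these two wrappers
 * support the usual pattern for handling libmfx return codes -- negative
 * mfxStatus values are turned into FFmpeg errors, positive ones are only
 * warnings:
 *
 *     sts = MFXVideoDECODE_Init(session, &param);
 *     if (sts < 0)
 *         return ff_qsv_print_error(avctx, sts, "Error initializing the decoder");
 *     else if (sts > 0)
 *         ff_qsv_print_warning(avctx, sts, "Warning in decoder initialization");
 */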
202 
203 enum AVPixelFormat ff_qsv_map_fourcc(uint32_t fourcc)
204 {
205  switch (fourcc) {
206  case MFX_FOURCC_NV12: return AV_PIX_FMT_NV12;
207  case MFX_FOURCC_P010: return AV_PIX_FMT_P010;
208  case MFX_FOURCC_P8: return AV_PIX_FMT_PAL8;
209  case MFX_FOURCC_A2RGB10: return AV_PIX_FMT_X2RGB10;
210  case MFX_FOURCC_RGB4: return AV_PIX_FMT_BGRA;
211 #if CONFIG_VAAPI
212  case MFX_FOURCC_YUY2: return AV_PIX_FMT_YUYV422;
213  case MFX_FOURCC_Y210: return AV_PIX_FMT_Y210;
214  case MFX_FOURCC_AYUV: return AV_PIX_FMT_VUYX;
215  case MFX_FOURCC_Y410: return AV_PIX_FMT_XV30;
216 #if QSV_VERSION_ATLEAST(1, 31)
217  case MFX_FOURCC_P016: return AV_PIX_FMT_P012;
218  case MFX_FOURCC_Y216: return AV_PIX_FMT_Y212;
219  case MFX_FOURCC_Y416: return AV_PIX_FMT_XV36;
220 #endif
221 #endif
222  }
223  return AV_PIX_FMT_NONE;
224 }
225 
226 int ff_qsv_map_pixfmt(enum AVPixelFormat format, uint32_t *fourcc, uint16_t *shift)
227 {
228  switch (format) {
229  case AV_PIX_FMT_YUV420P:
230  case AV_PIX_FMT_YUVJ420P:
231  case AV_PIX_FMT_NV12:
232  *fourcc = MFX_FOURCC_NV12;
233  *shift = 0;
234  return AV_PIX_FMT_NV12;
235  case AV_PIX_FMT_YUV420P10:
236  case AV_PIX_FMT_P010:
237  *fourcc = MFX_FOURCC_P010;
238  *shift = 1;
239  return AV_PIX_FMT_P010;
240  case AV_PIX_FMT_X2RGB10:
241  *fourcc = MFX_FOURCC_A2RGB10;
242  *shift = 1;
243  return AV_PIX_FMT_X2RGB10;
244  case AV_PIX_FMT_BGRA:
245  *fourcc = MFX_FOURCC_RGB4;
246  *shift = 0;
247  return AV_PIX_FMT_BGRA;
248 #if CONFIG_VAAPI
249  case AV_PIX_FMT_YUV422P:
250  case AV_PIX_FMT_YUYV422:
251  *fourcc = MFX_FOURCC_YUY2;
252  *shift = 0;
253  return AV_PIX_FMT_YUYV422;
254  case AV_PIX_FMT_YUV422P10:
255  case AV_PIX_FMT_Y210:
256  *fourcc = MFX_FOURCC_Y210;
257  *shift = 1;
258  return AV_PIX_FMT_Y210;
259  case AV_PIX_FMT_VUYX:
260  *fourcc = MFX_FOURCC_AYUV;
261  *shift = 0;
262  return AV_PIX_FMT_VUYX;
263  case AV_PIX_FMT_XV30:
264  *fourcc = MFX_FOURCC_Y410;
265  *shift = 0;
266  return AV_PIX_FMT_XV30;
267 #if QSV_VERSION_ATLEAST(1, 31)
268  case AV_PIX_FMT_P012:
269  *fourcc = MFX_FOURCC_P016;
270  *shift = 1;
271  return AV_PIX_FMT_P012;
272  case AV_PIX_FMT_Y212:
273  *fourcc = MFX_FOURCC_Y216;
274  *shift = 1;
275  return AV_PIX_FMT_Y212;
276  case AV_PIX_FMT_XV36:
277  *fourcc = MFX_FOURCC_Y416;
278  *shift = 1;
279  return AV_PIX_FMT_XV36;
280 #endif
281 #endif
282  default:
283  return AVERROR(ENOSYS);
284  }
285 }
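/*
 * Illustrative sketch (not part of the original file): this mapping is
 * typically used to fill an mfxFrameInfo from an AVPixelFormat, e.g.
 *
 *     uint32_t fourcc;
 *     uint16_t shift;
 *     int fmt = ff_qsv_map_pixfmt(avctx->sw_pix_fmt, &fourcc, &shift);
 *     if (fmt < 0)
 *         return fmt;
 *     info.FourCC = fourcc;   // "info" is an mfxFrameInfo, assumed here
 *     info.Shift  = shift;
 *
 * The return value is the software pixel format the surfaces actually use
 * (e.g. AV_PIX_FMT_YUV420P input maps to NV12 surfaces).
 */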
286 
287 int ff_qsv_map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
288 {
289  switch (frame->format) {
290  case AV_PIX_FMT_NV12:
291  case AV_PIX_FMT_P010:
292  case AV_PIX_FMT_P012:
293  surface->Data.Y = frame->data[0];
294  surface->Data.UV = frame->data[1];
295  /* The SDK checks Data.V when using system memory for VP9 encoding */
296  surface->Data.V = surface->Data.UV + 1;
297  break;
299  case AV_PIX_FMT_BGRA:
300  surface->Data.B = frame->data[0];
301  surface->Data.G = frame->data[0] + 1;
302  surface->Data.R = frame->data[0] + 2;
303  surface->Data.A = frame->data[0] + 3;
304  break;
305  case AV_PIX_FMT_YUYV422:
306  surface->Data.Y = frame->data[0];
307  surface->Data.U = frame->data[0] + 1;
308  surface->Data.V = frame->data[0] + 3;
309  break;
310 
311  case AV_PIX_FMT_Y210:
312  case AV_PIX_FMT_Y212:
313  surface->Data.Y16 = (mfxU16 *)frame->data[0];
314  surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
315  surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
316  break;
317 
318  case AV_PIX_FMT_VUYX:
319  surface->Data.V = frame->data[0];
320  surface->Data.U = frame->data[0] + 1;
321  surface->Data.Y = frame->data[0] + 2;
322  // Only set Data.A to a valid address, the SDK doesn't
323  // use the value from the frame.
324  surface->Data.A = frame->data[0] + 3;
325  break;
326 
327  case AV_PIX_FMT_XV30:
328  surface->Data.U = frame->data[0];
329  break;
330 
331  case AV_PIX_FMT_XV36:
332  surface->Data.U = frame->data[0];
333  surface->Data.Y = frame->data[0] + 2;
334  surface->Data.V = frame->data[0] + 4;
335  // Only set Data.A to a valid address, the SDK doesn't
336  // use the value from the frame.
337  surface->Data.A = frame->data[0] + 6;
338  break;
339 
340  default:
341  return AVERROR(ENOSYS);
342  }
343  surface->Data.PitchLow = frame->linesize[0];
344 
345  return 0;
346 }
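/*
 * Illustrative sketch (not part of the original file): when raw frames are
 * passed through system memory, a caller wraps the AVFrame planes in an
 * mfxFrameSurface1 before handing it to the runtime, roughly:
 *
 *     mfxFrameSurface1 surf = { .Info = param.mfx.FrameInfo };
 *     ret = ff_qsv_map_frame_to_surface(frame, &surf);
 *     if (ret < 0)
 *         return ret;
 *
 * Only the plane pointers and the pitch are filled here; Info, timestamps
 * and the rest are the caller's responsibility.
 */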
347 
348 int ff_qsv_find_surface_idx(QSVFramesContext *ctx, QSVFrame *frame)
349 {
350  int i;
351  for (i = 0; i < ctx->nb_mids; i++) {
352  QSVMid *mid = &ctx->mids[i];
353  mfxHDLPair *pair = (mfxHDLPair*)frame->surface.Data.MemId;
354  if ((mid->handle_pair->first == pair->first) &&
355  (mid->handle_pair->second == pair->second))
356  return i;
357  }
358  return AVERROR_BUG;
359 }
360 
361 enum AVFieldOrder ff_qsv_map_picstruct(int mfx_pic_struct)
362 {
363  enum AVFieldOrder field = AV_FIELD_UNKNOWN;
364  switch (mfx_pic_struct & 0xF) {
365  case MFX_PICSTRUCT_PROGRESSIVE:
366  field = AV_FIELD_PROGRESSIVE;
367  break;
368  case MFX_PICSTRUCT_FIELD_TFF:
369  field = AV_FIELD_TT;
370  break;
371  case MFX_PICSTRUCT_FIELD_BFF:
372  field = AV_FIELD_BB;
373  break;
374  }
375 
376  return field;
377 }
378 
379 enum AVPictureType ff_qsv_map_pictype(int mfx_pic_type)
380 {
381  enum AVPictureType type;
382  switch (mfx_pic_type & 0x7) {
383  case MFX_FRAMETYPE_I:
384  if (mfx_pic_type & MFX_FRAMETYPE_S)
385  type = AV_PICTURE_TYPE_SI;
386  else
387  type = AV_PICTURE_TYPE_I;
388  break;
389  case MFX_FRAMETYPE_B:
390  type = AV_PICTURE_TYPE_B;
391  break;
392  case MFX_FRAMETYPE_P:
393  if (mfx_pic_type & MFX_FRAMETYPE_S)
394  type = AV_PICTURE_TYPE_SP;
395  else
396  type = AV_PICTURE_TYPE_P;
397  break;
398  case MFX_FRAMETYPE_UNKNOWN:
399  type = AV_PICTURE_TYPE_NONE;
400  break;
401  default:
402  av_assert0(0);
403  }
404 
405  return type;
406 }
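/*
 * Illustrative sketch (not part of the original file): decoders use the two
 * helpers above to translate libmfx picture metadata into FFmpeg fields,
 * for example:
 *
 *     frame->pict_type   = ff_qsv_map_pictype(ext_info.FrameType);
 *     avctx->field_order = ff_qsv_map_picstruct(surf->Info.PicStruct);
 *
 * "ext_info" stands for an mfxExtDecodedFrameInfo attached to the output
 * surface and is only an assumption of this sketch.
 */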
407 
408 static int qsv_load_plugins(mfxSession session, const char *load_plugins,
409  void *logctx)
410 {
411 #if QSV_HAVE_USER_PLUGIN
412  if (!load_plugins || !*load_plugins)
413  return 0;
414 
415  while (*load_plugins) {
416  mfxPluginUID uid;
417  mfxStatus ret;
418  int i, err = 0;
419 
420  char *plugin = av_get_token(&load_plugins, ":");
421  if (!plugin)
422  return AVERROR(ENOMEM);
423  if (strlen(plugin) != 2 * sizeof(uid.Data)) {
424  av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID length\n");
425  err = AVERROR(EINVAL);
426  goto load_plugin_fail;
427  }
428 
429  for (i = 0; i < sizeof(uid.Data); i++) {
430  err = sscanf(plugin + 2 * i, "%2hhx", uid.Data + i);
431  if (err != 1) {
432  av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID\n");
433  err = AVERROR(EINVAL);
434  goto load_plugin_fail;
435  }
436 
437  }
438 
439  ret = MFXVideoUSER_Load(session, &uid, 1);
440  if (ret < 0) {
441  char errorbuf[128];
442  snprintf(errorbuf, sizeof(errorbuf),
443  "Could not load the requested plugin '%s'", plugin);
444  err = ff_qsv_print_error(logctx, ret, errorbuf);
445  goto load_plugin_fail;
446  }
447 
448  if (*load_plugins)
449  load_plugins++;
450 load_plugin_fail:
451  av_freep(&plugin);
452  if (err < 0)
453  return err;
454  }
455 #endif
456 
457  return 0;
458 
459 }
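/*
 * Illustrative note (not part of the original file): "load_plugins" is a
 * ':'-separated list of plugin UIDs, each spelled as 32 hexadecimal digits
 * (the 16 bytes of mfxPluginUID.Data). A hypothetical call could look like:
 *
 *     qsv_load_plugins(session, "33a61c0b4c27454ca8d85dde757c6f8e", avctx);
 *
 * The UID above is only an example of a string with the right length, not a
 * value defined by this file. With oneVPL (QSV_ONEVPL) the function is a
 * no-op because the user-plugin API no longer exists.
 */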
460 
461 //This code is only required for Linux since a display handle is required.
462 //For Windows the session is complete and ready to use.
463 
464 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
465 static int ff_qsv_set_display_handle(AVCodecContext *avctx, QSVSession *qs)
466 {
467  AVDictionary *child_device_opts = NULL;
468  AVVAAPIDeviceContext *hwctx;
469  int ret;
470 
471  av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
472  av_dict_set(&child_device_opts, "driver", "iHD", 0);
473 
474  ret = av_hwdevice_ctx_create(&qs->va_device_ref, AV_HWDEVICE_TYPE_VAAPI, NULL, child_device_opts, 0);
475  av_dict_free(&child_device_opts);
476  if (ret < 0) {
477  av_log(avctx, AV_LOG_ERROR, "Failed to create a VAAPI device.\n");
478  return ret;
479  } else {
480  qs->va_device_ctx = (AVHWDeviceContext*)qs->va_device_ref->data;
481  hwctx = qs->va_device_ctx->hwctx;
482 
483  ret = MFXVideoCORE_SetHandle(qs->session,
484  (mfxHandleType)MFX_HANDLE_VA_DISPLAY, (mfxHDL)hwctx->display);
485  if (ret < 0) {
486  return ff_qsv_print_error(avctx, ret, "Error setting the display handle");
487  }
488  }
489 
490  return 0;
491 }
492 #endif //AVCODEC_QSV_LINUX_SESSION_HANDLE
493 
494 #if QSV_ONEVPL
495 static int qsv_new_mfx_loader(AVCodecContext *avctx,
496  mfxIMPL implementation,
497  mfxVersion *pver,
498  void **ploader)
499 {
500  mfxStatus sts;
501  mfxLoader loader = NULL;
502  mfxConfig cfg;
503  mfxVariant impl_value;
504 
505  loader = MFXLoad();
506  if (!loader) {
507  av_log(avctx, AV_LOG_ERROR, "Error creating a MFX loader\n");
508  goto fail;
509  }
510 
511  /* Create configurations for implementation */
512  cfg = MFXCreateConfig(loader);
513  if (!cfg) {
514  av_log(avctx, AV_LOG_ERROR, "Error creating an MFX configuration\n");
515  goto fail;
516  }
517 
518  impl_value.Type = MFX_VARIANT_TYPE_U32;
519  impl_value.Data.U32 = (implementation == MFX_IMPL_SOFTWARE) ?
520  MFX_IMPL_TYPE_SOFTWARE : MFX_IMPL_TYPE_HARDWARE;
521  sts = MFXSetConfigFilterProperty(cfg,
522  (const mfxU8 *)"mfxImplDescription.Impl", impl_value);
523  if (sts != MFX_ERR_NONE) {
524  av_log(avctx, AV_LOG_ERROR, "Error adding a MFX configuration "
525  "property: %d\n", sts);
526  goto fail;
527  }
528 
529  impl_value.Type = MFX_VARIANT_TYPE_U32;
530  impl_value.Data.U32 = pver->Version;
531  sts = MFXSetConfigFilterProperty(cfg,
532  (const mfxU8 *)"mfxImplDescription.ApiVersion.Version",
533  impl_value);
534  if (sts != MFX_ERR_NONE) {
535  av_log(avctx, AV_LOG_ERROR, "Error adding a MFX configuration "
536  "property: %d\n", sts);
537  goto fail;
538  }
539 
540  *ploader = loader;
541 
542  return 0;
543 
544 fail:
545  if (loader)
546  MFXUnload(loader);
547 
548  *ploader = NULL;
549  return AVERROR_UNKNOWN;
550 }
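/*
 * Illustrative note (not part of the original file): with oneVPL the
 * dispatcher enumerates all available implementations, and the two
 * MFXSetConfigFilterProperty() calls above narrow the candidates to the
 * requested implementation type ("mfxImplDescription.Impl") and to a
 * minimum API version ("mfxImplDescription.ApiVersion.Version"). Further
 * filter properties could be attached to the same mfxConfig in the same way.
 */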
551 
552 static int qsv_create_mfx_session_from_loader(void *ctx, mfxLoader loader, mfxSession *psession)
553 {
554  mfxStatus sts;
555  mfxSession session = NULL;
556  uint32_t impl_idx = 0;
557 
558  while (1) {
559  /* Enumerate all implementations */
560  mfxImplDescription *impl_desc;
561 
562  sts = MFXEnumImplementations(loader, impl_idx,
563  MFX_IMPLCAPS_IMPLDESCSTRUCTURE,
564  (mfxHDL *)&impl_desc);
565  /* Failed to find an available implementation */
566  if (sts == MFX_ERR_NOT_FOUND)
567  break;
568  else if (sts != MFX_ERR_NONE) {
569  impl_idx++;
570  continue;
571  }
572 
573  sts = MFXCreateSession(loader, impl_idx, &session);
574  MFXDispReleaseImplDescription(loader, impl_desc);
575  if (sts == MFX_ERR_NONE)
576  break;
577 
578  impl_idx++;
579  }
580 
581  if (sts != MFX_ERR_NONE) {
582  av_log(ctx, AV_LOG_ERROR, "Error creating a MFX session: %d.\n", sts);
583  goto fail;
584  }
585 
586  *psession = session;
587 
588  return 0;
589 
590 fail:
591  if (session)
592  MFXClose(session);
593 
594  *psession = NULL;
595  return AVERROR_UNKNOWN;
596 }
597 
598 static int qsv_create_mfx_session(AVCodecContext *avctx,
599  mfxIMPL implementation,
600  mfxVersion *pver,
601  int gpu_copy,
602  mfxSession *psession,
603  void **ploader)
604 {
605  mfxLoader loader = NULL;
606 
607  /* Don't create a new MFX loader if the input loader is valid */
608  if (*ploader == NULL) {
609  av_log(avctx, AV_LOG_VERBOSE,
610  "Use Intel(R) oneVPL to create MFX session, the required "
611  "implementation version is %d.%d\n",
612  pver->Major, pver->Minor);
613 
614  if (qsv_new_mfx_loader(avctx, implementation, pver, (void **)&loader))
615  goto fail;
616 
617  av_assert0(loader);
618  } else {
619  av_log(avctx, AV_LOG_VERBOSE,
620  "Use Intel(R) oneVPL to create MFX session with the specified MFX loader\n");
621 
622  loader = *ploader;
623  }
624 
625  if (qsv_create_mfx_session_from_loader(avctx, loader, psession))
626  goto fail;
627 
628  if (!*ploader)
629  *ploader = loader;
630 
631  return 0;
632 
633 fail:
634  if (!*ploader && loader)
635  MFXUnload(loader);
636 
637  return AVERROR_UNKNOWN;
638 }
639 
640 #else
641 
642 static int qsv_create_mfx_session(AVCodecContext *avctx,
643  mfxIMPL implementation,
644  mfxVersion *pver,
645  int gpu_copy,
646  mfxSession *psession,
647  void **ploader)
648 {
649  mfxInitParam init_par = { MFX_IMPL_AUTO_ANY };
650  mfxSession session = NULL;
651  mfxStatus sts;
652 
653  av_log(avctx, AV_LOG_VERBOSE,
654  "Use Intel(R) Media SDK to create MFX session, the required "
655  "implementation version is %d.%d\n",
656  pver->Major, pver->Minor);
657 
658  *psession = NULL;
659  *ploader = NULL;
660 
661  init_par.GPUCopy = gpu_copy;
662  init_par.Implementation = implementation;
663  init_par.Version = *pver;
664  sts = MFXInitEx(init_par, &session);
665  if (sts < 0)
666  return ff_qsv_print_error(avctx, sts,
667  "Error initializing a MFX session");
668  else if (sts > 0) {
669  ff_qsv_print_warning(avctx, sts,
670  "Warning in MFX initialization");
671  return AVERROR_UNKNOWN;
672  }
673 
674  *psession = session;
675 
676  return 0;
677 }
678 
679 #endif
680 
681 int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
682  const char *load_plugins, int gpu_copy)
683 {
684 #if CONFIG_D3D11VA
685  mfxIMPL impl = MFX_IMPL_AUTO_ANY | MFX_IMPL_VIA_D3D11;
686 #else
687  mfxIMPL impl = MFX_IMPL_AUTO_ANY;
688 #endif
689  mfxVersion ver = { { QSV_VERSION_MINOR, QSV_VERSION_MAJOR } };
690 
691  const char *desc;
692  int ret = qsv_create_mfx_session(avctx, impl, &ver, gpu_copy, &qs->session,
693  &qs->loader);
694  if (ret)
695  return ret;
696 
697 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
698  ret = ff_qsv_set_display_handle(avctx, qs);
699  if (ret < 0)
700  return ret;
701 #endif
702 
703  ret = qsv_load_plugins(qs->session, load_plugins, avctx);
704  if (ret < 0) {
705  av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
706  return ret;
707  }
708 
709  ret = MFXQueryIMPL(qs->session, &impl);
710  if (ret != MFX_ERR_NONE)
711  return ff_qsv_print_error(avctx, ret,
712  "Error querying the session attributes");
713 
714  switch (MFX_IMPL_BASETYPE(impl)) {
715  case MFX_IMPL_SOFTWARE:
716  desc = "software";
717  break;
718  case MFX_IMPL_HARDWARE:
719  case MFX_IMPL_HARDWARE2:
720  case MFX_IMPL_HARDWARE3:
721  case MFX_IMPL_HARDWARE4:
722  desc = "hardware accelerated";
723  break;
724  default:
725  desc = "unknown";
726  }
727 
728  av_log(avctx, AV_LOG_VERBOSE,
729  "Initialized an internal MFX session using %s implementation\n",
730  desc);
731 
732  return 0;
733 }
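/*
 * Illustrative sketch (not part of the original file): callers that do not
 * get a session from an AVHWDeviceContext pair this function with
 * ff_qsv_close_internal_session(), roughly:
 *
 *     QSVSession qs = { 0 };
 *     ret = ff_qsv_init_internal_session(avctx, &qs, NULL, 0);
 *     if (ret < 0)
 *         return ret;
 *     // ... use qs.session ...
 *     ff_qsv_close_internal_session(&qs);
 */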
734 
735 static void mids_buf_free(void *opaque, uint8_t *data)
736 {
737  AVBufferRef *hw_frames_ref = opaque;
738  av_buffer_unref(&hw_frames_ref);
739  av_freep(&data);
740 }
741 
742 static AVBufferRef *qsv_create_mids(AVBufferRef *hw_frames_ref)
743 {
744  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
745  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
746  int nb_surfaces = frames_hwctx->nb_surfaces;
747 
748  AVBufferRef *mids_buf, *hw_frames_ref1;
749  QSVMid *mids;
750  int i;
751 
752  hw_frames_ref1 = av_buffer_ref(hw_frames_ref);
753  if (!hw_frames_ref1)
754  return NULL;
755 
756  mids = av_calloc(nb_surfaces, sizeof(*mids));
757  if (!mids) {
758  av_buffer_unref(&hw_frames_ref1);
759  return NULL;
760  }
761 
762  mids_buf = av_buffer_create((uint8_t*)mids, nb_surfaces * sizeof(*mids),
763  mids_buf_free, hw_frames_ref1, 0);
764  if (!mids_buf) {
765  av_buffer_unref(&hw_frames_ref1);
766  av_freep(&mids);
767  return NULL;
768  }
769 
770  for (i = 0; i < nb_surfaces; i++) {
771  QSVMid *mid = &mids[i];
772  mid->handle_pair = (mfxHDLPair*)frames_hwctx->surfaces[i].Data.MemId;
773  mid->hw_frames_ref = hw_frames_ref1;
774  }
775 
776  return mids_buf;
777 }
778 
779 static int qsv_setup_mids(mfxFrameAllocResponse *resp, AVBufferRef *hw_frames_ref,
780  AVBufferRef *mids_buf)
781 {
782  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)hw_frames_ref->data;
783  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
784  QSVMid *mids = (QSVMid*)mids_buf->data;
785  int nb_surfaces = frames_hwctx->nb_surfaces;
786  int i;
787 
788  // the allocated array is two entries larger than the number of
789  // surfaces; the two extra entries store references to the frames
790  // context and to the QSVMid array
791  resp->mids = av_calloc(nb_surfaces + 2, sizeof(*resp->mids));
792  if (!resp->mids)
793  return AVERROR(ENOMEM);
794 
795  for (i = 0; i < nb_surfaces; i++)
796  resp->mids[i] = &mids[i];
797  resp->NumFrameActual = nb_surfaces;
798 
799  resp->mids[resp->NumFrameActual] = (mfxMemId)av_buffer_ref(hw_frames_ref);
800  if (!resp->mids[resp->NumFrameActual]) {
801  av_freep(&resp->mids);
802  return AVERROR(ENOMEM);
803  }
804 
805  resp->mids[resp->NumFrameActual + 1] = av_buffer_ref(mids_buf);
806  if (!resp->mids[resp->NumFrameActual + 1]) {
807  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
808  av_freep(&resp->mids);
809  return AVERROR(ENOMEM);
810  }
811 
812  return 0;
813 }
814 
815 static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
816  mfxFrameAllocResponse *resp)
817 {
818  QSVFramesContext *ctx = pthis;
819  int ret;
820 
821  /* this should only be called from an encoder or decoder and
822  * only allocates video memory frames */
823  if (!(req->Type & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET |
824  MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)) ||
825  !(req->Type & (MFX_MEMTYPE_FROM_DECODE | MFX_MEMTYPE_FROM_ENCODE)))
826  return MFX_ERR_UNSUPPORTED;
827 
828  if (req->Type & MFX_MEMTYPE_EXTERNAL_FRAME) {
829  /* external frames -- fill from the caller-supplied frames context */
830  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
831  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
832  mfxFrameInfo *i = &req->Info;
833  mfxFrameInfo *i1 = &frames_hwctx->surfaces[0].Info;
834 
835  if (i->Width > i1->Width || i->Height > i1->Height ||
836  i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
837  av_log(ctx->logctx, AV_LOG_ERROR, "Mismatching surface properties in an "
838  "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
839  i->Width, i->Height, i->FourCC, i->ChromaFormat,
840  i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
841  return MFX_ERR_UNSUPPORTED;
842  }
843 
844  ret = qsv_setup_mids(resp, ctx->hw_frames_ctx, ctx->mids_buf);
845  if (ret < 0) {
846  av_log(ctx->logctx, AV_LOG_ERROR,
847  "Error filling an external frame allocation request\n");
848  return MFX_ERR_MEMORY_ALLOC;
849  }
850  } else if (req->Type & MFX_MEMTYPE_INTERNAL_FRAME) {
851  /* internal frames -- allocate a new hw frames context */
852  AVHWFramesContext *ext_frames_ctx = (AVHWFramesContext*)ctx->hw_frames_ctx->data;
853  mfxFrameInfo *i = &req->Info;
854 
855  AVBufferRef *frames_ref, *mids_buf;
856  AVHWFramesContext *frames_ctx;
857  AVQSVFramesContext *frames_hwctx;
858 
859  frames_ref = av_hwframe_ctx_alloc(ext_frames_ctx->device_ref);
860  if (!frames_ref)
861  return MFX_ERR_MEMORY_ALLOC;
862 
863  frames_ctx = (AVHWFramesContext*)frames_ref->data;
864  frames_hwctx = frames_ctx->hwctx;
865 
866  frames_ctx->format = AV_PIX_FMT_QSV;
867  frames_ctx->sw_format = ff_qsv_map_fourcc(i->FourCC);
868  frames_ctx->width = i->Width;
869  frames_ctx->height = i->Height;
870  frames_ctx->initial_pool_size = req->NumFrameSuggested;
871 
872  frames_hwctx->frame_type = req->Type;
873 
874  ret = av_hwframe_ctx_init(frames_ref);
875  if (ret < 0) {
876  av_log(ctx->logctx, AV_LOG_ERROR,
877  "Error initializing a frames context for an internal frame "
878  "allocation request\n");
879  av_buffer_unref(&frames_ref);
880  return MFX_ERR_MEMORY_ALLOC;
881  }
882 
883  mids_buf = qsv_create_mids(frames_ref);
884  if (!mids_buf) {
885  av_buffer_unref(&frames_ref);
886  return MFX_ERR_MEMORY_ALLOC;
887  }
888 
889  ret = qsv_setup_mids(resp, frames_ref, mids_buf);
890  av_buffer_unref(&mids_buf);
891  av_buffer_unref(&frames_ref);
892  if (ret < 0) {
893  av_log(ctx->logctx, AV_LOG_ERROR,
894  "Error filling an internal frame allocation request\n");
895  return MFX_ERR_MEMORY_ALLOC;
896  }
897  } else {
898  return MFX_ERR_UNSUPPORTED;
899  }
900 
901  return MFX_ERR_NONE;
902 }
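/*
 * Illustrative note (not part of the original file): this allocator callback
 * serves two kinds of requests. External requests (MFX_MEMTYPE_EXTERNAL_FRAME)
 * are answered from the caller-supplied AVHWFramesContext without creating
 * new surfaces, while internal requests (MFX_MEMTYPE_INTERNAL_FRAME) allocate
 * a private frames pool on the same device. Either way the mids array carries
 * two hidden extra entries that keep the underlying buffers alive until
 * qsv_frame_free() releases them.
 */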
903 
904 static mfxStatus qsv_frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
905 {
906  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual]);
907  av_buffer_unref((AVBufferRef**)&resp->mids[resp->NumFrameActual + 1]);
908  av_freep(&resp->mids);
909  return MFX_ERR_NONE;
910 }
911 
912 static mfxStatus qsv_frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
913 {
914  QSVMid *qsv_mid = mid;
915  AVHWFramesContext *hw_frames_ctx = (AVHWFramesContext*)qsv_mid->hw_frames_ref->data;
916  AVQSVFramesContext *hw_frames_hwctx = hw_frames_ctx->hwctx;
917  int ret;
918 
919  if (qsv_mid->locked_frame)
920  return MFX_ERR_UNDEFINED_BEHAVIOR;
921 
922  /* Allocate a system memory frame that will hold the mapped data. */
923  qsv_mid->locked_frame = av_frame_alloc();
924  if (!qsv_mid->locked_frame)
925  return MFX_ERR_MEMORY_ALLOC;
926  qsv_mid->locked_frame->format = hw_frames_ctx->sw_format;
927 
928  /* wrap the provided handle in a hwaccel AVFrame */
929  qsv_mid->hw_frame = av_frame_alloc();
930  if (!qsv_mid->hw_frame)
931  goto fail;
932 
933  qsv_mid->hw_frame->data[3] = (uint8_t*)&qsv_mid->surf;
934  qsv_mid->hw_frame->format = AV_PIX_FMT_QSV;
935 
936  // doesn't really matter what buffer is used here
937  qsv_mid->hw_frame->buf[0] = av_buffer_alloc(1);
938  if (!qsv_mid->hw_frame->buf[0])
939  goto fail;
940 
941  qsv_mid->hw_frame->width = hw_frames_ctx->width;
942  qsv_mid->hw_frame->height = hw_frames_ctx->height;
943 
944  qsv_mid->hw_frame->hw_frames_ctx = av_buffer_ref(qsv_mid->hw_frames_ref);
945  if (!qsv_mid->hw_frame->hw_frames_ctx)
946  goto fail;
947 
948  qsv_mid->surf.Info = hw_frames_hwctx->surfaces[0].Info;
949  qsv_mid->surf.Data.MemId = qsv_mid->handle_pair;
950 
951  /* map the data to the system memory */
952  ret = av_hwframe_map(qsv_mid->locked_frame, qsv_mid->hw_frame,
953  AV_HWFRAME_MAP_DIRECT);
954  if (ret < 0)
955  goto fail;
956 
957  ptr->Pitch = qsv_mid->locked_frame->linesize[0];
958  ptr->Y = qsv_mid->locked_frame->data[0];
959  ptr->U = qsv_mid->locked_frame->data[1];
960  ptr->V = qsv_mid->locked_frame->data[1] + 1;
961 
962  return MFX_ERR_NONE;
963 fail:
964  av_frame_free(&qsv_mid->hw_frame);
965  av_frame_free(&qsv_mid->locked_frame);
966  return MFX_ERR_MEMORY_ALLOC;
967 }
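/*
 * Illustrative note (not part of the original file): qsv_frame_lock() exposes
 * a video-memory surface to the runtime as system memory by wrapping the
 * MemId in a temporary hwaccel AVFrame and mapping it with av_hwframe_map();
 * qsv_frame_unlock() below frees both temporary frames, which unmaps the
 * data again.
 */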
968 
969 static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
970 {
971  QSVMid *qsv_mid = mid;
972 
973  av_frame_free(&qsv_mid->locked_frame);
974  av_frame_free(&qsv_mid->hw_frame);
975 
976  return MFX_ERR_NONE;
977 }
978 
979 static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
980 {
981  QSVMid *qsv_mid = (QSVMid*)mid;
982  mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
983  mfxHDLPair *pair_src = (mfxHDLPair*)qsv_mid->handle_pair;
984 
985  pair_dst->first = pair_src->first;
986 
987  if (pair_src->second != (mfxMemId)MFX_INFINITE)
988  pair_dst->second = pair_src->second;
989  return MFX_ERR_NONE;
990 }
991 
992 int ff_qsv_init_session_device(AVCodecContext *avctx, mfxSession *psession,
993  AVBufferRef *device_ref, const char *load_plugins,
994  int gpu_copy)
995 {
996  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)device_ref->data;
997  AVQSVDeviceContext *device_hwctx = device_ctx->hwctx;
998  mfxSession parent_session = device_hwctx->session;
999  void *loader = device_hwctx->loader;
1000  mfxHDL handle = NULL;
1001  int hw_handle_supported = 0;
1002 
1003  mfxSession session;
1004  mfxVersion ver;
1005  mfxIMPL impl;
1006  mfxHandleType handle_type;
1007  mfxStatus err;
1008  int ret;
1009 
1010  err = MFXQueryIMPL(parent_session, &impl);
1011  if (err == MFX_ERR_NONE)
1012  err = MFXQueryVersion(parent_session, &ver);
1013  if (err != MFX_ERR_NONE)
1014  return ff_qsv_print_error(avctx, err,
1015  "Error querying the session attributes");
1016 
1017  if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl)) {
1018  handle_type = MFX_HANDLE_VA_DISPLAY;
1019  hw_handle_supported = 1;
1020  } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl)) {
1021  handle_type = MFX_HANDLE_D3D11_DEVICE;
1022  hw_handle_supported = 1;
1023  } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl)) {
1024  handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
1025  hw_handle_supported = 1;
1026  }
1027 
1028  if (hw_handle_supported) {
1029  err = MFXVideoCORE_GetHandle(parent_session, handle_type, &handle);
1030  if (err != MFX_ERR_NONE) {
1031  return ff_qsv_print_error(avctx, err,
1032  "Error getting the session handle");
1033  }
1034  }
1035  if (!handle) {
1036  av_log(avctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
1037  "from the session\n");
1038  }
1039 
1040  ret = qsv_create_mfx_session(avctx, impl, &ver, gpu_copy, &session,
1041  &loader);
1042  if (ret)
1043  return ret;
1044 
1045  if (handle) {
1046  err = MFXVideoCORE_SetHandle(session, handle_type, handle);
1047  if (err != MFX_ERR_NONE)
1048  return ff_qsv_print_error(avctx, err,
1049  "Error setting a HW handle");
1050  }
1051 
1052  if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
1053  err = MFXJoinSession(parent_session, session);
1054  if (err != MFX_ERR_NONE)
1055  return ff_qsv_print_error(avctx, err,
1056  "Error joining session");
1057  }
1058 
1059  ret = qsv_load_plugins(session, load_plugins, avctx);
1060  if (ret < 0) {
1061  av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
1062  return ret;
1063  }
1064 
1065  *psession = session;
1066  return 0;
1067 }
1068 
1069 int ff_qsv_init_session_frames(AVCodecContext *avctx, mfxSession *psession,
1070  QSVFramesContext *qsv_frames_ctx,
1071  const char *load_plugins, int opaque, int gpu_copy)
1072 {
1073  mfxFrameAllocator frame_allocator = {
1074  .pthis = qsv_frames_ctx,
1075  .Alloc = qsv_frame_alloc,
1076  .Lock = qsv_frame_lock,
1077  .Unlock = qsv_frame_unlock,
1078  .GetHDL = qsv_frame_get_hdl,
1079  .Free = qsv_frame_free,
1080  };
1081 
1082  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)qsv_frames_ctx->hw_frames_ctx->data;
1083  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
1084 
1085  mfxSession session;
1086  mfxStatus err;
1087 
1088  int ret;
1089 
1090  ret = ff_qsv_init_session_device(avctx, &session,
1091  frames_ctx->device_ref, load_plugins, gpu_copy);
1092  if (ret < 0)
1093  return ret;
1094 
1095  if (!opaque) {
1096  qsv_frames_ctx->logctx = avctx;
1097 
1098  /* allocate the memory ids for the external frames */
1099  av_buffer_unref(&qsv_frames_ctx->mids_buf);
1100  qsv_frames_ctx->mids_buf = qsv_create_mids(qsv_frames_ctx->hw_frames_ctx);
1101  if (!qsv_frames_ctx->mids_buf)
1102  return AVERROR(ENOMEM);
1103  qsv_frames_ctx->mids = (QSVMid*)qsv_frames_ctx->mids_buf->data;
1104  qsv_frames_ctx->nb_mids = frames_hwctx->nb_surfaces;
1105 
1106  err = MFXVideoCORE_SetFrameAllocator(session, &frame_allocator);
1107  if (err != MFX_ERR_NONE)
1108  return ff_qsv_print_error(avctx, err,
1109  "Error setting a frame allocator");
1110  }
1111 
1112  *psession = session;
1113  return 0;
1114 }
1115 
1116 int ff_qsv_close_internal_session(QSVSession *qs)
1117 {
1118  if (qs->session) {
1119  MFXClose(qs->session);
1120  qs->session = NULL;
1121  }
1122 
1123  if (qs->loader) {
1124  MFXUnload(qs->loader);
1125  qs->loader = NULL;
1126  }
1127 
1128 #ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
1129  av_buffer_unref(&qs->va_device_ref);
1130 #endif
1131  return 0;
1132 }
1133 
1134 void ff_qsv_frame_add_ext_param(AVCodecContext *avctx, QSVFrame *frame,
1135  mfxExtBuffer * param)
1136 {
1137  int i;
1138 
1139  for (i = 0; i < frame->num_ext_params; i++) {
1140  mfxExtBuffer *ext_buffer = frame->ext_param[i];
1141 
1142  if (ext_buffer->BufferId == param->BufferId) {
1143  av_log(avctx, AV_LOG_WARNING, "A buffer with the same type has already "
1144  "been added\n");
1145  return;
1146  }
1147  }
1148 
1149  if (frame->num_ext_params < QSV_MAX_FRAME_EXT_PARAMS) {
1150  frame->ext_param[frame->num_ext_params] = param;
1151  frame->num_ext_params++;
1152  frame->surface.Data.NumExtParam = frame->num_ext_params;
1153  } else {
1154  av_log(avctx, AV_LOG_WARNING, "Ignoring this extra buffer because there is "
1155  "not enough space\n");
1156  }
1157 
1158 
1159 }
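/*
 * Illustrative sketch (not part of the original file): a decoder that wants
 * extra per-frame information from the runtime attaches an extension buffer
 * to the QSVFrame before it is submitted, e.g.
 *
 *     mfxExtDecodedFrameInfo info = {
 *         .Header.BufferId = MFX_EXTBUFF_DECODED_FRAME_INFO,
 *         .Header.BufferSz = sizeof(info),
 *     };
 *     ff_qsv_frame_add_ext_param(avctx, qsv_frame, (mfxExtBuffer *)&info);
 *
 * Duplicate BufferIds and overflowing the fixed-size ext_param array are
 * only warned about, not treated as errors.
 */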