FFmpeg
hwcontext.c
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #include "avassert.h"
22 #include "buffer.h"
23 #include "common.h"
24 #include "hwcontext.h"
25 #include "hwcontext_internal.h"
26 #include "imgutils.h"
27 #include "log.h"
28 #include "mem.h"
29 #include "pixdesc.h"
30 #include "pixfmt.h"
31 
32 static const HWContextType * const hw_table[] = {
33 #if CONFIG_CUDA
34     &ff_hwcontext_type_cuda,
35 #endif
36 #if CONFIG_D3D11VA
37     &ff_hwcontext_type_d3d11va,
38 #endif
39 #if CONFIG_D3D12VA
40     &ff_hwcontext_type_d3d12va,
41 #endif
42 #if CONFIG_LIBDRM
43     &ff_hwcontext_type_drm,
44 #endif
45 #if CONFIG_DXVA2
46     &ff_hwcontext_type_dxva2,
47 #endif
48 #if CONFIG_OPENCL
49     &ff_hwcontext_type_opencl,
50 #endif
51 #if CONFIG_QSV
52     &ff_hwcontext_type_qsv,
53 #endif
54 #if CONFIG_VAAPI
55     &ff_hwcontext_type_vaapi,
56 #endif
57 #if CONFIG_VDPAU
58     &ff_hwcontext_type_vdpau,
59 #endif
60 #if CONFIG_VIDEOTOOLBOX
61     &ff_hwcontext_type_videotoolbox,
62 #endif
63 #if CONFIG_MEDIACODEC
64     &ff_hwcontext_type_mediacodec,
65 #endif
66 #if CONFIG_VULKAN
67     &ff_hwcontext_type_vulkan,
68 #endif
69  NULL,
70 };
71 
72 static const char *const hw_type_names[] = {
73  [AV_HWDEVICE_TYPE_CUDA] = "cuda",
74  [AV_HWDEVICE_TYPE_DRM] = "drm",
75  [AV_HWDEVICE_TYPE_DXVA2] = "dxva2",
76  [AV_HWDEVICE_TYPE_D3D11VA] = "d3d11va",
77  [AV_HWDEVICE_TYPE_D3D12VA] = "d3d12va",
78  [AV_HWDEVICE_TYPE_OPENCL] = "opencl",
79  [AV_HWDEVICE_TYPE_QSV] = "qsv",
80  [AV_HWDEVICE_TYPE_VAAPI] = "vaapi",
81  [AV_HWDEVICE_TYPE_VDPAU] = "vdpau",
82  [AV_HWDEVICE_TYPE_VIDEOTOOLBOX] = "videotoolbox",
83  [AV_HWDEVICE_TYPE_MEDIACODEC] = "mediacodec",
84  [AV_HWDEVICE_TYPE_VULKAN] = "vulkan",
85 };
86 
87 typedef struct FFHWDeviceContext {
88  /**
89  * The public AVHWDeviceContext. See hwcontext.h for it.
90  */
91     AVHWDeviceContext p;
92 
93     const HWContextType *hw_type;
94 
95  /**
96  * For a derived device, a reference to the original device
97  * context it was derived from.
98  */
99     AVBufferRef *source_device;
100 } FFHWDeviceContext;
101 
102 enum AVHWDeviceType av_hwdevice_find_type_by_name(const char *name)
103 {
104  int type;
105  for (type = 0; type < FF_ARRAY_ELEMS(hw_type_names); type++) {
106  if (hw_type_names[type] && !strcmp(hw_type_names[type], name))
107  return type;
108  }
109  return AV_HWDEVICE_TYPE_NONE;
110 }
111 
112 const char *av_hwdevice_get_type_name(enum AVHWDeviceType type)
113 {
114  if (type > AV_HWDEVICE_TYPE_NONE &&
115         type < FF_ARRAY_ELEMS(hw_type_names))
116         return hw_type_names[type];
117  else
118  return NULL;
119 }
120 
121 enum AVHWDeviceType av_hwdevice_iterate_types(enum AVHWDeviceType prev)
122 {
123  enum AVHWDeviceType next;
124  int i, set = 0;
125  for (i = 0; hw_table[i]; i++) {
126  if (prev != AV_HWDEVICE_TYPE_NONE && hw_table[i]->type <= prev)
127  continue;
128  if (!set || hw_table[i]->type < next) {
129  next = hw_table[i]->type;
130  set = 1;
131  }
132  }
133  return set ? next : AV_HWDEVICE_TYPE_NONE;
134 }
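
/*
 * Usage sketch (illustrative helper, not part of this file): enumerate every
 * hwdevice type compiled into the build and print its canonical name, using
 * av_hwdevice_iterate_types() together with av_hwdevice_get_type_name().
 */
static void list_hwdevice_types_example(void)
{
    enum AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;

    while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
        av_log(NULL, AV_LOG_INFO, "available hwdevice type: %s\n",
               av_hwdevice_get_type_name(type));
}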
135 
136 static const AVClass hwdevice_ctx_class = {
137  .class_name = "AVHWDeviceContext",
138  .item_name = av_default_item_name,
139  .version = LIBAVUTIL_VERSION_INT,
140 };
141 
142 static void hwdevice_ctx_free(void *opaque, uint8_t *data)
143 {
144     FFHWDeviceContext *ctxi = (FFHWDeviceContext*)data;
145     AVHWDeviceContext *ctx = &ctxi->p;
146 
147     /* uninit might still want to access the hw context and the user
148  * free() callback might destroy it, so uninit has to be called first */
149  if (ctxi->hw_type->device_uninit)
150  ctxi->hw_type->device_uninit(ctx);
151 
152  if (ctx->free)
153  ctx->free(ctx);
154 
155     av_buffer_unref(&ctxi->source_device);
156 
157  av_freep(&ctx->hwctx);
158  av_freep(&ctx);
159 }
160 
161 AVBufferRef *av_hwdevice_ctx_alloc(enum AVHWDeviceType type)
162 {
163  FFHWDeviceContext *ctxi;
164     AVHWDeviceContext *ctx;
165     AVBufferRef *buf;
166  const HWContextType *hw_type = NULL;
167  int i;
168 
169  for (i = 0; hw_table[i]; i++) {
170  if (hw_table[i]->type == type) {
171  hw_type = hw_table[i];
172  break;
173  }
174  }
175  if (!hw_type)
176  return NULL;
177 
178  ctxi = av_mallocz(sizeof(*ctxi));
179  if (!ctxi)
180  return NULL;
181  ctx = &ctxi->p;
182 
183  if (hw_type->device_hwctx_size) {
184  ctx->hwctx = av_mallocz(hw_type->device_hwctx_size);
185  if (!ctx->hwctx)
186  goto fail;
187  }
188 
189  buf = av_buffer_create((uint8_t*)ctx, sizeof(*ctx),
190                            hwdevice_ctx_free, NULL,
191                            AV_BUFFER_FLAG_READONLY);
192     if (!buf)
193  goto fail;
194 
195  ctx->type = type;
196     ctx->av_class = &hwdevice_ctx_class;
197 
198  ctxi->hw_type = hw_type;
199 
200  return buf;
201 
202 fail:
203  av_freep(&ctx->hwctx);
204  av_freep(&ctx);
205  return NULL;
206 }
207 
208 int av_hwdevice_ctx_init(AVBufferRef *ref)
209 {
210  FFHWDeviceContext *ctxi = (FFHWDeviceContext*)ref->data;
211  AVHWDeviceContext *ctx = &ctxi->p;
212  int ret = 0;
213 
214  if (ctxi->hw_type->device_init)
215  ret = ctxi->hw_type->device_init(ctx);
216 
217  return ret;
218 }
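
/*
 * Usage sketch for the manual allocation path (a hypothetical helper, not
 * part of this file): allocate a device context, fill in the type-specific
 * hwctx with a handle obtained elsewhere, then finalize it with
 * av_hwdevice_ctx_init(). Shown for VAAPI, assuming hwcontext_vaapi.h and an
 * existing VADisplay.
 */
#if CONFIG_VAAPI
#include "hwcontext_vaapi.h"

static AVBufferRef *wrap_existing_vaapi_display_example(VADisplay display)
{
    AVBufferRef *ref = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VAAPI);
    AVHWDeviceContext *ctx;
    AVVAAPIDeviceContext *hwctx;

    if (!ref)
        return NULL;

    ctx   = (AVHWDeviceContext*)ref->data;
    hwctx = ctx->hwctx;
    hwctx->display = display;   /* adopt a display created by the caller */

    if (av_hwdevice_ctx_init(ref) < 0)
        av_buffer_unref(&ref);

    return ref;
}
#endif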
219 
220 static const AVClass hwframe_ctx_class = {
221  .class_name = "AVHWFramesContext",
222  .item_name = av_default_item_name,
223  .version = LIBAVUTIL_VERSION_INT,
224 };
225 
226 static void hwframe_ctx_free(void *opaque, uint8_t *data)
227 {
228     FFHWFramesContext *ctxi = (FFHWFramesContext*)data;
229     AVHWFramesContext *ctx = &ctxi->p;
230 
231  if (ctxi->pool_internal)
232         av_buffer_pool_uninit(&ctxi->pool_internal);
233 
234  if (ctxi->hw_type->frames_uninit)
235  ctxi->hw_type->frames_uninit(ctx);
236 
237  if (ctx->free)
238  ctx->free(ctx);
239 
240     av_buffer_unref(&ctxi->source_frames);
241 
242  av_buffer_unref(&ctx->device_ref);
243 
244  av_freep(&ctx->hwctx);
245  av_freep(&ctx);
246 }
247 
248 AVBufferRef *av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
249 {
250  FFHWDeviceContext *device_ctx = (FFHWDeviceContext*)device_ref_in->data;
251  const HWContextType *hw_type = device_ctx->hw_type;
252  FFHWFramesContext *ctxi;
253     AVHWFramesContext *ctx;
254     AVBufferRef *buf, *device_ref = NULL;
255 
256  ctxi = av_mallocz(sizeof(*ctxi));
257  if (!ctxi)
258  return NULL;
259  ctx = &ctxi->p;
260 
261  if (hw_type->frames_hwctx_size) {
262  ctx->hwctx = av_mallocz(hw_type->frames_hwctx_size);
263  if (!ctx->hwctx)
264  goto fail;
265  }
266 
267  device_ref = av_buffer_ref(device_ref_in);
268  if (!device_ref)
269  goto fail;
270 
271  buf = av_buffer_create((uint8_t*)ctx, sizeof(*ctx),
272                            hwframe_ctx_free, NULL,
273                            AV_BUFFER_FLAG_READONLY);
274     if (!buf)
275  goto fail;
276 
277     ctx->av_class = &hwframe_ctx_class;
278     ctx->device_ref = device_ref;
279  ctx->device_ctx = &device_ctx->p;
280  ctx->format = AV_PIX_FMT_NONE;
281  ctx->sw_format = AV_PIX_FMT_NONE;
282 
283  ctxi->hw_type = hw_type;
284 
285  return buf;
286 
287 fail:
288  av_buffer_unref(&device_ref);
289  av_freep(&ctx->hwctx);
290  av_freep(&ctx);
291  return NULL;
292 }
293 
294 static int hwframe_pool_prealloc(AVBufferRef *ref)
295 {
296     AVHWFramesContext *ctx = (AVHWFramesContext*)ref->data;
297     AVFrame **frames;
298  int i, ret = 0;
299 
300  frames = av_calloc(ctx->initial_pool_size, sizeof(*frames));
301  if (!frames)
302  return AVERROR(ENOMEM);
303 
304  for (i = 0; i < ctx->initial_pool_size; i++) {
305  frames[i] = av_frame_alloc();
306  if (!frames[i])
307  goto fail;
308 
309         ret = av_hwframe_get_buffer(ref, frames[i], 0);
310         if (ret < 0)
311  goto fail;
312  }
313 
314 fail:
315  for (i = 0; i < ctx->initial_pool_size; i++)
316         av_frame_free(&frames[i]);
317     av_freep(&frames);
318 
319  return ret;
320 }
321 
322 int av_hwframe_ctx_init(AVBufferRef *ref)
323 {
324  FFHWFramesContext *ctxi = (FFHWFramesContext*)ref->data;
325  AVHWFramesContext *ctx = &ctxi->p;
326  const enum AVPixelFormat *pix_fmt;
327  int ret;
328 
329  if (ctxi->source_frames) {
330  /* A derived frame context is already initialised. */
331  return 0;
332  }
333 
334  /* validate the pixel format */
335  for (pix_fmt = ctxi->hw_type->pix_fmts; *pix_fmt != AV_PIX_FMT_NONE; pix_fmt++) {
336  if (*pix_fmt == ctx->format)
337  break;
338  }
339  if (*pix_fmt == AV_PIX_FMT_NONE) {
340         av_log(ctx, AV_LOG_ERROR,
341                "The hardware pixel format '%s' is not supported by the device type '%s'\n",
342  av_get_pix_fmt_name(ctx->format), ctxi->hw_type->name);
343  return AVERROR(ENOSYS);
344  }
345 
346  /* validate the dimensions */
347  ret = av_image_check_size(ctx->width, ctx->height, 0, ctx);
348  if (ret < 0)
349  return ret;
350 
351  /* format-specific init */
352  if (ctxi->hw_type->frames_init) {
353  ret = ctxi->hw_type->frames_init(ctx);
354  if (ret < 0)
355  return ret;
356  }
357 
358  if (ctxi->pool_internal && !ctx->pool)
359  ctx->pool = ctxi->pool_internal;
360 
361  /* preallocate the frames in the pool, if requested */
362  if (ctx->initial_pool_size > 0) {
363         ret = hwframe_pool_prealloc(ref);
364         if (ret < 0)
365  return ret;
366  }
367 
368  return 0;
369 }
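
/*
 * Usage sketch (illustrative helper, not part of this file): create and
 * initialise a frames context on an already opened device. The formats,
 * dimensions and pool size are example values; the device must actually
 * support the chosen combination.
 */
static AVBufferRef *alloc_frames_ctx_example(AVBufferRef *device_ref)
{
    AVBufferRef *frames_ref = av_hwframe_ctx_alloc(device_ref);
    AVHWFramesContext *frames;

    if (!frames_ref)
        return NULL;

    frames = (AVHWFramesContext*)frames_ref->data;
    frames->format            = AV_PIX_FMT_VAAPI; /* HW surface format      */
    frames->sw_format         = AV_PIX_FMT_NV12;  /* underlying data layout */
    frames->width             = 1920;
    frames->height            = 1080;
    frames->initial_pool_size = 4;                /* preallocated on init   */

    if (av_hwframe_ctx_init(frames_ref) < 0)
        av_buffer_unref(&frames_ref);

    return frames_ref;
}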
370 
371 int av_hwframe_transfer_get_formats(AVBufferRef *hwframe_ref,
372                                     enum AVHWFrameTransferDirection dir,
373                                     enum AVPixelFormat **formats, int flags)
374 {
375  FFHWFramesContext *ctxi = (FFHWFramesContext*)hwframe_ref->data;
376 
377     if (!ctxi->hw_type->transfer_get_formats)
378         return AVERROR(ENOSYS);
379 
380  return ctxi->hw_type->transfer_get_formats(&ctxi->p, dir, formats);
381 }
382 
383 static int transfer_data_alloc(AVFrame *dst, const AVFrame *src, int flags)
384 {
385     AVHWFramesContext *ctx;
386     AVFrame *frame_tmp;
387  int ret = 0;
388 
389  if (!src->hw_frames_ctx)
390  return AVERROR(EINVAL);
391  ctx = (AVHWFramesContext*)src->hw_frames_ctx->data;
392 
393  frame_tmp = av_frame_alloc();
394  if (!frame_tmp)
395  return AVERROR(ENOMEM);
396 
397  /* if the format is set, use that
398  * otherwise pick the first supported one */
399  if (dst->format >= 0) {
400  frame_tmp->format = dst->format;
401  } else {
402  enum AVPixelFormat *formats;
403 
404  ret = av_hwframe_transfer_get_formats(src->hw_frames_ctx,
405                                               AV_HWFRAME_TRANSFER_DIRECTION_FROM,
406                                               &formats, 0);
407  if (ret < 0)
408  goto fail;
409  frame_tmp->format = formats[0];
410  av_freep(&formats);
411  }
412  frame_tmp->width = ctx->width;
413  frame_tmp->height = ctx->height;
414 
415  ret = av_frame_get_buffer(frame_tmp, 0);
416  if (ret < 0)
417  goto fail;
418 
419  ret = av_hwframe_transfer_data(frame_tmp, src, flags);
420  if (ret < 0)
421  goto fail;
422 
423  frame_tmp->width = src->width;
424  frame_tmp->height = src->height;
425 
426  av_frame_move_ref(dst, frame_tmp);
427 
428 fail:
429  av_frame_free(&frame_tmp);
430  return ret;
431 }
432 
433 int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
434 {
435  int ret;
436 
437  if (!dst->buf[0])
438  return transfer_data_alloc(dst, src, flags);
439 
440  /*
441  * Hardware -> Hardware Transfer.
442  * Unlike Software -> Hardware or Hardware -> Software, the transfer
443  * function could be provided by either the src or dst, depending on
444  * the specific combination of hardware.
445  */
446  if (src->hw_frames_ctx && dst->hw_frames_ctx) {
447  FFHWFramesContext *src_ctx =
448  (FFHWFramesContext*)src->hw_frames_ctx->data;
449  FFHWFramesContext *dst_ctx =
450  (FFHWFramesContext*)dst->hw_frames_ctx->data;
451 
452  if (src_ctx->source_frames) {
453  av_log(src_ctx, AV_LOG_ERROR,
454  "A device with a derived frame context cannot be used as "
455  "the source of a HW -> HW transfer.");
456  return AVERROR(ENOSYS);
457  }
458 
459  if (dst_ctx->source_frames) {
460         av_log(dst_ctx, AV_LOG_ERROR,
461  "A device with a derived frame context cannot be used as "
462  "the destination of a HW -> HW transfer.");
463  return AVERROR(ENOSYS);
464  }
465 
466  ret = src_ctx->hw_type->transfer_data_from(&src_ctx->p, dst, src);
467  if (ret == AVERROR(ENOSYS))
468  ret = dst_ctx->hw_type->transfer_data_to(&dst_ctx->p, dst, src);
469  if (ret < 0)
470  return ret;
471  } else {
472  if (src->hw_frames_ctx) {
473  FFHWFramesContext *ctx = (FFHWFramesContext*)src->hw_frames_ctx->data;
474 
475  ret = ctx->hw_type->transfer_data_from(&ctx->p, dst, src);
476  if (ret < 0)
477  return ret;
478  } else if (dst->hw_frames_ctx) {
479  FFHWFramesContext *ctx = (FFHWFramesContext*)dst->hw_frames_ctx->data;
480 
481  ret = ctx->hw_type->transfer_data_to(&ctx->p, dst, src);
482  if (ret < 0)
483  return ret;
484  } else {
485  return AVERROR(ENOSYS);
486  }
487  }
488  return 0;
489 }
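
/*
 * Usage sketch (illustrative helper, not part of this file): download a
 * hardware frame into a newly allocated software frame. Leaving the
 * destination unallocated routes the call through transfer_data_alloc()
 * above, which picks a supported format and allocates the buffers.
 */
static int download_hw_frame_example(AVFrame **out, const AVFrame *hw_frame)
{
    AVFrame *sw_frame = av_frame_alloc();
    int ret;

    if (!sw_frame)
        return AVERROR(ENOMEM);

    ret = av_hwframe_transfer_data(sw_frame, hw_frame, 0);
    if (ret < 0) {
        av_frame_free(&sw_frame);
        return ret;
    }

    *out = sw_frame;
    return 0;
}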
490 
491 int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
492 {
493  FFHWFramesContext *ctxi = (FFHWFramesContext*)hwframe_ref->data;
494  AVHWFramesContext *ctx = &ctxi->p;
495  int ret;
496 
497  if (ctxi->source_frames) {
498  // This is a derived frame context, so we allocate in the source
499  // and map the frame immediately.
500  AVFrame *src_frame;
501 
502  frame->format = ctx->format;
503  frame->hw_frames_ctx = av_buffer_ref(hwframe_ref);
504  if (!frame->hw_frames_ctx)
505  return AVERROR(ENOMEM);
506 
507  src_frame = av_frame_alloc();
508  if (!src_frame)
509  return AVERROR(ENOMEM);
510 
511         ret = av_hwframe_get_buffer(ctxi->source_frames,
512                                     src_frame, 0);
513  if (ret < 0) {
514  av_frame_free(&src_frame);
515  return ret;
516  }
517 
518  ret = av_hwframe_map(frame, src_frame,
519                              ctxi->source_allocation_map_flags);
520         if (ret) {
521  av_log(ctx, AV_LOG_ERROR, "Failed to map frame into derived "
522  "frame context: %d.\n", ret);
523  av_frame_free(&src_frame);
524  return ret;
525  }
526 
527  // Free the source frame immediately - the mapped frame still
528  // contains a reference to it.
529  av_frame_free(&src_frame);
530 
531  return 0;
532  }
533 
534  if (!ctxi->hw_type->frames_get_buffer)
535  return AVERROR(ENOSYS);
536 
537  if (!ctx->pool)
538  return AVERROR(EINVAL);
539 
540  frame->hw_frames_ctx = av_buffer_ref(hwframe_ref);
541  if (!frame->hw_frames_ctx)
542  return AVERROR(ENOMEM);
543 
544     ret = ctxi->hw_type->frames_get_buffer(ctx, frame);
545     if (ret < 0) {
546  av_buffer_unref(&frame->hw_frames_ctx);
547  return ret;
548  }
549 
550  frame->extended_data = frame->data;
551 
552  return 0;
553 }
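
/*
 * Usage sketch (illustrative helper, not part of this file): pull one
 * surface out of an initialised frames context. The returned frame keeps a
 * reference to the frames context in hw_frames_ctx, as set up above.
 */
static AVFrame *get_hw_surface_example(AVBufferRef *frames_ref)
{
    AVFrame *frame = av_frame_alloc();

    if (!frame)
        return NULL;

    if (av_hwframe_get_buffer(frames_ref, frame, 0) < 0)
        av_frame_free(&frame);

    return frame;
}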
554 
555 void *av_hwdevice_hwconfig_alloc(AVBufferRef *ref)
556 {
557     FFHWDeviceContext *ctx = (FFHWDeviceContext*)ref->data;
558     const HWContextType *hw_type = ctx->hw_type;
559 
560  if (hw_type->device_hwconfig_size == 0)
561  return NULL;
562 
563  return av_mallocz(hw_type->device_hwconfig_size);
564 }
565 
566 AVHWFramesConstraints *av_hwdevice_get_hwframe_constraints(AVBufferRef *ref,
567                                                            const void *hwconfig)
568 {
569     FFHWDeviceContext *ctx = (FFHWDeviceContext*)ref->data;
570     const HWContextType *hw_type = ctx->hw_type;
571  AVHWFramesConstraints *constraints;
572 
573  if (!hw_type->frames_get_constraints)
574  return NULL;
575 
576  constraints = av_mallocz(sizeof(*constraints));
577  if (!constraints)
578  return NULL;
579 
580  constraints->min_width = constraints->min_height = 0;
581  constraints->max_width = constraints->max_height = INT_MAX;
582 
583  if (hw_type->frames_get_constraints(&ctx->p, hwconfig, constraints) >= 0) {
584  return constraints;
585  } else {
586  av_hwframe_constraints_free(&constraints);
587  return NULL;
588  }
589 }
590 
591 void av_hwframe_constraints_free(AVHWFramesConstraints **constraints)
592 {
593  if (*constraints) {
594  av_freep(&(*constraints)->valid_hw_formats);
595  av_freep(&(*constraints)->valid_sw_formats);
596  }
597  av_freep(constraints);
598 }
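
/*
 * Usage sketch (illustrative helper, not part of this file): query the frame
 * constraints of a device with the default (NULL) configuration, log the
 * supported surface size range, and release the structure again.
 */
static void log_hwframe_constraints_example(AVBufferRef *device_ref)
{
    AVHWFramesConstraints *cst =
        av_hwdevice_get_hwframe_constraints(device_ref, NULL);

    if (!cst)
        return;

    av_log(NULL, AV_LOG_INFO, "surface sizes: %dx%d .. %dx%d\n",
           cst->min_width, cst->min_height, cst->max_width, cst->max_height);

    av_hwframe_constraints_free(&cst);
}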
599 
600 int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type,
601                            const char *device, AVDictionary *opts, int flags)
602 {
603  AVBufferRef *device_ref = NULL;
604  FFHWDeviceContext *device_ctx;
605  int ret = 0;
606 
607  device_ref = av_hwdevice_ctx_alloc(type);
608  if (!device_ref) {
609  ret = AVERROR(ENOMEM);
610  goto fail;
611  }
612  device_ctx = (FFHWDeviceContext*)device_ref->data;
613 
614  if (!device_ctx->hw_type->device_create) {
615  ret = AVERROR(ENOSYS);
616  goto fail;
617  }
618 
619  ret = device_ctx->hw_type->device_create(&device_ctx->p, device,
620  opts, flags);
621  if (ret < 0)
622  goto fail;
623 
624  ret = av_hwdevice_ctx_init(device_ref);
625  if (ret < 0)
626  goto fail;
627 
628  *pdevice_ref = device_ref;
629  return 0;
630 fail:
631  av_buffer_unref(&device_ref);
632  *pdevice_ref = NULL;
633  return ret;
634 }
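
/*
 * Usage sketch (illustrative helper, not part of this file): open a device
 * from a user-supplied type name such as "vaapi" or "cuda". The device
 * string (for example "/dev/dri/renderD128") is optional and backend-specific.
 */
static int open_device_by_name_example(AVBufferRef **out,
                                       const char *type_name,
                                       const char *device)
{
    enum AVHWDeviceType type = av_hwdevice_find_type_by_name(type_name);

    if (type == AV_HWDEVICE_TYPE_NONE)
        return AVERROR(EINVAL);

    return av_hwdevice_ctx_create(out, type, device, NULL, 0);
}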
635 
636 int av_hwdevice_ctx_create_derived_opts(AVBufferRef **dst_ref_ptr,
637                                         enum AVHWDeviceType type,
638  AVBufferRef *src_ref,
639  AVDictionary *options, int flags)
640 {
641  AVBufferRef *dst_ref = NULL, *tmp_ref;
642  FFHWDeviceContext *dst_ctx;
643  int ret = 0;
644 
645  tmp_ref = src_ref;
646  while (tmp_ref) {
647  FFHWDeviceContext *tmp_ctx = (FFHWDeviceContext*)tmp_ref->data;
648  if (tmp_ctx->p.type == type) {
649  dst_ref = av_buffer_ref(tmp_ref);
650  if (!dst_ref) {
651  ret = AVERROR(ENOMEM);
652  goto fail;
653  }
654  goto done;
655  }
656  tmp_ref = tmp_ctx->source_device;
657  }
658 
659  dst_ref = av_hwdevice_ctx_alloc(type);
660  if (!dst_ref) {
661  ret = AVERROR(ENOMEM);
662  goto fail;
663  }
664  dst_ctx = (FFHWDeviceContext*)dst_ref->data;
665 
666  tmp_ref = src_ref;
667  while (tmp_ref) {
668  FFHWDeviceContext *tmp_ctx = (FFHWDeviceContext*)tmp_ref->data;
669  if (dst_ctx->hw_type->device_derive) {
670  ret = dst_ctx->hw_type->device_derive(&dst_ctx->p,
671  &tmp_ctx->p,
672  options, flags);
673  if (ret == 0) {
674  dst_ctx->source_device = av_buffer_ref(src_ref);
675  if (!dst_ctx->source_device) {
676  ret = AVERROR(ENOMEM);
677  goto fail;
678  }
679  ret = av_hwdevice_ctx_init(dst_ref);
680  if (ret < 0)
681  goto fail;
682  goto done;
683  }
684  if (ret != AVERROR(ENOSYS))
685  goto fail;
686  }
687  tmp_ref = tmp_ctx->source_device;
688  }
689 
690  ret = AVERROR(ENOSYS);
691  goto fail;
692 
693 done:
694  *dst_ref_ptr = dst_ref;
695  return 0;
696 
697 fail:
698  av_buffer_unref(&dst_ref);
699  *dst_ref_ptr = NULL;
700  return ret;
701 }
702 
703 int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr,
704                                    enum AVHWDeviceType type,
705  AVBufferRef *src_ref, int flags)
706 {
707  return av_hwdevice_ctx_create_derived_opts(dst_ref_ptr, type, src_ref,
708  NULL, flags);
709 }
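
/*
 * Usage sketch (illustrative helper, not part of this file): derive a Vulkan
 * device from an existing VAAPI device so both share the same physical GPU.
 * Any type pair with a device_derive implementation works the same way.
 */
static int derive_vulkan_from_vaapi_example(AVBufferRef **vk_out,
                                            AVBufferRef *vaapi_device)
{
    return av_hwdevice_ctx_create_derived(vk_out, AV_HWDEVICE_TYPE_VULKAN,
                                          vaapi_device, 0);
}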
710 
711 static void ff_hwframe_unmap(void *opaque, uint8_t *data)
712 {
713     HWMapDescriptor *hwmap = (HWMapDescriptor*)data;
714     AVHWFramesContext *ctx = opaque;
715 
716  if (hwmap->unmap)
717  hwmap->unmap(ctx, hwmap);
718 
719  av_frame_free(&hwmap->source);
720 
721     av_buffer_unref(&hwmap->hw_frames_ctx);
722 
723  av_free(hwmap);
724 }
725 
726 int ff_hwframe_map_create(AVBufferRef *hwframe_ref,
727                           AVFrame *dst, const AVFrame *src,
728  void (*unmap)(AVHWFramesContext *ctx,
729  HWMapDescriptor *hwmap),
730  void *priv)
731 {
732  AVHWFramesContext *ctx = (AVHWFramesContext*)hwframe_ref->data;
733  HWMapDescriptor *hwmap;
734  int ret;
735 
736  hwmap = av_mallocz(sizeof(*hwmap));
737  if (!hwmap) {
738  ret = AVERROR(ENOMEM);
739  goto fail;
740  }
741 
742  hwmap->source = av_frame_alloc();
743  if (!hwmap->source) {
744  ret = AVERROR(ENOMEM);
745  goto fail;
746  }
747  ret = av_frame_ref(hwmap->source, src);
748  if (ret < 0)
749  goto fail;
750 
751  hwmap->hw_frames_ctx = av_buffer_ref(hwframe_ref);
752  if (!hwmap->hw_frames_ctx) {
753  ret = AVERROR(ENOMEM);
754  goto fail;
755  }
756 
757  hwmap->unmap = unmap;
758  hwmap->priv = priv;
759 
760  dst->buf[0] = av_buffer_create((uint8_t*)hwmap, sizeof(*hwmap),
761  &ff_hwframe_unmap, ctx, 0);
762  if (!dst->buf[0]) {
763  ret = AVERROR(ENOMEM);
764  goto fail;
765  }
766 
767  return 0;
768 
769 fail:
770  if (hwmap) {
771  av_buffer_unref(&hwmap->hw_frames_ctx);
772  av_frame_free(&hwmap->source);
773  }
774  av_free(hwmap);
775  return ret;
776 }
777 
778 int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags)
779 {
780  AVBufferRef *orig_dst_frames = dst->hw_frames_ctx;
781  enum AVPixelFormat orig_dst_fmt = dst->format;
782  HWMapDescriptor *hwmap;
783  int ret;
784 
785  if (src->hw_frames_ctx && dst->hw_frames_ctx) {
786  FFHWFramesContext *src_frames = (FFHWFramesContext*)src->hw_frames_ctx->data;
787  FFHWFramesContext *dst_frames = (FFHWFramesContext*)dst->hw_frames_ctx->data;
788 
789  if ((src_frames == dst_frames &&
790  src->format == dst_frames->p.sw_format &&
791  dst->format == dst_frames->p.format) ||
792  (src_frames->source_frames &&
793  src_frames->source_frames->data ==
794  (uint8_t*)dst_frames)) {
795  // This is an unmap operation. We don't need to directly
796  // do anything here other than fill in the original frame,
797  // because the real unmap will be invoked when the last
798  // reference to the mapped frame disappears.
799  if (!src->buf[0]) {
800  av_log(src_frames, AV_LOG_ERROR, "Invalid mapping "
801  "found when attempting unmap.\n");
802  return AVERROR(EINVAL);
803  }
804  hwmap = (HWMapDescriptor*)src->buf[0]->data;
805  return av_frame_replace(dst, hwmap->source);
806  }
807  }
808 
809  if (src->hw_frames_ctx) {
810  FFHWFramesContext *src_frames = (FFHWFramesContext*)src->hw_frames_ctx->data;
811 
812  if (src_frames->p.format == src->format &&
813  src_frames->hw_type->map_from) {
814  ret = src_frames->hw_type->map_from(&src_frames->p,
815  dst, src, flags);
816  if (ret >= 0)
817  return ret;
818  else if (ret != AVERROR(ENOSYS))
819  goto fail;
820  }
821  }
822 
823  if (dst->hw_frames_ctx) {
824  FFHWFramesContext *dst_frames = (FFHWFramesContext*)dst->hw_frames_ctx->data;
825 
826  if (dst_frames->p.format == dst->format &&
827  dst_frames->hw_type->map_to) {
828  ret = dst_frames->hw_type->map_to(&dst_frames->p,
829  dst, src, flags);
830  if (ret >= 0)
831  return ret;
832  else if (ret != AVERROR(ENOSYS))
833  goto fail;
834  }
835  }
836 
837  return AVERROR(ENOSYS);
838 
839 fail:
840  // if the caller provided dst frames context, it should be preserved
841  // by this function
842  av_assert0(orig_dst_frames == NULL ||
843  orig_dst_frames == dst->hw_frames_ctx);
844 
845  // preserve user-provided dst frame fields, but clean
846  // anything we might have set
847  dst->hw_frames_ctx = NULL;
848     av_frame_unref(dst);
849 
850  dst->hw_frames_ctx = orig_dst_frames;
851  dst->format = orig_dst_fmt;
852 
853  return ret;
854 }
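
/*
 * Usage sketch (illustrative helper, not part of this file): map a hardware
 * frame read-only into CPU-visible memory, requesting the mapping in the
 * software layout backing the surfaces. Unreferencing the mapped frame later
 * triggers ff_hwframe_unmap() above.
 */
static int map_frame_readonly_example(AVFrame **out, const AVFrame *hw_frame)
{
    AVHWFramesContext *frames =
        (AVHWFramesContext*)hw_frame->hw_frames_ctx->data;
    AVFrame *mapped = av_frame_alloc();
    int ret;

    if (!mapped)
        return AVERROR(ENOMEM);

    mapped->format = frames->sw_format;

    ret = av_hwframe_map(mapped, hw_frame, AV_HWFRAME_MAP_READ);
    if (ret < 0) {
        av_frame_free(&mapped);
        return ret;
    }

    *out = mapped;
    return 0;
}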
855 
856 int av_hwframe_ctx_create_derived(AVBufferRef **derived_frame_ctx,
857                                   enum AVPixelFormat format,
858  AVBufferRef *derived_device_ctx,
859  AVBufferRef *source_frame_ctx,
860  int flags)
861 {
862  AVBufferRef *dst_ref = NULL;
863  FFHWFramesContext *dsti = NULL;
864  FFHWFramesContext *srci = (FFHWFramesContext*)source_frame_ctx->data;
865  AVHWFramesContext *dst, *src = &srci->p;
866  int ret;
867 
868  if (srci->source_frames) {
869  AVHWFramesContext *src_src =
870             (AVHWFramesContext*)srci->source_frames->data;
871         AVHWDeviceContext *dst_dev =
872  (AVHWDeviceContext*)derived_device_ctx->data;
873 
874  if (src_src->device_ctx == dst_dev) {
875  // This is actually an unmapping, so we just return a
876  // reference to the source frame context.
877  *derived_frame_ctx = av_buffer_ref(srci->source_frames);
878  if (!*derived_frame_ctx) {
879  ret = AVERROR(ENOMEM);
880  goto fail;
881  }
882  return 0;
883  }
884  }
885 
886  dst_ref = av_hwframe_ctx_alloc(derived_device_ctx);
887  if (!dst_ref) {
888  ret = AVERROR(ENOMEM);
889  goto fail;
890  }
891 
892  dsti = (FFHWFramesContext*)dst_ref->data;
893  dst = &dsti->p;
894 
895  dst->format = format;
896  dst->sw_format = src->sw_format;
897  dst->width = src->width;
898  dst->height = src->height;
899 
900  dsti->source_frames = av_buffer_ref(source_frame_ctx);
901  if (!dsti->source_frames) {
902  ret = AVERROR(ENOMEM);
903  goto fail;
904  }
905 
906     dsti->source_allocation_map_flags =
907         flags & (AV_HWFRAME_MAP_READ      |
908                  AV_HWFRAME_MAP_WRITE     |
909                  AV_HWFRAME_MAP_OVERWRITE |
910                  AV_HWFRAME_MAP_DIRECT);
911 
912  ret = AVERROR(ENOSYS);
913  if (srci->hw_type->frames_derive_from)
914         ret = srci->hw_type->frames_derive_from(dst, src, flags);
915     if (ret == AVERROR(ENOSYS) &&
916  dsti->hw_type->frames_derive_to)
917  ret = dsti->hw_type->frames_derive_to(dst, src, flags);
918  if (ret == AVERROR(ENOSYS))
919  ret = 0;
920  if (ret)
921  goto fail;
922 
923  *derived_frame_ctx = dst_ref;
924  return 0;
925 
926 fail:
927  if (dsti)
928         av_buffer_unref(&dsti->source_frames);
929     av_buffer_unref(&dst_ref);
930  return ret;
931 }
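
/*
 * Usage sketch (illustrative helper, not part of this file): expose an
 * existing frames context on a device derived from its own, e.g. presenting
 * VAAPI surfaces as DRM PRIME objects. The format and flags are examples.
 */
static int derive_frames_ctx_example(AVBufferRef **out,
                                     AVBufferRef *derived_device,
                                     AVBufferRef *src_frames)
{
    return av_hwframe_ctx_create_derived(out, AV_PIX_FMT_DRM_PRIME,
                                         derived_device, src_frames,
                                         AV_HWFRAME_MAP_READ);
}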
932 
933 int ff_hwframe_map_replace(AVFrame *dst, const AVFrame *src)
934 {
935  HWMapDescriptor *hwmap = (HWMapDescriptor*)dst->buf[0]->data;
936  return av_frame_replace(hwmap->source, src);
937 }