FFmpeg
hwcontext_vdpau.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #include <stdint.h>
22 #include <string.h>
23 
24 #include <vdpau/vdpau.h>
25 
26 #include "buffer.h"
27 #include "common.h"
28 #include "hwcontext.h"
29 #include "hwcontext_internal.h"
30 #include "hwcontext_vdpau.h"
31 #include "mem.h"
32 #include "pixfmt.h"
33 #include "pixdesc.h"
34 
35 typedef struct VDPAUPixFmtMap {
36  VdpYCbCrFormat vdpau_fmt;
39 
40 static const VDPAUPixFmtMap pix_fmts_420[] = {
41  { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12 },
42  { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
43 #ifdef VDP_YCBCR_FORMAT_P016
44  { VDP_YCBCR_FORMAT_P016, AV_PIX_FMT_P016 },
45  { VDP_YCBCR_FORMAT_P010, AV_PIX_FMT_P010 },
46 #endif
47  { 0, AV_PIX_FMT_NONE, },
48 };
49 
50 static const VDPAUPixFmtMap pix_fmts_422[] = {
51  { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16 },
52  { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
53  { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
54  { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
55  { 0, AV_PIX_FMT_NONE, },
56 };
57 
58 static const VDPAUPixFmtMap pix_fmts_444[] = {
59 #ifdef VDP_YCBCR_FORMAT_Y_U_V_444
60  { VDP_YCBCR_FORMAT_Y_U_V_444, AV_PIX_FMT_YUV444P },
61 #endif
62 #ifdef VDP_YCBCR_FORMAT_P016
63  {VDP_YCBCR_FORMAT_Y_U_V_444_16, AV_PIX_FMT_YUV444P16},
64 #endif
65  { 0, AV_PIX_FMT_NONE, },
66 };
67 
68 static const struct {
69  VdpChromaType chroma_type;
72 } vdpau_pix_fmts[] = {
73  { VDP_CHROMA_TYPE_420, AV_PIX_FMT_YUV420P, pix_fmts_420 },
74  { VDP_CHROMA_TYPE_422, AV_PIX_FMT_YUV422P, pix_fmts_422 },
75  { VDP_CHROMA_TYPE_444, AV_PIX_FMT_YUV444P, pix_fmts_444 },
76 #ifdef VDP_YCBCR_FORMAT_P016
77  { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P10, pix_fmts_420 },
78  { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P12, pix_fmts_420 },
79  { VDP_CHROMA_TYPE_422_16, AV_PIX_FMT_YUV422P10, pix_fmts_422 },
80  { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P10, pix_fmts_444 },
81  { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P12, pix_fmts_444 },
82 #endif
83 };
84 
85 typedef struct VDPAUDeviceContext {
86  /**
87  * The public AVVDPAUDeviceContext. See hwcontext_vdpau.h for it.
88  */
90 
91  VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
92  VdpVideoSurfaceGetBitsYCbCr *get_data;
93  VdpVideoSurfacePutBitsYCbCr *put_data;
94  VdpVideoSurfaceCreate *surf_create;
95  VdpVideoSurfaceDestroy *surf_destroy;
96 
100 
101 typedef struct VDPAUFramesContext {
102  VdpVideoSurfaceGetBitsYCbCr *get_data;
103  VdpVideoSurfacePutBitsYCbCr *put_data;
104  VdpChromaType chroma_type;
106 
107  const enum AVPixelFormat *pix_fmts;
110 
111 static int count_pixfmts(const VDPAUPixFmtMap *map)
112 {
113  int count = 0;
114  while (map->pix_fmt != AV_PIX_FMT_NONE) {
115  map++;
116  count++;
117  }
118  return count;
119 }
120 
122 {
123  VDPAUDeviceContext *priv = ctx->hwctx;
124  AVVDPAUDeviceContext *hwctx = &priv->p;
125  int i;
126 
127  for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
128  const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
129  int nb_pix_fmts;
130 
131  nb_pix_fmts = count_pixfmts(map);
132  priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
133  if (!priv->pix_fmts[i])
134  return AVERROR(ENOMEM);
135 
136  nb_pix_fmts = 0;
137  while (map->pix_fmt != AV_PIX_FMT_NONE) {
138  VdpBool supported;
139  VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
140  map->vdpau_fmt, &supported);
141  if (err == VDP_STATUS_OK && supported)
142  priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
143  map++;
144  }
145  priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
146  priv->nb_pix_fmts[i] = nb_pix_fmts;
147  }
148 
149  return 0;
150 }
151 
/* Look up one VDPAU entry point via get_proc_address and store it in
 * `result`; on failure, log and return from the ENCLOSING function.
 * Requires `ctx`, `hwctx` and `err` to be in scope at the call site. */
#define GET_CALLBACK(id, result)                                            \
do {                                                                        \
    void *tmp;                                                              \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                 \
    if (err != VDP_STATUS_OK) {                                             \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n"); \
        return AVERROR_UNKNOWN;                                             \
    }                                                                       \
    result = tmp;                                                           \
} while (0)
162 
164 {
165  VDPAUDeviceContext *priv = ctx->hwctx;
166  AVVDPAUDeviceContext *hwctx = &priv->p;
167  VdpStatus err;
168  int ret;
169 
170  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
171  priv->get_transfer_caps);
172  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, priv->get_data);
173  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, priv->put_data);
174  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE, priv->surf_create);
175  GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY, priv->surf_destroy);
176 
178  if (ret < 0) {
179  av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
180  return ret;
181  }
182 
183  return 0;
184 }
185 
187 {
188  VDPAUDeviceContext *priv = ctx->hwctx;
189  int i;
190 
191  for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
192  av_freep(&priv->pix_fmts[i]);
193 }
194 
196  const void *hwconfig,
197  AVHWFramesConstraints *constraints)
198 {
199  VDPAUDeviceContext *priv = ctx->hwctx;
200  int nb_sw_formats = 0;
201  int i;
202 
204  sizeof(*constraints->valid_sw_formats));
205  if (!constraints->valid_sw_formats)
206  return AVERROR(ENOMEM);
207 
208  for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
209  if (priv->nb_pix_fmts[i] > 1)
210  constraints->valid_sw_formats[nb_sw_formats++] = vdpau_pix_fmts[i].frames_sw_format;
211  }
212  constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;
213 
214  constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
215  if (!constraints->valid_hw_formats)
216  return AVERROR(ENOMEM);
217 
218  constraints->valid_hw_formats[0] = AV_PIX_FMT_VDPAU;
219  constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
220 
221  return 0;
222 }
223 
224 static void vdpau_buffer_free(void *opaque, uint8_t *data)
225 {
226  AVHWFramesContext *ctx = opaque;
227  VDPAUDeviceContext *device_priv = ctx->device_ctx->hwctx;
228  VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)data;
229 
230  device_priv->surf_destroy(surf);
231 }
232 
233 static AVBufferRef *vdpau_pool_alloc(void *opaque, size_t size)
234 {
235  AVHWFramesContext *ctx = opaque;
236  VDPAUFramesContext *priv = ctx->hwctx;
237  VDPAUDeviceContext *device_priv = ctx->device_ctx->hwctx;
238  AVVDPAUDeviceContext *device_hwctx = &device_priv->p;
239 
240  AVBufferRef *ret;
241  VdpVideoSurface surf;
242  VdpStatus err;
243 
244  err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
245  ctx->width, ctx->height, &surf);
246  if (err != VDP_STATUS_OK) {
247  av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
248  return NULL;
249  }
250 
251  ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
253  if (!ret) {
254  device_priv->surf_destroy(surf);
255  return NULL;
256  }
257 
258  return ret;
259 }
260 
262 {
263  VDPAUDeviceContext *device_priv = ctx->device_ctx->hwctx;
264  VDPAUFramesContext *priv = ctx->hwctx;
265 
266  int i;
267 
268  for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
269  if (vdpau_pix_fmts[i].frames_sw_format == ctx->sw_format) {
270  priv->chroma_type = vdpau_pix_fmts[i].chroma_type;
271  priv->chroma_idx = i;
272  priv->pix_fmts = device_priv->pix_fmts[i];
273  priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
274  break;
275  }
276  }
277  if (priv->nb_pix_fmts < 2) {
278  av_log(ctx, AV_LOG_ERROR, "Unsupported sw format: %s\n",
279  av_get_pix_fmt_name(ctx->sw_format));
280  return AVERROR(ENOSYS);
281  }
282 
283  if (!ctx->pool) {
285  av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
287  if (!ffhwframesctx(ctx)->pool_internal)
288  return AVERROR(ENOMEM);
289  }
290 
291  priv->get_data = device_priv->get_data;
292  priv->put_data = device_priv->put_data;
293 
294  return 0;
295 }
296 
298 {
299  frame->buf[0] = av_buffer_pool_get(ctx->pool);
300  if (!frame->buf[0])
301  return AVERROR(ENOMEM);
302 
303  frame->data[3] = frame->buf[0]->data;
304  frame->format = AV_PIX_FMT_VDPAU;
305  frame->width = ctx->width;
306  frame->height = ctx->height;
307 
308  return 0;
309 }
310 
313  enum AVPixelFormat **formats)
314 {
315  VDPAUFramesContext *priv = ctx->hwctx;
316 
317  enum AVPixelFormat *fmts;
318 
319  if (priv->nb_pix_fmts == 1) {
321  "No target formats are supported for this chroma type\n");
322  return AVERROR(ENOSYS);
323  }
324 
325  fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
326  if (!fmts)
327  return AVERROR(ENOMEM);
328 
329  memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts));
330  *formats = fmts;
331 
332  return 0;
333 }
334 
336  const AVFrame *src)
337 {
338  VDPAUFramesContext *priv = ctx->hwctx;
339  VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)src->data[3];
340 
341  void *data[3];
342  uint32_t linesize[3];
343 
344  const VDPAUPixFmtMap *map;
345  VdpYCbCrFormat vdpau_format;
346  VdpStatus err;
347  int i;
348 
349  for (i = 0; i< FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
350  data[i] = dst->data[i];
351  if (dst->linesize[i] < 0 || dst->linesize[i] > UINT32_MAX) {
353  "The linesize %d cannot be represented as uint32\n",
354  dst->linesize[i]);
355  return AVERROR(ERANGE);
356  }
357  linesize[i] = dst->linesize[i];
358  }
359 
360  map = vdpau_pix_fmts[priv->chroma_idx].map;
361  for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
362  if (map[i].pix_fmt == dst->format) {
363  vdpau_format = map[i].vdpau_fmt;
364  break;
365  }
366  }
367  if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
369  "Unsupported target pixel format: %s\n",
370  av_get_pix_fmt_name(dst->format));
371  return AVERROR(EINVAL);
372  }
373 
374  if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
375 #ifdef VDP_YCBCR_FORMAT_Y_U_V_444
376  || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
377 #endif
378 #ifdef VDP_YCBCR_FORMAT_P016
379  || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444_16)
380 #endif
381  )
382  FFSWAP(void*, data[1], data[2]);
383 
384  err = priv->get_data(surf, vdpau_format, data, linesize);
385  if (err != VDP_STATUS_OK) {
386  av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
387  return AVERROR_UNKNOWN;
388  }
389 
390  return 0;
391 }
392 
394  const AVFrame *src)
395 {
396  VDPAUFramesContext *priv = ctx->hwctx;
397  VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)dst->data[3];
398 
399  const void *data[3];
400  uint32_t linesize[3];
401 
402  const VDPAUPixFmtMap *map;
403  VdpYCbCrFormat vdpau_format;
404  VdpStatus err;
405  int i;
406 
407  for (i = 0; i< FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
408  data[i] = src->data[i];
409  if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
411  "The linesize %d cannot be represented as uint32\n",
412  src->linesize[i]);
413  return AVERROR(ERANGE);
414  }
415  linesize[i] = src->linesize[i];
416  }
417 
418  map = vdpau_pix_fmts[priv->chroma_idx].map;
419  for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
420  if (map[i].pix_fmt == src->format) {
421  vdpau_format = map[i].vdpau_fmt;
422  break;
423  }
424  }
425  if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
427  "Unsupported source pixel format: %s\n",
428  av_get_pix_fmt_name(src->format));
429  return AVERROR(EINVAL);
430  }
431 
432  if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
433 #ifdef VDP_YCBCR_FORMAT_Y_U_V_444
434  || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
435 #endif
436  )
437  FFSWAP(const void*, data[1], data[2]);
438 
439  err = priv->put_data(surf, vdpau_format, data, linesize);
440  if (err != VDP_STATUS_OK) {
441  av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
442  return AVERROR_UNKNOWN;
443  }
444 
445  return 0;
446 }
447 
448 #if HAVE_VDPAU_X11
449 #include <vdpau/vdpau_x11.h>
450 #include <X11/Xlib.h>
451 
452 typedef struct VDPAUDevicePriv {
453  VdpDeviceDestroy *device_destroy;
454  Display *dpy;
455 } VDPAUDevicePriv;
456 
457 static void vdpau_device_free(AVHWDeviceContext *ctx)
458 {
459  AVVDPAUDeviceContext *hwctx = ctx->hwctx;
460  VDPAUDevicePriv *priv = ctx->user_opaque;
461 
462  if (priv->device_destroy)
463  priv->device_destroy(hwctx->device);
464  if (priv->dpy)
465  XCloseDisplay(priv->dpy);
466  av_freep(&priv);
467 }
468 
469 static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
470  AVDictionary *opts, int flags)
471 {
472  AVVDPAUDeviceContext *hwctx = ctx->hwctx;
473 
474  VDPAUDevicePriv *priv;
475  VdpStatus err;
476  VdpGetInformationString *get_information_string;
477  const char *display, *vendor;
478 
479  priv = av_mallocz(sizeof(*priv));
480  if (!priv)
481  return AVERROR(ENOMEM);
482 
483  ctx->user_opaque = priv;
484  ctx->free = vdpau_device_free;
485 
486  priv->dpy = XOpenDisplay(device);
487  if (!priv->dpy) {
488  av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
489  XDisplayName(device));
490  return AVERROR_UNKNOWN;
491  }
492  display = XDisplayString(priv->dpy);
493 
494  err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
495  &hwctx->device, &hwctx->get_proc_address);
496  if (err != VDP_STATUS_OK) {
497  av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
498  display);
499  return AVERROR_UNKNOWN;
500  }
501 
502  GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
503  GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY, priv->device_destroy);
504 
505  get_information_string(&vendor);
506  av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
507  "X11 display %s\n", vendor, display);
508 
509  return 0;
510 }
511 #endif
512 
515  .name = "VDPAU",
516 
517  .device_hwctx_size = sizeof(VDPAUDeviceContext),
518  .frames_hwctx_size = sizeof(VDPAUFramesContext),
519 
520 #if HAVE_VDPAU_X11
521  .device_create = vdpau_device_create,
522 #endif
523  .device_init = vdpau_device_init,
524  .device_uninit = vdpau_device_uninit,
525  .frames_get_constraints = vdpau_frames_get_constraints,
526  .frames_init = vdpau_frames_init,
527  .frames_get_buffer = vdpau_get_buffer,
528  .transfer_get_formats = vdpau_transfer_get_formats,
529  .transfer_data_to = vdpau_transfer_data_to,
530  .transfer_data_from = vdpau_transfer_data_from,
531 
532  .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU, AV_PIX_FMT_NONE },
533 };
formats
formats
Definition: signature.h:47
FFHWFramesContext::pool_internal
AVBufferPool * pool_internal
Definition: hwcontext_internal.h:101
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
VDPAUFramesContext
Definition: hwcontext_vdpau.c:101
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
vdpau_frames_init
static int vdpau_frames_init(AVHWFramesContext *ctx)
Definition: hwcontext_vdpau.c:261
VDPAUPixFmtMap::pix_fmt
enum AVPixelFormat pix_fmt
Definition: hwcontext_vdpau.c:37
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:389
pixdesc.h
VDPAUPixFmtMap::vdpau_fmt
VdpYCbCrFormat vdpau_fmt
Definition: hwcontext_vdpau.c:36
VDPAUFramesContext::pix_fmts
enum AVPixelFormat * pix_fmts
Definition: hwcontext_vdpau.c:107
data
const char data[16]
Definition: mxf.c:149
AV_PIX_FMT_YUV420P10
#define AV_PIX_FMT_YUV420P10
Definition: pixfmt.h:502
AVVDPAUDeviceContext::get_proc_address
VdpGetProcAddress * get_proc_address
Definition: hwcontext_vdpau.h:37
VDPAUDeviceContext::pix_fmts
enum AVPixelFormat * pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)]
Definition: hwcontext_vdpau.c:97
AVVDPAUDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_vdpau.h:35
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:225
AVDictionary
Definition: dict.c:34
chroma_type
VdpChromaType chroma_type
Definition: hwcontext_vdpau.c:69
AVHWFramesConstraints::valid_hw_formats
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:446
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
VDPAUFramesContext::chroma_idx
int chroma_idx
Definition: hwcontext_vdpau.c:105
AVHWFramesConstraints
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
Definition: hwcontext.h:441
vdpau_buffer_free
static void vdpau_buffer_free(void *opaque, uint8_t *data)
Definition: hwcontext_vdpau.c:224
pix_fmts_422
static const VDPAUPixFmtMap pix_fmts_422[]
Definition: hwcontext_vdpau.c:50
VDPAUDeviceContext
Definition: hwcontext_vdpau.c:85
VDPAUDeviceContext::get_data
VdpVideoSurfaceGetBitsYCbCr * get_data
Definition: hwcontext_vdpau.c:92
VDPAUPixFmtMap
Definition: hwcontext_vdpau.c:35
av_buffer_pool_init2
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
Definition: buffer.c:259
VDPAUDeviceContext::get_transfer_caps
VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities * get_transfer_caps
Definition: hwcontext_vdpau.c:91
count_pixfmts
static int count_pixfmts(const VDPAUPixFmtMap *map)
Definition: hwcontext_vdpau.c:111
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:60
AV_PIX_FMT_YUV444P10
#define AV_PIX_FMT_YUV444P10
Definition: pixfmt.h:505
HWContextType::type
enum AVHWDeviceType type
Definition: hwcontext_internal.h:30
ffhwframesctx
static FFHWFramesContext * ffhwframesctx(AVHWFramesContext *ctx)
Definition: hwcontext_internal.h:115
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:209
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
AVHWFramesConstraints::valid_sw_formats
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
Definition: hwcontext.h:453
av_buffer_pool_get
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
Definition: buffer.c:390
AV_PIX_FMT_YUV444P16
#define AV_PIX_FMT_YUV444P16
Definition: pixfmt.h:515
AV_BUFFER_FLAG_READONLY
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
Definition: buffer.h:114
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demux_decode.c:41
vdpau_pix_fmts
static const struct @450 vdpau_pix_fmts[]
ctx
AVFormatContext * ctx
Definition: movenc.c:49
vdpau_pool_alloc
static AVBufferRef * vdpau_pool_alloc(void *opaque, size_t size)
Definition: hwcontext_vdpau.c:233
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
frames_sw_format
enum AVPixelFormat frames_sw_format
Definition: hwcontext_vdpau.c:70
pix_fmts_420
static const VDPAUPixFmtMap pix_fmts_420[]
Definition: hwcontext_vdpau.c:40
opts
AVDictionary * opts
Definition: movenc.c:51
NULL
#define NULL
Definition: coverity.c:32
vdpau_frames_get_constraints
static int vdpau_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
Definition: hwcontext_vdpau.c:195
AV_PIX_FMT_YUYV422
@ AV_PIX_FMT_YUYV422
packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
Definition: pixfmt.h:74
ff_hwcontext_type_vdpau
const HWContextType ff_hwcontext_type_vdpau
Definition: hwcontext_vdpau.c:513
AV_PIX_FMT_YUV422P10
#define AV_PIX_FMT_YUV422P10
Definition: pixfmt.h:503
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
VDPAUDeviceContext::surf_destroy
VdpVideoSurfaceDestroy * surf_destroy
Definition: hwcontext_vdpau.c:95
vdpau_device_uninit
static void vdpau_device_uninit(AVHWDeviceContext *ctx)
Definition: hwcontext_vdpau.c:186
dst
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t intptr_t int int16_t * dst
Definition: dsp.h:83
size
int size
Definition: twinvq_data.h:10344
AV_PIX_FMT_YUV444P12
#define AV_PIX_FMT_YUV444P12
Definition: pixfmt.h:509
VDPAUDeviceContext::p
AVVDPAUDeviceContext p
The public AVVDPAUDeviceContext.
Definition: hwcontext_vdpau.c:89
VDPAUFramesContext::nb_pix_fmts
int nb_pix_fmts
Definition: hwcontext_vdpau.c:108
AV_PIX_FMT_NV16
@ AV_PIX_FMT_NV16
interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:198
buffer.h
VDPAUFramesContext::put_data
VdpVideoSurfacePutBitsYCbCr * put_data
Definition: hwcontext_vdpau.c:103
VDPAUFramesContext::chroma_type
VdpChromaType chroma_type
Definition: hwcontext_vdpau.c:104
AV_PIX_FMT_VDPAU
@ AV_PIX_FMT_VDPAU
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
Definition: pixfmt.h:194
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
av_malloc_array
#define av_malloc_array(a, b)
Definition: tableprint_vlc.h:31
common.h
vdpau_transfer_data_to
static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_vdpau.c:393
av_mallocz
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:256
AV_HWDEVICE_TYPE_VDPAU
@ AV_HWDEVICE_TYPE_VDPAU
Definition: hwcontext.h:29
vdpau_transfer_get_formats
static int vdpau_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
Definition: hwcontext_vdpau.c:311
hwcontext_vdpau.h
AV_PIX_FMT_P016
#define AV_PIX_FMT_P016
Definition: pixfmt.h:554
AVHWFrameTransferDirection
AVHWFrameTransferDirection
Definition: hwcontext.h:403
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:115
ret
ret
Definition: filter_design.txt:187
pix_fmts_444
static const VDPAUPixFmtMap pix_fmts_444[]
Definition: hwcontext_vdpau.c:58
pixfmt.h
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
AV_PIX_FMT_YUV420P12
#define AV_PIX_FMT_YUV420P12
Definition: pixfmt.h:506
AV_PIX_FMT_UYVY422
@ AV_PIX_FMT_UYVY422
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
Definition: pixfmt.h:88
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
vdpau_init_pixmfts
static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
Definition: hwcontext_vdpau.c:121
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:78
vdpau_transfer_data_from
static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src)
Definition: hwcontext_vdpau.c:335
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:552
VDPAUFramesContext::get_data
VdpVideoSurfaceGetBitsYCbCr * get_data
Definition: hwcontext_vdpau.c:102
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:77
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
vdpau_device_init
static int vdpau_device_init(AVHWDeviceContext *ctx)
Definition: hwcontext_vdpau.c:163
hwcontext_internal.h
map
const VDPAUPixFmtMap * map
Definition: hwcontext_vdpau.c:71
VDPAUDeviceContext::nb_pix_fmts
int nb_pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)]
Definition: hwcontext_vdpau.c:98
vdpau_get_buffer
static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
Definition: hwcontext_vdpau.c:297
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:34
VDPAUDeviceContext::surf_create
VdpVideoSurfaceCreate * surf_create
Definition: hwcontext_vdpau.c:94
flags
#define flags(name, subs,...)
Definition: cbs_av1.c:482
hwcontext.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
HWContextType
Definition: hwcontext_internal.h:29
AVVDPAUDeviceContext::device
VdpDevice device
Definition: hwcontext_vdpau.h:36
src
#define src
Definition: vp8dsp.c:248
VDPAUDeviceContext::put_data
VdpVideoSurfacePutBitsYCbCr * put_data
Definition: hwcontext_vdpau.c:93
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3090
GET_CALLBACK
#define GET_CALLBACK(id, result)
Definition: hwcontext_vdpau.c:152