#include <VideoToolbox/VideoToolbox.h>
/* cv_pix_fmts[]: CoreVideo pixel format <-> AVPixelFormat mapping (partial listing) */
#ifdef kCFCoreFoundationVersionNumber10_7
    { kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,  false, AV_PIX_FMT_NV12 },
    { kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,   true,  AV_PIX_FMT_NV12 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_420YPCBCR10BIPLANARVIDEORANGE
    /* 10-bit 4:2:0 entries not captured in this listing */
#endif
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR8BIPLANARVIDEORANGE
    { kCVPixelFormatType_422YpCbCr8BiPlanarVideoRange,  false, AV_PIX_FMT_NV16 },
    { kCVPixelFormatType_422YpCbCr8BiPlanarFullRange,   true,  AV_PIX_FMT_NV16 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR10BIPLANARVIDEORANGE
    { kCVPixelFormatType_422YpCbCr10BiPlanarVideoRange, false, AV_PIX_FMT_P210 },
    { kCVPixelFormatType_422YpCbCr10BiPlanarFullRange,  true,  AV_PIX_FMT_P210 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR16BIPLANARVIDEORANGE
    { kCVPixelFormatType_422YpCbCr16BiPlanarVideoRange, false, AV_PIX_FMT_P216 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR8BIPLANARVIDEORANGE
    { kCVPixelFormatType_444YpCbCr8BiPlanarVideoRange,  false, AV_PIX_FMT_NV24 },
    { kCVPixelFormatType_444YpCbCr8BiPlanarFullRange,   true,  AV_PIX_FMT_NV24 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR10BIPLANARVIDEORANGE
    { kCVPixelFormatType_444YpCbCr10BiPlanarVideoRange, false, AV_PIX_FMT_P410 },
    { kCVPixelFormatType_444YpCbCr10BiPlanarFullRange,  true,  AV_PIX_FMT_P410 },
#endif
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR16BIPLANARVIDEORANGE
    { kCVPixelFormatType_444YpCbCr16BiPlanarVideoRange, false, AV_PIX_FMT_P416 },
#endif
/* supported_formats[]: AVPixelFormat entries guarded by the same availability checks (partial listing) */
#ifdef kCFCoreFoundationVersionNumber10_7
#if HAVE_KCVPIXELFORMATTYPE_420YPCBCR10BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR8BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR10BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_422YPCBCR16BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR8BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR10BIPLANARVIDEORANGE
#if HAVE_KCVPIXELFORMATTYPE_444YPCBCR16BIPLANARVIDEORANGE
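The table above backs the public mapping helpers declared in libavutil/hwcontext_videotoolbox.h. A minimal, hedged sketch of how an application might use them (function names in the sketch other than the libavutil calls are illustrative; error handling is reduced to the essentials):

#include <stdio.h>
#include <stdbool.h>
#include <libavutil/hwcontext_videotoolbox.h>
#include <libavutil/pixdesc.h>

int main(void)
{
    /* AVPixelFormat -> CoreVideo FourCC, requesting the full-range variant */
    uint32_t cv_fmt = av_map_videotoolbox_format_from_pixfmt2(AV_PIX_FMT_NV12, true);
    if (!cv_fmt)
        return 1; /* no CoreVideo equivalent for this pixel format */

    /* and back again: CoreVideo FourCC -> AVPixelFormat */
    enum AVPixelFormat pix_fmt = av_map_videotoolbox_format_to_pixfmt(cv_fmt);
    printf("cv_fmt=0x%08x maps to %s\n", cv_fmt, av_get_pix_fmt_name(pix_fmt));
    return 0;
}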
static int vt_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig,
                                     AVHWFramesConstraints *constraints)
/* vt_pool_alloc(): build the CVPixelBufferPool attributes for this frames context (partial listing) */
    CFMutableDictionaryRef attributes, iosurface_properties;

    attributes = CFDictionaryCreateMutable(NULL, 2,
                                           &kCFTypeDictionaryKeyCallBacks,
                                           &kCFTypeDictionaryValueCallBacks);

    pixfmt = CFNumberCreate(NULL, kCFNumberSInt32Type, &cv_pixfmt);
    CFDictionarySetValue(attributes, kCVPixelBufferPixelFormatTypeKey, pixfmt);

    iosurface_properties = CFDictionaryCreateMutable(NULL, 0,
                                                     &kCFTypeDictionaryKeyCallBacks,
                                                     &kCFTypeDictionaryValueCallBacks);
    CFDictionarySetValue(attributes, kCVPixelBufferIOSurfacePropertiesKey, iosurface_properties);
    CFRelease(iosurface_properties);

    w = CFNumberCreate(NULL, kCFNumberSInt32Type, &ctx->width);
    h = CFNumberCreate(NULL, kCFNumberSInt32Type, &ctx->height);
    CFDictionarySetValue(attributes, kCVPixelBufferWidthKey,  w);
    CFDictionarySetValue(attributes, kCVPixelBufferHeightKey, h);

    err = CVPixelBufferPoolCreate(NULL, NULL, attributes, &fctx->pool);
    CFRelease(attributes);

    if (err == kCVReturnSuccess)
        return 0;
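The same CFNumber/CFDictionary pattern works for a single buffer outside a pool. A hedged sketch, not from the file, using plain CoreVideo (the helper name and the 1280x720 NV12 parameters are illustrative):

#include <CoreVideo/CoreVideo.h>

/* Create one IOSurface-backed NV12 CVPixelBuffer, mirroring the attribute
   dictionary that vt_pool_alloc() builds above. */
static CVPixelBufferRef create_nv12_buffer(void)
{
    CVPixelBufferRef buf = NULL;
    /* an empty IOSurface-properties dictionary asks CoreVideo for IOSurface backing */
    CFDictionaryRef empty = CFDictionaryCreate(NULL, NULL, NULL, 0,
                                               &kCFTypeDictionaryKeyCallBacks,
                                               &kCFTypeDictionaryValueCallBacks);
    const void *keys[]   = { kCVPixelBufferIOSurfacePropertiesKey };
    const void *values[] = { empty };
    CFDictionaryRef attrs = CFDictionaryCreate(NULL, keys, values, 1,
                                               &kCFTypeDictionaryKeyCallBacks,
                                               &kCFTypeDictionaryValueCallBacks);

    CVReturn err = CVPixelBufferCreate(NULL, 1280, 720,
                                       kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                       attrs, &buf);
    CFRelease(attrs);
    CFRelease(empty);
    return err == kCVReturnSuccess ? buf : NULL;
}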
    CVPixelBufferRelease((CVPixelBufferRef)data);
/* vt_pool_alloc_buffer(): draw one CVPixelBuffer from the pool (partial listing) */
    CVPixelBufferRef pixbuf;

    err = CVPixelBufferPoolCreatePixelBuffer(NULL, fctx->pool, &pixbuf);
    if (err != kCVReturnSuccess) {
        /* allocation failure path not captured in this listing */
    }

    /* if wrapping the buffer in an AVBufferRef fails, it is released again */
    CVPixelBufferRelease(pixbuf);

    /* the pool itself is released when the frames context is torn down */
    CVPixelBufferPoolRelease(fctx->pool);
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);
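For callers, the pool above is reached through the generic hwframe API rather than directly. A hedged sketch of allocating VideoToolbox frames from application code (the helper name, dimensions, and sw_format are illustrative):

#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>
#include <libavutil/pixfmt.h>

static int alloc_vt_frame(AVFrame **out)
{
    AVBufferRef *device_ref = NULL, *frames_ref = NULL;
    AVFrame *frame = NULL;
    int err;

    err = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
                                 NULL, NULL, 0);
    if (err < 0)
        goto fail;

    frames_ref = av_hwframe_ctx_alloc(device_ref);
    if (!frames_ref) { err = AVERROR(ENOMEM); goto fail; }

    AVHWFramesContext *fc = (AVHWFramesContext *)frames_ref->data;
    fc->format    = AV_PIX_FMT_VIDEOTOOLBOX; /* hardware format */
    fc->sw_format = AV_PIX_FMT_NV12;         /* backing layout, illustrative */
    fc->width     = 1920;
    fc->height    = 1080;

    err = av_hwframe_ctx_init(frames_ref);
    if (err < 0)
        goto fail;

    frame = av_frame_alloc();
    if (!frame) { err = AVERROR(ENOMEM); goto fail; }

    /* on success, frame->data[3] holds a CVPixelBufferRef from the pool */
    err = av_hwframe_get_buffer(frames_ref, frame, 0);
    if (err < 0)
        goto fail;

    *out  = frame;
    frame = NULL;
fail:
    av_frame_free(&frame);
    av_buffer_unref(&frames_ref);
    av_buffer_unref(&device_ref);
    return err;
}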
    fmts[0] = ctx->sw_format;
/* vt_unmap(): release the base-address lock taken when the frame was mapped */
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)hwmap->source->data[3];

    CVPixelBufferUnlockBaseAddress(pixbuf, (uintptr_t)hwmap->priv);
/* vt_pixbuf_set_par(): attach the sample aspect ratio; "sar" stands in here for the
   reduced AVRational held by the function, which is not captured in this listing */
    CFMutableDictionaryRef par = NULL;

    num = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &sar.num);
    den = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &sar.den);
    par = CFDictionaryCreateMutable(kCFAllocatorDefault, 2,
                                    &kCFCopyStringDictionaryKeyCallBacks,
                                    &kCFTypeDictionaryValueCallBacks);
    if (!par || !num || !den) {
        if (par) CFRelease(par);
        if (num) CFRelease(num);
        if (den) CFRelease(den);
        return AVERROR(ENOMEM);
    }

    CFDictionarySetValue(par, kCVImageBufferPixelAspectRatioHorizontalSpacingKey, num);
    CFDictionarySetValue(par, kCVImageBufferPixelAspectRatioVerticalSpacingKey,   den);
    CVBufferSetAttachment(pixbuf, kCVImageBufferPixelAspectRatioKey, par,
                          kCVAttachmentMode_ShouldPropagate);
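Before the CFNumbers are built, the aspect ratio is first reduced with av_reduce(), which is listed in the cross references below. A small, hedged illustration of that call in isolation (values are made up for the example):

#include <libavutil/rational.h>

/* Reduce a display-style ratio before converting it to CFNumbers. The
   0xFFFFFFFF argument just bounds numerator and denominator. */
static void reduced_ratio_example(void)
{
    int num = 0, den = 0;
    av_reduce(&num, &den, 1920 * 4, 1080 * 3, 0xFFFFFFFF);
    /* num:den is now 64:27 */
}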
/* av_map_videotoolbox_chroma_loc_from_av(): AVChromaLocation -> CoreVideo string (case labels restored) */
    case AVCHROMA_LOC_LEFT:
        return kCVImageBufferChromaLocation_Left;
    case AVCHROMA_LOC_CENTER:
        return kCVImageBufferChromaLocation_Center;
    case AVCHROMA_LOC_TOP:
        return kCVImageBufferChromaLocation_Top;
    case AVCHROMA_LOC_BOTTOM:
        return kCVImageBufferChromaLocation_Bottom;
    case AVCHROMA_LOC_TOPLEFT:
        return kCVImageBufferChromaLocation_TopLeft;
    case AVCHROMA_LOC_BOTTOMLEFT:
        return kCVImageBufferChromaLocation_BottomLeft;
/* vt_pixbuf_set_chromaloc(): attach the mapped chroma location ("loc" names the
   CFStringRef returned by av_map_videotoolbox_chroma_loc_from_av in this listing) */
    CVBufferSetAttachment(pixbuf, kCVImageBufferChromaLocationTopFieldKey, loc,
                          kCVAttachmentMode_ShouldPropagate);
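A hedged sketch of driving the same attachment from an AVFrame's chroma_location via the public helper (the wrapper function is illustrative):

#include <CoreVideo/CoreVideo.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext_videotoolbox.h>

/* Attach the chroma siting of an AVFrame to a CVPixelBuffer; does nothing when
   the location is unspecified or has no CoreVideo equivalent. */
static void attach_chroma_loc(CVPixelBufferRef pixbuf, const AVFrame *frame)
{
    CFStringRef loc = av_map_videotoolbox_chroma_loc_from_av(frame->chroma_location);
    if (loc)
        CVBufferSetAttachment(pixbuf, kCVImageBufferChromaLocationTopFieldKey,
                              loc, kCVAttachmentMode_ShouldPropagate);
}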
/* av_map_videotoolbox_color_matrix_from_av(): AVColorSpace -> CoreVideo matrix string (case labels restored) */
    case AVCOL_SPC_BT2020_CL:
    case AVCOL_SPC_BT2020_NCL:
#if HAVE_KCVIMAGEBUFFERYCBCRMATRIX_ITU_R_2020
        if (__builtin_available(macOS 10.11, iOS 9, *))
            return kCVImageBufferYCbCrMatrix_ITU_R_2020;
#endif
        return CFSTR("ITU_R_2020");
    case AVCOL_SPC_BT470BG:
    case AVCOL_SPC_SMPTE170M:
        return kCVImageBufferYCbCrMatrix_ITU_R_601_4;
    case AVCOL_SPC_BT709:
        return kCVImageBufferYCbCrMatrix_ITU_R_709_2;
    case AVCOL_SPC_SMPTE240M:
        return kCVImageBufferYCbCrMatrix_SMPTE_240M_1995;
    default:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
            return CVYCbCrMatrixGetStringForIntegerCodePoint(space);
#endif
        return NULL;
/* av_map_videotoolbox_color_primaries_from_av(): AVColorPrimaries -> CoreVideo primaries string */
    case AVCOL_PRI_BT2020:
#if HAVE_KCVIMAGEBUFFERCOLORPRIMARIES_ITU_R_2020
        if (__builtin_available(macOS 10.11, iOS 9, *))
            return kCVImageBufferColorPrimaries_ITU_R_2020;
#endif
        return CFSTR("ITU_R_2020");
    case AVCOL_PRI_BT709:
        return kCVImageBufferColorPrimaries_ITU_R_709_2;
    case AVCOL_PRI_SMPTE170M:
        return kCVImageBufferColorPrimaries_SMPTE_C;
    case AVCOL_PRI_BT470BG:
        return kCVImageBufferColorPrimaries_EBU_3213;
    default:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
            return CVColorPrimariesGetStringForIntegerCodePoint(pri);
#endif
        return NULL;
/* av_map_videotoolbox_color_trc_from_av(): AVColorTransferCharacteristic -> CoreVideo transfer string */
    case AVCOL_TRC_SMPTE2084:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_SMPTE_ST_2084_PQ
        if (__builtin_available(macOS 10.13, iOS 11, *))
            return kCVImageBufferTransferFunction_SMPTE_ST_2084_PQ;
#endif
        return CFSTR("SMPTE_ST_2084_PQ");
    case AVCOL_TRC_BT2020_10:
    case AVCOL_TRC_BT2020_12:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2020
        if (__builtin_available(macOS 10.11, iOS 9, *))
            return kCVImageBufferTransferFunction_ITU_R_2020;
#endif
        return CFSTR("ITU_R_2020");
    case AVCOL_TRC_BT709:
        return kCVImageBufferTransferFunction_ITU_R_709_2;
    case AVCOL_TRC_SMPTE240M:
        return kCVImageBufferTransferFunction_SMPTE_240M_1995;
    case AVCOL_TRC_SMPTE428:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_SMPTE_ST_428_1
        if (__builtin_available(macOS 10.12, iOS 10, *))
            return kCVImageBufferTransferFunction_SMPTE_ST_428_1;
#endif
        return CFSTR("SMPTE_ST_428_1");
    case AVCOL_TRC_ARIB_STD_B67:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, *))
            return kCVImageBufferTransferFunction_ITU_R_2100_HLG;
#endif
        return CFSTR("ITU_R_2100_HLG");
    case AVCOL_TRC_GAMMA22:
        return kCVImageBufferTransferFunction_UseGamma;
    case AVCOL_TRC_GAMMA28:
        return kCVImageBufferTransferFunction_UseGamma;
    default:
#if HAVE_KCVIMAGEBUFFERTRANSFERFUNCTION_ITU_R_2100_HLG
        if (__builtin_available(macOS 10.13, iOS 11, tvOS 11, watchOS 4, *))
            return CVTransferFunctionGetStringForIntegerCodePoint(trc);
#endif
        return NULL;
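Taken together, the three helpers above translate an AVFrame's colorimetry into CoreVideo strings. A hedged usage sketch (the helper name is illustrative; a NULL result simply means "no CoreVideo name, skip the attachment"):

#include <CoreVideo/CoreVideo.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext_videotoolbox.h>

static void describe_colorimetry(const AVFrame *frame)
{
    CFStringRef matrix = av_map_videotoolbox_color_matrix_from_av(frame->colorspace);
    CFStringRef pri    = av_map_videotoolbox_color_primaries_from_av(frame->color_primaries);
    CFStringRef trc    = av_map_videotoolbox_color_trc_from_av(frame->color_trc);

    if (matrix) CFShow(matrix);   /* e.g. "ITU_R_709_2" */
    if (pri)    CFShow(pri);
    if (trc)    CFShow(trc);
}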
    CFStringRef colormatrix = NULL, colorpri = NULL, colortrc = NULL;
/* vt_pixbuf_set_colorspace(): attach whichever colorimetry strings were resolved above */
    CVBufferSetAttachment(pixbuf, kCVImageBufferYCbCrMatrixKey, colormatrix,
                          kCVAttachmentMode_ShouldPropagate);
    CVBufferSetAttachment(pixbuf, kCVImageBufferColorPrimariesKey, colorpri,
                          kCVAttachmentMode_ShouldPropagate);
    CVBufferSetAttachment(pixbuf, kCVImageBufferTransferFunctionKey, colortrc,
                          kCVAttachmentMode_ShouldPropagate);

    /* for pure-gamma transfer characteristics the gamma value itself is attached as well */
    CFNumberRef gamma_level = CFNumberCreate(NULL, kCFNumberFloat32Type, &gamma);
    CVBufferSetAttachment(pixbuf, kCVImageBufferGammaLevelKey, gamma_level,
                          kCVAttachmentMode_ShouldPropagate);
    CFRelease(gamma_level);
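The per-buffer attachment helpers above sit behind the public av_vt_pixbuf_set_attachments() entry point listed in the cross references below. A hedged sketch of calling it on a buffer you already own (the wrapper name is illustrative; a NULL log context is assumed to be acceptable):

#include <CoreVideo/CoreVideo.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext_videotoolbox.h>

/* Copy an AVFrame's signalled colour properties onto a CVPixelBuffer. */
static int tag_pixbuf(CVPixelBufferRef pixbuf, const AVFrame *frame)
{
    return av_vt_pixbuf_set_attachments(NULL, pixbuf, frame);
}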
/* vt_map_frame(): expose the CVPixelBuffer planes through AVFrame data/linesize (partial listing) */
    CVPixelBufferRef pixbuf = (CVPixelBufferRef)src->data[3];
    OSType pixel_format = CVPixelBufferGetPixelFormatType(pixbuf);
    uint32_t map_flags = 0;

    if (CVPixelBufferGetWidth(pixbuf)  != ctx->width ||
        CVPixelBufferGetHeight(pixbuf) != ctx->height) {
        /* size mismatch between the buffer and the frames context: error out */
    }

    /* read-only mappings lock the buffer read-only */
    map_flags = kCVPixelBufferLock_ReadOnly;

    err = CVPixelBufferLockBaseAddress(pixbuf, map_flags);
    if (err != kCVReturnSuccess) {
        /* lock failure path not captured in this listing */
    }

    if (CVPixelBufferIsPlanar(pixbuf)) {
        int planes = CVPixelBufferGetPlaneCount(pixbuf);
        for (int i = 0; i < planes; i++) {
            dst->data[i]     = CVPixelBufferGetBaseAddressOfPlane(pixbuf, i);
            dst->linesize[i] = CVPixelBufferGetBytesPerRowOfPlane(pixbuf, i);
        }
    } else {
        dst->data[0]     = CVPixelBufferGetBaseAddress(pixbuf);
        dst->linesize[0] = CVPixelBufferGetBytesPerRow(pixbuf);
    }

    /* register vt_unmap() for this mapping; map_flags travels in the hwmap priv pointer */
    err = ff_hwframe_map_create(src->hw_frames_ctx, dst, src, vt_unmap,
                                (void *)(uintptr_t)map_flags);

    /* error path: release the lock again before returning */
    CVPixelBufferUnlockBaseAddress(pixbuf, map_flags);
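From the caller's side, this mapping is reached through av_hwframe_map(). A hedged sketch of reading back a decoded VideoToolbox frame without a full copy (the helper name is illustrative):

#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

/* Map a hardware frame (format AV_PIX_FMT_VIDEOTOOLBOX) for CPU reads. On
   success, mapped->data/linesize point into the locked CVPixelBuffer until
   the mapped frame is freed, which triggers vt_unmap() above. */
static int read_back(const AVFrame *hw_frame, AVFrame **out)
{
    AVFrame *mapped = av_frame_alloc();
    int err;

    if (!mapped)
        return AVERROR(ENOMEM);

    err = av_hwframe_map(mapped, hw_frame, AV_HWFRAME_MAP_READ);
    if (err < 0) {
        av_frame_free(&mapped);
        return err;
    }
    *out = mapped;
    return 0;
}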
    map->format = src->format;
    map->height = src->height;
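When a plain copy is wanted instead of a mapping, av_hwframe_transfer_data() performs the download into a software frame. A hedged sketch (the helper name is illustrative):

#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>

/* Download a VideoToolbox frame into system memory (e.g. NV12), copying the
   pixel data and then the frame metadata. */
static int download_frame(const AVFrame *hw_frame, AVFrame **out)
{
    AVFrame *sw = av_frame_alloc();
    int err;

    if (!sw)
        return AVERROR(ENOMEM);

    err = av_hwframe_transfer_data(sw, hw_frame, 0);
    if (err >= 0)
        err = av_frame_copy_props(sw, hw_frame);
    if (err < 0) {
        av_frame_free(&sw);
        return err;
    }
    *out = sw;
    return 0;
}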
/* vt_device_create(): an explicit device selection string is not supported */
    if (device && device[0]) {
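The device create callback above needs no device string; applications simply ask for the type, which is registered under the name "videotoolbox" (shown in the listing just below). A hedged sketch (helper names are illustrative):

#include <libavutil/hwcontext.h>

static AVBufferRef *create_vt_device(void)
{
    AVBufferRef *device_ref = NULL;

    /* no device string or options are needed for VideoToolbox */
    if (av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VIDEOTOOLBOX,
                               NULL, NULL, 0) < 0)
        return NULL;
    return device_ref;
}

/* Alternatively, resolve the type from its registered name, e.g. from a CLI option. */
static enum AVHWDeviceType find_vt_type(void)
{
    return av_hwdevice_find_type_by_name("videotoolbox");
}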
    .name = "videotoolbox",
#define AV_LOG_WARNING
Something somehow does not look correct.
AVPixelFormat
Pixel format.
CFStringRef av_map_videotoolbox_color_trc_from_av(enum AVColorTransferCharacteristic trc)
Convert an AVColorTransferCharacteristic to a VideoToolbox/CoreVideo color transfer function string.
AVColorTransferCharacteristic
Color Transfer Characteristic.
AVFrame * source
A reference to the original source of the mapping.
CFStringRef av_map_videotoolbox_color_matrix_from_av(enum AVColorSpace space)
Convert an AVColorSpace to a VideoToolbox/CoreVideo color matrix string.
static int vt_transfer_data_from(AVHWFramesContext *hwfc, AVFrame *dst, const AVFrame *src)
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
This structure describes decoded (raw) audio or video data.
enum AVPixelFormat pix_fmt
int av_vt_pixbuf_set_attachments(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
@ AVCOL_TRC_BT2020_12
ITU-R BT2020 for 12-bit system.
static int vt_frames_init(AVHWFramesContext *ctx)
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
int ff_hwframe_map_create(AVBufferRef *hwframe_ref, AVFrame *dst, const AVFrame *src, void(*unmap)(AVHWFramesContext *ctx, HWMapDescriptor *hwmap), void *priv)
@ AV_HWDEVICE_TYPE_VIDEOTOOLBOX
AVColorPrimaries
Chromaticity coordinates of the source primaries.
static int vt_map_frame(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags)
void * priv
Hardware-specific private data associated with the mapping.
enum AVPixelFormat * valid_hw_formats
A list of possible values for format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
int width
The allocated dimensions of the frames in this pool.
CVPixelBufferPoolRef pool
@ AVCOL_SPC_BT2020_CL
ITU-R BT2020 constant luminance system.
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
const HWContextType ff_hwcontext_type_videotoolbox
This struct describes the constraints on hardware frames attached to a given device with a hardware-s...
static int vt_pixbuf_set_attachments(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
@ AVCOL_SPC_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
const char * av_color_space_name(enum AVColorSpace space)
@ AVCOL_TRC_GAMMA28
also ITU-R BT470BG
AVBufferPool * av_buffer_pool_init2(size_t size, void *opaque, AVBufferRef *(*alloc)(void *opaque, size_t size), void(*pool_free)(void *opaque))
Allocate and initialize a buffer pool with a more complex allocator.
static int vt_device_create(AVHWDeviceContext *ctx, const char *device, AVDictionary *opts, int flags)
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
static const struct { ... } cv_pix_fmts[]
static void vt_frames_uninit(AVHWFramesContext *ctx)
@ AVCOL_TRC_GAMMA22
also ITU-R BT470M / ITU-R BT1700 625 PAL & SECAM
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
CFStringRef av_map_videotoolbox_color_primaries_from_av(enum AVColorPrimaries pri)
Convert an AVColorPrimaries to a VideoToolbox/CoreVideo color primaries string.
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
#define FF_ARRAY_ELEMS(a)
enum AVPixelFormat * valid_sw_formats
A list of possible values for sw_format in the hw_frames_ctx, terminated by AV_PIX_FMT_NONE.
static int vt_pixbuf_set_chromaloc(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
AVBufferRef * av_buffer_pool_get(AVBufferPool *pool)
Allocate a new AVBuffer, reusing an old buffer from the pool when available.
static enum AVPixelFormat supported_formats[]
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
static int vt_transfer_get_formats(AVHWFramesContext *ctx, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats)
static enum AVPixelFormat pix_fmts[]
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
@ AVCOL_PRI_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM
static int vt_pixbuf_set_colorspace(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
@ AVCOL_PRI_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
static int vt_map_from(AVHWFramesContext *hwfc, AVFrame *dst, const AVFrame *src, int flags)
uint32_t av_map_videotoolbox_format_from_pixfmt(enum AVPixelFormat pix_fmt)
Convert an AVPixelFormat to a VideoToolbox (actually CoreVideo) format.
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
Rational number (pair of numerator and denominator).
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
@ AVCOL_PRI_BT709
also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP 177 Annex B
const char * av_color_primaries_name(enum AVColorPrimaries primaries)
@ AVCOL_TRC_BT2020_10
ITU-R BT2020 for 10-bit system.
static AVBufferRef * vt_pool_alloc_buffer(void *opaque, size_t size)
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
@ AVCOL_PRI_BT2020
ITU-R BT2020.
static const struct { ... } planes[]
@ AVCOL_TRC_SMPTE2084
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
static int vt_pixbuf_set_par(void *log_ctx, CVPixelBufferRef pixbuf, const AVFrame *src)
int av_frame_copy(AVFrame *dst, const AVFrame *src)
Copy the frame data from src to dst.
#define AV_PIX_FMT_AYUV64
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
@ AV_PIX_FMT_NV16
interleaved chroma YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
static int vt_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints)
#define AVERROR_EXTERNAL
Generic error in an external library.
static void vt_unmap(AVHWFramesContext *ctx, HWMapDescriptor *hwmap)
static int vt_transfer_data_to(AVHWFramesContext *hwfc, AVFrame *dst, const AVFrame *src)
@ AVCOL_TRC_BT709
also ITU-R BT1361
AVChromaLocation
Location of chroma samples.
@ AVCOL_SPC_SMPTE240M
derived from 170M primaries and D65 white point, 170M is derived from BT470 System M's primaries
@ AV_PIX_FMT_VIDEOTOOLBOX
hardware decoding through Videotoolbox
@ AV_HWFRAME_MAP_READ
The mapping must be readable.
#define i(width, name, range_min, range_max)
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
#define av_malloc_array(a, b)
AVColorSpace
YUV colorspace type.
@ AV_PIX_FMT_NV24
planar YUV 4:4:4, 24bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
@ AV_HWFRAME_MAP_WRITE
The mapping must be writeable.
AVHWFrameTransferDirection
This struct describes a set or pool of "hardware" frames (i.e.
static int vt_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
static void videotoolbox_buffer_release(void *opaque, uint8_t *data)
enum AVPixelFormat pixfmt
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
enum AVPixelFormat av_map_videotoolbox_format_to_pixfmt(uint32_t cv_fmt)
Convert a VideoToolbox (actually CoreVideo) format to AVPixelFormat.
@ AV_PIX_FMT_UYVY422
packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
@ AVCOL_TRC_ARIB_STD_B67
ARIB STD-B67, known as "Hybrid log-gamma".
@ AVCHROMA_LOC_CENTER
MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0.
@ AV_HWFRAME_MAP_OVERWRITE
The mapped frame will be overwritten completely in subsequent operations, so the current frame data n...
CFStringRef av_map_videotoolbox_chroma_loc_from_av(enum AVChromaLocation loc)
Convert an AVChromaLocation to a VideoToolbox/CoreVideo chroma location string.
A reference to a data buffer.
const VDPAUPixFmtMap * map
uint32_t av_map_videotoolbox_format_from_pixfmt2(enum AVPixelFormat pix_fmt, bool full_range)
Same as av_map_videotoolbox_format_from_pixfmt function, but can map and return full range pixel form...
#define flags(name, subs,...)
int linesize[AV_NUM_DATA_POINTERS]
For video, a positive or negative value, which is typically indicating the size in bytes of each pict...
@ AVCOL_TRC_SMPTE428
SMPTE ST 428-1.
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
const char * av_color_transfer_name(enum AVColorTransferCharacteristic transfer)
@ AVCHROMA_LOC_BOTTOMLEFT
static int vt_pool_alloc(AVHWFramesContext *ctx)
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
#define av_fourcc2str(fourcc)