/*
 * This example shows how to do QSV-accelerated H.264 decoding with
 * output frames in VA-API video surfaces.
 */
#include <stdio.h>
#include <mfx/mfxvideo.h>
#include <va/va.h>
#include <va/va_x11.h>
#include <X11/Xlib.h>
/*
 * mfxFrameAllocator.Alloc callback: allocates VA-API surfaces to back the
 * QSV decoder's output frames.
 *
 * NOTE(review): this block appears corrupted — the leading statements that
 * guarded the "Multiple allocation requests" error path are missing (as
 * written, lines below are unreachable / orphaned), the argument list of
 * vaCreateSurfaces() is never closed, and the final
 * `return MFX_ERR_MEMORY_ALLOC;` after `return MFX_ERR_NONE;` is dead.
 * Code left byte-identical; comments only.
 */
static mfxStatus
frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
mfxFrameAllocResponse *resp)
{
int err, i;
/* NOTE(review): presumably this was inside an "already allocated once"
 * check whose condition is missing — TODO restore from the original. */
fprintf(stderr, "Multiple allocation requests.\n");
return MFX_ERR_MEMORY_ALLOC;
}
/* Only video-memory decoder-target surfaces are supported. */
if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)) {
fprintf(stderr, "Unsupported surface type: %d\n", req->Type);
return MFX_ERR_UNSUPPORTED;
}
/* Only 8-bit, unshifted NV12 4:2:0 surfaces are supported. */
if (req->Info.BitDepthLuma != 8 || req->Info.BitDepthChroma != 8 ||
req->Info.Shift || req->Info.FourCC != MFX_FOURCC_NV12 ||
req->Info.ChromaFormat != MFX_CHROMAFORMAT_YUV420) {
fprintf(stderr, "Unsupported surface properties.\n");
return MFX_ERR_UNSUPPORTED;
}
/* NOTE(review): this call is unterminated — trailing arguments and the
 * closing parenthesis are missing from the source. */
err = vaCreateSurfaces(decode->
va_dpy, VA_RT_FORMAT_YUV420,
req->Info.Width, req->Info.Height,
decode->
surfaces, req->NumFrameSuggested,
if (err != VA_STATUS_SUCCESS) {
fprintf(stderr, "Error allocating VA surfaces\n");
}
return MFX_ERR_NONE;
/* NOTE(review): dead code — presumably the tail of a missing error
 * path (e.g. freeing partially-built state on failure). */
return MFX_ERR_MEMORY_ALLOC;
}
/*
 * mfxFrameAllocator.Free callback.
 *
 * Deliberate no-op: this allocator reports success without releasing
 * anything here (NOTE(review): presumably the VA surfaces are torn down
 * elsewhere — confirm against the full program).
 */
static mfxStatus
frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    (void)pthis;
    (void)resp;
    return MFX_ERR_NONE;
}
/*
 * mfxFrameAllocator.Lock callback.
 *
 * This allocator never exposes CPU mappings of its surfaces through the
 * MFX interface, so locking is always reported as unsupported.
 */
static mfxStatus
frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    (void)pthis;
    (void)mid;
    (void)ptr;
    return MFX_ERR_UNSUPPORTED;
}
/*
 * mfxFrameAllocator.Unlock callback.
 *
 * Mirrors frame_lock(): no CPU mapping exists, so unlocking is likewise
 * unsupported.
 */
static mfxStatus
frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    (void)pthis;
    (void)mid;
    (void)ptr;
    return MFX_ERR_UNSUPPORTED;
}
/*
 * mfxFrameAllocator.GetHDL callback.
 *
 * The mid stored for each surface is already the native handle — a pointer
 * to the VASurfaceID (it is set to &decode->surfaces[idx] when a frame is
 * picked, and dereferenced as a VASurfaceID* when the frame is read back) —
 * so it is handed straight back.
 */
static mfxStatus
frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    (void)pthis;

    *hdl = mid;
    return MFX_ERR_NONE;
}
/*
 * NOTE(review): the region below is corrupted — every function signature
 * is missing, leaving orphaned bodies and braces. From the surviving
 * statements these appear to be:
 *   - an empty callback body (the `{ }` pair),
 *   - a buffer-release callback that clears an in-use flag through its
 *     `opaque` pointer,
 *   - a get_buffer-style callback that searches for a free surface
 *     (`No free surfaces` on failure) and attaches a VASurfaceID pointer
 *     as Data.MemId,
 *   - a get_format-style callback that walks `pix_fmts` and requests
 *     MFX_IOPATTERN_OUT_VIDEO_MEMORY via a qsv context.
 * Code left byte-identical; comments only — TODO restore the missing
 * lines from the original example.
 */
{
}
{
/* Mark the buffer's surface as no longer in use. */
int *used = opaque;
*used = 0;
}
{
mfxFrameSurface1 *surf;
int idx;
/* NOTE(review): the loop header and surface-scan body that this `break`
 * belonged to are missing. */
break;
}
fprintf(stderr, "No free surfaces\n");
}
if (!surf)
if (!surf_buf) {
}
/* The chosen surface's MemId points at its VASurfaceID, matching what
 * frame_get_hdl() hands back and what decode_packet() dereferences. */
surf->Data.MemId = &decode->
surfaces[idx];
frame->
buf[0] = surf_buf;
return 0;
}
{
if (!qsv)
/* Ask for decoder output directly in video memory (VA surfaces). */
qsv->
iopattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
}
}
pix_fmts++;
}
fprintf(stderr, "The QSV pixel format not offered in get_format()\n");
}
/*
 * Decode one packet and dump each produced frame to the output as raw
 * NV12, reading the VA surface back through a VA image.
 *
 * NOTE(review): corrupted block — the function signature (presumably
 * decode_packet(decode, decoder_ctx, frame, pkt, output_ctx), matching
 * the calls in main()), the decode call that sets `ret`/`got_frame`, the
 * declarations of `img` and `data`, and the trailing arguments of
 * vaCreateImage()/vaGetImage() are all missing. Code left byte-identical;
 * comments only.
 */
{
int ret = 0;
int got_frame = 1;
/* Keep decoding while input bytes remain, or, when flushing with a NULL
 * data packet, while the decoder still produces frames. */
while (pkt->
size > 0 || (!pkt->
data && got_frame)) {
if (ret < 0) {
fprintf(stderr, "Error during decoding\n");
return ret;
}
if (got_frame) {
/* For QSV frames, frame->data[3] carries the mfxFrameSurface1;
 * its Data.MemId points at the VASurfaceID (see frame_alloc /
 * frame_get_hdl). */
mfxFrameSurface1 *surf = (mfxFrameSurface1*)frame->
data[3];
VASurfaceID surface = *(VASurfaceID*)surf->Data.MemId;
/* Read the decoded surface back as an 8-bit NV12 VA image. */
VAImageFormat img_fmt = {
.fourcc = VA_FOURCC_NV12,
.byte_order = VA_LSB_FIRST,
.bits_per_pixel = 8,
.depth = 8,
};
VAStatus err;
int i, j;
/* Sentinel values so cleanup below only touches what was created. */
img.buf = VA_INVALID_ID;
img.image_id = VA_INVALID_ID;
/* NOTE(review): call unterminated — width/height/&img arguments and
 * the closing parenthesis are missing. */
err = vaCreateImage(decode->
va_dpy, &img_fmt,
if (err != VA_STATUS_SUCCESS) {
fprintf(stderr, "Error creating an image: %s\n",
vaErrorStr(err));
}
/* NOTE(review): vaGetImage() takes width/height before the image id;
 * those arguments are missing here. */
err = vaGetImage(decode->
va_dpy, surface, 0, 0,
img.image_id);
if (err != VA_STATUS_SUCCESS) {
fprintf(stderr, "Error getting an image: %s\n",
vaErrorStr(err));
}
err = vaMapBuffer(decode->
va_dpy, img.buf, (
void**)&data);
if (err != VA_STATUS_SUCCESS) {
fprintf(stderr, "Error mapping the image buffer: %s\n",
vaErrorStr(err));
}
/* Write each plane row by row; plane 0 (luma) uses full height,
 * later planes (chroma) half height — hence the `>> (i > 0)`. */
for (i = 0; i < img.num_planes; i++)
for (j = 0; j < (img.height >> (i > 0)); j++)
avio_write(output_ctx, data + img.offsets[i] + j * img.pitches[i], img.width);
/* Release VA image resources, guarded by the sentinels set above. */
if (img.buf != VA_INVALID_ID)
vaUnmapBuffer(decode->
va_dpy, img.buf);
if (img.image_id != VA_INVALID_ID)
vaDestroyImage(decode->
va_dpy, img.image_id);
if (ret < 0)
return ret;
}
}
return 0;
}
/*
 * Program entry point: opens the input file, sets up X11/VA-API and an MFX
 * session with the custom frame allocator above, then decodes the H.264
 * stream packet by packet and dumps raw frames to the output file.
 *
 * NOTE(review): corrupted block — the declarations of decode, dpy, st,
 * video_st, decoder, decoder_ctx, frame, pkt, output_ctx and buf, the
 * input-file open that sets `ret`, the stream-probing loop, decoder
 * creation/open, the frame_allocator member initialization, the packet
 * read loop body, and most cleanup are missing. Code left byte-identical;
 * comments only.
 */
int main(
int argc,
char **argv)
{
int va_ver_major, va_ver_minor;
/* Let libmfx pick any available implementation; require API >= 1.1. */
mfxIMPL mfx_impl = MFX_IMPL_AUTO_ANY;
mfxVersion mfx_ver = { { 1, 1 } };
/* NOTE(review): the designated initializers wiring frame_alloc,
 * frame_free, frame_lock, frame_unlock and frame_get_hdl into this
 * struct are missing. */
mfxFrameAllocator frame_allocator = {
};
int ret, i, err;
/* Expect exactly an input path and an output path. */
if (argc < 3) {
fprintf(stderr, "Usage: %s <input file> <output file>\n", argv[0]);
return 1;
}
/* NOTE(review): the call that opens the input and sets `ret` is
 * missing — `ret` is read uninitialized as written. */
if (ret < 0) {
fprintf(stderr, "Cannot open input file '%s': ", argv[1]);
}
/* NOTE(review): orphaned remnant of the loop that scanned the input's
 * streams for an H.264 video stream. */
video_st = st;
else
}
if (!video_st) {
fprintf(stderr, "No H.264 video stream in the input file\n");
}
/* Open the default X display and derive a VA display from it. */
dpy = XOpenDisplay(
NULL);
if (!dpy) {
fprintf(stderr, "Cannot open the X display\n");
}
decode.
va_dpy = vaGetDisplay(dpy);
/* NOTE(review): the `if (!decode.va_dpy) {` guard for this error
 * message is missing. */
fprintf(stderr, "Cannot open the VA display\n");
}
err = vaInitialize(decode.
va_dpy, &va_ver_major, &va_ver_minor);
if (err != VA_STATUS_SUCCESS) {
fprintf(stderr, "Cannot initialize VA: %s\n", vaErrorStr(err));
}
fprintf(stderr, "Initialized VA v%d.%d\n", va_ver_major, va_ver_minor);
/* Create the MFX session and install the VA-backed frame allocator. */
err = MFXInit(mfx_impl, &mfx_ver, &decode.
mfx_session);
if (err != MFX_ERR_NONE) {
fprintf(stderr, "Error initializing an MFX session\n");
}
MFXVideoCORE_SetFrameAllocator(decode.
mfx_session, &frame_allocator);
/* NOTE(review): decoder lookup/creation and the opens that set `ret`
 * for the checks below are missing. */
if (!decoder) {
fprintf(stderr, "The QSV decoder is not present in libavcodec\n");
}
if (!decoder_ctx) {
}
}
}
if (ret < 0) {
fprintf(stderr, "Error opening the decoder: ");
}
if (ret < 0) {
fprintf(stderr, "Error opening the output context: ");
}
if (!frame) {
}
/* Main loop: read packets (read call missing) and decode each one. */
while (ret >= 0) {
if (ret < 0)
break;
ret =
decode_packet(&decode, decoder_ctx, frame, &pkt, output_ctx);
}
/* Flush the decoder with one final (presumably empty) packet. */
ret =
decode_packet(&decode, decoder_ctx, frame, &pkt, output_ctx);
if (ret < 0) {
fprintf(stderr, "%s\n", buf);
}
/* NOTE(review): the decoder_ctx cleanup statement under this guard is
 * missing. */
if (decoder_ctx)
if (dpy)
XCloseDisplay(dpy);
return ret;
}