[FFmpeg-devel] [PATCH v5 3/5] libavcodec: VAAPI H.264 encoder

Mark Thompson sw at jkqxz.net
Sat Jan 30 23:13:20 CET 2016


---
 configure                   |    2 +
 libavcodec/Makefile         |    1 +
 libavcodec/allcodecs.c      |    1 +
 libavcodec/vaapi_enc_h264.c | 1015 +++++++++++++++++++++++++++++++++++++++++++
 4 files changed, 1019 insertions(+)
 create mode 100644 libavcodec/vaapi_enc_h264.c

diff --git a/configure b/configure
index d429cbb..9f8d9d4 100755
--- a/configure
+++ b/configure
@@ -2497,6 +2497,7 @@ h264_mmal_hwaccel_deps="mmal"
 h264_qsv_hwaccel_deps="libmfx"
 h264_vaapi_hwaccel_deps="vaapi"
 h264_vaapi_hwaccel_select="h264_decoder"
+h264_vaapi_encoder_deps="vaapi_recent VAEncPictureParameterBufferH264"
 h264_vda_decoder_deps="vda"
 h264_vda_decoder_select="h264_decoder"
 h264_vda_hwaccel_deps="vda"
@@ -5380,6 +5381,7 @@ check_type "d3d9.h dxva2api.h" DXVA2_ConfigPictureDecode -D_WIN32_WINNT=0x0602

 check_type "va/va.h" "VAPictureParameterBufferHEVC"
 check_type "va/va.h" "VADecPictureParameterBufferVP9"
+check_type "va/va.h" "VAEncPictureParameterBufferH264"

 check_type "vdpau/vdpau.h" "VdpPictureInfoHEVC"

diff --git a/libavcodec/Makefile b/libavcodec/Makefile
index 045d118..0b0a4e4 100644
--- a/libavcodec/Makefile
+++ b/libavcodec/Makefile
@@ -305,6 +305,7 @@ OBJS-$(CONFIG_H264_MMAL_DECODER)       += mmaldec.o
 OBJS-$(CONFIG_H264_VDA_DECODER)        += vda_h264_dec.o
 OBJS-$(CONFIG_H264_QSV_DECODER)        += qsvdec_h2645.o
 OBJS-$(CONFIG_H264_QSV_ENCODER)        += qsvenc_h264.o
+OBJS-$(CONFIG_H264_VAAPI_ENCODER)      += vaapi_enc_h264.o
 OBJS-$(CONFIG_HAP_DECODER)             += hapdec.o hap.o
 OBJS-$(CONFIG_HAP_ENCODER)             += hapenc.o hap.o
 OBJS-$(CONFIG_HEVC_DECODER)            += hevc.o hevc_mvs.o hevc_ps.o hevc_sei.o \
diff --git a/libavcodec/allcodecs.c b/libavcodec/allcodecs.c
index c7c1af5..f27c099 100644
--- a/libavcodec/allcodecs.c
+++ b/libavcodec/allcodecs.c
@@ -202,6 +202,7 @@ void avcodec_register_all(void)
 #if FF_API_VDPAU
     REGISTER_DECODER(H264_VDPAU,        h264_vdpau);
 #endif
+    REGISTER_ENCODER(H264_VAAPI,        h264_vaapi);
     REGISTER_ENCDEC (HAP,               hap);
     REGISTER_DECODER(HEVC,              hevc);
     REGISTER_DECODER(HEVC_QSV,          hevc_qsv);
diff --git a/libavcodec/vaapi_enc_h264.c b/libavcodec/vaapi_enc_h264.c
new file mode 100644
index 0000000..ecd8503
--- /dev/null
+++ b/libavcodec/vaapi_enc_h264.c
@@ -0,0 +1,1015 @@
+/*
+ * VAAPI H.264 encoder.
+ *
+ * Copyright (C) 2016 Mark Thompson <mrt at jkqxz.net>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "libavutil/opt.h"
+#include "libavutil/pixdesc.h"
+
+#include "avcodec.h"
+#include "golomb.h"
+#include "h264.h"
+#include "put_bits.h"
+#include "vaapi_support.h"
+
+#include <va/va_enc_h264.h>
+
+#define DPB_FRAMES  16
+#define INPUT_FRAMES 2
+
+typedef struct VAAPIH264EncodeFrame {
+    AVFrame *frame;
+    VASurfaceID surface_id;
+
+    int frame_num;
+    enum {
+        FRAME_TYPE_I,
+        FRAME_TYPE_P,
+        FRAME_TYPE_B,
+    } type;
+
+    VAPictureH264 pic;
+    VAEncSliceParameterBufferH264 params;
+    VABufferID params_id;
+
+    VABufferID coded_data_id;
+
+    struct VAAPIH264EncodeFrame *refp, *refb;
+} VAAPIH264EncodeFrame;
+
+typedef struct VAAPIH264EncodeContext {
+    const AVClass *class;
+
+    AVVAAPIHardwareContext *hardware_context;
+
+    AVVAAPIPipelineConfig  codec_config;
+    AVVAAPIPipelineContext codec;
+
+    AVVAAPISurfaceConfig input_config;
+    AVVAAPISurfacePool   input_pool;
+    AVVAAPISurfaceConfig recon_config;
+    AVVAAPISurfacePool   recon_pool;
+
+    int input_is_vaapi;
+
+    VAProfile va_profile;
+    int level;
+    int rc_mode;
+    int width;
+    int height;
+
+    VAEncSequenceParameterBufferH264 seq_params;
+    VABufferID seq_params_id;
+
+    VAEncMiscParameterRateControl rc_params;
+    VAEncMiscParameterBuffer rc_params_buffer;
+    VABufferID rc_params_id;
+
+    VAEncPictureParameterBufferH264 pic_params;
+    VABufferID pic_params_id;
+
+    int frame_num;
+
+    VAAPIH264EncodeFrame dpb[DPB_FRAMES];
+    int current_frame;
+    int previous_frame;
+
+    struct {
+        int64_t hardware_context;
+
+        const char *profile;
+        const char *level;
+        int qp;
+        int idr_interval;
+    } options;
+
+} VAAPIH264EncodeContext;
+
+
+static int vaapi_h264_render_packed_header(VAAPIH264EncodeContext *ctx, int type,
+                                           char *data, size_t bit_len)
+{
+    VAStatus vas;
+    VABufferID id_list[2];
+    VAEncPackedHeaderParameterBuffer buffer = {
+        .type = type,
+        .bit_length = bit_len,
+        .has_emulation_bytes = 0,
+    };
+
+    vas = vaCreateBuffer(ctx->hardware_context->display, ctx->codec.context_id,
+                         VAEncPackedHeaderParameterBufferType,
+                         sizeof(buffer), 1, &buffer, &id_list[0]);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to create parameter buffer for packed "
+               "header (type %d): %d (%s).\n", type, vas, vaErrorStr(vas));
+        return -1;
+    }
+
+    vas = vaCreateBuffer(ctx->hardware_context->display, ctx->codec.context_id,
+                         VAEncPackedHeaderDataBufferType,
+                         (bit_len + 7) / 8, 1, data, &id_list[1]);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to create data buffer for packed "
+               "header (type %d): %d (%s).\n", type, vas, vaErrorStr(vas));
+        return -1;
+    }
+
+    vas = vaRenderPicture(ctx->hardware_context->display, ctx->codec.context_id,
+                          id_list, 2);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to render packed "
+               "header (type %d): %d (%s).\n", type, vas, vaErrorStr(vas));
+        return -1;
+    }
+
+    return 0;
+}
+
+static void vaapi_h264_write_nal_header(PutBitContext *b, int ref, int type)
+{
+    // zero_byte
+    put_bits(b, 8, 0);
+    // start_code_prefix_one_3bytes
+    put_bits(b, 24, 1);
+    // forbidden_zero_bit
+    put_bits(b, 1, 0);
+    // nal_ref_idc
+    put_bits(b, 2, ref);
+    // nal_unit_type
+    put_bits(b, 5, type);
+}
+
+static void vaapi_h264_write_trailing_rbsp(PutBitContext *b)
+{
+    // rbsp_stop_one_bit
+    put_bits(b, 1, 1);
+    while(put_bits_count(b) & 7) {
+        // rbsp_alignment_zero_bit
+        put_bits(b, 1, 0);
+    }
+}
+
+static int vaapi_h264_render_packed_sps(VAAPIH264EncodeContext *ctx)
+{
+    PutBitContext b;
+    char tmp[256];
+    size_t len;
+
+    init_put_bits(&b, tmp, sizeof(tmp));
+
+    vaapi_h264_write_nal_header(&b, 3, NAL_SPS);
+
+    // profile_idc
+    put_bits(&b, 8, 66);
+    // constraint_set0_flag
+    put_bits(&b, 1, 0);
+    // constraint_set1_flag
+    put_bits(&b, 1, ctx->va_profile == VAProfileH264ConstrainedBaseline);
+    // constraint_set2_flag
+    put_bits(&b, 1, 0);
+    // constraint_set3_flag
+    put_bits(&b, 1, 0);
+    // constraint_set4_flag
+    put_bits(&b, 1, 0);
+    // constraint_set5_flag
+    put_bits(&b, 1, 0);
+    // reserved_zero_2bits
+    put_bits(&b, 2, 0);
+    // level_idc
+    put_bits(&b, 8, 52);
+    // seq_parameter_set_id
+    set_ue_golomb(&b, 0);
+
+    if(0) {    // these fields are not present for profile_idc 66
+        // chroma_format_idc
+        set_ue_golomb(&b, 1);
+        // bit_depth_luma_minus8
+        set_ue_golomb(&b, 0);
+        // bit_depth_chroma_minus8
+        set_ue_golomb(&b, 0);
+        // qpprime_y_zero_transform_bypass_flag
+        put_bits(&b, 1, 0);
+        // seq_scaling_matrix_present_flag
+        put_bits(&b, 1, 0);
+    }
+
+    // log2_max_frame_num_minus4
+    set_ue_golomb(&b, 4);
+    // pic_order_cnt_type
+    set_ue_golomb(&b, 2);
+
+    // max_num_ref_frames
+    set_ue_golomb(&b, 1);
+    // gaps_in_frame_num_value_allowed_flag
+    put_bits(&b, 1, 0);
+    // pic_width_in_mbs_minus1
+    set_ue_golomb(&b, (ctx->width  + 15) / 16 - 1);
+    // pic_height_in_map_units_minus1
+    set_ue_golomb(&b, (ctx->height + 15) / 16 - 1);
+    // frame_mbs_only_flag
+    put_bits(&b, 1, 1);
+
+    // direct_8x8_inference_flag
+    put_bits(&b, 1, 1);
+    // frame_cropping_flag
+    put_bits(&b, 1, 0);
+
+    // vui_parameters_present_flag
+    put_bits(&b, 1, 0);
+
+    vaapi_h264_write_trailing_rbsp(&b);
+
+    len = put_bits_count(&b);
+    flush_put_bits(&b);
+
+    return vaapi_h264_render_packed_header(ctx, VAEncPackedHeaderSequence,
+                                           tmp, len);
+}
+
+static int vaapi_h264_render_packed_pps(VAAPIH264EncodeContext *ctx)
+{
+    PutBitContext b;
+    char tmp[256];
+    size_t len;
+
+    init_put_bits(&b, tmp, sizeof(tmp));
+
+    vaapi_h264_write_nal_header(&b, 3, NAL_PPS);
+
+    // seq_parameter_set_id
+    set_ue_golomb(&b, 0);
+    // pic_parameter_set_id
+    set_ue_golomb(&b, 0);
+    // entropy_coding_mode_flag
+    put_bits(&b, 1, 1);
+    // bottom_field_pic_order_in_frame_present_flag
+    put_bits(&b, 1, 0);
+    // num_slice_groups_minus1
+    set_ue_golomb(&b, 0);
+
+    // num_ref_idx_l0_default_active_minus1
+    set_ue_golomb(&b, 0);
+    // num_ref_idx_l1_default_active_minus1
+    set_ue_golomb(&b, 0);
+    // weighted_pred_flag
+    put_bits(&b, 1, 0);
+    // weighted_bipred_idc
+    put_bits(&b, 2, 0);
+    // pic_init_qp_minus26
+    set_se_golomb(&b, ctx->options.qp - 26);
+    // pic_init_qs_minus26
+    set_se_golomb(&b, 0);
+    // chroma_qp_index_offset
+    set_se_golomb(&b, 0);
+    // deblocking_filter_control_present_flag
+    put_bits(&b, 1, 1);
+    // constrained_intra_pred_flag
+    put_bits(&b, 1, 0);
+    // redundant_pic_cnt_present_flag
+    put_bits(&b, 1, 0);
+
+    // transform_8x8_mode_flag
+    put_bits(&b, 1, 0);
+    // pic_scaling_matrix_present_flag
+    put_bits(&b, 1, 0);
+    // second_chroma_qp_index_offset
+    set_se_golomb(&b, 0);
+
+    vaapi_h264_write_trailing_rbsp(&b);
+
+    len = put_bits_count(&b);
+    flush_put_bits(&b);
+
+    return vaapi_h264_render_packed_header(ctx, VAEncPackedHeaderPicture,
+                                           tmp, len);
+}
+
+static int vaapi_h264_render_packed_slice(VAAPIH264EncodeContext *ctx,
+                                          VAAPIH264EncodeFrame *current)
+{
+    PutBitContext b;
+    char tmp[256];
+    size_t len;
+
+    init_put_bits(&b, tmp, sizeof(tmp));
+
+    if(current->type == FRAME_TYPE_I)
+        vaapi_h264_write_nal_header(&b, 3, NAL_IDR_SLICE);
+    else
+        vaapi_h264_write_nal_header(&b, 3, NAL_SLICE);
+
+    // first_mb_in_slice
+    set_ue_golomb(&b, 0);
+    // slice_type
+    set_ue_golomb(&b, (current->type == FRAME_TYPE_I ? 2 :
+                       current->type == FRAME_TYPE_P ? 0 : 1));
+    // pic_parameter_set_id
+    set_ue_golomb(&b, 0);
+
+    // frame_num
+    put_bits(&b, 8, current->frame_num);
+
+    if(current->type == FRAME_TYPE_I) {
+        // idr_pic_id
+        set_ue_golomb(&b, 0);
+    }
+
+    // no pic_order_cnt fields: pic_order_cnt_type is 2
+
+    if(current->type == FRAME_TYPE_B) {
+        // direct_spatial_mv_pred_flag
+        put_bits(&b, 1, 1);
+    }
+
+    if(current->type == FRAME_TYPE_P || current->type == FRAME_TYPE_B) {
+        // num_ref_idx_active_override_flag
+        put_bits(&b, 1, 0);
+        if(0) {    // not written: num_ref_idx_active_override_flag is 0
+            // num_ref_idx_l0_active_minus1
+            if(current->type == FRAME_TYPE_B) {
+                // num_ref_idx_l1_active_minus1
+            }
+        }
+
+        // ref_pic_list_modification_flag_l0
+        put_bits(&b, 1, 0);
+
+        if(current->type == FRAME_TYPE_B) {
+            // ref_pic_list_modification_flag_l1
+            put_bits(&b, 1, 0);
+        }
+    }
+
+    if(1) {
+        // dec_ref_pic_marking (present because nal_ref_idc != 0)
+        if(current->type == FRAME_TYPE_I) {
+            // no_output_of_prior_pics_flag
+            put_bits(&b, 1, 0);
+            // long_term_reference_flag
+            put_bits(&b, 1, 0);
+        } else {
+            // adaptive_pic_ref_marking_mode_flag
+            put_bits(&b, 1, 0);
+        }
+    }
+
+    if(current->type != FRAME_TYPE_I) {
+        // cabac_init_idc
+        set_ue_golomb(&b, 0);
+    }
+
+    // slice_qp_delta
+    set_se_golomb(&b, 0);
+
+    if(1) {    // deblocking_filter_control_present_flag is 1 in the PPS
+        // disable_deblocking_filter_idc
+        set_ue_golomb(&b, 0);
+        // slice_alpha_c0_offset_div2
+        set_se_golomb(&b, 0);
+        // slice_beta_offset_div2
+        set_se_golomb(&b, 0);
+    }
+
+    len = put_bits_count(&b);
+    flush_put_bits(&b);
+
+    return vaapi_h264_render_packed_header(ctx, VAEncPackedHeaderSlice,
+                                           tmp, len);
+}
+
+static int vaapi_h264_render_sequence(VAAPIH264EncodeContext *ctx)
+{
+    VAStatus vas;
+    VAEncSequenceParameterBufferH264 *seq = &ctx->seq_params;
+
+    {
+        memset(seq, 0, sizeof(*seq));
+
+        seq->level_idc = 52;
+        seq->picture_width_in_mbs  = (ctx->width  + 15) / 16;
+        seq->picture_height_in_mbs = (ctx->height + 15) / 16;
+
+        seq->intra_period = 0;
+        seq->intra_idr_period = 0;
+        seq->ip_period = 1;
+
+        seq->max_num_ref_frames = 2;
+        seq->time_scale = 900;
+        seq->num_units_in_tick = 15;
+        seq->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = 4;
+        seq->seq_fields.bits.log2_max_frame_num_minus4 = 4;
+        seq->seq_fields.bits.frame_mbs_only_flag = 1;
+        seq->seq_fields.bits.chroma_format_idc = 1;
+        seq->seq_fields.bits.direct_8x8_inference_flag = 1;
+        seq->seq_fields.bits.pic_order_cnt_type = 2;
+
+        seq->frame_cropping_flag = 1;
+        seq->frame_crop_left_offset   = 0;
+        seq->frame_crop_right_offset  = 0;
+        seq->frame_crop_top_offset    = 0;
+        seq->frame_crop_bottom_offset = 8;
+    }
+
+    vas = vaCreateBuffer(ctx->hardware_context->display, ctx->codec.context_id,
+                         VAEncSequenceParameterBufferType,
+                         sizeof(*seq), 1, seq, &ctx->seq_params_id);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to create buffer for sequence "
+               "parameters: %d (%s).\n", vas, vaErrorStr(vas));
+        return -1;
+    }
+    av_log(ctx, AV_LOG_DEBUG, "Sequence parameter buffer is %#x.\n",
+           ctx->seq_params_id);
+
+    vas = vaRenderPicture(ctx->hardware_context->display, ctx->codec.context_id,
+                          &ctx->seq_params_id, 1);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to send sequence parameters: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        return -1;
+    }
+
+    return 0;
+}
+
+static int vaapi_h264_render_picture(VAAPIH264EncodeContext *ctx,
+                                     VAAPIH264EncodeFrame *current)
+{
+    VAStatus vas;
+    VAEncPictureParameterBufferH264 *pic = &ctx->pic_params;
+    int i;
+
+    memset(pic, 0, sizeof(*pic));
+    memcpy(&pic->CurrPic, &current->pic, sizeof(VAPictureH264));
+    for(i = 0; i < FF_ARRAY_ELEMS(pic->ReferenceFrames); i++) {
+        pic->ReferenceFrames[i].picture_id = VA_INVALID_ID;
+        pic->ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID;
+    }
+    if(current->type == FRAME_TYPE_P || current->type == FRAME_TYPE_B)
+        memcpy(&pic->ReferenceFrames[0], &current->refp->pic,
+               sizeof(VAPictureH264));
+    if(current->type == FRAME_TYPE_B)
+        memcpy(&pic->ReferenceFrames[1], &current->refb->pic,
+               sizeof(VAPictureH264));
+
+    pic->pic_fields.bits.idr_pic_flag = (current->type == FRAME_TYPE_I);
+    pic->pic_fields.bits.reference_pic_flag = 1;
+    pic->pic_fields.bits.entropy_coding_mode_flag = 1;
+    pic->pic_fields.bits.deblocking_filter_control_present_flag = 1;
+
+    pic->frame_num = current->frame_num;
+    pic->last_picture = 0;
+    pic->pic_init_qp = ctx->options.qp;
+
+    pic->coded_buf = current->coded_data_id;
+
+    vas = vaCreateBuffer(ctx->hardware_context->display, ctx->codec.context_id,
+                         VAEncPictureParameterBufferType,
+                         sizeof(*pic), 1, pic, &ctx->pic_params_id);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to create buffer for picture "
+               "parameters: %d (%s).\n", vas, vaErrorStr(vas));
+        return -1;
+    }
+    av_log(ctx, AV_LOG_DEBUG, "Picture parameter buffer is %#x.\n",
+           ctx->pic_params_id);
+
+    vas = vaRenderPicture(ctx->hardware_context->display, ctx->codec.context_id,
+                          &ctx->pic_params_id, 1);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to send picture parameters: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        return -1;
+    }
+
+    return 0;
+}
+
+static int vaapi_h264_render_slice(VAAPIH264EncodeContext *ctx,
+                                   VAAPIH264EncodeFrame *current)
+{
+    VAStatus vas;
+    VAEncSliceParameterBufferH264 *slice = &current->params;
+    int i;
+
+    {
+        memset(slice, 0, sizeof(*slice));
+
+        slice->slice_type = (current->type == FRAME_TYPE_I ? 2 :
+                             current->type == FRAME_TYPE_P ? 0 : 1);
+        slice->idr_pic_id = 0;
+
+        slice->macroblock_address = 0;
+        slice->num_macroblocks = (ctx->seq_params.picture_width_in_mbs *
+                                  ctx->seq_params.picture_height_in_mbs);
+        slice->macroblock_info = VA_INVALID_ID;
+
+        for(i = 0; i < FF_ARRAY_ELEMS(slice->RefPicList0); i++) {
+            slice->RefPicList0[i].picture_id = VA_INVALID_SURFACE;
+            slice->RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
+        }
+        for(i = 0; i < FF_ARRAY_ELEMS(slice->RefPicList1); i++) {
+            slice->RefPicList1[i].picture_id = VA_INVALID_SURFACE;
+            slice->RefPicList1[i].flags = VA_PICTURE_H264_INVALID;
+        }
+
+        if(current->refp) {
+            av_log(ctx, AV_LOG_DEBUG, "Using %#x as first reference frame.\n",
+                   current->refp->pic.picture_id);
+            slice->RefPicList0[0].picture_id = current->refp->pic.picture_id;
+            slice->RefPicList0[0].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+        }
+        if(current->refb) {
+            av_log(ctx, AV_LOG_DEBUG, "Using %#x as second reference frame.\n",
+                   current->refb->pic.picture_id);
+            slice->RefPicList0[1].picture_id = current->refb->pic.picture_id;
+            slice->RefPicList0[1].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+        }
+
+        slice->slice_qp_delta = 0;
+        slice->slice_alpha_c0_offset_div2 = 0;
+        slice->slice_beta_offset_div2 = 0;
+        slice->direct_spatial_mv_pred_flag = 1;
+    }
+
+    vas = vaCreateBuffer(ctx->hardware_context->display, ctx->codec.context_id,
+                         VAEncSliceParameterBufferType,
+                         sizeof(*slice), 1, slice, &current->params_id);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to create buffer for slice "
+               "parameters: %d (%s).\n", vas, vaErrorStr(vas));
+        return -1;
+    }
+    av_log(ctx, AV_LOG_DEBUG, "Slice buffer is %#x.\n", current->params_id);
+
+    vas = vaRenderPicture(ctx->hardware_context->display, ctx->codec.context_id,
+                          &current->params_id, 1);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to send slice parameters: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        return -1;
+    }
+
+    return 0;
+}
+
+static int vaapi_h264_encode_picture(AVCodecContext *avctx, AVPacket *pkt,
+                                     const AVFrame *pic, int *got_packet)
+{
+    VAAPIH264EncodeContext *ctx = avctx->priv_data;
+    VAAPIH264EncodeFrame *current;
+    AVFrame *input_image, *recon_image;
+    VASurfaceID input_surface, recon_surface;
+    VACodedBufferSegment *buf_list, *buf;
+    VAStatus vas;
+    int err;
+
+    av_log(ctx, AV_LOG_DEBUG, "New frame: format %s, size %ux%u.\n",
+           av_get_pix_fmt_name(pic->format), pic->width, pic->height);
+
+    av_vaapi_lock_hardware_context(ctx->hardware_context);
+
+    if(pic->format == AV_PIX_FMT_VAAPI) {
+        input_image = 0;
+        input_surface = (VASurfaceID)(uintptr_t)pic->data[3];
+
+    } else {
+        input_image = av_frame_alloc();
+        if(!input_image) {
+            av_log(ctx, AV_LOG_ERROR, "Failed to allocate input frame.\n");
+            err = AVERROR(ENOMEM);
+            goto fail;
+        }
+
+        err = av_vaapi_surface_pool_get(&ctx->input_pool, input_image);
+        if(err < 0) {
+            av_log(ctx, AV_LOG_ERROR, "Failed to allocate input frame "
+                   "from surface pool: %d (%s).\n", err, av_err2str(err));
+            goto fail;
+        }
+
+        input_image->format = AV_PIX_FMT_VAAPI;
+        input_image->width  = pic->width;
+        input_image->height = pic->height;
+
+        err = av_vaapi_copy_to_surface(input_image, pic);
+        if(err < 0) {
+            av_log(ctx, AV_LOG_ERROR, "Failed to copy to input surface: "
+                   "%d (%s).\n", err, av_err2str(err));
+            goto fail;
+        }
+
+        input_surface = (VASurfaceID)(uintptr_t)input_image->data[3];
+    }
+    av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for input image.\n",
+           input_surface);
+
+    recon_image = av_frame_alloc();
+    if(!recon_image) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to allocate reconstructed frame.\n");
+        err = AVERROR(ENOMEM);
+        goto fail;
+    }
+
+    err = av_vaapi_surface_pool_get(&ctx->recon_pool, recon_image);
+    if(err < 0) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to allocate reconstructed frame "
+               "from surface pool: %d (%s).\n", err, av_err2str(err));
+        goto fail;
+    }
+
+    recon_surface = (VASurfaceID)(uintptr_t)recon_image->data[3];
+    av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for reconstructed image.\n",
+           recon_surface);
+
+    if(ctx->previous_frame != ctx->current_frame) {
+        av_frame_free(&ctx->dpb[ctx->previous_frame].frame);
+    }
+
+    ctx->previous_frame = ctx->current_frame;
+    ctx->current_frame = (ctx->current_frame + 1) % DPB_FRAMES;
+    {
+        current = &ctx->dpb[ctx->current_frame];
+
+        if(ctx->frame_num < 0 ||
+           ctx->frame_num == ctx->options.idr_interval)
+            current->type = FRAME_TYPE_I;
+        else
+            current->type = FRAME_TYPE_P;
+
+        if(current->type == FRAME_TYPE_I)
+            ctx->frame_num = 0;
+        else
+            ++ctx->frame_num;
+        current->frame_num = ctx->frame_num;
+
+        if(current->type == FRAME_TYPE_I) {
+            current->refp = 0;
+            current->refb = 0;
+        } else if(current->type == FRAME_TYPE_P) {
+            current->refp = &ctx->dpb[ctx->previous_frame];
+            current->refb = 0;
+        } else {
+            av_assert0(0);
+        }
+
+        memset(&current->pic, 0, sizeof(VAPictureH264));
+        current->pic.picture_id = recon_surface;
+        current->pic.frame_idx = ctx->frame_num;
+
+        current->frame = recon_image;
+    }
+    av_log(ctx, AV_LOG_DEBUG, "Encoding as frame as %s (%d).\n",
+           current->type == FRAME_TYPE_I ? "I" :
+           current->type == FRAME_TYPE_P ? "P" : "B", ctx->frame_num);
+
+    vas = vaBeginPicture(ctx->hardware_context->display, ctx->codec.context_id,
+                         input_surface);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to attach new picture: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        err = AVERROR_EXTERNAL;
+        goto fail;
+    }
+
+    if(current->type == FRAME_TYPE_I) {
+        err = vaapi_h264_render_sequence(ctx);
+        if(err < 0) goto fail;
+    }
+
+    err = vaapi_h264_render_picture(ctx, current);
+    if(err < 0) goto fail;
+
+    if(current->type == FRAME_TYPE_I) {
+        err = vaapi_h264_render_packed_sps(ctx);
+        if(err < 0) goto fail;
+
+        err = vaapi_h264_render_packed_pps(ctx);
+        if(err < 0) goto fail;
+    }
+
+    err = vaapi_h264_render_packed_slice(ctx, current);
+    if(err < 0) goto fail;
+
+    err = vaapi_h264_render_slice(ctx, current);
+    if(err < 0) goto fail;
+
+    vas = vaEndPicture(ctx->hardware_context->display, ctx->codec.context_id);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to start picture processing: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        err = AVERROR_EXTERNAL;
+        goto fail;
+    }
+
+    vas = vaSyncSurface(ctx->hardware_context->display, input_surface);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to sync to picture completion: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        err = AVERROR_EXTERNAL;
+        goto fail;
+    }
+
+    buf_list = 0;
+    vas = vaMapBuffer(ctx->hardware_context->display, current->coded_data_id,
+                      (void**)&buf_list);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to map output buffers: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        err = AVERROR_EXTERNAL;
+        goto fail;
+    }
+
+    for(buf = buf_list; buf; buf = buf->next) {
+        av_log(ctx, AV_LOG_DEBUG, "Output buffer: %u bytes.\n", buf->size);
+        err = av_new_packet(pkt, buf->size);
+        if(err < 0) {
+            av_log(ctx, AV_LOG_ERROR, "Failed to make output buffer "
+                   "(%u bytes).\n", buf->size);
+            goto fail;
+        }
+
+        memcpy(pkt->data, buf->buf, buf->size);
+
+        if(current->type == FRAME_TYPE_I)
+            pkt->flags |= AV_PKT_FLAG_KEY;
+
+        *got_packet = 1;
+    }
+
+    vas = vaUnmapBuffer(ctx->hardware_context->display, current->coded_data_id);
+    if(vas != VA_STATUS_SUCCESS) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to unmap output buffers: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        err = AVERROR_EXTERNAL;
+        goto fail;
+    }
+
+    if(pic->format != AV_PIX_FMT_VAAPI)
+        av_frame_free(&input_image);
+
+    err = 0;
+  fail:
+    av_vaapi_unlock_hardware_context(ctx->hardware_context);
+    return err;
+}
+
+static VAConfigAttrib config_attributes[] = {
+    { .type  = VAConfigAttribRTFormat,
+      .value = VA_RT_FORMAT_YUV420 },
+    { .type  = VAConfigAttribRateControl,
+      .value = VA_RC_CQP },
+    { .type  = VAConfigAttribEncPackedHeaders,
+      .value = 0 },
+};
+
+static av_cold int vaapi_h264_encode_init(AVCodecContext *avctx)
+{
+    VAAPIH264EncodeContext *ctx = avctx->priv_data;
+    VAStatus vas;
+    int i, err;
+
+    if(ctx->options.hardware_context == 0) {
+        av_log(ctx, AV_LOG_ERROR, "VAAPI encode requires hardware context.\n");
+        return AVERROR(EINVAL);
+    }
+    ctx->hardware_context =
+        (AVVAAPIHardwareContext*)ctx->options.hardware_context;
+
+    if(!strcmp(ctx->options.profile, "constrained_baseline"))
+        ctx->va_profile = VAProfileH264ConstrainedBaseline;
+    else if(!strcmp(ctx->options.profile, "baseline"))
+        ctx->va_profile = VAProfileH264Baseline;
+    else if(!strcmp(ctx->options.profile, "main"))
+        ctx->va_profile = VAProfileH264Main;
+    else if(!strcmp(ctx->options.profile, "high"))
+        ctx->va_profile = VAProfileH264High;
+    else {
+        av_log(ctx, AV_LOG_ERROR, "Invalid profile '%s'.\n",
+               ctx->options.profile);
+        return AVERROR(EINVAL);
+    }
+
+    ctx->level = -1;
+    if(sscanf(ctx->options.level, "%d", &ctx->level) <= 0 ||
+       ctx->level < 0 || ctx->level > 52) {
+        av_log(ctx, AV_LOG_ERROR, "Invaid level '%s'.\n", ctx->options.level);
+        return AVERROR(EINVAL);
+    }
+
+    if(ctx->options.qp >= 0) {
+        ctx->rc_mode = VA_RC_CQP;
+    } else {
+        // Default to CQP 26.
+        ctx->rc_mode = VA_RC_CQP;
+        ctx->options.qp = 26;
+    }
+    av_log(ctx, AV_LOG_VERBOSE, "Using constant-QP mode at %d.\n",
+           ctx->options.qp);
+
+    ctx->width  = avctx->width;
+    ctx->height = avctx->height;
+
+    ctx->frame_num = -1;
+
+    av_vaapi_lock_hardware_context(ctx->hardware_context);
+
+    if(avctx->pix_fmt == AV_PIX_FMT_VAAPI) {
+        // Using the input frames directly.
+        ctx->input_is_vaapi = 1;
+
+    } else {
+        AVVAAPISurfaceConfig *config = &ctx->input_config;
+
+        config->rt_format = VA_RT_FORMAT_YUV420;
+        config->av_format = AV_PIX_FMT_VAAPI;
+
+        config->image_format.fourcc = VA_FOURCC_NV12;
+        config->image_format.bits_per_pixel = 12;
+
+        config->width  = ctx->width;
+        config->height = ctx->height;
+
+        config->attribute_count = 0;
+
+        ctx->input_is_vaapi = 0;
+
+        err = av_vaapi_surface_pool_init(&ctx->input_pool,
+                                         ctx->hardware_context,
+                                         config, INPUT_FRAMES);
+        if(err < 0) {
+            av_log(ctx, AV_LOG_ERROR, "Failed to create input surface pool: "
+                   "%d (%s).\n", err, av_err2str(err));
+            goto fail;
+        }
+    }
+
+    {
+        AVVAAPISurfaceConfig *config = &ctx->recon_config;
+
+        config->rt_format = VA_RT_FORMAT_YUV420;
+        config->av_format = AV_PIX_FMT_VAAPI;
+
+        config->image_format.fourcc = VA_FOURCC_NV12;
+        config->image_format.bits_per_pixel = 12;
+
+        config->width  = ctx->width;
+        config->height = ctx->height;
+
+        config->attribute_count = 0;
+
+        err = av_vaapi_surface_pool_init(&ctx->recon_pool,
+                                         ctx->hardware_context,
+                                         config, DPB_FRAMES);
+        if(err < 0) {
+            av_log(ctx, AV_LOG_ERROR, "Failed to create recon surface pool: "
+                   "%d (%s).\n", err, av_err2str(err));
+            goto fail;
+        }
+    }
+
+    {
+        AVVAAPIPipelineConfig *config = &ctx->codec_config;
+
+        config->profile    = ctx->va_profile;
+        config->entrypoint = VAEntrypointEncSlice;
+
+        config->width  = ctx->width;
+        config->height = ctx->height;
+
+        config->attribute_count = FF_ARRAY_ELEMS(config_attributes);
+        config->attributes = config_attributes;
+    }
+
+    err = av_vaapi_pipeline_init(&ctx->codec, ctx->hardware_context,
+                                 &ctx->codec_config, &ctx->recon_pool);
+    if(err < 0) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to create codec: %d (%s).\n",
+               err, av_err2str(err));
+        goto fail;
+    }
+
+    for(i = 0; i < DPB_FRAMES; i++) {
+        vas = vaCreateBuffer(ctx->hardware_context->display,
+                             ctx->codec.context_id,
+                             VAEncCodedBufferType,
+                             1048576, 1, 0, &ctx->dpb[i].coded_data_id);
+        if(vas != VA_STATUS_SUCCESS) {
+            av_log(ctx, AV_LOG_ERROR, "Failed to create buffer for "
+                   "coded data: %d (%s).\n", vas, vaErrorStr(vas));
+            err = AVERROR_EXTERNAL;
+            goto fail;
+        }
+        av_log(ctx, AV_LOG_TRACE, "Coded data buffer %d is %#x.\n",
+               i, ctx->dpb[i].coded_data_id);
+    }
+
+    av_vaapi_unlock_hardware_context(ctx->hardware_context);
+
+    av_log(ctx, AV_LOG_VERBOSE, "Started VAAPI H.264 encoder.\n");
+    return 0;
+
+  fail:
+    av_vaapi_unlock_hardware_context(ctx->hardware_context);
+    return err;
+}
+
+static av_cold int vaapi_h264_encode_close(AVCodecContext *avctx)
+{
+    VAAPIH264EncodeContext *ctx = avctx->priv_data;
+    int err, i;
+
+    av_vaapi_lock_hardware_context(ctx->hardware_context);
+
+    for(i = 0; i < DPB_FRAMES; i++) {
+        if(ctx->dpb[i].frame)
+            av_frame_free(&ctx->dpb[i].frame);
+    }
+
+    err = av_vaapi_pipeline_uninit(&ctx->codec);
+    if(err < 0) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to uninitialise codec: "
+               "%d (%s).\n", err, av_err2str(err));
+    }
+
+    err = av_vaapi_surface_pool_uninit(&ctx->recon_pool);
+    if(err < 0) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to uninitialise recon "
+               "surface pool: %d (%s).\n", err, av_err2str(err));
+    }
+
+    if(!ctx->input_is_vaapi) {
+        err = av_vaapi_surface_pool_uninit(&ctx->input_pool);
+        if(err < 0) {
+            av_log(ctx, AV_LOG_ERROR, "Failed to uninitialise input "
+                   "surface pool: %d (%s).\n", err, av_err2str(err));
+        }
+    }
+
+    av_vaapi_unlock_hardware_context(ctx->hardware_context);
+
+    return 0;
+}
+
+#define OFFSET(member) offsetof(VAAPIH264EncodeContext, options.member)
+#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM)
+static const AVOption vaapi_h264_options[] = {
+    { "hardware_context", "VAAPI hardware context",
+      OFFSET(hardware_context), AV_OPT_TYPE_INT64,
+      { .i64 = 0 }, INT64_MIN, INT64_MAX, AV_OPT_FLAG_VIDEO_PARAM },
+    { "profile", "Set H.264 profile",
+      OFFSET(profile),  AV_OPT_TYPE_STRING,
+      { .str = "baseline" }, 0, 0, FLAGS },
+    { "level", "Set H.264 level",
+      OFFSET(level),    AV_OPT_TYPE_STRING,
+      { .str = "52" }, 0, 0, FLAGS },
+    { "qp", "Use constant quantisation parameter",
+      OFFSET(qp), AV_OPT_TYPE_INT,
+      { .i64 = -1 }, -1, 52, FLAGS },
+    { "idr_interval", "Number of frames between IDR frames (0 = all intra)",
+      OFFSET(idr_interval), AV_OPT_TYPE_INT,
+      { .i64 = -1 }, -1, INT_MAX, FLAGS },
+    { 0 }
+};
+
+static const AVClass vaapi_h264_class = {
+    .class_name = "vaapi_h264",
+    .item_name  = av_default_item_name,
+    .option     = vaapi_h264_options,
+    .version    = LIBAVUTIL_VERSION_INT,
+};
+
+AVCodec ff_h264_vaapi_encoder = {
+    .name           = "vaapi_h264",
+    .long_name      = NULL_IF_CONFIG_SMALL("H.264/AVC (VAAPI)"),
+    .type           = AVMEDIA_TYPE_VIDEO,
+    .id             = AV_CODEC_ID_H264,
+    .priv_data_size = sizeof(VAAPIH264EncodeContext),
+    .init           = &vaapi_h264_encode_init,
+    .encode2        = &vaapi_h264_encode_picture,
+    .close          = &vaapi_h264_encode_close,
+    .priv_class     = &vaapi_h264_class,
+    .pix_fmts = (const enum AVPixelFormat[]) {
+        AV_PIX_FMT_VAAPI,
+        AV_PIX_FMT_NV12,
+        AV_PIX_FMT_NONE,
+    },
+};
-- 
2.7.0.rc3



