[FFmpeg-devel] [PATCH v2] avcodec/v4l2_m2m_dec: export v4l2 buffer dma-buf
Dave Stevenson
dave.stevenson at raspberrypi.com
Thu Mar 24 14:42:41 EET 2022
Hi Ming
Thanks for working on V4L2 M2M - it's nice to see people using it.
On Thu, 24 Mar 2022 at 06:13, Ming Qian <ming.qian at nxp.com> wrote:
>
> If the V4L2 buffer can be exported as a dma-buf,
> then we can report the frame as AV_PIX_FMT_DRM_PRIME,
> so the caller can pass it to other hardware devices,
> for example to display it directly without copying the frame data.
>
> Signed-off-by: Ming Qian <ming.qian at nxp.com>
> ---
> libavcodec/v4l2_buffers.c | 115 +++++++++++++++++++++++++++++++++++++-
> libavcodec/v4l2_buffers.h | 2 +
> libavcodec/v4l2_context.c | 53 ++++++++++++++++++
> libavcodec/v4l2_context.h | 17 ++++++
> libavcodec/v4l2_fmt.c | 96 ++++++++++++++++++-------------
> libavcodec/v4l2_fmt.h | 1 +
> libavcodec/v4l2_m2m_dec.c | 21 +++++++
> 7 files changed, 264 insertions(+), 41 deletions(-)
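
For anyone wanting to try this from the application side: once the
decoder reports AV_PIX_FMT_DRM_PRIME, the descriptor lives in
frame->data[0], as documented in libavutil/hwcontext_drm.h. A rough,
untested sketch of a consumer (the function name and the sink are made
up; the descriptor fields are the real API):

    #include <libavutil/frame.h>
    #include <libavutil/hwcontext_drm.h>

    /* Hypothetical consumer: walk the dma-bufs exported for one frame. */
    static void consume_drm_prime_frame(const AVFrame *frame)
    {
        const AVDRMFrameDescriptor *desc =
            (const AVDRMFrameDescriptor *)frame->data[0];

        for (int i = 0; i < desc->nb_layers; i++) {
            const AVDRMLayerDescriptor *layer = &desc->layers[i];

            for (int j = 0; j < layer->nb_planes; j++) {
                const AVDRMPlaneDescriptor *plane = &layer->planes[j];
                int fd = desc->objects[plane->object_index].fd;

                /* Import fd into KMS/EGL/Vulkan using plane->offset and
                 * plane->pitch; the frame data is never copied. */
                (void)fd;
            }
        }
    }
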
>
> diff --git a/libavcodec/v4l2_buffers.c b/libavcodec/v4l2_buffers.c
> index 3f5471067a1a..18f17d871b8c 100644
> --- a/libavcodec/v4l2_buffers.c
> +++ b/libavcodec/v4l2_buffers.c
> @@ -21,17 +21,23 @@
> * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> */
>
> +#ifndef _GNU_SOURCE
> +#define _GNU_SOURCE
> +#endif
> #include <linux/videodev2.h>
> #include <sys/ioctl.h>
> #include <sys/mman.h>
> #include <unistd.h>
> #include <fcntl.h>
> #include <poll.h>
> +#include "libavutil/hwcontext.h"
> +#include "libavutil/hwcontext_drm.h"
> #include "libavcodec/avcodec.h"
> #include "libavutil/pixdesc.h"
> #include "v4l2_context.h"
> #include "v4l2_buffers.h"
> #include "v4l2_m2m.h"
> +#include "v4l2_fmt.h"
>
> #define USEC_PER_SEC 1000000
> static AVRational v4l2_timebase = { 1, USEC_PER_SEC };
> @@ -209,7 +215,7 @@ static enum AVColorTransferCharacteristic v4l2_get_color_trc(V4L2Buffer *buf)
> return AVCOL_TRC_UNSPECIFIED;
> }
>
> -static void v4l2_free_buffer(void *opaque, uint8_t *unused)
> +static void v4l2_free_buffer(void *opaque, uint8_t *data)
> {
> V4L2Buffer* avbuf = opaque;
> V4L2m2mContext *s = buf_to_m2mctx(avbuf);
> @@ -229,6 +235,12 @@ static void v4l2_free_buffer(void *opaque, uint8_t *unused)
> ff_v4l2_buffer_enqueue(avbuf);
> }
>
> + if (avbuf->hwctx_ref) {
> + AVDRMFrameDescriptor *desc = (AVDRMFrameDescriptor *)data;
> +
> + av_buffer_unref(&avbuf->hwctx_ref);
> + av_free(desc);
> + }
> av_buffer_unref(&avbuf->context_ref);
> }
> }
> @@ -337,6 +349,90 @@ static int v4l2_buffer_buf_to_swframe(AVFrame *frame, V4L2Buffer *avbuf)
> return 0;
> }
>
> +static int v4l2_buffer_buf_to_hwframe(AVFrame *frame, V4L2Buffer *avbuf)
> +{
> + V4L2Context *ctx = avbuf->context;
> + AVDRMFrameDescriptor *desc = NULL;
> + AVDRMLayerDescriptor *layer = NULL;
> + int i;
> + int ret;
> +
> + if (!ctx->hwframes)
> + return AVERROR(EINVAL);
> +
> + for (i = 0; i < avbuf->num_planes; i++) {
> + if (avbuf->plane_info[i].dmafd < 0)
> + return AVERROR(EINVAL);
> + }
> +
> + desc = av_mallocz(sizeof(AVDRMFrameDescriptor));
> + if (!desc)
> + return AVERROR(ENOMEM);
> +
> + for (i = 0; i < avbuf->num_planes; i++) {
> + desc->objects[i].fd = avbuf->plane_info[i].dmafd;
> + desc->objects[i].size = avbuf->plane_info[i].length;
> + }
> + desc->nb_objects = avbuf->num_planes;
> +
> + desc->nb_layers = 1;
> + layer = &desc->layers[0];
> + layer->format = ff_v4l2_format_avfmt_to_drm(avbuf->context->av_pix_fmt);
> + layer->nb_planes = avbuf->num_planes;
> + for (i = 0; i < avbuf->num_planes; i++) {
> + layer->planes[i].object_index = i;
> + layer->planes[i].offset = 0;
> + layer->planes[i].pitch = avbuf->plane_info[i].bytesperline;
> + }
> +
> + /* fixup special cases */
> + switch (avbuf->context->av_pix_fmt) {
> + case AV_PIX_FMT_NV12:
> + case AV_PIX_FMT_NV21:
> + if (avbuf->num_planes > 1)
> + break;
> + layer->nb_planes = 2;
> + layer->planes[1].object_index = 0;
> + layer->planes[1].offset = avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height;
> + layer->planes[1].pitch = avbuf->plane_info[0].bytesperline;
> + break;
> +
> + case AV_PIX_FMT_YUV420P:
> + if (avbuf->num_planes > 1)
> + break;
> + layer->nb_planes = 3;
> + layer->planes[1].object_index = 0;
> + layer->planes[2].object_index = 0;
> + layer->planes[1].offset = avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height;
> + layer->planes[2].offset = layer->planes[1].offset + ((avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height) >> 2);
> + layer->planes[1].pitch = avbuf->plane_info[0].bytesperline >> 1;
> + layer->planes[2].pitch = avbuf->plane_info[0].bytesperline >> 1;
> + break;
> +
> + default:
> + break;
> + }
> +
> + avbuf->hwctx_ref = av_buffer_ref(ctx->hwdevice);
> + frame->buf[0] = av_buffer_create((uint8_t *)desc, sizeof(*desc), v4l2_free_buffer, avbuf, 0);
> + if (!frame->buf[0]) {
> + av_free(desc);
> + av_buffer_unref(&avbuf->hwctx_ref);
> + return AVERROR(ENOMEM);
> + }
> + frame->data[0] = (uint8_t *)desc;
> + frame->format = AV_PIX_FMT_DRM_PRIME;
> + frame->hw_frames_ctx = av_buffer_ref(ctx->hwframes);
> +
> + ret = v4l2_buf_increase_ref(avbuf);
> + if (ret) {
> + av_buffer_unref(&frame->buf[0]);
> + return ret;
> + }
> +
> + return 0;
> +}
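
(To spell out the single-plane fixup above with example numbers that are
not taken from the patch: for a contiguous NV12 capture buffer with
bytesperline = 1920 and pix_mp.height = 1088, the chroma plane is
described as object 0 at offset 1920 * 1088 = 2088960 with the same
1920-byte pitch; for single-plane YUV420P the two chroma planes get half
the pitch (960) and sit at offsets 2088960 and 2088960 + 2088960 / 4 =
2611200.)
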
> +
> static int v4l2_buffer_swframe_to_buf(const AVFrame *frame, V4L2Buffer *out)
> {
> int i, ret;
> @@ -417,12 +513,17 @@ int ff_v4l2_buffer_avframe_to_buf(const AVFrame *frame, V4L2Buffer *out)
>
> int ff_v4l2_buffer_buf_to_avframe(AVFrame *frame, V4L2Buffer *avbuf)
> {
> + V4L2m2mContext *s = buf_to_m2mctx(avbuf);
> int ret;
>
> av_frame_unref(frame);
>
> /* 1. get references to the actual data */
> - ret = v4l2_buffer_buf_to_swframe(frame, avbuf);
> + if (s->avctx->pix_fmt == AV_PIX_FMT_DRM_PRIME) {
> + ret = v4l2_buffer_buf_to_hwframe(frame, avbuf);
> + } else {
> + ret = v4l2_buffer_buf_to_swframe(frame, avbuf);
> + }
> if (ret)
> return ret;
>
> @@ -493,6 +594,7 @@ int ff_v4l2_buffer_avpkt_to_buf(const AVPacket *pkt, V4L2Buffer *out)
> int ff_v4l2_buffer_initialize(V4L2Buffer* avbuf, int index)
> {
> V4L2Context *ctx = avbuf->context;
> + struct v4l2_exportbuffer exp;
> int ret, i;
>
> avbuf->buf.memory = V4L2_MEMORY_MMAP;
> @@ -538,6 +640,15 @@ int ff_v4l2_buffer_initialize(V4L2Buffer* avbuf, int index)
>
> if (avbuf->plane_info[i].mm_addr == MAP_FAILED)
> return AVERROR(ENOMEM);
> +
> + exp.type = ctx->type;
> + exp.index = avbuf->buf.index;
> + exp.plane = i;
> + exp.fd = -1;
> + exp.flags = O_CLOEXEC | O_RDWR;
> + if (ioctl(buf_to_m2mctx(avbuf)->fd, VIDIOC_EXPBUF, &exp))
> + avbuf->context->support_dma_buf = 0;
> + avbuf->plane_info[i].dmafd = exp.fd;
> }
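
One small note on the export above, based on my reading of the
VIDIOC_EXPBUF documentation: every field of struct v4l2_exportbuffer
that isn't explicitly filled in, including the reserved array, is
supposed to be zero. Building the request with a designated initialiser
per plane would guarantee that, roughly (untested sketch):

    struct v4l2_exportbuffer exp = {
        .type  = ctx->type,
        .index = avbuf->buf.index,
        .plane = i,
        .flags = O_CLOEXEC | O_RDWR,
        .fd    = -1,
    };

    if (ioctl(buf_to_m2mctx(avbuf)->fd, VIDIOC_EXPBUF, &exp))
        avbuf->context->support_dma_buf = 0;
    avbuf->plane_info[i].dmafd = exp.fd;
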
>
> avbuf->status = V4L2BUF_AVAILABLE;
> diff --git a/libavcodec/v4l2_buffers.h b/libavcodec/v4l2_buffers.h
> index 3d2ff1b9a5d7..04250cda175e 100644
> --- a/libavcodec/v4l2_buffers.h
> +++ b/libavcodec/v4l2_buffers.h
> @@ -55,6 +55,7 @@ typedef struct V4L2Buffer {
> int bytesperline;
> void * mm_addr;
> size_t length;
> + int dmafd;
> } plane_info[VIDEO_MAX_PLANES];
>
> int num_planes;
> @@ -66,6 +67,7 @@ typedef struct V4L2Buffer {
> int flags;
> enum V4L2Buffer_status status;
>
> + AVBufferRef *hwctx_ref;
> } V4L2Buffer;
>
> /**
> diff --git a/libavcodec/v4l2_context.c b/libavcodec/v4l2_context.c
> index 8910ae08d3a5..685de60690e0 100644
> --- a/libavcodec/v4l2_context.c
> +++ b/libavcodec/v4l2_context.c
> @@ -455,6 +455,10 @@ static int v4l2_release_buffers(V4L2Context* ctx)
> if (p->mm_addr && p->length)
> if (munmap(p->mm_addr, p->length) < 0)
> av_log(logger(ctx), AV_LOG_ERROR, "%s unmap plane (%s))\n", ctx->name, av_err2str(AVERROR(errno)));
> + if (p->dmafd >= 0) {
> + close(p->dmafd);
> + p->dmafd = -1;
> + }
> }
> }
>
> @@ -696,6 +700,53 @@ int ff_v4l2_context_set_format(V4L2Context* ctx)
> return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_S_FMT, &ctx->format);
> }
>
> +
> +int ff_v4l2_context_init_hw_ctx(V4L2Context *ctx)
> +{
> + AVHWFramesContext *hwframes;
> + int ret;
> +
> + if (!ctx->support_dma_buf)
> + return AVERROR(EINVAL);
> +
> + ctx->hwdevice = av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_DRM);
> + if (!ctx->hwdevice) {
> + ret = AVERROR(ENOMEM);
> + goto fail;
> + }
> +
> + ret = av_hwdevice_ctx_init(ctx->hwdevice);
> + if (ret < 0)
> + goto fail;
> +
> + ctx->hwframes = av_hwframe_ctx_alloc(ctx->hwdevice);
> + if (!ctx->hwframes) {
> + ret = AVERROR(ENOMEM);
> + goto fail;
> + }
> +
> + hwframes = (AVHWFramesContext*)ctx->hwframes->data;
> + hwframes->format = AV_PIX_FMT_DRM_PRIME;
> + hwframes->sw_format = ctx->av_pix_fmt;
> + hwframes->width = ctx->width;
> + hwframes->height = ctx->height;
> + ret = av_hwframe_ctx_init(ctx->hwframes);
> + if (ret < 0)
> + goto fail;
> +
> + return 0;
> +fail:
> + ff_v4l2_context_uninit_hw_ctx(ctx);
> + ctx->support_dma_buf = 0;
> + return ret;
> +}
> +
> +void ff_v4l2_context_uninit_hw_ctx(V4L2Context *ctx)
> +{
> + av_buffer_unref(&ctx->hwframes);
> + av_buffer_unref(&ctx->hwdevice);
> +}
> +
> void ff_v4l2_context_release(V4L2Context* ctx)
> {
> int ret;
> @@ -708,6 +759,7 @@ void ff_v4l2_context_release(V4L2Context* ctx)
> av_log(logger(ctx), AV_LOG_WARNING, "V4L2 failed to unmap the %s buffers\n", ctx->name);
>
> av_freep(&ctx->buffers);
> + ff_v4l2_context_uninit_hw_ctx(ctx);
> }
>
> int ff_v4l2_context_init(V4L2Context* ctx)
> @@ -742,6 +794,7 @@ int ff_v4l2_context_init(V4L2Context* ctx)
> return AVERROR(ENOMEM);
> }
>
> + ctx->support_dma_buf = 1;
> for (i = 0; i < req.count; i++) {
> ctx->buffers[i].context = ctx;
> ret = ff_v4l2_buffer_initialize(&ctx->buffers[i], i);
> diff --git a/libavcodec/v4l2_context.h b/libavcodec/v4l2_context.h
> index 6f7460c89a9d..723d622e38c3 100644
> --- a/libavcodec/v4l2_context.h
> +++ b/libavcodec/v4l2_context.h
> @@ -93,6 +93,9 @@ typedef struct V4L2Context {
> */
> int done;
>
> + int support_dma_buf;
> + AVBufferRef *hwdevice;
> + AVBufferRef *hwframes;
> } V4L2Context;
>
> /**
> @@ -184,4 +187,18 @@ int ff_v4l2_context_enqueue_packet(V4L2Context* ctx, const AVPacket* pkt);
> */
> int ff_v4l2_context_enqueue_frame(V4L2Context* ctx, const AVFrame* f);
>
> +/**
> + * Initializes the hw context of V4L2Context.
> + *
> + * @param[in] ctx A pointer to a V4L2Context. See V4L2Context description for required variables.
> + * @return 0 in case of success, a negative value representing the error otherwise.
> + */
> +int ff_v4l2_context_init_hw_ctx(V4L2Context *ctx);
> +
> +/**
> + * Releases the hw context of V4L2Context.
> + *
> + * @param[in] ctx A pointer to a V4L2Context.
> + */
> +void ff_v4l2_context_uninit_hw_ctx(V4L2Context *ctx);
> #endif // AVCODEC_V4L2_CONTEXT_H
> diff --git a/libavcodec/v4l2_fmt.c b/libavcodec/v4l2_fmt.c
> index 6df47e3f5a3c..a64b6d530283 100644
> --- a/libavcodec/v4l2_fmt.c
> +++ b/libavcodec/v4l2_fmt.c
> @@ -29,83 +29,91 @@
> #define AV_CODEC(x) AV_CODEC_ID_##x
> #define AV_FMT(x) AV_PIX_FMT_##x
>
> +#if CONFIG_LIBDRM
> +#include <drm_fourcc.h>
> +#define DRM_FMT(x) DRM_FORMAT_##x
> +#else
> +#define DRM_FMT(x) 0
> +#endif
> +
> static const struct fmt_conversion {
> enum AVPixelFormat avfmt;
> enum AVCodecID avcodec;
> uint32_t v4l2_fmt;
> + uint32_t drm_fmt;
> } fmt_map[] = {
> - { AV_FMT(RGB555LE), AV_CODEC(RAWVIDEO), V4L2_FMT(RGB555) },
> - { AV_FMT(RGB555BE), AV_CODEC(RAWVIDEO), V4L2_FMT(RGB555X) },
> - { AV_FMT(RGB565LE), AV_CODEC(RAWVIDEO), V4L2_FMT(RGB565) },
> - { AV_FMT(RGB565BE), AV_CODEC(RAWVIDEO), V4L2_FMT(RGB565X) },
> - { AV_FMT(BGR24), AV_CODEC(RAWVIDEO), V4L2_FMT(BGR24) },
> - { AV_FMT(RGB24), AV_CODEC(RAWVIDEO), V4L2_FMT(RGB24) },
> - { AV_FMT(BGR0), AV_CODEC(RAWVIDEO), V4L2_FMT(BGR32) },
> - { AV_FMT(0RGB), AV_CODEC(RAWVIDEO), V4L2_FMT(RGB32) },
> - { AV_FMT(GRAY8), AV_CODEC(RAWVIDEO), V4L2_FMT(GREY) },
> - { AV_FMT(YUV420P), AV_CODEC(RAWVIDEO), V4L2_FMT(YUV420) },
> - { AV_FMT(YUYV422), AV_CODEC(RAWVIDEO), V4L2_FMT(YUYV) },
> - { AV_FMT(UYVY422), AV_CODEC(RAWVIDEO), V4L2_FMT(UYVY) },
> - { AV_FMT(YUV422P), AV_CODEC(RAWVIDEO), V4L2_FMT(YUV422P) },
> - { AV_FMT(YUV411P), AV_CODEC(RAWVIDEO), V4L2_FMT(YUV411P) },
> - { AV_FMT(YUV410P), AV_CODEC(RAWVIDEO), V4L2_FMT(YUV410) },
> - { AV_FMT(YUV410P), AV_CODEC(RAWVIDEO), V4L2_FMT(YVU410) },
> - { AV_FMT(NV12), AV_CODEC(RAWVIDEO), V4L2_FMT(NV12) },
> - { AV_FMT(NONE), AV_CODEC(MJPEG), V4L2_FMT(MJPEG) },
> - { AV_FMT(NONE), AV_CODEC(MJPEG), V4L2_FMT(JPEG) },
> + { AV_FMT(RGB555LE), AV_CODEC(RAWVIDEO), V4L2_FMT(RGB555), DRM_FMT(XRGB1555) },
> + { AV_FMT(RGB555BE), AV_CODEC(RAWVIDEO), V4L2_FMT(RGB555X), DRM_FMT(XRGB1555) | DRM_FMT(BIG_ENDIAN) },
> + { AV_FMT(RGB565LE), AV_CODEC(RAWVIDEO), V4L2_FMT(RGB565), DRM_FMT(RGB565) },
> + { AV_FMT(RGB565BE), AV_CODEC(RAWVIDEO), V4L2_FMT(RGB565X), DRM_FMT(RGB565) | DRM_FMT(BIG_ENDIAN) },
> + { AV_FMT(BGR24), AV_CODEC(RAWVIDEO), V4L2_FMT(BGR24), DRM_FMT(BGR888) },
> + { AV_FMT(RGB24), AV_CODEC(RAWVIDEO), V4L2_FMT(RGB24), DRM_FMT(RGB888) },
> + { AV_FMT(BGR0), AV_CODEC(RAWVIDEO), V4L2_FMT(BGR32), DRM_FMT(XRGB8888) },
> + { AV_FMT(0RGB), AV_CODEC(RAWVIDEO), V4L2_FMT(RGB32), DRM_FMT(BGRX8888) },
I realise this is just extending the existing table with the extra
field; however, V4L2_PIX_FMT_BGR32 and V4L2_PIX_FMT_RGB32 are
deprecated formats due to the ill-defined behaviour of their alpha
bits. [A|X]BGR32 and [A|X]RGB32 replaced them, depending on the
interpretation of alpha.
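
An untested sketch of what I mean for the two rows above, assuming the
alpha-ignoring X variants are the right match for BGR0/0RGB and that I
have the byte order straight; it is guarded because older kernel
headers may not define the newer fourccs (the link below has the full
list of replacements):

    #ifdef V4L2_PIX_FMT_XBGR32
        { AV_FMT(BGR0),    AV_CODEC(RAWVIDEO), V4L2_FMT(XBGR32), DRM_FMT(XRGB8888) },
        { AV_FMT(0RGB),    AV_CODEC(RAWVIDEO), V4L2_FMT(XRGB32), DRM_FMT(BGRX8888) },
    #endif
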
https://www.kernel.org/doc/html/latest/userspace-api/media/v4l/pixfmt-rgb.html#deprecated-rgb-formats

Dave
> + { AV_FMT(GRAY8), AV_CODEC(RAWVIDEO), V4L2_FMT(GREY), DRM_FMT(R8) },
> + { AV_FMT(YUV420P), AV_CODEC(RAWVIDEO), V4L2_FMT(YUV420), DRM_FMT(YUV420) },
> + { AV_FMT(YUYV422), AV_CODEC(RAWVIDEO), V4L2_FMT(YUYV), DRM_FMT(YUYV) },
> + { AV_FMT(UYVY422), AV_CODEC(RAWVIDEO), V4L2_FMT(UYVY), DRM_FMT(UYVY) },
> + { AV_FMT(YUV422P), AV_CODEC(RAWVIDEO), V4L2_FMT(YUV422P), DRM_FMT(YUV422) },
> + { AV_FMT(YUV411P), AV_CODEC(RAWVIDEO), V4L2_FMT(YUV411P), DRM_FMT(YUV411) },
> + { AV_FMT(YUV410P), AV_CODEC(RAWVIDEO), V4L2_FMT(YUV410), DRM_FMT(YUV410) },
> + { AV_FMT(YUV410P), AV_CODEC(RAWVIDEO), V4L2_FMT(YVU410), DRM_FMT(YVU410) },
> + { AV_FMT(NV12), AV_CODEC(RAWVIDEO), V4L2_FMT(NV12), DRM_FMT(NV12) },
> + { AV_FMT(NONE), AV_CODEC(MJPEG), V4L2_FMT(MJPEG), DRM_FMT(INVALID) },
> + { AV_FMT(NONE), AV_CODEC(MJPEG), V4L2_FMT(JPEG), DRM_FMT(INVALID) },
> #ifdef V4L2_PIX_FMT_SRGGB8
> - { AV_FMT(BAYER_BGGR8), AV_CODEC(RAWVIDEO), V4L2_FMT(SBGGR8) },
> - { AV_FMT(BAYER_GBRG8), AV_CODEC(RAWVIDEO), V4L2_FMT(SGBRG8) },
> - { AV_FMT(BAYER_GRBG8), AV_CODEC(RAWVIDEO), V4L2_FMT(SGRBG8) },
> - { AV_FMT(BAYER_RGGB8), AV_CODEC(RAWVIDEO), V4L2_FMT(SRGGB8) },
> + { AV_FMT(BAYER_BGGR8), AV_CODEC(RAWVIDEO), V4L2_FMT(SBGGR8), DRM_FMT(INVALID) },
> + { AV_FMT(BAYER_GBRG8), AV_CODEC(RAWVIDEO), V4L2_FMT(SGBRG8), DRM_FMT(INVALID) },
> + { AV_FMT(BAYER_GRBG8), AV_CODEC(RAWVIDEO), V4L2_FMT(SGRBG8), DRM_FMT(INVALID) },
> + { AV_FMT(BAYER_RGGB8), AV_CODEC(RAWVIDEO), V4L2_FMT(SRGGB8), DRM_FMT(INVALID) },
> #endif
> #ifdef V4L2_PIX_FMT_Y16
> - { AV_FMT(GRAY16LE), AV_CODEC(RAWVIDEO), V4L2_FMT(Y16) },
> + { AV_FMT(GRAY16LE), AV_CODEC(RAWVIDEO), V4L2_FMT(Y16), DRM_FMT(R16) },
> #endif
> #ifdef V4L2_PIX_FMT_NV12M
> - { AV_FMT(NV12), AV_CODEC(RAWVIDEO), V4L2_FMT(NV12M) },
> + { AV_FMT(NV12), AV_CODEC(RAWVIDEO), V4L2_FMT(NV12M), DRM_FMT(NV12) },
> #endif
> #ifdef V4L2_PIX_FMT_NV21M
> - { AV_FMT(NV21), AV_CODEC(RAWVIDEO), V4L2_FMT(NV21M) },
> + { AV_FMT(NV21), AV_CODEC(RAWVIDEO), V4L2_FMT(NV21M), DRM_FMT(NV21) },
> #endif
> #ifdef V4L2_PIX_FMT_YUV420M
> - { AV_FMT(YUV420P), AV_CODEC(RAWVIDEO), V4L2_FMT(YUV420M) },
> + { AV_FMT(YUV420P), AV_CODEC(RAWVIDEO), V4L2_FMT(YUV420M), DRM_FMT(YUV420) },
> #endif
> #ifdef V4L2_PIX_FMT_NV16M
> - { AV_FMT(NV16), AV_CODEC(RAWVIDEO), V4L2_FMT(NV16M) },
> + { AV_FMT(NV16), AV_CODEC(RAWVIDEO), V4L2_FMT(NV16M), DRM_FMT(NV16) },
> #endif
> #ifdef V4L2_PIX_FMT_H263
> - { AV_FMT(NONE), AV_CODEC(H263), V4L2_FMT(H263) },
> + { AV_FMT(NONE), AV_CODEC(H263), V4L2_FMT(H263), DRM_FMT(INVALID) },
> #endif
> #ifdef V4L2_PIX_FMT_H264
> - { AV_FMT(NONE), AV_CODEC(H264), V4L2_FMT(H264) },
> + { AV_FMT(NONE), AV_CODEC(H264), V4L2_FMT(H264), DRM_FMT(INVALID) },
> #endif
> #ifdef V4L2_PIX_FMT_MPEG4
> - { AV_FMT(NONE), AV_CODEC(MPEG4), V4L2_FMT(MPEG4) },
> + { AV_FMT(NONE), AV_CODEC(MPEG4), V4L2_FMT(MPEG4), DRM_FMT(INVALID) },
> #endif
> #ifdef V4L2_PIX_FMT_CPIA1
> - { AV_FMT(NONE), AV_CODEC(CPIA), V4L2_FMT(CPIA1) },
> + { AV_FMT(NONE), AV_CODEC(CPIA), V4L2_FMT(CPIA1), DRM_FMT(INVALID) },
> #endif
> #ifdef V4L2_PIX_FMT_DV
> - { AV_FMT(NONE), AV_CODEC(DVVIDEO), V4L2_FMT(DV) },
> + { AV_FMT(NONE), AV_CODEC(DVVIDEO), V4L2_FMT(DV), DRM_FMT(INVALID) },
> #endif
> #ifdef V4L2_PIX_FMT_MPEG1
> - { AV_FMT(NONE), AV_CODEC(MPEG1VIDEO), V4L2_FMT(MPEG1) },
> + { AV_FMT(NONE), AV_CODEC(MPEG1VIDEO), V4L2_FMT(MPEG1), DRM_FMT(INVALID) },
> #endif
> #ifdef V4L2_PIX_FMT_MPEG2
> - { AV_FMT(NONE), AV_CODEC(MPEG2VIDEO), V4L2_FMT(MPEG2) },
> + { AV_FMT(NONE), AV_CODEC(MPEG2VIDEO), V4L2_FMT(MPEG2), DRM_FMT(INVALID) },
> #endif
> #ifdef V4L2_PIX_FMT_VP8
> - { AV_FMT(NONE), AV_CODEC(VP8), V4L2_FMT(VP8) },
> + { AV_FMT(NONE), AV_CODEC(VP8), V4L2_FMT(VP8), DRM_FMT(INVALID) },
> #endif
> #ifdef V4L2_PIX_FMT_VP9
> - { AV_FMT(NONE), AV_CODEC(VP9), V4L2_FMT(VP9) },
> + { AV_FMT(NONE), AV_CODEC(VP9), V4L2_FMT(VP9), DRM_FMT(INVALID) },
> #endif
> #ifdef V4L2_PIX_FMT_HEVC
> - { AV_FMT(NONE), AV_CODEC(HEVC), V4L2_FMT(HEVC) },
> + { AV_FMT(NONE), AV_CODEC(HEVC), V4L2_FMT(HEVC), DRM_FMT(INVALID) },
> #endif
> #ifdef V4L2_PIX_FMT_VC1_ANNEX_G
> - { AV_FMT(NONE), AV_CODEC(VC1), V4L2_FMT(VC1_ANNEX_G) },
> + { AV_FMT(NONE), AV_CODEC(VC1), V4L2_FMT(VC1_ANNEX_G), DRM_FMT(INVALID) },
> #endif
> };
>
> @@ -139,3 +147,13 @@ enum AVPixelFormat ff_v4l2_format_v4l2_to_avfmt(uint32_t v4l2_fmt, enum AVCodecI
> }
> return AV_PIX_FMT_NONE;
> }
> +
> +uint32_t ff_v4l2_format_avfmt_to_drm(enum AVPixelFormat avfmt)
> +{
> + int i;
> + for (i = 0; i < FF_ARRAY_ELEMS(fmt_map); i++) {
> + if (fmt_map[i].avfmt == avfmt)
> + return fmt_map[i].drm_fmt;
> + }
> + return DRM_FMT(INVALID);
> +}
> diff --git a/libavcodec/v4l2_fmt.h b/libavcodec/v4l2_fmt.h
> index 577e03a7a76c..f705fe9c0b30 100644
> --- a/libavcodec/v4l2_fmt.h
> +++ b/libavcodec/v4l2_fmt.h
> @@ -31,5 +31,6 @@
> enum AVPixelFormat ff_v4l2_format_v4l2_to_avfmt(uint32_t v4l2_fmt, enum AVCodecID avcodec);
> uint32_t ff_v4l2_format_avcodec_to_v4l2(enum AVCodecID avcodec);
> uint32_t ff_v4l2_format_avfmt_to_v4l2(enum AVPixelFormat avfmt);
> +uint32_t ff_v4l2_format_avfmt_to_drm(enum AVPixelFormat avfmt);
>
> #endif /* AVCODEC_V4L2_FMT_H*/
> diff --git a/libavcodec/v4l2_m2m_dec.c b/libavcodec/v4l2_m2m_dec.c
> index 5793fca6d6dc..91b9dc7b474c 100644
> --- a/libavcodec/v4l2_m2m_dec.c
> +++ b/libavcodec/v4l2_m2m_dec.c
> @@ -29,6 +29,10 @@
> #include "libavcodec/avcodec.h"
> #include "codec_internal.h"
> #include "libavcodec/decode.h"
> +#include "libavcodec/internal.h"
> +#include "hwconfig.h"
> +#include "libavutil/hwcontext.h"
> +#include "libavutil/hwcontext_drm.h"
>
> #include "v4l2_context.h"
> #include "v4l2_m2m.h"
> @@ -99,6 +103,17 @@ static int v4l2_try_start(AVCodecContext *avctx)
> return ret;
> }
>
> + if (capture->support_dma_buf) {
> + ff_v4l2_context_uninit_hw_ctx(capture);
> + ret = ff_v4l2_context_init_hw_ctx(capture);
> + if (!ret) {
> + enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_DRM_PRIME,
> + capture->av_pix_fmt,
> + AV_PIX_FMT_NONE };
> + avctx->pix_fmt = ff_get_format(s->avctx, pix_fmts);
> + }
> + }
> +
> return 0;
> }
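
As a side note on the ff_get_format() call above: from the application
side the zero-copy path is opted into through the usual get_format
callback, along these lines (untested sketch, the callback name is
arbitrary):

    static enum AVPixelFormat get_format(AVCodecContext *avctx,
                                         const enum AVPixelFormat *fmts)
    {
        for (const enum AVPixelFormat *p = fmts; *p != AV_PIX_FMT_NONE; p++) {
            if (*p == AV_PIX_FMT_DRM_PRIME)
                return *p;
        }
        /* DRM_PRIME not offered: take the first (software) format */
        return fmts[0];
    }

    /* installed before avcodec_open2(): avctx->get_format = get_format; */
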
>
> @@ -228,6 +243,11 @@ static const AVOption options[] = {
> { NULL},
> };
>
> +static const AVCodecHWConfigInternal *const v4l2_m2m_dec_hw_configs[] = {
> + HW_CONFIG_INTERNAL(DRM_PRIME),
> + NULL
> +};
> +
> #define M2MDEC_CLASS(NAME) \
> static const AVClass v4l2_m2m_ ## NAME ## _dec_class = { \
> .class_name = #NAME "_v4l2m2m_decoder", \
> @@ -252,6 +272,7 @@ static const AVOption options[] = {
> .p.capabilities = AV_CODEC_CAP_HARDWARE | AV_CODEC_CAP_DELAY | AV_CODEC_CAP_AVOID_PROBING, \
> .caps_internal = FF_CODEC_CAP_SETS_PKT_DTS | FF_CODEC_CAP_INIT_CLEANUP, \
> .p.wrapper_name = "v4l2m2m", \
> + .hw_configs = v4l2_m2m_dec_hw_configs, \
> }
>
> M2MDEC(h264, "H.264", AV_CODEC_ID_H264, "h264_mp4toannexb");
> --
> 2.33.0
>