FFmpeg
amfenc.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
#include "config.h"
#include "config_components.h"

#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_amf.h"
#include "libavutil/hwcontext_amf_internal.h"
#include "libavutil/mastering_display_metadata.h"
#if CONFIG_D3D11VA
#include "libavutil/hwcontext_d3d11va.h"
#endif
#if CONFIG_DXVA2
#define COBJMACROS
#include "libavutil/hwcontext_dxva2.h"
#endif
#include "libavutil/mem.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"

#include "amfenc.h"
#include "encode.h"
#include "internal.h"
42 
43 #define AMF_AV_FRAME_REF L"av_frame_ref"
44 
45 static int amf_save_hdr_metadata(AVCodecContext *avctx, const AVFrame *frame, AMFHDRMetadata *hdrmeta)
46 {
47  AVFrameSideData *sd_display;
48  AVFrameSideData *sd_light;
49  AVMasteringDisplayMetadata *display_meta;
50  AVContentLightMetadata *light_meta;
51 
53  if (sd_display) {
54  display_meta = (AVMasteringDisplayMetadata *)sd_display->data;
55  if (display_meta->has_luminance) {
56  const unsigned int luma_den = 10000;
57  hdrmeta->maxMasteringLuminance =
58  (amf_uint32)(luma_den * av_q2d(display_meta->max_luminance));
59  hdrmeta->minMasteringLuminance =
60  FFMIN((amf_uint32)(luma_den * av_q2d(display_meta->min_luminance)), hdrmeta->maxMasteringLuminance);
61  }
62  if (display_meta->has_primaries) {
63  const unsigned int chroma_den = 50000;
64  hdrmeta->redPrimary[0] =
65  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[0][0])), chroma_den);
66  hdrmeta->redPrimary[1] =
67  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[0][1])), chroma_den);
68  hdrmeta->greenPrimary[0] =
69  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[1][0])), chroma_den);
70  hdrmeta->greenPrimary[1] =
71  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[1][1])), chroma_den);
72  hdrmeta->bluePrimary[0] =
73  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[2][0])), chroma_den);
74  hdrmeta->bluePrimary[1] =
75  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->display_primaries[2][1])), chroma_den);
76  hdrmeta->whitePoint[0] =
77  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->white_point[0])), chroma_den);
78  hdrmeta->whitePoint[1] =
79  FFMIN((amf_uint16)(chroma_den * av_q2d(display_meta->white_point[1])), chroma_den);
80  }
81 
83  if (sd_light) {
84  light_meta = (AVContentLightMetadata *)sd_light->data;
85  if (light_meta) {
86  hdrmeta->maxContentLightLevel = (amf_uint16)light_meta->MaxCLL;
87  hdrmeta->maxFrameAverageLightLevel = (amf_uint16)light_meta->MaxFALL;
88  }
89  }
90  return 0;
91  }
92  return 1;
93 }
94 
95 #if CONFIG_D3D11VA
96 #include <d3d11.h>
97 #endif
98 
99 #ifdef _WIN32
100 #include "compat/w32dlfcn.h"
101 #else
102 #include <dlfcn.h>
103 #endif
104 
105 #define FFMPEG_AMF_WRITER_ID L"ffmpeg_amf"
106 
107 #define PTS_PROP L"PtsProp"
108 
112 #if CONFIG_D3D11VA
114 #endif
115 #if CONFIG_DXVA2
117 #endif
128 };
129 
131 {
132  AMFEncoderContext *ctx = avctx->priv_data;
133  const wchar_t *codec_id = NULL;
134  AMF_RESULT res;
135  enum AVPixelFormat pix_fmt;
136  AVHWDeviceContext *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
137  AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext *)hw_device_ctx->hwctx;
138 
139  switch (avctx->codec->id) {
140  case AV_CODEC_ID_H264:
141  codec_id = AMFVideoEncoderVCE_AVC;
142  break;
143  case AV_CODEC_ID_HEVC:
144  codec_id = AMFVideoEncoder_HEVC;
145  break;
146  case AV_CODEC_ID_AV1 :
147  codec_id = AMFVideoEncoder_AV1;
148  break;
149  default:
150  break;
151  }
152  AMF_RETURN_IF_FALSE(ctx, codec_id != NULL, AVERROR(EINVAL), "Codec %d is not supported\n", avctx->codec->id);
153 
154  if (avctx->hw_frames_ctx)
155  pix_fmt = ((AVHWFramesContext*)avctx->hw_frames_ctx->data)->sw_format;
156  else
157  pix_fmt = avctx->pix_fmt;
158 
159  if (pix_fmt == AV_PIX_FMT_P010) {
160  AMF_RETURN_IF_FALSE(ctx, amf_device_ctx->version >= AMF_MAKE_FULL_VERSION(1, 4, 32, 0), AVERROR_UNKNOWN, "10-bit encoder is not supported by AMD GPU drivers versions lower than 23.30.\n");
161  }
162 
163  ctx->format = av_av_to_amf_format(pix_fmt);
164  AMF_RETURN_IF_FALSE(ctx, ctx->format != AMF_SURFACE_UNKNOWN, AVERROR(EINVAL),
165  "Format %s is not supported\n", av_get_pix_fmt_name(pix_fmt));
166 
167  res = amf_device_ctx->factory->pVtbl->CreateComponent(amf_device_ctx->factory, amf_device_ctx->context, codec_id, &ctx->encoder);
168  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_ENCODER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", codec_id, res);
169 
170  ctx->submitted_frame = 0;
171  ctx->encoded_frame = 0;
172  ctx->eof = 0;
173 
174  return 0;
175 }
176 
178 {
179  AMFEncoderContext *ctx = avctx->priv_data;
180 
181  if (ctx->encoder) {
182  ctx->encoder->pVtbl->Terminate(ctx->encoder);
183  ctx->encoder->pVtbl->Release(ctx->encoder);
184  ctx->encoder = NULL;
185  }
186 
187  av_buffer_unref(&ctx->device_ctx_ref);
188  av_fifo_freep2(&ctx->timestamp_list);
189 
190  return 0;
191 }
192 
193 static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame,
194  AMFSurface* surface)
195 {
196  AMFPlane *plane;
197  uint8_t *dst_data[4] = {0};
198  int dst_linesize[4] = {0};
199  int planes;
200  int i;
201 
202  planes = (int)surface->pVtbl->GetPlanesCount(surface);
203  av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
204 
205  for (i = 0; i < planes; i++) {
206  plane = surface->pVtbl->GetPlaneAt(surface, i);
207  dst_data[i] = plane->pVtbl->GetNative(plane);
208  dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
209  }
210  av_image_copy2(dst_data, dst_linesize,
211  frame->data, frame->linesize, frame->format,
212  avctx->width, avctx->height);
213 
214  return 0;
215 }
216 
217 static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
218 {
219  AMFEncoderContext *ctx = avctx->priv_data;
220  int ret;
221  AMFVariantStruct var = {0};
222  int64_t timestamp = AV_NOPTS_VALUE;
223  int64_t size = buffer->pVtbl->GetSize(buffer);
224 
225  if ((ret = ff_get_encode_buffer(avctx, pkt, size, 0)) < 0) {
226  return ret;
227  }
228  memcpy(pkt->data, buffer->pVtbl->GetNative(buffer), size);
229 
230  switch (avctx->codec->id) {
231  case AV_CODEC_ID_H264:
232  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE, &var);
233  if(var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR) {
235  }
236  break;
237  case AV_CODEC_ID_HEVC:
238  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE, &var);
239  if (var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR) {
241  }
242  break;
243  case AV_CODEC_ID_AV1:
244  buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE, &var);
245  if (var.int64Value == AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE_KEY) {
247  }
248  default:
249  break;
250  }
251 
252  buffer->pVtbl->GetProperty(buffer, PTS_PROP, &var);
253 
254  pkt->pts = var.int64Value; // original pts
255 
256  AMF_RETURN_IF_FALSE(ctx, av_fifo_read(ctx->timestamp_list, &timestamp, 1) >= 0,
257  AVERROR_UNKNOWN, "timestamp_list is empty\n");
258 
259  // calc dts shift if max_b_frames > 0
260  if ((ctx->max_b_frames > 0 || ((ctx->pa_adaptive_mini_gop == 1) ? true : false)) && ctx->dts_delay == 0) {
261  int64_t timestamp_last = AV_NOPTS_VALUE;
262  size_t can_read = av_fifo_can_read(ctx->timestamp_list);
263 
264  AMF_RETURN_IF_FALSE(ctx, can_read > 0, AVERROR_UNKNOWN,
265  "timestamp_list is empty while max_b_frames = %d\n", avctx->max_b_frames);
266  av_fifo_peek(ctx->timestamp_list, &timestamp_last, 1, can_read - 1);
267  if (timestamp < 0 || timestamp_last < AV_NOPTS_VALUE) {
268  return AVERROR(ERANGE);
269  }
270  ctx->dts_delay = timestamp_last - timestamp;
271  }
272  pkt->dts = timestamp - ctx->dts_delay;
273  return 0;
274 }
275 
276 // amfenc API implementation
278 {
279  int ret;
280  AMFEncoderContext *ctx = avctx->priv_data;
281  AVHWDeviceContext *hwdev_ctx = NULL;
282 
283  // hardcoded to current HW queue size - will auto-realloc if too small
284  ctx->timestamp_list = av_fifo_alloc2(avctx->max_b_frames + 16, sizeof(int64_t),
286  if (!ctx->timestamp_list) {
287  return AVERROR(ENOMEM);
288  }
289  ctx->dts_delay = 0;
290 
291  ctx->hwsurfaces_in_queue = 0;
292 
293  if (avctx->hw_device_ctx) {
294  hwdev_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
295  if (hwdev_ctx->type == AV_HWDEVICE_TYPE_AMF)
296  {
297  ctx->device_ctx_ref = av_buffer_ref(avctx->hw_device_ctx);
298  }
299  else {
301  AMF_RETURN_IF_FALSE(avctx, ret == 0, ret, "Failed to create derived AMF device context: %s\n", av_err2str(ret));
302  }
303  } else if (avctx->hw_frames_ctx) {
304  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
305  if (frames_ctx->device_ref ) {
306  if (frames_ctx->format == AV_PIX_FMT_AMF_SURFACE) {
307  ctx->device_ctx_ref = av_buffer_ref(frames_ctx->device_ref);
308  }
309  else {
310  ret = av_hwdevice_ctx_create_derived(&ctx->device_ctx_ref, AV_HWDEVICE_TYPE_AMF, frames_ctx->device_ref, 0);
311  AMF_RETURN_IF_FALSE(avctx, ret == 0, ret, "Failed to create derived AMF device context: %s\n", av_err2str(ret));
312  }
313  }
314  }
315  else {
316  ret = av_hwdevice_ctx_create(&ctx->device_ctx_ref, AV_HWDEVICE_TYPE_AMF, NULL, NULL, 0);
317  AMF_RETURN_IF_FALSE(avctx, ret == 0, ret, "Failed to create hardware device context (AMF) : %s\n", av_err2str(ret));
318  }
319 
320  if ((ret = amf_init_encoder(avctx)) == 0) {
321  return 0;
322  }
323 
324  ff_amf_encode_close(avctx);
325  return ret;
326 }
327 
328 static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
329 {
330  AMF_RESULT res;
331  AMFVariantStruct var;
332  res = AMFVariantInit(&var);
333  if (res == AMF_OK) {
334  AMFGuid guid_AMFInterface = IID_AMFInterface();
335  AMFInterface *amf_interface;
336  res = val->pVtbl->QueryInterface(val, &guid_AMFInterface, (void**)&amf_interface);
337 
338  if (res == AMF_OK) {
339  res = AMFVariantAssignInterface(&var, amf_interface);
340  amf_interface->pVtbl->Release(amf_interface);
341  }
342  if (res == AMF_OK) {
343  res = object->pVtbl->SetProperty(object, name, var);
344  }
345  AMFVariantClear(&var);
346  }
347  return res;
348 }
349 
350 static AMF_RESULT amf_store_attached_frame_ref(const AVFrame *frame, AMFSurface *surface)
351 {
352  AMF_RESULT res = AMF_FAIL;
353  int64_t data;
355  if (frame_ref) {
356  memcpy(&data, &frame_ref, sizeof(frame_ref)); // store pointer in 8 bytes
357  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_AV_FRAME_REF, data);
358  }
359  return res;
360 }
361 
362 static AMF_RESULT amf_release_attached_frame_ref(AMFBuffer *buffer)
363 {
364  AMFVariantStruct var = {0};
365  AMF_RESULT res = buffer->pVtbl->GetProperty(buffer, AMF_AV_FRAME_REF, &var);
366  if(res == AMF_OK && var.int64Value){
368  memcpy(&frame_ref, &var.int64Value, sizeof(frame_ref));
370  }
371  return res;
372 }
373 
375 {
376  AMFEncoderContext *ctx = avctx->priv_data;
377  AVHWDeviceContext *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
378  AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext *)hw_device_ctx->hwctx;
379  AMFSurface *surface;
380  AMF_RESULT res;
381  int ret;
382  AMF_RESULT res_query;
383  AMFData *data = NULL;
385  int block_and_wait;
386  int input_full = 0;
387  int hw_surface = 0;
388  int64_t pts = 0;
389  int max_b_frames = ctx->max_b_frames < 0 ? 0 : ctx->max_b_frames;
390 
391  if (!ctx->encoder){
393  return AVERROR(EINVAL);
394  }
395  ret = ff_encode_get_frame(avctx, frame);
396  if(ret < 0){
397  if(ret != AVERROR_EOF){
399  if(ret == AVERROR(EAGAIN)){
400  if(ctx->submitted_frame <= ctx->encoded_frame + max_b_frames + 1) // too soon to poll
401  return ret;
402  }
403  }
404  }
405  if(ret != AVERROR(EAGAIN)){
406  if (!frame->buf[0]) { // submit drain
407  if (!ctx->eof) { // submit drain one time only
408  if(!ctx->delayed_drain) {
409  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
410  if (res == AMF_INPUT_FULL) {
411  ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in receive loop
412  } else {
413  if (res == AMF_OK) {
414  ctx->eof = 1; // drain started
415  }
416  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Drain() failed with error %d\n", res);
417  }
418  }
419  }
420  } else { // submit frame
421 
422  // prepare surface from frame
423  switch (frame->format) {
424  #if CONFIG_D3D11VA
425  case AV_PIX_FMT_D3D11:
426  {
427  static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
428  ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
429  int index = (intptr_t)frame->data[1]; // index is a slice in texture array is - set to tell AMF which slice to use
430 
431  av_assert0(frame->hw_frames_ctx && avctx->hw_frames_ctx &&
432  frame->hw_frames_ctx->data == avctx->hw_frames_ctx->data);
433 
434  texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
435 
436  res = amf_device_ctx->context->pVtbl->CreateSurfaceFromDX11Native(amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
437  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
438 
439  hw_surface = 1;
440  }
441  break;
442  #endif
443  #if CONFIG_DXVA2
445  {
446  IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture
447 
448  res = amf_device_ctx->context->pVtbl->CreateSurfaceFromDX9Native(amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
449  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
450 
451  hw_surface = 1;
452  }
453  break;
454  #endif
456  {
457  surface = (AMFSurface*)frame->data[0];
458  surface->pVtbl->Acquire(surface);
459  hw_surface = 1;
460  }
461  break;
462  default:
463  {
464  res = amf_device_ctx->context->pVtbl->AllocSurface(amf_device_ctx->context, AMF_MEMORY_HOST, ctx->format, avctx->width, avctx->height, &surface);
465  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
466  amf_copy_surface(avctx, frame, surface);
467  }
468  break;
469  }
470 
471  if (hw_surface) {
473  ctx->hwsurfaces_in_queue++;
474  // input HW surfaces can be vertically aligned by 16; tell AMF the real size
475  surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
476  }
477 
478  // HDR10 metadata
479  if (frame->color_trc == AVCOL_TRC_SMPTE2084) {
480  AMFBuffer * hdrmeta_buffer = NULL;
481  res = amf_device_ctx->context->pVtbl->AllocBuffer(amf_device_ctx->context, AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &hdrmeta_buffer);
482  if (res == AMF_OK) {
483  AMFHDRMetadata * hdrmeta = (AMFHDRMetadata*)hdrmeta_buffer->pVtbl->GetNative(hdrmeta_buffer);
484  if (amf_save_hdr_metadata(avctx, frame, hdrmeta) == 0) {
485  switch (avctx->codec->id) {
486  case AV_CODEC_ID_H264:
487  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_INPUT_HDR_METADATA, hdrmeta_buffer); break;
488  case AV_CODEC_ID_HEVC:
489  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_HEVC_INPUT_HDR_METADATA, hdrmeta_buffer); break;
490  case AV_CODEC_ID_AV1:
491  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_AV1_INPUT_HDR_METADATA, hdrmeta_buffer); break;
492  }
493  res = amf_set_property_buffer(surface, L"av_frame_hdrmeta", hdrmeta_buffer);
494  AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "SetProperty failed for \"av_frame_hdrmeta\" with error %d\n", res);
495  }
496  hdrmeta_buffer->pVtbl->Release(hdrmeta_buffer);
497  }
498  }
499 
500  surface->pVtbl->SetPts(surface, frame->pts);
501  AMF_ASSIGN_PROPERTY_INT64(res, surface, PTS_PROP, frame->pts);
502 
503  switch (avctx->codec->id) {
504  case AV_CODEC_ID_H264:
505  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_AUD, !!ctx->aud);
506  switch (frame->pict_type) {
507  case AV_PICTURE_TYPE_I:
508  if (ctx->forced_idr) {
509  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_SPS, 1);
510  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_PPS, 1);
511  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_IDR);
512  } else {
513  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_I);
514  }
515  break;
516  case AV_PICTURE_TYPE_P:
517  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_P);
518  break;
519  case AV_PICTURE_TYPE_B:
520  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_B);
521  break;
522  }
523  break;
524  case AV_CODEC_ID_HEVC:
525  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_AUD, !!ctx->aud);
526  switch (frame->pict_type) {
527  case AV_PICTURE_TYPE_I:
528  if (ctx->forced_idr) {
529  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_HEADER, 1);
530  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_IDR);
531  } else {
532  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_I);
533  }
534  break;
535  case AV_PICTURE_TYPE_P:
536  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_P);
537  break;
538  }
539  break;
540  case AV_CODEC_ID_AV1:
541  if (frame->pict_type == AV_PICTURE_TYPE_I) {
542  if (ctx->forced_idr) {
543  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_INSERT_SEQUENCE_HEADER, 1);
544  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE_KEY);
545  } else {
546  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE_INTRA_ONLY);
547  }
548  }
549  break;
550  default:
551  break;
552  }
553  pts = frame->pts;
554  // submit surface
555  res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
557 
558  if (res == AMF_INPUT_FULL) { // handle full queue
559  //store surface for later submission
560  input_full = 1;
561  } else {
562  surface->pVtbl->Release(surface);
563  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
564 
565  ctx->submitted_frame++;
566  ret = av_fifo_write(ctx->timestamp_list, &pts, 1);
567 
568  if (ret < 0)
569  return ret;
570  if(ctx->submitted_frame <= ctx->encoded_frame + max_b_frames + 1)
571  return AVERROR(EAGAIN); // if frame just submiited - don't poll or wait
572  }
573  }
574  }
576 
577  do {
578  block_and_wait = 0;
579  // poll data
580 
581  res_query = ctx->encoder->pVtbl->QueryOutput(ctx->encoder, &data);
582  if (data) {
583  // copy data to packet
584  AMFBuffer *buffer;
585  AMFGuid guid = IID_AMFBuffer();
586  data->pVtbl->QueryInterface(data, &guid, (void**)&buffer); // query for buffer interface
587  ret = amf_copy_buffer(avctx, avpkt, buffer);
588  if (amf_release_attached_frame_ref(buffer) == AMF_OK) {
589  ctx->hwsurfaces_in_queue--;
590  }
591  ctx->encoded_frame++;
592  buffer->pVtbl->Release(buffer);
593  data->pVtbl->Release(data);
594 
595  AMF_RETURN_IF_FALSE(ctx, ret >= 0, ret, "amf_copy_buffer() failed with error %d\n", ret);
596 
597  if (ctx->delayed_drain) { // try to resubmit drain
598  res = ctx->encoder->pVtbl->Drain(ctx->encoder);
599  if (res != AMF_INPUT_FULL) {
600  ctx->delayed_drain = 0;
601  ctx->eof = 1; // drain started
602  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated Drain() failed with error %d\n", res);
603  } else {
604  av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed drain submission got AMF_INPUT_FULL- should not happen\n");
605  }
606  }
607  } else if (ctx->delayed_drain || (ctx->eof && res_query != AMF_EOF) || (ctx->hwsurfaces_in_queue >= ctx->hwsurfaces_in_queue_max) || input_full) {
608  block_and_wait = 1;
609  // Only sleep if the driver doesn't support waiting in QueryOutput()
610  // or if we already have output data so we will skip calling it.
611  if (!ctx->query_timeout_supported || avpkt->data || avpkt->buf) {
612  av_usleep(1000);
613  }
614  }
615  } while (block_and_wait);
616 
617  if (res_query == AMF_EOF) {
618  ret = AVERROR_EOF;
619  } else if (data == NULL) {
620  ret = AVERROR(EAGAIN);
621  } else {
622  if(input_full) {
623  // resubmit surface
624  res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
625  surface->pVtbl->Release(surface);
626  if (res == AMF_INPUT_FULL) {
627  av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed SubmitInput returned AMF_INPUT_FULL- should not happen\n");
628  } else {
629  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
630 
631  ret = av_fifo_write(ctx->timestamp_list, &pts, 1);
632 
633  ctx->submitted_frame++;
634 
635  if (ret < 0)
636  return ret;
637  }
638  }
639  ret = 0;
640  }
641  return ret;
642 }
643 
645 {
646  amf_int64 color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN;
647  if (avctx->color_range == AVCOL_RANGE_JPEG) {
648  /// Color Space for Full (JPEG) Range
649  switch (avctx->colorspace) {
650  case AVCOL_SPC_SMPTE170M:
651  color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_601;
652  break;
653  case AVCOL_SPC_BT709:
654  color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_709;
655  break;
657  case AVCOL_SPC_BT2020_CL:
658  color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_2020;
659  break;
660  }
661  } else {
662  /// Color Space for Limited (MPEG) range
663  switch (avctx->colorspace) {
664  case AVCOL_SPC_SMPTE170M:
665  color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_601;
666  break;
667  case AVCOL_SPC_BT709:
668  color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_709;
669  break;
671  case AVCOL_SPC_BT2020_CL:
672  color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020;
673  break;
674  }
675  }
676  return color_profile;
677 }
678 
680 #if CONFIG_D3D11VA
681  HW_CONFIG_ENCODER_FRAMES(D3D11, D3D11VA),
682  HW_CONFIG_ENCODER_DEVICE(NONE, D3D11VA),
683 #endif
684 #if CONFIG_DXVA2
685  HW_CONFIG_ENCODER_FRAMES(DXVA2_VLD, DXVA2),
687 #endif
688  HW_CONFIG_ENCODER_FRAMES(AMF_SURFACE, AMF),
690  NULL,
691 };
AVMasteringDisplayMetadata::has_primaries
int has_primaries
Flag indicating whether the display primaries (and white point) are set.
Definition: mastering_display_metadata.h:62
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:215
AVMasteringDisplayMetadata::max_luminance
AVRational max_luminance
Max luminance of mastering display (cd/m^2).
Definition: mastering_display_metadata.h:57
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
AVCodecContext::colorspace
enum AVColorSpace colorspace
YUV colorspace type.
Definition: avcodec.h:699
av_frame_get_side_data
AVFrameSideData * av_frame_get_side_data(const AVFrame *frame, enum AVFrameSideDataType type)
Definition: frame.c:693
NONE
@ NONE
Definition: af_afade.c:60
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:57
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
planes
static const struct @475 planes[]
AVMasteringDisplayMetadata::display_primaries
AVRational display_primaries[3][2]
CIE 1931 xy chromaticity coords of color primaries (r, g, b order).
Definition: mastering_display_metadata.h:42
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:198
AVMasteringDisplayMetadata::has_luminance
int has_luminance
Flag indicating whether the luminance (min_ and max_) have been set.
Definition: mastering_display_metadata.h:67
int64_t
long long int64_t
Definition: coverity.c:34
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:70
AVContentLightMetadata::MaxCLL
unsigned MaxCLL
Max content light level (cd/m^2).
Definition: mastering_display_metadata.h:111
av_fifo_peek
int av_fifo_peek(const AVFifo *f, void *buf, size_t nb_elems, size_t offset)
Read data from a FIFO without modifying FIFO state.
Definition: fifo.c:255
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:410
pixdesc.h
AVCOL_RANGE_JPEG
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:746
internal.h
AVPacket::data
uint8_t * data
Definition: packet.h:539
encode.h
data
const char data[16]
Definition: mxf.c:149
amf_set_property_buffer
static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
Definition: amfenc.c:328
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:102
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AMF_RETURN_IF_FALSE
#define AMF_RETURN_IF_FALSE(avctx, exp, ret_value,...)
Error handling helper.
Definition: amfenc.h:163
amf_copy_surface
static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame, AMFSurface *surface)
Definition: amfenc.c:193
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVCOL_SPC_BT2020_CL
@ AVCOL_SPC_BT2020_CL
ITU-R BT2020 constant luminance system.
Definition: pixfmt.h:681
AV_PKT_FLAG_KEY
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: packet.h:594
AV_PIX_FMT_AMF_SURFACE
@ AV_PIX_FMT_AMF_SURFACE
HW acceleration through AMF.
Definition: pixfmt.h:477
AVContentLightMetadata
Content light level needed by to transmit HDR over HDMI (CTA-861.3).
Definition: mastering_display_metadata.h:107
AMF_AV_FRAME_REF
#define AMF_AV_FRAME_REF
Definition: amfenc.c:43
amf_release_attached_frame_ref
static AMF_RESULT amf_release_attached_frame_ref(AMFBuffer *buffer)
Definition: amfenc.c:362
AVCodecContext::codec
const struct AVCodec * codec
Definition: avcodec.h:460
av_fifo_write
int av_fifo_write(AVFifo *f, const void *buf, size_t nb_elems)
Write data into a FIFO.
Definition: fifo.c:188
ff_amf_encode_close
int av_cold ff_amf_encode_close(AVCodecContext *avctx)
Common encoder termination function.
Definition: amfenc.c:177
ff_amf_encode_init
int ff_amf_encode_init(AVCodecContext *avctx)
Common encoder initization function.
Definition: amfenc.c:277
val
static double val(void *priv, double ch)
Definition: aeval.c:77
pts
static int64_t pts
Definition: transcode_aac.c:644
av_av_to_amf_format
enum AMF_SURFACE_FORMAT av_av_to_amf_format(enum AVPixelFormat fmt)
Definition: hwcontext_amf.c:118
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:58
avassert.h
pkt
AVPacket * pkt
Definition: movenc.c:60
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_cold
#define av_cold
Definition: attributes.h:90
av_fifo_read
int av_fifo_read(AVFifo *f, void *buf, size_t nb_elems)
Read data from a FIFO.
Definition: fifo.c:240
AVMasteringDisplayMetadata::white_point
AVRational white_point[2]
CIE 1931 xy chromaticity coords of white point.
Definition: mastering_display_metadata.h:47
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
AMFEncoderContext
AMF encoder context.
Definition: amfenc.h:40
AVCOL_SPC_SMPTE170M
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
Definition: pixfmt.h:676
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demux_decode.c:41
av_q2d
static double av_q2d(AVRational a)
Convert an AVRational to a double.
Definition: rational.h:104
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
AV_HWDEVICE_TYPE_AMF
@ AV_HWDEVICE_TYPE_AMF
Definition: hwcontext.h:41
amf_init_encoder
static int amf_init_encoder(AVCodecContext *avctx)
Definition: amfenc.c:130
ctx
AVFormatContext * ctx
Definition: movenc.c:49
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:517
hwcontext_amf.h
codec_id
enum AVCodecID codec_id
Definition: vaapi_decode.c:410
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
AV_CODEC_ID_H264
@ AV_CODEC_ID_H264
Definition: codec_id.h:79
AV_PIX_FMT_RGBA
@ AV_PIX_FMT_RGBA
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:100
if
if(ret)
Definition: filter_design.txt:179
AVPacket::buf
AVBufferRef * buf
A reference to the reference-counted buffer where the packet data is stored.
Definition: packet.h:522
NULL
#define NULL
Definition: coverity.c:32
AVCodecContext::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:709
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AV_CODEC_ID_AV1
@ AV_CODEC_ID_AV1
Definition: codec_id.h:284
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:127
ff_amf_receive_packet
int ff_amf_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)
Ecoding one frame - common function for all AMF encoders.
Definition: amfenc.c:374
AV_PICTURE_TYPE_I
@ AV_PICTURE_TYPE_I
Intra.
Definition: avutil.h:279
av_fifo_can_read
size_t av_fifo_can_read(const AVFifo *f)
Definition: fifo.c:87
amf_copy_buffer
static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
Definition: amfenc.c:217
AV_FRAME_DATA_MASTERING_DISPLAY_METADATA
@ AV_FRAME_DATA_MASTERING_DISPLAY_METADATA
Mastering display metadata associated with a video frame.
Definition: frame.h:120
AV_PIX_FMT_BGR0
@ AV_PIX_FMT_BGR0
packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
Definition: pixfmt.h:265
time.h
PTS_PROP
#define PTS_PROP
Definition: amfenc.c:107
index
int index
Definition: gxfenc.c:90
AVCOL_TRC_SMPTE2084
@ AVCOL_TRC_SMPTE2084
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
Definition: pixfmt.h:657
AV_PIX_FMT_X2BGR10
#define AV_PIX_FMT_X2BGR10
Definition: pixfmt.h:593
hwcontext_dxva2.h
HW_CONFIG_ENCODER_DEVICE
#define HW_CONFIG_ENCODER_DEVICE(format, device_type_)
Definition: hwconfig.h:95
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:122
ff_amf_pix_fmts
enum AVPixelFormat ff_amf_pix_fmts[]
Supported formats.
Definition: amfenc.c:109
size
int size
Definition: twinvq_data.h:10344
AVAMFDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_amf.h:33
AV_NOPTS_VALUE
#define AV_NOPTS_VALUE
Undefined timestamp value.
Definition: avutil.h:248
AVFrameSideData::data
uint8_t * data
Definition: frame.h:267
AVCodecHWConfigInternal
Definition: hwconfig.h:25
AVPacket::dts
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed.
Definition: packet.h:538
AVPacket::flags
int flags
A combination of AV_PKT_FLAG values.
Definition: packet.h:545
AV_PIX_FMT_RGB0
@ AV_PIX_FMT_RGB0
packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
Definition: pixfmt.h:263
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
AVCodec::id
enum AVCodecID id
Definition: codec.h:201
AV_PIX_FMT_ARGB
@ AV_PIX_FMT_ARGB
packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
Definition: pixfmt.h:99
HW_CONFIG_ENCODER_FRAMES
#define HW_CONFIG_ENCODER_FRAMES(format, device_type_)
Definition: hwconfig.h:98
AV_FRAME_DATA_CONTENT_LIGHT_LEVEL
@ AV_FRAME_DATA_CONTENT_LIGHT_LEVEL
Content light level (based on CTA-861.3).
Definition: frame.h:137
av_hwdevice_ctx_create_derived
int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr, enum AVHWDeviceType type, AVBufferRef *src_ref, int flags)
Create a new device of the specified type from an existing device.
Definition: hwcontext.c:707
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
AVPacket::pts
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:532
AVCOL_SPC_BT2020_NCL
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:680
amf_save_hdr_metadata
static int amf_save_hdr_metadata(AVCodecContext *avctx, const AVFrame *frame, AMFHDRMetadata *hdrmeta)
Definition: amfenc.c:45
hw_device_ctx
static AVBufferRef * hw_device_ctx
Definition: hw_decode.c:45
ff_amfenc_hw_configs
const AVCodecHWConfigInternal *const ff_amfenc_hw_configs[]
Definition: amfenc.c:679
AV_CODEC_ID_HEVC
@ AV_CODEC_ID_HEVC
Definition: codec_id.h:228
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AVCodecContext::hw_device_ctx
AVBufferRef * hw_device_ctx
A reference to the AVHWDeviceContext describing the device which will be used by a hardware encoder/d...
Definition: avcodec.h:1515
AVMasteringDisplayMetadata
Mastering display metadata capable of representing the color volume of the display used to master the...
Definition: mastering_display_metadata.h:38
AVCodecContext::height
int height
Definition: avcodec.h:632
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:671
AVCodecContext::hw_frames_ctx
AVBufferRef * hw_frames_ctx
A reference to the AVHWFramesContext describing the input (for encoding) or output (decoding) frames.
Definition: avcodec.h:1493
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:116
frame_ref
static int frame_ref(AVFrame *dst, const AVFrame *src)
Definition: swscale.c:1339
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:73
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:264
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:604
av_fifo_alloc2
AVFifo * av_fifo_alloc2(size_t nb_elems, size_t elem_size, unsigned int flags)
Allocate and initialize an AVFifo with a given element size.
Definition: fifo.c:47
AVCodecContext
main external API structure.
Definition: avcodec.h:451
hwcontext_amf_internal.h
AV_PICTURE_TYPE_B
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
Definition: avutil.h:281
buffer
the frame and frame reference mechanism is intended to as much as expensive copies of that data while still allowing the filters to produce correct results The data is stored in buffers represented by AVFrame structures Several references can point to the same frame buffer
Definition: filter_design.txt:49
ff_get_encode_buffer
int ff_get_encode_buffer(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int flags)
Get a buffer for a packet.
Definition: encode.c:106
av_image_copy2
static void av_image_copy2(uint8_t *const dst_data[4], const int dst_linesizes[4], uint8_t *const src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Wrapper around av_image_copy() to workaround the limitation that the conversion from uint8_t * const ...
Definition: imgutils.h:184
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
L
#define L(x)
Definition: vpx_arith.h:36
amfenc.h
AVMasteringDisplayMetadata::min_luminance
AVRational min_luminance
Min luminance of mastering display (cd/m^2).
Definition: mastering_display_metadata.h:52
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:581
AV_PICTURE_TYPE_P
@ AV_PICTURE_TYPE_P
Predicted.
Definition: avutil.h:280
AVERROR_ENCODER_NOT_FOUND
#define AVERROR_ENCODER_NOT_FOUND
Encoder not found.
Definition: error.h:56
mem.h
AVCodecContext::max_b_frames
int max_b_frames
maximum number of B-frames between non-B-frames Note: The output will be delayed by max_b_frames+1 re...
Definition: avcodec.h:809
ff_encode_get_frame
int ff_encode_get_frame(AVCodecContext *avctx, AVFrame *frame)
Called by encoders to get the next frame for encoding.
Definition: encode.c:205
mastering_display_metadata.h
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:265
ff_amf_get_color_profile
int ff_amf_get_color_profile(AVCodecContext *avctx)
Definition: amfenc.c:644
AVPacket
This structure stores compressed data.
Definition: packet.h:516
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:478
AVContentLightMetadata::MaxFALL
unsigned MaxFALL
Max average light level per frame (cd/m^2).
Definition: mastering_display_metadata.h:116
AV_PIX_FMT_RGBAF16
#define AV_PIX_FMT_RGBAF16
Definition: pixfmt.h:603
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:632
imgutils.h
hwcontext.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
av_fifo_freep2
void av_fifo_freep2(AVFifo **f)
Free an AVFifo and reset pointer to NULL.
Definition: fifo.c:286
AVCOL_SPC_BT709
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
Definition: pixfmt.h:671
hwcontext_d3d11va.h
AV_FIFO_FLAG_AUTO_GROW
#define AV_FIFO_FLAG_AUTO_GROW
Automatically resize the FIFO on writes, so that the data fits.
Definition: fifo.h:63
w32dlfcn.h
amf_store_attached_frame_ref
static AMF_RESULT amf_store_attached_frame_ref(const AVFrame *frame, AMFSurface *surface)
Definition: amfenc.c:350
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3233