/*
 * Blackmagic DeckLink output
 * Copyright (c) 2013-2014 Ramiro Polla
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <atomic>
using std::atomic;

/* Include internal.h first to avoid conflict between winsock.h (used by
 * DeckLink headers) and winsock2.h (used by libavformat) in MSVC++ builds */
extern "C" {
#include "libavformat/internal.h"
}

#include <DeckLinkAPI.h>

extern "C" {
#include "libavformat/avformat.h"
#include "libavcodec/bytestream.h"
#include "libavutil/frame.h"
#include "libavutil/internal.h"
#include "libavutil/imgutils.h"
#include "avdevice.h"
}

#include "decklink_common.h"
#include "decklink_enc.h"
#if CONFIG_LIBKLVANC
#include "libklvanc/vanc.h"
#include "libklvanc/vanc-lines.h"
#include "libklvanc/pixels.h"
#endif

/* DeckLink callback class declaration */
class decklink_frame : public IDeckLinkVideoFrame
{
public:
    decklink_frame(struct decklink_ctx *ctx, AVFrame *avframe, AVCodecID codec_id, int height, int width) :
        _ctx(ctx), _avframe(avframe), _avpacket(NULL), _codec_id(codec_id), _ancillary(NULL), _height(height), _width(width), _refs(1) { }
    decklink_frame(struct decklink_ctx *ctx, AVPacket *avpacket, AVCodecID codec_id, int height, int width) :
        _ctx(ctx), _avframe(NULL), _avpacket(avpacket), _codec_id(codec_id), _ancillary(NULL), _height(height), _width(width), _refs(1) { }
    virtual long           STDMETHODCALLTYPE GetWidth      (void) { return _width; }
    virtual long           STDMETHODCALLTYPE GetHeight     (void) { return _height; }
    virtual long           STDMETHODCALLTYPE GetRowBytes   (void)
    {
        if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
            return _avframe->linesize[0] < 0 ? -_avframe->linesize[0] : _avframe->linesize[0];
        else
            return ((GetWidth() + 47) / 48) * 128;
    }
    virtual BMDPixelFormat STDMETHODCALLTYPE GetPixelFormat(void)
    {
        if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
            return bmdFormat8BitYUV;
        else
            return bmdFormat10BitYUV;
    }
    virtual BMDFrameFlags  STDMETHODCALLTYPE GetFlags      (void)
    {
        if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
            return _avframe->linesize[0] < 0 ? bmdFrameFlagFlipVertical : bmdFrameFlagDefault;
        else
            return bmdFrameFlagDefault;
    }

    virtual HRESULT        STDMETHODCALLTYPE GetBytes      (void **buffer)
    {
        if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
            if (_avframe->linesize[0] < 0)
                *buffer = (void *)(_avframe->data[0] + _avframe->linesize[0] * (_avframe->height - 1));
            else
                *buffer = (void *)(_avframe->data[0]);
        } else {
            *buffer = (void *)(_avpacket->data);
        }
        return S_OK;
    }

    virtual HRESULT STDMETHODCALLTYPE GetTimecode     (BMDTimecodeFormat format, IDeckLinkTimecode **timecode) { return S_FALSE; }
    virtual HRESULT STDMETHODCALLTYPE GetAncillaryData(IDeckLinkVideoFrameAncillary **ancillary)
    {
        *ancillary = _ancillary;
        if (_ancillary) {
            _ancillary->AddRef();
            return S_OK;
        } else {
            return S_FALSE;
        }
    }
    virtual HRESULT STDMETHODCALLTYPE SetAncillaryData(IDeckLinkVideoFrameAncillary *ancillary)
    {
        if (_ancillary)
            _ancillary->Release();
        _ancillary = ancillary;
        _ancillary->AddRef();
        return S_OK;
    }
    virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID *ppv) { return E_NOINTERFACE; }
    virtual ULONG   STDMETHODCALLTYPE AddRef(void)                            { return ++_refs; }
    virtual ULONG   STDMETHODCALLTYPE Release(void)
    {
        int ret = --_refs;
        if (!ret) {
            av_frame_free(&_avframe);
            av_packet_free(&_avpacket);
            if (_ancillary)
                _ancillary->Release();
            delete this;
        }
        return ret;
    }

    struct decklink_ctx *_ctx;
    AVFrame *_avframe;
    AVPacket *_avpacket;
    enum AVCodecID _codec_id;
    IDeckLinkVideoFrameAncillary *_ancillary;
    int _height;
    int _width;

private:
    std::atomic<int> _refs;
};
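
/*
 * Ownership note: a decklink_frame starts with a reference count of 1 and wraps
 * either a cloned AVFrame (wrapped_avframe input) or a cloned AVPacket (V210
 * input).  The muxer drops its own reference right after ScheduleVideoFrame()
 * (the driver is expected to hold a reference while the frame is queued), and
 * the wrapped buffer is freed in Release() once the count reaches zero.
 */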

class decklink_output_callback : public IDeckLinkVideoOutputCallback
{
public:
    virtual HRESULT STDMETHODCALLTYPE ScheduledFrameCompleted(IDeckLinkVideoFrame *_frame, BMDOutputFrameCompletionResult result)
    {
        decklink_frame *frame = static_cast<decklink_frame *>(_frame);
        struct decklink_ctx *ctx = frame->_ctx;

        if (frame->_avframe)
            av_frame_unref(frame->_avframe);
        if (frame->_avpacket)
            av_packet_unref(frame->_avpacket);

        pthread_mutex_lock(&ctx->mutex);
        ctx->frames_buffer_available_spots++;
        pthread_cond_broadcast(&ctx->cond);
        pthread_mutex_unlock(&ctx->mutex);

        return S_OK;
    }
    virtual HRESULT STDMETHODCALLTYPE ScheduledPlaybackHasStopped(void)       { return S_OK; }
    virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID *ppv) { return E_NOINTERFACE; }
    virtual ULONG   STDMETHODCALLTYPE AddRef(void)                            { return 1; }
    virtual ULONG   STDMETHODCALLTYPE Release(void)                           { return 1; }
};
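
/*
 * ScheduledFrameCompleted() above is the back-pressure mechanism: every frame
 * the hardware finishes with returns one slot to frames_buffer_available_spots,
 * which decklink_write_video_packet() waits on before scheduling more.
 * AddRef()/Release() are no-ops returning 1 because the single callback
 * instance is owned by decklink_ctx and deleted in ff_decklink_write_trailer().
 */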

static int decklink_setup_video(AVFormatContext *avctx, AVStream *st)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    AVCodecParameters *c = st->codecpar;

    if (ctx->video) {
        av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");
        return -1;
    }

    if (c->codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
        if (c->format != AV_PIX_FMT_UYVY422) {
            av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format!"
                   " Only AV_PIX_FMT_UYVY422 is supported.\n");
            return -1;
        }
        ctx->raw_format = bmdFormat8BitYUV;
    } else if (c->codec_id != AV_CODEC_ID_V210) {
        av_log(avctx, AV_LOG_ERROR, "Unsupported codec type!"
               " Only V210 and wrapped frame with AV_PIX_FMT_UYVY422 are supported.\n");
        return -1;
    } else {
        ctx->raw_format = bmdFormat10BitYUV;
    }

    if (ff_decklink_set_configs(avctx, DIRECTION_OUT) < 0) {
        av_log(avctx, AV_LOG_ERROR, "Could not set output configuration\n");
        return -1;
    }
    if (ff_decklink_set_format(avctx, c->width, c->height,
                               st->time_base.num, st->time_base.den, c->field_order)) {
        av_log(avctx, AV_LOG_ERROR, "Unsupported video size, framerate or field order!"
               " Check available formats with -list_formats 1.\n");
        return -1;
    }
    if (ctx->supports_vanc && ctx->dlo->EnableVideoOutput(ctx->bmd_mode, bmdVideoOutputVANC) != S_OK) {
        av_log(avctx, AV_LOG_WARNING, "Could not enable video output with VANC! Trying without...\n");
        ctx->supports_vanc = 0;
    }
    if (!ctx->supports_vanc && ctx->dlo->EnableVideoOutput(ctx->bmd_mode, bmdVideoOutputFlagDefault) != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not enable video output!\n");
        return -1;
    }

    /* Set callback. */
    ctx->output_callback = new decklink_output_callback();
    ctx->dlo->SetScheduledFrameCompletionCallback(ctx->output_callback);

    ctx->frames_preroll = st->time_base.den * ctx->preroll;
    if (st->time_base.den > 1000)
        ctx->frames_preroll /= 1000;

    /* Buffer twice as many frames as the preroll. */
    ctx->frames_buffer = ctx->frames_preroll * 2;
    ctx->frames_buffer = FFMIN(ctx->frames_buffer, 60);
    pthread_mutex_init(&ctx->mutex, NULL);
    pthread_cond_init(&ctx->cond, NULL);
    ctx->frames_buffer_available_spots = ctx->frames_buffer;

    av_log(avctx, AV_LOG_DEBUG, "output: %s, preroll: %d, frames buffer size: %d\n",
           avctx->url, ctx->frames_preroll, ctx->frames_buffer);

    /* The device expects the framerate to be fixed. */
    avpriv_set_pts_info(st, 64, st->time_base.num, st->time_base.den);

    ctx->video = 1;

    return 0;
}
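
/*
 * Preroll math, worked through for a 30000/1001 time base (29.97 fps) and a
 * 0.5 second preroll: frames_preroll = 30000 * 0.5 = 15000, divided by 1000
 * because the denominator exceeds 1000, giving 15 frames; frames_buffer is
 * twice that (30), capped at 60 scheduling slots.
 */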

static int decklink_setup_audio(AVFormatContext *avctx, AVStream *st)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    AVCodecParameters *c = st->codecpar;

    if (ctx->audio) {
        av_log(avctx, AV_LOG_ERROR, "Only one audio stream is supported!\n");
        return -1;
    }

    if (c->codec_id == AV_CODEC_ID_AC3) {
        /* Regardless of the number of channels in the codec, we're only
           using 2 SDI audio channels at 48000Hz */
        ctx->channels = 2;
    } else if (c->codec_id == AV_CODEC_ID_PCM_S16LE) {
        if (c->sample_rate != 48000) {
            av_log(avctx, AV_LOG_ERROR, "Unsupported sample rate!"
                   " Only 48kHz is supported.\n");
            return -1;
        }
        if (c->ch_layout.nb_channels != 2 && c->ch_layout.nb_channels != 8 && c->ch_layout.nb_channels != 16) {
            av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!"
                   " Only 2, 8 or 16 channels are supported.\n");
            return -1;
        }
        ctx->channels = c->ch_layout.nb_channels;
    } else {
        av_log(avctx, AV_LOG_ERROR, "Unsupported codec specified!"
               " Only PCM_S16LE and AC-3 are supported.\n");
        return -1;
    }

    if (ctx->dlo->EnableAudioOutput(bmdAudioSampleRate48kHz,
                                    bmdAudioSampleType16bitInteger,
                                    ctx->channels,
                                    bmdAudioOutputStreamTimestamped) != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not enable audio output!\n");
        return -1;
    }
    if (ctx->dlo->BeginAudioPreroll() != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not begin audio preroll!\n");
        return -1;
    }

    /* The device expects the sample rate to be fixed. */
    avpriv_set_pts_info(st, 64, 1, 48000);

    ctx->audio = 1;

    return 0;
}
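
/*
 * AC-3 is pinned to 2 channels because the compressed frame is carried as a
 * SMPTE ST 337 data burst occupying a single stereo pair of 16-bit samples
 * (see create_s337_payload() below), while PCM keeps its native 2/8/16
 * channel count.
 */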

/* Wrap the AC-3 packet into an S337 payload that is in S16LE format, which can be easily
   injected into the PCM stream.  Note: despite the generic name, only AC-3 is implemented. */
static int create_s337_payload(AVPacket *pkt, uint8_t **outbuf, int *outsize)
{
    /* Note: if the packet size is not divisible by four, we need to make the actual
       payload larger to ensure it ends on a two channel S16LE boundary */
    int payload_size = FFALIGN(pkt->size, 4) + 8;
    uint16_t bitcount = pkt->size * 8;
    uint8_t *s337_payload;
    PutByteContext pb;

    /* Sanity check: According to SMPTE ST 340:2015 Sec 4.1, the AC-3 sync frame will
       exactly match the 1536 samples of baseband (PCM) audio that it represents. */
    if (pkt->size > 1536)
        return AVERROR(EINVAL);

    /* Encapsulate AC3 syncframe into SMPTE 337 packet */
    s337_payload = (uint8_t *) av_malloc(payload_size);
    if (s337_payload == NULL)
        return AVERROR(ENOMEM);
    bytestream2_init_writer(&pb, s337_payload, payload_size);
    bytestream2_put_le16u(&pb, 0xf872);   /* Sync word 1 */
    bytestream2_put_le16u(&pb, 0x4e1f);   /* Sync word 2 */
    bytestream2_put_le16u(&pb, 0x0001);   /* Burst Info, including data type (1=ac3) */
    bytestream2_put_le16u(&pb, bitcount); /* Length code */
    for (int i = 0; i < (pkt->size - 1); i += 2)
        bytestream2_put_le16u(&pb, (pkt->data[i] << 8) | pkt->data[i+1]);

    /* Ensure the final payload is aligned on a 4-byte boundary */
    if (pkt->size & 1)
        bytestream2_put_le16u(&pb, pkt->data[pkt->size - 1] << 8);
    if ((pkt->size & 3) == 1 || (pkt->size & 3) == 2)
        bytestream2_put_le16u(&pb, 0);

    *outsize = payload_size;
    *outbuf = s337_payload;
    return 0;
}
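
/*
 * Resulting burst layout for, e.g., a 1536-byte AC-3 sync frame:
 *   0xf872, 0x4e1f   Pa/Pb preamble sync words
 *   0x0001           Pc burst info (data_type 1 = AC-3)
 *   0x3000           Pd length code (1536 * 8 = 12288 bits)
 *   768 little-endian words of byte-swapped AC-3 payload
 * for a total of FFALIGN(1536, 4) + 8 = 1544 bytes.
 */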

static int decklink_setup_subtitle(AVFormatContext *avctx, AVStream *st)
{
    int ret = -1;

    switch(st->codecpar->codec_id) {
#if CONFIG_LIBKLVANC
    case AV_CODEC_ID_EIA_608:
        /* No special setup required */
        ret = 0;
        break;
#endif
    default:
        av_log(avctx, AV_LOG_ERROR, "Unsupported subtitle codec specified\n");
        break;
    }

    return ret;
}

static int decklink_setup_data(AVFormatContext *avctx, AVStream *st)
{
    int ret = -1;

    switch(st->codecpar->codec_id) {
#if CONFIG_LIBKLVANC
    case AV_CODEC_ID_SMPTE_2038:
        /* No specific setup required */
        ret = 0;
        break;
#endif
    default:
        av_log(avctx, AV_LOG_ERROR, "Unsupported data codec specified\n");
        break;
    }

    return ret;
}
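
/*
 * SMPTE 2038 streams need no device setup here because their packets are never
 * written to the device directly: decklink_write_data_packet() pushes them onto
 * ctx->vanc_queue, and decklink_construct_vanc() later turns them into VANC
 * lines attached to the video frame they are timestamped against.
 */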

av_cold int ff_decklink_write_trailer(AVFormatContext *avctx)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;

    if (ctx->playback_started) {
        BMDTimeValue actual;
        ctx->dlo->StopScheduledPlayback(ctx->last_pts * ctx->bmd_tb_num,
                                        &actual, ctx->bmd_tb_den);
        ctx->dlo->DisableVideoOutput();
        if (ctx->audio)
            ctx->dlo->DisableAudioOutput();
    }

    ff_decklink_cleanup(avctx);

    if (ctx->output_callback)
        delete ctx->output_callback;

    pthread_mutex_destroy(&ctx->mutex);
    pthread_cond_destroy(&ctx->cond);

#if CONFIG_LIBKLVANC
    klvanc_context_destroy(ctx->vanc_ctx);
#endif
    ff_decklink_packet_queue_end(&ctx->vanc_queue);

    ff_ccfifo_uninit(&ctx->cc_fifo);
    av_freep(&cctx->ctx);

    return 0;
}

#if CONFIG_LIBKLVANC
static void construct_cc(AVFormatContext *avctx, struct decklink_ctx *ctx,
                         AVPacket *pkt, struct klvanc_line_set_s *vanc_lines)
{
    struct klvanc_packet_eia_708b_s *cdp;
    uint16_t *cdp_words;
    uint16_t len;
    uint8_t cc_count;
    size_t size;
    int ret, i;

    const uint8_t *data = av_packet_get_side_data(pkt, AV_PKT_DATA_A53_CC, &size);
    if (!data)
        return;

    cc_count = size / 3;

    ret = klvanc_create_eia708_cdp(&cdp);
    if (ret)
        return;

    ret = klvanc_set_framerate_EIA_708B(cdp, ctx->bmd_tb_num, ctx->bmd_tb_den);
    if (ret) {
        av_log(avctx, AV_LOG_ERROR, "Invalid framerate specified: %" PRId64 "/%" PRId64 "\n",
               ctx->bmd_tb_num, ctx->bmd_tb_den);
        klvanc_destroy_eia708_cdp(cdp);
        return;
    }

    if (cc_count > KLVANC_MAX_CC_COUNT) {
        av_log(avctx, AV_LOG_ERROR, "Illegal cc_count received: %d\n", cc_count);
        cc_count = KLVANC_MAX_CC_COUNT;
    }

    /* CC data */
    cdp->header.ccdata_present = 1;
    cdp->header.caption_service_active = 1;
    cdp->ccdata.cc_count = cc_count;
    for (i = 0; i < cc_count; i++) {
        if (data[3*i] & 0x04)
            cdp->ccdata.cc[i].cc_valid = 1;
        cdp->ccdata.cc[i].cc_type = data[3*i] & 0x03;
        cdp->ccdata.cc[i].cc_data[0] = data[3*i+1];
        cdp->ccdata.cc[i].cc_data[1] = data[3*i+2];
    }

    klvanc_finalize_EIA_708B(cdp, ctx->cdp_sequence_num++);
    ret = klvanc_convert_EIA_708B_to_words(cdp, &cdp_words, &len);
    klvanc_destroy_eia708_cdp(cdp);
    if (ret != 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed converting 708 packet to words\n");
        return;
    }

    ret = klvanc_line_insert(ctx->vanc_ctx, vanc_lines, cdp_words, len, 11, 0);
    free(cdp_words);
    if (ret != 0) {
        av_log(avctx, AV_LOG_ERROR, "VANC line insertion failed\n");
        return;
    }
}
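
/*
 * The A53 side data consumed above is a sequence of 3-byte triplets: a flags
 * byte carrying cc_valid (0x04) and cc_type (0x03) plus two data bytes, hence
 * cc_count = size / 3 (e.g. a 6-byte payload holds two triplets).  The finished
 * CDP is always inserted on VANC line 11.
 */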

/* See SMPTE ST 2016-3:2009 */
static void construct_afd(AVFormatContext *avctx, struct decklink_ctx *ctx,
                          AVPacket *pkt, struct klvanc_line_set_s *vanc_lines,
                          AVStream *st)
{
    struct klvanc_packet_afd_s *afd = NULL;
    uint16_t *afd_words = NULL;
    uint16_t len;
    size_t size;
    int f1_line = 12, f2_line = 0, ret;

    const uint8_t *data = av_packet_get_side_data(pkt, AV_PKT_DATA_AFD, &size);
    if (!data || size == 0)
        return;

    ret = klvanc_create_AFD(&afd);
    if (ret)
        return;

    ret = klvanc_set_AFD_val(afd, data[0]);
    if (ret) {
        av_log(avctx, AV_LOG_ERROR, "Invalid AFD value specified: %d\n",
               data[0]);
        klvanc_destroy_AFD(afd);
        return;
    }

    /* Compute the AR flag based on the DAR (see ST 2016-1:2009 Sec 9.1). Note, we treat
       anything below 1.4 as 4:3 (as opposed to the standard 1.33), because there are lots
       of streams in the field that aren't *exactly* 4:3 but a tiny bit larger after doing
       the math... */
    if (av_cmp_q((AVRational) {st->codecpar->width * st->codecpar->sample_aspect_ratio.num,
                 st->codecpar->height * st->codecpar->sample_aspect_ratio.den}, (AVRational) {14, 10}) == 1)
        afd->aspectRatio = ASPECT_16x9;
    else
        afd->aspectRatio = ASPECT_4x3;

    ret = klvanc_convert_AFD_to_words(afd, &afd_words, &len);
    if (ret) {
        av_log(avctx, AV_LOG_ERROR, "Failed converting AFD packet to words\n");
        goto out;
    }

    ret = klvanc_line_insert(ctx->vanc_ctx, vanc_lines, afd_words, len, f1_line, 0);
    if (ret) {
        av_log(avctx, AV_LOG_ERROR, "VANC line insertion failed\n");
        goto out;
    }

    /* For interlaced video, insert into both fields.  Switching lines for field 2
       derived from SMPTE RP 168:2009, Sec 6, Table 2. */
    switch (ctx->bmd_mode) {
    case bmdModeNTSC:
    case bmdModeNTSC2398:
        f2_line = 273 - 10 + f1_line;
        break;
    case bmdModePAL:
        f2_line = 319 - 6 + f1_line;
        break;
    case bmdModeHD1080i50:
    case bmdModeHD1080i5994:
    case bmdModeHD1080i6000:
        f2_line = 569 - 7 + f1_line;
        break;
    default:
        f2_line = 0;
        break;
    }

    if (f2_line > 0) {
        ret = klvanc_line_insert(ctx->vanc_ctx, vanc_lines, afd_words, len, f2_line, 0);
        if (ret) {
            av_log(avctx, AV_LOG_ERROR, "VANC line insertion failed\n");
            goto out;
        }
    }

out:
    if (afd)
        klvanc_destroy_AFD(afd);
    if (afd_words)
        free(afd_words);
}
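
/*
 * Worked example of the DAR check above: 1920x1080 with a 1:1 SAR gives
 * 1920/1080 = 16/9 > 14/10, so ASPECT_16x9; 720x576 with a 16:15 SAR gives
 * (720*16)/(576*15) = 4/3 < 14/10, so ASPECT_4x3.
 */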

/* Parse any EIA-608 subtitles sitting on the queue, and write packet side data
   that will later be handled by construct_cc... */
static void parse_608subs(AVFormatContext *avctx, struct decklink_ctx *ctx, AVPacket *pkt)
{
    size_t cc_size = ff_ccfifo_getoutputsize(&ctx->cc_fifo);
    uint8_t *cc_data;

    if (!ff_ccfifo_ccdetected(&ctx->cc_fifo))
        return;

    cc_data = av_packet_new_side_data(pkt, AV_PKT_DATA_A53_CC, cc_size);
    if (cc_data)
        ff_ccfifo_injectbytes(&ctx->cc_fifo, cc_data, cc_size);
}

static int decklink_construct_vanc(AVFormatContext *avctx, struct decklink_ctx *ctx,
                                   AVPacket *pkt, decklink_frame *frame,
                                   AVStream *st)
{
    struct klvanc_line_set_s vanc_lines = { 0 };
    int ret = 0, i;

    if (!ctx->supports_vanc)
        return 0;

    parse_608subs(avctx, ctx, pkt);
    construct_cc(avctx, ctx, pkt, &vanc_lines);
    construct_afd(avctx, ctx, pkt, &vanc_lines, st);

    /* See if there are any pending data packets to process */
    while (ff_decklink_packet_queue_size(&ctx->vanc_queue) > 0) {
        AVStream *vanc_st;
        AVPacket vanc_pkt;
        int64_t pts;

        pts = ff_decklink_packet_queue_peekpts(&ctx->vanc_queue);
        if (pts > ctx->last_pts) {
            /* We haven't gotten to the video frame we are supposed to inject
               the oldest VANC packet into yet, so leave it on the queue... */
            break;
        }

        ret = ff_decklink_packet_queue_get(&ctx->vanc_queue, &vanc_pkt, 1);
        if (vanc_pkt.pts + 1 < ctx->last_pts) {
            av_log(avctx, AV_LOG_WARNING, "VANC packet too old, throwing away\n");
            av_packet_unref(&vanc_pkt);
            continue;
        }

        vanc_st = avctx->streams[vanc_pkt.stream_index];
        if (vanc_st->codecpar->codec_id == AV_CODEC_ID_SMPTE_2038) {
            struct klvanc_smpte2038_anc_data_packet_s *pkt_2038 = NULL;

            klvanc_smpte2038_parse_pes_payload(vanc_pkt.data, vanc_pkt.size, &pkt_2038);
            if (pkt_2038 == NULL) {
                av_log(avctx, AV_LOG_ERROR, "failed to decode SMPTE 2038 PES packet");
                av_packet_unref(&vanc_pkt);
                continue;
            }
            for (int i = 0; i < pkt_2038->lineCount; i++) {
                struct klvanc_smpte2038_anc_data_line_s *l = &pkt_2038->lines[i];
                uint16_t *vancWords = NULL;
                uint16_t vancWordCount;

                if (klvanc_smpte2038_convert_line_to_words(l, &vancWords,
                                                           &vancWordCount) < 0)
                    break;

                ret = klvanc_line_insert(ctx->vanc_ctx, &vanc_lines, vancWords,
                                         vancWordCount, l->line_number, 0);
                free(vancWords);
                if (ret != 0) {
                    av_log(avctx, AV_LOG_ERROR, "VANC line insertion failed\n");
                    break;
                }
            }
            klvanc_smpte2038_anc_data_packet_free(pkt_2038);
        }
        av_packet_unref(&vanc_pkt);
    }

    IDeckLinkVideoFrameAncillary *vanc;
    int result = ctx->dlo->CreateAncillaryData(bmdFormat10BitYUV, &vanc);
    if (result != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create vanc\n");
        ret = AVERROR(EIO);
        goto done;
    }

    /* Now that we've got all the VANC lines in a nice orderly manner, generate the
       final VANC sections for the Decklink output */
    for (i = 0; i < vanc_lines.num_lines; i++) {
        struct klvanc_line_s *line = vanc_lines.lines[i];
        int real_line;
        void *buf;

        if (!line)
            break;

        /* FIXME: include hack for certain Decklink cards which mis-represent
           line numbers for pSF frames */
        real_line = line->line_number;

        result = vanc->GetBufferForVerticalBlankingLine(real_line, &buf);
        if (result != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Failed to get VANC line %d: %d", real_line, result);
            continue;
        }

        /* Generate the full line taking into account all VANC packets on that line */
        result = klvanc_generate_vanc_line_v210(ctx->vanc_ctx, line, (uint8_t *) buf,
                                                ctx->bmd_width);
        if (result) {
            av_log(avctx, AV_LOG_ERROR, "Failed to generate VANC line\n");
            continue;
        }
    }

    result = frame->SetAncillaryData(vanc);
    vanc->Release();
    if (result != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Failed to set vanc: %d", result);
        ret = AVERROR(EIO);
    }

done:
    for (i = 0; i < vanc_lines.num_lines; i++)
        klvanc_line_free(vanc_lines.lines[i]);

    return ret;
}
#endif

static int decklink_write_video_packet(AVFormatContext *avctx, AVPacket *pkt)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    AVStream *st = avctx->streams[pkt->stream_index];
    AVFrame *avframe = NULL, *tmp = (AVFrame *)pkt->data;
    AVPacket *avpacket = NULL;
    decklink_frame *frame;
    uint32_t buffered;
    HRESULT hr;

    ctx->last_pts = FFMAX(ctx->last_pts, pkt->pts);

    if (st->codecpar->codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
        if (tmp->format != AV_PIX_FMT_UYVY422 ||
            tmp->width  != ctx->bmd_width ||
            tmp->height != ctx->bmd_height) {
            av_log(avctx, AV_LOG_ERROR, "Got a frame with invalid pixel format or dimension.\n");
            return AVERROR(EINVAL);
        }

        avframe = av_frame_clone(tmp);
        if (!avframe) {
            av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n");
            return AVERROR(EIO);
        }

        frame = new decklink_frame(ctx, avframe, st->codecpar->codec_id, avframe->height, avframe->width);
    } else {
        avpacket = av_packet_clone(pkt);
        if (!avpacket) {
            av_log(avctx, AV_LOG_ERROR, "Could not clone video packet.\n");
            return AVERROR(EIO);
        }

        frame = new decklink_frame(ctx, avpacket, st->codecpar->codec_id, ctx->bmd_height, ctx->bmd_width);

#if CONFIG_LIBKLVANC
        if (decklink_construct_vanc(avctx, ctx, pkt, frame, st))
            av_log(avctx, AV_LOG_ERROR, "Failed to construct VANC\n");
#endif
    }

    if (!frame) {
        av_log(avctx, AV_LOG_ERROR, "Could not create new frame.\n");
        av_frame_free(&avframe);
        av_packet_free(&avpacket);
        return AVERROR(EIO);
    }

    /* Always keep at most one second of frames buffered. */
    pthread_mutex_lock(&ctx->mutex);
    while (ctx->frames_buffer_available_spots == 0) {
        pthread_cond_wait(&ctx->cond, &ctx->mutex);
    }
    ctx->frames_buffer_available_spots--;
    pthread_mutex_unlock(&ctx->mutex);

    if (ctx->first_pts == AV_NOPTS_VALUE)
        ctx->first_pts = pkt->pts;

    /* Schedule frame for playback. */
    hr = ctx->dlo->ScheduleVideoFrame((class IDeckLinkVideoFrame *) frame,
                                      pkt->pts * ctx->bmd_tb_num,
                                      ctx->bmd_tb_num, ctx->bmd_tb_den);
    /* Pass ownership to DeckLink, or release on failure */
    frame->Release();
    if (hr != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not schedule video frame."
               " error %08x.\n", (uint32_t) hr);
        return AVERROR(EIO);
    }

    ctx->dlo->GetBufferedVideoFrameCount(&buffered);
    av_log(avctx, AV_LOG_DEBUG, "Buffered video frames: %d.\n", (int) buffered);
    if (pkt->pts > 2 && buffered <= 2)
        av_log(avctx, AV_LOG_WARNING, "There are not enough buffered video frames."
               " Video may misbehave!\n");

    /* Preroll video frames. */
    if (!ctx->playback_started && pkt->pts > (ctx->first_pts + ctx->frames_preroll)) {
        av_log(avctx, AV_LOG_DEBUG, "Ending audio preroll.\n");
        if (ctx->audio && ctx->dlo->EndAudioPreroll() != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Could not end audio preroll!\n");
            return AVERROR(EIO);
        }
        av_log(avctx, AV_LOG_DEBUG, "Starting scheduled playback.\n");
        if (ctx->dlo->StartScheduledPlayback(ctx->first_pts * ctx->bmd_tb_num, ctx->bmd_tb_den, 1.0) != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Could not start scheduled playback!\n");
            return AVERROR(EIO);
        }
        ctx->playback_started = 1;
    }

    return 0;
}
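
/*
 * Scheduling example: for 29.97 fps output the mode's time base is 1001/30000,
 * so a packet with pts = n is presented at n * 1001 ticks of a 30000 Hz clock
 * with a duration of 1001 ticks.  Playback itself is only started once more
 * than frames_preroll frames past first_pts have been queued.
 */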

static int decklink_write_audio_packet(AVFormatContext *avctx, AVPacket *pkt)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    AVStream *st = avctx->streams[pkt->stream_index];
    int sample_count;
    uint32_t buffered;
    uint8_t *outbuf = NULL;
    int ret = 0;

    ctx->dlo->GetBufferedAudioSampleFrameCount(&buffered);
    if (pkt->pts > 1 && !buffered)
        av_log(avctx, AV_LOG_WARNING, "There's no buffered audio."
               " Audio will misbehave!\n");

    if (st->codecpar->codec_id == AV_CODEC_ID_AC3) {
        /* Encapsulate AC3 syncframe into SMPTE 337 packet */
        int outbuf_size;
        ret = create_s337_payload(pkt, &outbuf, &outbuf_size);
        if (ret < 0)
            return ret;
        sample_count = outbuf_size / 4;
    } else {
        sample_count = pkt->size / (ctx->channels << 1);
        outbuf = pkt->data;
    }

    if (ctx->dlo->ScheduleAudioSamples(outbuf, sample_count, pkt->pts,
                                       bmdAudioSampleRate48kHz, NULL) != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not schedule audio samples.\n");
        ret = AVERROR(EIO);
    }

    if (st->codecpar->codec_id == AV_CODEC_ID_AC3)
        av_freep(&outbuf);

    return ret;
}
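
/*
 * sample_count is expressed in 48 kHz audio frames: an S337-wrapped AC-3 burst
 * uses 4 bytes per frame (one 16-bit sample on each of 2 channels), so a
 * 1544-byte payload schedules 386 frames; raw PCM divides by channels * 2
 * bytes, e.g. a 6144-byte packet of 16-channel S16LE audio is 192 frames.
 */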

static int decklink_write_subtitle_packet(AVFormatContext *avctx, AVPacket *pkt)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;

    ff_ccfifo_extractbytes(&ctx->cc_fifo, pkt->data, pkt->size);

    return 0;
}

static int decklink_write_data_packet(AVFormatContext *avctx, AVPacket *pkt)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;

    if (ff_decklink_packet_queue_put(&ctx->vanc_queue, pkt) < 0) {
        av_log(avctx, AV_LOG_WARNING, "Failed to queue DATA packet\n");
    }

    return 0;
}

extern "C" {

av_cold int ff_decklink_write_header(AVFormatContext *avctx)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx;
    unsigned int n;
    int ret;

    ctx = (struct decklink_ctx *) av_mallocz(sizeof(struct decklink_ctx));
    if (!ctx)
        return AVERROR(ENOMEM);
    ctx->list_devices = cctx->list_devices;
    ctx->list_formats = cctx->list_formats;
    ctx->preroll = cctx->preroll;
    ctx->duplex_mode = cctx->duplex_mode;
    ctx->first_pts = AV_NOPTS_VALUE;
    if (cctx->link > 0 && (unsigned int)cctx->link < FF_ARRAY_ELEMS(decklink_link_conf_map))
        ctx->link = decklink_link_conf_map[cctx->link];
    cctx->ctx = ctx;
#if CONFIG_LIBKLVANC
    if (klvanc_context_create(&ctx->vanc_ctx) < 0) {
        av_log(avctx, AV_LOG_ERROR, "Cannot create VANC library context\n");
        return AVERROR(ENOMEM);
    }
    ctx->supports_vanc = 1;
#endif

    /* List available devices and exit. */
    if (ctx->list_devices) {
        ff_decklink_list_devices_legacy(avctx, 0, 1);
        return AVERROR_EXIT;
    }

    ret = ff_decklink_init_device(avctx, avctx->url);
    if (ret < 0)
        return ret;

    /* Get output device. */
    if (ctx->dl->QueryInterface(IID_IDeckLinkOutput, (void **) &ctx->dlo) != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not open output device from '%s'\n",
               avctx->url);
        ret = AVERROR(EIO);
        goto error;
    }

    /* List supported formats. */
    if (ctx->list_formats) {
        ff_decklink_list_formats(avctx, DIRECTION_OUT);
        ret = AVERROR_EXIT;
        goto error;
    }

    /* Setup streams. */
    ret = AVERROR(EIO);
    for (n = 0; n < avctx->nb_streams; n++) {
        AVStream *st = avctx->streams[n];
        AVCodecParameters *c = st->codecpar;
        if (c->codec_type == AVMEDIA_TYPE_AUDIO) {
            if (decklink_setup_audio(avctx, st))
                goto error;
        } else if (c->codec_type == AVMEDIA_TYPE_VIDEO) {
            if (decklink_setup_video(avctx, st))
                goto error;
        } else if (c->codec_type == AVMEDIA_TYPE_DATA) {
            if (decklink_setup_data(avctx, st))
                goto error;
        } else if (c->codec_type == AVMEDIA_TYPE_SUBTITLE) {
            if (decklink_setup_subtitle(avctx, st))
                goto error;
        } else {
            av_log(avctx, AV_LOG_ERROR, "Unsupported stream type.\n");
            goto error;
        }
    }

    /* Reconfigure the data/subtitle stream clocks to match the video */
    for (n = 0; n < avctx->nb_streams; n++) {
        AVStream *st = avctx->streams[n];
        AVCodecParameters *c = st->codecpar;

        if (c->codec_type == AVMEDIA_TYPE_DATA ||
            c->codec_type == AVMEDIA_TYPE_SUBTITLE)
            avpriv_set_pts_info(st, 64, ctx->bmd_tb_num, ctx->bmd_tb_den);
    }
    ff_decklink_packet_queue_init(avctx, &ctx->vanc_queue, cctx->vanc_queue_size);

    ret = ff_ccfifo_init(&ctx->cc_fifo, av_make_q(ctx->bmd_tb_den, ctx->bmd_tb_num), avctx);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failure to setup CC FIFO queue\n");
        goto error;
    }

    return 0;

error:
    ff_decklink_cleanup(avctx);
    return ret;
}

int ff_decklink_write_packet(AVFormatContext *avctx, AVPacket *pkt)
{
    AVStream *st = avctx->streams[pkt->stream_index];

    if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        return decklink_write_video_packet(avctx, pkt);
    else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
        return decklink_write_audio_packet(avctx, pkt);
    else if (st->codecpar->codec_type == AVMEDIA_TYPE_DATA)
        return decklink_write_data_packet(avctx, pkt);
    else if (st->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE)
        return decklink_write_subtitle_packet(avctx, pkt);

    return AVERROR(EIO);
}

int ff_decklink_list_output_devices(AVFormatContext *avctx, struct AVDeviceInfoList *device_list)
{
    return ff_decklink_list_devices(avctx, device_list, 0, 1);
}

} /* extern "C" */