FFmpeg
decklink_enc.cpp
/*
 * Blackmagic DeckLink output
 * Copyright (c) 2013-2014 Ramiro Polla
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <atomic>
using std::atomic;

/* Include internal.h first to avoid conflict between winsock.h (used by
 * DeckLink headers) and winsock2.h (used by libavformat) in MSVC++ builds */
extern "C" {
#include "libavformat/internal.h"
}

#include <DeckLinkAPI.h>

extern "C" {
#include "libavformat/avformat.h"
#include "libavutil/internal.h"
#include "libavutil/imgutils.h"
#include "avdevice.h"
}

#include "decklink_common.h"
#include "decklink_enc.h"
#if CONFIG_LIBKLVANC
#include "libklvanc/vanc.h"
#include "libklvanc/vanc-lines.h"
#include "libklvanc/pixels.h"
#endif

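/* This file implements the output (playback) half of the DeckLink avdevice
 * module: ff_decklink_write_header() opens and configures the device,
 * ff_decklink_write_packet() schedules each muxed packet for playback, and
 * ff_decklink_write_trailer() stops playback and tears everything down.
 * Video is accepted either as wrapped AVFrames in UYVY422 or as pre-encoded
 * V210 packets; audio must be 48 kHz 16-bit PCM.  A typical invocation looks
 * roughly like the following (the device name is illustrative):
 *
 *     ffmpeg -i INPUT -pix_fmt uyvy422 -f decklink 'DeckLink Mini Monitor'
 */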
/* DeckLink callback class declaration */
class decklink_frame : public IDeckLinkVideoFrame
{
public:
    decklink_frame(struct decklink_ctx *ctx, AVFrame *avframe, AVCodecID codec_id, int height, int width) :
        _ctx(ctx), _avframe(avframe), _avpacket(NULL), _codec_id(codec_id), _ancillary(NULL), _height(height), _width(width), _refs(1) { }
    decklink_frame(struct decklink_ctx *ctx, AVPacket *avpacket, AVCodecID codec_id, int height, int width) :
        _ctx(ctx), _avframe(NULL), _avpacket(avpacket), _codec_id(codec_id), _ancillary(NULL), _height(height), _width(width), _refs(1) { }
    virtual long           STDMETHODCALLTYPE GetWidth      (void)          { return _width; }
    virtual long           STDMETHODCALLTYPE GetHeight     (void)          { return _height; }
    virtual long           STDMETHODCALLTYPE GetRowBytes   (void)
    {
        if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
            return _avframe->linesize[0] < 0 ? -_avframe->linesize[0] : _avframe->linesize[0];
        else
            return ((GetWidth() + 47) / 48) * 128;
    }
    virtual BMDPixelFormat STDMETHODCALLTYPE GetPixelFormat(void)
    {
        if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
            return bmdFormat8BitYUV;
        else
            return bmdFormat10BitYUV;
    }
    virtual BMDFrameFlags  STDMETHODCALLTYPE GetFlags      (void)
    {
        if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME)
            return _avframe->linesize[0] < 0 ? bmdFrameFlagFlipVertical : bmdFrameFlagDefault;
        else
            return bmdFrameFlagDefault;
    }

    virtual HRESULT        STDMETHODCALLTYPE GetBytes      (void **buffer)
    {
        if (_codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
            if (_avframe->linesize[0] < 0)
                *buffer = (void *)(_avframe->data[0] + _avframe->linesize[0] * (_avframe->height - 1));
            else
                *buffer = (void *)(_avframe->data[0]);
        } else {
            *buffer = (void *)(_avpacket->data);
        }
        return S_OK;
    }

    virtual HRESULT STDMETHODCALLTYPE GetTimecode     (BMDTimecodeFormat format, IDeckLinkTimecode **timecode) { return S_FALSE; }
    virtual HRESULT STDMETHODCALLTYPE GetAncillaryData(IDeckLinkVideoFrameAncillary **ancillary)
    {
        *ancillary = _ancillary;
        if (_ancillary) {
            _ancillary->AddRef();
            return S_OK;
        } else {
            return S_FALSE;
        }
    }
    virtual HRESULT STDMETHODCALLTYPE SetAncillaryData(IDeckLinkVideoFrameAncillary *ancillary)
    {
        if (_ancillary)
            _ancillary->Release();
        _ancillary = ancillary;
        _ancillary->AddRef();
        return S_OK;
    }
    virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID *ppv) { return E_NOINTERFACE; }
    virtual ULONG   STDMETHODCALLTYPE AddRef(void)                            { return ++_refs; }
    virtual ULONG   STDMETHODCALLTYPE Release(void)
    {
        int ret = --_refs;
        if (!ret) {
            av_frame_free(&_avframe);
            av_packet_free(&_avpacket);
            if (_ancillary)
                _ancillary->Release();
            delete this;
        }
        return ret;
    }

    struct decklink_ctx *_ctx;
    AVFrame *_avframe;
    AVPacket *_avpacket;
    AVCodecID _codec_id;
    IDeckLinkVideoFrameAncillary *_ancillary;
    int _height;
    int _width;

private:
    std::atomic<int> _refs;
};

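/* Completion callback: when the card has consumed a scheduled frame, release
 * the wrapped AVFrame/AVPacket data and signal the writer thread that a
 * buffer slot is available again. */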
class decklink_output_callback : public IDeckLinkVideoOutputCallback
{
public:
    virtual HRESULT STDMETHODCALLTYPE ScheduledFrameCompleted(IDeckLinkVideoFrame *_frame, BMDOutputFrameCompletionResult result)
    {
        decklink_frame *frame = static_cast<decklink_frame *>(_frame);
        struct decklink_ctx *ctx = frame->_ctx;

        if (frame->_avframe)
            av_frame_unref(frame->_avframe);
        if (frame->_avpacket)
            av_packet_unref(frame->_avpacket);

        pthread_mutex_lock(&ctx->mutex);
        ctx->frames_buffer_available_spots++;
        pthread_cond_broadcast(&ctx->cond);
        pthread_mutex_unlock(&ctx->mutex);

        return S_OK;
    }
    virtual HRESULT STDMETHODCALLTYPE ScheduledPlaybackHasStopped(void)       { return S_OK; }
    virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID *ppv) { return E_NOINTERFACE; }
    virtual ULONG   STDMETHODCALLTYPE AddRef(void)                            { return 1; }
    virtual ULONG   STDMETHODCALLTYPE Release(void)                           { return 1; }
};

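/* Validate and configure the single supported video stream: check the
 * codec/pixel format, select the matching DeckLink display mode, enable
 * video output (with VANC when the card supports it) and size the buffer
 * used to throttle the writer. */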
static int decklink_setup_video(AVFormatContext *avctx, AVStream *st)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    AVCodecParameters *c = st->codecpar;

    if (ctx->video) {
        av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");
        return -1;
    }

    if (c->codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
        if (c->format != AV_PIX_FMT_UYVY422) {
            av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format!"
                   " Only AV_PIX_FMT_UYVY422 is supported.\n");
            return -1;
        }
        ctx->raw_format = bmdFormat8BitYUV;
    } else if (c->codec_id != AV_CODEC_ID_V210) {
        av_log(avctx, AV_LOG_ERROR, "Unsupported codec type!"
               " Only V210 and wrapped frame with AV_PIX_FMT_UYVY422 are supported.\n");
        return -1;
    } else {
        ctx->raw_format = bmdFormat10BitYUV;
    }

    if (ff_decklink_set_configs(avctx, DIRECTION_OUT) < 0) {
        av_log(avctx, AV_LOG_ERROR, "Could not set output configuration\n");
        return -1;
    }
    if (ff_decklink_set_format(avctx, c->width, c->height,
                               st->time_base.num, st->time_base.den, c->field_order)) {
        av_log(avctx, AV_LOG_ERROR, "Unsupported video size, framerate or field order!"
               " Check available formats with -list_formats 1.\n");
        return -1;
    }
    if (ctx->supports_vanc && ctx->dlo->EnableVideoOutput(ctx->bmd_mode, bmdVideoOutputVANC) != S_OK) {
        av_log(avctx, AV_LOG_WARNING, "Could not enable video output with VANC! Trying without...\n");
        ctx->supports_vanc = 0;
    }
    if (!ctx->supports_vanc && ctx->dlo->EnableVideoOutput(ctx->bmd_mode, bmdVideoOutputFlagDefault) != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not enable video output!\n");
        return -1;
    }

    /* Set callback. */
    ctx->output_callback = new decklink_output_callback();
    ctx->dlo->SetScheduledFrameCompletionCallback(ctx->output_callback);

    ctx->frames_preroll = st->time_base.den * ctx->preroll;
    if (st->time_base.den > 1000)
        ctx->frames_preroll /= 1000;

    /* Buffer twice as many frames as the preroll. */
    ctx->frames_buffer = ctx->frames_preroll * 2;
    ctx->frames_buffer = FFMIN(ctx->frames_buffer, 60);
    pthread_mutex_init(&ctx->mutex, NULL);
    pthread_cond_init(&ctx->cond, NULL);
    ctx->frames_buffer_available_spots = ctx->frames_buffer;

    av_log(avctx, AV_LOG_DEBUG, "output: %s, preroll: %d, frames buffer size: %d\n",
           avctx->url, ctx->frames_preroll, ctx->frames_buffer);

    /* The device expects the framerate to be fixed. */
    avpriv_set_pts_info(st, 64, st->time_base.num, st->time_base.den);

    ctx->video = 1;

    return 0;
}

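/* Validate and configure the single supported audio stream: 48 kHz,
 * 16-bit PCM with 2, 8 or 16 channels, output as a timestamped stream
 * with audio preroll. */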
static int decklink_setup_audio(AVFormatContext *avctx, AVStream *st)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    AVCodecParameters *c = st->codecpar;

    if (ctx->audio) {
        av_log(avctx, AV_LOG_ERROR, "Only one audio stream is supported!\n");
        return -1;
    }
    if (c->sample_rate != 48000) {
        av_log(avctx, AV_LOG_ERROR, "Unsupported sample rate!"
               " Only 48kHz is supported.\n");
        return -1;
    }
    if (c->ch_layout.nb_channels != 2 && c->ch_layout.nb_channels != 8 && c->ch_layout.nb_channels != 16) {
        av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!"
               " Only 2, 8 or 16 channels are supported.\n");
        return -1;
    }
    if (ctx->dlo->EnableAudioOutput(bmdAudioSampleRate48kHz,
                                    bmdAudioSampleType16bitInteger,
                                    c->ch_layout.nb_channels,
                                    bmdAudioOutputStreamTimestamped) != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not enable audio output!\n");
        return -1;
    }
    if (ctx->dlo->BeginAudioPreroll() != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not begin audio preroll!\n");
        return -1;
    }

    /* The device expects the sample rate to be fixed. */
    avpriv_set_pts_info(st, 64, 1, c->sample_rate);
    ctx->channels = c->ch_layout.nb_channels;

    ctx->audio = 1;

    return 0;
}

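/* Stop scheduled playback at the last pts we have seen, disable the
 * video/audio outputs and free the per-device context. */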
av_cold int ff_decklink_write_trailer(AVFormatContext *avctx)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;

    if (ctx->playback_started) {
        BMDTimeValue actual;
        ctx->dlo->StopScheduledPlayback(ctx->last_pts * ctx->bmd_tb_num,
                                        &actual, ctx->bmd_tb_den);
        ctx->dlo->DisableVideoOutput();
        if (ctx->audio)
            ctx->dlo->DisableAudioOutput();
    }

    ff_decklink_cleanup(avctx);

    if (ctx->output_callback)
        delete ctx->output_callback;

    pthread_mutex_destroy(&ctx->mutex);
    pthread_cond_destroy(&ctx->cond);

#if CONFIG_LIBKLVANC
    klvanc_context_destroy(ctx->vanc_ctx);
#endif

    av_freep(&cctx->ctx);

    return 0;
}

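/* With libklvanc enabled, A53 closed captions attached to the video packet
 * as side data are converted into an EIA-708 CDP and queued for insertion
 * as a VANC line. */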
#if CONFIG_LIBKLVANC
static void construct_cc(AVFormatContext *avctx, struct decklink_ctx *ctx,
                         AVPacket *pkt, struct klvanc_line_set_s *vanc_lines)
{
    struct klvanc_packet_eia_708b_s *cdp;
    uint16_t *cdp_words;
    uint16_t len;
    uint8_t cc_count;
    size_t size;
    int ret, i;

    const uint8_t *data = av_packet_get_side_data(pkt, AV_PKT_DATA_A53_CC, &size);
    if (!data)
        return;

    cc_count = size / 3;

    ret = klvanc_create_eia708_cdp(&cdp);
    if (ret)
        return;

    ret = klvanc_set_framerate_EIA_708B(cdp, ctx->bmd_tb_num, ctx->bmd_tb_den);
    if (ret) {
        av_log(avctx, AV_LOG_ERROR, "Invalid framerate specified: %lld/%lld\n",
               ctx->bmd_tb_num, ctx->bmd_tb_den);
        klvanc_destroy_eia708_cdp(cdp);
        return;
    }

    if (cc_count > KLVANC_MAX_CC_COUNT) {
        av_log(avctx, AV_LOG_ERROR, "Illegal cc_count received: %d\n", cc_count);
        cc_count = KLVANC_MAX_CC_COUNT;
    }

    /* CC data */
    cdp->header.ccdata_present = 1;
    cdp->header.caption_service_active = 1;
    cdp->ccdata.cc_count = cc_count;
    for (i = 0; i < cc_count; i++) {
        if (data[3*i] & 0x04)
            cdp->ccdata.cc[i].cc_valid = 1;
        cdp->ccdata.cc[i].cc_type = data[3*i] & 0x03;
        cdp->ccdata.cc[i].cc_data[0] = data[3*i+1];
        cdp->ccdata.cc[i].cc_data[1] = data[3*i+2];
    }

    klvanc_finalize_EIA_708B(cdp, ctx->cdp_sequence_num++);
    ret = klvanc_convert_EIA_708B_to_words(cdp, &cdp_words, &len);
    klvanc_destroy_eia708_cdp(cdp);
    if (ret != 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed converting 708 packet to words\n");
        return;
    }

    ret = klvanc_line_insert(ctx->vanc_ctx, vanc_lines, cdp_words, len, 11, 0);
    free(cdp_words);
    if (ret != 0) {
        av_log(avctx, AV_LOG_ERROR, "VANC line insertion failed\n");
        return;
    }
}

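/* Render all queued VANC packets for this frame into V210 vertical-blanking
 * lines and attach the resulting ancillary data to the outgoing DeckLink
 * frame. */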
static int decklink_construct_vanc(AVFormatContext *avctx, struct decklink_ctx *ctx,
                                   AVPacket *pkt, decklink_frame *frame)
{
    struct klvanc_line_set_s vanc_lines = { 0 };
    int ret = 0, i;

    if (!ctx->supports_vanc)
        return 0;

    construct_cc(avctx, ctx, pkt, &vanc_lines);

    IDeckLinkVideoFrameAncillary *vanc;
    int result = ctx->dlo->CreateAncillaryData(bmdFormat10BitYUV, &vanc);
    if (result != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create vanc\n");
        ret = AVERROR(EIO);
        goto done;
    }

    /* Now that we've got all the VANC lines in a nice orderly manner, generate the
       final VANC sections for the Decklink output */
    for (i = 0; i < vanc_lines.num_lines; i++) {
        struct klvanc_line_s *line = vanc_lines.lines[i];
        int real_line;
        void *buf;

        if (!line)
            break;

        /* FIXME: include hack for certain Decklink cards which mis-represent
           line numbers for pSF frames */
        real_line = line->line_number;

        result = vanc->GetBufferForVerticalBlankingLine(real_line, &buf);
        if (result != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Failed to get VANC line %d: %d", real_line, result);
            continue;
        }

        /* Generate the full line taking into account all VANC packets on that line */
        result = klvanc_generate_vanc_line_v210(ctx->vanc_ctx, line, (uint8_t *) buf,
                                                ctx->bmd_width);
        if (result) {
            av_log(avctx, AV_LOG_ERROR, "Failed to generate VANC line\n");
            continue;
        }
    }

    result = frame->SetAncillaryData(vanc);
    vanc->Release();
    if (result != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Failed to set vanc: %d", result);
        ret = AVERROR(EIO);
    }

done:
    for (i = 0; i < vanc_lines.num_lines; i++)
        klvanc_line_free(vanc_lines.lines[i]);

    return ret;
}
#endif

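/* Wrap the incoming video packet (or wrapped AVFrame) in a decklink_frame,
 * block while the internal buffer is full, then hand it to the driver via
 * ScheduleVideoFrame().  Scheduled playback is started once enough frames
 * have been prerolled. */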
static int decklink_write_video_packet(AVFormatContext *avctx, AVPacket *pkt)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    AVStream *st = avctx->streams[pkt->stream_index];
    AVFrame *avframe = NULL, *tmp = (AVFrame *)pkt->data;
    AVPacket *avpacket = NULL;
    decklink_frame *frame;
    uint32_t buffered;
    HRESULT hr;

    if (st->codecpar->codec_id == AV_CODEC_ID_WRAPPED_AVFRAME) {
        if (tmp->format != AV_PIX_FMT_UYVY422 ||
            tmp->width  != ctx->bmd_width ||
            tmp->height != ctx->bmd_height) {
            av_log(avctx, AV_LOG_ERROR, "Got a frame with invalid pixel format or dimension.\n");
            return AVERROR(EINVAL);
        }

        avframe = av_frame_clone(tmp);
        if (!avframe) {
            av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n");
            return AVERROR(EIO);
        }

        frame = new decklink_frame(ctx, avframe, st->codecpar->codec_id, avframe->height, avframe->width);
    } else {
        avpacket = av_packet_clone(pkt);
        if (!avpacket) {
            av_log(avctx, AV_LOG_ERROR, "Could not clone video packet.\n");
            return AVERROR(EIO);
        }

        frame = new decklink_frame(ctx, avpacket, st->codecpar->codec_id, ctx->bmd_height, ctx->bmd_width);

#if CONFIG_LIBKLVANC
        if (decklink_construct_vanc(avctx, ctx, pkt, frame))
            av_log(avctx, AV_LOG_ERROR, "Failed to construct VANC\n");
#endif
    }

    if (!frame) {
        av_log(avctx, AV_LOG_ERROR, "Could not create new frame.\n");
        av_frame_free(&avframe);
        av_packet_free(&avpacket);
        return AVERROR(EIO);
    }

    /* Always keep at most one second of frames buffered. */
    pthread_mutex_lock(&ctx->mutex);
    while (ctx->frames_buffer_available_spots == 0) {
        pthread_cond_wait(&ctx->cond, &ctx->mutex);
    }
    ctx->frames_buffer_available_spots--;
    pthread_mutex_unlock(&ctx->mutex);

    /* Schedule frame for playback. */
    hr = ctx->dlo->ScheduleVideoFrame((class IDeckLinkVideoFrame *) frame,
                                      pkt->pts * ctx->bmd_tb_num,
                                      ctx->bmd_tb_num, ctx->bmd_tb_den);
    /* Pass ownership to DeckLink, or release on failure */
    frame->Release();
    if (hr != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not schedule video frame."
               " error %08x.\n", (uint32_t) hr);
        return AVERROR(EIO);
    }

    ctx->dlo->GetBufferedVideoFrameCount(&buffered);
    av_log(avctx, AV_LOG_DEBUG, "Buffered video frames: %d.\n", (int) buffered);
    if (pkt->pts > 2 && buffered <= 2)
        av_log(avctx, AV_LOG_WARNING, "There are not enough buffered video frames."
               " Video may misbehave!\n");

    /* Preroll video frames. */
    if (!ctx->playback_started && pkt->pts > ctx->frames_preroll) {
        av_log(avctx, AV_LOG_DEBUG, "Ending audio preroll.\n");
        if (ctx->audio && ctx->dlo->EndAudioPreroll() != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Could not end audio preroll!\n");
            return AVERROR(EIO);
        }
        av_log(avctx, AV_LOG_DEBUG, "Starting scheduled playback.\n");
        if (ctx->dlo->StartScheduledPlayback(0, ctx->bmd_tb_den, 1.0) != S_OK) {
            av_log(avctx, AV_LOG_ERROR, "Could not start scheduled playback!\n");
            return AVERROR(EIO);
        }
        ctx->playback_started = 1;
    }

    return 0;
}

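/* Queue interleaved 16-bit audio samples for playback at the packet's pts
 * and warn if the device's audio buffer has run dry. */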
static int decklink_write_audio_packet(AVFormatContext *avctx, AVPacket *pkt)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    int sample_count = pkt->size / (ctx->channels << 1);
    uint32_t buffered;

    ctx->dlo->GetBufferedAudioSampleFrameCount(&buffered);
    if (pkt->pts > 1 && !buffered)
        av_log(avctx, AV_LOG_WARNING, "There's no buffered audio."
               " Audio will misbehave!\n");

    if (ctx->dlo->ScheduleAudioSamples(pkt->data, sample_count, pkt->pts,
                                       bmdAudioSampleRate48kHz, NULL) != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not schedule audio samples.\n");
        return AVERROR(EIO);
    }

    return 0;
}

extern "C" {

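/* Muxer init: allocate the device context, honour the list_devices and
 * list_formats options, open the output interface and set up one stream of
 * each supported type. */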
av_cold int ff_decklink_write_header(AVFormatContext *avctx)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx;
    unsigned int n;
    int ret;

    ctx = (struct decklink_ctx *) av_mallocz(sizeof(struct decklink_ctx));
    if (!ctx)
        return AVERROR(ENOMEM);
    ctx->list_devices = cctx->list_devices;
    ctx->list_formats = cctx->list_formats;
    ctx->preroll      = cctx->preroll;
    ctx->duplex_mode  = cctx->duplex_mode;
    if (cctx->link > 0 && (unsigned int)cctx->link < FF_ARRAY_ELEMS(decklink_link_conf_map))
        ctx->link = decklink_link_conf_map[cctx->link];
    cctx->ctx = ctx;
#if CONFIG_LIBKLVANC
    if (klvanc_context_create(&ctx->vanc_ctx) < 0) {
        av_log(avctx, AV_LOG_ERROR, "Cannot create VANC library context\n");
        return AVERROR(ENOMEM);
    }
    ctx->supports_vanc = 1;
#endif

    /* List available devices and exit. */
    if (ctx->list_devices) {
        ff_decklink_list_devices_legacy(avctx, 0, 1);
        return AVERROR_EXIT;
    }

    ret = ff_decklink_init_device(avctx, avctx->url);
    if (ret < 0)
        return ret;

    /* Get output device. */
    if (ctx->dl->QueryInterface(IID_IDeckLinkOutput, (void **) &ctx->dlo) != S_OK) {
        av_log(avctx, AV_LOG_ERROR, "Could not open output device from '%s'\n",
               avctx->url);
        ret = AVERROR(EIO);
        goto error;
    }

    /* List supported formats. */
    if (ctx->list_formats) {
        ff_decklink_list_formats(avctx);
        ret = AVERROR_EXIT;
        goto error;
    }

    /* Setup streams. */
    ret = AVERROR(EIO);
    for (n = 0; n < avctx->nb_streams; n++) {
        AVStream *st = avctx->streams[n];
        AVCodecParameters *c = st->codecpar;
        if (c->codec_type == AVMEDIA_TYPE_AUDIO) {
            if (decklink_setup_audio(avctx, st))
                goto error;
        } else if (c->codec_type == AVMEDIA_TYPE_VIDEO) {
            if (decklink_setup_video(avctx, st))
                goto error;
        } else {
            av_log(avctx, AV_LOG_ERROR, "Unsupported stream type.\n");
            goto error;
        }
    }

    return 0;

error:
    ff_decklink_cleanup(avctx);
    return ret;
}

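/* Dispatch a packet to the video or audio path and track the highest pts
 * seen so the trailer can stop playback at the right time. */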
int ff_decklink_write_packet(AVFormatContext *avctx, AVPacket *pkt)
{
    struct decklink_cctx *cctx = (struct decklink_cctx *)avctx->priv_data;
    struct decklink_ctx *ctx = (struct decklink_ctx *)cctx->ctx;
    AVStream *st = avctx->streams[pkt->stream_index];

    ctx->last_pts = FFMAX(ctx->last_pts, pkt->pts);

    if (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
        return decklink_write_video_packet(avctx, pkt);
    else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
        return decklink_write_audio_packet(avctx, pkt);

    return AVERROR(EIO);
}

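/* Device-listing entry point; a thin wrapper around the shared
 * ff_decklink_list_devices() helper. */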
int ff_decklink_list_output_devices(AVFormatContext *avctx, struct AVDeviceInfoList *device_list)
{
    return ff_decklink_list_devices(avctx, device_list, 0, 1);
}

} /* extern "C" */