mpeg12dec.c
1 /*
2  * MPEG-1/2 decoder
3  * Copyright (c) 2000, 2001 Fabrice Bellard
4  * Copyright (c) 2002-2013 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 /**
24  * @file
25  * MPEG-1/2 decoder
26  */
27 
28 #include "config_components.h"
29 
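/* Note (annotation): the bit readers below run without per-read end-of-buffer
 * checks; input packets are padded by the caller (AV_INPUT_BUFFER_PADDING_SIZE)
 * and mpeg_decode_slice() checks get_bits_left() for overreads after each
 * slice, so the unchecked reader is safe and faster here. */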
30 #define UNCHECKED_BITSTREAM_READER 1
31 #include <inttypes.h>
32 #include <stdatomic.h>
33 
34 #include "libavutil/attributes.h"
35 #include "libavutil/emms.h"
36 #include "libavutil/imgutils.h"
37 #include "libavutil/internal.h"
38 #include "libavutil/mem_internal.h"
39 #include "libavutil/reverse.h"
40 #include "libavutil/stereo3d.h"
41 #include "libavutil/timecode.h"
42 
43 #include "avcodec.h"
44 #include "codec_internal.h"
45 #include "decode.h"
46 #include "error_resilience.h"
47 #include "hwaccel_internal.h"
48 #include "hwconfig.h"
49 #include "idctdsp.h"
50 #include "mpeg_er.h"
51 #include "mpeg12.h"
52 #include "mpeg12codecs.h"
53 #include "mpeg12data.h"
54 #include "mpeg12dec.h"
55 #include "mpegutils.h"
56 #include "mpegvideo.h"
57 #include "mpegvideodata.h"
58 #include "mpegvideodec.h"
59 #include "profiles.h"
60 #include "startcode.h"
61 
62 #define A53_MAX_CC_COUNT 2000
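/* Note (annotation): upper bound on buffered closed-caption constructs; the
 * new_size checks below compare against 3 * A53_MAX_CC_COUNT bytes, i.e. one
 * 3-byte cc_data triplet per construct. */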
63 
64 enum Mpeg2ClosedCaptionsFormat {
65     CC_FORMAT_AUTO,
66     CC_FORMAT_A53_PART4,
67     CC_FORMAT_SCTE20,
68     CC_FORMAT_DVD,
69     CC_FORMAT_DISH
70 };
71 
72 typedef struct Mpeg1Context {
73     MpegEncContext mpeg_enc_ctx;
74     AVPanScan pan_scan;              /* some temporary storage for the panscan */
75     AVBufferRef *a53_buf_ref;
76     enum Mpeg2ClosedCaptionsFormat cc_format;
77     enum AVStereo3DType stereo3d_type;
78     int has_stereo3d;
79     uint8_t afd;
80     int has_afd;
81     unsigned aspect_ratio_info;
82     int save_progressive_seq;
83     int save_chroma_format;
84     AVRational frame_rate_ext;       /* MPEG-2 specific framerate modifier */
85     unsigned frame_rate_index;
86     int sync;                        /* Did we reach a sync point like a GOP/SEQ/KEYFrame? */
87     int closed_gop;
88     int tmpgexs;
89     int first_slice;
90     int extradata_decoded;
91     int vbv_delay;
92     int64_t bit_rate;
93     int64_t timecode_frame_start;    /*< GOP timecode frame start number, in non drop frame format */
94 } Mpeg1Context;
95 
96 /* as H.263, but only 17 codes */
97 static int mpeg_decode_motion(MpegEncContext *s, int fcode, int pred)
98 {
99  int code, sign, val, shift;
100 
101  code = get_vlc2(&s->gb, ff_mv_vlc, MV_VLC_BITS, 2);
102  if (code == 0)
103  return pred;
104  if (code < 0)
105  return 0xffff;
106 
107  sign = get_bits1(&s->gb);
108  shift = fcode - 1;
109  val = code;
110  if (shift) {
111  val = (val - 1) << shift;
112  val |= get_bits(&s->gb, shift);
113  val++;
114  }
115  if (sign)
116  val = -val;
117  val += pred;
118 
119  /* modulo decoding */
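    /* Only the low (5 + shift) bits of the sum are significant, so the
     * reconstructed vector wraps into [-16 << shift, (16 << shift) - 1];
     * e.g. with fcode == 1 (shift 0), pred == 14 and a decoded delta of +5,
     * val == 19 wraps to -13. */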
120  return sign_extend(val, 5 + shift);
121 }
122 
123 #define MAX_INDEX (64 - 1)
124 #define check_scantable_index(ctx, x) \
125  do { \
126  if ((x) > MAX_INDEX) { \
127  av_log(ctx->avctx, AV_LOG_ERROR, "ac-tex damaged at %d %d\n", \
128  ctx->mb_x, ctx->mb_y); \
129  return AVERROR_INVALIDDATA; \
130  } \
131  } while (0)
132 
133 static inline int mpeg1_decode_block_inter(MpegEncContext *s,
134                                            int16_t *block, int n)
135 {
136  int level, i, j, run;
137  const uint8_t *const scantable = s->intra_scantable.permutated;
138  const uint16_t *quant_matrix = s->inter_matrix;
139  const int qscale = s->qscale;
140 
141  {
142  OPEN_READER(re, &s->gb);
143  i = -1;
144  // special case for first coefficient, no need to add second VLC table
145  UPDATE_CACHE(re, &s->gb);
146  if (((int32_t) GET_CACHE(re, &s->gb)) < 0) {
147  level = (3 * qscale * quant_matrix[0]) >> 5;
148  level = (level - 1) | 1;
149  if (GET_CACHE(re, &s->gb) & 0x40000000)
150  level = -level;
151  block[0] = level;
152  i++;
153  SKIP_BITS(re, &s->gb, 2);
154  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
155  goto end;
156  }
157     /* now dequantize & decode the AC coefficients */
158  for (;;) {
159  GET_RL_VLC(level, run, re, &s->gb, ff_mpeg1_rl_vlc,
160  TEX_VLC_BITS, 2, 0);
161 
162  if (level != 0) {
163  i += run;
164  if (i > MAX_INDEX)
165  break;
166  j = scantable[i];
167  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
168  level = (level - 1) | 1;
169  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
170  SHOW_SBITS(re, &s->gb, 1);
171  SKIP_BITS(re, &s->gb, 1);
172  } else {
173  /* escape */
174  run = SHOW_UBITS(re, &s->gb, 6) + 1;
175  LAST_SKIP_BITS(re, &s->gb, 6);
176  UPDATE_CACHE(re, &s->gb);
177  level = SHOW_SBITS(re, &s->gb, 8);
178  SKIP_BITS(re, &s->gb, 8);
179  if (level == -128) {
180  level = SHOW_UBITS(re, &s->gb, 8) - 256;
181  SKIP_BITS(re, &s->gb, 8);
182  } else if (level == 0) {
183  level = SHOW_UBITS(re, &s->gb, 8);
184  SKIP_BITS(re, &s->gb, 8);
185  }
186  i += run;
187  if (i > MAX_INDEX)
188  break;
189  j = scantable[i];
190  if (level < 0) {
191  level = -level;
192  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
193  level = (level - 1) | 1;
194  level = -level;
195  } else {
196  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
197  level = (level - 1) | 1;
198  }
199  }
200 
201  block[j] = level;
202  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
203  break;
204  UPDATE_CACHE(re, &s->gb);
205  }
206 end:
207  LAST_SKIP_BITS(re, &s->gb, 2);
208  CLOSE_READER(re, &s->gb);
209  }
210 
211     check_scantable_index(s, i);
212 
213  s->block_last_index[n] = i;
214  return 0;
215 }
216 
217 static inline int mpeg2_decode_block_non_intra(MpegEncContext *s,
218                                                int16_t *block, int n)
219 {
220  int level, i, j, run;
221  const uint8_t *const scantable = s->intra_scantable.permutated;
222  const uint16_t *quant_matrix;
223  const int qscale = s->qscale;
224  int mismatch;
225 
226  mismatch = 1;
227 
228  {
229  OPEN_READER(re, &s->gb);
230  i = -1;
231  if (n < 4)
232  quant_matrix = s->inter_matrix;
233  else
234  quant_matrix = s->chroma_inter_matrix;
235 
236  // Special case for first coefficient, no need to add second VLC table.
237  UPDATE_CACHE(re, &s->gb);
238  if (((int32_t) GET_CACHE(re, &s->gb)) < 0) {
239  level = (3 * qscale * quant_matrix[0]) >> 5;
240  if (GET_CACHE(re, &s->gb) & 0x40000000)
241  level = -level;
242  block[0] = level;
243  mismatch ^= level;
244  i++;
245  SKIP_BITS(re, &s->gb, 2);
246  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
247  goto end;
248  }
249 
250     /* now dequantize & decode the AC coefficients */
251  for (;;) {
252  GET_RL_VLC(level, run, re, &s->gb, ff_mpeg1_rl_vlc,
253  TEX_VLC_BITS, 2, 0);
254 
255  if (level != 0) {
256  i += run;
257  if (i > MAX_INDEX)
258  break;
259  j = scantable[i];
260  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
261  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
262  SHOW_SBITS(re, &s->gb, 1);
263  SKIP_BITS(re, &s->gb, 1);
264  } else {
265  /* escape */
266  run = SHOW_UBITS(re, &s->gb, 6) + 1;
267  LAST_SKIP_BITS(re, &s->gb, 6);
268  UPDATE_CACHE(re, &s->gb);
269  level = SHOW_SBITS(re, &s->gb, 12);
270  SKIP_BITS(re, &s->gb, 12);
271 
272  i += run;
273  if (i > MAX_INDEX)
274  break;
275  j = scantable[i];
276  if (level < 0) {
277  level = ((-level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
278  level = -level;
279  } else {
280  level = ((level * 2 + 1) * qscale * quant_matrix[j]) >> 5;
281  }
282  }
283 
284  mismatch ^= level;
285  block[j] = level;
286  if (((int32_t) GET_CACHE(re, &s->gb)) <= (int32_t) 0xBFFFFFFF)
287  break;
288  UPDATE_CACHE(re, &s->gb);
289  }
290 end:
291  LAST_SKIP_BITS(re, &s->gb, 2);
292  CLOSE_READER(re, &s->gb);
293  }
294  block[63] ^= (mismatch & 1);
295 
296     check_scantable_index(s, i);
297 
298  s->block_last_index[n] = i;
299  return 0;
300 }
301 
302 static inline int mpeg2_decode_block_intra(MpegEncContext *s,
303                                            int16_t *block, int n)
304 {
305  int level, dc, diff, i, j, run;
306  int component;
307  const RL_VLC_ELEM *rl_vlc;
308  const uint8_t *const scantable = s->intra_scantable.permutated;
309  const uint16_t *quant_matrix;
310  const int qscale = s->qscale;
311  int mismatch;
312 
313  /* DC coefficient */
314  if (n < 4) {
315  quant_matrix = s->intra_matrix;
316  component = 0;
317  } else {
318  quant_matrix = s->chroma_intra_matrix;
319  component = (n & 1) + 1;
320  }
321  diff = decode_dc(&s->gb, component);
322  dc = s->last_dc[component];
323  dc += diff;
324  s->last_dc[component] = dc;
325  block[0] = dc * (1 << (3 - s->intra_dc_precision));
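    /* intra_dc_precision is 0..3 for 8..11-bit DC; scaling by 2^(3 - precision)
     * brings every precision to the same 11-bit scale before the IDCT. */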
326  ff_tlog(s->avctx, "dc=%d\n", block[0]);
327  mismatch = block[0] ^ 1;
328  i = 0;
329     if (s->intra_vlc_format)
330         rl_vlc = ff_mpeg2_rl_vlc;
331     else
332         rl_vlc = ff_mpeg1_rl_vlc;
333 
334  {
335  OPEN_READER(re, &s->gb);
336         /* now dequantize & decode the AC coefficients */
337  for (;;) {
338  UPDATE_CACHE(re, &s->gb);
339  GET_RL_VLC(level, run, re, &s->gb, rl_vlc,
340  TEX_VLC_BITS, 2, 0);
341 
342  if (level == 127) {
343  break;
344  } else if (level != 0) {
345  i += run;
346  if (i > MAX_INDEX)
347  break;
348  j = scantable[i];
349  level = (level * qscale * quant_matrix[j]) >> 4;
350  level = (level ^ SHOW_SBITS(re, &s->gb, 1)) -
351  SHOW_SBITS(re, &s->gb, 1);
352  LAST_SKIP_BITS(re, &s->gb, 1);
353  } else {
354  /* escape */
355  run = SHOW_UBITS(re, &s->gb, 6) + 1;
356  SKIP_BITS(re, &s->gb, 6);
357  level = SHOW_SBITS(re, &s->gb, 12);
358  LAST_SKIP_BITS(re, &s->gb, 12);
359  i += run;
360  if (i > MAX_INDEX)
361  break;
362  j = scantable[i];
363  if (level < 0) {
364  level = (-level * qscale * quant_matrix[j]) >> 4;
365  level = -level;
366  } else {
367  level = (level * qscale * quant_matrix[j]) >> 4;
368  }
369  }
370 
371  mismatch ^= level;
372  block[j] = level;
373  }
374  CLOSE_READER(re, &s->gb);
375  }
376  block[63] ^= mismatch & 1;
377 
378     check_scantable_index(s, i);
379 
380  s->block_last_index[n] = i;
381  return 0;
382 }
383 
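/* Read one dual-prime differential motion vector component: a single '0' bit
 * codes 0, '10' codes +1 and '11' codes -1 (the dmvector VLC of MPEG-2). */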
384 static inline int get_dmv(MpegEncContext *s)
385 {
386  if (get_bits1(&s->gb))
387  return 1 - (get_bits1(&s->gb) << 1);
388  else
389  return 0;
390 }
391 
392 /* motion type (for MPEG-2) */
393 #define MT_FIELD 1
394 #define MT_FRAME 2
395 #define MT_16X8 2
396 #define MT_DMV 3
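/* MT_FRAME and MT_16X8 intentionally share the value 2: motion_type code 2
 * means frame prediction in frame pictures and 16x8 prediction in field
 * pictures, so the meaning is resolved from picture_structure where used. */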
397 
398 static int mpeg_decode_mb(MpegEncContext *s, int16_t block[12][64])
399 {
400  int i, j, k, cbp, val, mb_type, motion_type;
401  const int mb_block_count = 4 + (1 << s->chroma_format);
402  int ret;
403 
404  ff_tlog(s->avctx, "decode_mb: x=%d y=%d\n", s->mb_x, s->mb_y);
405 
406  av_assert2(s->mb_skipped == 0);
407 
408  if (s->mb_skip_run-- != 0) {
409  if (s->pict_type == AV_PICTURE_TYPE_P) {
410  s->mb_skipped = 1;
411             s->cur_pic.mb_type[s->mb_x + s->mb_y * s->mb_stride] =
412                 MB_TYPE_SKIP | MB_TYPE_FORWARD_MV | MB_TYPE_16x16;
413  } else {
414  int mb_type;
415 
416  if (s->mb_x)
417  mb_type = s->cur_pic.mb_type[s->mb_x + s->mb_y * s->mb_stride - 1];
418  else
419  // FIXME not sure if this is allowed in MPEG at all
420  mb_type = s->cur_pic.mb_type[s->mb_width + (s->mb_y - 1) * s->mb_stride - 1];
421  if (IS_INTRA(mb_type)) {
422  av_log(s->avctx, AV_LOG_ERROR, "skip with previntra\n");
423  return AVERROR_INVALIDDATA;
424  }
425  s->cur_pic.mb_type[s->mb_x + s->mb_y * s->mb_stride] =
426  mb_type | MB_TYPE_SKIP;
427 
428  if ((s->mv[0][0][0] | s->mv[0][0][1] | s->mv[1][0][0] | s->mv[1][0][1]) == 0)
429  s->mb_skipped = 1;
430  }
431 
432  return 0;
433  }
434 
435  switch (s->pict_type) {
436  default:
437  case AV_PICTURE_TYPE_I:
438  if (get_bits1(&s->gb) == 0) {
439  if (get_bits1(&s->gb) == 0) {
440  av_log(s->avctx, AV_LOG_ERROR,
441  "Invalid mb type in I-frame at %d %d\n",
442  s->mb_x, s->mb_y);
443  return AVERROR_INVALIDDATA;
444  }
445  mb_type = MB_TYPE_QUANT | MB_TYPE_INTRA;
446  } else {
447  mb_type = MB_TYPE_INTRA;
448  }
449  break;
450  case AV_PICTURE_TYPE_P:
451  mb_type = get_vlc2(&s->gb, ff_mb_ptype_vlc, MB_PTYPE_VLC_BITS, 1);
452  if (mb_type < 0) {
453  av_log(s->avctx, AV_LOG_ERROR,
454  "Invalid mb type in P-frame at %d %d\n", s->mb_x, s->mb_y);
455  return AVERROR_INVALIDDATA;
456  }
457  break;
458  case AV_PICTURE_TYPE_B:
459  mb_type = get_vlc2(&s->gb, ff_mb_btype_vlc, MB_BTYPE_VLC_BITS, 1);
460  if (mb_type < 0) {
461  av_log(s->avctx, AV_LOG_ERROR,
462  "Invalid mb type in B-frame at %d %d\n", s->mb_x, s->mb_y);
463  return AVERROR_INVALIDDATA;
464  }
465  break;
466  }
467  ff_tlog(s->avctx, "mb_type=%x\n", mb_type);
468 // motion_type = 0; /* avoid warning */
469  if (IS_INTRA(mb_type)) {
470  s->bdsp.clear_blocks(s->block[0]);
471 
472  if (!s->chroma_y_shift)
473  s->bdsp.clear_blocks(s->block[6]);
474 
475  /* compute DCT type */
476  // FIXME: add an interlaced_dct coded var?
477  if (s->picture_structure == PICT_FRAME &&
478  !s->frame_pred_frame_dct)
479  s->interlaced_dct = get_bits1(&s->gb);
480 
481  if (IS_QUANT(mb_type))
482  s->qscale = mpeg_get_qscale(s);
483 
484  if (s->concealment_motion_vectors) {
485  /* just parse them */
486  if (s->picture_structure != PICT_FRAME)
487  skip_bits1(&s->gb); /* field select */
488 
489  s->mv[0][0][0] =
490  s->last_mv[0][0][0] =
491  s->last_mv[0][1][0] = mpeg_decode_motion(s, s->mpeg_f_code[0][0],
492  s->last_mv[0][0][0]);
493  s->mv[0][0][1] =
494  s->last_mv[0][0][1] =
495  s->last_mv[0][1][1] = mpeg_decode_motion(s, s->mpeg_f_code[0][1],
496  s->last_mv[0][0][1]);
497 
498  check_marker(s->avctx, &s->gb, "after concealment_motion_vectors");
499  } else {
500  /* reset mv prediction */
501  memset(s->last_mv, 0, sizeof(s->last_mv));
502  }
503  s->mb_intra = 1;
504 
505  if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
506  for (i = 0; i < mb_block_count; i++)
507  if ((ret = mpeg2_decode_block_intra(s, s->block[i], i)) < 0)
508  return ret;
509  } else {
510  for (i = 0; i < 6; i++) {
511                 ret = ff_mpeg1_decode_block_intra(&s->gb,
512                                                   s->intra_matrix,
513  s->intra_scantable.permutated,
514  s->last_dc, s->block[i],
515  i, s->qscale);
516  if (ret < 0) {
517  av_log(s->avctx, AV_LOG_ERROR, "ac-tex damaged at %d %d\n",
518  s->mb_x, s->mb_y);
519  return ret;
520  }
521 
522  s->block_last_index[i] = ret;
523  }
524  }
525  } else {
526  if (mb_type & MB_TYPE_ZERO_MV) {
527  av_assert2(mb_type & MB_TYPE_CBP);
528 
529  s->mv_dir = MV_DIR_FORWARD;
530  if (s->picture_structure == PICT_FRAME) {
531  if (s->picture_structure == PICT_FRAME
532  && !s->frame_pred_frame_dct)
533  s->interlaced_dct = get_bits1(&s->gb);
534  s->mv_type = MV_TYPE_16X16;
535  } else {
536  s->mv_type = MV_TYPE_FIELD;
537  mb_type |= MB_TYPE_INTERLACED;
538  s->field_select[0][0] = s->picture_structure - 1;
539  }
540 
541  if (IS_QUANT(mb_type))
542  s->qscale = mpeg_get_qscale(s);
543 
544  s->last_mv[0][0][0] = 0;
545  s->last_mv[0][0][1] = 0;
546  s->last_mv[0][1][0] = 0;
547  s->last_mv[0][1][1] = 0;
548  s->mv[0][0][0] = 0;
549  s->mv[0][0][1] = 0;
550  } else {
551  av_assert2(mb_type & MB_TYPE_BIDIR_MV);
552  // FIXME decide if MBs in field pictures are MB_TYPE_INTERLACED
553  /* get additional motion vector type */
554  if (s->picture_structure == PICT_FRAME && s->frame_pred_frame_dct) {
555  motion_type = MT_FRAME;
556  } else {
557  motion_type = get_bits(&s->gb, 2);
558  if (s->picture_structure == PICT_FRAME && HAS_CBP(mb_type))
559  s->interlaced_dct = get_bits1(&s->gb);
560  }
561 
562  if (IS_QUANT(mb_type))
563  s->qscale = mpeg_get_qscale(s);
564 
565  /* motion vectors */
566  s->mv_dir = MB_TYPE_MV_2_MV_DIR(mb_type);
567  ff_tlog(s->avctx, "motion_type=%d\n", motion_type);
568  switch (motion_type) {
569  case MT_FRAME: /* or MT_16X8 */
570  if (s->picture_structure == PICT_FRAME) {
571  mb_type |= MB_TYPE_16x16;
572  s->mv_type = MV_TYPE_16X16;
573  for (i = 0; i < 2; i++) {
574  if (HAS_MV(mb_type, i)) {
575  /* MT_FRAME */
576  s->mv[i][0][0] =
577  s->last_mv[i][0][0] =
578  s->last_mv[i][1][0] =
579  mpeg_decode_motion(s, s->mpeg_f_code[i][0],
580  s->last_mv[i][0][0]);
581  s->mv[i][0][1] =
582  s->last_mv[i][0][1] =
583  s->last_mv[i][1][1] =
584  mpeg_decode_motion(s, s->mpeg_f_code[i][1],
585  s->last_mv[i][0][1]);
586  /* full_pel: only for MPEG-1 */
587  if (s->full_pel[i]) {
588  s->mv[i][0][0] *= 2;
589  s->mv[i][0][1] *= 2;
590  }
591  }
592  }
593  } else {
594  mb_type |= MB_TYPE_16x8 | MB_TYPE_INTERLACED;
595  s->mv_type = MV_TYPE_16X8;
596  for (i = 0; i < 2; i++) {
597  if (HAS_MV(mb_type, i)) {
598  /* MT_16X8 */
599  for (j = 0; j < 2; j++) {
600  s->field_select[i][j] = get_bits1(&s->gb);
601  for (k = 0; k < 2; k++) {
602  val = mpeg_decode_motion(s, s->mpeg_f_code[i][k],
603  s->last_mv[i][j][k]);
604  s->last_mv[i][j][k] = val;
605  s->mv[i][j][k] = val;
606  }
607  }
608  }
609  }
610  }
611  break;
612  case MT_FIELD:
613  s->mv_type = MV_TYPE_FIELD;
614  if (s->picture_structure == PICT_FRAME) {
615  mb_type |= MB_TYPE_16x8 | MB_TYPE_INTERLACED;
616  for (i = 0; i < 2; i++) {
617  if (HAS_MV(mb_type, i)) {
618  for (j = 0; j < 2; j++) {
619  s->field_select[i][j] = get_bits1(&s->gb);
620  val = mpeg_decode_motion(s, s->mpeg_f_code[i][0],
621  s->last_mv[i][j][0]);
622  s->last_mv[i][j][0] = val;
623  s->mv[i][j][0] = val;
624  ff_tlog(s->avctx, "fmx=%d\n", val);
625  val = mpeg_decode_motion(s, s->mpeg_f_code[i][1],
626  s->last_mv[i][j][1] >> 1);
627  s->last_mv[i][j][1] = 2 * val;
628  s->mv[i][j][1] = val;
629  ff_tlog(s->avctx, "fmy=%d\n", val);
630  }
631  }
632  }
633  } else {
634  av_assert0(!s->progressive_sequence);
635  mb_type |= MB_TYPE_16x16 | MB_TYPE_INTERLACED;
636  for (i = 0; i < 2; i++) {
637  if (HAS_MV(mb_type, i)) {
638  s->field_select[i][0] = get_bits1(&s->gb);
639  for (k = 0; k < 2; k++) {
640  val = mpeg_decode_motion(s, s->mpeg_f_code[i][k],
641  s->last_mv[i][0][k]);
642  s->last_mv[i][0][k] = val;
643  s->last_mv[i][1][k] = val;
644  s->mv[i][0][k] = val;
645  }
646  }
647  }
648  }
649  break;
650  case MT_DMV:
651  if (s->progressive_sequence){
652  av_log(s->avctx, AV_LOG_ERROR, "MT_DMV in progressive_sequence\n");
653  return AVERROR_INVALIDDATA;
654  }
655  s->mv_type = MV_TYPE_DMV;
656  for (i = 0; i < 2; i++) {
657  if (HAS_MV(mb_type, i)) {
658  int dmx, dmy, mx, my, m;
659  const int my_shift = s->picture_structure == PICT_FRAME;
660 
661  mx = mpeg_decode_motion(s, s->mpeg_f_code[i][0],
662  s->last_mv[i][0][0]);
663  s->last_mv[i][0][0] = mx;
664  s->last_mv[i][1][0] = mx;
665  dmx = get_dmv(s);
666  my = mpeg_decode_motion(s, s->mpeg_f_code[i][1],
667  s->last_mv[i][0][1] >> my_shift);
668  dmy = get_dmv(s);
669 
670 
671  s->last_mv[i][0][1] = my * (1 << my_shift);
672  s->last_mv[i][1][1] = my * (1 << my_shift);
673 
674  s->mv[i][0][0] = mx;
675  s->mv[i][0][1] = my;
676  s->mv[i][1][0] = mx; // not used
677  s->mv[i][1][1] = my; // not used
678 
679  if (s->picture_structure == PICT_FRAME) {
680  mb_type |= MB_TYPE_16x16 | MB_TYPE_INTERLACED;
681 
682  // m = 1 + 2 * s->top_field_first;
683  m = s->top_field_first ? 1 : 3;
684 
685  /* top -> top pred */
686  s->mv[i][2][0] = ((mx * m + (mx > 0)) >> 1) + dmx;
687  s->mv[i][2][1] = ((my * m + (my > 0)) >> 1) + dmy - 1;
688  m = 4 - m;
689  s->mv[i][3][0] = ((mx * m + (mx > 0)) >> 1) + dmx;
690  s->mv[i][3][1] = ((my * m + (my > 0)) >> 1) + dmy + 1;
691  } else {
692  mb_type |= MB_TYPE_16x16;
693 
694  s->mv[i][2][0] = ((mx + (mx > 0)) >> 1) + dmx;
695  s->mv[i][2][1] = ((my + (my > 0)) >> 1) + dmy;
696  if (s->picture_structure == PICT_TOP_FIELD)
697  s->mv[i][2][1]--;
698  else
699  s->mv[i][2][1]++;
700  }
701  }
702  }
703  break;
704  default:
705  av_log(s->avctx, AV_LOG_ERROR,
706  "00 motion_type at %d %d\n", s->mb_x, s->mb_y);
707  return AVERROR_INVALIDDATA;
708  }
709  }
710 
711  s->mb_intra = 0;
712  s->last_dc[0] = s->last_dc[1] = s->last_dc[2] = 128 << s->intra_dc_precision;
713  if (HAS_CBP(mb_type)) {
714  s->bdsp.clear_blocks(s->block[0]);
715 
716  cbp = get_vlc2(&s->gb, ff_mb_pat_vlc, MB_PAT_VLC_BITS, 1);
717  if (mb_block_count > 6) {
718  cbp *= 1 << mb_block_count - 6;
719  cbp |= get_bits(&s->gb, mb_block_count - 6);
720  s->bdsp.clear_blocks(s->block[6]);
721  }
722  if (cbp <= 0) {
723  av_log(s->avctx, AV_LOG_ERROR,
724  "invalid cbp %d at %d %d\n", cbp, s->mb_x, s->mb_y);
725  return AVERROR_INVALIDDATA;
726  }
727 
728  if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
729  cbp <<= 12 - mb_block_count;
730 
731  for (i = 0; i < mb_block_count; i++) {
732  if (cbp & (1 << 11)) {
733  if ((ret = mpeg2_decode_block_non_intra(s, s->block[i], i)) < 0)
734  return ret;
735  } else {
736  s->block_last_index[i] = -1;
737  }
738  cbp += cbp;
739  }
740  } else {
741  for (i = 0; i < 6; i++) {
742  if (cbp & 32) {
743  if ((ret = mpeg1_decode_block_inter(s, s->block[i], i)) < 0)
744  return ret;
745  } else {
746  s->block_last_index[i] = -1;
747  }
748  cbp += cbp;
749  }
750  }
751  } else {
752  for (i = 0; i < 12; i++)
753  s->block_last_index[i] = -1;
754  }
755  }
756 
757  s->cur_pic.mb_type[s->mb_x + s->mb_y * s->mb_stride] = mb_type;
758 
759  return 0;
760 }
761 
762 static av_cold int mpeg_decode_init(AVCodecContext *avctx)
763 {
764  Mpeg1Context *s = avctx->priv_data;
765  MpegEncContext *s2 = &s->mpeg_enc_ctx;
766  int ret;
767 
768  s2->out_format = FMT_MPEG1;
769 
770  if ( avctx->codec_tag != AV_RL32("VCR2")
771  && avctx->codec_tag != AV_RL32("BW10"))
772  avctx->coded_width = avctx->coded_height = 0; // do not trust dimensions from input
773  ret = ff_mpv_decode_init(s2, avctx);
774  if (ret < 0)
775  return ret;
776 
777     ff_mpeg12_init_vlcs();
778 
779     s2->chroma_format = CHROMA_420;
780  avctx->color_range = AVCOL_RANGE_MPEG;
781  return 0;
782 }
783 
784 static const enum AVPixelFormat mpeg1_hwaccel_pixfmt_list_420[] = {
785 #if CONFIG_MPEG1_NVDEC_HWACCEL
786     AV_PIX_FMT_CUDA,
787 #endif
788 #if CONFIG_MPEG1_VDPAU_HWACCEL
789     AV_PIX_FMT_VDPAU,
790 #endif
791     AV_PIX_FMT_YUV420P,
792     AV_PIX_FMT_NONE
793 };
794 
795 static const enum AVPixelFormat mpeg2_hwaccel_pixfmt_list_420[] = {
796 #if CONFIG_MPEG2_NVDEC_HWACCEL
797     AV_PIX_FMT_CUDA,
798 #endif
799 #if CONFIG_MPEG2_VDPAU_HWACCEL
800     AV_PIX_FMT_VDPAU,
801 #endif
802 #if CONFIG_MPEG2_DXVA2_HWACCEL
803     AV_PIX_FMT_DXVA2_VLD,
804 #endif
805 #if CONFIG_MPEG2_D3D11VA_HWACCEL
806     AV_PIX_FMT_D3D11VA_VLD,
807     AV_PIX_FMT_D3D11,
808 #endif
809 #if CONFIG_MPEG2_D3D12VA_HWACCEL
810     AV_PIX_FMT_D3D12,
811 #endif
812 #if CONFIG_MPEG2_VAAPI_HWACCEL
813     AV_PIX_FMT_VAAPI,
814 #endif
815 #if CONFIG_MPEG2_VIDEOTOOLBOX_HWACCEL
816     AV_PIX_FMT_VIDEOTOOLBOX,
817 #endif
818     AV_PIX_FMT_YUV420P,
819     AV_PIX_FMT_NONE
820 };
821 
822 static const enum AVPixelFormat mpeg12_pixfmt_list_422[] = {
823     AV_PIX_FMT_YUV422P,
824     AV_PIX_FMT_NONE
825 };
826 
827 static const enum AVPixelFormat mpeg12_pixfmt_list_444[] = {
828     AV_PIX_FMT_YUV444P,
829     AV_PIX_FMT_NONE
830 };
831 
832 static enum AVPixelFormat mpeg_get_pixelformat(AVCodecContext *avctx)
833 {
834  Mpeg1Context *s1 = avctx->priv_data;
835  MpegEncContext *s = &s1->mpeg_enc_ctx;
836  const enum AVPixelFormat *pix_fmts;
837 
838  if (CONFIG_GRAY && (avctx->flags & AV_CODEC_FLAG_GRAY))
839  return AV_PIX_FMT_GRAY8;
840 
841     if (s->chroma_format < CHROMA_422)
842         pix_fmts = avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO || s->codec_tag == AV_RL32("VCR2") ?
843                    mpeg1_hwaccel_pixfmt_list_420 :
844                    mpeg2_hwaccel_pixfmt_list_420;
845     else if (s->chroma_format == CHROMA_422)
846         pix_fmts = mpeg12_pixfmt_list_422;
847     else
848         pix_fmts = mpeg12_pixfmt_list_444;
849 
850  return ff_get_format(avctx, pix_fmts);
851 }
852 
853 /* Call this function when we know all parameters.
854  * It may be called in different places for MPEG-1 and MPEG-2. */
855 static int mpeg_decode_postinit(AVCodecContext *avctx)
856 {
857  Mpeg1Context *s1 = avctx->priv_data;
858  MpegEncContext *s = &s1->mpeg_enc_ctx;
859  int ret;
860 
861  if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
862  // MPEG-1 aspect
863  AVRational aspect_inv = av_d2q(ff_mpeg1_aspect[s1->aspect_ratio_info], 255);
864  avctx->sample_aspect_ratio = (AVRational) { aspect_inv.den, aspect_inv.num };
865  } else { // MPEG-2
866  // MPEG-2 aspect
867  if (s1->aspect_ratio_info > 1) {
868             AVRational dar =
869                 av_mul_q(av_div_q(ff_mpeg2_aspect[s1->aspect_ratio_info],
870  (AVRational) { s1->pan_scan.width,
871  s1->pan_scan.height }),
872  (AVRational) { s->width, s->height });
873 
874  /* We ignore the spec here and guess a bit as reality does not
875  * match the spec, see for example res_change_ffmpeg_aspect.ts
876  * and sequence-display-aspect.mpg.
877  * issue1613, 621, 562 */
878  if ((s1->pan_scan.width == 0) || (s1->pan_scan.height == 0) ||
879  (av_cmp_q(dar, (AVRational) { 4, 3 }) &&
880  av_cmp_q(dar, (AVRational) { 16, 9 }))) {
881                 s->avctx->sample_aspect_ratio =
882                     av_div_q(ff_mpeg2_aspect[s1->aspect_ratio_info],
883  (AVRational) { s->width, s->height });
884  } else {
885                 s->avctx->sample_aspect_ratio =
886                     av_div_q(ff_mpeg2_aspect[s1->aspect_ratio_info],
887  (AVRational) { s1->pan_scan.width, s1->pan_scan.height });
888 // issue1613 4/3 16/9 -> 16/9
889 // res_change_ffmpeg_aspect.ts 4/3 225/44 ->4/3
890 // widescreen-issue562.mpg 4/3 16/9 -> 16/9
891 // s->avctx->sample_aspect_ratio = av_mul_q(s->avctx->sample_aspect_ratio, (AVRational) {s->width, s->height});
892                 ff_dlog(avctx, "aspect A %d/%d\n",
893                         ff_mpeg2_aspect[s1->aspect_ratio_info].num,
894                         ff_mpeg2_aspect[s1->aspect_ratio_info].den);
895  ff_dlog(avctx, "aspect B %d/%d\n", s->avctx->sample_aspect_ratio.num,
896  s->avctx->sample_aspect_ratio.den);
897  }
898  } else {
899             s->avctx->sample_aspect_ratio =
900                 ff_mpeg2_aspect[s1->aspect_ratio_info];
901  }
902  } // MPEG-2
903 
904  if (av_image_check_sar(s->width, s->height,
905  avctx->sample_aspect_ratio) < 0) {
906  av_log(avctx, AV_LOG_WARNING, "ignoring invalid SAR: %u/%u\n",
907  avctx->sample_aspect_ratio.num,
908  avctx->sample_aspect_ratio.den);
909  avctx->sample_aspect_ratio = (AVRational){ 0, 1 };
910  }
911 
912  if (!s->context_initialized ||
913  avctx->coded_width != s->width ||
914  avctx->coded_height != s->height ||
915  s1->save_chroma_format != s->chroma_format ||
916  (s1->save_progressive_seq != s->progressive_sequence && FFALIGN(s->height, 16) != FFALIGN(s->height, 32)) ||
917  0) {
918         if (s->context_initialized)
919             ff_mpv_common_end(s);
920 
921  ret = ff_set_dimensions(avctx, s->width, s->height);
922  if (ret < 0)
923  return ret;
924 
925  if (avctx->codec_id == AV_CODEC_ID_MPEG2VIDEO && s1->bit_rate &&
926  (s1->bit_rate != 0x3FFFF*400)) {
927  avctx->rc_max_rate = s1->bit_rate;
928  } else if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO && s1->bit_rate &&
929  (s1->bit_rate != 0x3FFFF*400 || s1->vbv_delay != 0xFFFF)) {
930  avctx->bit_rate = s1->bit_rate;
931  }
932  s1->save_progressive_seq = s->progressive_sequence;
933  s1->save_chroma_format = s->chroma_format;
934 
935  /* low_delay may be forced, in this case we will have B-frames
936  * that behave like P-frames. */
937  avctx->has_b_frames = !s->low_delay;
938 
939  if (avctx->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
940             // MPEG-1 fps
941             avctx->framerate = ff_mpeg12_frame_rate_tab[s1->frame_rate_index];
943  } else { // MPEG-2
944  // MPEG-2 fps
945  av_reduce(&s->avctx->framerate.num,
946                       &s->avctx->framerate.den,
947                       ff_mpeg12_frame_rate_tab[s1->frame_rate_index].num * s1->frame_rate_ext.num,
948                       ff_mpeg12_frame_rate_tab[s1->frame_rate_index].den * s1->frame_rate_ext.den,
949  1 << 30);
950 
951  switch (s->chroma_format) {
952             case CHROMA_420: avctx->chroma_sample_location = AVCHROMA_LOC_LEFT;    break;
953             case CHROMA_422:
954             case CHROMA_444: avctx->chroma_sample_location = AVCHROMA_LOC_TOPLEFT; break;
955  default: av_assert0(0);
956  }
957  } // MPEG-2
958 
959  avctx->pix_fmt = mpeg_get_pixelformat(avctx);
960 
961  if ((ret = ff_mpv_common_init(s)) < 0)
962  return ret;
963  if (!s->avctx->lowres)
964  for (int i = 0; i < s->slice_context_count; i++)
965  ff_mpv_framesize_disable(&s->thread_context[i]->sc);
966  }
967  return 0;
968 }
969 
970 static int mpeg1_decode_picture(AVCodecContext *avctx, const uint8_t *buf,
971  int buf_size)
972 {
973  Mpeg1Context *s1 = avctx->priv_data;
974  MpegEncContext *s = &s1->mpeg_enc_ctx;
975  int ref, f_code, vbv_delay, ret;
976 
977  ret = init_get_bits8(&s->gb, buf, buf_size);
978  if (ret < 0)
979  return ret;
980 
981  ref = get_bits(&s->gb, 10); /* temporal ref */
982  s->pict_type = get_bits(&s->gb, 3);
983  if (s->pict_type == 0 || s->pict_type > 3)
984  return AVERROR_INVALIDDATA;
985 
986  vbv_delay = get_bits(&s->gb, 16);
987  s1->vbv_delay = vbv_delay;
988  if (s->pict_type == AV_PICTURE_TYPE_P ||
989  s->pict_type == AV_PICTURE_TYPE_B) {
990  s->full_pel[0] = get_bits1(&s->gb);
991  f_code = get_bits(&s->gb, 3);
992  if (f_code == 0 && (avctx->err_recognition & (AV_EF_BITSTREAM|AV_EF_COMPLIANT)))
993  return AVERROR_INVALIDDATA;
994  f_code += !f_code;
995  s->mpeg_f_code[0][0] = f_code;
996  s->mpeg_f_code[0][1] = f_code;
997  }
998  if (s->pict_type == AV_PICTURE_TYPE_B) {
999  s->full_pel[1] = get_bits1(&s->gb);
1000  f_code = get_bits(&s->gb, 3);
1001  if (f_code == 0 && (avctx->err_recognition & (AV_EF_BITSTREAM|AV_EF_COMPLIANT)))
1002  return AVERROR_INVALIDDATA;
1003  f_code += !f_code;
1004  s->mpeg_f_code[1][0] = f_code;
1005  s->mpeg_f_code[1][1] = f_code;
1006  }
1007 
1008  if (avctx->debug & FF_DEBUG_PICT_INFO)
1009  av_log(avctx, AV_LOG_DEBUG,
1010  "vbv_delay %d, ref %d type:%d\n", vbv_delay, ref, s->pict_type);
1011 
1012  return 0;
1013 }
1014 
1015 static void mpeg_decode_sequence_extension(Mpeg1Context *s1)
1016 {
1017  MpegEncContext *s = &s1->mpeg_enc_ctx;
1018  int horiz_size_ext, vert_size_ext;
1019  int bit_rate_ext;
1020 
1021  skip_bits(&s->gb, 1); /* profile and level esc*/
1022  s->avctx->profile = get_bits(&s->gb, 3);
1023  s->avctx->level = get_bits(&s->gb, 4);
1024  s->progressive_sequence = get_bits1(&s->gb); /* progressive_sequence */
1025  s->chroma_format = get_bits(&s->gb, 2); /* chroma_format 1=420, 2=422, 3=444 */
1026 
1027  if (!s->chroma_format) {
1028  s->chroma_format = CHROMA_420;
1029  av_log(s->avctx, AV_LOG_WARNING, "Chroma format invalid\n");
1030  }
1031 
1032  horiz_size_ext = get_bits(&s->gb, 2);
1033  vert_size_ext = get_bits(&s->gb, 2);
1034  s->width |= (horiz_size_ext << 12);
1035  s->height |= (vert_size_ext << 12);
1036  bit_rate_ext = get_bits(&s->gb, 12); /* XXX: handle it */
1037  s1->bit_rate += (bit_rate_ext << 18) * 400LL;
1038  check_marker(s->avctx, &s->gb, "after bit rate extension");
1039  s->avctx->rc_buffer_size += get_bits(&s->gb, 8) * 1024 * 16 << 10;
1040 
1041  s->low_delay = get_bits1(&s->gb);
1042  if (s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)
1043  s->low_delay = 1;
1044 
1045  s1->frame_rate_ext.num = get_bits(&s->gb, 2) + 1;
1046  s1->frame_rate_ext.den = get_bits(&s->gb, 5) + 1;
1047 
1048  ff_dlog(s->avctx, "sequence extension\n");
1049  s->codec_id = s->avctx->codec_id = AV_CODEC_ID_MPEG2VIDEO;
1050 
1051  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1052  av_log(s->avctx, AV_LOG_DEBUG,
1053  "profile: %d, level: %d ps: %d cf:%d vbv buffer: %d, bitrate:%"PRId64"\n",
1054  s->avctx->profile, s->avctx->level, s->progressive_sequence, s->chroma_format,
1055  s->avctx->rc_buffer_size, s1->bit_rate);
1056 }
1057 
1058 static void mpeg_decode_sequence_display_extension(Mpeg1Context *s1)
1059 {
1060  MpegEncContext *s = &s1->mpeg_enc_ctx;
1061  int color_description, w, h;
1062 
1063  skip_bits(&s->gb, 3); /* video format */
1064  color_description = get_bits1(&s->gb);
1065  if (color_description) {
1066  s->avctx->color_primaries = get_bits(&s->gb, 8);
1067  s->avctx->color_trc = get_bits(&s->gb, 8);
1068  s->avctx->colorspace = get_bits(&s->gb, 8);
1069  }
1070  w = get_bits(&s->gb, 14);
1071  skip_bits(&s->gb, 1); // marker
1072  h = get_bits(&s->gb, 14);
1073  // remaining 3 bits are zero padding
1074 
1075  s1->pan_scan.width = 16 * w;
1076  s1->pan_scan.height = 16 * h;
1077 
1078  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1079  av_log(s->avctx, AV_LOG_DEBUG, "sde w:%d, h:%d\n", w, h);
1080 }
1081 
1082 static void mpeg_decode_picture_display_extension(Mpeg1Context *s1)
1083 {
1084  MpegEncContext *s = &s1->mpeg_enc_ctx;
1085  int i, nofco;
1086 
1087  nofco = 1;
1088  if (s->progressive_sequence) {
1089  if (s->repeat_first_field) {
1090  nofco++;
1091  if (s->top_field_first)
1092  nofco++;
1093  }
1094  } else {
1095  if (s->picture_structure == PICT_FRAME) {
1096  nofco++;
1097  if (s->repeat_first_field)
1098  nofco++;
1099  }
1100  }
1101  for (i = 0; i < nofco; i++) {
1102  s1->pan_scan.position[i][0] = get_sbits(&s->gb, 16);
1103  skip_bits(&s->gb, 1); // marker
1104  s1->pan_scan.position[i][1] = get_sbits(&s->gb, 16);
1105  skip_bits(&s->gb, 1); // marker
1106  }
1107 
1108  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1109  av_log(s->avctx, AV_LOG_DEBUG,
1110  "pde (%"PRId16",%"PRId16") (%"PRId16",%"PRId16") (%"PRId16",%"PRId16")\n",
1111  s1->pan_scan.position[0][0], s1->pan_scan.position[0][1],
1112  s1->pan_scan.position[1][0], s1->pan_scan.position[1][1],
1113  s1->pan_scan.position[2][0], s1->pan_scan.position[2][1]);
1114 }
1115 
1116 static int load_matrix(MpegEncContext *s, uint16_t matrix0[64],
1117  uint16_t matrix1[64], int intra)
1118 {
1119  int i;
1120 
1121  for (i = 0; i < 64; i++) {
1122  int j = s->idsp.idct_permutation[ff_zigzag_direct[i]];
1123  int v = get_bits(&s->gb, 8);
1124  if (v == 0) {
1125  av_log(s->avctx, AV_LOG_ERROR, "matrix damaged\n");
1126  return AVERROR_INVALIDDATA;
1127  }
1128  if (intra && i == 0 && v != 8) {
1129  av_log(s->avctx, AV_LOG_DEBUG, "intra matrix specifies invalid DC quantizer %d, ignoring\n", v);
1130  v = 8; // needed by pink.mpg / issue1046
1131  }
1132  matrix0[j] = v;
1133  if (matrix1)
1134  matrix1[j] = v;
1135  }
1136  return 0;
1137 }
1138 
1139 static void mpeg_decode_quant_matrix_extension(MpegEncContext *s)
1140 {
1141  ff_dlog(s->avctx, "matrix extension\n");
1142 
1143  if (get_bits1(&s->gb))
1144  load_matrix(s, s->chroma_intra_matrix, s->intra_matrix, 1);
1145  if (get_bits1(&s->gb))
1146  load_matrix(s, s->chroma_inter_matrix, s->inter_matrix, 0);
1147  if (get_bits1(&s->gb))
1148  load_matrix(s, s->chroma_intra_matrix, NULL, 1);
1149  if (get_bits1(&s->gb))
1150  load_matrix(s, s->chroma_inter_matrix, NULL, 0);
1151 }
1152 
1153 static int mpeg_decode_picture_coding_extension(Mpeg1Context *s1)
1154 {
1155  MpegEncContext *s = &s1->mpeg_enc_ctx;
1156 
1157  s->full_pel[0] = s->full_pel[1] = 0;
1158  s->mpeg_f_code[0][0] = get_bits(&s->gb, 4);
1159  s->mpeg_f_code[0][1] = get_bits(&s->gb, 4);
1160  s->mpeg_f_code[1][0] = get_bits(&s->gb, 4);
1161  s->mpeg_f_code[1][1] = get_bits(&s->gb, 4);
1162  s->mpeg_f_code[0][0] += !s->mpeg_f_code[0][0];
1163  s->mpeg_f_code[0][1] += !s->mpeg_f_code[0][1];
1164  s->mpeg_f_code[1][0] += !s->mpeg_f_code[1][0];
1165  s->mpeg_f_code[1][1] += !s->mpeg_f_code[1][1];
1166  if (!s->pict_type && s->context_initialized) {
1167  av_log(s->avctx, AV_LOG_ERROR, "Missing picture start code\n");
1168  if (s->avctx->err_recognition & AV_EF_EXPLODE)
1169  return AVERROR_INVALIDDATA;
1170  av_log(s->avctx, AV_LOG_WARNING, "Guessing pict_type from mpeg_f_code\n");
1171  if (s->mpeg_f_code[1][0] == 15 && s->mpeg_f_code[1][1] == 15) {
1172  if (s->mpeg_f_code[0][0] == 15 && s->mpeg_f_code[0][1] == 15)
1173  s->pict_type = AV_PICTURE_TYPE_I;
1174  else
1175  s->pict_type = AV_PICTURE_TYPE_P;
1176  } else
1177  s->pict_type = AV_PICTURE_TYPE_B;
1178  }
1179 
1180  s->intra_dc_precision = get_bits(&s->gb, 2);
1181  s->picture_structure = get_bits(&s->gb, 2);
1182  s->top_field_first = get_bits1(&s->gb);
1183  s->frame_pred_frame_dct = get_bits1(&s->gb);
1184  s->concealment_motion_vectors = get_bits1(&s->gb);
1185  s->q_scale_type = get_bits1(&s->gb);
1186  s->intra_vlc_format = get_bits1(&s->gb);
1187  s->alternate_scan = get_bits1(&s->gb);
1188  s->repeat_first_field = get_bits1(&s->gb);
1189  s->chroma_420_type = get_bits1(&s->gb);
1190  s->progressive_frame = get_bits1(&s->gb);
1191 
1192  // We only initialize intra_scantable.permutated, as this is all we use.
1193  ff_permute_scantable(s->intra_scantable.permutated,
1194  s->alternate_scan ? ff_alternate_vertical_scan : ff_zigzag_direct,
1195  s->idsp.idct_permutation);
1196 
1197  /* composite display not parsed */
1198  ff_dlog(s->avctx, "intra_dc_precision=%d\n", s->intra_dc_precision);
1199  ff_dlog(s->avctx, "picture_structure=%d\n", s->picture_structure);
1200  ff_dlog(s->avctx, "top field first=%d\n", s->top_field_first);
1201  ff_dlog(s->avctx, "repeat first field=%d\n", s->repeat_first_field);
1202  ff_dlog(s->avctx, "conceal=%d\n", s->concealment_motion_vectors);
1203  ff_dlog(s->avctx, "intra_vlc_format=%d\n", s->intra_vlc_format);
1204  ff_dlog(s->avctx, "alternate_scan=%d\n", s->alternate_scan);
1205  ff_dlog(s->avctx, "frame_pred_frame_dct=%d\n", s->frame_pred_frame_dct);
1206  ff_dlog(s->avctx, "progressive_frame=%d\n", s->progressive_frame);
1207 
1208  return 0;
1209 }
1210 
1211 static int mpeg_field_start(Mpeg1Context *s1, const uint8_t *buf, int buf_size)
1212 {
1213  MpegEncContext *s = &s1->mpeg_enc_ctx;
1214  AVCodecContext *avctx = s->avctx;
1215  int second_field = 0;
1216  int ret;
1217 
1218  if (!(avctx->flags2 & AV_CODEC_FLAG2_CHUNKS)) {
1219  if (s->mb_width * s->mb_height * 11LL / (33 * 2 * 8) > buf_size)
1220  return AVERROR_INVALIDDATA;
1221  }
1222 
1223  /* start frame decoding */
1224  if (s->first_field || s->picture_structure == PICT_FRAME) {
1225  AVFrameSideData *pan_scan;
1226 
1227  if ((ret = ff_mpv_frame_start(s, avctx)) < 0)
1228  return ret;
1229 
1230  if (s->picture_structure != PICT_FRAME) {
1231  s->cur_pic.ptr->f->flags |= AV_FRAME_FLAG_TOP_FIELD_FIRST *
1232  (s->picture_structure == PICT_TOP_FIELD);
1233 
1234  for (int i = 0; i < 3; i++) {
1235  if (s->picture_structure == PICT_BOTTOM_FIELD) {
1236  s->cur_pic.data[i] = FF_PTR_ADD(s->cur_pic.data[i],
1237  s->cur_pic.linesize[i]);
1238  }
1239  s->cur_pic.linesize[i] *= 2;
1240  }
1241  }
1242 
1244 
1245  /* first check if we must repeat the frame */
1246  s->cur_pic.ptr->f->repeat_pict = 0;
1247  if (s->repeat_first_field) {
1248  if (s->progressive_sequence) {
1249  if (s->top_field_first)
1250  s->cur_pic.ptr->f->repeat_pict = 4;
1251  else
1252  s->cur_pic.ptr->f->repeat_pict = 2;
1253  } else if (s->progressive_frame) {
1254  s->cur_pic.ptr->f->repeat_pict = 1;
1255  }
1256  }
1257 
1258  ret = ff_frame_new_side_data(s->avctx, s->cur_pic.ptr->f,
1259  AV_FRAME_DATA_PANSCAN, sizeof(s1->pan_scan),
1260  &pan_scan);
1261  if (ret < 0)
1262  return ret;
1263  if (pan_scan)
1264  memcpy(pan_scan->data, &s1->pan_scan, sizeof(s1->pan_scan));
1265 
1266  if (s1->a53_buf_ref) {
1267             ret = ff_frame_new_side_data_from_buf(
1268                 s->avctx, s->cur_pic.ptr->f, AV_FRAME_DATA_A53_CC,
1269  &s1->a53_buf_ref);
1270  if (ret < 0)
1271  return ret;
1272  }
1273 
1274  if (s1->has_stereo3d) {
1275  AVStereo3D *stereo = av_stereo3d_create_side_data(s->cur_pic.ptr->f);
1276  if (!stereo)
1277  return AVERROR(ENOMEM);
1278 
1279  stereo->type = s1->stereo3d_type;
1280  s1->has_stereo3d = 0;
1281  }
1282 
1283  if (s1->has_afd) {
1284  AVFrameSideData *sd;
1285  ret = ff_frame_new_side_data(s->avctx, s->cur_pic.ptr->f,
1286  AV_FRAME_DATA_AFD, 1, &sd);
1287  if (ret < 0)
1288  return ret;
1289  if (sd)
1290  *sd->data = s1->afd;
1291  s1->has_afd = 0;
1292  }
1293  } else { // second field
1294  second_field = 1;
1295  if (!s->cur_pic.ptr) {
1296  av_log(s->avctx, AV_LOG_ERROR, "first field missing\n");
1297  return AVERROR_INVALIDDATA;
1298  }
1299 
1300  if (s->avctx->hwaccel) {
1301  if ((ret = FF_HW_SIMPLE_CALL(s->avctx, end_frame)) < 0) {
1302  av_log(avctx, AV_LOG_ERROR,
1303  "hardware accelerator failed to decode first field\n");
1304  return ret;
1305  }
1306  }
1308  if (ret < 0)
1309  return ret;
1310 
1311  for (int i = 0; i < 3; i++) {
1312  s->cur_pic.data[i] = s->cur_pic.ptr->f->data[i];
1313  if (s->picture_structure == PICT_BOTTOM_FIELD)
1314  s->cur_pic.data[i] +=
1315  s->cur_pic.ptr->f->linesize[i];
1316  }
1317  }
1318 
1319  if (avctx->hwaccel) {
1320  if ((ret = FF_HW_CALL(avctx, start_frame, NULL, buf, buf_size)) < 0)
1321  return ret;
1322  } else if (s->codec_tag == MKTAG('V', 'C', 'R', '2')) {
1323  // Exchange UV
1324  FFSWAP(uint8_t*, s->cur_pic.data[1], s->cur_pic.data[2]);
1325  FFSWAP(ptrdiff_t, s->cur_pic.linesize[1], s->cur_pic.linesize[2]);
1326  if (!second_field) {
1327  FFSWAP(uint8_t*, s->next_pic.data[1], s->next_pic.data[2]);
1328  FFSWAP(ptrdiff_t, s->next_pic.linesize[1], s->next_pic.linesize[2]);
1329  FFSWAP(uint8_t*, s->last_pic.data[1], s->last_pic.data[2]);
1330  FFSWAP(ptrdiff_t, s->last_pic.linesize[1], s->last_pic.linesize[2]);
1331  }
1332  }
1333 
1334  return 0;
1335 }
1336 
1337 #define DECODE_SLICE_ERROR -1
1338 #define DECODE_SLICE_OK 0
1339 
1340 /**
1341  * Decode a slice.
1342  * MpegEncContext.mb_y must be set to the MB row from the startcode.
1343  * @return DECODE_SLICE_ERROR if the slice is damaged,
1344  * DECODE_SLICE_OK if this slice is OK
1345  */
1346 static int mpeg_decode_slice(MpegEncContext *s, int mb_y,
1347  const uint8_t **buf, int buf_size)
1348 {
1349  AVCodecContext *avctx = s->avctx;
1350  const int lowres = s->avctx->lowres;
1351  const int field_pic = s->picture_structure != PICT_FRAME;
1352  int ret;
1353 
1354  s->resync_mb_x =
1355  s->resync_mb_y = -1;
1356 
1357  av_assert0(mb_y < s->mb_height);
1358 
1359  ret = init_get_bits8(&s->gb, *buf, buf_size);
1360  if (ret < 0)
1361  return ret;
1362 
1363  if (s->codec_id != AV_CODEC_ID_MPEG1VIDEO && s->mb_height > 2800/16)
1364  skip_bits(&s->gb, 3);
1365 
1366     ff_mpeg1_clean_buffers(s);
1367     s->interlaced_dct = 0;
1368 
1369  s->qscale = mpeg_get_qscale(s);
1370 
1371  if (s->qscale == 0) {
1372  av_log(s->avctx, AV_LOG_ERROR, "qscale == 0\n");
1373  return AVERROR_INVALIDDATA;
1374  }
1375 
1376  /* extra slice info */
1377  if (skip_1stop_8data_bits(&s->gb) < 0)
1378  return AVERROR_INVALIDDATA;
1379 
1380  s->mb_x = 0;
1381 
1382  if (mb_y == 0 && s->codec_tag == AV_RL32("SLIF")) {
1383  skip_bits1(&s->gb);
1384  } else {
1385  while (get_bits_left(&s->gb) > 0) {
1386  int code = get_vlc2(&s->gb, ff_mbincr_vlc,
1387  MBINCR_VLC_BITS, 2);
1388  if (code < 0) {
1389  av_log(s->avctx, AV_LOG_ERROR, "first mb_incr damaged\n");
1390  return AVERROR_INVALIDDATA;
1391  }
1392  if (code >= 33) {
1393  if (code == 33)
1394  s->mb_x += 33;
1395  /* otherwise, stuffing, nothing to do */
1396  } else {
1397  s->mb_x += code;
1398  break;
1399  }
1400  }
1401  }
1402 
1403  if (s->mb_x >= (unsigned) s->mb_width) {
1404  av_log(s->avctx, AV_LOG_ERROR, "initial skip overflow\n");
1405  return AVERROR_INVALIDDATA;
1406  }
1407 
1408  if (avctx->hwaccel) {
1409  const uint8_t *buf_end, *buf_start = *buf - 4; /* include start_code */
1410  int start_code = -1;
1411  buf_end = avpriv_find_start_code(buf_start + 2, *buf + buf_size, &start_code);
1412  if (buf_end < *buf + buf_size)
1413  buf_end -= 4;
1414  s->mb_y = mb_y;
1415  if (FF_HW_CALL(avctx, decode_slice, buf_start, buf_end - buf_start) < 0)
1416  return DECODE_SLICE_ERROR;
1417  *buf = buf_end;
1418  return DECODE_SLICE_OK;
1419  }
1420 
1421  s->resync_mb_x = s->mb_x;
1422  s->resync_mb_y = s->mb_y = mb_y;
1423  s->mb_skip_run = 0;
1424     ff_init_block_index(s);
1425 
1426  if (s->mb_y == 0 && s->mb_x == 0 && (s->first_field || s->picture_structure == PICT_FRAME)) {
1427  if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
1428  av_log(s->avctx, AV_LOG_DEBUG,
1429  "qp:%d fc:%2d%2d%2d%2d %c %s %s %s %s dc:%d pstruct:%d fdct:%d cmv:%d qtype:%d ivlc:%d rff:%d %s\n",
1430  s->qscale,
1431  s->mpeg_f_code[0][0], s->mpeg_f_code[0][1],
1432  s->mpeg_f_code[1][0], s->mpeg_f_code[1][1],
1433  s->pict_type == AV_PICTURE_TYPE_I ? 'I' :
1434  (s->pict_type == AV_PICTURE_TYPE_P ? 'P' :
1435  (s->pict_type == AV_PICTURE_TYPE_B ? 'B' : 'S')),
1436  s->progressive_sequence ? "ps" : "",
1437  s->progressive_frame ? "pf" : "",
1438  s->alternate_scan ? "alt" : "",
1439  s->top_field_first ? "top" : "",
1440  s->intra_dc_precision, s->picture_structure,
1441  s->frame_pred_frame_dct, s->concealment_motion_vectors,
1442  s->q_scale_type, s->intra_vlc_format,
1443  s->repeat_first_field, s->chroma_420_type ? "420" : "");
1444  }
1445  }
1446 
1447  for (;;) {
1448  if ((ret = mpeg_decode_mb(s, s->block)) < 0)
1449  return ret;
1450 
1451  // Note motion_val is normally NULL unless we want to extract the MVs.
1452  if (s->cur_pic.motion_val[0]) {
1453  const int wrap = s->b8_stride;
1454  int xy = s->mb_x * 2 + s->mb_y * 2 * wrap;
1455  int b8_xy = 4 * (s->mb_x + s->mb_y * s->mb_stride);
1456  int motion_x, motion_y, dir, i;
1457 
1458  for (i = 0; i < 2; i++) {
1459  for (dir = 0; dir < 2; dir++) {
1460  if (s->mb_intra ||
1461  (dir == 1 && s->pict_type != AV_PICTURE_TYPE_B)) {
1462  motion_x = motion_y = 0;
1463  } else if (s->mv_type == MV_TYPE_16X16 ||
1464  (s->mv_type == MV_TYPE_FIELD && field_pic)) {
1465  motion_x = s->mv[dir][0][0];
1466  motion_y = s->mv[dir][0][1];
1467  } else { /* if ((s->mv_type == MV_TYPE_FIELD) || (s->mv_type == MV_TYPE_16X8)) */
1468  motion_x = s->mv[dir][i][0];
1469  motion_y = s->mv[dir][i][1];
1470  }
1471 
1472  s->cur_pic.motion_val[dir][xy][0] = motion_x;
1473  s->cur_pic.motion_val[dir][xy][1] = motion_y;
1474  s->cur_pic.motion_val[dir][xy + 1][0] = motion_x;
1475  s->cur_pic.motion_val[dir][xy + 1][1] = motion_y;
1476  s->cur_pic.ref_index [dir][b8_xy] =
1477  s->cur_pic.ref_index [dir][b8_xy + 1] = s->field_select[dir][i];
1478  av_assert2(s->field_select[dir][i] == 0 ||
1479  s->field_select[dir][i] == 1);
1480  }
1481  xy += wrap;
1482  b8_xy += 2;
1483  }
1484  }
1485 
1486  s->dest[0] += 16 >> lowres;
1487  s->dest[1] +=(16 >> lowres) >> s->chroma_x_shift;
1488  s->dest[2] +=(16 >> lowres) >> s->chroma_x_shift;
1489 
1490  ff_mpv_reconstruct_mb(s, s->block);
1491 
1492  if (++s->mb_x >= s->mb_width) {
1493  const int mb_size = 16 >> s->avctx->lowres;
1494  int left;
1495 
1496  ff_mpeg_draw_horiz_band(s, mb_size * (s->mb_y >> field_pic), mb_size);
1497 
1498  s->mb_x = 0;
1499  s->mb_y += 1 << field_pic;
1500 
1501  if (s->mb_y >= s->mb_height) {
1502  int left = get_bits_left(&s->gb);
1503  int is_d10 = s->chroma_format == CHROMA_422 &&
1504  s->pict_type == AV_PICTURE_TYPE_I &&
1505  avctx->profile == 0 && avctx->level == 5 &&
1506  s->intra_dc_precision == 2 &&
1507  s->q_scale_type == 1 && s->alternate_scan == 0 &&
1508  s->progressive_frame == 0
1509  /* vbv_delay == 0xBBB || 0xE10 */;
1510 
1511  if (left >= 32 && !is_d10) {
1512  GetBitContext gb = s->gb;
1513  align_get_bits(&gb);
1514  if (show_bits(&gb, 24) == 0x060E2B) {
1515  av_log(avctx, AV_LOG_DEBUG, "Invalid MXF data found in video stream\n");
1516  is_d10 = 1;
1517  }
1518  if (left > 32 && show_bits_long(&gb, 32) == 0x201) {
1519  av_log(avctx, AV_LOG_DEBUG, "skipping m704 alpha (unsupported)\n");
1520  goto eos;
1521  }
1522  }
1523 
1524  if (left < 0 ||
1525  (left && show_bits(&s->gb, FFMIN(left, 23)) && !is_d10) ||
1526  ((avctx->err_recognition & (AV_EF_BITSTREAM | AV_EF_AGGRESSIVE)) && left > 8)) {
1527  av_log(avctx, AV_LOG_ERROR, "end mismatch left=%d %0X at %d %d\n",
1528  left, left>0 ? show_bits(&s->gb, FFMIN(left, 23)) : 0, s->mb_x, s->mb_y);
1529  return AVERROR_INVALIDDATA;
1530  } else
1531  goto eos;
1532  }
1533             // There are files out there which are missing the last slice
1534             // in cases where the slice is completely outside the visible
1535             // area; we detect this here instead of running into the end
1536             // expecting more data.
1537  left = get_bits_left(&s->gb);
1538  if (s->mb_y >= ((s->height + 15) >> 4) &&
1539  !s->progressive_sequence &&
1540  left <= 25 &&
1541  left >= 0 &&
1542  s->mb_skip_run == -1 &&
1543  (!left || show_bits(&s->gb, left) == 0))
1544  goto eos;
1545 
1546             ff_init_block_index(s);
1547         }
1548 
1549  /* skip mb handling */
1550  if (s->mb_skip_run == -1) {
1551  /* read increment again */
1552  s->mb_skip_run = 0;
1553  for (;;) {
1554  int code = get_vlc2(&s->gb, ff_mbincr_vlc,
1555  MBINCR_VLC_BITS, 2);
1556  if (code < 0) {
1557  av_log(s->avctx, AV_LOG_ERROR, "mb incr damaged\n");
1558  return AVERROR_INVALIDDATA;
1559  }
1560  if (code >= 33) {
1561  if (code == 33) {
1562  s->mb_skip_run += 33;
1563  } else if (code == 35) {
1564  if (s->mb_skip_run != 0 || show_bits(&s->gb, 15) != 0) {
1565  av_log(s->avctx, AV_LOG_ERROR, "slice mismatch\n");
1566  return AVERROR_INVALIDDATA;
1567  }
1568  goto eos; /* end of slice */
1569  }
1570  /* otherwise, stuffing, nothing to do */
1571  } else {
1572  s->mb_skip_run += code;
1573  break;
1574  }
1575  }
1576  if (s->mb_skip_run) {
1577  int i;
1578  if (s->pict_type == AV_PICTURE_TYPE_I) {
1579  av_log(s->avctx, AV_LOG_ERROR,
1580  "skipped MB in I-frame at %d %d\n", s->mb_x, s->mb_y);
1581  return AVERROR_INVALIDDATA;
1582  }
1583 
1584  /* skip mb */
1585  s->mb_intra = 0;
1586  for (i = 0; i < 12; i++)
1587  s->block_last_index[i] = -1;
1588  s->last_dc[0] = s->last_dc[1] = s->last_dc[2] = 128 << s->intra_dc_precision;
1589  if (s->picture_structure == PICT_FRAME)
1590  s->mv_type = MV_TYPE_16X16;
1591  else
1592  s->mv_type = MV_TYPE_FIELD;
1593  if (s->pict_type == AV_PICTURE_TYPE_P) {
1594  /* if P type, zero motion vector is implied */
1595  s->mv_dir = MV_DIR_FORWARD;
1596  s->mv[0][0][0] = s->mv[0][0][1] = 0;
1597  s->last_mv[0][0][0] = s->last_mv[0][0][1] = 0;
1598  s->last_mv[0][1][0] = s->last_mv[0][1][1] = 0;
1599  s->field_select[0][0] = (s->picture_structure - 1) & 1;
1600  } else {
1601  /* if B type, reuse previous vectors and directions */
1602  s->mv[0][0][0] = s->last_mv[0][0][0];
1603  s->mv[0][0][1] = s->last_mv[0][0][1];
1604  s->mv[1][0][0] = s->last_mv[1][0][0];
1605  s->mv[1][0][1] = s->last_mv[1][0][1];
1606  s->field_select[0][0] = (s->picture_structure - 1) & 1;
1607  s->field_select[1][0] = (s->picture_structure - 1) & 1;
1608  }
1609  }
1610  }
1611  }
1612 eos: // end of slice
1613  if (get_bits_left(&s->gb) < 0) {
1614  av_log(s->avctx, AV_LOG_ERROR, "overread %d\n", -get_bits_left(&s->gb));
1615  return AVERROR_INVALIDDATA;
1616  }
1617  *buf += (get_bits_count(&s->gb) - 1) / 8;
1618  ff_dlog(s->avctx, "Slice start:%d %d end:%d %d\n", s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y);
1619  return 0;
1620 }
1621 
1622 static int slice_decode_thread(AVCodecContext *c, void *arg)
1623 {
1624  MpegEncContext *s = *(void **) arg;
1625  const uint8_t *buf = s->gb.buffer;
1626  int mb_y = s->start_mb_y;
1627  const int field_pic = s->picture_structure != PICT_FRAME;
1628 
1629  s->er.error_count = (3 * (s->end_mb_y - s->start_mb_y) * s->mb_width) >> field_pic;
1630 
1631  for (;;) {
1632  uint32_t start_code;
1633  int ret;
1634 
1635  ret = mpeg_decode_slice(s, mb_y, &buf, s->gb.buffer_end - buf);
1636  emms_c();
1637  ff_dlog(c, "ret:%d resync:%d/%d mb:%d/%d ts:%d/%d ec:%d\n",
1638  ret, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y,
1639  s->start_mb_y, s->end_mb_y, s->er.error_count);
1640  if (ret < 0) {
1641  if (c->err_recognition & AV_EF_EXPLODE)
1642  return ret;
1643  if (s->resync_mb_x >= 0 && s->resync_mb_y >= 0)
1644  ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y,
1645                                 s->mb_x, s->mb_y,
1646                                 ER_AC_ERROR | ER_DC_ERROR | ER_MV_ERROR);
1647  } else {
1648  ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y,
1649                             s->mb_x - 1, s->mb_y,
1650                             ER_AC_END | ER_DC_END | ER_MV_END);
1651  }
1652 
1653  if (s->mb_y == s->end_mb_y)
1654  return 0;
1655 
1656  start_code = -1;
1657  buf = avpriv_find_start_code(buf, s->gb.buffer_end, &start_code);
1658  if (start_code < SLICE_MIN_START_CODE || start_code > SLICE_MAX_START_CODE)
1659             return AVERROR_INVALIDDATA;
1660         mb_y = start_code - SLICE_MIN_START_CODE;
1661  if (s->codec_id != AV_CODEC_ID_MPEG1VIDEO && s->mb_height > 2800/16)
1662  mb_y += (*buf&0xE0)<<2;
1663  mb_y <<= field_pic;
1664  if (s->picture_structure == PICT_BOTTOM_FIELD)
1665  mb_y++;
1666  if (mb_y >= s->end_mb_y)
1667  return AVERROR_INVALIDDATA;
1668  }
1669 }
1670 
1671 /**
1672  * Handle slice ends.
1673  * @return 1 if it seems to be the last slice
1674  */
1675 static int slice_end(AVCodecContext *avctx, AVFrame *pict, int *got_output)
1676 {
1677  Mpeg1Context *s1 = avctx->priv_data;
1678  MpegEncContext *s = &s1->mpeg_enc_ctx;
1679 
1680  if (!s->context_initialized || !s->cur_pic.ptr)
1681  return 0;
1682 
1683  if (s->avctx->hwaccel) {
1684  int ret = FF_HW_SIMPLE_CALL(s->avctx, end_frame);
1685  if (ret < 0) {
1686  av_log(avctx, AV_LOG_ERROR,
1687  "hardware accelerator failed to decode picture\n");
1688  return ret;
1689  }
1690  }
1691 
1692  /* end of slice reached */
1693  if (/* s->mb_y << field_pic == s->mb_height && */ !s->first_field && !s1->first_slice) {
1694  /* end of image */
1695 
1696  ff_er_frame_end(&s->er, NULL);
1697 
1698         ff_mpv_frame_end(s);
1699 
1700  if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
1701  int ret = av_frame_ref(pict, s->cur_pic.ptr->f);
1702  if (ret < 0)
1703  return ret;
1704  ff_print_debug_info(s, s->cur_pic.ptr, pict);
1705  ff_mpv_export_qp_table(s, pict, s->cur_pic.ptr, FF_MPV_QSCALE_TYPE_MPEG2);
1706  *got_output = 1;
1707  } else {
1708  /* latency of 1 frame for I- and P-frames */
1709  if (s->last_pic.ptr && !s->last_pic.ptr->dummy) {
1710  int ret = av_frame_ref(pict, s->last_pic.ptr->f);
1711  if (ret < 0)
1712  return ret;
1713  ff_print_debug_info(s, s->last_pic.ptr, pict);
1714  ff_mpv_export_qp_table(s, pict, s->last_pic.ptr, FF_MPV_QSCALE_TYPE_MPEG2);
1715  *got_output = 1;
1716  }
1717  }
1718 
1719  return 1;
1720  } else {
1721  return 0;
1722  }
1723 }
1724 
1725 static int mpeg1_decode_sequence(AVCodecContext *avctx,
1726                                  const uint8_t *buf, int buf_size)
1727 {
1728  Mpeg1Context *s1 = avctx->priv_data;
1729  MpegEncContext *s = &s1->mpeg_enc_ctx;
1730  int width, height;
1731  int i, v, j;
1732 
1733  int ret = init_get_bits8(&s->gb, buf, buf_size);
1734  if (ret < 0)
1735  return ret;
1736 
1737  width = get_bits(&s->gb, 12);
1738  height = get_bits(&s->gb, 12);
1739  if (width == 0 || height == 0) {
1740  av_log(avctx, AV_LOG_WARNING,
1741                "Invalid horizontal or vertical size value.\n");
1742         if (avctx->err_recognition & (AV_EF_BITSTREAM | AV_EF_COMPLIANT))
1743  return AVERROR_INVALIDDATA;
1744  }
1745  s1->aspect_ratio_info = get_bits(&s->gb, 4);
1746  if (s1->aspect_ratio_info == 0) {
1747         av_log(avctx, AV_LOG_ERROR, "aspect ratio has forbidden 0 value\n");
1748         if (avctx->err_recognition & (AV_EF_BITSTREAM | AV_EF_COMPLIANT))
1749  return AVERROR_INVALIDDATA;
1750  }
1751  s1->frame_rate_index = get_bits(&s->gb, 4);
1752  if (s1->frame_rate_index == 0 || s1->frame_rate_index > 13) {
1753  av_log(avctx, AV_LOG_WARNING,
1754  "frame_rate_index %d is invalid\n", s1->frame_rate_index);
1755  s1->frame_rate_index = 1;
1756  }
1757  s1->bit_rate = get_bits(&s->gb, 18) * 400;
1758  if (check_marker(s->avctx, &s->gb, "in sequence header") == 0) {
1759  return AVERROR_INVALIDDATA;
1760  }
1761 
1762  s->avctx->rc_buffer_size = get_bits(&s->gb, 10) * 1024 * 16;
1763  skip_bits(&s->gb, 1);
1764 
1765  /* get matrix */
1766  if (get_bits1(&s->gb)) {
1767  load_matrix(s, s->chroma_intra_matrix, s->intra_matrix, 1);
1768  } else {
1769  for (i = 0; i < 64; i++) {
1770  j = s->idsp.idct_permutation[i];
1771             v = ff_mpeg1_default_intra_matrix[i];
1772             s->intra_matrix[j] = v;
1773  s->chroma_intra_matrix[j] = v;
1774  }
1775  }
1776  if (get_bits1(&s->gb)) {
1777  load_matrix(s, s->chroma_inter_matrix, s->inter_matrix, 0);
1778  } else {
1779  for (i = 0; i < 64; i++) {
1780  int j = s->idsp.idct_permutation[i];
1781             v = ff_mpeg1_default_non_intra_matrix[i];
1782             s->inter_matrix[j] = v;
1783  s->chroma_inter_matrix[j] = v;
1784  }
1785  }
1786 
1787  if (show_bits(&s->gb, 23) != 0) {
1788  av_log(s->avctx, AV_LOG_ERROR, "sequence header damaged\n");
1789  return AVERROR_INVALIDDATA;
1790  }
1791 
1792  s->width = width;
1793  s->height = height;
1794 
1795  /* We set MPEG-2 parameters so that it emulates MPEG-1. */
1796  s->progressive_sequence = 1;
1797  s->progressive_frame = 1;
1798  s->picture_structure = PICT_FRAME;
1799  s->first_field = 0;
1800  s->frame_pred_frame_dct = 1;
1801  s->chroma_format = CHROMA_420;
1802  s->codec_id =
1803  s->avctx->codec_id = AV_CODEC_ID_MPEG1VIDEO;
1804  if (s->avctx->flags & AV_CODEC_FLAG_LOW_DELAY)
1805  s->low_delay = 1;
1806 
1807  if (s->avctx->debug & FF_DEBUG_PICT_INFO)
1808  av_log(s->avctx, AV_LOG_DEBUG, "vbv buffer: %d, bitrate:%"PRId64", aspect_ratio_info: %d \n",
1809  s->avctx->rc_buffer_size, s1->bit_rate, s1->aspect_ratio_info);
1810 
1811  return 0;
1812 }
1813 
1814 static int vcr2_init_sequence(AVCodecContext *avctx)
1815 {
1816  Mpeg1Context *s1 = avctx->priv_data;
1817  MpegEncContext *s = &s1->mpeg_enc_ctx;
1818  int i, v, ret;
1819 
1820  /* start new MPEG-1 context decoding */
1821     if (s->context_initialized)
1822         ff_mpv_common_end(s);
1823 
1824  s->width = avctx->coded_width;
1825  s->height = avctx->coded_height;
1826  avctx->has_b_frames = 0; // true?
1827  s->low_delay = 1;
1828 
1829  avctx->pix_fmt = mpeg_get_pixelformat(avctx);
1830 
1831  if ((ret = ff_mpv_common_init(s)) < 0)
1832  return ret;
1833  if (!s->avctx->lowres)
1834  for (int i = 0; i < s->slice_context_count; i++)
1835  ff_mpv_framesize_disable(&s->thread_context[i]->sc);
1836 
1837  for (i = 0; i < 64; i++) {
1838  int j = s->idsp.idct_permutation[i];
1839         v = ff_mpeg1_default_intra_matrix[i];
1840         s->intra_matrix[j] = v;
1841  s->chroma_intra_matrix[j] = v;
1842 
1843         v = ff_mpeg1_default_non_intra_matrix[i];
1844         s->inter_matrix[j] = v;
1845  s->chroma_inter_matrix[j] = v;
1846  }
1847 
1848  s->progressive_sequence = 1;
1849  s->progressive_frame = 1;
1850  s->picture_structure = PICT_FRAME;
1851  s->first_field = 0;
1852  s->frame_pred_frame_dct = 1;
1853  s->chroma_format = CHROMA_420;
1854  if (s->codec_tag == AV_RL32("BW10")) {
1855  s->codec_id = s->avctx->codec_id = AV_CODEC_ID_MPEG1VIDEO;
1856  } else {
1857  s->codec_id = s->avctx->codec_id = AV_CODEC_ID_MPEG2VIDEO;
1858  }
1859  s1->save_progressive_seq = s->progressive_sequence;
1860  s1->save_chroma_format = s->chroma_format;
1861  return 0;
1862 }
1863 
1864 static void mpeg_set_cc_format(AVCodecContext *avctx, enum Mpeg2ClosedCaptionsFormat format,
1865                                const char *label)
1866 {
1867  Mpeg1Context *s1 = avctx->priv_data;
1868 
1870 
1871  if (!s1->cc_format) {
1872  s1->cc_format = format;
1873 
1874  av_log(avctx, AV_LOG_DEBUG, "CC: first seen substream is %s format\n", label);
1875  }
1876 
1877 #if FF_API_CODEC_PROPS
1878 FF_DISABLE_DEPRECATION_WARNINGS
1879     avctx->properties |= FF_CODEC_PROPERTY_CLOSED_CAPTIONS;
1880 FF_ENABLE_DEPRECATION_WARNINGS
1881 #endif
1882 }
1883 
1884 static int mpeg_decode_a53_cc(AVCodecContext *avctx,
1885                               const uint8_t *p, int buf_size)
1886 {
1887  Mpeg1Context *s1 = avctx->priv_data;
1888 
1889  if ((!s1->cc_format || s1->cc_format == CC_FORMAT_A53_PART4) &&
1890  buf_size >= 6 &&
1891  p[0] == 'G' && p[1] == 'A' && p[2] == '9' && p[3] == '4' &&
1892  p[4] == 3 && (p[5] & 0x40)) {
1893  /* extract A53 Part 4 CC data */
1894  int cc_count = p[5] & 0x1f;
1895  if (cc_count > 0 && buf_size >= 7 + cc_count * 3) {
1896  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
1897  const uint64_t new_size = (old_size + cc_count
1898  * UINT64_C(3));
1899  int ret;
1900 
1901  if (new_size > 3*A53_MAX_CC_COUNT)
1902  return AVERROR(EINVAL);
1903 
1904  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
1905  if (ret >= 0)
1906  memcpy(s1->a53_buf_ref->data + old_size, p + 7, cc_count * UINT64_C(3));
1907 
1908  mpeg_set_cc_format(avctx, CC_FORMAT_A53_PART4, "A/53 Part 4");
1909  }
1910  return 1;
1911  } else if ((!s1->cc_format || s1->cc_format == CC_FORMAT_SCTE20) &&
1912  buf_size >= 2 &&
1913  p[0] == 0x03 && (p[1]&0x7f) == 0x01) {
1914  /* extract SCTE-20 CC data */
1915  GetBitContext gb;
1916  int cc_count = 0;
1917  int i, ret;
1918 
1919  ret = init_get_bits8(&gb, p + 2, buf_size - 2);
1920  if (ret < 0)
1921  return ret;
1922  cc_count = get_bits(&gb, 5);
1923  if (cc_count > 0) {
1924  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
1925  const uint64_t new_size = (old_size + cc_count
1926  * UINT64_C(3));
1927  if (new_size > 3*A53_MAX_CC_COUNT)
1928  return AVERROR(EINVAL);
1929 
1930  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
1931  if (ret >= 0) {
1932  uint8_t field, cc1, cc2;
1933  uint8_t *cap = s1->a53_buf_ref->data + old_size;
1934 
1935  memset(cap, 0, cc_count * 3);
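             // Each SCTE-20 element below: 2-bit priority, 2-bit field number, 5-bit line offset,
             // two bit-reversed EIA-608 bytes and a marker bit; repacked as A/53-style cc_data
             // with cc_valid set (0x04) and the field folded into cc_type.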
1936  for (i = 0; i < cc_count && get_bits_left(&gb) >= 26; i++) {
1937  skip_bits(&gb, 2); // priority
1938  field = get_bits(&gb, 2);
1939  skip_bits(&gb, 5); // line_offset
1940  cc1 = get_bits(&gb, 8);
1941  cc2 = get_bits(&gb, 8);
1942  skip_bits(&gb, 1); // marker
1943 
1944  if (!field) { // forbidden
1945  cap[0] = cap[1] = cap[2] = 0x00;
1946  } else {
1947  field = (field == 2 ? 1 : 0);
1948  if (!s1->mpeg_enc_ctx.top_field_first) field = !field;
1949  cap[0] = 0x04 | field;
1950  cap[1] = ff_reverse[cc1];
1951  cap[2] = ff_reverse[cc2];
1952  }
1953  cap += 3;
1954  }
1955  }
1956 
1957  mpeg_set_cc_format(avctx, CC_FORMAT_SCTE20, "SCTE-20");
1958  }
1959  return 1;
1960  } else if ((!s1->cc_format || s1->cc_format == CC_FORMAT_DVD) &&
1961  buf_size >= 11 &&
1962  p[0] == 'C' && p[1] == 'C' && p[2] == 0x01 && p[3] == 0xf8) {
1963  /* extract DVD CC data
1964  *
1965  * uint32_t user_data_start_code 0x000001B2 (big endian)
1966  * uint16_t user_identifier 0x4343 "CC"
1967  * uint8_t user_data_type_code 0x01
1968  * uint8_t caption_block_size 0xF8
1969  * uint8_t
1970  * bit 7 caption_odd_field_first 1=odd field (CC1/CC2) first 0=even field (CC3/CC4) first
1971  * bit 6 caption_filler 0
1972  * bit 5:1 caption_block_count number of caption blocks (pairs of caption words = frames). Most DVDs use 15 per start of GOP.
1973  * bit 0 caption_extra_field_added 1=one additional caption word
1974  *
1975  * struct caption_field_block {
1976  * uint8_t
1977  * bit 7:1 caption_filler 0x7F (all 1s)
1978  * bit 0 caption_field_odd 1=odd field (this is CC1/CC2) 0=even field (this is CC3/CC4)
1979  * uint8_t caption_first_byte
1980  * uint8_t caption_second_byte
1981  * } caption_block[(caption_block_count * 2) + caption_extra_field_added];
1982  *
1983  * Some DVDs encode caption data for both fields with caption_field_odd=1. The only way to decode the fields
1984  * correctly is to start on the field indicated by caption_odd_field_first and count between odd/even fields.
1985  * Don't assume that the first caption word is the odd field. There do exist MPEG files in the wild that start
1986  * on the even field. There also exist DVDs in the wild that encode an odd field count and the
1987  * caption_extra_field_added/caption_odd_field_first bits change per packet to allow that. */
1988  int cc_count = 0;
1989  int i, ret;
1990  // There is a caption count field in the data, but it is often
1991  // incorrect. So count the number of captions present.
1992  for (i = 5; i + 6 <= buf_size && ((p[i] & 0xfe) == 0xfe); i += 6)
1993  cc_count++;
1994  // Transform the DVD format into A53 Part 4 format
1995  if (cc_count > 0) {
1996  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
1997  const uint64_t new_size = (old_size + cc_count
1998  * UINT64_C(6));
1999  if (new_size > 3*A53_MAX_CC_COUNT)
2000  return AVERROR(EINVAL);
2001 
2002  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
2003  if (ret >= 0) {
2004  uint8_t field1 = !!(p[4] & 0x80);
2005  uint8_t *cap = s1->a53_buf_ref->data + old_size;
2006  p += 5;
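             // A/53 cc byte 0xfc marks line-21 field 1 data, 0xfd field 2; each DVD caption block
             // is mapped according to caption_odd_field_first and the block's odd/even marker (0xff/0xfe).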
2007  for (i = 0; i < cc_count; i++) {
2008  cap[0] = (p[0] == 0xff && field1) ? 0xfc : 0xfd;
2009  cap[1] = p[1];
2010  cap[2] = p[2];
2011  cap[3] = (p[3] == 0xff && !field1) ? 0xfc : 0xfd;
2012  cap[4] = p[4];
2013  cap[5] = p[5];
2014  cap += 6;
2015  p += 6;
2016  }
2017  }
2018 
2019  mpeg_set_cc_format(avctx, CC_FORMAT_DVD, "DVD");
2020  }
2021  return 1;
2022  } else if ((!s1->cc_format || s1->cc_format == CC_FORMAT_DISH) &&
2023  buf_size >= 12 &&
2024  p[0] == 0x05 && p[1] == 0x02) {
2025  /* extract Dish Network CC data */
2026  const uint8_t cc_header = 0xf8 | 0x04 /* valid */ | 0x00 /* line 21 field 1 */;
2027  uint8_t cc_data[4] = {0};
2028  int cc_count = 0;
2029  uint8_t cc_type = p[7];
2030  p += 8;
2031  buf_size -= 8;
2032 
2033  if (cc_type == 0x05 && buf_size >= 7) {
2034  cc_type = p[6];
2035  p += 7;
2036  buf_size -= 7;
2037  }
2038 
2039  if (cc_type == 0x02 && buf_size >= 4) { /* 2-byte caption, can be repeated */
2040  cc_count = 1;
2041  cc_data[0] = p[1];
2042  cc_data[1] = p[2];
2043  cc_type = p[3];
2044 
2045  /* Only repeat characters when the next type flag
2046  * is 0x04 and the characters are repeatable (i.e., less than
2047  * 32 with the parity stripped).
2048  */
2049  if (cc_type == 0x04 && (cc_data[0] & 0x7f) < 32) {
2050  cc_count = 2;
2051  cc_data[2] = cc_data[0];
2052  cc_data[3] = cc_data[1];
2053  }
2054  } else if (cc_type == 0x04 && buf_size >= 5) { /* 4-byte caption, not repeated */
2055  cc_count = 2;
2056  cc_data[0] = p[1];
2057  cc_data[1] = p[2];
2058  cc_data[2] = p[3];
2059  cc_data[3] = p[4];
2060  }
2061 
2062  if (cc_count > 0) {
2063  int ret;
2064  int old_size = s1->a53_buf_ref ? s1->a53_buf_ref->size : 0;
2065  const uint64_t new_size = (old_size + cc_count * UINT64_C(3));
2066  if (new_size > 3 * A53_MAX_CC_COUNT)
2067  return AVERROR(EINVAL);
2068 
2069  ret = av_buffer_realloc(&s1->a53_buf_ref, new_size);
2070  if (ret >= 0) {
2071  uint8_t *cap = s1->a53_buf_ref->data + old_size;
2072  cap[0] = cc_header;
2073  cap[1] = cc_data[0];
2074  cap[2] = cc_data[1];
2075  if (cc_count == 2) {
2076  cap[3] = cc_header;
2077  cap[4] = cc_data[2];
2078  cap[5] = cc_data[3];
2079  }
2080  }
2081 
2082  mpeg_set_cc_format(avctx, CC_FORMAT_DISH, "Dish Network");
2083  }
2084  return 1;
2085  }
2086  return 0;
2087 }
2088 
2089 static void mpeg_decode_user_data(AVCodecContext *avctx,
2090  const uint8_t *p, int buf_size)
2091 {
2092  Mpeg1Context *s = avctx->priv_data;
2093  const uint8_t *buf_end = p + buf_size;
2094  Mpeg1Context *s1 = avctx->priv_data;
2095 
2096 #if 0
2097  int i;
2098  for(i=0; !(!p[i-2] && !p[i-1] && p[i]==1) && i<buf_size; i++){
2099  av_log(avctx, AV_LOG_ERROR, "%c", p[i]);
2100  }
2101  av_log(avctx, AV_LOG_ERROR, "\n");
2102 #endif
2103 
2104  if (buf_size > 29){
2105  int i;
2106  for(i=0; i<20; i++)
2107  if (!memcmp(p+i, "\0TMPGEXS\0", 9)){
2108  s->tmpgexs= 1;
2109  }
2110  }
2111  /* we parse the DTG active format information */
2112  if (buf_end - p >= 5 &&
2113  p[0] == 'D' && p[1] == 'T' && p[2] == 'G' && p[3] == '1') {
2114  int flags = p[4];
2115  p += 5;
2116  if (flags & 0x80) {
2117  /* skip event id */
2118  p += 2;
2119  }
2120  if (flags & 0x40) {
2121  if (buf_end - p < 1)
2122  return;
2123  s1->has_afd = 1;
2124  s1->afd = p[0] & 0x0f;
2125  }
2126  } else if (buf_end - p >= 6 &&
2127  p[0] == 'J' && p[1] == 'P' && p[2] == '3' && p[3] == 'D' &&
2128  p[4] == 0x03) { // S3D_video_format_length
2129  // the 0x7F mask ignores the reserved_bit value
2130  const uint8_t S3D_video_format_type = p[5] & 0x7F;
2131 
2132  if (S3D_video_format_type == 0x03 ||
2133  S3D_video_format_type == 0x04 ||
2134  S3D_video_format_type == 0x08 ||
2135  S3D_video_format_type == 0x23) {
2136 
2137  s1->has_stereo3d = 1;
2138 
2139  switch (S3D_video_format_type) {
2140  case 0x03:
2141  s1->stereo3d_type = AV_STEREO3D_SIDEBYSIDE;
2142  break;
2143  case 0x04:
2144  s1->stereo3d_type = AV_STEREO3D_TOPBOTTOM;
2145  break;
2146  case 0x08:
2147  s1->stereo3d_type = AV_STEREO3D_2D;
2148  break;
2149  case 0x23:
2150  s1->stereo3d_type = AV_STEREO3D_SIDEBYSIDE_QUINCUNX;
2151  break;
2152  }
2153  }
2154  } else if (mpeg_decode_a53_cc(avctx, p, buf_size)) {
2155  return;
2156  }
2157 }
2158 
2159 static int mpeg_decode_gop(AVCodecContext *avctx,
2160  const uint8_t *buf, int buf_size)
2161 {
2162  Mpeg1Context *s1 = avctx->priv_data;
2163  MpegEncContext *s = &s1->mpeg_enc_ctx;
2164  int broken_link;
2165  int64_t tc;
2166 
2167  int ret = init_get_bits8(&s->gb, buf, buf_size);
2168  if (ret < 0)
2169  return ret;
2170 
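     /* time_code: 25 bits = drop_frame_flag(1) + hours(5) + minutes(6) + marker(1) + seconds(6) + pictures(6) */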
2171  tc = s1->timecode_frame_start = get_bits(&s->gb, 25);
2172 
2173  s1->closed_gop = get_bits1(&s->gb);
2174  /* broken_link indicates that after editing the
2175  * reference frames of the first B-Frames after GOP I-Frame
2176  * are missing (open gop) */
2177  broken_link = get_bits1(&s->gb);
2178 
2179  if (s->avctx->debug & FF_DEBUG_PICT_INFO) {
2180  char tcbuf[AV_TIMECODE_STR_SIZE];
2181  av_timecode_make_mpeg_tc_string(tcbuf, tc);
2182  av_log(s->avctx, AV_LOG_DEBUG,
2183  "GOP (%s) closed_gop=%d broken_link=%d\n",
2184  tcbuf, s1->closed_gop, broken_link);
2185  }
2186 
2187  return 0;
2188 }
2189 
2190 static void mpeg12_execute_slice_threads(AVCodecContext *avctx,
2191  Mpeg1Context *const s)
2192 {
2193  if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_SLICE) &&
2194  !avctx->hwaccel) {
2195  MpegEncContext *const s2 = &s->mpeg_enc_ctx;
2196  int error_count = 0;
2197 
2198  avctx->execute(avctx, slice_decode_thread,
2199  s2->thread_context, NULL,
2200  s->slice_count, sizeof(void *));
2201 
2202  for (int i = 0; i < s->slice_count; i++) {
2203  MpegEncContext *const slice = s2->thread_context[i];
2204  int slice_err = atomic_load_explicit(&slice->er.error_count,
2205  memory_order_relaxed);
2206  // error_count can get set to INT_MAX on serious errors.
2207  // So use saturated addition.
2208  if ((unsigned)slice_err > INT_MAX - error_count) {
2209  error_count = INT_MAX;
2210  break;
2211  }
2212  error_count += slice_err;
2213  }
2214  atomic_store_explicit(&s2->er.error_count, error_count,
2215  memory_order_relaxed);
2216  }
2217 }
2218 
2219 static int decode_chunks(AVCodecContext *avctx, AVFrame *picture,
2220  int *got_output, const uint8_t *buf, int buf_size)
2221 {
2222  Mpeg1Context *s = avctx->priv_data;
2223  MpegEncContext *s2 = &s->mpeg_enc_ctx;
2224  const uint8_t *buf_ptr = buf;
2225  const uint8_t *buf_end = buf + buf_size;
2226  int ret, input_size;
2227  int last_code = 0, skip_frame = 0;
2228  int picture_start_code_seen = 0;
2229 
2230  for (;;) {
2231  /* find next start code */
2232  uint32_t start_code = -1;
2233  buf_ptr = avpriv_find_start_code(buf_ptr, buf_end, &start_code);
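         /* a value above 0x1ff means no 00 00 01 start code was found before buf_end:
          * flush any pending slice threads and finish the current frame */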
2234  if (start_code > 0x1ff) {
2235  if (!skip_frame) {
2236  mpeg12_execute_slice_threads(avctx, s);
2237 
2238  ret = slice_end(avctx, picture, got_output);
2239  if (ret < 0)
2240  return ret;
2241  }
2242  s2->pict_type = 0;
2243 
2244  if (avctx->err_recognition & AV_EF_EXPLODE && s2->er.error_count)
2245  return AVERROR_INVALIDDATA;
2246 
2247  return FFMAX(0, buf_ptr - buf);
2248  }
2249 
2250  input_size = buf_end - buf_ptr;
2251 
2252  if (avctx->debug & FF_DEBUG_STARTCODE)
2253  av_log(avctx, AV_LOG_DEBUG, "%3"PRIX32" at %"PTRDIFF_SPECIFIER" left %d\n",
2254  start_code, buf_ptr - buf, input_size);
2255 
2256  /* prepare data for next start code */
2257  switch (start_code) {
2258  case SEQ_START_CODE:
2259  if (last_code == 0) {
2260  mpeg1_decode_sequence(avctx, buf_ptr, input_size);
2261  if (buf != avctx->extradata)
2262  s->sync = 1;
2263  } else {
2264  av_log(avctx, AV_LOG_ERROR,
2265  "ignoring SEQ_START_CODE after %X\n", last_code);
2266  if (avctx->err_recognition & AV_EF_EXPLODE)
2267  return AVERROR_INVALIDDATA;
2268  }
2269  break;
2270 
2271  case PICTURE_START_CODE:
2272  if (picture_start_code_seen && s2->picture_structure == PICT_FRAME) {
2273  /* If it's a frame picture, there can't be more than one picture header.
2274  Yet, it does happen and we need to handle it. */
2275  av_log(avctx, AV_LOG_WARNING, "ignoring extra picture following a frame-picture\n");
2276  break;
2277  }
2278  picture_start_code_seen = 1;
2279 
2280  if (buf == avctx->extradata && avctx->codec_tag == AV_RL32("AVmp")) {
2281  av_log(avctx, AV_LOG_WARNING, "ignoring picture start code in AVmp extradata\n");
2282  break;
2283  }
2284 
2285  if (s2->width <= 0 || s2->height <= 0) {
2286  av_log(avctx, AV_LOG_ERROR, "Invalid frame dimensions %dx%d.\n",
2287  s2->width, s2->height);
2288  return AVERROR_INVALIDDATA;
2289  }
2290 
2291  if (s->tmpgexs){
2292  s2->intra_dc_precision= 3;
2293  s2->intra_matrix[0]= 1;
2294  }
2295  if (s->slice_count) {
2296  mpeg12_execute_slice_threads(avctx, s);
2297  s->slice_count = 0;
2298  }
2299  if (last_code == 0 || last_code == SLICE_MIN_START_CODE) {
2300  ret = mpeg_decode_postinit(avctx);
2301  if (ret < 0) {
2302  av_log(avctx, AV_LOG_ERROR,
2303  "mpeg_decode_postinit() failure\n");
2304  return ret;
2305  }
2306 
2307  /* We have a complete image: we try to decompress it. */
2308  if (mpeg1_decode_picture(avctx, buf_ptr, input_size) < 0)
2309  s2->pict_type = 0;
2310  s->first_slice = 1;
2311  last_code = PICTURE_START_CODE;
2312  } else {
2313  av_log(avctx, AV_LOG_ERROR,
2314  "ignoring pic after %X\n", last_code);
2315  if (avctx->err_recognition & AV_EF_EXPLODE)
2316  return AVERROR_INVALIDDATA;
2317  }
2318  break;
2319  case EXT_START_CODE:
2320  ret = init_get_bits8(&s2->gb, buf_ptr, input_size);
2321  if (ret < 0)
2322  return ret;
2323 
2324  switch (get_bits(&s2->gb, 4)) {
2325  case 0x1:
2326  if (last_code == 0) {
2327  mpeg_decode_sequence_extension(s);
2328  } else {
2329  av_log(avctx, AV_LOG_ERROR,
2330  "ignoring seq ext after %X\n", last_code);
2331  if (avctx->err_recognition & AV_EF_EXPLODE)
2332  return AVERROR_INVALIDDATA;
2333  }
2334  break;
2335  case 0x2:
2336  mpeg_decode_sequence_display_extension(s);
2337  break;
2338  case 0x3:
2339  mpeg_decode_quant_matrix_extension(s2);
2340  break;
2341  case 0x7:
2342  mpeg_decode_picture_display_extension(s);
2343  break;
2344  case 0x8:
2345  if (last_code == PICTURE_START_CODE) {
2346  ret = mpeg_decode_picture_coding_extension(s);
2347  if (ret < 0)
2348  return ret;
2349  } else {
2350  av_log(avctx, AV_LOG_ERROR,
2351  "ignoring pic cod ext after %X\n", last_code);
2352  if (avctx->err_recognition & AV_EF_EXPLODE)
2353  return AVERROR_INVALIDDATA;
2354  }
2355  break;
2356  }
2357  break;
2358  case USER_START_CODE:
2359  mpeg_decode_user_data(avctx, buf_ptr, input_size);
2360  break;
2361  case GOP_START_CODE:
2362  if (last_code == 0) {
2363  s2->first_field = 0;
2364  ret = mpeg_decode_gop(avctx, buf_ptr, input_size);
2365  if (ret < 0)
2366  return ret;
2367  s->sync = 1;
2368  } else {
2369  av_log(avctx, AV_LOG_ERROR,
2370  "ignoring GOP_START_CODE after %X\n", last_code);
2371  if (avctx->err_recognition & AV_EF_EXPLODE)
2372  return AVERROR_INVALIDDATA;
2373  }
2374  break;
2375  default:
2376  if (start_code >= SLICE_MIN_START_CODE &&
2377  start_code <= SLICE_MAX_START_CODE && last_code == PICTURE_START_CODE) {
2378  if (s2->progressive_sequence && !s2->progressive_frame) {
2379  s2->progressive_frame = 1;
2380  av_log(s2->avctx, AV_LOG_ERROR,
2381  "interlaced frame in progressive sequence, ignoring\n");
2382  }
2383 
2384  if (s2->picture_structure == 0 ||
2385  (s2->progressive_frame && s2->picture_structure != PICT_FRAME)) {
2386  av_log(s2->avctx, AV_LOG_ERROR,
2387  "picture_structure %d invalid, ignoring\n",
2388  s2->picture_structure);
2389  s2->picture_structure = PICT_FRAME;
2390  }
2391 
2392  if (s2->progressive_sequence && !s2->frame_pred_frame_dct)
2393  av_log(s2->avctx, AV_LOG_WARNING, "invalid frame_pred_frame_dct\n");
2394 
2395  if (s2->picture_structure == PICT_FRAME) {
2396  s2->first_field = 0;
2397  s2->v_edge_pos = 16 * s2->mb_height;
2398  } else {
2399  s2->first_field ^= 1;
2400  s2->v_edge_pos = 8 * s2->mb_height;
2401  memset(s2->mbskip_table, 0, s2->mb_stride * s2->mb_height);
2402  }
2403  }
2404  if (start_code >= SLICE_MIN_START_CODE &&
2405  start_code <= SLICE_MAX_START_CODE && last_code != 0) {
2406  const int field_pic = s2->picture_structure != PICT_FRAME;
2407  int mb_y = start_code - SLICE_MIN_START_CODE;
2408  last_code = SLICE_MIN_START_CODE;
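                 // MPEG-2 slice_vertical_position_extension: for pictures taller than 2800 lines,
                 // the top 3 bits of the first slice byte extend the macroblock row number.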
2409  if (s2->codec_id != AV_CODEC_ID_MPEG1VIDEO && s2->mb_height > 2800/16)
2410  mb_y += (*buf_ptr&0xE0)<<2;
2411 
2412  mb_y <<= field_pic;
2413  if (s2->picture_structure == PICT_BOTTOM_FIELD)
2414  mb_y++;
2415 
2416  if (buf_end - buf_ptr < 2) {
2417  av_log(s2->avctx, AV_LOG_ERROR, "slice too small\n");
2418  return AVERROR_INVALIDDATA;
2419  }
2420 
2421  if (mb_y >= s2->mb_height) {
2422  av_log(s2->avctx, AV_LOG_ERROR,
2423  "slice below image (%d >= %d)\n", mb_y, s2->mb_height);
2424  return AVERROR_INVALIDDATA;
2425  }
2426 
2427  if (!s2->last_pic.ptr) {
2428  /* Skip B-frames if we do not have reference frames and
2429  * GOP is not closed. */
2430  if (s2->pict_type == AV_PICTURE_TYPE_B) {
2431  if (!s->closed_gop) {
2432  skip_frame = 1;
2433  av_log(s2->avctx, AV_LOG_DEBUG,
2434  "Skipping B slice due to open GOP\n");
2435  break;
2436  }
2437  }
2438  }
2439  if (s2->pict_type == AV_PICTURE_TYPE_I || (avctx->flags2 & AV_CODEC_FLAG2_SHOW_ALL))
2440  s->sync = 1;
2441  if (!s2->next_pic.ptr) {
2442  /* Skip P-frames if we do not have a reference frame or
2443  * we have an invalid header. */
2444  if (s2->pict_type == AV_PICTURE_TYPE_P && !s->sync) {
2445  skip_frame = 1;
2446  av_log(s2->avctx, AV_LOG_DEBUG,
2447  "Skipping P slice due to !sync\n");
2448  break;
2449  }
2450  }
2451  if ((avctx->skip_frame >= AVDISCARD_NONREF &&
2452  s2->pict_type == AV_PICTURE_TYPE_B) ||
2453  (avctx->skip_frame >= AVDISCARD_NONKEY &&
2454  s2->pict_type != AV_PICTURE_TYPE_I) ||
2455  avctx->skip_frame >= AVDISCARD_ALL) {
2456  skip_frame = 1;
2457  break;
2458  }
2459 
2460  if (!s2->context_initialized)
2461  break;
2462 
2463  if (s2->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
2464  if (mb_y < avctx->skip_top ||
2465  mb_y >= s2->mb_height - avctx->skip_bottom)
2466  break;
2467  }
2468 
2469  if (!s2->pict_type) {
2470  av_log(avctx, AV_LOG_ERROR, "Missing picture start code\n");
2471  if (avctx->err_recognition & AV_EF_EXPLODE)
2472  return AVERROR_INVALIDDATA;
2473  break;
2474  }
2475 
2476  if (s->first_slice) {
2477  skip_frame = 0;
2478  s->first_slice = 0;
2479  if ((ret = mpeg_field_start(s, buf, buf_size)) < 0)
2480  return ret;
2481  }
2482  if (!s2->cur_pic.ptr) {
2483  av_log(avctx, AV_LOG_ERROR,
2484  "current_picture not initialized\n");
2485  return AVERROR_INVALIDDATA;
2486  }
2487 
2488  if (HAVE_THREADS &&
2489  (avctx->active_thread_type & FF_THREAD_SLICE) &&
2490  !avctx->hwaccel) {
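                     // Hand slices out to worker contexts in roughly equal bands of macroblock rows;
                     // a new thread context is started once mb_y crosses the next band boundary.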
2491  int threshold = (s2->mb_height * s->slice_count +
2492  s2->slice_context_count / 2) /
2493  s2->slice_context_count;
2494  if (threshold <= mb_y) {
2495  MpegEncContext *thread_context = s2->thread_context[s->slice_count];
2496 
2497  thread_context->start_mb_y = mb_y;
2498  thread_context->end_mb_y = s2->mb_height;
2499  if (s->slice_count) {
2500  s2->thread_context[s->slice_count - 1]->end_mb_y = mb_y;
2501  ret = ff_update_duplicate_context(thread_context, s2);
2502  if (ret < 0)
2503  return ret;
2504  }
2505  ret = init_get_bits8(&thread_context->gb, buf_ptr, input_size);
2506  if (ret < 0)
2507  return ret;
2508  s->slice_count++;
2509  }
2510  buf_ptr += 2; // FIXME add minimum number of bytes per slice
2511  } else {
2512  ret = mpeg_decode_slice(s2, mb_y, &buf_ptr, input_size);
2513  emms_c();
2514 
2515  if (ret < 0) {
2516  if (avctx->err_recognition & AV_EF_EXPLODE)
2517  return ret;
2518  if (s2->resync_mb_x >= 0 && s2->resync_mb_y >= 0)
2519  ff_er_add_slice(&s2->er, s2->resync_mb_x,
2520  s2->resync_mb_y, s2->mb_x, s2->mb_y,
2521  ER_AC_ERROR | ER_DC_ERROR | ER_MV_ERROR);
2522  } else {
2523  ff_er_add_slice(&s2->er, s2->resync_mb_x,
2524  s2->resync_mb_y, s2->mb_x - 1, s2->mb_y,
2525  ER_AC_END | ER_DC_END | ER_MV_END);
2526  }
2527  }
2528  }
2529  break;
2530  }
2531  }
2532 }
2533 
2534 static int mpeg_decode_frame(AVCodecContext *avctx, AVFrame *picture,
2535  int *got_output, AVPacket *avpkt)
2536 {
2537  const uint8_t *buf = avpkt->data;
2538  int ret;
2539  int buf_size = avpkt->size;
2540  Mpeg1Context *s = avctx->priv_data;
2541  MpegEncContext *s2 = &s->mpeg_enc_ctx;
2542 
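     /* an empty packet or a lone sequence_end_code flushes the delayed reference frame as the final picture */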
2543  if (buf_size == 0 || (buf_size == 4 && AV_RB32(buf) == SEQ_END_CODE)) {
2544  /* special case for last picture */
2545  if (s2->low_delay == 0 && s2->next_pic.ptr) {
2546  int ret = av_frame_ref(picture, s2->next_pic.ptr->f);
2547  if (ret < 0)
2548  return ret;
2549 
2551 
2552  *got_output = 1;
2553  }
2554  return buf_size;
2555  }
2556 
2557  if (!s2->context_initialized &&
2558  (s2->codec_tag == AV_RL32("VCR2") || s2->codec_tag == AV_RL32("BW10")))
2559  vcr2_init_sequence(avctx);
2560 
2561  s->slice_count = 0;
2562 
2563  if (avctx->extradata && !s->extradata_decoded) {
2564  ret = decode_chunks(avctx, picture, got_output,
2565  avctx->extradata, avctx->extradata_size);
2566  if (*got_output) {
2567  av_log(avctx, AV_LOG_ERROR, "picture in extradata\n");
2568  av_frame_unref(picture);
2569  *got_output = 0;
2570  }
2571  s->extradata_decoded = 1;
2572  if (ret < 0 && (avctx->err_recognition & AV_EF_EXPLODE)) {
2574  return ret;
2575  }
2576  }
2577 
2578  ret = decode_chunks(avctx, picture, got_output, buf, buf_size);
2579  if (ret<0 || *got_output) {
2581 
2582  if (s->timecode_frame_start != -1 && *got_output) {
2583  char tcbuf[AV_TIMECODE_STR_SIZE];
2584  AVFrameSideData *tcside = av_frame_new_side_data(picture,
2585  AV_FRAME_DATA_GOP_TIMECODE,
2586  sizeof(int64_t));
2587  if (!tcside)
2588  return AVERROR(ENOMEM);
2589  memcpy(tcside->data, &s->timecode_frame_start, sizeof(int64_t));
2590 
2591  av_timecode_make_mpeg_tc_string(tcbuf, s->timecode_frame_start);
2592  av_dict_set(&picture->metadata, "timecode", tcbuf, 0);
2593 
2594  s->timecode_frame_start = -1;
2595  }
2596  }
2597 
2598  return ret;
2599 }
2600 
2601 static av_cold void flush(AVCodecContext *avctx)
2602 {
2603  Mpeg1Context *s = avctx->priv_data;
2604 
2605  s->sync = 0;
2606  s->closed_gop = 0;
2607 
2608  av_buffer_unref(&s->a53_buf_ref);
2609  ff_mpeg_flush(avctx);
2610 }
2611 
2612 static av_cold int mpeg_decode_end(AVCodecContext *avctx)
2613 {
2614  Mpeg1Context *s = avctx->priv_data;
2615 
2616  av_buffer_unref(&s->a53_buf_ref);
2617  return ff_mpv_decode_close(avctx);
2618 }
2619 
2620 const FFCodec ff_mpeg1video_decoder = {
2621  .p.name = "mpeg1video",
2622  CODEC_LONG_NAME("MPEG-1 video"),
2623  .p.type = AVMEDIA_TYPE_VIDEO,
2624  .p.id = AV_CODEC_ID_MPEG1VIDEO,
2625  .priv_data_size = sizeof(Mpeg1Context),
2629  .p.capabilities = AV_CODEC_CAP_DRAW_HORIZ_BAND | AV_CODEC_CAP_DR1 |
2631  .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM,
2632  .flush = flush,
2633  .p.max_lowres = 3,
2634  .hw_configs = (const AVCodecHWConfigInternal *const []) {
2635 #if CONFIG_MPEG1_NVDEC_HWACCEL
2636  HWACCEL_NVDEC(mpeg1),
2637 #endif
2638 #if CONFIG_MPEG1_VDPAU_HWACCEL
2639  HWACCEL_VDPAU(mpeg1),
2640 #endif
2641 #if CONFIG_MPEG1_VIDEOTOOLBOX_HWACCEL
2642  HWACCEL_VIDEOTOOLBOX(mpeg1),
2643 #endif
2644  NULL
2645  },
2646 };
2647 
2648 #define M2V_OFFSET(x) offsetof(Mpeg1Context, x)
2649 #define M2V_PARAM AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM
2650 
2651 static const AVOption mpeg2video_options[] = {
2652  { "cc_format", "extract a specific Closed Captions format",
2653  M2V_OFFSET(cc_format), AV_OPT_TYPE_INT, { .i64 = CC_FORMAT_AUTO },
2654  CC_FORMAT_AUTO, CC_FORMAT_DISH, M2V_PARAM, .unit = "cc_format" },
2655 
2656  { "auto", "pick first seen CC substream", 0, AV_OPT_TYPE_CONST,
2657  { .i64 = CC_FORMAT_AUTO }, .flags = M2V_PARAM, .unit = "cc_format" },
2658  { "a53", "pick A/53 Part 4 CC substream", 0, AV_OPT_TYPE_CONST,
2659  { .i64 = CC_FORMAT_A53_PART4 }, .flags = M2V_PARAM, .unit = "cc_format" },
2660  { "scte20", "pick SCTE-20 CC substream", 0, AV_OPT_TYPE_CONST,
2661  { .i64 = CC_FORMAT_SCTE20 }, .flags = M2V_PARAM, .unit = "cc_format" },
2662  { "dvd", "pick DVD CC substream", 0, AV_OPT_TYPE_CONST,
2663  { .i64 = CC_FORMAT_DVD }, .flags = M2V_PARAM, .unit = "cc_format" },
2664  { "dish", "pick Dish Network CC substream", 0, AV_OPT_TYPE_CONST,
2665  { .i64 = CC_FORMAT_DISH }, .flags = M2V_PARAM, .unit = "cc_format" },
2666  { NULL }
2667 };
2668 
2669 static const AVClass mpeg2video_class = {
2670  .class_name = "MPEG-2 video",
2671  .item_name = av_default_item_name,
2672  .option = mpeg2video_options,
2673  .version = LIBAVUTIL_VERSION_INT,
2674  .category = AV_CLASS_CATEGORY_DECODER,
2675 };
2676 
2677 const FFCodec ff_mpeg2video_decoder = {
2678  .p.name = "mpeg2video",
2679  CODEC_LONG_NAME("MPEG-2 video"),
2680  .p.type = AVMEDIA_TYPE_VIDEO,
2681  .p.id = AV_CODEC_ID_MPEG2VIDEO,
2682  .p.priv_class = &mpeg2video_class,
2683  .priv_data_size = sizeof(Mpeg1Context),
2687  .p.capabilities = AV_CODEC_CAP_DRAW_HORIZ_BAND | AV_CODEC_CAP_DR1 |
2689  .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM,
2690  .flush = flush,
2691  .p.max_lowres = 3,
2693  .hw_configs = (const AVCodecHWConfigInternal *const []) {
2694 #if CONFIG_MPEG2_DXVA2_HWACCEL
2695  HWACCEL_DXVA2(mpeg2),
2696 #endif
2697 #if CONFIG_MPEG2_D3D11VA_HWACCEL
2698  HWACCEL_D3D11VA(mpeg2),
2699 #endif
2700 #if CONFIG_MPEG2_D3D11VA2_HWACCEL
2701  HWACCEL_D3D11VA2(mpeg2),
2702 #endif
2703 #if CONFIG_MPEG2_D3D12VA_HWACCEL
2704  HWACCEL_D3D12VA(mpeg2),
2705 #endif
2706 #if CONFIG_MPEG2_NVDEC_HWACCEL
2707  HWACCEL_NVDEC(mpeg2),
2708 #endif
2709 #if CONFIG_MPEG2_VAAPI_HWACCEL
2710  HWACCEL_VAAPI(mpeg2),
2711 #endif
2712 #if CONFIG_MPEG2_VDPAU_HWACCEL
2713  HWACCEL_VDPAU(mpeg2),
2714 #endif
2715 #if CONFIG_MPEG2_VIDEOTOOLBOX_HWACCEL
2716  HWACCEL_VIDEOTOOLBOX(mpeg2),
2717 #endif
2718  NULL
2719  },
2720 };
2721 
2722 //legacy decoder
2723 const FFCodec ff_mpegvideo_decoder = {
2724  .p.name = "mpegvideo",
2725  CODEC_LONG_NAME("MPEG-1 video"),
2726  .p.type = AVMEDIA_TYPE_VIDEO,
2727  .p.id = AV_CODEC_ID_MPEG2VIDEO,
2728  .priv_data_size = sizeof(Mpeg1Context),
2732  .p.capabilities = AV_CODEC_CAP_DRAW_HORIZ_BAND | AV_CODEC_CAP_DR1 |
2734  .caps_internal = FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM,
2735  .flush = flush,
2736  .p.max_lowres = 3,
2737 };
2738 
2739 typedef struct IPUContext {
2740  MpegEncContext m;
2741 
2742  int flags;
2743  DECLARE_ALIGNED(32, int16_t, block)[6][64];
2744 } IPUContext;
2745 
2746 static int ipu_decode_frame(AVCodecContext *avctx, AVFrame *frame,
2747  int *got_frame, AVPacket *avpkt)
2748 {
2749  IPUContext *s = avctx->priv_data;
2750  MpegEncContext *m = &s->m;
2751  GetBitContext *gb = &m->gb;
2752  int ret;
2753 
2754  // Check for minimal intra MB size (considering mb header, luma & chroma dc VLC, ac EOB VLC)
2755  if (avpkt->size*8LL < (avctx->width+15)/16 * ((avctx->height+15)/16) * (2LL + 3*4 + 2*2 + 2*6))
2756  return AVERROR_INVALIDDATA;
2757 
2758  ret = ff_get_buffer(avctx, frame, 0);
2759  if (ret < 0)
2760  return ret;
2761 
2762  ret = init_get_bits8(gb, avpkt->data, avpkt->size);
2763  if (ret < 0)
2764  return ret;
2765 
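     // 8-bit frame header: bits 0-1 intra_dc_precision, 0x10 alternate_scan, 0x20 intra_vlc_format,
     // 0x40 q_scale_type; bits 0x04 and 0x80 are checked per macroblock below.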
2766  s->flags = get_bits(gb, 8);
2767  m->intra_dc_precision = s->flags & 3;
2768  m->q_scale_type = !!(s->flags & 0x40);
2769  m->intra_vlc_format = !!(s->flags & 0x20);
2770  m->alternate_scan = !!(s->flags & 0x10);
2771 
2772  ff_permute_scantable(m->intra_scantable.permutated,
2773  s->flags & 0x10 ? ff_alternate_vertical_scan : ff_zigzag_direct,
2774  m->idsp.idct_permutation);
2775 
2776  m->last_dc[0] = m->last_dc[1] = m->last_dc[2] = 1 << (7 + (s->flags & 3));
2777  m->qscale = 1;
2778 
2779  for (int y = 0; y < avctx->height; y += 16) {
2780  int intraquant;
2781 
2782  for (int x = 0; x < avctx->width; x += 16) {
2783  if (x || y) {
2784  if (!get_bits1(gb))
2785  return AVERROR_INVALIDDATA;
2786  }
2787  if (get_bits1(gb)) {
2788  intraquant = 0;
2789  } else {
2790  if (!get_bits1(gb))
2791  return AVERROR_INVALIDDATA;
2792  intraquant = 1;
2793  }
2794 
2795  if (s->flags & 4)
2796  skip_bits1(gb);
2797 
2798  if (intraquant)
2799  m->qscale = mpeg_get_qscale(m);
2800 
2801  memset(s->block, 0, sizeof(s->block));
2802 
2803  for (int n = 0; n < 6; n++) {
2804  if (s->flags & 0x80) {
2805  ret = ff_mpeg1_decode_block_intra(&m->gb,
2806  m->intra_matrix,
2807  m->intra_scantable.permutated,
2808  m->last_dc, s->block[n],
2809  n, m->qscale);
2810  } else {
2811  ret = mpeg2_decode_block_intra(m, s->block[n], n);
2812  }
2813 
2814  if (ret < 0)
2815  return ret;
2816  }
2817 
2818  m->idsp.idct_put(frame->data[0] + y * frame->linesize[0] + x,
2819  frame->linesize[0], s->block[0]);
2820  m->idsp.idct_put(frame->data[0] + y * frame->linesize[0] + x + 8,
2821  frame->linesize[0], s->block[1]);
2822  m->idsp.idct_put(frame->data[0] + (y + 8) * frame->linesize[0] + x,
2823  frame->linesize[0], s->block[2]);
2824  m->idsp.idct_put(frame->data[0] + (y + 8) * frame->linesize[0] + x + 8,
2825  frame->linesize[0], s->block[3]);
2826  m->idsp.idct_put(frame->data[1] + (y >> 1) * frame->linesize[1] + (x >> 1),
2827  frame->linesize[1], s->block[4]);
2828  m->idsp.idct_put(frame->data[2] + (y >> 1) * frame->linesize[2] + (x >> 1),
2829  frame->linesize[2], s->block[5]);
2830  }
2831  }
2832 
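     // after all macroblocks, exactly one 32-bit code word should remain once the reader is byte-aligned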
2833  align_get_bits(gb);
2834  if (get_bits_left(gb) != 32)
2835  return AVERROR_INVALIDDATA;
2836 
2837  *got_frame = 1;
2838 
2839  return avpkt->size;
2840 }
2841 
2842 static av_cold int ipu_decode_init(AVCodecContext *avctx)
2843 {
2844  IPUContext *s = avctx->priv_data;
2845  MpegEncContext *m = &s->m;
2846 
2847  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
2848  m->avctx = avctx;
2849 
2850  ff_idctdsp_init(&m->idsp, avctx);
2852 
2853  for (int i = 0; i < 64; i++) {
2854  int j = m->idsp.idct_permutation[i];
2855  int v = ff_mpeg1_default_intra_matrix[i];
2856  m->intra_matrix[j] = v;
2857  m->chroma_intra_matrix[j] = v;
2858  }
2859 
2860  return 0;
2861 }
2862 
2863 const FFCodec ff_ipu_decoder = {
2864  .p.name = "ipu",
2865  CODEC_LONG_NAME("IPU Video"),
2866  .p.type = AVMEDIA_TYPE_VIDEO,
2867  .p.id = AV_CODEC_ID_IPU,
2868  .priv_data_size = sizeof(IPUContext),
2869  .init = ipu_decode_init,
2870  FF_CODEC_DECODE_CB(ipu_decode_frame),
2871  .p.capabilities = AV_CODEC_CAP_DR1,
2872 };
PICT_FRAME
#define PICT_FRAME
Definition: mpegutils.h:33
vcr2_init_sequence
static int vcr2_init_sequence(AVCodecContext *avctx)
Definition: mpeg12dec.c:1814
flags
const SwsFlags flags[]
Definition: swscale.c:61
HWACCEL_D3D12VA
#define HWACCEL_D3D12VA(codec)
Definition: hwconfig.h:80
ff_mpv_common_init
av_cold int ff_mpv_common_init(MpegEncContext *s)
init common structure for both encoder and decoder.
Definition: mpegvideo.c:386
hwconfig.h
AVCodecContext::hwaccel
const struct AVHWAccel * hwaccel
Hardware accelerator in use.
Definition: avcodec.h:1405
FF_ENABLE_DEPRECATION_WARNINGS
#define FF_ENABLE_DEPRECATION_WARNINGS
Definition: internal.h:73
MV_TYPE_16X16
#define MV_TYPE_16X16
1 vector for the whole mb
Definition: mpegvideo.h:185
Mpeg1Context::has_afd
int has_afd
Definition: mpeg12dec.c:80
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:216
AV_TIMECODE_STR_SIZE
#define AV_TIMECODE_STR_SIZE
Definition: timecode.h:33
AV_PIX_FMT_CUDA
@ AV_PIX_FMT_CUDA
HW acceleration through CUDA.
Definition: pixfmt.h:260
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
MpegEncContext::progressive_sequence
int progressive_sequence
Definition: mpegvideo.h:285
M2V_OFFSET
#define M2V_OFFSET(x)
Definition: mpeg12dec.c:2648
ff_mb_pat_vlc
VLCElem ff_mb_pat_vlc[512]
Definition: mpeg12.c:145
level
uint8_t level
Definition: svq3.c:208
AV_EF_EXPLODE
#define AV_EF_EXPLODE
abort decoding on minor error detection
Definition: defs.h:51
Mpeg1Context::a53_buf_ref
AVBufferRef * a53_buf_ref
Definition: mpeg12dec.c:77
ff_mpeg2_aspect
const AVRational ff_mpeg2_aspect[16]
Definition: mpeg12data.c:380
AVPanScan::position
int16_t position[3][2]
position of the top left corner in 1/16 pel for up to 3 fields/frames
Definition: defs.h:271
show_bits_long
static unsigned int show_bits_long(GetBitContext *s, int n)
Show 0-32 bits.
Definition: get_bits.h:478
mpeg_decode_a53_cc
static int mpeg_decode_a53_cc(AVCodecContext *avctx, const uint8_t *p, int buf_size)
Definition: mpeg12dec.c:1884
get_bits_left
static int get_bits_left(GetBitContext *gb)
Definition: get_bits.h:678
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
decode_slice
static int decode_slice(AVCodecContext *c, void *arg)
Definition: ffv1dec.c:355
ff_mpv_decode_init
av_cold int ff_mpv_decode_init(MpegEncContext *s, AVCodecContext *avctx)
Initialize the given MpegEncContext for decoding.
Definition: mpegvideo_dec.c:47
AV_CLASS_CATEGORY_DECODER
@ AV_CLASS_CATEGORY_DECODER
Definition: log.h:35
AV_STEREO3D_SIDEBYSIDE_QUINCUNX
@ AV_STEREO3D_SIDEBYSIDE_QUINCUNX
Views are next to each other, but when upscaling apply a checkerboard pattern.
Definition: stereo3d.h:114
FF_MPV_QSCALE_TYPE_MPEG2
#define FF_MPV_QSCALE_TYPE_MPEG2
Definition: mpegvideodec.h:41
mem_internal.h
ff_get_format
int ff_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
Select the (possibly hardware accelerated) pixel format.
Definition: decode.c:1203
mpeg_decode_frame
static int mpeg_decode_frame(AVCodecContext *avctx, AVFrame *picture, int *got_output, AVPacket *avpkt)
Definition: mpeg12dec.c:2534
MpegEncContext::gb
GetBitContext gb
Definition: mpegvideo.h:282
AV_EF_COMPLIANT
#define AV_EF_COMPLIANT
consider all spec non compliances as errors
Definition: defs.h:55
MpegEncContext::top_field_first
int top_field_first
Definition: mpegvideo.h:293
SEQ_END_CODE
#define SEQ_END_CODE
Definition: mpeg12.h:28
av_frame_new_side_data
AVFrameSideData * av_frame_new_side_data(AVFrame *frame, enum AVFrameSideDataType type, size_t size)
Add a new side data to a frame.
Definition: frame.c:645
check_scantable_index
#define check_scantable_index(ctx, x)
Definition: mpeg12dec.c:124
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AV_FRAME_DATA_A53_CC
@ AV_FRAME_DATA_A53_CC
ATSC A53 Part 4 Closed Captions.
Definition: frame.h:59
MT_FIELD
#define MT_FIELD
Definition: mpeg12dec.c:393
EXT_START_CODE
#define EXT_START_CODE
Definition: cavs.h:39
MV_TYPE_16X8
#define MV_TYPE_16X8
2 vectors, one per 16x8 block
Definition: mpegvideo.h:187
av_div_q
AVRational av_div_q(AVRational b, AVRational c)
Divide one rational by another.
Definition: rational.c:88
AVPanScan
Pan Scan area.
Definition: defs.h:250
AVCodecContext::err_recognition
int err_recognition
Error recognition; may misdetect some more or less valid parts as errors.
Definition: avcodec.h:1398
SLICE_MAX_START_CODE
#define SLICE_MAX_START_CODE
Definition: cavs.h:38
int64_t
long long int64_t
Definition: coverity.c:34
MB_TYPE_16x8
#define MB_TYPE_16x8
Definition: mpegutils.h:42
get_bits_count
static int get_bits_count(const GetBitContext *s)
Definition: get_bits.h:249
Mpeg1Context::vbv_delay
int vbv_delay
Definition: mpeg12dec.c:91
ipu_decode_init
static av_cold int ipu_decode_init(AVCodecContext *avctx)
Definition: mpeg12dec.c:2842
ff_update_duplicate_context
int ff_update_duplicate_context(MpegEncContext *dst, const MpegEncContext *src)
Definition: mpegvideo.c:204
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:410
start_code
static const uint8_t start_code[]
Definition: videotoolboxenc.c:230
w
uint8_t w
Definition: llviddspenc.c:38
HWACCEL_DXVA2
#define HWACCEL_DXVA2(codec)
Definition: hwconfig.h:64
ff_mpegvideo_decoder
const FFCodec ff_mpegvideo_decoder
Definition: mpeg12dec.c:2723
AVPacket::data
uint8_t * data
Definition: packet.h:535
mpeg_decode_mb
static int mpeg_decode_mb(MpegEncContext *s, int16_t block[12][64])
Definition: mpeg12dec.c:398
Mpeg1Context::closed_gop
int closed_gop
Definition: mpeg12dec.c:87
mpeg2_decode_block_intra
static int mpeg2_decode_block_intra(MpegEncContext *s, int16_t *block, int n)
Definition: mpeg12dec.c:302
AVOption
AVOption.
Definition: opt.h:429
HWACCEL_D3D11VA2
#define HWACCEL_D3D11VA2(codec)
Definition: hwconfig.h:66
ff_reverse
const uint8_t ff_reverse[256]
Definition: reverse.c:23
MpegEncContext::last_dc
int last_dc[3]
last DC values for MPEG-1
Definition: mpegvideo.h:142
MB_TYPE_16x16
#define MB_TYPE_16x16
Definition: mpegutils.h:41
AV_PIX_FMT_D3D11VA_VLD
@ AV_PIX_FMT_D3D11VA_VLD
HW decoding through Direct3D11 via old API, Picture.data[3] contains a ID3D11VideoDecoderOutputView p...
Definition: pixfmt.h:254
FFCodec
Definition: codec_internal.h:127
ff_mpv_framesize_disable
static void ff_mpv_framesize_disable(ScratchpadContext *sc)
Disable allocating the ScratchpadContext's buffers in future calls to ff_mpv_framesize_alloc().
Definition: mpegpicture.h:143
PICT_BOTTOM_FIELD
#define PICT_BOTTOM_FIELD
Definition: mpegutils.h:32
FF_HW_SIMPLE_CALL
#define FF_HW_SIMPLE_CALL(avctx, function)
Definition: hwaccel_internal.h:176
ff_er_add_slice
void ff_er_add_slice(ERContext *s, int startx, int starty, int endx, int endy, int status)
Add a slice.
Definition: error_resilience.c:826
ff_init_block_index
void ff_init_block_index(MpegEncContext *s)
Definition: mpegvideo.c:514
reverse.h
mpegvideo.h
MpegEncContext::avctx
struct AVCodecContext * avctx
Definition: mpegvideo.h:82
UPDATE_CACHE
#define UPDATE_CACHE(name, gb)
Definition: get_bits.h:208
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
Mpeg1Context::first_slice
int first_slice
Definition: mpeg12dec.c:89
ER_DC_END
#define ER_DC_END
Definition: error_resilience.h:33
mpeg_decode_postinit
static int mpeg_decode_postinit(AVCodecContext *avctx)
Definition: mpeg12dec.c:855
MpegEncContext::height
int height
picture size. must be a multiple of 16
Definition: mpegvideo.h:87
mpegutils.h
ff_set_dimensions
int ff_set_dimensions(AVCodecContext *s, int width, int height)
Check that the provided frame dimensions are valid and set them on the codec context.
Definition: utils.c:91
ER_MV_ERROR
#define ER_MV_ERROR
Definition: error_resilience.h:31
ff_idctdsp_init
av_cold void ff_idctdsp_init(IDCTDSPContext *c, AVCodecContext *avctx)
Definition: idctdsp.c:228
SEQ_START_CODE
#define SEQ_START_CODE
Definition: mpeg12.h:29
FF_DEBUG_PICT_INFO
#define FF_DEBUG_PICT_INFO
Definition: avcodec.h:1375
MV_TYPE_DMV
#define MV_TYPE_DMV
2 vectors, special mpeg2 Dual Prime Vectors
Definition: mpegvideo.h:189
CC_FORMAT_DISH
@ CC_FORMAT_DISH
Definition: mpeg12dec.c:69
MpegEncContext::out_format
enum OutputFormat out_format
output format
Definition: mpegvideo.h:88
AV_FRAME_FLAG_TOP_FIELD_FIRST
#define AV_FRAME_FLAG_TOP_FIELD_FIRST
A flag to mark frames where the top field is displayed first if the content is interlaced.
Definition: frame.h:638
GET_CACHE
#define GET_CACHE(name, gb)
Definition: get_bits.h:246
skip_bits
static void skip_bits(GetBitContext *s, int n)
Definition: get_bits.h:364
ff_mpeg2_rl_vlc
RL_VLC_ELEM ff_mpeg2_rl_vlc[674]
Definition: mpeg12.c:148
MpegEncContext::intra_scantable
ScanTable intra_scantable
Definition: mpegvideo.h:78
AVCodecContext::framerate
AVRational framerate
Definition: avcodec.h:551
ff_permute_scantable
av_cold void ff_permute_scantable(uint8_t dst[64], const uint8_t src[64], const uint8_t permutation[64])
Definition: idctdsp.c:30
close
static av_cold void close(AVCodecParserContext *s)
Definition: apv_parser.c:135
AV_STEREO3D_SIDEBYSIDE
@ AV_STEREO3D_SIDEBYSIDE
Views are next to each other.
Definition: stereo3d.h:64
get_bits
static unsigned int get_bits(GetBitContext *s, int n)
Read 1-25 bits.
Definition: get_bits.h:318
mx
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t mx
Definition: dsp.h:57
MT_DMV
#define MT_DMV
Definition: mpeg12dec.c:396
ff_mpv_reconstruct_mb
void ff_mpv_reconstruct_mb(MpegEncContext *s, int16_t block[12][64])
Definition: mpegvideo_dec.c:1067
MpegEncContext::mb_height
int mb_height
number of MBs horizontally & vertically
Definition: mpegvideo.h:104
ff_mbincr_vlc
VLCElem ff_mbincr_vlc[538]
Definition: mpeg12.c:142
FFCodec::p
AVCodec p
The public AVCodec.
Definition: codec_internal.h:131
MpegEncContext::pict_type
int pict_type
AV_PICTURE_TYPE_I, AV_PICTURE_TYPE_P, AV_PICTURE_TYPE_B, ...
Definition: mpegvideo.h:163
slice_end
static int slice_end(AVCodecContext *avctx, AVFrame *pict, int *got_output)
Handle slice ends.
Definition: mpeg12dec.c:1675
FMT_MPEG1
@ FMT_MPEG1
Definition: mpegvideo.h:54
decode_chunks
static int decode_chunks(AVCodecContext *avctx, AVFrame *picture, int *got_output, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:2219
AVCodecContext::skip_frame
enum AVDiscard skip_frame
Skip decoding for selected frames.
Definition: avcodec.h:1662
mpeg_decode_quant_matrix_extension
static void mpeg_decode_quant_matrix_extension(MpegEncContext *s)
Definition: mpeg12dec.c:1139
AV_STEREO3D_2D
@ AV_STEREO3D_2D
Video is not stereoscopic (and metadata has to be there).
Definition: stereo3d.h:52
MpegEncContext::picture_structure
int picture_structure
Definition: mpegvideo.h:289
wrap
#define wrap(func)
Definition: neontest.h:65
timecode.h
GetBitContext
Definition: get_bits.h:108
AV_EF_BITSTREAM
#define AV_EF_BITSTREAM
detect bitstream specification deviations
Definition: defs.h:49
AVPanScan::width
int width
width and height in 1/16 pel
Definition: defs.h:263
slice_decode_thread
static int slice_decode_thread(AVCodecContext *c, void *arg)
Definition: mpeg12dec.c:1622
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:488
IDCTDSPContext::idct_put
void(* idct_put)(uint8_t *dest, ptrdiff_t line_size, int16_t *block)
block -> idct -> clip to unsigned 8 bit -> dest.
Definition: idctdsp.h:62
MB_TYPE_CBP
#define MB_TYPE_CBP
Definition: mpegutils.h:47
val
static double val(void *priv, double ch)
Definition: aeval.c:77
Mpeg1Context::tmpgexs
int tmpgexs
Definition: mpeg12dec.c:88
HWACCEL_VDPAU
#define HWACCEL_VDPAU(codec)
Definition: hwconfig.h:72
AV_CODEC_FLAG_LOW_DELAY
#define AV_CODEC_FLAG_LOW_DELAY
Force low delay.
Definition: avcodec.h:314
mpeg12_pixfmt_list_444
static enum AVPixelFormat mpeg12_pixfmt_list_444[]
Definition: mpeg12dec.c:827
MpegEncContext::width
int width
Definition: mpegvideo.h:87
AVCodecContext::coded_height
int coded_height
Definition: avcodec.h:607
mpeg1_decode_sequence
static int mpeg1_decode_sequence(AVCodecContext *avctx, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:1725
av_reduce
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
HAS_CBP
#define HAS_CBP(a)
Definition: mpegutils.h:87
AVRational::num
int num
Numerator.
Definition: rational.h:59
GOP_START_CODE
#define GOP_START_CODE
Definition: mpeg12.h:30
MpegEncContext::frame_pred_frame_dct
int frame_pred_frame_dct
Definition: mpegvideo.h:292
ff_frame_new_side_data_from_buf
int ff_frame_new_side_data_from_buf(const AVCodecContext *avctx, AVFrame *frame, enum AVFrameSideDataType type, AVBufferRef **buf)
Similar to ff_frame_new_side_data, but using an existing buffer ref.
Definition: decode.c:2070
IPUContext
Definition: mpeg12dec.c:2739
mpeg1_hwaccel_pixfmt_list_420
static enum AVPixelFormat mpeg1_hwaccel_pixfmt_list_420[]
Definition: mpeg12dec.c:784
mpeg12.h
mpegvideodec.h
ff_mpeg2video_decoder
const FFCodec ff_mpeg2video_decoder
Definition: mpeg12dec.c:2677
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:210
Mpeg1Context::frame_rate_index
unsigned frame_rate_index
Definition: mpeg12dec.c:85
ipu_decode_frame
static int ipu_decode_frame(AVCodecContext *avctx, AVFrame *frame, int *got_frame, AVPacket *avpkt)
Definition: mpeg12dec.c:2746
HAS_MV
#define HAS_MV(a, dir)
Definition: mpegutils.h:91
ER_DC_ERROR
#define ER_DC_ERROR
Definition: error_resilience.h:30
av_cold
#define av_cold
Definition: attributes.h:90
mpeg2_hwaccel_pixfmt_list_420
static enum AVPixelFormat mpeg2_hwaccel_pixfmt_list_420[]
Definition: mpeg12dec.c:795
init_get_bits8
static int init_get_bits8(GetBitContext *s, const uint8_t *buffer, int byte_size)
Initialize GetBitContext.
Definition: get_bits.h:528
mpeg1_decode_picture
static int mpeg1_decode_picture(AVCodecContext *avctx, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:970
Mpeg1Context::save_progressive_seq
int save_progressive_seq
Definition: mpeg12dec.c:83
emms_c
#define emms_c()
Definition: emms.h:63
CLOSE_READER
#define CLOSE_READER(name, gb)
Definition: get_bits.h:184
AVCodecContext::extradata_size
int extradata_size
Definition: avcodec.h:515
AVCodecContext::has_b_frames
int has_b_frames
Size of the frame reordering buffer in the decoder.
Definition: avcodec.h:697
A53_MAX_CC_COUNT
#define A53_MAX_CC_COUNT
Definition: mpeg12dec.c:62
Mpeg1Context::stereo3d_type
enum AVStereo3DType stereo3d_type
Definition: mpeg12dec.c:75
ff_er_frame_end
void ff_er_frame_end(ERContext *s, int *decode_error_flags)
Indicate that a frame has finished decoding and perform error concealment in case it has been enabled...
Definition: error_resilience.c:896
ff_mpeg_flush
av_cold void ff_mpeg_flush(AVCodecContext *avctx)
Definition: mpegvideo_dec.c:414
FF_CODEC_DECODE_CB
#define FF_CODEC_DECODE_CB(func)
Definition: codec_internal.h:341
stereo3d.h
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
s
#define s(width, name)
Definition: cbs_vp9.c:198
ff_mv_vlc
VLCElem ff_mv_vlc[266]
Definition: mpeg12.c:137
CHROMA_422
#define CHROMA_422
Definition: mpegvideo.h:302
MPVWorkPicture::ptr
MPVPicture * ptr
RefStruct reference.
Definition: mpegpicture.h:99
ff_mpeg1_aspect
const float ff_mpeg1_aspect[16]
Definition: mpeg12data.c:359
MB_TYPE_ZERO_MV
#define MB_TYPE_ZERO_MV
Definition: mpeg12dec.h:28
SHOW_SBITS
#define SHOW_SBITS(name, gb, num)
Definition: get_bits.h:243
ff_mpeg_er_frame_start
void ff_mpeg_er_frame_start(MpegEncContext *s)
Definition: mpeg_er.c:46
flush
static av_cold void flush(AVCodecContext *avctx)
Definition: mpeg12dec.c:2601
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:41
Mpeg1Context::aspect_ratio_info
unsigned aspect_ratio_info
Definition: mpeg12dec.c:82
pix_fmts
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:298
mpeg_decode_sequence_display_extension
static void mpeg_decode_sequence_display_extension(Mpeg1Context *s1)
Definition: mpeg12dec.c:1058
Mpeg1Context::pan_scan
AVPanScan pan_scan
Definition: mpeg12dec.c:74
get_sbits
static int get_sbits(GetBitContext *s, int n)
Definition: get_bits.h:303
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:231
PICT_TOP_FIELD
#define PICT_TOP_FIELD
Definition: mpegutils.h:31
decode.h
mpeg12_pixfmt_list_422
static enum AVPixelFormat mpeg12_pixfmt_list_422[]
Definition: mpeg12dec.c:822
SKIP_BITS
#define SKIP_BITS(name, gb, num)
Definition: get_bits.h:224
field
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this field
Definition: writing_filters.txt:78
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
AVCodecContext::rc_max_rate
int64_t rc_max_rate
maximum bitrate
Definition: avcodec.h:1270
MpegEncContext::cur_pic
MPVWorkPicture cur_pic
copy of the current picture structure.
Definition: mpegvideo.h:139
CODEC_LONG_NAME
#define CODEC_LONG_NAME(str)
Definition: codec_internal.h:326
AVCodecContext::codec_id
enum AVCodecID codec_id
Definition: avcodec.h:441
my
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t intptr_t my
Definition: dsp.h:57
arg
const char * arg
Definition: jacosubdec.c:67
rl_vlc
static const VLCElem * rl_vlc[2]
Definition: mobiclip.c:278
ff_mpv_common_end
av_cold void ff_mpv_common_end(MpegEncContext *s)
Definition: mpegvideo.c:471
MpegEncContext::mb_stride
int mb_stride
mb_width+1 used for some arrays to allow simple addressing of left & top MBs without sig11
Definition: mpegvideo.h:105
if
if(ret)
Definition: filter_design.txt:179
ff_mpv_unref_picture
void ff_mpv_unref_picture(MPVWorkPicture *pic)
Definition: mpegpicture.c:98
MpegEncContext::low_delay
int low_delay
no reordering needed / has no B-frames
Definition: mpegvideo.h:255
AVDISCARD_ALL
@ AVDISCARD_ALL
discard all
Definition: defs.h:229
MB_PTYPE_VLC_BITS
#define MB_PTYPE_VLC_BITS
Definition: mpeg12vlc.h:39
LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_VERSION_INT
Definition: version.h:85
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:76
PTRDIFF_SPECIFIER
#define PTRDIFF_SPECIFIER
Definition: internal.h:128
ff_mpv_export_qp_table
int ff_mpv_export_qp_table(const MpegEncContext *s, AVFrame *f, const MPVPicture *p, int qp_type)
Definition: mpegvideo_dec.c:375
NULL
#define NULL
Definition: coverity.c:32
format
New swscale design to change SwsGraph is what coordinates multiple passes These can include cascaded scaling error diffusion and so on Or we could have separate passes for the vertical and horizontal scaling In between each SwsPass lies a fully allocated image buffer Graph passes may have different levels of e g we can have a single threaded error diffusion pass following a multi threaded scaling pass SwsGraph is internally recreated whenever the image format
Definition: swscale-v2.txt:14
run
uint8_t run
Definition: svq3.c:207
AVCodecContext::color_range
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:669
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
ER_AC_ERROR
#define ER_AC_ERROR
Definition: error_resilience.h:29
MpegEncContext::mb_y
int mb_y
Definition: mpegvideo.h:204
SLICE_MIN_START_CODE
#define SLICE_MIN_START_CODE
Definition: mpeg12.h:32
mpeg12_execute_slice_threads
static void mpeg12_execute_slice_threads(AVCodecContext *avctx, Mpeg1Context *const s)
Definition: mpeg12dec.c:2190
hwaccel_internal.h
Mpeg1Context::sync
int sync
Definition: mpeg12dec.c:86
MpegEncContext::next_pic
MPVWorkPicture next_pic
copy of the next picture structure.
Definition: mpegvideo.h:133
AVCHROMA_LOC_LEFT
@ AVCHROMA_LOC_LEFT
MPEG-2/4 4:2:0, H.264 default for 4:2:0.
Definition: pixfmt.h:771
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
ff_mpv_decode_close
av_cold int ff_mpv_decode_close(AVCodecContext *avctx)
Definition: mpegvideo_dec.c:128
AVCHROMA_LOC_TOPLEFT
@ AVCHROMA_LOC_TOPLEFT
ITU-R 601, SMPTE 274M 296M S314M(DV 4:1:1), mpeg2 4:2:2.
Definition: pixfmt.h:773
AVCodecContext::bit_rate
int64_t bit_rate
the average bitrate
Definition: avcodec.h:481
mpeg_decode_picture_display_extension
static void mpeg_decode_picture_display_extension(Mpeg1Context *s1)
Definition: mpeg12dec.c:1082
M2V_PARAM
#define M2V_PARAM
Definition: mpeg12dec.c:2649
av_default_item_name
const char * av_default_item_name(void *ptr)
Return the context name.
Definition: log.c:240
AV_PICTURE_TYPE_I
@ AV_PICTURE_TYPE_I
Intra.
Definition: avutil.h:278
get_bits1
static unsigned int get_bits1(GetBitContext *s)
Definition: get_bits.h:371
profiles.h
CC_FORMAT_A53_PART4
@ CC_FORMAT_A53_PART4
Definition: mpeg12dec.c:66
FF_PTR_ADD
#define FF_PTR_ADD(ptr, off)
Definition: internal.h:80
LAST_SKIP_BITS
#define LAST_SKIP_BITS(name, gb, num)
Definition: get_bits.h:230
MB_TYPE_QUANT
#define MB_TYPE_QUANT
Definition: mpegutils.h:48
avpriv_find_start_code
const uint8_t * avpriv_find_start_code(const uint8_t *p, const uint8_t *end, uint32_t *state)
MB_TYPE_BIDIR_MV
#define MB_TYPE_BIDIR_MV
Definition: mpegutils.h:51
lowres
static int lowres
Definition: ffplay.c:330
ff_mpeg1_rl_vlc
RL_VLC_ELEM ff_mpeg1_rl_vlc[680]
Definition: mpeg12.c:147
MB_BTYPE_VLC_BITS
#define MB_BTYPE_VLC_BITS
Definition: mpeg12vlc.h:40
CC_FORMAT_AUTO
@ CC_FORMAT_AUTO
Definition: mpeg12dec.c:65
AV_PIX_FMT_D3D12
@ AV_PIX_FMT_D3D12
Hardware surfaces for Direct3D 12.
Definition: pixfmt.h:440
AV_PIX_FMT_GRAY8
@ AV_PIX_FMT_GRAY8
Y , 8bpp.
Definition: pixfmt.h:81
mpeg12codecs.h
MpegEncContext::slice_context_count
int slice_context_count
number of used thread_contexts
Definition: mpegvideo.h:121
get_vlc2
static av_always_inline int get_vlc2(GetBitContext *s, const VLCElem *table, int bits, int max_depth)
Parse a vlc code.
Definition: get_bits.h:635
AV_FRAME_DATA_AFD
@ AV_FRAME_DATA_AFD
Active Format Description data consisting of a single byte as specified in ETSI TS 101 154 using AVAc...
Definition: frame.h:90
AVCodecContext::level
int level
Encoding level descriptor.
Definition: avcodec.h:1628
atomic_load_explicit
#define atomic_load_explicit(object, order)
Definition: stdatomic.h:96
c
Undefined Behavior In the C some operations are like signed integer dereferencing freed accessing outside allocated Undefined Behavior must not occur in a C it is not safe even if the output of undefined operations is unused The unsafety may seem nit picking but Optimizing compilers have in fact optimized code on the assumption that no undefined Behavior occurs Optimizing code based on wrong assumptions can and has in some cases lead to effects beyond the output of computations The signed integer overflow problem in speed critical code Code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c
Definition: undefined.txt:32
AV_CODEC_ID_MPEG1VIDEO
@ AV_CODEC_ID_MPEG1VIDEO
Definition: codec_id.h:53
MpegEncContext::idsp
IDCTDSPContext idsp
Definition: mpegvideo.h:173
ff_mpv_alloc_dummy_frames
int ff_mpv_alloc_dummy_frames(MpegEncContext *s)
Ensure that the dummy frames are allocated according to pict_type if necessary.
Definition: mpegvideo_dec.c:266
ff_dlog
#define ff_dlog(a,...)
Definition: tableprint_vlc.h:28
Mpeg1Context::save_chroma_format
int save_chroma_format
Definition: mpeg12dec.c:83
startcode.h
CC_FORMAT_DVD
@ CC_FORMAT_DVD
Definition: mpeg12dec.c:68
IS_INTRA
#define IS_INTRA(x, y)
AVDISCARD_NONKEY
@ AVDISCARD_NONKEY
discard all frames except keyframes
Definition: defs.h:228
check_marker
static int check_marker(void *logctx, GetBitContext *s, const char *msg)
Definition: mpegvideodec.h:89
ERContext::error_count
atomic_int error_count
Definition: error_resilience.h:67
AVCodecContext::flags2
int flags2
AV_CODEC_FLAG2_*.
Definition: avcodec.h:495
ff_get_buffer
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
Definition: decode.c:1638
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts.c:368
mpeg2video_options
static const AVOption mpeg2video_options[]
Definition: mpeg12dec.c:2651
AV_CODEC_CAP_DR1
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() or get_encode_buffer() for allocating buffers and supports custom allocators.
Definition: codec.h:52
AV_CODEC_FLAG_GRAY
#define AV_CODEC_FLAG_GRAY
Only decode/encode grayscale.
Definition: avcodec.h:302
AVPacket::size
int size
Definition: packet.h:536
dc
Tag MUST be and< 10hcoeff half pel interpolation filter coefficients, hcoeff[0] are the 2 middle coefficients[1] are the next outer ones and so on, resulting in a filter like:...eff[2], hcoeff[1], hcoeff[0], hcoeff[0], hcoeff[1], hcoeff[2] ... the sign of the coefficients is not explicitly stored but alternates after each coeff and coeff[0] is positive, so ...,+,-,+,-,+,+,-,+,-,+,... hcoeff[0] is not explicitly stored but found by subtracting the sum of all stored coefficients with signs from 32 hcoeff[0]=32 - hcoeff[1] - hcoeff[2] - ... a good choice for hcoeff and htaps is htaps=6 hcoeff={40,-10, 2} an alternative which requires more computations at both encoder and decoder side and may or may not be better is htaps=8 hcoeff={42,-14, 6,-2}ref_frames minimum of the number of available reference frames and max_ref_frames for example the first frame after a key frame always has ref_frames=1spatial_decomposition_type wavelet type 0 is a 9/7 symmetric compact integer wavelet 1 is a 5/3 symmetric compact integer wavelet others are reserved stored as delta from last, last is reset to 0 if always_reset||keyframeqlog quality(logarithmic quantizer scale) stored as delta from last, last is reset to 0 if always_reset||keyframemv_scale stored as delta from last, last is reset to 0 if always_reset||keyframe FIXME check that everything works fine if this changes between framesqbias dequantization bias stored as delta from last, last is reset to 0 if always_reset||keyframeblock_max_depth maximum depth of the block tree stored as delta from last, last is reset to 0 if always_reset||keyframequant_table quantization tableHighlevel bitstream structure:==============================--------------------------------------------|Header|--------------------------------------------|------------------------------------|||Block0||||split?||||yes no||||......... intra?||||:Block01 :yes no||||:Block02 :....... ..........||||:Block03 ::y DC ::ref index:||||:Block04 ::cb DC ::motion x :||||......... :cr DC ::motion y :||||....... ..........|||------------------------------------||------------------------------------|||Block1|||...|--------------------------------------------|------------ ------------ ------------|||Y subbands||Cb subbands||Cr subbands||||--- ---||--- ---||--- ---|||||LL0||HL0||||LL0||HL0||||LL0||HL0|||||--- ---||--- ---||--- ---||||--- ---||--- ---||--- ---|||||LH0||HH0||||LH0||HH0||||LH0||HH0|||||--- ---||--- ---||--- ---||||--- ---||--- ---||--- ---|||||HL1||LH1||||HL1||LH1||||HL1||LH1|||||--- ---||--- ---||--- ---||||--- ---||--- ---||--- ---|||||HH1||HL2||||HH1||HL2||||HH1||HL2|||||...||...||...|||------------ ------------ ------------|--------------------------------------------Decoding process:=================------------|||Subbands|------------||||------------|Intra DC||||LL0 subband prediction ------------|\ Dequantization ------------------- \||Reference frames|\ IDWT|------- -------|Motion \|||Frame 0||Frame 1||Compensation . OBMC v -------|------- -------|--------------. \------> Frame n output Frame Frame<----------------------------------/|...|------------------- Range Coder:============Binary Range Coder:------------------- The implemented range coder is an adapted version based upon "Range encoding: an algorithm for removing redundancy from a digitised message." by G. N. N. Martin. The symbols encoded by the Snow range coder are bits(0|1). The associated probabilities are not fix but change depending on the symbol mix seen so far. 
bit seen|new state ---------+----------------------------------------------- 0|256 - state_transition_table[256 - old_state];1|state_transition_table[old_state];state_transition_table={ 0, 0, 0, 0, 0, 0, 0, 0, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 190, 191, 192, 194, 194, 195, 196, 197, 198, 199, 200, 201, 202, 202, 204, 205, 206, 207, 208, 209, 209, 210, 211, 212, 213, 215, 215, 216, 217, 218, 219, 220, 220, 222, 223, 224, 225, 226, 227, 227, 229, 229, 230, 231, 232, 234, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 248, 0, 0, 0, 0, 0, 0, 0};FIXME Range Coding of integers:------------------------- FIXME Neighboring Blocks:===================left and top are set to the respective blocks unless they are outside of the image in which case they are set to the Null block top-left is set to the top left block unless it is outside of the image in which case it is set to the left block if this block has no larger parent block or it is at the left side of its parent block and the top right block is not outside of the image then the top right block is used for top-right else the top-left block is used Null block y, cb, cr are 128 level, ref, mx and my are 0 Motion Vector Prediction:=========================1. the motion vectors of all the neighboring blocks are scaled to compensate for the difference of reference frames scaled_mv=(mv *(256 *(current_reference+1)/(mv.reference+1))+128)> the median of the scaled top and top right vectors is used as motion vector prediction the used motion vector is the sum of the predictor and(mvx_diff, mvy_diff) *mv_scale Intra DC Prediction block[y][x] dc[1]
Definition: snow.txt:400
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:94
MpegEncContext::qscale
int qscale
QP.
Definition: mpegvideo.h:161
AV_CODEC_ID_IPU
@ AV_CODEC_ID_IPU
Definition: codec_id.h:310
AV_FRAME_DATA_PANSCAN
@ AV_FRAME_DATA_PANSCAN
The data is the AVPanScan struct defined in libavcodec.
Definition: frame.h:53
CC_FORMAT_SCTE20
@ CC_FORMAT_SCTE20
Definition: mpeg12dec.c:67
height
#define height
Definition: dsp.h:89
av_frame_ref
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
Definition: frame.c:276
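A minimal usage sketch, assuming a valid source frame src obtained elsewhere (all names here are illustrative, not part of mpeg12dec.c):

    #include <libavutil/frame.h>

    static int make_frame_ref(AVFrame **out, const AVFrame *src)
    {
        AVFrame *dst = av_frame_alloc();
        int ret;
        if (!dst)
            return AVERROR(ENOMEM);
        ret = av_frame_ref(dst, src); /* dst now shares src's reference-counted buffers */
        if (ret < 0) {
            av_frame_free(&dst);
            return ret;
        }
        *out = dst;
        return 0;
    }

The references are dropped later with av_frame_unref(dst) or av_frame_free(&dst).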
MT_FRAME
#define MT_FRAME
Definition: mpeg12dec.c:394
codec_internal.h
DECLARE_ALIGNED
#define DECLARE_ALIGNED(n, t, v)
Definition: mem_internal.h:104
shift
static int shift(int a, int b)
Definition: bonk.c:261
IPUContext::flags
int flags
Definition: mpeg12dec.c:2742
MpegEncContext::intra_matrix
uint16_t intra_matrix[64]
matrix transmitted in the bitstream
Definition: mpegvideo.h:215
mpeg_field_start
static int mpeg_field_start(Mpeg1Context *s1, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:1211
ff_mpeg1_clean_buffers
void ff_mpeg1_clean_buffers(MpegEncContext *s)
Definition: mpeg12.c:125
MpegEncContext::v_edge_pos
int v_edge_pos
horizontal / vertical position of the right/bottom edge (pixel replication)
Definition: mpegvideo.h:107
Mpeg1Context::bit_rate
int64_t bit_rate
Definition: mpeg12dec.c:92
VLCElem
Definition: vlc.h:32
ff_mpeg1video_decoder
const FFCodec ff_mpeg1video_decoder
Definition: mpeg12dec.c:2620
ff_frame_new_side_data
int ff_frame_new_side_data(const AVCodecContext *avctx, AVFrame *frame, enum AVFrameSideDataType type, size_t size, AVFrameSideData **psd)
Wrapper around av_frame_new_side_data, which rejects side data overridden by the demuxer.
Definition: decode.c:2032
AV_RB32
uint64_t_TMPL AV_WL64 unsigned int_TMPL AV_WL32 unsigned int_TMPL AV_WL24 unsigned int_TMPL AV_WL16 uint64_t_TMPL AV_WB64 unsigned int_TMPL AV_RB32
Definition: bytestream.h:96
FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM
#define FF_CODEC_CAP_SKIP_FRAME_FILL_PARAM
The decoder extracts and fills its parameters even if the frame is skipped due to the skip_frame setting.
Definition: codec_internal.h:54
AVFrameSideData::data
uint8_t * data
Definition: frame.h:267
MB_TYPE_SKIP
#define MB_TYPE_SKIP
Definition: mpegutils.h:61
FF_THREAD_SLICE
#define FF_THREAD_SLICE
Decode more than one part of a single frame at once.
Definition: avcodec.h:1573
ff_mpeg_draw_horiz_band
void ff_mpeg_draw_horiz_band(MpegEncContext *s, int y, int h)
Definition: mpegvideo_dec.c:406
PICTURE_START_CODE
#define PICTURE_START_CODE
Definition: mpeg12.h:31
USER_START_CODE
#define USER_START_CODE
Definition: cavs.h:40
AVCodecContext::skip_bottom
int skip_bottom
Number of macroblock rows at the bottom which are skipped.
Definition: avcodec.h:1690
AVCodecHWConfigInternal
Definition: hwconfig.h:25
MpegEncContext::mbskip_table
uint8_t * mbskip_table
used to avoid copy if macroblock skipped (for black regions for example) and used for B-frame encodin...
Definition: mpegvideo.h:153
ff_mpeg1_default_intra_matrix
const uint16_t ff_mpeg1_default_intra_matrix[256]
Definition: mpeg12data.c:31
diff
static av_always_inline int diff(const struct color_info *a, const struct color_info *b, const int trans_thresh)
Definition: vf_paletteuse.c:166
MpegEncContext::context_initialized
int context_initialized
Definition: mpegvideo.h:102
ff_mpv_frame_start
int ff_mpv_frame_start(MpegEncContext *s, AVCodecContext *avctx)
generic function called after decoding the header and before a frame is decoded.
Definition: mpegvideo_dec.c:309
MB_TYPE_INTERLACED
#define MB_TYPE_INTERLACED
Definition: mpegutils.h:45
OPEN_READER
#define OPEN_READER(name, gb)
Definition: get_bits.h:173
Mpeg1Context::has_stereo3d
int has_stereo3d
Definition: mpeg12dec.c:76
mpeg_decode_init
static av_cold int mpeg_decode_init(AVCodecContext *avctx)
Definition: mpeg12dec.c:762
AV_CODEC_CAP_SLICE_THREADS
#define AV_CODEC_CAP_SLICE_THREADS
Codec supports slice-based (or partition-based) multithreading.
Definition: codec.h:99
HWACCEL_D3D11VA
#define HWACCEL_D3D11VA(codec)
Definition: hwconfig.h:78
mpegvideodata.h
attributes.h
ff_mpeg1_decode_block_intra
int ff_mpeg1_decode_block_intra(GetBitContext *gb, const uint16_t *quant_matrix, const uint8_t *scantable, int last_dc[3], int16_t *block, int index, int qscale)
Definition: mpeg12.c:193
MV_TYPE_FIELD
#define MV_TYPE_FIELD
2 vectors, one per field
Definition: mpegvideo.h:188
skip_bits1
static void skip_bits1(GetBitContext *s)
Definition: get_bits.h:396
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
HWACCEL_NVDEC
#define HWACCEL_NVDEC(codec)
Definition: hwconfig.h:68
mpeg2video_class
static const AVClass mpeg2video_class
Definition: mpeg12dec.c:2669
AV_PIX_FMT_VAAPI
@ AV_PIX_FMT_VAAPI
Hardware acceleration through VA-API, data[3] contains a VASurfaceID.
Definition: pixfmt.h:126
AVBufferRef::size
size_t size
Size of data in bytes.
Definition: buffer.h:94
ff_mpeg2_video_profiles
const AVProfile ff_mpeg2_video_profiles[]
Definition: profiles.c:116
AV_PIX_FMT_VDPAU
@ AV_PIX_FMT_VDPAU
HW acceleration through VDPAU, Picture.data[3] contains a VdpVideoSurface.
Definition: pixfmt.h:194
CHROMA_444
#define CHROMA_444
Definition: mpegvideo.h:303
emms.h
AV_PIX_FMT_VIDEOTOOLBOX
@ AV_PIX_FMT_VIDEOTOOLBOX
hardware decoding through Videotoolbox
Definition: pixfmt.h:305
ff_print_debug_info
void ff_print_debug_info(const MpegEncContext *s, const MPVPicture *p, AVFrame *pict)
Definition: mpegvideo_dec.c:368
av_assert2
#define av_assert2(cond)
assert() equivalent, that does lie in speed critical code.
Definition: avassert.h:68
MpegEncContext::progressive_frame
int progressive_frame
Definition: mpegvideo.h:307
CHROMA_420
#define CHROMA_420
Definition: mpegvideo.h:301
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
code
Definition: filter_design.txt:178
AV_CODEC_FLAG2_SHOW_ALL
#define AV_CODEC_FLAG2_SHOW_ALL
Show all frames before the first keyframe.
Definition: avcodec.h:360
ff_alternate_vertical_scan
const uint8_t ff_alternate_vertical_scan[64]
Definition: mpegvideodata.c:63
AVCodecContext::extradata
uint8_t * extradata
Out-of-band global headers that may be used by some codecs.
Definition: avcodec.h:514
show_bits
static unsigned int show_bits(GetBitContext *s, int n)
Show 1-25 bits.
Definition: get_bits.h:354
internal.h
mpeg_set_cc_format
static void mpeg_set_cc_format(AVCodecContext *avctx, enum Mpeg2ClosedCaptionsFormat format, const char *label)
Definition: mpeg12dec.c:1864
AV_STEREO3D_TOPBOTTOM
@ AV_STEREO3D_TOPBOTTOM
Views are on top of each other.
Definition: stereo3d.h:76
IS_QUANT
#define IS_QUANT(a)
Definition: mpegutils.h:85
MpegEncContext::mb_x
int mb_x
Definition: mpegvideo.h:204
ff_mpeg12_init_vlcs
av_cold void ff_mpeg12_init_vlcs(void)
Definition: mpeg12.c:185
atomic_store_explicit
#define atomic_store_explicit(object, desired, order)
Definition: stdatomic.h:90
FF_DEBUG_STARTCODE
#define FF_DEBUG_STARTCODE
Definition: avcodec.h:1382
MpegEncContext::thread_context
struct MpegEncContext * thread_context[MAX_THREADS]
Definition: mpegvideo.h:118
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
av_d2q
AVRational av_d2q(double d, int max)
Convert a double precision floating point number to a rational.
Definition: rational.c:106
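A hedged sketch of typical use; the rate value and maximum denominator are arbitrary:

    #include <libavutil/rational.h>

    AVRational fps = av_d2q(29.97, 100000); /* best rational approximation with den <= 100000, typically 2997/100 */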
MB_TYPE_MV_2_MV_DIR
#define MB_TYPE_MV_2_MV_DIR(a)
Definition: mpegutils.h:93
MB_PAT_VLC_BITS
#define MB_PAT_VLC_BITS
Definition: mpeg12vlc.h:38
mpeg1_decode_block_inter
static int mpeg1_decode_block_inter(MpegEncContext *s, int16_t *block, int n)
Definition: mpeg12dec.c:133
av_frame_unref
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
Definition: frame.c:494
IPUContext::m
MpegEncContext m
Definition: mpeg12dec.c:2740
AVCodec::name
const char * name
Name of the codec implementation.
Definition: codec.h:179
MpegEncContext::last_pic
MPVWorkPicture last_pic
copy of the previous picture structure.
Definition: mpegvideo.h:127
MpegEncContext::intra_vlc_format
int intra_vlc_format
Definition: mpegvideo.h:296
AVCodecContext::chroma_sample_location
enum AVChromaLocation chroma_sample_location
This defines the location of chroma samples.
Definition: avcodec.h:676
MAX_INDEX
#define MAX_INDEX
Definition: mpeg12dec.c:123
MpegEncContext::er
ERContext er
Definition: mpegvideo.h:333
AVCodecContext::height
int height
Definition: avcodec.h:592
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:631
AVCOL_RANGE_MPEG
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:733
HWACCEL_VIDEOTOOLBOX
#define HWACCEL_VIDEOTOOLBOX(codec)
Definition: hwconfig.h:74
idctdsp.h
avcodec.h
av_cmp_q
static int av_cmp_q(AVRational a, AVRational b)
Compare two rationals.
Definition: rational.h:89
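A small illustrative comparison (the frame rates are arbitrary); for defined values av_cmp_q() returns a negative, zero or positive result:

    AVRational film = { 24000, 1001 }; /* ~23.976 fps */
    AVRational pal  = { 25, 1 };
    int cmp = av_cmp_q(film, pal);     /* negative here, since 24000/1001 < 25/1 */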
GET_RL_VLC
#define GET_RL_VLC(level, run, name, gb, table, bits, max_depth, need_update)
Definition: get_bits.h:589
ff_zigzag_direct
const uint8_t ff_zigzag_direct[64]
Definition: mathtables.c:98
ff_mpeg12_frame_rate_tab
const AVRational ff_mpeg12_frame_rate_tab[]
Definition: mpeg12framerate.c:24
mpeg_decode_gop
static int mpeg_decode_gop(AVCodecContext *avctx, const uint8_t *buf, int buf_size)
Definition: mpeg12dec.c:2159
ret
ret
Definition: filter_design.txt:187
AV_EF_AGGRESSIVE
#define AV_EF_AGGRESSIVE
consider things that a sane encoder/muxer should not do as an error
Definition: defs.h:56
pred
static const float pred[4]
Definition: siprdata.h:259
AV_FRAME_DATA_GOP_TIMECODE
@ AV_FRAME_DATA_GOP_TIMECODE
The GOP timecode in 25 bit timecode format.
Definition: frame.h:125
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
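Illustrative only, assuming libavutil/macros.h is available:

    int a = 3, b = 7;
    FFSWAP(int, a, b); /* afterwards a == 7 and b == 3 */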
AVClass::class_name
const char * class_name
The name of the class; usually it is the same name as the context structure type to which the AVClass...
Definition: log.h:81
frame
Definition: filter_design.txt:264
ff_mpeg1_default_non_intra_matrix
const uint16_t ff_mpeg1_default_non_intra_matrix[64]
Definition: mpeg12data.c:42
AVStereo3D::type
enum AVStereo3DType type
How views are packed within the video.
Definition: stereo3d.h:207
align_get_bits
static const uint8_t * align_get_bits(GetBitContext *s)
Definition: get_bits.h:544
TEX_VLC_BITS
#define TEX_VLC_BITS
Definition: dvdec.c:147
MPVPicture::f
struct AVFrame * f
Definition: mpegpicture.h:59
left
Definition: snow.txt:386
mpeg_get_pixelformat
static enum AVPixelFormat mpeg_get_pixelformat(AVCodecContext *avctx)
Definition: mpeg12dec.c:832
AV_CODEC_FLAG2_CHUNKS
#define AV_CODEC_FLAG2_CHUNKS
Input bitstream might be truncated at packet boundaries instead of only at frame boundaries.
Definition: avcodec.h:351
AV_RL32
uint64_t_TMPL AV_WL64 unsigned int_TMPL AV_RL32
Definition: bytestream.h:92
mpeg12data.h
skip_1stop_8data_bits
static int skip_1stop_8data_bits(GetBitContext *gb)
Definition: get_bits.h:683
AVCodecContext
main external API structure.
Definition: avcodec.h:431
AVCodecContext::active_thread_type
int active_thread_type
Which multithreading methods are in use by the codec.
Definition: avcodec.h:1580
av_timecode_make_mpeg_tc_string
char * av_timecode_make_mpeg_tc_string(char *buf, uint32_t tc25bit)
Get the timecode string from the 25-bit timecode format (MPEG GOP format).
Definition: timecode.c:147
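A hedged sketch; tc25bit stands for a 25-bit GOP timecode value obtained elsewhere:

    #include <libavutil/timecode.h>

    char tcbuf[AV_TIMECODE_STR_SIZE];
    av_timecode_make_mpeg_tc_string(tcbuf, tc25bit); /* e.g. "01:02:03:04" */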
MpegEncContext::intra_dc_precision
int intra_dc_precision
Definition: mpegvideo.h:291
AVCodecContext::execute
int(* execute)(struct AVCodecContext *c, int(*func)(struct AVCodecContext *c2, void *arg), void *arg2, int *ret, int count, int size)
The codec may call this to execute several independent things.
Definition: avcodec.h:1591
SHOW_UBITS
#define SHOW_UBITS(name, gb, num)
Definition: get_bits.h:242
AV_PICTURE_TYPE_B
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
Definition: avutil.h:280
mpeg12dec.h
AVCHROMA_LOC_CENTER
@ AVCHROMA_LOC_CENTER
MPEG-1 4:2:0, JPEG 4:2:0, H.263 4:2:0.
Definition: pixfmt.h:772
AVRational::den
int den
Denominator.
Definition: rational.h:60
error_resilience.h
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
FF_HW_CALL
#define FF_HW_CALL(avctx, function,...)
Definition: hwaccel_internal.h:173
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Underlying C type is int.
Definition: opt.h:259
AVCodecContext::profile
int profile
profile
Definition: avcodec.h:1618
AVFrame::metadata
AVDictionary * metadata
metadata.
Definition: frame.h:688
Mpeg1Context::cc_format
enum Mpeg2ClosedCaptionsFormat cc_format
Definition: mpeg12dec.c:78
sign_extend
static av_const int sign_extend(int val, unsigned bits)
Definition: mathops.h:131
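sign_extend() interprets the low `bits` bits of val as a two's-complement number; a tiny worked example, assuming the internal libavutil/mathops.h helper above:

    int a = sign_extend(0x1F, 5); /* -1: the low 5 bits are all ones   */
    int b = sign_extend(0x0F, 5); /* 15: bit 4, the sign bit, is clear */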
ff_mpv_frame_end
void ff_mpv_frame_end(MpegEncContext *s)
Definition: mpegvideo_dec.c:360
ref
static int ref[MAX_W *MAX_W]
Definition: jpeg2000dwt.c:117
Mpeg1Context::slice_count
int slice_count
Definition: mpeg12dec.c:81
AV_CODEC_CAP_DELAY
#define AV_CODEC_CAP_DELAY
Encoder or decoder requires flushing with NULL input at the end in order to give the complete and correct output.
Definition: codec.h:76
MpegEncContext::resync_mb_x
int resync_mb_x
x position of last resync marker
Definition: mpegvideo.h:221
FF_CODEC_PROPERTY_CLOSED_CAPTIONS
#define FF_CODEC_PROPERTY_CLOSED_CAPTIONS
Definition: avcodec.h:1639
av_mul_q
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
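An illustrative sketch (values arbitrary); the product is returned in reduced form:

    AVRational base = { 24000, 1001 };
    AVRational ext  = { 2, 1 };
    AVRational fps  = av_mul_q(base, ext); /* 48000/1001 */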
AV_PIX_FMT_YUV444P
@ AV_PIX_FMT_YUV444P
planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
Definition: pixfmt.h:78
av_buffer_realloc
int av_buffer_realloc(AVBufferRef **pbuf, size_t size)
Reallocate a given buffer.
Definition: buffer.c:183
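A hedged sketch; when *pbuf is NULL a new buffer is allocated, otherwise the existing data is kept up to the smaller of the old and new sizes:

    AVBufferRef *buf = NULL;
    int ret = av_buffer_realloc(&buf, 1024); /* allocates, since buf was NULL */
    if (ret < 0)
        return ret;
    ret = av_buffer_realloc(&buf, 4096);     /* grows the buffer */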
ff_mb_ptype_vlc
VLCElem ff_mb_ptype_vlc[64]
Definition: mpeg12.c:143
AVCodecContext::debug
int debug
debug
Definition: avcodec.h:1374
FF_DISABLE_DEPRECATION_WARNINGS
#define FF_DISABLE_DEPRECATION_WARNINGS
Definition: internal.h:72
AVCodecContext::coded_width
int coded_width
Bitstream width / height, may be different from width/height e.g.
Definition: avcodec.h:607
get_dmv
static int get_dmv(MpegEncContext *s)
Definition: mpeg12dec.c:384
AV_PICTURE_TYPE_P
@ AV_PICTURE_TYPE_P
Predicted.
Definition: avutil.h:279
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:200
mpeg_decode_end
static av_cold int mpeg_decode_end(AVCodecContext *avctx)
Definition: mpeg12dec.c:2612
AV_PIX_FMT_YUV422P
@ AV_PIX_FMT_YUV422P
planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
Definition: pixfmt.h:77
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
IDCTDSPContext::idct_permutation
uint8_t idct_permutation[64]
IDCT input permutation.
Definition: idctdsp.h:86
ff_ipu_decoder
const FFCodec ff_ipu_decoder
Definition: mpeg12dec.c:2863
av_stereo3d_create_side_data
AVStereo3D * av_stereo3d_create_side_data(AVFrame *frame)
Allocate a complete AVFrameSideData and add it to the frame.
Definition: stereo3d.c:54
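A minimal sketch, assuming frame is the AVFrame currently being filled; the returned struct is owned by the frame's side data:

    AVStereo3D *stereo = av_stereo3d_create_side_data(frame);
    if (!stereo)
        return AVERROR(ENOMEM);
    stereo->type = AV_STEREO3D_TOPBOTTOM; /* views stacked vertically */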
AVFrameSideData
Structure to hold side data for an AVFrame.
Definition: frame.h:265
ER_MV_END
#define ER_MV_END
Definition: error_resilience.h:34
MpegEncContext::first_field
int first_field
is 1 for the first field of a field picture, 0 otherwise
Definition: mpegvideo.h:310
MpegEncContext::q_scale_type
int q_scale_type
Definition: mpegvideo.h:295
AVCodecContext::codec_tag
unsigned int codec_tag
fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A').
Definition: avcodec.h:456
Mpeg1Context::mpeg_enc_ctx
MpegEncContext mpeg_enc_ctx
Definition: mpeg12dec.c:73
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
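Illustrative only:

    int aligned_w = FFALIGN(1917, 32); /* 1920: rounded up to the next multiple of 32 */
    int aligned_h = FFALIGN(1080, 16); /* 1080: already a multiple of 16              */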
MV_DIR_FORWARD
#define MV_DIR_FORWARD
Definition: mpegvideo.h:181
ff_tlog
#define ff_tlog(a,...)
Definition: tableprint_vlc.h:29
AVPacket
This structure stores compressed data.
Definition: packet.h:512
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:458
ScanTable::permutated
uint8_t permutated[64]
Definition: mpegvideo.h:49
av_dict_set
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags)
Set the given entry in *pm, overwriting an existing entry.
Definition: dict.c:86
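A hedged sketch of attaching a key/value pair to a frame's metadata dictionary; the "timecode" key and the tcbuf string are illustrative, not a statement about what this decoder stores:

    int ret = av_dict_set(&frame->metadata, "timecode", tcbuf, 0);
    if (ret < 0)
        return ret;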
mpeg_get_qscale
static int mpeg_get_qscale(MpegEncContext *s)
Definition: mpegvideodec.h:80
mpeg_decode_sequence_extension
static void mpeg_decode_sequence_extension(Mpeg1Context *s1)
Definition: mpeg12dec.c:1015
HWACCEL_VAAPI
#define HWACCEL_VAAPI(codec)
Definition: hwconfig.h:70
mpeg_er.h
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:592
int32_t
int32_t
Definition: audioconvert.c:56
imgutils.h
AVCodecContext::properties
attribute_deprecated unsigned properties
Properties of the stream that gets decoded.
Definition: avcodec.h:1637
AV_CODEC_CAP_DRAW_HORIZ_BAND
#define AV_CODEC_CAP_DRAW_HORIZ_BAND
Decoder can use draw_horiz_band callback.
Definition: codec.h:44
AVStereo3DType
AVStereo3DType
List of possible 3D Types.
Definition: stereo3d.h:48
block
Definition: filter_design.txt:207
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
Mpeg1Context::frame_rate_ext
AVRational frame_rate_ext
Definition: mpeg12dec.c:84
mpeg_decode_motion
static int mpeg_decode_motion(MpegEncContext *s, int fcode, int pred)
Definition: mpeg12dec.c:97
AVERROR_INVALIDDATA
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
Definition: error.h:61
IPUContext::block
int16_t block[6][64]
Definition: mpeg12dec.c:2743
AVPanScan::height
int height
Definition: defs.h:264
MKTAG
#define MKTAG(a, b, c, d)
Definition: macros.h:55
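Illustrative only, mirroring the codec_tag byte order described above:

    unsigned int tag = MKTAG('A', 'B', 'C', 'D'); /* ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A' */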
ff_mb_btype_vlc
VLCElem ff_mb_btype_vlc[64]
Definition: mpeg12.c:144
MpegEncContext::resync_mb_y
int resync_mb_y
y position of last resync marker
Definition: mpegvideo.h:222
mpeg_decode_user_data
static void mpeg_decode_user_data(AVCodecContext *avctx, const uint8_t *p, int buf_size)
Definition: mpeg12dec.c:2089
h
h
Definition: vp9dsp_template.c:2070
MpegEncContext::end_mb_y
int end_mb_y
end mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y)
Definition: mpegvideo.h:116
Mpeg2ClosedCaptionsFormat
Mpeg2ClosedCaptionsFormat
Definition: mpeg12dec.c:64
ER_AC_END
#define ER_AC_END
Definition: error_resilience.h:32
AVStereo3D
Stereo 3D type: this structure describes how two videos are packed within a single video surface,...
Definition: stereo3d.h:203
av_image_check_sar
int av_image_check_sar(unsigned int w, unsigned int h, AVRational sar)
Check if the given sample aspect ratio of an image is valid.
Definition: imgutils.c:323
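A hedged sketch of the usual validation pattern (not necessarily this decoder's exact code): on failure, fall back to an unknown sample aspect ratio:

    if (av_image_check_sar(avctx->width, avctx->height, avctx->sample_aspect_ratio) < 0) {
        av_log(avctx, AV_LOG_WARNING, "ignoring invalid SAR: %d/%d\n",
               avctx->sample_aspect_ratio.num, avctx->sample_aspect_ratio.den);
        avctx->sample_aspect_ratio = (AVRational){ 0, 1 };
    }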
MV_VLC_BITS
#define MV_VLC_BITS
Definition: mpeg12vlc.h:34
Mpeg1Context::timecode_frame_start
int64_t timecode_frame_start
Definition: mpeg12dec.c:93
width
#define width
Definition: dsp.h:89
MpegEncContext::start_mb_y
int start_mb_y
start mb_y of this thread (so current thread should process start_mb_y <= row < end_mb_y)
Definition: mpegvideo.h:115
AVDISCARD_NONREF
@ AVDISCARD_NONREF
discard all non reference
Definition: defs.h:225
MpegEncContext::alternate_scan
int alternate_scan
Definition: mpegvideo.h:297
DECODE_SLICE_OK
#define DECODE_SLICE_OK
Definition: mpeg12dec.c:1338
AV_CODEC_ID_MPEG2VIDEO
@ AV_CODEC_ID_MPEG2VIDEO
preferred ID for MPEG-1/2 video decoding
Definition: codec_id.h:54
DECODE_SLICE_ERROR
#define DECODE_SLICE_ERROR
Definition: mpeg12dec.c:1337
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
Definition: opt.h:299
MpegEncContext
MpegEncContext.
Definition: mpegvideo.h:64
load_matrix
static int load_matrix(MpegEncContext *s, uint16_t matrix0[64], uint16_t matrix1[64], int intra)
Definition: mpeg12dec.c:1116
MpegEncContext::codec_id
enum AVCodecID codec_id
Definition: mpegvideo.h:95
AVCodecContext::sample_aspect_ratio
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown). That is the width of a pixel divided by the height of the pixel.
Definition: avcodec.h:616
MB_TYPE_FORWARD_MV
#define MB_TYPE_FORWARD_MV
Definition: mpegutils.h:49
decode_dc
static int decode_dc(GetBitContext *gb, int component)
Definition: mpeg12dec.h:30
Mpeg1Context::afd
uint8_t afd
Definition: mpeg12dec.c:79
Mpeg1Context
Definition: mpeg12dec.c:72
MpegEncContext::chroma_intra_matrix
uint16_t chroma_intra_matrix[64]
Definition: mpegvideo.h:216
mpeg_decode_picture_coding_extension
static int mpeg_decode_picture_coding_extension(Mpeg1Context *s1)
Definition: mpeg12dec.c:1153
Mpeg1Context::extradata_decoded
int extradata_decoded
Definition: mpeg12dec.c:90
mpeg2_decode_block_non_intra
static int mpeg2_decode_block_non_intra(MpegEncContext *s, int16_t *block, int n)
Definition: mpeg12dec.c:217
MB_TYPE_INTRA
#define MB_TYPE_INTRA
Definition: mpegutils.h:64
MBINCR_VLC_BITS
#define MBINCR_VLC_BITS
Definition: mpeg12vlc.h:37
mpeg_decode_slice
static int mpeg_decode_slice(MpegEncContext *s, int mb_y, const uint8_t **buf, int buf_size)
Decode a slice.
Definition: mpeg12dec.c:1346
MpegEncContext::chroma_format
int chroma_format
Definition: mpegvideo.h:300
MpegEncContext::codec_tag
int codec_tag
internal codec_tag, upper-case converted from avctx codec_tag
Definition: mpegvideo.h:98