FFmpeg
mpegvideo_dec.c
Go to the documentation of this file.
1 /*
2  * Common mpeg video decoding code
3  * Copyright (c) 2000,2001 Fabrice Bellard
4  * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
23 #include <limits.h>
24 
25 #include "config_components.h"
26 
27 #include "libavutil/avassert.h"
28 #include "libavutil/emms.h"
29 #include "libavutil/imgutils.h"
30 #include "libavutil/internal.h"
32 
33 #include "avcodec.h"
34 #include "decode.h"
35 #include "h264chroma.h"
36 #include "internal.h"
37 #include "mpegutils.h"
38 #include "mpegvideo.h"
39 #include "mpegvideodec.h"
40 #include "mpeg4videodec.h"
41 #include "libavutil/refstruct.h"
42 #include "thread.h"
43 #include "threadprogress.h"
44 #include "wmv2dec.h"
45 
47 {
48  enum ThreadingStatus thread_status;
49 
51 
52  s->avctx = avctx;
53  s->width = avctx->coded_width;
54  s->height = avctx->coded_height;
55  s->codec_id = avctx->codec->id;
56  s->workaround_bugs = avctx->workaround_bugs;
57 
58  /* convert fourcc to upper case */
59  s->codec_tag = ff_toupper4(avctx->codec_tag);
60 
62 
63  ff_h264chroma_init(&s->h264chroma, 8); //for lowres
64 
65  if (s->picture_pool) // VC-1 can call this multiple times
66  return 0;
67 
68  thread_status = ff_thread_sync_ref(avctx, offsetof(MpegEncContext, picture_pool));
69  if (thread_status != FF_THREAD_IS_COPY) {
70  s->picture_pool = ff_mpv_alloc_pic_pool(thread_status != FF_THREAD_NO_FRAME_THREADING);
71  if (!s->picture_pool)
72  return AVERROR(ENOMEM);
73  }
74  return 0;
75 }
76 
78  const AVCodecContext *src)
79 {
80  MpegEncContext *const s1 = src->priv_data;
81  MpegEncContext *const s = dst->priv_data;
82  int ret;
83 
84  if (dst == src)
85  return 0;
86 
87  av_assert0(s != s1);
88 
89  // FIXME can parameters change on I-frames?
90  // in that case dst may need a reinit
91  if (!s->context_initialized) {
92  void *private_ctx = s->private_ctx;
93  int err;
94  memcpy(s, s1, sizeof(*s));
95 
96  s->context_initialized = 0;
97  s->context_reinit = 0;
98  s->avctx = dst;
99  s->private_ctx = private_ctx;
100 
101  if (s1->context_initialized) {
102  if ((err = ff_mpv_common_init(s)) < 0)
103  return err;
104  }
105  }
106 
107  if (s->height != s1->height || s->width != s1->width || s->context_reinit) {
108  s->height = s1->height;
109  s->width = s1->width;
111  return ret;
112  }
113 
114  s->quarter_sample = s1->quarter_sample;
115 
116  s->picture_number = s1->picture_number;
117 
118  ff_mpv_replace_picture(&s->cur_pic, &s1->cur_pic);
119  ff_mpv_replace_picture(&s->last_pic, &s1->last_pic);
120  ff_mpv_replace_picture(&s->next_pic, &s1->next_pic);
121 
122  s->linesize = s1->linesize;
123  s->uvlinesize = s1->uvlinesize;
124 
125  // Error/bug resilience
126  s->workaround_bugs = s1->workaround_bugs;
127  s->padding_bug_score = s1->padding_bug_score;
128 
129  // MPEG-4 timing info
130  memcpy(&s->last_time_base, &s1->last_time_base,
131  (char *) &s1->pb_field_time + sizeof(s1->pb_field_time) -
132  (char *) &s1->last_time_base);
133 
134  // B-frame info
135  s->low_delay = s1->low_delay;
136 
137  // MPEG-2/interlacing info
138  memcpy(&s->progressive_sequence, &s1->progressive_sequence,
139  (char *) &s1->rtp_mode - (char *) &s1->progressive_sequence);
140 
141  return 0;
142 }
143 
145 {
147 
148  av_refstruct_pool_uninit(&s->picture_pool);
150  return 0;
151 }
152 
154 {
155  int err = 0;
156 
157  if (!s->context_initialized)
158  return AVERROR(EINVAL);
159 
161 
162  ff_mpv_unref_picture(&s->last_pic);
163  ff_mpv_unref_picture(&s->next_pic);
164  ff_mpv_unref_picture(&s->cur_pic);
165 
166  if ((s->width || s->height) &&
167  (err = av_image_check_size(s->width, s->height, 0, s->avctx)) < 0)
168  goto fail;
169 
170  /* set chroma shifts */
171  err = av_pix_fmt_get_chroma_sub_sample(s->avctx->pix_fmt,
172  &s->chroma_x_shift,
173  &s->chroma_y_shift);
174  if (err < 0)
175  goto fail;
176 
177  if ((err = ff_mpv_init_context_frame(s)))
178  goto fail;
179 
180  memset(s->thread_context, 0, sizeof(s->thread_context));
181  s->thread_context[0] = s;
182 
183  if (s->width && s->height) {
185  if (err < 0)
186  goto fail;
187  }
188  s->context_reinit = 0;
189 
190  return 0;
191  fail:
193  s->context_reinit = 1;
194  return err;
195 }
196 
197 static int alloc_picture(MpegEncContext *s, MPVWorkPicture *dst, int reference)
198 {
199  AVCodecContext *avctx = s->avctx;
200  MPVPicture *pic = av_refstruct_pool_get(s->picture_pool);
201  int ret;
202 
203  if (!pic)
204  return AVERROR(ENOMEM);
205 
206  dst->ptr = pic;
207 
208  pic->reference = reference;
209 
210  /* WM Image / Screen codecs allocate internal buffers with different
211  * dimensions / colorspaces; ignore user-defined callbacks for these. */
216  reference ? AV_GET_BUFFER_FLAG_REF : 0);
217  } else {
218  pic->f->width = avctx->width;
219  pic->f->height = avctx->height;
220  pic->f->format = avctx->pix_fmt;
222  }
223  if (ret < 0)
224  goto fail;
225 
226  ret = ff_mpv_pic_check_linesize(avctx, pic->f, &s->linesize, &s->uvlinesize);
227  if (ret < 0)
228  goto fail;
229 
231  if (ret < 0)
232  goto fail;
233 
234  av_assert1(s->mb_width == s->buffer_pools.alloc_mb_width);
235  av_assert1(s->mb_height == s->buffer_pools.alloc_mb_height ||
236  FFALIGN(s->mb_height, 2) == s->buffer_pools.alloc_mb_height);
237  av_assert1(s->mb_stride == s->buffer_pools.alloc_mb_stride);
238  ret = ff_mpv_alloc_pic_accessories(s->avctx, dst, &s->sc,
239  &s->buffer_pools, s->mb_height);
240  if (ret < 0)
241  goto fail;
242 
243  return 0;
244 fail:
246  return ret;
247 }
248 
250 {
251  MPVPicture *pic;
252  int ret = alloc_picture(s, dst, 1);
253  if (ret < 0)
254  return ret;
255 
256  pic = dst->ptr;
257  pic->dummy = 1;
258 
259  ff_thread_progress_report(&pic->progress, INT_MAX);
260 
261  return 0;
262 }
263 
264 static void color_frame(AVFrame *frame, int luma)
265 {
266  int h_chroma_shift, v_chroma_shift;
267 
268  for (int i = 0; i < frame->height; i++)
269  memset(frame->data[0] + frame->linesize[0] * i, luma, frame->width);
270 
271  if (!frame->data[1])
272  return;
273  av_pix_fmt_get_chroma_sub_sample(frame->format, &h_chroma_shift, &v_chroma_shift);
274  for (int i = 0; i < AV_CEIL_RSHIFT(frame->height, v_chroma_shift); i++) {
275  memset(frame->data[1] + frame->linesize[1] * i,
276  0x80, AV_CEIL_RSHIFT(frame->width, h_chroma_shift));
277  memset(frame->data[2] + frame->linesize[2] * i,
278  0x80, AV_CEIL_RSHIFT(frame->width, h_chroma_shift));
279  }
280 }
281 
283 {
284  AVCodecContext *avctx = s->avctx;
285  int ret;
286 
287  av_assert1(!s->last_pic.ptr || s->last_pic.ptr->f->buf[0]);
288  av_assert1(!s->next_pic.ptr || s->next_pic.ptr->f->buf[0]);
289  if (!s->last_pic.ptr && s->pict_type != AV_PICTURE_TYPE_I) {
290  if (s->pict_type == AV_PICTURE_TYPE_B && s->next_pic.ptr)
292  "allocating dummy last picture for B frame\n");
293  else if (s->codec_id != AV_CODEC_ID_H261 /* H.261 has no keyframes */ &&
294  (s->picture_structure == PICT_FRAME || s->first_field))
296  "warning: first frame is no keyframe\n");
297 
298  /* Allocate a dummy frame */
299  ret = alloc_dummy_frame(s, &s->last_pic);
300  if (ret < 0)
301  return ret;
302 
303  if (!avctx->hwaccel) {
304  int luma_val = s->codec_id == AV_CODEC_ID_FLV1 || s->codec_id == AV_CODEC_ID_H263 ? 16 : 0x80;
305  color_frame(s->last_pic.ptr->f, luma_val);
306  }
307  }
308  if (!s->next_pic.ptr && s->pict_type == AV_PICTURE_TYPE_B) {
309  /* Allocate a dummy frame */
310  ret = alloc_dummy_frame(s, &s->next_pic);
311  if (ret < 0)
312  return ret;
313  }
314 
315  av_assert0(s->pict_type == AV_PICTURE_TYPE_I || (s->last_pic.ptr &&
316  s->last_pic.ptr->f->buf[0]));
317 
318  return 0;
319 }
320 
321 /**
322  * generic function called after decoding
323  * the header and before a frame is decoded.
324  */
326 {
327  int ret;
328 
329  s->mb_skipped = 0;
330 
332  av_log(avctx, AV_LOG_ERROR, "Attempt to start a frame outside SETUP state\n");
333  return AVERROR_BUG;
334  }
335 
336  ff_mpv_unref_picture(&s->cur_pic);
337  ret = alloc_picture(s, &s->cur_pic,
338  s->pict_type != AV_PICTURE_TYPE_B && !s->droppable);
339  if (ret < 0)
340  return ret;
341 
342  s->cur_pic.ptr->f->flags |= AV_FRAME_FLAG_TOP_FIELD_FIRST * !!s->top_field_first;
343  s->cur_pic.ptr->f->flags |= AV_FRAME_FLAG_INTERLACED *
344  (!s->progressive_frame && !s->progressive_sequence);
345  s->cur_pic.ptr->field_picture = s->picture_structure != PICT_FRAME;
346 
347  s->cur_pic.ptr->f->pict_type = s->pict_type;
348  if (s->pict_type == AV_PICTURE_TYPE_I)
349  s->cur_pic.ptr->f->flags |= AV_FRAME_FLAG_KEY;
350  else
351  s->cur_pic.ptr->f->flags &= ~AV_FRAME_FLAG_KEY;
352 
353  if (s->pict_type != AV_PICTURE_TYPE_B) {
354  ff_mpv_workpic_from_pic(&s->last_pic, s->next_pic.ptr);
355  if (!s->droppable)
356  ff_mpv_workpic_from_pic(&s->next_pic, s->cur_pic.ptr);
357  }
358  ff_dlog(s->avctx, "L%p N%p C%p L%p N%p C%p type:%d drop:%d\n",
359  (void*)s->last_pic.ptr, (void*)s->next_pic.ptr, (void*)s->cur_pic.ptr,
360  s->last_pic.ptr ? s->last_pic.ptr->f->data[0] : NULL,
361  s->next_pic.ptr ? s->next_pic.ptr->f->data[0] : NULL,
362  s->cur_pic.ptr ? s->cur_pic.ptr->f->data[0] : NULL,
363  s->pict_type, s->droppable);
364 
366  if (ret < 0)
367  return ret;
368 
369  if (s->avctx->debug & FF_DEBUG_NOMC)
370  color_frame(s->cur_pic.ptr->f, 0x80);
371 
372  return 0;
373 }
374 
375 /* called after a frame has been decoded. */
377 {
378  emms_c();
379 
380  if (s->cur_pic.reference)
381  ff_thread_progress_report(&s->cur_pic.ptr->progress, INT_MAX);
382 }
383 
385 {
386  ff_print_debug_info2(s->avctx, pict, p->mb_type,
387  p->qscale_table, p->motion_val,
388  p->mb_width, p->mb_height, p->mb_stride, s->quarter_sample);
389 }
390 
392  const MPVPicture *p, int qp_type)
393 {
394  AVVideoEncParams *par;
395  int mult = (qp_type == FF_MPV_QSCALE_TYPE_MPEG1) ? 2 : 1;
396  unsigned int nb_mb = p->mb_height * p->mb_width;
397 
398  if (!(s->avctx->export_side_data & AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS))
399  return 0;
400 
402  if (!par)
403  return AVERROR(ENOMEM);
404 
405  for (unsigned y = 0; y < p->mb_height; y++)
406  for (unsigned x = 0; x < p->mb_width; x++) {
407  const unsigned int block_idx = y * p->mb_width + x;
408  const unsigned int mb_xy = y * p->mb_stride + x;
409  AVVideoBlockParams *const b = av_video_enc_params_block(par, block_idx);
410 
411  b->src_x = x * 16;
412  b->src_y = y * 16;
413  b->w = 16;
414  b->h = 16;
415 
416  b->delta_qp = p->qscale_table[mb_xy] * mult;
417  }
418 
419  return 0;
420 }
421 
423 {
424  ff_draw_horiz_band(s->avctx, s->cur_pic.ptr->f,
425  s->last_pic.ptr ? s->last_pic.ptr->f : NULL,
426  y, h, s->picture_structure,
427  s->first_field, s->low_delay);
428 }
429 
431 {
432  MpegEncContext *const s = avctx->priv_data;
433 
434  ff_mpv_unref_picture(&s->cur_pic);
435  ff_mpv_unref_picture(&s->last_pic);
436  ff_mpv_unref_picture(&s->next_pic);
437 
438  s->mb_x = s->mb_y = 0;
439 
440  s->pp_time = 0;
441 }
442 
444 {
445  if (s->pict_type != AV_PICTURE_TYPE_B && !s->partitioned_frame && !s->er.error_occurred)
446  ff_thread_progress_report(&s->cur_pic.ptr->progress, s->mb_y);
447 }
448 
449 
451  uint8_t *dest, const uint8_t *src,
452  int field_based, int field_select,
453  int src_x, int src_y,
454  int width, int height, ptrdiff_t stride,
455  int h_edge_pos, int v_edge_pos,
456  int w, int h, const h264_chroma_mc_func *pix_op,
457  int motion_x, int motion_y)
458 {
459  const int lowres = s->avctx->lowres;
460  const int op_index = lowres;
461  const int s_mask = (2 << lowres) - 1;
462  int emu = 0;
463  int sx, sy;
464 
465  av_assert2(op_index <= 3);
466 
467  if (s->quarter_sample) {
468  motion_x /= 2;
469  motion_y /= 2;
470  }
471 
472  sx = motion_x & s_mask;
473  sy = motion_y & s_mask;
474  src_x += motion_x >> lowres + 1;
475  src_y += motion_y >> lowres + 1;
476 
477  src += src_y * stride + src_x;
478 
479  if ((unsigned)src_x > FFMAX( h_edge_pos - (!!sx) - w, 0) ||
480  (unsigned)src_y > FFMAX((v_edge_pos >> field_based) - (!!sy) - h, 0)) {
481  s->vdsp.emulated_edge_mc(s->sc.edge_emu_buffer, src,
482  s->linesize, s->linesize,
483  w + 1, (h + 1) << field_based,
484  src_x, src_y * (1 << field_based),
486  src = s->sc.edge_emu_buffer;
487  emu = 1;
488  }
489 
490  sx = (sx << 2) >> lowres;
491  sy = (sy << 2) >> lowres;
492  if (field_select)
493  src += s->linesize;
494  pix_op[op_index](dest, src, stride, h, sx, sy);
495  return emu;
496 }
497 
498 /* apply one mpeg motion vector to the three components */
500  uint8_t *dest_y,
501  uint8_t *dest_cb,
502  uint8_t *dest_cr,
503  int field_based,
504  int bottom_field,
505  int field_select,
506  uint8_t *const *ref_picture,
507  const h264_chroma_mc_func *pix_op,
508  int motion_x, int motion_y,
509  int h, int mb_y)
510 {
511  const uint8_t *ptr_y, *ptr_cb, *ptr_cr;
512  int mx, my, src_x, src_y, uvsrc_x, uvsrc_y, sx, sy, uvsx, uvsy;
513  ptrdiff_t uvlinesize, linesize;
514  const int lowres = s->avctx->lowres;
515  const int op_index = lowres - 1 + s->chroma_x_shift;
516  const int block_s = 8 >> lowres;
517  const int s_mask = (2 << lowres) - 1;
518  const int h_edge_pos = s->h_edge_pos >> lowres;
519  const int v_edge_pos = s->v_edge_pos >> lowres;
520  int hc = s->chroma_y_shift ? (h+1-bottom_field)>>1 : h;
521 
522  av_assert2(op_index <= 3);
523 
524  linesize = s->cur_pic.linesize[0] << field_based;
525  uvlinesize = s->cur_pic.linesize[1] << field_based;
526 
527  // FIXME obviously not perfect but qpel will not work in lowres anyway
528  if (s->quarter_sample) {
529  motion_x /= 2;
530  motion_y /= 2;
531  }
532 
533  if (field_based) {
534  motion_y += (bottom_field - field_select)*((1 << lowres)-1);
535  }
536 
537  sx = motion_x & s_mask;
538  sy = motion_y & s_mask;
539  src_x = s->mb_x * 2 * block_s + (motion_x >> lowres + 1);
540  src_y = (mb_y * 2 * block_s >> field_based) + (motion_y >> lowres + 1);
541 
542  if (s->out_format == FMT_H263) {
543  uvsx = ((motion_x >> 1) & s_mask) | (sx & 1);
544  uvsy = ((motion_y >> 1) & s_mask) | (sy & 1);
545  uvsrc_x = src_x >> 1;
546  uvsrc_y = src_y >> 1;
547  } else if (s->out_format == FMT_H261) {
548  // even chroma mv's are full pel in H261
549  mx = motion_x / 4;
550  my = motion_y / 4;
551  uvsx = (2 * mx) & s_mask;
552  uvsy = (2 * my) & s_mask;
553  uvsrc_x = s->mb_x * block_s + (mx >> lowres);
554  uvsrc_y = mb_y * block_s + (my >> lowres);
555  } else {
556  if (s->chroma_y_shift) {
557  mx = motion_x / 2;
558  my = motion_y / 2;
559  uvsx = mx & s_mask;
560  uvsy = my & s_mask;
561  uvsrc_x = s->mb_x * block_s + (mx >> lowres + 1);
562  uvsrc_y = (mb_y * block_s >> field_based) + (my >> lowres + 1);
563  } else {
564  if (s->chroma_x_shift) {
565  //Chroma422
566  mx = motion_x / 2;
567  uvsx = mx & s_mask;
568  uvsy = motion_y & s_mask;
569  uvsrc_y = src_y;
570  uvsrc_x = s->mb_x*block_s + (mx >> (lowres+1));
571  } else {
572  //Chroma444
573  uvsx = motion_x & s_mask;
574  uvsy = motion_y & s_mask;
575  uvsrc_x = src_x;
576  uvsrc_y = src_y;
577  }
578  }
579  }
580 
581  ptr_y = ref_picture[0] + src_y * linesize + src_x;
582  ptr_cb = ref_picture[1] + uvsrc_y * uvlinesize + uvsrc_x;
583  ptr_cr = ref_picture[2] + uvsrc_y * uvlinesize + uvsrc_x;
584 
585  if ((unsigned) src_x > FFMAX( h_edge_pos - (!!sx) - 2 * block_s, 0) || uvsrc_y<0 ||
586  (unsigned) src_y > FFMAX((v_edge_pos >> field_based) - (!!sy) - FFMAX(h, hc<<s->chroma_y_shift), 0)) {
587  s->vdsp.emulated_edge_mc(s->sc.edge_emu_buffer, ptr_y,
588  linesize >> field_based, linesize >> field_based,
589  17, 17 + field_based,
590  src_x, src_y * (1 << field_based), h_edge_pos,
591  v_edge_pos);
592  ptr_y = s->sc.edge_emu_buffer;
593  if (!CONFIG_GRAY || !(s->avctx->flags & AV_CODEC_FLAG_GRAY)) {
594  uint8_t *ubuf = s->sc.edge_emu_buffer + 18 * s->linesize;
595  uint8_t *vbuf =ubuf + 10 * s->uvlinesize;
596  if (s->workaround_bugs & FF_BUG_IEDGE)
597  vbuf -= s->uvlinesize;
598  s->vdsp.emulated_edge_mc(ubuf, ptr_cb,
599  uvlinesize >> field_based, uvlinesize >> field_based,
600  9, 9 + field_based,
601  uvsrc_x, uvsrc_y * (1 << field_based),
602  h_edge_pos >> 1, v_edge_pos >> 1);
603  s->vdsp.emulated_edge_mc(vbuf, ptr_cr,
604  uvlinesize >> field_based,uvlinesize >> field_based,
605  9, 9 + field_based,
606  uvsrc_x, uvsrc_y * (1 << field_based),
607  h_edge_pos >> 1, v_edge_pos >> 1);
608  ptr_cb = ubuf;
609  ptr_cr = vbuf;
610  }
611  }
612 
613  // FIXME use this for field pix too instead of the obnoxious hack which changes picture.f->data
614  if (bottom_field) {
615  dest_y += s->linesize;
616  dest_cb += s->uvlinesize;
617  dest_cr += s->uvlinesize;
618  }
619 
620  if (field_select) {
621  ptr_y += s->linesize;
622  ptr_cb += s->uvlinesize;
623  ptr_cr += s->uvlinesize;
624  }
625 
626  sx = (sx << 2) >> lowres;
627  sy = (sy << 2) >> lowres;
628  pix_op[lowres - 1](dest_y, ptr_y, linesize, h, sx, sy);
629 
630  if (!CONFIG_GRAY || !(s->avctx->flags & AV_CODEC_FLAG_GRAY)) {
631  uvsx = (uvsx << 2) >> lowres;
632  uvsy = (uvsy << 2) >> lowres;
633  if (hc) {
634  pix_op[op_index](dest_cb, ptr_cb, uvlinesize, hc, uvsx, uvsy);
635  pix_op[op_index](dest_cr, ptr_cr, uvlinesize, hc, uvsx, uvsy);
636  }
637  }
638  // FIXME h261 lowres loop filter
639 }
640 
642  uint8_t *dest_cb, uint8_t *dest_cr,
643  uint8_t *const *ref_picture,
644  const h264_chroma_mc_func * pix_op,
645  int mx, int my)
646 {
647  const int lowres = s->avctx->lowres;
648  const int op_index = lowres;
649  const int block_s = 8 >> lowres;
650  const int s_mask = (2 << lowres) - 1;
651  const int h_edge_pos = s->h_edge_pos >> lowres + 1;
652  const int v_edge_pos = s->v_edge_pos >> lowres + 1;
653  int emu = 0, src_x, src_y, sx, sy;
654  ptrdiff_t offset;
655  const uint8_t *ptr;
656 
657  av_assert2(op_index <= 3);
658 
659  if (s->quarter_sample) {
660  mx /= 2;
661  my /= 2;
662  }
663 
664  /* In case of 8X8, we construct a single chroma motion vector
665  with a special rounding */
668 
669  sx = mx & s_mask;
670  sy = my & s_mask;
671  src_x = s->mb_x * block_s + (mx >> lowres + 1);
672  src_y = s->mb_y * block_s + (my >> lowres + 1);
673 
674  offset = src_y * s->uvlinesize + src_x;
675  ptr = ref_picture[1] + offset;
676  if ((unsigned) src_x > FFMAX(h_edge_pos - (!!sx) - block_s, 0) ||
677  (unsigned) src_y > FFMAX(v_edge_pos - (!!sy) - block_s, 0)) {
678  s->vdsp.emulated_edge_mc(s->sc.edge_emu_buffer, ptr,
679  s->uvlinesize, s->uvlinesize,
680  9, 9,
681  src_x, src_y, h_edge_pos, v_edge_pos);
682  ptr = s->sc.edge_emu_buffer;
683  emu = 1;
684  }
685  sx = (sx << 2) >> lowres;
686  sy = (sy << 2) >> lowres;
687  pix_op[op_index](dest_cb, ptr, s->uvlinesize, block_s, sx, sy);
688 
689  ptr = ref_picture[2] + offset;
690  if (emu) {
691  s->vdsp.emulated_edge_mc(s->sc.edge_emu_buffer, ptr,
692  s->uvlinesize, s->uvlinesize,
693  9, 9,
694  src_x, src_y, h_edge_pos, v_edge_pos);
695  ptr = s->sc.edge_emu_buffer;
696  }
697  pix_op[op_index](dest_cr, ptr, s->uvlinesize, block_s, sx, sy);
698 }
699 
700 /**
701  * motion compensation of a single macroblock
702  * @param s context
703  * @param dest_y luma destination pointer
704  * @param dest_cb chroma cb/u destination pointer
705  * @param dest_cr chroma cr/v destination pointer
706  * @param dir direction (0->forward, 1->backward)
707  * @param ref_picture array[3] of pointers to the 3 planes of the reference picture
708  * @param pix_op halfpel motion compensation function (average or put normally)
709  * the motion vectors are taken from s->mv and the MV type from s->mv_type
710  */
static inline void MPV_motion_lowres(MpegEncContext *s,
                                     uint8_t *dest_y, uint8_t *dest_cb,
                                     uint8_t *dest_cr,
                                     int dir, uint8_t *const *ref_picture,
                                     const h264_chroma_mc_func *pix_op)
{
    int mx, my;
    int mb_x, mb_y;
    const int lowres = s->avctx->lowres;
    /* block size shrinks with each lowres level: 8, 4, 2, 1 */
    const int block_s = 8 >> lowres;

    mb_x = s->mb_x;
    mb_y = s->mb_y;

    /* Dispatch on the MV partitioning chosen by the bitstream parser. */
    switch (s->mv_type) {
    case MV_TYPE_16X16:
        /* One vector for the whole macroblock. */
        mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr,
                           0, 0, 0,
                           ref_picture, pix_op,
                           s->mv[dir][0][0], s->mv[dir][0][1],
                           2 * block_s, mb_y);
        break;
    case MV_TYPE_8X8:
        /* Four 8x8 luma vectors; chroma is predicted once from their sum
         * (chroma_4mv_motion_lowres derives its MV from mx/my). */
        mx = 0;
        my = 0;
        for (int i = 0; i < 4; i++) {
            /* (i & 1, i >> 1) selects the 2x2 sub-block position. */
            hpel_motion_lowres(s, dest_y + ((i & 1) + (i >> 1) *
                               s->linesize) * block_s,
                               ref_picture[0], 0, 0,
                               (2 * mb_x + (i & 1)) * block_s,
                               (2 * mb_y + (i >> 1)) * block_s,
                               s->width, s->height, s->linesize,
                               s->h_edge_pos >> lowres, s->v_edge_pos >> lowres,
                               block_s, block_s, pix_op,
                               s->mv[dir][i][0], s->mv[dir][i][1]);

            mx += s->mv[dir][i][0];
            my += s->mv[dir][i][1];
        }

        if (!CONFIG_GRAY || !(s->avctx->flags & AV_CODEC_FLAG_GRAY))
            chroma_4mv_motion_lowres(s, dest_cb, dest_cr, ref_picture,
                                     pix_op, mx, my);
        break;
    case MV_TYPE_FIELD:
        if (s->picture_structure == PICT_FRAME) {
            /* Frame picture: predict top and bottom fields separately,
             * each with half the macroblock height. */
            /* top field */
            mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr,
                               1, 0, s->field_select[dir][0],
                               ref_picture, pix_op,
                               s->mv[dir][0][0], s->mv[dir][0][1],
                               block_s, mb_y);
            /* bottom field */
            mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr,
                               1, 1, s->field_select[dir][1],
                               ref_picture, pix_op,
                               s->mv[dir][1][0], s->mv[dir][1][1],
                               block_s, mb_y);
        } else {
            /* Field picture referencing the opposite parity of the frame
             * currently being decoded: take it from cur_pic instead. */
            if (s->picture_structure != s->field_select[dir][0] + 1 &&
                s->pict_type != AV_PICTURE_TYPE_B && !s->first_field) {
                ref_picture = s->cur_pic.ptr->f->data;
            }
            mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr,
                               0, 0, s->field_select[dir][0],
                               ref_picture, pix_op,
                               s->mv[dir][0][0],
                               s->mv[dir][0][1], 2 * block_s, mb_y >> 1);
        }
        break;
    case MV_TYPE_16X8:
        /* Two vectors, each covering a 16x8 half of the macroblock;
         * destination pointers advance between the two halves. */
        for (int i = 0; i < 2; i++) {
            uint8_t *const *ref2picture;

            /* Same opposite-parity special case as MV_TYPE_FIELD above. */
            if (s->picture_structure == s->field_select[dir][i] + 1 ||
                s->pict_type == AV_PICTURE_TYPE_B || s->first_field) {
                ref2picture = ref_picture;
            } else {
                ref2picture = s->cur_pic.ptr->f->data;
            }

            mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr,
                               0, 0, s->field_select[dir][i],
                               ref2picture, pix_op,
                               s->mv[dir][i][0], s->mv[dir][i][1] +
                               2 * block_s * i, block_s, mb_y >> 1);

            dest_y  += 2 * block_s * s->linesize;
            dest_cb += (2 * block_s >> s->chroma_y_shift) * s->uvlinesize;
            dest_cr += (2 * block_s >> s->chroma_y_shift) * s->uvlinesize;
        }
        break;
    case MV_TYPE_DMV:
        /* Dual prime: predict from both parities and average the results
         * (the second pass switches pix_op to the averaging functions). */
        if (s->picture_structure == PICT_FRAME) {
            for (int i = 0; i < 2; i++) {
                for (int j = 0; j < 2; j++) {
                    mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr,
                                       1, j, j ^ i,
                                       ref_picture, pix_op,
                                       s->mv[dir][2 * i + j][0],
                                       s->mv[dir][2 * i + j][1],
                                       block_s, mb_y);
                }
                pix_op = s->h264chroma.avg_h264_chroma_pixels_tab;
            }
        } else {
            for (int i = 0; i < 2; i++) {
                mpeg_motion_lowres(s, dest_y, dest_cb, dest_cr,
                                   0, 0, s->picture_structure != i + 1,
                                   ref_picture, pix_op,
                                   s->mv[dir][2 * i][0], s->mv[dir][2 * i][1],
                                   2 * block_s, mb_y >> 1);

                // after put we make avg of the same block
                pix_op = s->h264chroma.avg_h264_chroma_pixels_tab;

                // opposite parity is always in the same
                // frame if this is second field
                if (!s->first_field) {
                    ref_picture = s->cur_pic.ptr->f->data;
                }
            }
        }
        break;
    default:
        av_assert2(0);
    }
}
839 
840 /**
841  * find the lowest MB row referenced in the MVs
842  */
844 {
845  int my_max = INT_MIN, my_min = INT_MAX, qpel_shift = !s->quarter_sample;
846  int off, mvs;
847 
848  if (s->picture_structure != PICT_FRAME || s->mcsel)
849  goto unhandled;
850 
851  switch (s->mv_type) {
852  case MV_TYPE_16X16:
853  mvs = 1;
854  break;
855  case MV_TYPE_16X8:
856  mvs = 2;
857  break;
858  case MV_TYPE_8X8:
859  mvs = 4;
860  break;
861  default:
862  goto unhandled;
863  }
864 
865  for (int i = 0; i < mvs; i++) {
866  int my = s->mv[dir][i][1];
867  my_max = FFMAX(my_max, my);
868  my_min = FFMIN(my_min, my);
869  }
870 
871  off = ((FFMAX(-my_min, my_max) << qpel_shift) + 63) >> 6;
872 
873  return av_clip(s->mb_y + off, 0, s->mb_height - 1);
874 unhandled:
875  return s->mb_height - 1;
876 }
877 
878 /* add block[] to dest[] */
879 static inline void add_dct(MpegEncContext *s,
880  int16_t *block, int i, uint8_t *dest, int line_size)
881 {
882  if (s->block_last_index[i] >= 0) {
883  s->idsp.idct_add(dest, line_size, block);
884  }
885 }
886 
887 /* put block[] to dest[] */
/* Write block[] to dest[]: dequantize the intra block in place, then
 * overwrite dest[] with its inverse transform (unconditional put, no add). */
static inline void put_dct(MpegEncContext *s,
                           int16_t *block, int i, uint8_t *dest, int line_size, int qscale)
{
    s->dct_unquantize_intra(s, block, i, qscale);
    s->idsp.idct_put(dest, line_size, block);
}
894 
895 static inline void add_dequant_dct(MpegEncContext *s,
896  int16_t *block, int i, uint8_t *dest, int line_size, int qscale)
897 {
898  if (s->block_last_index[i] >= 0) {
899  s->dct_unquantize_inter(s, block, i, qscale);
900 
901  s->idsp.idct_add(dest, line_size, block);
902  }
903 }
904 
905 #define NOT_MPEG12_H261 0
906 #define MAY_BE_MPEG12_H261 1
907 #define DEFINITELY_MPEG12_H261 2
908 
909 /* generic function called after a macroblock has been parsed by the decoder.
910 
911  Important variables used:
912  s->mb_intra : true if intra macroblock
913  s->mv_dir : motion vector direction
914  s->mv_type : motion vector type
915  s->mv : motion vector
916  s->interlaced_dct : true if interlaced dct used (mpeg2)
917  */
918 static av_always_inline
920  int lowres_flag, int is_mpeg12)
921 {
922 #define IS_MPEG12_H261(s) (is_mpeg12 == MAY_BE_MPEG12_H261 ? ((s)->out_format <= FMT_H261) : is_mpeg12)
923  uint8_t *dest_y = s->dest[0], *dest_cb = s->dest[1], *dest_cr = s->dest[2];
924  int dct_linesize, dct_offset;
925  const int linesize = s->cur_pic.linesize[0]; //not s->linesize as this would be wrong for field pics
926  const int uvlinesize = s->cur_pic.linesize[1];
927  const int block_size = lowres_flag ? 8 >> s->avctx->lowres : 8;
928 
929  dct_linesize = linesize << s->interlaced_dct;
930  dct_offset = s->interlaced_dct ? linesize : linesize * block_size;
931 
932  if (!s->mb_intra) {
933  /* motion handling */
934  if (HAVE_THREADS && is_mpeg12 != DEFINITELY_MPEG12_H261 &&
935  s->avctx->active_thread_type & FF_THREAD_FRAME) {
936  if (s->mv_dir & MV_DIR_FORWARD) {
937  ff_thread_progress_await(&s->last_pic.ptr->progress,
939  }
940  if (s->mv_dir & MV_DIR_BACKWARD) {
941  ff_thread_progress_await(&s->next_pic.ptr->progress,
943  }
944  }
945 
946  if (lowres_flag) {
947  const h264_chroma_mc_func *op_pix = s->h264chroma.put_h264_chroma_pixels_tab;
948 
949  if (s->mv_dir & MV_DIR_FORWARD) {
950  MPV_motion_lowres(s, dest_y, dest_cb, dest_cr, 0, s->last_pic.data, op_pix);
951  op_pix = s->h264chroma.avg_h264_chroma_pixels_tab;
952  }
953  if (s->mv_dir & MV_DIR_BACKWARD) {
954  MPV_motion_lowres(s, dest_y, dest_cb, dest_cr, 1, s->next_pic.data, op_pix);
955  }
956  } else {
957  const op_pixels_func (*op_pix)[4];
958  const qpel_mc_func (*op_qpix)[16];
959 
960  if ((is_mpeg12 == DEFINITELY_MPEG12_H261 || !s->no_rounding) || s->pict_type == AV_PICTURE_TYPE_B) {
961  op_pix = s->hdsp.put_pixels_tab;
962  op_qpix = s->qdsp.put_qpel_pixels_tab;
963  } else {
964  op_pix = s->hdsp.put_no_rnd_pixels_tab;
965  op_qpix = s->qdsp.put_no_rnd_qpel_pixels_tab;
966  }
967  if (s->mv_dir & MV_DIR_FORWARD) {
968  ff_mpv_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_pic.data, op_pix, op_qpix);
969  op_pix = s->hdsp.avg_pixels_tab;
970  op_qpix = s->qdsp.avg_qpel_pixels_tab;
971  }
972  if (s->mv_dir & MV_DIR_BACKWARD) {
973  ff_mpv_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_pic.data, op_pix, op_qpix);
974  }
975  }
976 
977  /* skip dequant / idct if we are really late ;) */
978  if (s->avctx->skip_idct) {
979  if ( (s->avctx->skip_idct >= AVDISCARD_NONREF && s->pict_type == AV_PICTURE_TYPE_B)
980  ||(s->avctx->skip_idct >= AVDISCARD_NONKEY && s->pict_type != AV_PICTURE_TYPE_I)
981  || s->avctx->skip_idct >= AVDISCARD_ALL)
982  return;
983  }
984 
985  /* add dct residue */
986  if (!(IS_MPEG12_H261(s) || s->msmpeg4_version != MSMP4_UNUSED ||
987  (s->codec_id == AV_CODEC_ID_MPEG4 && !s->mpeg_quant))) {
988  add_dequant_dct(s, block[0], 0, dest_y , dct_linesize, s->qscale);
989  add_dequant_dct(s, block[1], 1, dest_y + block_size, dct_linesize, s->qscale);
990  add_dequant_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize, s->qscale);
991  add_dequant_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize, s->qscale);
992 
993  if (!CONFIG_GRAY || !(s->avctx->flags & AV_CODEC_FLAG_GRAY)) {
994  av_assert2(s->chroma_y_shift);
995  add_dequant_dct(s, block[4], 4, dest_cb, uvlinesize, s->chroma_qscale);
996  add_dequant_dct(s, block[5], 5, dest_cr, uvlinesize, s->chroma_qscale);
997  }
998  } else if (is_mpeg12 == DEFINITELY_MPEG12_H261 || lowres_flag || (s->codec_id != AV_CODEC_ID_WMV2)) {
999  add_dct(s, block[0], 0, dest_y , dct_linesize);
1000  add_dct(s, block[1], 1, dest_y + block_size, dct_linesize);
1001  add_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize);
1002  add_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize);
1003 
1004  if (!CONFIG_GRAY || !(s->avctx->flags & AV_CODEC_FLAG_GRAY)) {
1005  if (s->chroma_y_shift) {//Chroma420
1006  add_dct(s, block[4], 4, dest_cb, uvlinesize);
1007  add_dct(s, block[5], 5, dest_cr, uvlinesize);
1008  } else {
1009  //chroma422
1010  dct_linesize = uvlinesize << s->interlaced_dct;
1011  dct_offset = s->interlaced_dct ? uvlinesize : uvlinesize*block_size;
1012 
1013  add_dct(s, block[4], 4, dest_cb, dct_linesize);
1014  add_dct(s, block[5], 5, dest_cr, dct_linesize);
1015  add_dct(s, block[6], 6, dest_cb + dct_offset, dct_linesize);
1016  add_dct(s, block[7], 7, dest_cr + dct_offset, dct_linesize);
1017  if (!s->chroma_x_shift) {//Chroma444
1018  add_dct(s, block[8], 8, dest_cb + block_size, dct_linesize);
1019  add_dct(s, block[9], 9, dest_cr + block_size, dct_linesize);
1020  add_dct(s, block[10], 10, dest_cb + block_size + dct_offset, dct_linesize);
1021  add_dct(s, block[11], 11, dest_cr + block_size + dct_offset, dct_linesize);
1022  }
1023  }
1024  } //fi gray
1025  } else if (CONFIG_WMV2_DECODER) {
1026  ff_wmv2_add_mb(s, block, dest_y, dest_cb, dest_cr);
1027  }
1028  } else {
1029  /* Only MPEG-4 Simple Studio Profile is supported in > 8-bit mode.
1030  TODO: Integrate 10-bit properly into mpegvideo.c so that ER works properly */
1031  if (is_mpeg12 != DEFINITELY_MPEG12_H261 && CONFIG_MPEG4_DECODER &&
1032  /* s->codec_id == AV_CODEC_ID_MPEG4 && */
1033  s->avctx->bits_per_raw_sample > 8) {
1034  ff_mpeg4_decode_studio(s, dest_y, dest_cb, dest_cr, block_size,
1035  uvlinesize, dct_linesize, dct_offset);
1036  } else if (!IS_MPEG12_H261(s)) {
1037  /* dct only in intra block */
1038  put_dct(s, block[0], 0, dest_y , dct_linesize, s->qscale);
1039  put_dct(s, block[1], 1, dest_y + block_size, dct_linesize, s->qscale);
1040  put_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize, s->qscale);
1041  put_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize, s->qscale);
1042 
1043  if (!CONFIG_GRAY || !(s->avctx->flags & AV_CODEC_FLAG_GRAY)) {
1044  if (s->chroma_y_shift) {
1045  put_dct(s, block[4], 4, dest_cb, uvlinesize, s->chroma_qscale);
1046  put_dct(s, block[5], 5, dest_cr, uvlinesize, s->chroma_qscale);
1047  } else {
1048  dct_offset >>= 1;
1049  dct_linesize >>= 1;
1050  put_dct(s, block[4], 4, dest_cb, dct_linesize, s->chroma_qscale);
1051  put_dct(s, block[5], 5, dest_cr, dct_linesize, s->chroma_qscale);
1052  put_dct(s, block[6], 6, dest_cb + dct_offset, dct_linesize, s->chroma_qscale);
1053  put_dct(s, block[7], 7, dest_cr + dct_offset, dct_linesize, s->chroma_qscale);
1054  }
1055  }
1056  } else {
1057  s->idsp.idct_put(dest_y, dct_linesize, block[0]);
1058  s->idsp.idct_put(dest_y + block_size, dct_linesize, block[1]);
1059  s->idsp.idct_put(dest_y + dct_offset, dct_linesize, block[2]);
1060  s->idsp.idct_put(dest_y + dct_offset + block_size, dct_linesize, block[3]);
1061 
1062  if (!CONFIG_GRAY || !(s->avctx->flags & AV_CODEC_FLAG_GRAY)) {
1063  if (s->chroma_y_shift) {
1064  s->idsp.idct_put(dest_cb, uvlinesize, block[4]);
1065  s->idsp.idct_put(dest_cr, uvlinesize, block[5]);
1066  } else {
1067  dct_linesize = uvlinesize << s->interlaced_dct;
1068  dct_offset = s->interlaced_dct ? uvlinesize : uvlinesize*block_size;
1069 
1070  s->idsp.idct_put(dest_cb, dct_linesize, block[4]);
1071  s->idsp.idct_put(dest_cr, dct_linesize, block[5]);
1072  s->idsp.idct_put(dest_cb + dct_offset, dct_linesize, block[6]);
1073  s->idsp.idct_put(dest_cr + dct_offset, dct_linesize, block[7]);
1074  if (!s->chroma_x_shift) { //Chroma444
1075  s->idsp.idct_put(dest_cb + block_size, dct_linesize, block[8]);
1076  s->idsp.idct_put(dest_cr + block_size, dct_linesize, block[9]);
1077  s->idsp.idct_put(dest_cb + block_size + dct_offset, dct_linesize, block[10]);
1078  s->idsp.idct_put(dest_cr + block_size + dct_offset, dct_linesize, block[11]);
1079  }
1080  }
1081  } //gray
1082  }
1083  }
1084 }
1085 
1087 {
1088  const int mb_xy = s->mb_y * s->mb_stride + s->mb_x;
1089  uint8_t *mbskip_ptr = &s->mbskip_table[mb_xy];
1090 
1091  s->cur_pic.qscale_table[mb_xy] = s->qscale;
1092 
1093  /* avoid copy if macroblock skipped in last frame too */
1094  if (s->mb_skipped) {
1095  s->mb_skipped = 0;
1096  av_assert2(s->pict_type != AV_PICTURE_TYPE_I);
1097  *mbskip_ptr = 1;
1098  } else if (!s->cur_pic.reference) {
1099  *mbskip_ptr = 1;
1100  } else{
1101  *mbskip_ptr = 0; /* not skipped */
1102  }
1103 
1104  if (s->avctx->debug & FF_DEBUG_DCT_COEFF) {
1105  /* print DCT coefficients */
1106  av_log(s->avctx, AV_LOG_DEBUG, "DCT coeffs of MB at %dx%d:\n", s->mb_x, s->mb_y);
1107  for (int i = 0; i < 6; i++) {
1108  for (int j = 0; j < 64; j++) {
1109  av_log(s->avctx, AV_LOG_DEBUG, "%5d",
1110  block[i][s->idsp.idct_permutation[j]]);
1111  }
1112  av_log(s->avctx, AV_LOG_DEBUG, "\n");
1113  }
1114  }
1115 
1116  av_assert2((s->out_format <= FMT_H261) == (s->out_format == FMT_H261 || s->out_format == FMT_MPEG1));
1117  if (!s->avctx->lowres) {
1118 #if !CONFIG_SMALL
1119  if (s->out_format <= FMT_H261)
1121  else
1123 #else
1125 #endif
1126  } else
1128 }
ff_h263_round_chroma
static int ff_h263_round_chroma(int x)
Definition: motion_est.h:107
PICT_FRAME
#define PICT_FRAME
Definition: mpegutils.h:33
ff_mpv_common_init
av_cold int ff_mpv_common_init(MpegEncContext *s)
init common structure for both encoder and decoder.
Definition: mpegvideo.c:685
ff_draw_horiz_band
void ff_draw_horiz_band(AVCodecContext *avctx, const AVFrame *cur, const AVFrame *last, int y, int h, int picture_structure, int first_field, int low_delay)
Draw a horizontal band if supported.
Definition: mpegutils.c:54
AVCodecContext::hwaccel
const struct AVHWAccel * hwaccel
Hardware accelerator in use.
Definition: avcodec.h:1445
h264_chroma_mc_func
void(* h264_chroma_mc_func)(uint8_t *dst, const uint8_t *src, ptrdiff_t srcStride, int h, int x, int y)
Definition: h264chroma.h:25
MV_TYPE_16X16
#define MV_TYPE_16X16
1 vector for the whole mb
Definition: mpegvideo.h:264
MpegEncContext::progressive_sequence
int progressive_sequence
Definition: mpegvideo.h:438
av_clip
#define av_clip
Definition: common.h:100
ff_thread_progress_report
void ff_thread_progress_report(ThreadProgress *pro, int n)
This function is a no-op in no-op mode; otherwise it notifies other threads that a certain level of p...
Definition: threadprogress.c:53
AVERROR
Return a negative error code from a POSIX error code, to return from library functions.
AVCodecContext::workaround_bugs
int workaround_bugs
Work around bugs in encoders which sometimes cannot be detected automatically.
Definition: avcodec.h:1367
threadprogress.h
ff_mpv_init_context_frame
int ff_mpv_init_context_frame(MpegEncContext *s)
Initialize and allocates MpegEncContext fields dependent on the resolution.
Definition: mpegvideo.c:510
ff_mpv_motion
void ff_mpv_motion(MpegEncContext *s, uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr, int dir, uint8_t *const *ref_picture, const op_pixels_func(*pix_op)[4], const qpel_mc_func(*qpix_op)[16])
Definition: mpegvideo_motion.c:819
mpeg4videodec.h
MV_TYPE_16X8
#define MV_TYPE_16X8
2 vectors, one per 16x8 block
Definition: mpegvideo.h:266
ff_thread_can_start_frame
int ff_thread_can_start_frame(AVCodecContext *avctx)
Definition: pthread_frame.c:1017
AV_CODEC_ID_MPEG4
@ AV_CODEC_ID_MPEG4
Definition: codec_id.h:64
MpegEncContext::dct_offset
uint16_t(* dct_offset)[64]
Definition: mpegvideo.h:330
put_dct
static void put_dct(MpegEncContext *s, int16_t *block, int i, uint8_t *dest, int line_size, int qscale)
Definition: mpegvideo_dec.c:888
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:410
MpegEncContext::workaround_bugs
int workaround_bugs
workaround bugs in encoders which cannot be detected automatically
Definition: mpegvideo.h:114
ff_mpv_report_decode_progress
void ff_mpv_report_decode_progress(MpegEncContext *s)
Definition: mpegvideo_dec.c:443
AVFrame::width
int width
Definition: frame.h:482
w
uint8_t w
Definition: llviddspenc.c:38
internal.h
b
#define b
Definition: input.c:41
ff_toupper4
unsigned int ff_toupper4(unsigned int x)
Definition: to_upper4.h:29
MpegEncContext::MSMP4_UNUSED
@ MSMP4_UNUSED
Definition: mpegvideo.h:416
MpegEncContext::dest
uint8_t * dest[3]
Definition: mpegvideo.h:293
mpegvideo.h
ff_wmv2_add_mb
void ff_wmv2_add_mb(MpegEncContext *s, int16_t block1[6][64], uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr)
Definition: wmv2dec.c:85
MpegEncContext::avctx
struct AVCodecContext * avctx
Definition: mpegvideo.h:91
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
MAY_BE_MPEG12_H261
#define MAY_BE_MPEG12_H261
Definition: mpegvideo_dec.c:906
FMT_H261
@ FMT_H261
Definition: mpegvideo.h:64
MpegEncContext::height
int height
picture size. must be a multiple of 16
Definition: mpegvideo.h:96
mpegutils.h
thread.h
MV_DIR_BACKWARD
#define MV_DIR_BACKWARD
Definition: mpegvideo.h:261
MV_TYPE_DMV
#define MV_TYPE_DMV
2 vectors, special mpeg2 Dual Prime Vectors
Definition: mpegvideo.h:268
AV_CODEC_ID_H261
@ AV_CODEC_ID_H261
Definition: codec_id.h:55
AV_FRAME_FLAG_TOP_FIELD_FIRST
#define AV_FRAME_FLAG_TOP_FIELD_FIRST
A flag to mark frames where the top field is displayed first if the content is interlaced.
Definition: frame.h:674
AV_VIDEO_ENC_PARAMS_MPEG2
@ AV_VIDEO_ENC_PARAMS_MPEG2
Definition: video_enc_params.h:65
mx
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t mx
Definition: dsp.h:53
ff_mpv_reconstruct_mb
void ff_mpv_reconstruct_mb(MpegEncContext *s, int16_t block[12][64])
Definition: mpegvideo_dec.c:1086
MPVPicture::mb_type
uint32_t * mb_type
types and macros are defined in mpegutils.h
Definition: mpegpicture.h:68
FMT_MPEG1
@ FMT_MPEG1
Definition: mpegvideo.h:63
AVCodecContext::codec
const struct AVCodec * codec
Definition: avcodec.h:460
fail
#define fail()
Definition: checkasm.h:193
MpegEncContext::padding_bug_score
int padding_bug_score
used to detect the VERY common padding bug in MPEG-4
Definition: mpegvideo.h:393
MpegEncContext::linesize
ptrdiff_t linesize
line size, in bytes, may be different from width
Definition: mpegvideo.h:129
MPVPicture::motion_val
int16_t(*[2] motion_val)[2]
Definition: mpegpicture.h:65
av_pix_fmt_get_chroma_sub_sample
int av_pix_fmt_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
Utility function to access log2_chroma_w log2_chroma_h from the pixel format AVPixFmtDescriptor.
Definition: pixdesc.c:3341
hpel_motion_lowres
static int hpel_motion_lowres(MpegEncContext *s, uint8_t *dest, const uint8_t *src, int field_based, int field_select, int src_x, int src_y, int width, int height, ptrdiff_t stride, int h_edge_pos, int v_edge_pos, int w, int h, const h264_chroma_mc_func *pix_op, int motion_x, int motion_y)
Definition: mpegvideo_dec.c:450
MpegEncContext::width
int width
Definition: mpegvideo.h:96
AVCodecContext::coded_height
int coded_height
Definition: avcodec.h:647
refstruct.h
AVVideoEncParams
Video encoding parameters for a given frame.
Definition: video_enc_params.h:73
MPVPicture::dummy
int dummy
Picture is a dummy and should not be output.
Definition: mpegpicture.h:81
mult
static int16_t mult(Float11 *f1, Float11 *f2)
Definition: g726.c:60
ff_mpv_common_end
void ff_mpv_common_end(MpegEncContext *s)
Definition: mpegvideo.c:772
avassert.h
mpegvideodec.h
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:209
av_cold
#define av_cold
Definition: attributes.h:90
AV_FRAME_FLAG_KEY
#define AV_FRAME_FLAG_KEY
A flag to mark frames that are keyframes.
Definition: frame.h:661
emms_c
#define emms_c()
Definition: emms.h:63
ff_mpv_common_frame_size_change
int ff_mpv_common_frame_size_change(MpegEncContext *s)
Definition: mpegvideo_dec.c:153
ff_hwaccel_frame_priv_alloc
int ff_hwaccel_frame_priv_alloc(AVCodecContext *avctx, void **hwaccel_picture_private)
Allocate a hwaccel frame private data if the provided avctx uses a hwaccel method that needs it.
Definition: decode.c:2260
s
#define s(width, name)
Definition: cbs_vp9.c:198
MpegEncContext::last_time_base
int last_time_base
Definition: mpegvideo.h:377
AV_CEIL_RSHIFT
#define AV_CEIL_RSHIFT(a, b)
Definition: common.h:60
MpegEncContext::h_edge_pos
int h_edge_pos
Definition: mpegvideo.h:127
AV_GET_BUFFER_FLAG_REF
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
Definition: avcodec.h:431
ff_thread_get_buffer
int ff_thread_get_buffer(AVCodecContext *avctx, AVFrame *f, int flags)
Wrapper around get_buffer() for frame-multithreaded codecs.
Definition: pthread_frame.c:1053
AV_CODEC_ID_WMV2
@ AV_CODEC_ID_WMV2
Definition: codec_id.h:70
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:40
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:230
decode.h
limits.h
AV_CODEC_ID_VC1IMAGE
@ AV_CODEC_ID_VC1IMAGE
Definition: codec_id.h:204
MpegEncContext::cur_pic
MPVWorkPicture cur_pic
copy of the current picture structure.
Definition: mpegvideo.h:177
AVCodecContext::codec_id
enum AVCodecID codec_id
Definition: avcodec.h:461
my
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t intptr_t my
Definition: dsp.h:53
FMT_H263
@ FMT_H263
Definition: mpegvideo.h:65
ff_mpv_unref_picture
void ff_mpv_unref_picture(MPVWorkPicture *pic)
Definition: mpegpicture.c:98
MpegEncContext::low_delay
int low_delay
no reordering needed / has no B-frames
Definition: mpegvideo.h:389
AVDISCARD_ALL
@ AVDISCARD_ALL
discard all
Definition: defs.h:221
MpegEncContext::picture_pool
struct AVRefStructPool * picture_pool
Pool for MPVPictures.
Definition: mpegvideo.h:131
MpegEncContext::field_select
int field_select[2][2]
Definition: mpegvideo.h:275
ff_thread_progress_await
void ff_thread_progress_await(const ThreadProgress *pro_c, int n)
This function is a no-op in no-op mode; otherwise it waits until other threads have reached a certain...
Definition: threadprogress.c:64
ff_mpv_export_qp_table
int ff_mpv_export_qp_table(const MpegEncContext *s, AVFrame *f, const MPVPicture *p, int qp_type)
Definition: mpegvideo_dec.c:391
NULL
#define NULL
Definition: coverity.c:32
MpegEncContext::mb_y
int mb_y
Definition: mpegvideo.h:286
ff_mpv_idct_init
av_cold void ff_mpv_idct_init(MpegEncContext *s)
Definition: mpegvideo.c:307
MpegEncContext::next_pic
MPVWorkPicture next_pic
copy of the next picture structure.
Definition: mpegvideo.h:165
ff_mpv_common_defaults
av_cold void ff_mpv_common_defaults(MpegEncContext *s)
Set the given MpegEncContext to common defaults (same for encoding and decoding).
Definition: mpegvideo.c:483
DEFINITELY_MPEG12_H261
#define DEFINITELY_MPEG12_H261
Definition: mpegvideo_dec.c:907
AV_PICTURE_TYPE_I
@ AV_PICTURE_TYPE_I
Intra.
Definition: avutil.h:279
FF_BUG_IEDGE
#define FF_BUG_IEDGE
Definition: avcodec.h:1382
av_refstruct_pool_get
void * av_refstruct_pool_get(AVRefStructPool *pool)
Get an object from the pool, reusing an old one from the pool when available.
Definition: refstruct.c:297
IS_MPEG12_H261
#define IS_MPEG12_H261(s)
lowres
static int lowres
Definition: ffplay.c:330
FF_THREAD_IS_COPY
@ FF_THREAD_IS_COPY
Definition: thread.h:61
alloc_dummy_frame
static int av_cold alloc_dummy_frame(MpegEncContext *s, MPVWorkPicture *dst)
Definition: mpegvideo_dec.c:249
op_pixels_func
void(* op_pixels_func)(uint8_t *block, const uint8_t *pixels, ptrdiff_t line_size, int h)
Definition: hpeldsp.h:38
FF_MPV_QSCALE_TYPE_MPEG1
#define FF_MPV_QSCALE_TYPE_MPEG1
Definition: mpegvideodec.h:40
MPVPicture::reference
int reference
Definition: mpegpicture.h:86
qpel_mc_func
void(* qpel_mc_func)(uint8_t *dst, const uint8_t *src, ptrdiff_t stride)
Definition: qpeldsp.h:65
MpegEncContext::private_ctx
void * private_ctx
Definition: mpegvideo.h:94
MV_TYPE_8X8
#define MV_TYPE_8X8
4 vectors (H.263, MPEG-4 4MV)
Definition: mpegvideo.h:265
ff_mpv_alloc_dummy_frames
int ff_mpv_alloc_dummy_frames(MpegEncContext *s)
Ensure that the dummy frames are allocated according to pict_type if necessary.
Definition: mpegvideo_dec.c:282
ff_dlog
#define ff_dlog(a,...)
Definition: tableprint_vlc.h:28
MpegEncContext::pb_field_time
uint16_t pb_field_time
like above, just for interlaced
Definition: mpegvideo.h:384
add_dct
static void add_dct(MpegEncContext *s, int16_t *block, int i, uint8_t *dest, int line_size)
Definition: mpegvideo_dec.c:879
av_video_enc_params_create_side_data
AVVideoEncParams * av_video_enc_params_create_side_data(AVFrame *frame, enum AVVideoEncParamsType type, unsigned int nb_blocks)
Allocates memory for AVEncodeInfoFrame plus an array of.
Definition: video_enc_params.c:58
FF_DEBUG_DCT_COEFF
#define FF_DEBUG_DCT_COEFF
Definition: avcodec.h:1420
AVDISCARD_NONKEY
@ AVDISCARD_NONKEY
discard all frames except keyframes
Definition: defs.h:220
f
f
Definition: af_crystalizer.c:122
MPVPicture::mb_stride
int mb_stride
mb_stride of the tables
Definition: mpegpicture.h:79
ff_print_debug_info2
void ff_print_debug_info2(AVCodecContext *avctx, AVFrame *pict, const uint32_t *mbtype_table, const int8_t *qscale_table, int16_t(*const motion_val[2])[2], int mb_width, int mb_height, int mb_stride, int quarter_sample)
Print debugging info for the given picture.
Definition: mpegutils.c:155
AV_CODEC_FLAG_GRAY
#define AV_CODEC_FLAG_GRAY
Only decode/encode grayscale.
Definition: avcodec.h:322
MpegEncContext::qscale
int qscale
QP.
Definition: mpegvideo.h:199
height
#define height
Definition: dsp.h:85
dst
uint8_t ptrdiff_t const uint8_t ptrdiff_t int intptr_t intptr_t int int16_t * dst
Definition: dsp.h:83
MpegEncContext::v_edge_pos
int v_edge_pos
horizontal / vertical position of the right/bottom edge (pixel replication)
Definition: mpegvideo.h:127
AV_CODEC_ID_H263
@ AV_CODEC_ID_H263
Definition: codec_id.h:56
h264chroma.h
ff_mpeg_draw_horiz_band
void ff_mpeg_draw_horiz_band(MpegEncContext *s, int y, int h)
Definition: mpegvideo_dec.c:422
AVFrame::format
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:497
MpegEncContext::quarter_sample
int quarter_sample
1->qpel, 0->half pel ME/MC
Definition: mpegvideo.h:386
MpegEncContext::context_initialized
int context_initialized
Definition: mpegvideo.h:119
ff_mpv_frame_start
int ff_mpv_frame_start(MpegEncContext *s, AVCodecContext *avctx)
generic function called after decoding the header and before a frame is decoded.
Definition: mpegvideo_dec.c:325
ff_mpeg_flush
void ff_mpeg_flush(AVCodecContext *avctx)
Definition: mpegvideo_dec.c:430
offset
Offset into the filter context; see writing_filters.txt for details.
Definition: writing_filters.txt:86
MV_TYPE_FIELD
#define MV_TYPE_FIELD
2 vectors, one per field
Definition: mpegvideo.h:267
FF_THREAD_NO_FRAME_THREADING
@ FF_THREAD_NO_FRAME_THREADING
Definition: thread.h:63
color_frame
static void color_frame(AVFrame *frame, int luma)
Definition: mpegvideo_dec.c:264
MPVPicture::mb_width
int mb_width
mb_width of the tables
Definition: mpegpicture.h:77
lowest_referenced_row
static int lowest_referenced_row(MpegEncContext *s, int dir)
find the lowest MB row referenced in the MVs
Definition: mpegvideo_dec.c:843
AV_CODEC_ID_MSS2
@ AV_CODEC_ID_MSS2
Definition: codec_id.h:221
FF_THREAD_FRAME
#define FF_THREAD_FRAME
Decode more than one frame at once.
Definition: avcodec.h:1612
AVCodec::id
enum AVCodecID id
Definition: codec.h:201
emms.h
MPVPicture::hwaccel_picture_private
void * hwaccel_picture_private
RefStruct reference for hardware accelerator private data.
Definition: mpegpicture.h:75
avcodec_default_get_buffer2
int avcodec_default_get_buffer2(AVCodecContext *s, AVFrame *frame, int flags)
The default callback for AVCodecContext.get_buffer2().
Definition: get_buffer.c:253
ff_print_debug_info
void ff_print_debug_info(const MpegEncContext *s, const MPVPicture *p, AVFrame *pict)
Definition: mpegvideo_dec.c:384
mpv_reconstruct_mb_internal
static av_always_inline void mpv_reconstruct_mb_internal(MpegEncContext *s, int16_t block[12][64], int lowres_flag, int is_mpeg12)
Definition: mpegvideo_dec.c:919
av_assert2
#define av_assert2(cond)
assert() equivalent, that does lie in speed critical code.
Definition: avassert.h:67
MpegEncContext::uvlinesize
ptrdiff_t uvlinesize
line size, for chroma in bytes, may be different from width
Definition: mpegvideo.h:130
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
MPVPicture::qscale_table
int8_t * qscale_table
Definition: mpegpicture.h:62
internal.h
mpeg_motion_lowres
static av_always_inline void mpeg_motion_lowres(MpegEncContext *s, uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr, int field_based, int bottom_field, int field_select, uint8_t *const *ref_picture, const h264_chroma_mc_func *pix_op, int motion_x, int motion_y, int h, int mb_y)
Definition: mpegvideo_dec.c:499
av_assert1
#define av_assert1(cond)
assert() equivalent, that does not lie in speed critical code.
Definition: avassert.h:56
MpegEncContext::mb_x
int mb_x
Definition: mpegvideo.h:286
av_always_inline
#define av_always_inline
Definition: attributes.h:49
FFMIN
#define FFMIN(a, b)
Definition: macros.h:49
AVVideoBlockParams
Data structure for storing block-level encoding information.
Definition: video_enc_params.h:120
MpegEncContext::last_pic
MPVWorkPicture last_pic
copy of the previous picture structure.
Definition: mpegvideo.h:159
MPVPicture::mb_height
int mb_height
mb_height of the tables
Definition: mpegpicture.h:78
AVCodecContext::height
int height
Definition: avcodec.h:632
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:671
MPV_motion_lowres
static void MPV_motion_lowres(MpegEncContext *s, uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr, int dir, uint8_t *const *ref_picture, const h264_chroma_mc_func *pix_op)
motion compensation of a single macroblock
Definition: mpegvideo_dec.c:711
AV_FRAME_FLAG_INTERLACED
#define AV_FRAME_FLAG_INTERLACED
A flag to mark frames whose content is interlaced.
Definition: frame.h:669
MpegEncContext::picture_number
int picture_number
Definition: mpegvideo.h:122
FF_DEBUG_NOMC
#define FF_DEBUG_NOMC
Definition: avcodec.h:1429
avcodec.h
ff_mpv_workpic_from_pic
void ff_mpv_workpic_from_pic(MPVWorkPicture *wpic, MPVPicture *pic)
Definition: mpegpicture.c:128
stride
#define stride
Definition: h264pred_template.c:536
chroma_4mv_motion_lowres
static void chroma_4mv_motion_lowres(MpegEncContext *s, uint8_t *dest_cb, uint8_t *dest_cr, uint8_t *const *ref_picture, const h264_chroma_mc_func *pix_op, int mx, int my)
Definition: mpegvideo_dec.c:641
ret
ret
Definition: filter_design.txt:187
wmv2dec.h
frame
A decoded video frame (or group of audio samples) as stored in an AVFrame structure; see filter_design.txt for frame ownership and request_frame semantics.
Definition: filter_design.txt:264
ff_mpv_decode_init
int ff_mpv_decode_init(MpegEncContext *s, AVCodecContext *avctx)
Initialize the given MpegEncContext for decoding.
Definition: mpegvideo_dec.c:46
ff_thread_sync_ref
enum ThreadingStatus ff_thread_sync_ref(AVCodecContext *avctx, size_t offset)
Allows to synchronize objects whose lifetime is the whole decoding process among all frame threads.
Definition: decode.c:1913
MPVPicture::f
struct AVFrame * f
Definition: mpegpicture.h:59
AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS
#define AV_CODEC_EXPORT_DATA_VIDEO_ENC_PARAMS
Decoding only.
Definition: avcodec.h:415
ff_mpv_free_context_frame
void ff_mpv_free_context_frame(MpegEncContext *s)
Frees and resets MpegEncContext fields depending on the resolution as well as the slice thread contex...
Definition: mpegvideo.c:747
ff_mpeg_update_thread_context
int ff_mpeg_update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
Definition: mpegvideo_dec.c:77
ff_mpeg4_decode_studio
void ff_mpeg4_decode_studio(MpegEncContext *s, uint8_t *dest_y, uint8_t *dest_cb, uint8_t *dest_cr, int block_size, int uvlinesize, int dct_linesize, int dct_offset)
Definition: mpeg4videodec.c:253
AVCodecContext
main external API structure.
Definition: avcodec.h:451
AVFrame::height
int height
Definition: frame.h:482
alloc_picture
static int alloc_picture(MpegEncContext *s, MPVWorkPicture *dst, int reference)
Definition: mpegvideo_dec.c:197
AV_PICTURE_TYPE_B
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
Definition: avutil.h:281
add_dequant_dct
static void add_dequant_dct(MpegEncContext *s, int16_t *block, int i, uint8_t *dest, int line_size, int qscale)
Definition: mpegvideo_dec.c:895
ff_mpv_frame_end
void ff_mpv_frame_end(MpegEncContext *s)
Definition: mpegvideo_dec.c:376
NOT_MPEG12_H261
#define NOT_MPEG12_H261
Definition: mpegvideo_dec.c:905
ff_mpv_pic_check_linesize
int ff_mpv_pic_check_linesize(void *logctx, const AVFrame *f, ptrdiff_t *linesizep, ptrdiff_t *uvlinesizep)
Definition: mpegpicture.c:181
ff_h264chroma_init
av_cold void ff_h264chroma_init(H264ChromaContext *c, int bit_depth)
Definition: h264chroma.c:41
ff_mpv_replace_picture
void ff_mpv_replace_picture(MPVWorkPicture *dst, const MPVWorkPicture *src)
Definition: mpegpicture.c:121
AVCodecContext::coded_width
int coded_width
Bitstream width / height, may be different from width/height e.g.
Definition: avcodec.h:647
ff_mpv_init_duplicate_contexts
int ff_mpv_init_duplicate_contexts(MpegEncContext *s)
Initialize an MpegEncContext's thread contexts.
Definition: mpegvideo.c:386
MPVWorkPicture
Definition: mpegpicture.h:95
ThreadingStatus
ThreadingStatus
Definition: thread.h:60
MPVPicture::progress
ThreadProgress progress
Definition: mpegpicture.h:92
av_refstruct_pool_uninit
static void av_refstruct_pool_uninit(AVRefStructPool **poolp)
Mark the pool as being available for freeing.
Definition: refstruct.h:292
AVCodecContext::codec_tag
unsigned int codec_tag
fourcc (LSB first, so "ABCD" -> ('D'<<24) + ('C'<<16) + ('B'<<8) + 'A').
Definition: avcodec.h:476
FFALIGN
#define FFALIGN(x, a)
Definition: macros.h:78
MV_DIR_FORWARD
#define MV_DIR_FORWARD
Definition: mpegvideo.h:260
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:478
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:632
imgutils.h
AVERROR_BUG
#define AVERROR_BUG
Internal bug, also see AVERROR_BUG2.
Definition: error.h:52
block
The exact code depends on how similar the blocks are and how related they are to the block
Definition: filter_design.txt:207
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
av_video_enc_params_block
static av_always_inline AVVideoBlockParams * av_video_enc_params_block(AVVideoEncParams *par, unsigned int idx)
Get the block at the specified.
Definition: video_enc_params.h:143
h
h
Definition: vp9dsp_template.c:2070
AV_CODEC_ID_WMV3IMAGE
@ AV_CODEC_ID_WMV3IMAGE
Definition: codec_id.h:203
av_image_check_size
int av_image_check_size(unsigned int w, unsigned int h, int log_offset, void *log_ctx)
Check if the given dimension of an image is valid, meaning that all bytes of the image can be address...
Definition: imgutils.c:318
MPVPicture
MPVPicture.
Definition: mpegpicture.h:58
width
#define width
Definition: dsp.h:85
AV_CODEC_ID_FLV1
@ AV_CODEC_ID_FLV1
Definition: codec_id.h:73
AVDISCARD_NONREF
@ AVDISCARD_NONREF
discard all non reference
Definition: defs.h:217
ff_mpv_alloc_pic_accessories
int ff_mpv_alloc_pic_accessories(AVCodecContext *avctx, MPVWorkPicture *wpic, ScratchpadContext *sc, BufferPoolContext *pools, int mb_height)
Allocate an MPVPicture's accessories (but not the AVFrame's buffer itself) and set the MPVWorkPicture...
Definition: mpegpicture.c:237
MpegEncContext
MpegEncContext.
Definition: mpegvideo.h:73
ff_mpv_alloc_pic_pool
av_cold AVRefStructPool * ff_mpv_alloc_pic_pool(int init_progress)
Allocate a pool of MPVPictures.
Definition: mpegpicture.c:90
src
#define src
Definition: vp8dsp.c:248
video_enc_params.h
ff_mpv_decode_close
int ff_mpv_decode_close(AVCodecContext *avctx)
Definition: mpegvideo_dec.c:144
MpegEncContext::rtp_mode
int rtp_mode
Definition: mpegvideo.h:467