#define VP9_SYNCCODE 0x498342

#define HWACCEL_MAX (CONFIG_VP9_DXVA2_HWACCEL + CONFIG_VP9_D3D11VA_HWACCEL + CONFIG_VP9_VAAPI_HWACCEL)

if (!(s->pix_fmt == s->gf_fmt && w == s->w && h == s->h)) {
switch (s->pix_fmt) {
#if CONFIG_VP9_DXVA2_HWACCEL
#if CONFIG_VP9_D3D11VA_HWACCEL
#if CONFIG_VP9_VAAPI_HWACCEL
#if CONFIG_VP9_VAAPI_HWACCEL
*fmtp++ = s->pix_fmt;
s->last_fmt = s->pix_fmt;
s->cols = (w + 7) >> 3;
s->rows = (h + 7) >> 3;

#define assign(var, type, n) var = (type) p; p += s->sb_cols * (n) * sizeof(*var)
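/*
 * Illustrative sketch, not part of vp9.c: how an assign()-style macro carves
 * a single allocation into typed sub-arrays. The ExampleCtx struct, its
 * fields and alloc_above_ctx() are hypothetical; only the pointer-bumping
 * pattern mirrors the macro above.
 */
#include <stdint.h>
#include <stdlib.h>

typedef struct ExampleCtx {
    int      sb_cols;
    uint8_t *above_partition_ctx;  /* 8 bytes per superblock column */
    uint8_t *above_skip_ctx;       /* 8 bytes per superblock column */
    int16_t *above_mv_ctx;         /* 16 int16_t per superblock column */
    void    *above_ctx_buf;        /* owns the whole block */
} ExampleCtx;

static int alloc_above_ctx(ExampleCtx *s)
{
    /* one allocation sized for all three arrays together */
    uint8_t *p = malloc(s->sb_cols * (8 + 8 + 16 * sizeof(int16_t)));
    if (!p)
        return -1;
    s->above_ctx_buf = p;
#define assign(var, type, n) var = (type) p; p += s->sb_cols * (n) * sizeof(*var)
    assign(s->above_partition_ctx, uint8_t *,  8);
    assign(s->above_skip_ctx,      uint8_t *,  8);
    assign(s->above_mv_ctx,        int16_t *, 16);
#undef assign
    return 0;
}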
int chroma_blocks, chroma_eobs, bytesperpixel = s->bytesperpixel;

chroma_blocks = 64 * 64 >> (s->ss_h + s->ss_v);
chroma_eobs   = 16 * 16 >> (s->ss_h + s->ss_v);
                16 * 16 + 2 * chroma_eobs) * sbs);
                16 * 16 + 2 * chroma_eobs);
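/*
 * Worked example, derived from the two expressions above rather than from
 * additional vp9.c code: a 64x64 superblock has 64 * 64 luma samples and
 * 16 * 16 = 256 4x4 transform blocks. With 4:2:0 subsampling
 * (ss_h = ss_v = 1) each chroma plane therefore gets
 * chroma_blocks = 64 * 64 >> 2 = 1024 coefficient slots and
 * chroma_eobs = 256 >> 2 = 64 end-of-block entries; with 4:4:4
 * (ss_h = ss_v = 0) chroma matches luma at 4096 and 256.
 */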
return m - ((v + 1) >> 1);
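/*
 * Sketch of the inverse-recentering helper that the return statement above
 * belongs to; this is a standalone restatement of the usual VP9 recentering
 * step for illustration, not a verbatim copy of this file. A nonnegative
 * delta v is folded back around the reference value m, alternating below and
 * above it, while values beyond 2 * m pass through unchanged. update_prob()
 * appears to map the decoded index through the inv_map_table that follows
 * before recentering it around the previous probability.
 */
static int inv_recenter_nonneg_sketch(int v, int m)
{
    if (v > 2 * m)       /* too far from m to fold: keep as-is */
        return v;
    if (v & 1)           /* odd deltas land below m: m-1, m-2, ... */
        return m - ((v + 1) >> 1);
    return m + (v >> 1); /* even deltas land at or above m: m, m+1, ... */
}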
static const int inv_map_table[255] = {
7, 20, 33, 46, 59, 72, 85, 98, 111, 124, 137, 150, 163, 176,
189, 202, 215, 228, 241, 254, 1, 2, 3, 4, 5, 6, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 21, 22, 23, 24,
25, 26, 27, 28, 29, 30, 31, 32, 34, 35, 36, 37, 38, 39,
40, 41, 42, 43, 44, 45, 47, 48, 49, 50, 51, 52, 53, 54,
55, 56, 57, 58, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69,
70, 71, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84,
86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 99, 100,
101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 113, 114, 115,
116, 117, 118, 119, 120, 121, 122, 123, 125, 126, 127, 128, 129, 130,
131, 132, 133, 134, 135, 136, 138, 139, 140, 141, 142, 143, 144, 145,
146, 147, 148, 149, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160,
161, 162, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175,
177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 190, 191,
192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 203, 204, 205, 206,
207, 208, 209, 210, 211, 212, 213, 214, 216, 217, 218, 219, 220, 221,
222, 223, 224, 225, 226, 227, 229, 230, 231, 232, 233, 234, 235, 236,
237, 238, 239, 240, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251,
s->s.h.bpp = 8 + bits * 2;
s->pix_fmt = pix_fmt_rgb[bits];

static const enum AVPixelFormat pix_fmt_for_ss[3][2][2] = {

s->pix_fmt = pix_fmt_for_ss[bits][1][1];
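/*
 * Worked example, derived from the expressions above (how `bits` itself is
 * read from the bitstream is not shown here): bits indexes the [3]-sized
 * tables, so it ranges over 0..2 and s->s.h.bpp = 8 + bits * 2 yields 8, 10
 * or 12 bits per component; pix_fmt_for_ss[bits][1][1] then picks the entry
 * with both chroma directions subsampled, i.e. the 4:2:0 format at that
 * depth.
 */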
int c, i, j, k, l, m, n, w, h, max, size2, ret, sharp;

for (i = 0; i < 4; i++)
for (i = 0; i < 2; i++)
for (i = 0; i < 7; i++)
for (i = 0; i < 3; i++)
for (i = 0; i < 8; i++) {

int qyac, qydc, quvac, quvdc, lflvl, sh;
qyac = av_clip_uintp2(qyac, 8);

    av_clip_uintp2(lflvl + (s->s.h.lf_delta.ref[0] * (1 << sh)), 6);
for (j = 1; j < 4; j++) {

for (max = 0; (s->sb_cols >> max) >= 4; max++) ;
max = FFMAX(0, max - 1);
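/*
 * Worked example for the two lines above (arithmetic only): with
 * s->sb_cols = 45 (a 2880-pixel-wide frame, 45 superblocks of 64), the loop
 * condition 45 >> max >= 4 holds for max = 0..3 (45, 22, 11, 5) and fails at
 * max = 4 (2 < 4), so the loop exits with max = 4 and FFMAX(0, 4 - 1) leaves
 * max = 3, which the header parser then appears to use as the upper bound
 * when reading the log2 tile-columns field.
 */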
for (i = 0; i < 3; i++) {
"Ref pixfmt (%s) did not match current frame (%s)",
} else if (refw == w && refh == h) {
if (w * 2 < refw || h * 2 < refh || w > 16 * refw || h > 16 * refh) {
"Invalid ref frame dimensions %dx%d for frame size %dx%d\n",
s->mvscale[i][0] = (refw << 14) / w;
s->mvscale[i][1] = (refh << 14) / h;
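/*
 * Worked example for the mvscale computation above (arithmetic only): the
 * factors are 14-bit fixed point, so equal dimensions give
 * (w << 14) / w = 16384 (unity), while a 1920-wide reference used for a
 * 960-wide frame gives (1920 << 14) / 960 = 32768, i.e. a 2.0x scale. The
 * dimension check above rejects anything outside the ratio range that keeps
 * these factors between 1024 (1/16x) and 32768 (2x).
 */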
if (size2 > size - (data2 - data)) {

for (i = 0; i < 2; i++)
for (i = 0; i < 2; i++)
for (j = 0; j < 2; j++)
for (i = 0; i < 2; i++)
for (j = 0; j < 3; j++)
for (i = 0; i < 4; i++) {
for (j = 0; j < 2; j++)
for (k = 0; k < 2; k++)
for (l = 0; l < 6; l++)
for (m = 0; m < 6; m++) {
if (m >= 3 && l == 0)
for (n = 0; n < 3; n++) {
for (j = 0; j < 2; j++)
for (k = 0; k < 2; k++)
for (l = 0; l < 6; l++)
for (m = 0; m < 6; m++) {
for (i = 0; i < 3; i++)
for (i = 0; i < 7; i++)
for (j = 0; j < 3; j++)
for (i = 0; i < 4; i++)
for (j = 0; j < 2; j++)
for (i = 0; i < 4; i++)
for (i = 0; i < 5; i++)
for (i = 0; i < 5; i++) {
for (i = 0; i < 5; i++)
for (i = 0; i < 4; i++)
for (j = 0; j < 9; j++)
for (i = 0; i < 4; i++)
for (j = 0; j < 4; j++)
for (k = 0; k < 3; k++)
for (i = 0; i < 3; i++)
for (i = 0; i < 2; i++) {
for (j = 0; j < 10; j++)
for (j = 0; j < 10; j++)
for (i = 0; i < 2; i++) {
for (j = 0; j < 2; j++)
for (k = 0; k < 3; k++)
for (j = 0; j < 3; j++)
for (i = 0; i < 2; i++) {

return (data2 - data) + size2;
ptrdiff_t yoff, ptrdiff_t uvoff, enum BlockLevel bl)

ptrdiff_t hbs = 4 >> bl;

} else if (col + hbs < s->cols) {
if (row + hbs < s->rows) {

yoff  += hbs * 8 * y_stride;
uvoff += hbs * 8 * uv_stride >> s->ss_v;

yoff  += hbs * 8 * bytesperpixel;
uvoff += hbs * 8 * bytesperpixel >> s->ss_h;

decode_sb(avctx, row, col, lflvl, yoff, uvoff, bl + 1);
          yoff + 8 * hbs * bytesperpixel,
          uvoff + (8 * hbs * bytesperpixel >> s->ss_h), bl + 1);
yoff  += hbs * 8 * y_stride;
uvoff += hbs * 8 * uv_stride >> s->ss_v;
decode_sb(avctx, row + hbs, col, lflvl, yoff, uvoff, bl + 1);
decode_sb(avctx, row + hbs, col + hbs, lflvl,
          yoff + 8 * hbs * bytesperpixel,
          uvoff + (8 * hbs * bytesperpixel >> s->ss_h), bl + 1);

decode_sb(avctx, row, col, lflvl, yoff, uvoff, bl + 1);
          yoff + 8 * hbs * bytesperpixel,
          uvoff + (8 * hbs * bytesperpixel >> s->ss_h), bl + 1);

} else if (row + hbs < s->rows) {

decode_sb(avctx, row, col, lflvl, yoff, uvoff, bl + 1);
yoff  += hbs * 8 * y_stride;
uvoff += hbs * 8 * uv_stride >> s->ss_v;
decode_sb(avctx, row + hbs, col, lflvl, yoff, uvoff, bl + 1);

decode_sb(avctx, row, col, lflvl, yoff, uvoff, bl + 1);
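/*
 * Worked example for the offset stepping above (arithmetic only): hbs is the
 * half-block size in units of 8 pixels, so at the 64x64 level hbs = 4 and
 * the right/bottom halves start 4 * 8 = 32 luma pixels further in; one split
 * level down (32x32) hbs = 2 and the step is 16 pixels. The ss_h / ss_v
 * shifts halve those steps for subsampled chroma, e.g. 32 >> 1 = 16 samples
 * for 4:2:0.
 */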
ptrdiff_t yoff, ptrdiff_t uvoff, enum BlockLevel bl)

ptrdiff_t hbs = 4 >> bl;

} else if (s->b->bl == bl) {

yoff  += hbs * 8 * y_stride;
uvoff += hbs * 8 * uv_stride >> s->ss_v;

yoff  += hbs * 8 * bytesperpixel;
uvoff += hbs * 8 * bytesperpixel >> s->ss_h;

if (col + hbs < s->cols) {
if (row + hbs < s->rows) {
decode_sb_mem(avctx, row, col + hbs, lflvl, yoff + 8 * hbs * bytesperpixel,
              uvoff + (8 * hbs * bytesperpixel >> s->ss_h), bl + 1);
yoff  += hbs * 8 * y_stride;
uvoff += hbs * 8 * uv_stride >> s->ss_v;
decode_sb_mem(avctx, row + hbs, col, lflvl, yoff, uvoff, bl + 1);
              yoff + 8 * hbs * bytesperpixel,
              uvoff + (8 * hbs * bytesperpixel >> s->ss_h), bl + 1);

yoff  += hbs * 8 * bytesperpixel;
uvoff += hbs * 8 * bytesperpixel >> s->ss_h;
decode_sb_mem(avctx, row, col + hbs, lflvl, yoff, uvoff, bl + 1);

} else if (row + hbs < s->rows) {
yoff  += hbs * 8 * y_stride;
uvoff += hbs * 8 * uv_stride >> s->ss_v;
decode_sb_mem(avctx, row + hbs, col, lflvl, yoff, uvoff, bl + 1);

int sb_start = ( idx      * n) >> log2_n;
int sb_end   = ((idx + 1) * n) >> log2_n;
*start = FFMIN(sb_start, n) << 3;
*end   = FFMIN(sb_end,   n) << 3;
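/*
 * Worked example for the tile-offset computation above (arithmetic only):
 * with n = 20 superblock columns and log2_n = 2 (four tile columns), tile
 * idx = 1 gets sb_start = (1 * 20) >> 2 = 5 and sb_end = (2 * 20) >> 2 = 10,
 * so *start = 5 << 3 = 40 and *end = 10 << 3 = 80 in 8x8-block units,
 * i.e. luma pixels 320..639 of the frame.
 */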
for (i = 0; i < 3; i++) {
for (i = 0; i < 8; i++) {

int ret, tile_row, tile_col, i, ref, row, col;
ptrdiff_t yoff, uvoff, ls_y, ls_uv;

} else if (ret == 0) {
for (i = 0; i < 8; i++) {
for (i = 0; i < 8; i++) {
"Failed to allocate block buffers\n");
for (i = 0; i < 4; i++) {
for (j = 0; j < 2; j++)
for (k = 0; k < 2; k++)
for (l = 0; l < 6; l++)
for (m = 0; m < 6; m++)
if (tile_size > size) {

     row += 8, yoff += ls_y * 64, uvoff += ls_uv * 64 >> s->ss_v) {
ptrdiff_t yoff2 = yoff, uvoff2 = uvoff;
memcpy(&s->c, &s->c_b[tile_col], sizeof(s->c));
     col < s->tile_col_end;
     col += 8, yoff2 += 64 * bytesperpixel,
     uvoff2 += 64 * bytesperpixel >> s->ss_h, lflvl_ptr++) {
memset(lflvl_ptr->mask, 0, sizeof(lflvl_ptr->mask));
memcpy(&s->c_b[tile_col], &s->c, sizeof(s->c));

if (row + 8 < s->rows) {
       f->data[0] + yoff + 63 * ls_y,
       8 * s->cols * bytesperpixel);
       f->data[1] + uvoff + ((64 >> s->ss_v) - 1) * ls_uv,
       8 * s->cols * bytesperpixel >> s->ss_h);
       f->data[2] + uvoff + ((64 >> s->ss_v) - 1) * ls_uv,
       8 * s->cols * bytesperpixel >> s->ss_h);

lflvl_ptr = s->lflvl;
for (col = 0; col < s->cols;
     col += 8, yoff2 += 64 * bytesperpixel,
     uvoff2 += 64 * bytesperpixel >> s->ss_h, lflvl_ptr++) {
} while (s->pass++ == 1);

for (i = 0; i < 8; i++) {
for (i = 0; i < 3; i++)
for (i = 0; i < 8; i++)
for (i = 0; i < 3; i++) {
for (i = 0; i < 8; i++) {
for (i = 0; i < 3; i++) {
if (ssrc->s.frames[i].tf.f->buf[0]) {
for (i = 0; i < 8; i++) {
if (ssrc->next_refs[i].f->buf[0]) {

s->ss_v    = ssrc->ss_v;
s->ss_h    = ssrc->ss_h;
s->gf_fmt  = ssrc->gf_fmt;
s->s.h.bpp = ssrc->s.h.bpp;
s->pix_fmt = ssrc->pix_fmt;