#if HAVE_OPENJPEG_2_1_OPENJPEG_H
# include <openjpeg-2.1/openjpeg.h>
#elif HAVE_OPENJPEG_2_0_OPENJPEG_H
# include <openjpeg-2.0/openjpeg.h>
#elif HAVE_OPENJPEG_1_5_OPENJPEG_H
# include <openjpeg-1.5/openjpeg.h>
#else
# include <openjpeg.h>
#endif

#if HAVE_OPENJPEG_2_1_OPENJPEG_H || HAVE_OPENJPEG_2_0_OPENJPEG_H
# define OPENJPEG_MAJOR_VERSION 2
# define OPJ(x) OPJ_##x
#else
# define OPENJPEG_MAJOR_VERSION 1
# define OPJ(x) x
#endif
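
/*
 * The OPJ() helper papers over the symbol rename between OpenJPEG 1.x and
 * 2.x: the 2.x headers prefix their enum values with OPJ_, the 1.x headers
 * do not.  A sketch of the intended use (both expressions appear further
 * down in this file):
 *
 *     p->prog_order = OPJ(CPRL);         // OPJ_CPRL on 2.x, CPRL on 1.x
 *     color_space   = OPJ(CLRSPC_SRGB);  // OPJ_CLRSPC_SRGB / CLRSPC_SRGB
 */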
#if OPENJPEG_MAJOR_VERSION == 1
#endif // OPENJPEG_MAJOR_VERSION == 1

#if OPENJPEG_MAJOR_VERSION == 1
#endif // OPENJPEG_MAJOR_VERSION == 1
#if OPENJPEG_MAJOR_VERSION == 2
/* Growable AVPacket sink used as the user data of the OpenJPEG 2.x stream. */
typedef struct PacketWriter {
    int pos;
    AVPacket *packet;
} PacketWriter;

/* write callback: copy nb_bytes at the current position, growing the packet on demand */
static OPJ_SIZE_T stream_write(void *out_buffer, OPJ_SIZE_T nb_bytes, void *user_data)
{
    PacketWriter *writer = user_data;
    AVPacket *packet = writer->packet;
    int remaining = packet->size - writer->pos;
    if (nb_bytes > remaining) {
        OPJ_SIZE_T needed = nb_bytes - remaining;
        int max_growth = INT_MAX - AV_INPUT_BUFFER_PADDING_SIZE - packet->size;
        if (needed > max_growth) {
            return (OPJ_SIZE_T)-1;
        }
        if (av_grow_packet(packet, (int)needed)) {
            return (OPJ_SIZE_T)-1;
        }
    }
    memcpy(packet->data + writer->pos, out_buffer, nb_bytes);
    writer->pos += (int)nb_bytes;
    return nb_bytes;
}

/* skip callback: move the write position; forward skips may grow the packet */
static OPJ_OFF_T stream_skip(OPJ_OFF_T nb_bytes, void *user_data)
{
    PacketWriter *writer = user_data;
    AVPacket *packet = writer->packet;
    if (nb_bytes < 0) {
        if (writer->pos == 0) {
            return (OPJ_SIZE_T)-1;
        }
        if (nb_bytes + writer->pos < 0) {
            nb_bytes = -writer->pos;
        }
    } else {
        int remaining = packet->size - writer->pos;
        if (nb_bytes > remaining) {
            OPJ_SIZE_T needed = nb_bytes - remaining;
            int max_growth = INT_MAX - AV_INPUT_BUFFER_PADDING_SIZE - packet->size;
            if (needed > max_growth) {
                return (OPJ_SIZE_T)-1;
            }
            if (av_grow_packet(packet, (int)needed)) {
                return (OPJ_SIZE_T)-1;
            }
        }
    }
    writer->pos += (int)nb_bytes;
    return nb_bytes;
}

/* seek callback: absolute reposition, growing the packet if the target is past its end */
static OPJ_BOOL stream_seek(OPJ_OFF_T nb_bytes, void *user_data)
{
    PacketWriter *writer = user_data;
    AVPacket *packet = writer->packet;
    if (nb_bytes < 0) {
        return OPJ_FALSE;
    }
    if (nb_bytes > packet->size) {
        if (av_grow_packet(packet, (int)nb_bytes - packet->size)) {
            return OPJ_FALSE;
        }
    }
    writer->pos = (int)nb_bytes;
    return OPJ_TRUE;
}
#endif // OPENJPEG_MAJOR_VERSION == 2
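
/*
 * Sketch of how the callbacks above get attached to an OpenJPEG 2.x output
 * stream; the encode path further down does essentially this, with the
 * PacketWriter bound to the AVPacket being produced:
 *
 *     PacketWriter writer = { 0 };
 *     opj_stream_t *stream = opj_stream_default_create(OPJ_STREAM_WRITE);
 *     writer.packet = pkt;
 *     opj_stream_set_write_function(stream, stream_write);
 *     opj_stream_set_skip_function(stream, stream_skip);
 *     opj_stream_set_seek_function(stream, stream_seek);
 *     opj_stream_set_user_data(stream, &writer, NULL);  // 2.1 signature
 */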
static void cinema_parameters(opj_cparameters_t *p)
{
    /* ... */
    p->image_offset_x0 = 0;
    p->image_offset_y0 = 0;
    /* code block size 32x32 */
    p->cblockw_init = 32;
    p->cblockh_init = 32;
    /* the progression order shall be CPRL */
    p->prog_order = OPJ(CPRL);
    /* no subsampling */
    p->subsampling_dx = 1;
    p->subsampling_dy = 1;
    /* ... */
}
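
/*
 * These values mirror the Digital Cinema constraints: image/tile origin at
 * (0,0), 32x32 code blocks, CPRL progression and no extra subsampling.
 * Presumably cinema_parameters() is applied to the encoder parameters when
 * a cinema mode is requested, roughly (the condition is an assumption for
 * illustration, not code shown in this excerpt):
 *
 *     if (ctx->cinema_mode > 0)
 *         cinema_parameters(&ctx->enc_params);
 */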
static opj_image_t *mj2_create_image(AVCodecContext *avctx, opj_cparameters_t *parameters)
{
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
    opj_image_cmptparm_t cmptparm[4] = {{0}};
    opj_image_t *img;
    int i, numcomps;
    int sub_dx[4], sub_dy[4];
    OPJ_COLOR_SPACE color_space = OPJ(CLRSPC_UNKNOWN);

    sub_dx[0] = sub_dx[3] = 1;
    sub_dy[0] = sub_dy[3] = 1;
    sub_dx[1] = sub_dx[2] = 1 << desc->log2_chroma_w;
    sub_dy[1] = sub_dy[2] = 1 << desc->log2_chroma_h;
    numcomps = desc->nb_components;

    switch (avctx->pix_fmt) {
    /* ... AV_PIX_FMT_GRAY8 and the other grayscale formats ... */
        color_space = OPJ(CLRSPC_GRAY);
        break;
    /* ... AV_PIX_FMT_RGB24 and the other RGB formats ... */
        color_space = OPJ(CLRSPC_SRGB);
        break;
    /* ... AV_PIX_FMT_YUV420P and the other YUV formats ... */
        color_space = OPJ(CLRSPC_SYCC);
        break;
    default:
        av_log(avctx, AV_LOG_ERROR,
               "The requested pixel format '%s' is not supported\n",
               av_get_pix_fmt_name(avctx->pix_fmt));
        return NULL;
    }

    for (i = 0; i < numcomps; i++) {
        /* ... bit depth (prec/bpp) taken from the pixel format descriptor ... */
        cmptparm[i].sgnd = 0;
        cmptparm[i].dx = sub_dx[i];
        cmptparm[i].dy = sub_dy[i];
        cmptparm[i].w = (avctx->width  + sub_dx[i] - 1) / sub_dx[i];
        cmptparm[i].h = (avctx->height + sub_dy[i] - 1) / sub_dy[i];
    }

    img = opj_image_create(numcomps, cmptparm, color_space);
    if (!img)
        return NULL;

    img->x1 = (avctx->width  - 1) * parameters->subsampling_dx + 1;
    img->y1 = (avctx->height - 1) * parameters->subsampling_dy + 1;
    return img;
}
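
/*
 * Worked example for the component geometry above (illustration only): a
 * 1920x1080 yuv420p frame has log2_chroma_w = log2_chroma_h = 1, so
 * sub_dx = sub_dy = {1, 2, 2} and
 *
 *     Y : (1920 + 0) / 1 = 1920,  (1080 + 0) / 1 = 1080
 *     Cb: (1920 + 1) / 2 =  960,  (1080 + 1) / 2 =  540
 *     Cr:  960 x 540
 *
 * i.e. cmptparm[i].w/h is the ceiling of the frame size divided by the
 * per-component subsampling factor.
 */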
static av_cold int libopenjpeg_encode_init(AVCodecContext *avctx)
{
    LibOpenJPEGContext *ctx = avctx->priv_data;

    opj_set_default_encoder_parameters(&ctx->enc_params);

#if HAVE_OPENJPEG_2_1_OPENJPEG_H
    switch (ctx->cinema_mode) {
    case OPJ_CINEMA2K_24:
        ctx->enc_params.rsiz          = OPJ_PROFILE_CINEMA_2K;
        ctx->enc_params.max_cs_size   = OPJ_CINEMA_24_CS;
        ctx->enc_params.max_comp_size = OPJ_CINEMA_24_COMP;
        break;
    case OPJ_CINEMA2K_48:
        ctx->enc_params.rsiz          = OPJ_PROFILE_CINEMA_2K;
        ctx->enc_params.max_cs_size   = OPJ_CINEMA_48_CS;
        ctx->enc_params.max_comp_size = OPJ_CINEMA_48_COMP;
        break;
    case OPJ_CINEMA4K_24:
        ctx->enc_params.rsiz          = OPJ_PROFILE_CINEMA_4K;
        ctx->enc_params.max_cs_size   = OPJ_CINEMA_24_CS;
        ctx->enc_params.max_comp_size = OPJ_CINEMA_24_COMP;
        break;
    }

    if (ctx->enc_params.rsiz == OPJ_PROFILE_CINEMA_4K) {
        /* ... */
    }
    if (ctx->enc_params.rsiz == OPJ_PROFILE_CINEMA_2K) {
        /* ... */
    }
    /* ... reported when cinema_mode and the requested profile disagree: */
    av_log(avctx, AV_LOG_ERROR,
           "Invalid parameter pairing: cinema_mode and profile conflict.\n");
    /* ... */
#endif // HAVE_OPENJPEG_2_1_OPENJPEG_H
#if OPENJPEG_MAJOR_VERSION == 1
    /* ... */
#endif // OPENJPEG_MAJOR_VERSION == 1

    /* ... */

#if OPENJPEG_MAJOR_VERSION == 1
    opj_image_destroy(ctx->image);
    /* ... */
#endif // OPENJPEG_MAJOR_VERSION == 1
    /* ... */
}
/* interleaved 8-bit input (e.g. RGB24): one plane, numcomps samples per pixel */
static int libopenjpeg_copy_packed8(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x, y;
    int *image_line;
    int frame_index;
    const int numcomps = image->numcomps;

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[0] / numcomps) {
            /* ... error message ... */
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        for (y = 0; y < avctx->height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * frame->linesize[0] + compno;
            for (x = 0; x < avctx->width; ++x) {
                image_line[x] = frame->data[0][frame_index];
                frame_index += numcomps;
            }
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - (int)image->comps[compno].w];
            }
        }
    }

    return 1;
}
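
/*
 * Index arithmetic in the packed 8-bit copy above: for an interleaved
 * format such as RGB24 all components share data[0], so component compno
 * of the pixel at (x, y) sits at
 *
 *     frame->data[0][y * frame->linesize[0] + x * numcomps + compno]
 *
 * which is what the running frame_index (start at y * linesize + compno,
 * step by numcomps) traverses.  Columns and rows beyond the visible frame
 * are filled by repeating the last sample so every OpenJPEG component is
 * fully populated.
 */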
static int libopenjpeg_copy_packed12(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x, y;
    int *image_line;
    int frame_index;
    const int numcomps = image->numcomps;
    uint16_t *frame_ptr = (uint16_t *)frame->data[0];

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[0] / numcomps) {
            /* ... error message ... */
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        for (y = 0; y < avctx->height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * (frame->linesize[0] / 2) + compno;
            for (x = 0; x < avctx->width; ++x) {
                image_line[x] = frame_ptr[frame_index] >> 4;
                frame_index += numcomps;
            }
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - (int)image->comps[compno].w];
            }
        }
    }

    return 1;
}
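
/*
 * The >> 4 above assumes 12-bit samples stored MSB-aligned in 16-bit words
 * (as FFmpeg's XYZ12 formats are), so the shift recovers the 12-bit value
 * that OpenJPEG expects for a 12-bit component.
 */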
static int libopenjpeg_copy_packed16(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x, y;
    int *image_line;
    int frame_index;
    const int numcomps = image->numcomps;
    uint16_t *frame_ptr = (uint16_t *)frame->data[0];

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[0] / numcomps) {
            /* ... error message ... */
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        for (y = 0; y < avctx->height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * (frame->linesize[0] / 2) + compno;
            for (x = 0; x < avctx->width; ++x) {
                image_line[x] = frame_ptr[frame_index];
                frame_index += numcomps;
            }
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - (int)image->comps[compno].w];
            }
        }
    }

    return 1;
}
static int libopenjpeg_copy_unpacked8(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x, y;
    int width, height;
    int *image_line;
    int frame_index;
    const int numcomps = image->numcomps;

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[compno]) {
            /* ... error message ... */
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        width  = (avctx->width  + image->comps[compno].dx - 1) / image->comps[compno].dx;
        height = (avctx->height + image->comps[compno].dy - 1) / image->comps[compno].dy;
        for (y = 0; y < height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * frame->linesize[compno];
            for (x = 0; x < width; ++x)
                image_line[x] = frame->data[compno][frame_index++];
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - (int)image->comps[compno].w];
            }
        }
    }

    return 1;
}
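
/*
 * For planar input every component has its own plane, so this is a plain
 * per-plane copy.  Illustration: with yuv420p, image->comps[1].dx = dy = 2,
 * so the chroma loops copy ceil(width/2) x ceil(height/2) samples from
 * frame->data[1] with frame->linesize[1] as the stride, then replicate the
 * last column/row into any OpenJPEG padding.
 */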
static int libopenjpeg_copy_unpacked16(AVCodecContext *avctx, const AVFrame *frame, opj_image_t *image)
{
    int compno;
    int x, y;
    int width, height;
    int *image_line;
    int frame_index;
    const int numcomps = image->numcomps;
    uint16_t *frame_ptr;

    for (compno = 0; compno < numcomps; ++compno) {
        if (image->comps[compno].w > frame->linesize[compno]) {
            /* ... error message ... */
            return 0;
        }
    }

    for (compno = 0; compno < numcomps; ++compno) {
        width     = (avctx->width  + image->comps[compno].dx - 1) / image->comps[compno].dx;
        height    = (avctx->height + image->comps[compno].dy - 1) / image->comps[compno].dy;
        frame_ptr = (uint16_t *)frame->data[compno];
        for (y = 0; y < height; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            frame_index = y * (frame->linesize[compno] / 2);
            for (x = 0; x < width; ++x)
                image_line[x] = frame_ptr[frame_index++];
            for (; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - 1];
            }
        }
        for (; y < image->comps[compno].h; ++y) {
            image_line = image->comps[compno].data + y * image->comps[compno].w;
            for (x = 0; x < image->comps[compno].w; ++x) {
                image_line[x] = image_line[x - (int)image->comps[compno].w];
            }
        }
    }

    return 1;
}
static int libopenjpeg_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                                    const AVFrame *frame, int *got_packet)
{
    LibOpenJPEGContext *ctx = avctx->priv_data;
    int ret;
#if OPENJPEG_MAJOR_VERSION == 1
    opj_image_t *image    = ctx->image;
    opj_cinfo_t *compress = NULL;
    opj_cio_t *stream     = NULL;
    /* ... */
#else // OPENJPEG_MAJOR_VERSION == 2
    PacketWriter writer   = { 0 };
    opj_codec_t *compress = NULL;
    opj_stream_t *stream  = NULL;
    opj_image_t *image    = mj2_create_image(avctx, &ctx->enc_params);
    if (!image)
        return AVERROR(EINVAL);
#endif // OPENJPEG_MAJOR_VERSION == 1
697 "The frame's pixel format '%s' is not supported\n",
706 "Could not copy the frame data to the internal image buffer\n");
711 #if OPENJPEG_MAJOR_VERSION == 2
715 #endif // OPENJPEG_MAJOR_VERSION == 2
717 compress = opj_create_compress(ctx->
format);
724 #if OPENJPEG_MAJOR_VERSION == 1
725 opj_setup_encoder(compress, &ctx->
enc_params, image);
726 stream = opj_cio_open((opj_common_ptr) compress,
NULL, 0);
727 #else // OPENJPEG_MAJOR_VERSION == 2
736 if (!opj_setup_encoder(compress, &ctx->
enc_params, image)) {
741 stream = opj_stream_default_create(OPJ_STREAM_WRITE);
742 #endif // OPENJPEG_MAJOR_VERSION == 1
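
/*
 * On the 2.x path the elided part of this block would normally also install
 * the logging callbacks before opj_setup_encoder(); a sketch, assuming the
 * standard OpenJPEG 2.x handler API and the callbacks defined in this file:
 *
 *     opj_set_error_handler(compress, error_callback, avctx);
 *     opj_set_warning_handler(compress, warning_callback, avctx);
 *     opj_set_info_handler(compress, info_callback, avctx);
 */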
#if OPENJPEG_MAJOR_VERSION == 1
    memset(&ctx->event_mgr, 0, sizeof(ctx->event_mgr));
    ctx->event_mgr.info_handler    = info_callback;
    ctx->event_mgr.error_handler   = error_callback;
    ctx->event_mgr.warning_handler = warning_callback;
    opj_set_event_mgr((opj_common_ptr) compress, &ctx->event_mgr, avctx);

    if (!opj_encode(compress, stream, image, NULL)) {
        av_log(avctx, AV_LOG_ERROR, "Error during the opj encode\n");
        ret = AVERROR_EXTERNAL;
        goto done;
    }

    len = cio_tell(stream);
    if ((ret = ff_alloc_packet2(avctx, pkt, len, 0)) < 0)
        goto done;
    memcpy(pkt->data, stream->buffer, len);
#else // OPENJPEG_MAJOR_VERSION == 2
    writer.packet = pkt;
    opj_stream_set_write_function(stream, stream_write);
    opj_stream_set_skip_function(stream, stream_skip);
    opj_stream_set_seek_function(stream, stream_seek);
#if HAVE_OPENJPEG_2_1_OPENJPEG_H
    opj_stream_set_user_data(stream, &writer, NULL);
#elif HAVE_OPENJPEG_2_0_OPENJPEG_H
    opj_stream_set_user_data(stream, &writer);
#else
#error Missing call to opj_stream_set_user_data
#endif

    if (!opj_start_compress(compress, image, stream) ||
        !opj_encode(compress, stream) ||
        !opj_end_compress(compress, stream)) {
        av_log(avctx, AV_LOG_ERROR, "Error during the opj encode\n");
        ret = AVERROR_EXTERNAL;
        goto done;
    }
#endif // OPENJPEG_MAJOR_VERSION == 1
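
/*
 * After a successful encode the packet still has to be finalized; the code
 * elided between here and the cleanup label presumably trims the 2.x packet
 * to the bytes actually written and flags the output as intra, roughly:
 *
 *     av_shrink_packet(pkt, writer.pos);  // writer.pos = bytes written
 *     pkt->flags |= AV_PKT_FLAG_KEY;      // every JPEG 2000 frame is a keyframe
 *     *got_packet = 1;
 */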
done:
#if OPENJPEG_MAJOR_VERSION == 2
    opj_stream_destroy(stream);
    opj_destroy_codec(compress);
    opj_image_destroy(image);
#else // OPENJPEG_MAJOR_VERSION == 1
    opj_cio_close(stream);
    opj_destroy_compress(compress);
#endif // OPENJPEG_MAJOR_VERSION == 2
    return ret;
}

static av_cold int libopenjpeg_encode_close(AVCodecContext *avctx)
{
    LibOpenJPEGContext *ctx = avctx->priv_data;
#if OPENJPEG_MAJOR_VERSION == 1
    opj_image_destroy(ctx->image);
#endif // OPENJPEG_MAJOR_VERSION == 1
    return 0;
}
#define OFFSET(x) offsetof(LibOpenJPEGContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
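
/*
 * OFFSET() and VE are the usual helpers for the encoder's AVOption table,
 * which is elided between here and the AVCodec definition below.  A
 * hypothetical entry (names and defaults for illustration only) would look
 * like:
 *
 *     { "cinema_mode", "Digital Cinema", OFFSET(cinema_mode),
 *       AV_OPT_TYPE_INT, { .i64 = OPJ(OFF) }, OPJ(OFF), OPJ(CINEMA4K_24),
 *       VE, "cinema_mode" },
 */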
AVCodec ff_libopenjpeg_encoder = {
    .name           = "libopenjpeg",