#include <media/NdkMediaFormat.h>
#include <media/NdkMediaCodec.h>
#include <android/native_window_jni.h>

#define OFFSET(x) offsetof(struct JNIAMediaCodecListFields, x)
    { "android/media/MediaCodecList", "findDecoderForFormat", "(Landroid/media/MediaFormat;)Ljava/lang/String;", FF_JNI_METHOD, OFFSET(find_decoder_for_format_id), 0 },

    { "android/media/MediaCodecList", "getCodecInfoAt", "(I)Landroid/media/MediaCodecInfo;", FF_JNI_STATIC_METHOD, OFFSET(get_codec_info_at_id), 1 },

    { "android/media/MediaCodecInfo", "getName", "()Ljava/lang/String;", FF_JNI_METHOD, OFFSET(get_name_id), 1 },
    { "android/media/MediaCodecInfo", "getCapabilitiesForType", "(Ljava/lang/String;)Landroid/media/MediaCodecInfo$CodecCapabilities;", FF_JNI_METHOD, OFFSET(get_codec_capabilities_id), 1 },
    { "android/media/MediaCodecInfo", "getSupportedTypes", "()[Ljava/lang/String;", FF_JNI_METHOD, OFFSET(get_supported_types_id), 1 },
    { "android/media/MediaCodecInfo", "isEncoder", "()Z", FF_JNI_METHOD, OFFSET(is_encoder_id), 1 },
    { "android/media/MediaCodecInfo", "isSoftwareOnly", "()Z", FF_JNI_METHOD, OFFSET(is_software_only_id), 0 },

    { "android/media/MediaCodecInfo$CodecCapabilities", "colorFormats", "[I", FF_JNI_FIELD, OFFSET(color_formats_id), 1 },
    { "android/media/MediaCodecInfo$CodecCapabilities", "profileLevels", "[Landroid/media/MediaCodecInfo$CodecProfileLevel;", FF_JNI_FIELD, OFFSET(profile_levels_id), 1 },

    { "android/media/MediaCodecInfo$CodecProfileLevel", "profile", "I", FF_JNI_FIELD, OFFSET(profile_id), 1 },
    { "android/media/MediaCodecInfo$CodecProfileLevel", "level", "I", FF_JNI_FIELD, OFFSET(level_id), 1 },
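/* A minimal sketch, not part of the original file, of how such a mapping
 * table is typically consumed: ff_jni_init_jfields() resolves every entry
 * into a jmethodID/jfieldID stored at its OFFSET(...) slot inside the
 * jfields struct, and ff_jni_reset_jfields() releases the references again.
 * The table name jni_amediacodeclist_mapping and the example function are
 * assumptions for illustration. */
static int example_init_codec_list_jfields(void *log_ctx)
{
    struct JNIAMediaCodecListFields jfields = { 0 };
    JNIEnv *env = ff_jni_get_env(log_ctx);

    if (!env)
        return AVERROR_EXTERNAL;

    if (ff_jni_init_jfields(env, &jfields, jni_amediacodeclist_mapping, 1, log_ctx) < 0)
        return AVERROR_EXTERNAL;

    /* ... use jfields.get_codec_info_at_id, jfields.find_decoder_for_format_id, ... */

    return ff_jni_reset_jfields(env, &jfields, jni_amediacodeclist_mapping, 1, log_ctx);
}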
#define OFFSET(x) offsetof(struct JNIAMediaFormatFields, x)

    { "android/media/MediaFormat", "containsKey", "(Ljava/lang/String;)Z", FF_JNI_METHOD, OFFSET(contains_key_id), 1 },

    { "android/media/MediaFormat", "getInteger", "(Ljava/lang/String;)I", FF_JNI_METHOD, OFFSET(get_integer_id), 1 },
    { "android/media/MediaFormat", "getLong", "(Ljava/lang/String;)J", FF_JNI_METHOD, OFFSET(get_long_id), 1 },
    { "android/media/MediaFormat", "getFloat", "(Ljava/lang/String;)F", FF_JNI_METHOD, OFFSET(get_float_id), 1 },
    { "android/media/MediaFormat", "getByteBuffer", "(Ljava/lang/String;)Ljava/nio/ByteBuffer;", FF_JNI_METHOD, OFFSET(get_bytebuffer_id), 1 },
    { "android/media/MediaFormat", "getString", "(Ljava/lang/String;)Ljava/lang/String;", FF_JNI_METHOD, OFFSET(get_string_id), 1 },

    { "android/media/MediaFormat", "setInteger", "(Ljava/lang/String;I)V", FF_JNI_METHOD, OFFSET(set_integer_id), 1 },
    { "android/media/MediaFormat", "setLong", "(Ljava/lang/String;J)V", FF_JNI_METHOD, OFFSET(set_long_id), 1 },
    { "android/media/MediaFormat", "setFloat", "(Ljava/lang/String;F)V", FF_JNI_METHOD, OFFSET(set_float_id), 1 },
    { "android/media/MediaFormat", "setByteBuffer", "(Ljava/lang/String;Ljava/nio/ByteBuffer;)V", FF_JNI_METHOD, OFFSET(set_bytebuffer_id), 1 },
    { "android/media/MediaFormat", "setString", "(Ljava/lang/String;Ljava/lang/String;)V", FF_JNI_METHOD, OFFSET(set_string_id), 1 },

    { "android/media/MediaFormat", "toString", "()Ljava/lang/String;", FF_JNI_METHOD, OFFSET(to_string_id), 1 },
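/* Descriptive note on the JNI signature strings used in these tables:
 * "(Ljava/lang/String;I)V" is a method taking (String, int) and returning
 * void, "J" is long, "F" is float, "Z" is boolean, "[I" is an int array,
 * and "Ljava/nio/ByteBuffer;" is a reference to java.nio.ByteBuffer.
 * The trailing 0/1 in each entry marks whether the member is optional or
 * mandatory when the ids are resolved. */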
#define OFFSET(x) offsetof(struct JNIAMediaCodecFields, x)

    { "android/media/MediaCodec", "INFO_OUTPUT_BUFFERS_CHANGED", "I", FF_JNI_STATIC_FIELD, OFFSET(info_output_buffers_changed_id), 1 },
    { "android/media/MediaCodec", "INFO_OUTPUT_FORMAT_CHANGED", "I", FF_JNI_STATIC_FIELD, OFFSET(info_output_format_changed_id), 1 },

    { "android/media/MediaCodec", "BUFFER_FLAG_CODEC_CONFIG", "I", FF_JNI_STATIC_FIELD, OFFSET(buffer_flag_codec_config_id), 1 },
    { "android/media/MediaCodec", "BUFFER_FLAG_END_OF_STREAM", "I", FF_JNI_STATIC_FIELD, OFFSET(buffer_flag_end_of_stream_id), 1 },

    { "android/media/MediaCodec", "createByCodecName", "(Ljava/lang/String;)Landroid/media/MediaCodec;", FF_JNI_STATIC_METHOD, OFFSET(create_by_codec_name_id), 1 },
    { "android/media/MediaCodec", "createDecoderByType", "(Ljava/lang/String;)Landroid/media/MediaCodec;", FF_JNI_STATIC_METHOD, OFFSET(create_decoder_by_type_id), 1 },
    { "android/media/MediaCodec", "createEncoderByType", "(Ljava/lang/String;)Landroid/media/MediaCodec;", FF_JNI_STATIC_METHOD, OFFSET(create_encoder_by_type_id), 1 },

    { "android/media/MediaCodec", "getName", "()Ljava/lang/String;", FF_JNI_METHOD, OFFSET(get_name_id), 1 },

    { "android/media/MediaCodec", "configure", "(Landroid/media/MediaFormat;Landroid/view/Surface;Landroid/media/MediaCrypto;I)V", FF_JNI_METHOD, OFFSET(configure_id), 1 },

    { "android/media/MediaCodec", "getOutputFormat", "()Landroid/media/MediaFormat;", FF_JNI_METHOD, OFFSET(get_output_format_id), 1 },

    { "android/media/MediaCodec", "dequeueInputBuffer", "(J)I", FF_JNI_METHOD, OFFSET(dequeue_input_buffer_id), 1 },
    { "android/media/MediaCodec", "queueInputBuffer", "(IIIJI)V", FF_JNI_METHOD, OFFSET(queue_input_buffer_id), 1 },
    { "android/media/MediaCodec", "getInputBuffer", "(I)Ljava/nio/ByteBuffer;", FF_JNI_METHOD, OFFSET(get_input_buffer_id), 0 },
    { "android/media/MediaCodec", "getInputBuffers", "()[Ljava/nio/ByteBuffer;", FF_JNI_METHOD, OFFSET(get_input_buffers_id), 1 },

    { "android/media/MediaCodec", "dequeueOutputBuffer", "(Landroid/media/MediaCodec$BufferInfo;J)I", FF_JNI_METHOD, OFFSET(dequeue_output_buffer_id), 1 },
    { "android/media/MediaCodec", "getOutputBuffer", "(I)Ljava/nio/ByteBuffer;", FF_JNI_METHOD, OFFSET(get_output_buffer_id), 0 },
    { "android/media/MediaCodec", "getOutputBuffers", "()[Ljava/nio/ByteBuffer;", FF_JNI_METHOD, OFFSET(get_output_buffers_id), 1 },
    { "android/media/MediaCodec", "releaseOutputBuffer", "(IZ)V", FF_JNI_METHOD, OFFSET(release_output_buffer_id), 1 },
    { "android/media/MediaCodec", "releaseOutputBuffer", "(IJ)V", FF_JNI_METHOD, OFFSET(release_output_buffer_at_time_id), 0 },

    { "android/media/MediaCodec", "setInputSurface", "(Landroid/view/Surface;)V", FF_JNI_METHOD, OFFSET(set_input_surface_id), 0 },
    { "android/media/MediaCodec", "signalEndOfInputStream", "()V", FF_JNI_METHOD, OFFSET(signal_end_of_input_stream_id), 0 },

    { "android/media/MediaCodec.BufferInfo", "<init>", "()V", FF_JNI_METHOD, OFFSET(init_id), 1 },
    { "android/media/MediaCodec.BufferInfo", "flags", "I", FF_JNI_FIELD, OFFSET(flags_id), 1 },
    { "android/media/MediaCodec.BufferInfo", "offset", "I", FF_JNI_FIELD, OFFSET(offset_id), 1 },
    { "android/media/MediaCodec.BufferInfo", "presentationTimeUs", "J", FF_JNI_FIELD, OFFSET(presentation_time_us_id), 1 },
    { "android/media/MediaCodec.BufferInfo", "size", "I", FF_JNI_FIELD, OFFSET(size_id), 1 },
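/* Hedged sketch, not the file's verbatim code: the BufferInfo field ids above
 * are what let the JNI wrapper copy the Java MediaCodec.BufferInfo filled by
 * dequeueOutputBuffer() back into the caller's C buffer-info struct.  The
 * jfields member names and the codec/info variable names are assumptions. */
    info->flags              = (*env)->GetIntField(env, codec->buffer_info, codec->jfields.flags_id);
    info->offset             = (*env)->GetIntField(env, codec->buffer_info, codec->jfields.offset_id);
    info->presentationTimeUs = (*env)->GetLongField(env, codec->buffer_info, codec->jfields.presentation_time_us_id);
    info->size               = (*env)->GetIntField(env, codec->buffer_info, codec->jfields.size_id);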
#define JNI_GET_ENV_OR_RETURN(env, log_ctx, ret) do {   \
    (env) = ff_jni_get_env(log_ctx);                    \
    if (!(env)) {                                       \
        return ret;                                     \
    }                                                   \
} while (0)

#define JNI_GET_ENV_OR_RETURN_VOID(env, log_ctx) do {   \
    (env) = ff_jni_get_env(log_ctx);                    \
    if (!(env)) {                                       \
        return;                                         \
    }                                                   \
} while (0)
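/* Hedged usage sketch (variable names assumed): each JNI-backed wrapper below
 * starts by attaching to the per-thread JNIEnv and bailing out on failure. */
    JNIEnv *env = NULL;
    JNI_GET_ENV_OR_RETURN(env, log_ctx, AVERROR_EXTERNAL);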
static const int AVCProfileBaseline = 0x01;
static const int AVCProfileMain = 0x02;
static const int AVCProfileExtended = 0x04;
static const int AVCProfileHigh = 0x08;
static const int AVCProfileHigh10 = 0x10;
static const int AVCProfileHigh422 = 0x20;
static const int AVCProfileHigh444 = 0x40;
static const int AVCProfileConstrainedBaseline = 0x10000;
static const int AVCProfileConstrainedHigh = 0x80000;

static const int HEVCProfileMain = 0x01;
static const int HEVCProfileMain10 = 0x02;
static const int HEVCProfileMainStill = 0x04;
static const int HEVCProfileMain10HDR10 = 0x1000;
static const int HEVCProfileMain10HDR10Plus = 0x2000;

static const int VP9Profile0 = 0x01;
static const int VP9Profile1 = 0x02;
static const int VP9Profile2 = 0x04;
static const int VP9Profile3 = 0x08;
static const int VP9Profile2HDR = 0x1000;
static const int VP9Profile3HDR = 0x2000;
static const int VP9Profile2HDR10Plus = 0x4000;
static const int VP9Profile3HDR10Plus = 0x8000;

static const int MPEG4ProfileSimple = 0x01;
static const int MPEG4ProfileSimpleScalable = 0x02;
static const int MPEG4ProfileCore = 0x04;
static const int MPEG4ProfileMain = 0x08;
static const int MPEG4ProfileNbit = 0x10;
static const int MPEG4ProfileScalableTexture = 0x20;
static const int MPEG4ProfileSimpleFBA = 0x80;
static const int MPEG4ProfileSimpleFace = 0x40;
static const int MPEG4ProfileBasicAnimated = 0x100;
static const int MPEG4ProfileHybrid = 0x200;
static const int MPEG4ProfileAdvancedRealTime = 0x400;
static const int MPEG4ProfileCoreScalable = 0x800;
static const int MPEG4ProfileAdvancedCoding = 0x1000;
static const int MPEG4ProfileAdvancedCore = 0x2000;
static const int MPEG4ProfileAdvancedScalable = 0x4000;
static const int MPEG4ProfileAdvancedSimple = 0x8000;

static const int AV1ProfileMain8 = 0x1;
static const int AV1ProfileMain10 = 0x2;
static const int AV1ProfileMain10HDR10 = 0x1000;
static const int AV1ProfileMain10HDR10Plus = 0x2000;
    (void)AVCProfileConstrainedHigh;
    (void)HEVCProfileMain10HDR10;
    (void)HEVCProfileMain10HDR10Plus;
    (void)VP9Profile2HDR;
    (void)VP9Profile3HDR;
    (void)VP9Profile2HDR10Plus;
    (void)VP9Profile3HDR10Plus;
    (void)MPEG4ProfileSimpleFace;
    (void)AV1ProfileMain10;
    (void)AV1ProfileMain10HDR10;
    (void)AV1ProfileMain10HDR10Plus;
        return AVCProfileBaseline;
        return AVCProfileConstrainedBaseline;
        return AVCProfileMain;
        return AVCProfileExtended;
        return AVCProfileHigh;
        return AVCProfileHigh10;
        return AVCProfileHigh422;
        return AVCProfileHigh444;

        return HEVCProfileMain;
        return HEVCProfileMainStill;
        return HEVCProfileMain10;

        return MPEG4ProfileSimple;
        return MPEG4ProfileSimpleScalable;
        return MPEG4ProfileCore;
        return MPEG4ProfileMain;
        return MPEG4ProfileNbit;
        return MPEG4ProfileScalableTexture;
        return MPEG4ProfileSimpleFBA;
        return MPEG4ProfileBasicAnimated;
        return MPEG4ProfileHybrid;
        return MPEG4ProfileAdvancedRealTime;
        return MPEG4ProfileCoreScalable;
        return MPEG4ProfileAdvancedCoding;
        return MPEG4ProfileAdvancedCore;
        return MPEG4ProfileAdvancedScalable;
        return MPEG4ProfileAdvancedSimple;

        return AV1ProfileMain8;
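/* Hedged sketch of the mapping performed around the return statements above:
 * the profile-lookup helper switches on the libavcodec profile constant and
 * translates it to the MediaCodecInfo.CodecProfileLevel value declared
 * earlier.  Only a few branches are shown here, the surrounding per-codec
 * switch is omitted, and -1 as the "no match" value is an assumption. */
    switch (avctx->profile) {
    case AV_PROFILE_H264_BASELINE:
        return AVCProfileBaseline;
    case AV_PROFILE_H264_CONSTRAINED_BASELINE:
        return AVCProfileConstrainedBaseline;
    case AV_PROFILE_HEVC_MAIN:
        return HEVCProfileMain;
    default:
        return -1;
    }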
    char *supported_type = NULL;

    jobject codec_name = NULL;

    jobjectArray types = NULL;

    jobject capabilities = NULL;
    jobject profile_level = NULL;
    jobjectArray profile_levels = NULL;

    for (i = 0; i < codec_count; i++) {

        if (is_encoder != encoder) {

        if (is_software_only) {

        (*env)->DeleteLocalRef(env, codec_name);
        if (strstr(name, "OMX.google") ||
            strstr(name, "OMX.ffmpeg") ||
            (strstr(name, "OMX.SEC") && strstr(name, ".sw.")) ||
            !strcmp(name, "OMX.qcom.video.decoder.hevcswvdec")) {
        type_count = (*env)->GetArrayLength(env, types);
        for (j = 0; j < type_count; j++) {

            type = (*env)->GetObjectArrayElement(env, types, j);

            if (!supported_type) {

            profile_levels = (*env)->GetObjectField(env, capabilities, jfields.profile_levels_id);

            profile_count = (*env)->GetArrayLength(env, profile_levels);
            if (!profile_count) {

            for (k = 0; k < profile_count; k++) {
                int supported_profile = 0;

                profile_level = (*env)->GetObjectArrayElement(env, profile_levels, k);

                supported_profile = (*env)->GetIntField(env, profile_level, jfields.profile_id);

                found_codec = profile == supported_profile;

                (*env)->DeleteLocalRef(env, profile_level);
                profile_level = NULL;

            (*env)->DeleteLocalRef(env, profile_levels);
            profile_levels = NULL;

            (*env)->DeleteLocalRef(env, capabilities);

            (*env)->DeleteLocalRef(env, type);

        (*env)->DeleteLocalRef(env, info);

        (*env)->DeleteLocalRef(env, types);

    (*env)->DeleteLocalRef(env, codec_name);
    (*env)->DeleteLocalRef(env, info);
    (*env)->DeleteLocalRef(env, type);
    (*env)->DeleteLocalRef(env, types);
    (*env)->DeleteLocalRef(env, capabilities);
    (*env)->DeleteLocalRef(env, profile_level);
    (*env)->DeleteLocalRef(env, profile_levels);
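/* Descriptive summary of the scan above (a comment, not new behaviour): for
 * every codec index the loop fetches the MediaCodecInfo, skips entries whose
 * encoder/decoder direction does not match, skips software-only
 * implementations (isSoftwareOnly() where available, plus the
 * OMX.google/OMX.ffmpeg/OMX.SEC-".sw."/hevcswvdec name checks), then walks
 * getSupportedTypes() looking for the requested MIME type and, inside its
 * CodecCapabilities.profileLevels array, for the requested profile.  Every
 * JNI local reference acquired along the way is dropped with DeleteLocalRef()
 * before the next iteration, and again in the final cleanup block. */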
    jobject object = NULL;

    object = (*env)->NewObject(env, format->jfields.mediaformat_class, format->jfields.init_id);

    format->object = (*env)->NewGlobalRef(env, object);

    (*env)->DeleteLocalRef(env, object);

    format->object = (*env)->NewGlobalRef(env, object);

    (*env)->DeleteGlobalRef(env, format->object);

    jboolean contains_key;

    contains_key = (*env)->CallBooleanMethod(env, format->object, format->jfields.contains_key_id, key);

    *out = (*env)->CallIntMethod(env, format->object, format->jfields.get_integer_id, key);

    (*env)->DeleteLocalRef(env, key);

    jboolean contains_key;

    contains_key = (*env)->CallBooleanMethod(env, format->object, format->jfields.contains_key_id, key);

    (*env)->DeleteLocalRef(env, key);

    jboolean contains_key;

    contains_key = (*env)->CallBooleanMethod(env, format->object, format->jfields.contains_key_id, key);

    *out = (*env)->CallFloatMethod(env, format->object, format->jfields.get_float_id, key);

    (*env)->DeleteLocalRef(env, key);

    jboolean contains_key;

    contains_key = (*env)->CallBooleanMethod(env, format->object, format->jfields.contains_key_id, key);

    *data = (*env)->GetDirectBufferAddress(env, result);
    *size = (*env)->GetDirectBufferCapacity(env, result);

    (*env)->DeleteLocalRef(env, key);
    (*env)->DeleteLocalRef(env, result);

    jboolean contains_key;

    contains_key = (*env)->CallBooleanMethod(env, format->object, format->jfields.contains_key_id, key);

    (*env)->DeleteLocalRef(env, key);
    (*env)->DeleteLocalRef(env, result);

    (*env)->DeleteLocalRef(env, key);

    (*env)->DeleteLocalRef(env, key);

    (*env)->DeleteLocalRef(env, key);

    jstring string = NULL;

    (*env)->CallVoidMethod(env, format->object, format->jfields.set_string_id, key, string);

    (*env)->DeleteLocalRef(env, key);
    (*env)->DeleteLocalRef(env, string);

    (*env)->DeleteLocalRef(env, key);
    (*env)->DeleteLocalRef(env, buffer);
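/* Descriptive note on the MediaFormat getters/setters above: each one
 * presumably converts the C key string to a jstring (the file references
 * ff_jni_utf_chars_to_jstring()), calls containsKey() before the typed
 * getInteger()/getLong()/getFloat()/getByteBuffer()/getString() accessor,
 * checks for pending Java exceptions via ff_jni_exception_check(), and
 * finally releases every local reference (key, result, string, buffer) with
 * DeleteLocalRef() so the JNI local-reference table cannot overflow. */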
#define CREATE_CODEC_BY_NAME   0
#define CREATE_DECODER_BY_TYPE 1
#define CREATE_ENCODER_BY_TYPE 2

    jstring jarg = NULL;
    jobject object = NULL;
    jobject buffer_info = NULL;
    jmethodID create_id = NULL;

    object = (*env)->CallStaticObjectMethod(env,

    codec->object = (*env)->NewGlobalRef(env, object);

    codec->buffer_info = (*env)->NewGlobalRef(env, buffer_info);

    (*env)->DeleteLocalRef(env, jarg);
    (*env)->DeleteLocalRef(env, object);
    (*env)->DeleteLocalRef(env, buffer_info);

    (*env)->DeleteGlobalRef(env, codec->object);
#define DECLARE_FF_AMEDIACODEC_CREATE_FUNC(name, method)      \
static FFAMediaCodec *mediacodec_jni_##name(const char *arg)  \
{                                                             \
    return codec_create(method, arg);                         \
}
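/* The macro is presumably instantiated once per creation mode; the generated
 * functions are the ones installed in the JNI vtable further down
 * (.createCodecByName = mediacodec_jni_createCodecByName, ...), so the
 * argument pairs below follow from those names and the constants above. */
DECLARE_FF_AMEDIACODEC_CREATE_FUNC(createCodecByName,   CREATE_CODEC_BY_NAME)
DECLARE_FF_AMEDIACODEC_CREATE_FUNC(createDecoderByType, CREATE_DECODER_BY_TYPE)
DECLARE_FF_AMEDIACODEC_CREATE_FUNC(createEncoderByType, CREATE_ENCODER_BY_TYPE)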
    (*env)->DeleteGlobalRef(env, codec->object);

    (*env)->DeleteLocalRef(env, name);
    jobject input_buffers = NULL;

    codec->input_buffers = (*env)->NewGlobalRef(env, input_buffers);

    ret = (*env)->GetDirectBufferAddress(env, buffer);

    (*env)->DeleteLocalRef(env, buffer);
    (*env)->DeleteLocalRef(env, input_buffers);

    jobject output_buffers = NULL;

    codec->output_buffers = (*env)->NewGlobalRef(env, output_buffers);

    ret = (*env)->GetDirectBufferAddress(env, buffer);

    (*env)->DeleteLocalRef(env, buffer);
    (*env)->DeleteLocalRef(env, output_buffers);

    jobject mediaformat = NULL;

    (*env)->DeleteLocalRef(env, mediaformat);
    .createCodecByName   = mediacodec_jni_createCodecByName,
    .createDecoderByType = mediacodec_jni_createDecoderByType,
    .createEncoderByType = mediacodec_jni_createEncoderByType,
    media_status_t (*getName)(AMediaCodec*, char** out_name);

    format->libmedia = dlopen("libmediandk.so", RTLD_NOW);
#define GET_OPTIONAL_SYMBOL(sym) \
    format->sym = dlsym(format->libmedia, "AMediaFormat_" #sym);

#undef GET_OPTIONAL_SYMBOL

    format->impl = AMediaFormat_new();

    dlclose(format->libmedia);

    if (format->impl && (AMediaFormat_delete(format->impl) != AMEDIA_OK))

    dlclose(format->libmedia);

    const char *str = AMediaFormat_toString(format->impl);
    const char *lib_name = "libmediandk.so";

    codec->libmedia = dlopen(lib_name, RTLD_NOW);
#define GET_SYMBOL(sym)                                           \
    codec->sym = dlsym(codec->libmedia, "AMediaCodec_" #sym);     \
    if (!codec->sym)                                              \
        av_log(codec, AV_LOG_INFO, #sym "() unavailable from %s\n", lib_name);
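/* Hedged usage note: GET_SYMBOL() is presumably expanded once per
 * AMediaCodec_ entry point the wrapper needs, for example (exact list is an
 * assumption):
 *
 *     GET_SYMBOL(createCodecByName)
 *     GET_SYMBOL(dequeueInputBuffer)
 *     GET_SYMBOL(getOutputBuffer)
 *
 * so a symbol missing from an older libmediandk.so only logs a notice
 * instead of failing the dlopen()-based setup. */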
        codec->impl = AMediaCodec_createCodecByName(arg);

        codec->impl = AMediaCodec_createDecoderByType(arg);

        codec->impl = AMediaCodec_createEncoderByType(arg);
#define DECLARE_NDK_AMEDIACODEC_CREATE_FUNC(name, method)      \
static FFAMediaCodec *mediacodec_ndk_##name(const char *arg)   \
{                                                              \
    return ndk_codec_create(method, arg);                      \
}
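/* Presumably instantiated the same way as the JNI variant, producing the
 * mediacodec_ndk_createCodecByName/-DecoderByType/-EncoderByType functions
 * referenced by the NDK vtable below: */
DECLARE_NDK_AMEDIACODEC_CREATE_FUNC(createCodecByName,   CREATE_CODEC_BY_NAME)
DECLARE_NDK_AMEDIACODEC_CREATE_FUNC(createDecoderByType, CREATE_DECODER_BY_TYPE)
DECLARE_NDK_AMEDIACODEC_CREATE_FUNC(createEncoderByType, CREATE_ENCODER_BY_TYPE)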
    if (codec->impl && (AMediaCodec_delete(codec->impl) != AMEDIA_OK))

        ANativeWindow_release(codec->window);

    ANativeWindow *native_window = NULL;

        native_window = ANativeWindow_fromSurface(env, window->surface);

        codec->window = native_window;
    } else if (window->native_window) {
        native_window = window->native_window;

    if (flags & AMEDIACODEC_CONFIGURE_FLAG_ENCODE) {

    if (status != AMEDIA_OK) {

    if (status != AMEDIA_OK) {

    if (status != AMEDIA_OK) {
#define MEDIACODEC_NDK_WRAPPER(method)                                 \
static int mediacodec_ndk_ ## method(FFAMediaCodec* ctx)              \
{                                                                      \
    FFAMediaCodecNdk *codec = (FFAMediaCodecNdk *)ctx;                 \
    media_status_t status = AMediaCodec_ ## method (codec->impl);      \
                                                                       \
    if (status != AMEDIA_OK) {                                         \
        av_log(codec, AV_LOG_ERROR, #method " failed, %d\n", status);  \
        return AVERROR_EXTERNAL;                                       \
    }                                                                  \
                                                                       \
    return 0;                                                          \
}
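/* Presumably instantiated for the argument-less codec operations that appear
 * in the NDK vtable below (.start/.stop/.flush = mediacodec_ndk_start/...): */
MEDIACODEC_NDK_WRAPPER(start)
MEDIACODEC_NDK_WRAPPER(stop)
MEDIACODEC_NDK_WRAPPER(flush)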
    return AMediaCodec_getInputBuffer(codec->impl, idx, out_size);

    return AMediaCodec_getOutputBuffer(codec->impl, idx, out_size);

    return AMediaCodec_dequeueInputBuffer(codec->impl, timeoutUs);

                                          uint64_t time, uint32_t flags)

    AMediaCodecBufferInfo buf_info = {0};

    ret = AMediaCodec_dequeueOutputBuffer(codec->impl, &buf_info, timeoutUs);
    info->offset = buf_info.offset;
    info->size = buf_info.size;
    info->presentationTimeUs = buf_info.presentationTimeUs;
    info->flags = buf_info.flags;

    AMediaFormat *format = AMediaCodec_getOutputFormat(codec->impl);

    status = AMediaCodec_releaseOutputBuffer(codec->impl, idx, render);
    if (status != AMEDIA_OK) {

    status = AMediaCodec_releaseOutputBufferAtTime(codec->impl, idx, timestampNs);
    if (status != AMEDIA_OK) {

    return idx == AMEDIACODEC_INFO_TRY_AGAIN_LATER;

    return idx == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED;

    return idx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED;

    return AMEDIACODEC_BUFFER_FLAG_CODEC_CONFIG;

    return AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM;

    return AMEDIACODEC_CONFIGURE_FLAG_ENCODE;
    if (status != AMEDIA_OK) {

    .createCodecByName   = mediacodec_ndk_createCodecByName,
    .createDecoderByType = mediacodec_ndk_createDecoderByType,
    .createEncoderByType = mediacodec_ndk_createEncoderByType,

    .start = mediacodec_ndk_start,
    .stop  = mediacodec_ndk_stop,
    .flush = mediacodec_ndk_flush,
#if __ANDROID_API__ >= 24
    ret = android_get_device_api_level();

    jclass versionClass;
    jfieldID sdkIntFieldID;

    versionClass = (*env)->FindClass(env, "android/os/Build$VERSION");
    sdkIntFieldID = (*env)->GetStaticFieldID(env, versionClass, "SDK_INT", "I");
    ret = (*env)->GetStaticIntField(env, versionClass, sdkIntFieldID);
    (*env)->DeleteLocalRef(env, versionClass);
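/* Descriptive note: when the NDK target is new enough (__ANDROID_API__ >= 24)
 * the runtime API level comes from android_get_device_api_level(); otherwise
 * it is read through JNI from the static int android.os.Build$VERSION.SDK_INT,
 * as shown above. */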