[FFmpeg-user] video created using ffmpeg and H264 codec does not work on android devices

Susheel Tickoo ticks.sunny84 at gmail.com
Tue Apr 9 11:46:42 CEST 2013


Hi,

I have created a video using ffmpeg and H264 codec ,but the video does not
play on the android devices.
The code i am using is as follows:

JNIEXPORT void Java_com_canvasm_mediclinic_VideoGenerator_generate(JNIEnv
*pEnv, jobject pObj,jobjectArray stringArray,int famerate,int width,int
height,jstring videoFilename)
{
 AVCodec *codec;
 AVCodecContext *c= NULL;
 //int framesnum=5;
 int i,looper, out_size, size, x, y,j;
 int ret,pts,got_pkt_ptr;

 int imagecount= (*pEnv)->GetArrayLength(pEnv, stringArray);
 int retval=-10;
 uint8_t endcode[]={0,0,1,0xb7};
 AVPacket outpacket;
 FILE *f;
 AVFrame *picture,*encoded_avframe;
 jbyte *raw_record;
 char logdatadata[100];
 int returnvalue = -1,numBytes =-1;
 const char *gVideoFileName = (char *)(*pEnv)->GetStringUTFChars(pEnv,
videoFilename, NULL);
 /* find the mpeg1 video encoder */
 codec = avcodec_find_encoder_by_name("libx264");
 if (!codec) {
  __android_log_write(ANDROID_LOG_INFO, "record","codec not found");
  exit(1);
 }
 c= avcodec_alloc_context();
 c->bit_rate = 500000;
 c->width = width;
 c->height = height;
 c->time_base= (AVRational){1,famerate};
 c->gop_size = 12; // emit one intra frame every ten frames
 c->max_b_frames=1;
 c->pix_fmt = PIX_FMT_YUV420P;
 c->codec_type = AVMEDIA_TYPE_VIDEO;
 c->codec_id = codec->id;
 c->max_b_frames = 0;
 c->me_range = 16;
 c->max_qdiff = 4;
 c->qmin = 10;
 c->qmax = 26;
 c->qcompress = 0.6;
 c->trellis=0;
 c->level = 30;
 c->refs = 5;
 c->coder_type = 0;
 c->scenechange_threshold = 0;
 //new
 c->flags|=CODEC_FLAG_LOOP_FILTER;//new
 c->scenechange_threshold = 40; //new
 c-> rc_buffer_size = 0;
 c->gop_size=250; //new
 c->max_b_frames=1;//new
 c->me_method=7;
 c->me_cmp|= 1;
 c->me_subpel_quality = 6;
 c->qmax=51;
 c->keyint_min=25;
 av_opt_set(c->priv_data,"subq","6",0);
 av_opt_set(c->priv_data,"crf","20.0",0);
 av_opt_set(c->priv_data,"weighted_p_pred","0",0);
 av_opt_set(c->priv_data,"profile","baseline",AV_OPT_SEARCH_CHILDREN);
 av_opt_set(c->priv_data,"preset","medium",0);
 av_opt_set(c->priv_data,"tune","zerolatency",0);
 av_opt_set(c->priv_data,"x264opts","rc-lookahead=0",0);


 /* open it */
 retval = avcodec_open(c, codec);
 if ( retval < 0)
 {
 __android_log_write(ANDROID_LOG_INFO, "record","could not open codec");
  exit(1);
 }
 f = fopen(gVideoFileName, "ab");
 if (!f) {
  __android_log_write(ANDROID_LOG_INFO, "record","could not open video
file");
  exit(1);
 }
 pts = 0;
 for(i=0;i<=imagecount;i++) {

  jboolean isCp;
  int progress = 0;
  float temp;
  jstring string;
  if(i==imagecount)
   string = (jstring) (*pEnv)->GetObjectArrayElement(pEnv, stringArray,
imagecount-1);
  else
   string = (jstring) (*pEnv)->GetObjectArrayElement(pEnv, stringArray, i);
  const char *rawString = (*pEnv)->GetStringUTFChars(pEnv, string, &isCp);
  picture = OpenImage(rawString,width,height,i);
  av_init_packet(&outpacket);
  fflush(stdout);

  {
   picture->pts=i ;//c->frame_number;
   do{
    out_size = avcodec_encode_video2(c, &outpacket,
picture,&got_pkt_ptr);
   }while(!got_pkt_ptr);
  }

  returnvalue = fwrite(outpacket.data, 1, outpacket.size, f);
  av_free_packet(&outpacket);
 }

 /* get the delayed frames */
 for(got_pkt_ptr =1; got_pkt_ptr; i++) {
  fflush(stdout);
  avcodec_encode_video2(c, &outpacket, NULL,&got_pkt_ptr);
  if(got_pkt_ptr)
  {
   fwrite(outpacket.data, 1, outpacket.size, f);
   av_free_packet(&outpacket);
  }
 }
 fwrite(endcode,1,sizeof(endcode),f);
 fclose(f);
 avcodec_close(c);
 av_free(c);
}
AVFrame* OpenImage(const char* imageFileName,int w,int h,int index)
{
 AVFrame *pFrame;
 AVCodec *pCodec ;
 AVFormatContext *pFormatCtx = NULL;
 AVCodecContext *pCodecCtx;
 uint8_t *buffer;
 int frameFinished,framesNumber = 0,retval = -1,numBytes=0;
 AVPacket packet;
 char logdatadata[100];
 int result = -1;



 result=avformat_open_input(&pFormatCtx,imageFileName,NULL,NULL);
 if(result!=0)
 {
  __android_log_write(ANDROID_LOG_INFO, "record",
    "Can't open image file ");
  return NULL;
 }
 pCodecCtx = pFormatCtx->streams[0]->codec;
 pCodecCtx->width = w;
 pCodecCtx->height = h;
 pCodecCtx->pix_fmt = PIX_FMT_YUV420P;

 // Find the decoder for the video stream
 pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
 if (!pCodec)
 {
  __android_log_write(ANDROID_LOG_INFO, "record",
    "Can't open image file ");
  return NULL;
 }
 pFrame = avcodec_alloc_frame();
 numBytes = avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width,
pCodecCtx->height);
 buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
 retval = avpicture_fill((AVPicture *) pFrame, buffer, PIX_FMT_YUV420P,
pCodecCtx->width, pCodecCtx->height);
 // Open codec
 if(avcodec_open(pCodecCtx, pCodec)<0)
 {
  __android_log_write(ANDROID_LOG_INFO, "record","Could not open codec");
  return NULL;
 }
 if (!pFrame)
 {
  __android_log_write(ANDROID_LOG_INFO, "record","Can't allocate memory for
AVFrame\n");
  return NULL;
 }
 int readval = -5;
 while (readval = av_read_frame(pFormatCtx, &packet) >= 0)
 {
  if(packet.stream_index != 0)
   continue;
  int ret = avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished,
&packet);
  if (ret > 0)
  {
   __android_log_write(ANDROID_LOG_INFO, "record","Frame is decoded\n");
   pFrame->quality = 4;
   av_free_packet(&packet);
   av_close_input_file(pFormatCtx);
   return pFrame;
  }
  else
  {
   __android_log_write(ANDROID_LOG_INFO, "record","error while decoding
frame \n");
  }
 }
}

Please let me know where i am going wrong?

I have also tried the sampel example code that comes with ffmpeg
doc/examples/decoding_encoding.c
but the video created using that too is not playing on android devices.

I am using ffmpeg version 1.2;

any help will be greatly appriciated:
regards,
susheel tickoo


More information about the ffmpeg-user mailing list