[Libav-user] FFMpeg - x264

Denis info at denisgottardello.it
Wed Sep 14 20:31:29 CEST 2011


On Wednesday 14 September 2011 17:44:34, Roger Pack wrote:
> > Vlc does not work.
> > What is the error? I cannot find it. I cannot understand what is the
> > right way to store a frame in an h264 file.
> 
> Maybe you're setting the pts timestamps wrong?


I have made a small step forward. Now I can produce an h264 file that I can play with mplayer and vlc, but I still have some trouble.
Mplayer plays the video at 50 fps instead of 25. Vlc plays the video at the right fps.
Another program reports that the video has wrong timestamps (pts/dts). This is what the program says:

PachetCount: 1 , 6144 bytes, pBufferSize: 32768 
PachetCount: 2 , 26624 bytes, pBufferSize: 26624 
[mpegts @ 0xb2502da0] Invalid timestamps stream=0, pts=0, dts=8589927392, size=1163
[mpegts @ 0xb2502da0] Invalid timestamps stream=0, pts=14400, dts=8589930992, size=19
[mpegts @ 0xb2502da0] max_analyze_duration 5000000 reached at 5000000
[mpegts @ 0xb2502da0] Estimating duration from bitrate, this may be inaccurate
dump_format begin 
Input #0, mpegts, from 'foo.mpg':
  Duration: N/A, start: 0.000000, bitrate: N/A
  Program 1 
    Metadata:
      service_name    : Service01
      service_provider: FFmpeg
    Stream #0.0[0x100]: Video: h264 (High), yuv420p, 320x240, 25 fps, 25 tbr, 90k tbn, 50 tbc
dump_format end 
0 
"fps: 25" 

And this is the function:

void QThCamera::run() {
    qDebug() << "QThCamera::run() start";
    QString MPGFileName= "a.mpg", MJPEGFileName= "a.mjpg";

    CvCapture *Capture= NULL;
    Capture= cvCreateCameraCapture(Index, Width, Height);
    if (!Capture) qDebug() << "Cannot open or initialize webcam!";
    else {
        if (ExternalFrame) cvNamedWindow("Frame", CV_WINDOW_AUTOSIZE);
        IplImage *frame= 0;
        CvFont font;
        cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX, 0.5f, 0.5f);

        AVOutputFormat *pOutputFormat= av_guess_format("mpegts", NULL, NULL);
        if (!pOutputFormat) {
            qDebug() << "Could not set output format, using MPEG.";
            pOutputFormat= av_guess_format("mpeg", NULL, NULL);
        }
        if (!pOutputFormat) qDebug() << "Could not set output format.";
        else {
            AVFormatContext *pFormatCtx;
            if (avformat_alloc_output_context2(&pFormatCtx, pOutputFormat, NULL, NULL)< 0) qDebug() << "avformat_alloc_output_context2 Error!";
            else {
                AVStream *pVideoStream= av_new_stream(pFormatCtx, 0);
                if (!pVideoStream) qDebug() << "av_new_stream Error!";
                else {
                    AVCodecContext *pCodecCtx= pVideoStream->codec;
                    pCodecCtx->codec_id= CODEC_ID_H264;
                    pCodecCtx->codec_type= AVMEDIA_TYPE_VIDEO;
                    pCodecCtx->bit_rate= 40000;
                    pCodecCtx->width= Width;
                    pCodecCtx->height= Height;
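                    // time_base defines the pts unit: num/den = 1/25 s per tick (25 fps)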
                    pCodecCtx->time_base.den= 25;
                    pCodecCtx->time_base.num= 1;
                    pCodecCtx->gop_size= 10;
                    pCodecCtx->pix_fmt= PIX_FMT_YUV420P;
                    if (pFormatCtx->oformat->flags & AVFMT_GLOBALHEADER) pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
                    av_dump_format(pFormatCtx, 0, MPGFileName.toStdString().c_str(), 1);
                    AVCodec *pCodec= avcodec_find_encoder(pCodecCtx->codec_id);
                    if (!pCodec) qDebug() << "avcodec_find_encoder Error!";
                    else {
                        if (avcodec_open(pCodecCtx, pCodec)< 0) qDebug() << "avcodec_open Error!";
                        else {
                            if (avio_open(&pFormatCtx->pb, MPGFileName.toStdString().c_str(), AVIO_FLAG_WRITE)< 0) qDebug() << "avio_open Error!";
                            else {
                                if (avformat_write_header(pFormatCtx, NULL)!= 0) qDebug() << "avformat_write_header Error!";
                                int BYTEPIC= Width * Height * 3;
                                uint8_t *pOutBuffer= (uint8_t*)malloc(BYTEPIC); {
                                    QFile QFFileOut;
                                    QFFileOut.setFileName(MJPEGFileName);
                                    QFFileOut.open(QIODevice::WriteOnly);
                                    int Frames= 0;
                                    QDateTime QDTStart= QDateTime::currentDateTime();
                                    while (DoStart) {
                                        if (!cvSetChannel(Capture, Channel)) qDebug() << "Unable to set channel.";
                                        frame= cvQueryFrame(Capture);
                                        if (frame) {
                                            if (MainForm->ui->QCBAutoBrightness->isChecked()) {
                                                int Brightness= BrightnessOfAPixel(frame);
                                                Brightness= ((Brightness * 200 / 256) - 100) * -1;
                                                IplImage *frame2= ContrastBrightness(frame, 0, Brightness);
                                                cvCopyImage(frame2, frame);
                                                cvReleaseImage(&frame2);
                                            } else {
                                                IplImage *frame2= ContrastBrightness(frame, MainForm->ui->QHSContrast->value(), MainForm->ui->QHSBrightness->value());
                                                cvCopyImage(frame2, frame);
                                                cvReleaseImage(&frame2);
                                            }
                                            if (MainForm->ui->QCBShowDateTime->isChecked()) cvPutText(frame, QDateTime::currentDateTime().toString("dd-MM-yyyy hh:mm:ss").toAscii(), cvPoint(10, 20), &font, cvScalar(255, 255, 255));
                                            if (MainForm->ui->QRBRotate90->isChecked()) {
                                                IplImage *frame2= Rotate(frame, 90);
                                                cvCopyImage(frame2, frame);
                                                cvReleaseImage(&frame2);
                                            } else if (MainForm->ui->QRBRotate180->isChecked()) {
                                                IplImage *frame2= Rotate(frame, 180);
                                                cvCopyImage(frame2, frame);
                                                cvReleaseImage(&frame2);
                                            } else if (MainForm->ui->QRBRotate270->isChecked()) {
                                                IplImage *frame2= Rotate(frame, 270);
                                                cvCopyImage(frame2, frame);
                                                cvReleaseImage(&frame2);
                                            }
                                            if (ExternalFrame) cvShowImage("Frame", frame);
                                            const unsigned char *data= (const unsigned char*)frame->imageData;
                                            QImage Image(data, frame->width, frame->height, frame->widthStep, QImage::Format_RGB888);
                                            if (DoStart) emit SendNewImage(Image);


                                            QByteArray ByteArray;
                                            QBuffer Buffer(&ByteArray);
                                            Image.save(&Buffer, "JPG");
                                            QFFileOut.write(ByteArray);
                                            QFFileOut.write(QDateTime::currentDateTime().toString("yyyyMMddhhmmsszzz").toAscii());


                                            AVFrame *pAVFrame= avcodec_alloc_frame();
                                            uint8_t *pBuffer= (uint8_t*)malloc(avpicture_get_size(PIX_FMT_YUV420P, Width, Height)); {
                                                avpicture_fill((AVPicture*)pAVFrame, pBuffer, PIX_FMT_YUV420P, Width, Height);
                                                IplImageToAVFrame(frame, pAVFrame, Width, Height, PIX_FMT_YUV420P);
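                                                // pts in codec time_base units (1/25 s): one tick per captured frame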
                                                pAVFrame->pts= Frames;
                                                int OutSize= avcodec_encode_video(pCodecCtx, pOutBuffer, BYTEPIC, pAVFrame);
                                                if (OutSize> 0) {
                                                    AVPacket Packet;
                                                    av_init_packet(&Packet);
                                                    if (pCodecCtx->coded_frame->pts != AV_NOPTS_VALUE) {
                                                        Packet.pts= av_rescale_q(pCodecCtx->coded_frame->pts, pCodecCtx->time_base, pVideoStream->time_base);
                                                    }
                                                    //if (pCodecCtx->coded_frame->key_frame) Packet.flags |= AV_PKT_FLAG_KEY;
                                                    Packet.stream_index= pVideoStream->index;
                                                    Packet.data= pOutBuffer;
                                                    Packet.size= OutSize;
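                                                    // note: Packet.dts is left unset (AV_NOPTS_VALUE)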
                                                    if (av_interleaved_write_frame(pFormatCtx, &Packet)!= 0) qDebug() << "av_interleaved_write_frame Error!";
                                                }
                                                Frames++;
                                                // free the per-frame buffers here, so they are released even when we break out
                                                av_free(pAVFrame);
                                                free(pBuffer);
                                                if (Frames> pCodecCtx->time_base.den / pCodecCtx->time_base.num * 10) break;
                                            }


                                        } else qDebug() << "QThCamera::run() frame= false";
                                    }
                                    QFFileOut.close();
                                    qDebug() << Frames / QDTStart.secsTo(QDateTime::currentDateTime());
                                    free(pOutBuffer);
                                }
                                if (av_write_trailer(pFormatCtx)!= 0) qDebug() << "av_write_trailer Error!";
                                avio_close(pFormatCtx->pb);
                            }
                            avcodec_close(pVideoStream->codec);
                        }
                    }
                    for (int count= 0; (unsigned)count< pFormatCtx->nb_streams; count++) {
                        av_freep(&pFormatCtx->streams[count]->codec);
                        av_freep(&pFormatCtx->streams[count]);
                    }
                }
                av_free(pFormatCtx);
            }
        }
        cvReleaseCapture(&Capture);
        if (ExternalFrame) cvDestroyWindow("Frame");
    }
    qDebug() << "QThCamera::run() stop";
}





Now, if I have understood correctly, everything works like this (sketch below):
1) In order to encode something with FFmpeg you must define an AVFormatContext object, which represents the container.
2) Use "av_guess_format("mpegts", NULL, NULL);" to select the container type. In this case "mpegts" means the MPEG transport stream container.
3) In the container you must define a stream, whose codec can be CODEC_ID_H264.
4) With "av_interleaved_write_frame(pFormatCtx, &Packet)" you insert a packet containing one encoded frame into the stream.

Is that right?
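
To check my understanding, here is the skeleton I believe those four steps correspond to, using the same calls as in my function above (error handling omitted):

AVOutputFormat *pOutputFormat= av_guess_format("mpegts", NULL, NULL);  // 1) + 2) the container
AVFormatContext *pFormatCtx;
avformat_alloc_output_context2(&pFormatCtx, pOutputFormat, NULL, NULL);

AVStream *pVideoStream= av_new_stream(pFormatCtx, 0);                  // 3) the h264 stream
pVideoStream->codec->codec_id= CODEC_ID_H264;
// ... set codec_type, bit_rate, width, height, time_base, pix_fmt,
// then find and open the encoder with avcodec_find_encoder / avcodec_open ...

avio_open(&pFormatCtx->pb, "foo.mpg", AVIO_FLAG_WRITE);
avformat_write_header(pFormatCtx, NULL);

// for every captured frame: encode it, wrap the output in an AVPacket
// and hand it to the muxer:
// av_interleaved_write_frame(pFormatCtx, &Packet);                    // 4)

av_write_trailer(pFormatCtx);
avio_close(pFormatCtx->pb);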

What I have not understood is: at which level must the pts and dts values be expressed? Container level? Stream level? Frame level? Packet level?
What is the right way to calculate the pts and dts values?
The only thing I do is "pAVFrame->pts= Frames;", in order not to get the error "non-strictly-monotonic PTS".
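
To put the question in code, this is everything my function currently does with timestamps; my guess is that the never-set Packet.dts is what produces the "Invalid timestamps" warning, but I am not sure:

// frame level: pts counted in pCodecCtx->time_base units (1/25 s here),
// one tick per captured frame
pAVFrame->pts= Frames;

// packet level: rescale the pts reported by the encoder from the codec
// time_base (1/25) to the stream time_base (1/90000 for mpegts)
if (pCodecCtx->coded_frame->pts != AV_NOPTS_VALUE)
    Packet.pts= av_rescale_q(pCodecCtx->coded_frame->pts, pCodecCtx->time_base, pVideoStream->time_base);

// Packet.dts is never set; for a stream without B-frames it could
// perhaps simply mirror the pts, but I do not know if that is correct:
// Packet.dts= Packet.pts;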



-- 
www.denisgottardello.it
Skype: mrdebug
Videosurveillance and home automation! 
http://www.denisgottardello.it/DomusBoss/DomusBossIndice.php