[Libav-user] RTSP Audio/Video Synchronization
Alessio Volpe
alessio.volpe.av at gmail.com
Tue Mar 24 23:45:14 CET 2015
Thanks for the reply.
I want to save the audio and video streams, synchronized, into an AVI file.
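
Roughly, what I think I need (a minimal sketch, not my actual code) is to
rescale each packet's timestamps from the input stream's time base to the
output stream's time base before muxing; out_index[] here is an assumed
mapping from input stream indexes to output stream indexes:

-------------------------------------------------------------

#include <libavformat/avformat.h>

/* Copy one demuxed packet to the output muxer, rescaling its timestamps
 * from the input stream's time base to the output stream's time base.
 * out_index[] (assumed) maps input stream indexes to output stream indexes. */
static int write_packet_copy( AVFormatContext *ifcx, AVFormatContext *ofcx,
                              const int *out_index, AVPacket *pkt )
{
    AVStream *ist = ifcx->streams[ pkt->stream_index ];
    AVStream *ost = ofcx->streams[ out_index[ pkt->stream_index ] ];

    /* Rescale pts/dts/duration; this is what keeps audio and video aligned. */
    av_packet_rescale_ts( pkt, ist->time_base, ost->time_base );
    pkt->stream_index = ost->index;

    /* Let the muxer interleave audio and video packets by dts. */
    return av_interleaved_write_frame( ofcx, pkt );
}

-------------------------------------------------------------

Is that the right idea?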
On 24/Mar/2015 21:12, "wm4" <nfxjfg at googlemail.com> wrote:
> On Tue, 24 Mar 2015 17:31:16 +0100
> Alessio Volpe <alessio.volpe.av at gmail.com> wrote:
>
> > Hi, this is my program:
> >
> > -------------------------------------------------------------
> >
> > #include <stdio.h>
> > #include <stdlib.h>
> > #include <libavcodec/avcodec.h>
> > #include <libavformat/avformat.h>
> > #include <libavformat/avio.h>
> > #include <sys/time.h>
> >
> > time_t get_time()
> > {
> > struct timeval tv;
> >
> > gettimeofday( &tv, NULL );
> >
> > return tv.tv_sec;
> > }
> >
> > int main( int argc, char* argv[] )
> > {
> > AVFormatContext *ifcx = NULL;
> > AVInputFormat *ifmt;
> > AVCodecContext *iccx_video, *iccx_audio;
> > AVCodec *icodec;
> > AVStream *ist_video, *ist_audio;
> > int i_index_video, i_index_audio;
> > time_t timenow, timestart;
> > int got_key_frame = 0;
> >
> > AVFormatContext *ofcx;
> > AVOutputFormat *ofmt;
> > AVCodecContext *occx;
> > AVCodec *ocodec;
> > AVStream *ost_video, *ost_audio;
> > int o_index_video, o_index_audio;
> >
> > AVPacket pkt;
> >
> > int ix, ix_video, ix_audio;
> >
> > const char *sFileInput;
> > const char *sFileOutput;
> > int bRunTime;
> >
> > //RTSP address
> > sFileInput = "rtsp://10.4.1.175/media/video1";
> >
> > //Output file
> > sFileOutput = "camera.avi";
> >
> > //Acquisition run time
> > bRunTime = 15; //Record for 15 seconds
> >
> > // Initialize library
> > av_log_set_level( AV_LOG_DEBUG );
> > av_register_all();
> > avcodec_register_all();
> > avformat_network_init();
> >
> > //
> > // Input
> > //
> >
> > //open rtsp
> > if ( avformat_open_input( &ifcx, sFileInput, NULL, NULL) != 0 ) {
> > printf( "ERROR: Cannot open input file\n" );
> > return EXIT_FAILURE;
> > }
> >
> > if ( avformat_find_stream_info( ifcx, NULL ) < 0 ) {
> > printf( "ERROR: Cannot find stream info\n" );
> > avformat_close_input( &ifcx );
> > return EXIT_FAILURE;
> > }
> >
> > snprintf( ifcx->filename, sizeof( ifcx->filename ), "%s", sFileInput );
> >
> > //search video stream
> > i_index_video = -1;
> > for ( ix = 0; ix < ifcx->nb_streams; ix++ ) {
> > iccx_video = ifcx->streams[ ix ]->codec;
> > if ( iccx_video->codec_type == AVMEDIA_TYPE_VIDEO ) {
> > ist_video = ifcx->streams[ ix ];
> > i_index_video = ix;
> > break;
> > }
> > }
> > if ( i_index_video < 0 ) {
> > printf( "ERROR: Cannot find input video stream\n" );
> > avformat_close_input( &ifcx );
> > return EXIT_FAILURE;
> > }
> >
> >
> > //search audio stream
> > i_index_audio = -1;
> > for ( ix = 0; ix < ifcx->nb_streams; ix++ ) {
> > iccx_audio = ifcx->streams[ ix ]->codec;
> > if ( iccx_audio->codec_type == AVMEDIA_TYPE_AUDIO ) {
> > ist_audio = ifcx->streams[ ix ];
> > i_index_audio = ix;
> > break;
> > }
> > }
> > if ( i_index_audio < 0 ) {
> > printf( "ERROR: Cannot find input video stream\n" );
> > avformat_close_input( &ifcx );
> > return EXIT_FAILURE;
> > }
> >
> > //
> > // Output
> > //
> >
> > //open output file
> > ofmt = av_guess_format( NULL, sFileOutput, NULL ); //Return the output format
> > ofcx = avformat_alloc_context();
> > ofcx->oformat = ofmt;
> > avio_open2( &ofcx->pb, sFileOutput, AVIO_FLAG_WRITE, NULL, NULL );
> >
> > // Create video and audio output streams
> > ost_video = avformat_new_stream( ofcx, NULL );
> > ost_audio = avformat_new_stream( ofcx, NULL );
> >
> > avcodec_copy_context( ost_video->codec, iccx_video ); //Copy the input stream's codec context
> > avcodec_copy_context( ost_audio->codec, iccx_audio );
> >
> >
> > ost_video->sample_aspect_ratio.num = iccx_video->sample_aspect_ratio.num;
> > ost_video->sample_aspect_ratio.den = iccx_video->sample_aspect_ratio.den;
> >
> > // Assume r_frame_rate is accurate
> > ost_video->r_frame_rate = ist_video->r_frame_rate;
> > ost_video->avg_frame_rate = ost_video->r_frame_rate;
> > ost_video->time_base = (AVRational){ ost_video->r_frame_rate.den, ost_video->r_frame_rate.num };
> > //ost->time_base = av_inv_q( ost->r_frame_rate ); //error
> > ost_video->codec->time_base = ost_video->time_base;
> >
> > // Set up audio output stream parameters
> > ost_audio->sample_aspect_ratio.num = iccx_audio->sample_aspect_ratio.num;
> > ost_audio->sample_aspect_ratio.den = iccx_audio->sample_aspect_ratio.den;
> >
> >
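> > // NOTE: r_frame_rate is a video concept and is often unset for an audio
> > // stream, so the time_base derived from it below may be invalid; an audio
> > // time base is normally 1/sample_rate.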
> > ost_audio->r_frame_rate = ist_audio->r_frame_rate;
> > ost_audio->avg_frame_rate = ost_audio->r_frame_rate;
> > ost_audio->time_base = (AVRational){ ost_audio->r_frame_rate.den, ost_audio->r_frame_rate.num };
> > //ost->time_base = av_inv_q( ost->r_frame_rate ); //error
> > ost_audio->codec->time_base = ost_audio->time_base;
> >
> > avformat_write_header( ofcx, NULL );
> >
> > snprintf( ofcx->filename, sizeof( ofcx->filename ), "%s", sFileOutput );
> >
> > //start reading packets from stream and write them to file
> >
> > av_dump_format( ifcx, 0, ifcx->filename, 0 ); //INFO INPUT
> > av_dump_format( ofcx, 0, ofcx->filename, 1 ); //INFO OUTPUT
> >
> > timestart = timenow = get_time();
> >
> > ix_video = 0;
> > ix_audio = 0;
> >
> > double video_pts, audio_pts;
> >
> > av_init_packet( &pkt );
> >
> > double audio_time, video_time;
> >
> > while ( av_read_frame( ifcx, &pkt ) >= 0 && timenow - timestart <=
> > bRunTime ) { //&& (getchar() != 'q')){
> > av_packet_rescale_ts(&pkt,
> > ofcx->streams[i_index_video]->codec->time_base,
> > ifcx->streams[i_index_video]->time_base);
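> > // NOTE: av_packet_rescale_ts() takes (pkt, src_tb, dst_tb); when remuxing,
> > // the source should be the input stream's time_base and the destination the
> > // output stream's time_base, for the stream this packet actually belongs to.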
> > if ( pkt.stream_index == i_index_video ) { //packet is video
> > //Make sure we start on a key frame (an I-frame)
> > if ( timestart == timenow && ! ( pkt.flags & AV_PKT_FLAG_KEY ) ) {
> > timestart = timenow = get_time();
> > continue;
> > }
> > got_key_frame = 1;
> >
> > // video_pts = (double)ost_video->pts.val * ost_video->time_base.num / ost_video->time_base.den;
> > // audio_pts = (double)ost_audio->pts.val * ost_audio->time_base.num / ost_audio->time_base.den;
> >
> > pkt.stream_index = ost_video->index;
> > // /* prepare packet for muxing */
> > // pkt.dts = av_rescale_q_rnd(pkt.dts,
> > ofcx->streams[i_index_video]->codec->time_base,
> > ofcx->streams[i_index_video]->time_base,
> > AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX);
> > // pkt.pts = av_rescale_q_rnd(pkt.pts,
> > ofcx->streams[i_index_video]->codec->time_base,
> > ofcx->streams[i_index_video]->time_base,
> > AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX);
> > // pkt.duration = av_rescale_q(pkt.duration,
> > ofcx->streams[i_index_video]->codec->time_base,
> > ofcx->streams[i_index_video]->time_base);
> >
> >
> > pkt.pts = ix_video++;
> > pkt.dts = pkt.pts;
> >
> > // /* Also, some streams have multiple ticks-per-frame, so if the video
> > // runs at double speed you might need this right below the above line: */
> >
> > // pkt.pts *= ifcx->streams[0]->codec->ticks_per_frame;
> > // pkt.dts *= ifcx->streams[0]->codec->ticks_per_frame;
> >
> > //av_write_frame( ofcx, &pkt );
> > av_interleaved_write_frame( ofcx, &pkt );
> > }
> > else{ //packet is audio
> >
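> > // NOTE: counting packets by hand ignores the real capture timestamps; the
> > // audio pts should be expressed in the audio stream's time_base (ideally
> > // rescaled from the input packet's pts), or audio and video will drift.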
> > pkt.pts = ix_audio++;
> > pkt.dts = pkt.pts;
> >
> > //av_write_frame( ofcx, &pkt );
> > av_interleaved_write_frame( ofcx, &pkt );
> >
> > }
> >
> > //LOOP TO SYNCHRONIZE AND WRITE TO DISK
> >
> > // printf("vpcopy[%d].pts = %d", i, vpcopy[i].pts);
> > // printf("\n");
> >
> > // if(i == 30) {
> > // for(j=0; j<30-1; j++)
> > // {
> > // min = j;
> >
> > // for(k=j+1; k<30; k++)
> > // if(vpcopy[j].pts < vpcopy[min].pts) //change this condition to reverse the order
> > // min = k;
> >
> > // temp=vpcopy[min];
> > // vpcopy[min]=vpcopy[j];
> > // vpcopy[j]=temp;
> >
> > // printf("vpcopy[%d].pts = %d", i, vpcopy[i].pts);
> > // printf("\n");
> >
> > // av_interleaved_write_frame( ofcx, &vpcopy[j] );
> > // }
> > // i = 0;
> > // }
> >
> >
> > av_free_packet( &pkt );
> > av_init_packet( &pkt );
> >
> > timenow = get_time();
> > }
> > av_read_pause( ifcx );
> > av_write_trailer( ofcx );
> > avio_close( ofcx->pb );
> > avformat_free_context( ofcx );
> >
> > avformat_network_deinit();
> >
> > return EXIT_SUCCESS;
> > }
> >
> > -------------------------------------------------------------
> >
> > I would like to synchronize the video and audio.
> >
> > How should I use the pts and dts?
>
> It looks like you just want to receive the data and display it. This
> should be helpful: http://dranger.com/ffmpeg/ (it starts with the
> basics, but also touches on A/V sync).