[Libav-user] Audio is second behind
Denis Ivanov
denisx.ivanov at gmail.com
Thu Dec 17 12:57:45 EET 2020
Hi,
I have three problems.
First problem is that when I transcode Video and Audio the audio is a
second behind the video. I think the problem is that the video stream is
not delayed as the audio. I've run ffplay with video/audio and I can see
that the video and audio are both delayed by about a second, but when running from
code the video is not delayed at all (I have an SDL preview) — it's instant.
Maybe that's one of the reasons they are out of sync.
Second problem is that when running same code with same video, if the
camera is recording in low light, the resulting file is 'fast-forwarded'.
E.g. if I record 12 sec video, the resulting file is indeed 12 secs, but
the frames recorded fit in 7 secs, playing fast and after the 7 second the
video freezes.
Third problem, which is not so bad, but it is a problem. There is config
option where if the user wants specific resolution to record with it. Of
course, using `dshow`, I first check whether the resolution is supported by the
input device. Even so, with some cameras the decoder fails when
'video_size' is set to a supported resolution.
Now some code:
Creating the Input format context
/// <summary>
/// Builds and opens the input <see cref="AVFormatContext"/> for <c>_streamUrl</c>,
/// applying low-latency demuxer options and an optional requested capture resolution.
/// The caller owns the returned context and must release it with
/// <c>avformat_close_input</c>.
/// </summary>
private AVFormatContext* CreateFormatContext()
{
    AVDictionary* options = null;

    // Low-latency tuning for the capture/RTSP input.
    ffmpeg.av_dict_set(&options, "packet-buffering", "0", 0);
    ffmpeg.av_dict_set(&options, "sync", "1", 0);
    ffmpeg.av_dict_set(&options, "rtsp_transport", "tcp", 0);
    ffmpeg.av_dict_set(&options, "reconnect", "1", 0);
    ffmpeg.av_dict_set(&options, "max_delay", "0", 0);
    ffmpeg.av_dict_set(&options, "reorder_queue_size", "0", 0);
    // NOTE(review): skip_frame/skip_loop_filter are AVCodecContext (decoder)
    // options, not demuxer options -- passed here they are most likely left
    // unconsumed by avformat_open_input. Consider setting them on the decoder
    // context instead; inspect the leftover dictionary entries to confirm.
    ffmpeg.av_dict_set(&options, "skip_frame", "8", 0);
    ffmpeg.av_dict_set(&options, "skip_loop_filter", "48", 0);
    ffmpeg.av_dict_set(&options, "rtbufsize", "1500M", 0);

    if (!string.IsNullOrEmpty(_resolution))
    {
        // Only request a video_size the device actually advertises; otherwise
        // leave the device at its default mode.
        var resolution = DShow.GetConfiguredCameraResolution(_streamUrl, _resolution);
        if (resolution != null)
        {
            ffmpeg.av_dict_set(&options, "video_size", resolution.Resolution, 0);
        }
    }

    AVInputFormat* inputFormat = null;
    if (!string.IsNullOrEmpty(_format))
    {
        inputFormat = ffmpeg.av_find_input_format(_format);
        if (inputFormat == null)
        {
            // Fail loudly instead of silently opening with format auto-detection.
            ffmpeg.av_dict_free(&options);
            throw new InvalidOperationException($"Unknown input format '{_format}'.");
        }
    }

    AVFormatContext* pInputFmtCtx = ffmpeg.avformat_alloc_context();
    int ret = ffmpeg.avformat_open_input(&pInputFmtCtx, _streamUrl, inputFormat, &options);

    // avformat_open_input leaves any options it did not consume in the
    // dictionary; free it unconditionally to avoid leaking native memory.
    ffmpeg.av_dict_free(&options);

    if (ret != 0)
    {
        // On failure avformat_open_input frees the context and nulls the pointer,
        // so there is nothing further to clean up here.
        throw new InvalidOperationException(
            $"avformat_open_input failed for '{_streamUrl}' (error {ret}).");
    }
    return pInputFmtCtx;
}
Creating the decoder
// Locate and open a decoder for the selected video stream.
AVStream* videoStream = InputFormatContext->streams[VideoStreamIndex];
AVCodecParameters* videoCodecParams = videoStream->codecpar;

AVCodec* videoDecoder = ffmpeg.avcodec_find_decoder(videoCodecParams->codec_id);
if (videoDecoder == null)
{
    // Previously this condition was silently ignored and a null decoder was
    // passed on, deferring the failure to a less diagnosable point.
    throw new InvalidOperationException(
        $"No decoder found for codec id {videoCodecParams->codec_id}.");
}

VideoDecodeContext = ffmpeg.avcodec_alloc_context3(videoDecoder);

// Copy width/height/pix_fmt/extradata etc. from the stream parameters
// into the freshly allocated codec context.
if (ffmpeg.avcodec_parameters_to_context(VideoDecodeContext, videoCodecParams) < 0)
{
    throw new InvalidOperationException("avcodec_parameters_to_context failed for the video stream.");
}
if (ffmpeg.avcodec_open2(VideoDecodeContext, videoDecoder, null) < 0)
{
    throw new InvalidOperationException("avcodec_open2 failed for the video decoder.");
}
Creating the OutputFormatContext
/// <summary>
/// Allocates the output (muxer) context; the container format is inferred
/// from the extension of <c>_fileName</c>. The caller owns the returned
/// context and must free it with <c>avformat_free_context</c>.
/// </summary>
private AVFormatContext* CreateOutputContext()
{
    AVFormatContext* pOutputFmtCtx = null;
    if (ffmpeg.avformat_alloc_output_context2(&pOutputFmtCtx, null, null, _fileName) != 0)
    {
        // Previously failure was silently ignored and a null context returned.
        throw new InvalidOperationException(
            $"Could not create an output context for '{_fileName}'.");
    }
    return pOutputFmtCtx;
}
Creating the H264 encoder
/// <summary>
/// Configures the H.264 encoder for the given input/output stream pair.
/// Prefers Intel QuickSync (h264_qsv) and falls back to libx264 on AMD
/// CPUs or when the QSV encoder is unavailable in this ffmpeg build.
/// </summary>
/// <param name="inputStream">Source stream used to derive the frame rate.</param>
/// <param name="outputStream">Destination stream whose codecpar is filled in.</param>
private void CreateH264Encoder(AVStream* inputStream, AVStream* outputStream)
{
    AVRational framerate = ffmpeg.av_guess_frame_rate(
        _inputContext.InputFormatContext, inputStream, null);

    // QSV is Intel-only: only try it when the CPU is not an AMD part.
    // RegistryKey is IDisposable -- the original leaked the handle.
    AVCodec* videoEncoder = null;
    using (var key = Registry.LocalMachine.OpenSubKey(
        @"HARDWARE\DESCRIPTION\System\CentralProcessor\0\"))
    {
        var processorName = key?.GetValue("ProcessorNameString")?.ToString() ?? string.Empty;
        if (processorName.IndexOf("AMD", StringComparison.InvariantCultureIgnoreCase) == -1)
        {
            videoEncoder = ffmpeg.avcodec_find_encoder_by_name("h264_qsv");
        }
    }

    if (videoEncoder == null)
    {
        // Software fallback; libx264 wants planar YUV 4:2:0 input.
        videoEncoder = ffmpeg.avcodec_find_encoder_by_name("libx264");
        PixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;
    }
    if (videoEncoder == null)
    {
        throw new InvalidOperationException("No H.264 encoder (h264_qsv or libx264) is available.");
    }

    VideoEncodeContext = ffmpeg.avcodec_alloc_context3(videoEncoder);
    if (VideoEncodeContext == null)
    {
        throw new InvalidOperationException("avcodec_alloc_context3 failed for the H.264 encoder.");
    }

    VideoEncodeContext->width = _inputContext.VideoDecodeContext->width;
    VideoEncodeContext->height = _inputContext.VideoDecodeContext->height;
    VideoEncodeContext->pix_fmt = PixelFormat;
    VideoEncodeContext->bit_rate = H264_ENCODER_BIT_RATE;
    VideoEncodeContext->rc_buffer_size = H264_ENCODER_BUFFER_SIZE;
    VideoEncodeContext->rc_max_rate = H264_ENCODER_MAX_RATE;
    VideoEncodeContext->rc_min_rate = H264_ENCODER_MIN_RATE;
    VideoEncodeContext->framerate = framerate;
    VideoEncodeContext->max_b_frames = 0; // baseline profile forbids B-frames
    VideoEncodeContext->time_base = ffmpeg.av_inv_q(framerate);
    // Muxers like MP4 require extradata in codecpar rather than in-band.
    VideoEncodeContext->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;

    ffmpeg.av_opt_set(VideoEncodeContext->priv_data, "preset", "slow", 0);
    // BUG FIX: the libx264 private option is named "profile"; "vprofile" is
    // not recognised, so the baseline-profile request was silently ignored.
    ffmpeg.av_opt_set(VideoEncodeContext->priv_data, "profile", "baseline", 0);

    if (ffmpeg.avcodec_open2(VideoEncodeContext, videoEncoder, null) < 0)
    {
        throw new InvalidOperationException("avcodec_open2 failed for the H.264 encoder.");
    }

    // Publish the encoder parameters onto the output stream for the muxer.
    ffmpeg.avcodec_parameters_from_context(outputStream->codecpar, VideoEncodeContext);
}
Decoding/Encoding is the same as the latest api examples so I'll skip it.
Writing the frame to the file
// Remux the encoded packet into the output file.
AVStream* out_stream =
_outputContext.OutputFormatContext->streams[out_stream_index];
// Convert packet timestamps from the codec time base into the muxer's
// stream time base before writing.
// NOTE(review): `ctx->time_base` must be the time base the pts/dts were
// actually produced in (the encoder's for video packets). If audio packets
// are rescaled from the wrong source time base, the streams drift apart --
// a likely candidate for the one-second A/V offset described above. Confirm
// which context `ctx` refers to for each stream.
ffmpeg.av_packet_rescale_ts(avPacketPtr, ctx->time_base,
out_stream->time_base);
// Interleaved write lets libavformat reorder packets across streams by dts.
ret =
ffmpeg.av_interleaved_write_frame(_outputContext.OutputFormatContext,
avPacketPtr);
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <https://ffmpeg.org/pipermail/libav-user/attachments/20201217/a48cad3b/attachment.html>
More information about the Libav-user
mailing list