I need to play RTSP video from an IP camera. If I use ffplay or VLC, everything is fine, but if I use the FFmpeg API (C#, FFmpeg.AutoGen), artifacts appear on the frames. What could be wrong with my use of the FFmpeg API?
UPDATE
On Christoph's advice, I tried replacing UDP with TCP for ffplay.
Both TCP and UDP give a delay of 3-4 seconds (my application gives a delay of less than one second).
Both TCP and UDP work without artifacts.
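The ffplay command for the TCP test was roughly the following (the camera URL is a placeholder); for the UDP test I ran the same command without the -rtsp_transport option:
ffplay -rtsp_transport tcp rtsp://<camera-address>/<stream>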
When UDP is used, there are error messages at the beginning:
max delay reached. need to consume packet
RTP: missed 436 packets
Invalid level prefix
error while decoding MB 17 28
These errors appear only at the beginning; after that there are none. When TCP is used, no errors are reported at all.
The discardcorrupt flag in my application does not fix the artifacts. If I use TCP in my application (the rtsp_transport=tcp option), the artifacts disappear. That is good, but I would rather not put extra load on the network because of TCP, and TCP may also add video delay.
How do ffplay and VLC manage to work over UDP without artifacts? Why can't my code do this through the API?
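For reference, the TCP variant of my code looks roughly like this (a sketch of my own setup; formatOpts is an AVDictionary* created just for the open call, the other names come from the code below):
AVDictionary* formatOpts = null;
ffmpeg.av_dict_set(&formatOpts, "rtsp_transport", "tcp", 0);   // switch the RTP transport from UDP to TCP
pFormatContext->flags |= ffmpeg.AVFMT_FLAG_DISCARD_CORRUPT;    // the discardcorrupt flag mentioned above
error = ffmpeg.avformat_open_input(&pFormatContext, rtspUrl, null, &formatOpts);
ffmpeg.av_dict_free(&formatOpts);
The full code of my reader (UDP, default options) is below.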
// Open the RTSP input
AVFormatContext* pFormatContext = ffmpeg.avformat_alloc_context();
int error;
error = ffmpeg.avformat_open_input(&pFormatContext, rtspUrl, null, null);
if (error != 0)
throw new ApplicationException(Helpers.GetErrorMessage(error));
error = ffmpeg.avformat_find_stream_info(pFormatContext, null);
if (error != 0)
throw new ApplicationException(Helpers.GetErrorMessage(error));
// Find the video stream
AVStream* pStream = null;
for (var i = 0; i < pFormatContext->nb_streams; i++)
{
if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
{
pStream = pFormatContext->streams[i];
}
}
if (pStream == null)
throw new ApplicationException(@"Could not found video stream.");
AVCodecContext codecContext = *pStream->codec;
int width = codecContext.width;
int height = codecContext.height;
AVPixelFormat sourcePixFmt = codecContext.pix_fmt;
AVCodecID codecId = codecContext.codec_id;
AVPixelFormat destinationPixFmt = AVPixelFormat.AV_PIX_FMT_BGR24;
if (sourcePixFmt == AVPixelFormat.AV_PIX_FMT_NONE && codecId == AVCodecID.AV_CODEC_ID_H264)
{
sourcePixFmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
}
// Set up the YUV -> BGR24 conversion context
SwsContext* pConvertContext = ffmpeg.sws_getContext(width, height, sourcePixFmt, width, height, destinationPixFmt, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
if (pConvertContext == null)
throw new ApplicationException(@"Could not initialize the conversion context.");
AVFrame* pConvertedFrame = ffmpeg.av_frame_alloc();
int convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, width, height, 1);
IntPtr convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
byte_ptrArray4 dstData = new byte_ptrArray4();
int_array4 dstLinesize = new int_array4();
ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte*)convertedFrameBufferPtr, destinationPixFmt, width, height, 1);
WorkEnabled = true;
// Find and open the decoder
AVCodec* pCodec = ffmpeg.avcodec_find_decoder(codecId);
if (pCodec == null)
throw new ApplicationException(@"Unsupported codec.");
AVCodecContext* pCodecContext = &codecContext;
if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
{
pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;
}
error = ffmpeg.avcodec_open2(pCodecContext, pCodec, null);
if (error < 0)
throw new ApplicationException(Helpers.GetErrorMessage(error));
AVFrame* pDecodedFrame = ffmpeg.av_frame_alloc();
AVPacket packet = new AVPacket();
AVPacket* pPacket = &packet;
ffmpeg.av_init_packet(pPacket);
Bitmap bitmap = null;
Bitmap previousBitmap = null;
// Read packets, decode frames, convert them to BGR24 and display
while (WorkEnabled)
{
try
{
do
{
error = ffmpeg.av_read_frame(pFormatContext, pPacket);
if (error == ffmpeg.AVERROR_EOF)
{
Thread.Sleep(1000);
break;
}
if (error < 0)
throw new ApplicationException(Helpers.GetErrorMessage(error));
if (pPacket->stream_index != pStream->index) continue;
error = ffmpeg.avcodec_send_packet(pCodecContext, pPacket);
if (error < 0)
throw new ApplicationException(Helpers.GetErrorMessage(error));
error = ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame);
} while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN) && WorkEnabled);
if (error == ffmpeg.AVERROR_EOF)
{
Thread.Sleep(1000);
continue;
}
if (error < 0)
throw new ApplicationException(Helpers.GetErrorMessage(error));
if (pPacket->stream_index != pStream->index) continue;
// Convert the decoded frame (YUV) to BGR24
ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, height, dstData, dstLinesize);
}
catch (Exception)
{}
finally
{
ffmpeg.av_packet_unref(pPacket);
ffmpeg.av_frame_unref(pDecodedFrame);
}
bitmap = new Bitmap(width, height, dstLinesize[0], PixelFormat.Format24bppRgb, convertedFrameBufferPtr);
previousBitmap = bitmap;
System.Windows.Application.Current.Dispatcher.Invoke(() => _pictureBox.Image = bitmap);
}
1 Answer
A similar problem was discussed here and here. It turned out that for UDP the receive buffer has to be enlarged: the buffer_size option sets the UDP socket receive buffer, so packets are not dropped while the application is busy decoding.
ffmpeg.av_dict_set(&formatOpts, "rtsp_transport", "udp", 0);
ffmpeg.av_dict_set(&formatOpts, "buffer_size", "200000", 0);
ffmpeg.avformat_open_input(&pFormatContext, url, null, &formatOpts);
And the artifacts disappeared!
codec_ctx->err_recognition |= AV_EF_EXPLODE; // Discard corrupted frames codec_ctx->flags2 |= AV_CODEC_FLAG2_DROP_FRAME_TIMECODE; // Drop corrupted frames