[FFmpeg-user] Unable to see the clear H264 frames after few frames.

Keestu Pillo get2jils at gmail.com
Tue Feb 4 16:29:16 CET 2014


Hello all,

I have been trying to capture rtsp streaming data in the ffmpeg, and trying
the below code. it displays the frame properly for few seconds and after
than the the picture is not clear. At the first stage, the intention is
atleast to capture the frame and display periodically.

NOTE I AM GETTING YUV FORMAT AS AN INPUT FRAME.

I am very much new to this concept. :)

#include <stdio.h>
#include <time.h>

#include <android/log.h>

#include <libavcodec/avcodec.h>

#include <libavformat/avformat.h>
#include <libavformat/avio.h>
#include <libswscale/swscale.h>
#include <libavutil/avstring.h>
#include <libavfilter/avfilter.h>

#include "libavutil/avstring.h"
#include "libavutil/colorspace.h"
#include "libavutil/mathematics.h"
#include "libavutil/pixdesc.h"
#include "libavutil/imgutils.h"
#include "libavutil/dict.h"
#include "libavutil/parseutils.h"
#include "libavutil/samplefmt.h"
#include "libavutil/avassert.h"
#include "libavformat/avformat.h"
#include "libavdevice/avdevice.h"
#include "libswscale/swscale.h"
#include "libavutil/opt.h"
#include "libavcodec/avfft.h"
#include "libswresample/swresample.h"


#define  LOG_TAG    "your-log-tag"

#define  LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG,
__VA_ARGS__)
#define  LOGE(...)  __android_log_print(ANDROID_LOG_ERROR, LOG_TAG,
__VA_ARGS__)

#include "SDL.h"


static AVPacket flush_pkt;
AVDictionary *codec_opts ;
AVPacket packet;

int i,videoStreamIndex, finished;
int rettexture;
int retcopy, retpresent;

uint8_t *picture_buf_yuv, *picture_buf_rgb;
AVFormatContext* context;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVCodec *h264Codec;
AVFrame  *pFrame, *pFrameRGB;
AVPicture *pFrameYUV;
static struct SWsContext *img_convert_ctx_yuv420p;
static struct SWsContext *img_convert_ctx_rgb;

SDL_Window *window;
SDL_Renderer *renderer;
SDL_RendererInfo  rendererInfo;
SDL_Texture   *textureRGB, *textureYUV;
struct SDL_Surface * screen;
SDL_mutex *mutex;

struct SwsContext *img_convert_ctx1 =NULL;
SDL_Texture *texture1;
struct SwsContext *img_convert_ctx;
AVCodec *codec;
int noOfStreams;

int ffmpeg_register_all ()
{
av_register_all();
avcodec_register_all();
avdevice_register_all ();
avfilter_register_all ();
avformat_network_init();

}

static int read_thread(void *arg)
{
//pFrame = av_frame_alloc ();
av_free_packet (&packet);
while (av_read_frame (context, &packet) >=0)
{
LOGD (" Received PACKET....i");

if(packet.stream_index == videoStreamIndex ) {
LOGD (" Becore decoding pCodecCtx w : H  %d, %d  and format is %d ",
pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt);

avcodec_decode_video2 (pCodecCtx, pFrame, &finished, &packet);
LOGD (" After decoding pCodecCtx w : H  %d, %d ", pCodecCtx->width,
pCodecCtx->height);

if ( finished) {
img_convert_ctx_rgb = sws_getContext (1280, 720, PIX_FMT_RGB24, 1280, 720,
PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
img_convert_ctx_yuv420p = sws_getContext (1280, 720, pCodecCtx->pix_fmt,
1280, 720, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);
if ( img_convert_ctx_rgb == NULL || img_convert_ctx_yuv420p == NULL)
{
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "Conversion. Context
Creation Failed... %s " , SDL_GetError());
return 0;
}

  //sws_scale (img_convert_ctx_rgb, (const uint8_t* const*)pFrame->data,
 pFrame->linesize, 0, 720, pFrameRGB->data, pFrameRGB->linesize);
  sws_scale (img_convert_ctx_yuv420p, (const uint8_t* const*)pFrame->data,
 pFrame->linesize, 0, 720, pFrameYUV->data, pFrameYUV->linesize);
// rettexture = SDL_UpdateYUVTexture(textureYUV, NULL, pFrameYUV->data[0],
pFrameYUV->linesize[0], pFrameYUV->data[1], pFrameYUV->linesize[1],
pFrameYUV->data[2], pFrameYUV->linesize[2]);
rettexture = SDL_UpdateYUVTexture(textureYUV, NULL, pFrame->data[0],
pFrame->linesize[0], pFrame->data[1], pFrame->linesize[1], pFrame->data[2],
pFrame->linesize[2]);
//av_frame_free (pFrame);  // Is it required?.
//av_free_packet(packet);
 break ;
}  // Received Complete Frame..

}  // Video Stream Index.
} // While
}


int main(int argc, char *argv[])
{

int flags;
flags = SDL_INIT_VIDEO | SDL_INIT_TIMER;
if (SDL_Init (flags)) {
LOGD ("Could not intialize Video for SDL: %s \n", SDL_GetError());
}
else
LOGD (" SUCCESS: SDL_Init ");

// ffmpeg Register all services..
ffmpeg_register_all ();
/*
if(SDL_CreateWindowAndRenderer(0, 0, 0, &window, &renderer) < 0)
exit(2);
*/

// window = SDL_CreateWindow ("Test ffmpeg",SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED, 800, 480,
SDL_WINDOW_SHOWN|SDL_WINDOW_ALLOW_HIGHDPI);
window = SDL_CreateWindow ("Test ffmpeg",SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED, 1280, 720,
SDL_WINDOW_SHOWN|SDL_WINDOW_ALLOW_HIGHDPI);
// What this HIGHDPI Means ??

if ( window != NULL )
{
LOGD (" WINDOW CREATED.. , create Renderer ..");
renderer = SDL_CreateRenderer (window, -1, 0);
}
else
{
LOGD (" Invalid SDL Window ");
}


mutex = SDL_CreateMutex ();
   if ( !mutex)
   {
LOGD ("Unable to Create Mutex ");
   }



__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "SDL Thread Started..
New Read Thread.. ");



pFrame = avcodec_alloc_frame ();
context = avformat_alloc_context();

if (context != NULL )
{
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard",
"avformat_alloc_context().. Allocated ");

} else {
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard",
"avformat_alloc_context().. NOT ALLOCATED... ");
}


int err;
err = avformat_open_input (&context, "rtsp://<ip>:Port", NULL, NULL);
if ( err < 0) {
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "Unable to open
rtsp... ");
 return -1;
}

for (i = 0; i < context->nb_streams; i++)
{
// Find the Decoder.
codec = avcodec_find_decoder(context->streams[i]->codec->codec_id);
if (codec->type  == AVMEDIA_TYPE_VIDEO ) {
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "Found Video
Streaming..  ");
videoStreamIndex = i;

}
}

// Play RTSP
av_read_play(context);

// Get Codec Context.
pCodecCtx = context->streams[videoStreamIndex]->codec;
if ( pCodecCtx == NULL )
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "HoHO... CodecCtx is
NULL>>> ");
else
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "HoHO... CodecCtx is
<<<OK>>> ");


//Find the Decoder.
pCodec = avcodec_find_decoder (pCodecCtx->codec_id);
if (pCodec == NULL) {
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "Oops Not Found the
decorder of Codec Context. ");
} else {
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "Ok. Found the
decoder of Video Stream. ");
}

// Ofcourse Open the decoder.
if ( avcodec_open2 (pCodecCtx, pCodec, NULL) < 0) {
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "aiyooo.. unable to
oopen decoder. ");
} else {
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "sariisarii opened
the decoder..");
}


pFrameYUV = avcodec_alloc_frame ();
pFrameRGB = avcodec_alloc_frame ();


// Allocation to keep raw data ..
int  numBytes_yuv, numBytes_rgb;
numBytes_yuv  = avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width,
pCodecCtx->height);
numBytes_rgb  = avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width,
pCodecCtx->height);

picture_buf_yuv =  (uint8_t*)(av_malloc(numBytes_yuv));
picture_buf_rgb =  (uint8_t*)(av_malloc(numBytes_rgb));

int retCode;
retCode = avpicture_fill((AVPicture *)pFrameYUV, picture_buf_yuv,
PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
retCode = avpicture_fill((AVPicture *)pFrameRGB, picture_buf_rgb,
PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);

// Let's Read the packet

av_init_packet(&packet);

int w = pCodecCtx->width;  // Why me getting 0 ?
int h = pCodecCtx->height;

  __android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "Width and Height
of PCodeccCtx.. %d .. %d " , w, h);


//texture1 = SDL_CreateTexture  (renderer, SDL_PIXELFORMAT_IYUV,
SDL_TEXTUREACCESS_STREAMING, pCodecCtx->width, pCodecCtx->height);
textureYUV = SDL_CreateTexture  (renderer, SDL_PIXELFORMAT_IYUV,
SDL_TEXTUREACCESS_STREAMING,1280, 720);
textureRGB = SDL_CreateTexture  (renderer,SDL_PIXELFORMAT_RGB24,
SDL_TEXTUREACCESS_STREAMING,1280, 720);
if ( textureRGB == NULL || textureYUV == NULL) {
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "BIG PROBLEM...
TEXTURE1 IS FAILED..I %s " , SDL_GetError());
return -1;
}
else
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", " SUCCECSS...... ");


//SDL_CreateThread(read_thread, "asdf", (void*)NULL);
read_thread(NULL);

/* Main render loop */
Uint8 done = 0;
SDL_Event event;
while(!done)
{
read_thread(NULL);
/* Check for events */

while(SDL_PollEvent(&event))
{
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "EVENT...
POLLING..");
if(event.type == SDL_QUIT || event.type == SDL_KEYDOWN || event.type ==
SDL_FINGERDOWN)
{
done = 1;
}
}
SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255);
SDL_RenderClear(renderer);
 retcopy = SDL_RenderCopy(renderer, textureYUV, NULL, NULL);
__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "HOOK 3");

__android_log_print(ANDROID_LOG_DEBUG, "ffmpegguard", "Return value of
SDL_RenderCopy.. %d ", retcopy);
SDL_RenderPresent(renderer);
SDL_Delay (500);
}

exit(0);
}


More information about the ffmpeg-user mailing list