[Libav-user] Encode yuv to mp4 by h.264 codec using ffmpeg library
lekha mishra
mishra.ni.lekha at gmail.com
Fri Apr 8 08:04:33 CEST 2011
Hi all,
I am creating a Cocoa application. I am using the ffmpeg library.
In which I have to encode a yuv to mp4 using h264 codec.
I have searched a lot but could not find the actual solution for encoding.
There are the steps, I am following:
1. I take the .yuv file as input and decode it into frames.
2. Convert the frame to AVPicture.
3. Encode each AVPicture.
4. Write the encoded data to a file with an mp4 extension.
But my mp4 file is not playing in any player.
What am I missing?
Am I going the right direction?
This is my source code:
// Decodes video frames from an AVI file and re-encodes them, writing the
// result to "/Users/Shared/abc.mp4".
//
// NOTE(review): the encoded packets are fwrite()n directly to the output
// file — no muxer (libavformat output context / av_write_frame) is ever
// used. The result is a raw MPEG-4 elementary stream, not an MP4 container,
// which is why players reject the file despite the .mp4 extension.
//
// NOTE(review): many lines below were hard-wrapped by the mailing-list
// archiver mid-statement (e.g. the commented-out NSImage block further
// down), so this text does not compile exactly as pasted.
- (void)applicationDidFinishLaunching:(NSNotification *)aNotification {
int counter=1;  // unused
int videoStream,i;
av_register_all();  // legacy (pre-FFmpeg-4.0) global registration
AVFormatContext *pFormatCtx;
// NOTE(review): errors below are only logged; execution continues with an
// invalid context instead of returning.
if(av_open_input_file(&pFormatCtx, "/Users/Shared/test.avi", nil,0,
nil)!=0)
NSLog(@"could not open file");
if (av_find_stream_info(pFormatCtx)<0) {
NSLog(@"4444444444");
}
dump_format(pFormatCtx, 0, "/Users/Shared/test.avi", 0);
AVCodecContext *pCodecCtx;
// Find the first video stream
videoStream=-1;
for(i=0; i<pFormatCtx->nb_streams; i++)
if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO) {
videoStream=i;
break;
}
if(videoStream==-1)
NSLog(@"4444444444"); // Didn't find a video stream
// Get a pointer to the codec context for the video stream
pCodecCtx=pFormatCtx->streams[videoStream]->codec;
AVCodec *pCodec;
// Find the decoder for the video stream
pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
if(pCodec==NULL) {
fprintf(stderr, "Unsupported codec!\n");
NSLog(@"4444444444"); // Codec not found
}
// Open codec
if(avcodec_open(pCodecCtx, pCodec)<0)
NSLog(@"4444444444"); // Could not open codec
AVFrame *pFrame;
// Allocate video frame
pFrame=avcodec_alloc_frame();
int frameFinished;
AVPacket packet;
//=============================================================================
//==================================================================
i=0;
//======================Lekha
// --- Encoder setup ---------------------------------------------------------
char *filename = "/Users/Shared/abc.mp4";
AVCodec *codec;
//AVPicture *avPicture;
AVCodecContext *c= NULL;
int out_size, size, outbuf_size;
FILE *f;
AVFrame *picture;
uint8_t *outbuf;
printf("Video encoding\n");
/* find the mpeg video encoder */
// NOTE(review): CODEC_ID_MPEG4 selects the MPEG-4 Part 2 encoder, not
// H.264 — contradicts the stated goal of H.264 output.
codec = avcodec_find_encoder(CODEC_ID_MPEG4);
if (!codec) {
fprintf(stderr, "codec not found\n");
exit(1);
}
c= avcodec_alloc_context();
picture= avcodec_alloc_frame();
/* put sample parameters */
c->bit_rate = 346000;
/* resolution must be a multiple of two */
// NOTE(review): output is hard-coded to 640x480 regardless of the input
// stream's actual dimensions (pCodecCtx->width/height).
c->width = 640;
c->height = 480;
/* frames per second */
c->time_base= (AVRational){1,25};
c->gop_size = 10; /* emit one intra frame every ten frames */
c->max_b_frames=1;
c->pix_fmt = PIX_FMT_YUV420P;
/* open it */
if (avcodec_open(c, codec) < 0) {
fprintf(stderr, "could not open codec\n");
exit(1);
}
f = fopen(filename, "wb");
if (!f) {
fprintf(stderr, "could not open %s\n", filename);
exit(1);
}
/* alloc image and output buffer */
outbuf_size = 930000;  // arbitrary size; assumed large enough for one frame
outbuf = malloc(outbuf_size);
size = c->width * c->height;
#pragma mark -
AVFrame* outpic = avcodec_alloc_frame();
int nbytes = avpicture_get_size(PIX_FMT_YUV420P, c->width, c->height);
//create buffer for the output image
uint8_t* outbuffer = (uint8_t*)av_malloc(nbytes);
// --- Decode/encode loop ----------------------------------------------------
while(av_read_frame(pFormatCtx, &packet)>=0) {
// Is this a packet from the video stream?
if(packet.stream_index==videoStream) {
// Decode video frame
NSLog(@"decoding start-------------");
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished,
&packet);
// Did we get a video frame?
if(frameFinished) {
// NOTE(review): this loop runs exactly once (i<1); it also clobbers the
// outer loop index i, so the "encoding frame %3d" log always prints 0.
for(i=0;i<1;i++)
{
fflush(stdout);
int numBytes = avpicture_get_size(PIX_FMT_YUV420P,
pCodecCtx->width, pCodecCtx->height);
// NOTE(review): this av_malloc'd buffer is leaked — `buffer` is
// reassigned to CFData-owned bytes below before it is ever used.
uint8_t *buffer = (uint8_t
*)av_malloc(numBytes*sizeof(uint8_t));
//NSImage *image = [[NSImage
alloc]initWithContentsOfFile:[NSString
stringWithFormat:@"/Users/Shared/frame%d.ppm",
i]];
int outPutWidth = c->width;
int outPutHeight = c->height;
AVPicture pict;
// NOTE(review): pict is only allocated, never filled from the decoded
// pFrame — no sws_scale with img_convert_ctx is ever performed, so
// imageFromAVPicture: below presumably receives uninitialized pixel
// data. Verify against the (not shown) imageFromAVPicture: helper.
avpicture_alloc(&pict, PIX_FMT_RGB24, outPutWidth,
outPutHeight);
// Setup scaler
static int sws_flags = SWS_FAST_BILINEAR;
// NOTE(review): img_convert_ctx has no visible declaration here
// (presumably an ivar); the context is created anew every frame, its
// result is never used, and it is never sws_freeContext'd — a leak.
img_convert_ctx = sws_getContext(c->width,
c->height,
c->pix_fmt,
outPutWidth,
outPutHeight,
PIX_FMT_RGB24,
sws_flags, NULL, NULL,
NULL);
// NOTE(review): round-trip AVPicture -> CGImage -> NSImage -> TIFF ->
// CGImage -> raw bytes; none of these CF/CG objects (newCgImage1,
// image, source, newCgImage, bitmapData) is released — per-frame leaks.
CGImageRef newCgImage1 = [self
imageFromAVPicture:(AVPicture)pict width:outPutWidth height:outPutHeight];
NSImage *image = [[NSImage alloc]
initWithCGImage:newCgImage1 size:NSMakeSize(outPutWidth, outPutHeight)];
CGImageSourceRef source;
source = CGImageSourceCreateWithData((CFDataRef)[image
TIFFRepresentation], NULL);
CGImageRef newCgImage =
CGImageSourceCreateImageAtIndex(source, 0, NULL);
CGDataProviderRef dataProvider =
CGImageGetDataProvider(newCgImage);
CFDataRef bitmapData =
CGDataProviderCopyData(dataProvider);
// NOTE(review): buffer now aliases memory owned by bitmapData; the
// free(buffer) below frees CFData's internal storage — undefined
// behavior. Use CFRelease(bitmapData) instead.
buffer = (uint8_t *)CFDataGetBytePtr(bitmapData);
// NOTE(review): the picture was built as RGB24 above, but is described
// to swscale as PIX_FMT_RGB8 here and in fooContext — a pixel-format
// mismatch; the bitmap's actual layout is whatever CGImage produced
// (not shown), so the conversion input is almost certainly wrong.
avpicture_fill((AVPicture*)picture, buffer,
PIX_FMT_RGB8, c->width, c->height);
avpicture_fill((AVPicture*)outpic, outbuffer,
PIX_FMT_YUV420P, c->width, c->height);
// NOTE(review): a second SwsContext per frame, also never freed.
struct SwsContext* fooContext = sws_getContext(c->width,
c->height,
PIX_FMT_RGB8,
pCodecCtx->width, pCodecCtx->height,
PIX_FMT_YUV420P,
SWS_FAST_BILINEAR, NULL, NULL, NULL);
//perform the conversion
sws_scale(fooContext, picture->data, picture->linesize,
0, c->height, outpic->data, outpic->linesize);
// Here is where I try to convert to YUV
/* encode the image */
// NOTE(review): outpic has no pts set; raw encoder output is written
// straight to the file with no container muxing.
out_size = avcodec_encode_video(c, outbuf, outbuf_size,
outpic);
printf("encoding frame %3d (size=%5d)\n", i, out_size);
fwrite(outbuf, 1, out_size, f);
free(buffer);  // NOTE(review): frees CFData-owned memory — see above
buffer = NULL;
//FILE *pFile;
// char szFilename[32];
// int y;
//
// // Open file
// sprintf(szFilename, "/Users/Shared/frame%d.ppm", i);
// pFile=fopen(szFilename, "wb");
// if(pFile==NULL)
// return;
//
// // Write header
// fprintf(pFile, "P6\n%d %d\n255\n",640,480);
//
// // Write pixel data
// for(y=0; y<480; y++)
// fwrite(pFrame->data[0]+y*pFrame->linesize[0], 1,
640*3, pFile);
//
// // Close file
// fclose(pFile);
}
}
}
// NOTE(review): packet is never released (av_free_packet) — leaks every
// demuxed packet.
}
/* get the delayed frames */
// NOTE(review): the delayed-frame flush below is commented out, so frames
// still buffered in the encoder (max_b_frames=1) are dropped.
//for(; out_size; i++) {
// fflush(stdout);
//
// out_size = avcodec_encode_video(c, outbuf, outbuf_size, NULL);
// printf("write frame %3d (size=%5d)\n", i, out_size);
// fwrite(outbuf, 1, outbuf_size, f);
// }
/* add sequence end code to have a real mpeg file */
// NOTE(review): 0x000001B7 is the MPEG-1/2 sequence end code, copied from
// the MPEG-1 encoding example; it does not make this MPEG-4 data an MP4.
outbuf[0] = 0x00;
outbuf[1] = 0x00;
outbuf[2] = 0x01;
outbuf[3] = 0xb7;
fwrite(outbuf, 1, 4, f);
fclose(f);
free(outbuf);
avcodec_close(c);
av_free(c);
av_free(picture);
// NOTE(review): outpic, outbuffer, pFrame, and the decoder/format contexts
// are never freed/closed here.
printf("\n");
NSLog(@"finished");
}
That is my code.
If there is any sample code/ tutorial related to same topic then please
help.
Thanks.
--
Lekha Mishra
New Generation Application Pvt. Ltd
Software Developer
90444149852
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://ffmpeg.org/pipermail/libav-user/attachments/20110408/8bbd6cea/attachment.html>
More information about the Libav-user
mailing list