四、iOS-FFmpeg编码
概要:就是上一章解码的一个逆过程:yuv420编码为H264格式。
代码仓库:https://github.com/wulang150/FFmpegTest.git
全部代码:
#import "EncoderViewController.h"
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>
#include <libavutil/imgutils.h>
@interface EncoderViewController ()
{
// Reusable packet that receives encoder output. It must survive across
// repeated calls to yuv420ToH264:... (the drain loop reuses it), so it is
// an ivar rather than a local — see the note in the article below the code.
AVPacket *pkt;
// Single reusable frame that readYUV:... fills with one YUV420P picture
// at a time before handing it to the encoder.
AVFrame *frame;
// Encoder context configured by findEncoder: (H.264, bitrate, timebase…).
AVCodecContext *codecCtx;
// AVCodec *codec;
// Running total of encoded bytes written, reported at the end of mainFunc.
NSInteger allSize;
}
@end
@implementation EncoderViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    // Minimal appearance setup: screen title "编码" (Encode) on a white background.
    self.title = @"编码";
    self.view.backgroundColor = [UIColor whiteColor];
}
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    // Fix: forward the event up the responder chain first, as UIKit expects
    // when a touch is not fully consumed by this view controller.
    [super touchesBegan:touches withEvent:event];
    // Any tap on the view starts the encode demo.
    [self mainFunc];
}
// Drives the whole demo: opens the output file, allocates the packet/frame/
// encoder ivars, encodes every YUV420P picture read from disk into H.264,
// flushes the encoder, then releases everything.
- (void)mainFunc{
    // NOTE: av_register_all()/avcodec_register_all() are deprecated no-ops
    // since FFmpeg 4.0; kept for compatibility with older FFmpeg builds.
    av_register_all();
    avcodec_register_all();

    // Destination file for the raw H.264 (Annex B) bitstream.
    NSString *filePath = [CommonFunc getDefaultPath:@"movie.h264"];
    const char *filename = [filePath cStringUsingEncoding:NSASCIIStringEncoding];
    FILE *f = fopen(filename, "wb");
    if (!f) {
        fprintf(stderr, "Could not open %s\n", filename);
        return; // fix: was exit(1) — never terminate an iOS app over a recoverable I/O error
    }

    // Resolution of the raw YUV420P input; must match the source .yuv file.
    int width = 1600, height = 1200;

    pkt = av_packet_alloc();
    frame = (pkt != NULL) ? alloc_picture(AV_PIX_FMT_YUV420P, width, height) : NULL;
    codecCtx = (frame != NULL) ? [self findEncoder:frame] : NULL;
    if (pkt == NULL || frame == NULL || codecCtx == NULL) {
        // Fix: the original leaked the FILE* (and any already-made allocations)
        // on these failure paths. The av_*_free functions are NULL-safe.
        fclose(f);
        avcodec_free_context(&codecCtx);
        av_frame_free(&frame);
        av_packet_free(&pkt);
        return;
    }

    // Encode every frame read from disk, appending each packet to the file.
    [self readYUV:width height:height callback:^(AVFrame *tframe) {
        [self yuv420ToH264:tframe codeCtx:self->codecCtx callBack:^(AVPacket *pkt) {
            fwrite(pkt->data, 1, pkt->size, f);
        }];
    }];
    NSLog(@">>>>>begin!!!!!");
    // Passing NULL puts the encoder into flush mode so buffered packets drain.
    [self yuv420ToH264:NULL codeCtx:codecCtx callBack:^(AVPacket *pkt) {
        fwrite(pkt->data, 1, pkt->size, f);
    }];
    NSLog(@">>>>>end allSize=%.2fKB",allSize/1024.0);

    fclose(f);
    avcodec_free_context(&codecCtx);
    av_frame_free(&frame);
    av_packet_free(&pkt);
}
// Reads consecutive YUV420P pictures from the documents-directory .yuv file,
// copies each into the `frame` ivar (plane by plane), stamps an increasing
// pts, and hands the frame to `callBack` for encoding.
- (void)readYUV:(int)width height:(int)height callback:(void(^)(AVFrame *tframe))callBack{
    NSString *yuvPath = [CommonFunc getDocumentWithFile:@"11_23_07_movie.yuv"];
    if(![[NSFileManager defaultManager] fileExistsAtPath:yuvPath]){
        NSLog(@"file error!");
        return;
    }
    // Fix: open read-only ("rb+" was unnecessary) and NULL-check the handle —
    // the original passed an unchecked fp straight into fread.
    FILE *fp = fopen([yuvPath UTF8String], "rb");
    if (fp == NULL) {
        NSLog(@"file error!");
        return;
    }
    const size_t lumaSize = (size_t)width * (size_t)height;   // Y plane bytes
    const size_t chromaSize = lumaSize / 4;                   // U or V plane bytes
    const size_t pictureSize = lumaSize + 2 * chromaSize;     // one full YUV420P picture
    unsigned char *pic = (unsigned char *)malloc(pictureSize);
    if (pic == NULL) {
        fclose(fp);
        return;
    }
    const int chromaWidth = width / 2;
    const int chromaHeight = height / 2;
    int frameIndex = 0;
    while (fread(pic, 1, pictureSize, fp) == pictureSize) {
        // Fix: copy row by row honoring frame->linesize. av_frame_get_buffer
        // (32-byte alignment) may pad each row beyond `width`, in which case a
        // single bulk memcpy would scramble the picture.
        const unsigned char *ySrc = pic;
        const unsigned char *uSrc = pic + lumaSize;
        const unsigned char *vSrc = uSrc + chromaSize;
        for (int row = 0; row < height; row++) {
            memcpy(frame->data[0] + (size_t)row * frame->linesize[0],
                   ySrc + (size_t)row * width, width);
        }
        for (int row = 0; row < chromaHeight; row++) {
            memcpy(frame->data[1] + (size_t)row * frame->linesize[1],
                   uSrc + (size_t)row * chromaWidth, chromaWidth);
            memcpy(frame->data[2] + (size_t)row * frame->linesize[2],
                   vSrc + (size_t)row * chromaWidth, chromaWidth);
        }
        // Monotonically increasing pts in units of the encoder timebase (1/15 s).
        frame->pts = frameIndex++;
        if(callBack){
            callBack(frame);
        }
    }
    free(pic);
    fclose(fp);
}
/* Allocates an AVFrame plus its pixel buffers for the given format and size.
 * Returns NULL on failure; the caller owns the result (av_frame_free).
 * Buffers are requested with 32-byte alignment, so linesize[] may exceed
 * width — consumers must copy per-row using linesize. */
AVFrame *alloc_picture(enum AVPixelFormat pix_fmt, int width, int height)
{
    AVFrame *picture = av_frame_alloc();
    if (!picture)
        return NULL;
    picture->format = pix_fmt;
    picture->width = width;
    picture->height = height;
    /* allocate the buffers for the frame data */
    int ret = av_frame_get_buffer(picture, 32);
    if (ret < 0) {
        fprintf(stderr, "Could not allocate frame data.\n");
        av_frame_free(&picture); /* fix: the frame struct itself was leaked here */
        return NULL;
    }
    return picture;
}
//找到生产对应的编码器
// Creates and opens an H.264 encoder context matching the given frame's
// dimensions and pixel format. Returns NULL on any failure; on success the
// caller owns the context (avcodec_free_context).
- (AVCodecContext *)findEncoder:(AVFrame *)frame{
    AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!codec) {
        fprintf(stderr, "Codec not found\n");
        return NULL;
    }
    AVCodecContext *c = avcodec_alloc_context3(codec);
    if (!c) {
        fprintf(stderr, "Could not allocate video codec context\n");
        return NULL;
    }
    /* put sample parameters */
    c->bit_rate = 900000;
    /* resolution must be a multiple of two */
    c->width = frame->width;
    c->height = frame->height;
    /* frames per second: pts is in 1/15 s units, matching readYUV's counter */
    c->time_base = (AVRational){1, 15};
    /* emit one intra frame every `gop_size` frames
     * check frame pict_type before passing frame
     * to encoder, if frame->pict_type is AV_PICTURE_TYPE_I
     * then gop_size is ignored and the output of encoder
     * will always be I frame irrespective to gop_size
     */
    c->gop_size = 30;
    c->max_b_frames = 0; /* no B-frames keeps pts == dts for this demo */
    /* AVFrame.format is declared int; cast to silence the implicit-enum warning */
    c->pix_fmt = (enum AVPixelFormat)frame->format;
    /* open it */
    int ret = avcodec_open2(c, codec, NULL);
    if (ret < 0) {
        fprintf(stderr, "Could not open codec: %s\n", av_err2str(ret));
        avcodec_free_context(&c); /* fix: the context was leaked on this path */
        return NULL;
    }
    return c;
}
// Feeds one frame into the encoder (or NULL to flush) and drains every packet
// the encoder has ready, passing each to `callBack` before unref'ing it.
// Uses the `pkt` ivar as the reusable receive buffer.
- (void)yuv420ToH264:(AVFrame *)frame codeCtx:(AVCodecContext *)codecCtx callBack:(void(^)(AVPacket *enPkt))callBack{
    if (codecCtx == NULL) {
        return;
    }
    /* send the frame to the encoder */
    int status = avcodec_send_frame(codecCtx, frame);
    if (status < 0) {
        fprintf(stderr, "Error sending a frame for encoding\n");
        return;
    }
    if (frame != NULL) {
        printf("Send frame %3"PRId64"\n", frame->pts);
    }
    /* drain: keep receiving until the encoder needs more input (EAGAIN),
     * is fully flushed (EOF), or reports a real error */
    for (;;) {
        status = avcodec_receive_packet(codecCtx, pkt);
        if (status == AVERROR(EAGAIN) || status == AVERROR_EOF) {
            return;
        }
        if (status < 0) {
            fprintf(stderr, "Error during encoding\n");
            return;
        }
        printf("Write packet pts=%lld dts=%lld (size=%5d)\n", pkt->pts, pkt->dts, pkt->size);
        allSize += pkt->size;
        if (callBack) {
            callBack(pkt);
        }
        av_packet_unref(pkt);
    }
}
@end
上一篇,我们得到了解码后的yuv420格式的文件,这次就得去读取那个文件,然后进行编码。
一、yuv420读取为AVFrame
FILE *fp=fopen([yuvPath UTF8String],"rb+");
unsigned char *pic=(unsigned char *)malloc(width*height*3/2);
int i = 0;
while (true)
{
unsigned long ret = fread(pic,1,width*height*3/2,fp);
if(ret<width*height*3/2){
break;
}
memcpy(frame->data[0], pic, width*height);
// frame->linesize[0] = width;
memcpy(frame->data[1], pic+width*height, width*height/4);
// frame->linesize[1] = width/2;
memcpy(frame->data[2], pic+width*height*5/4, width*height/4);
// frame->linesize[2] = width/2;
frame->pts = i++;
if(callBack){
callBack(frame);
}
}
上面可以看到,就是读取每一个yuv图片数据,然后分别写入
AVFrame->data[0](Y)
AVFrame->data[1](U)
AVFrame->data[2](V)
然后自加AVFrame->pts
二、初始化编码器
//得到编码器
AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
//得到编码上下文
c = avcodec_alloc_context3(codec);
//设置码流
c->bit_rate = 900000;
//设置分辨率
c->width = frame->width;
c->height = frame->height;
//设置timebase
c->time_base = (AVRational){1, 15};
//设置编码前的格式
c->pix_fmt = frame->format;
//打开编码器
ret = avcodec_open2(c, codec, NULL);
上面写到的,都是必须配置的,还有一些其他参数,可以根据需求配置。
三、进行编码
ret = avcodec_send_frame(codecCtx, frame);
ret = avcodec_receive_packet(codecCtx, pkt);
av_packet_unref(pkt);
跟解码的类似。需要注意的是,pkt不可以定义为局部变量。
[self yuv420ToH264:NULL codeCtx:codecCtx callBack:^(AVPacket *pkt) {
fwrite(pkt->data, 1, pkt->size, f);
}];
如上面代码所示,最后还得给编码器传一个空值(frame 传 NULL),让编码器进入冲刷(flush)模式,把内部缓存的剩余数据全部输出出来。
四、验证
只需用ffplay来播放就可以了
ffplay 11_26_30_movie.h264
或者转为mp4文件
ffmpeg -i 09_58_11_movie.h264 -f mp4 my.mp4
如果可以正常播放,不花屏,就证明编码成功了。
写到这里,突然产生了一个疑问:如果编码后得到的AVPacket为一帧编码后的数据,我是直接写到文件的,那么ffplay怎么去区分每一帧的呢?我看了下内存数据,每一帧是以00000001开头的。这其实就是H.264 Annex B字节流格式的起始码(start code,0x00000001 或 0x000001):每个NAL单元(一帧或一个参数集)都以起始码开头,解码器正是依靠起始码来切分码流的,所以ffplay可以直接播放这种裸流文件。
五、改变大小
如果我想改变编码后的大小,第一反应就是去修改上面提到的码流值:c->bit_rate = 900000。
其实,影响编码大小的,不只是码流,还跟timebase有关。说白了,ffmpeg就是根据码流和timebase来计算每一帧大概的编码大小。
如果想很好地改变编码后的大小,可以按下面的步骤:
(1)、得到正确的timebase的值
如果你发现通过修改bitrate后,编码后的大小变化不大。那么可能是timebase值设置不当。那么怎么配置正确的timebase呢?可以根据AVFrame中的timebase,具体你可以看它的pts,dts,duration值,推断大概的timebase。
比如:
你以帧率为15来算,那么每一帧的时间为1/15秒
1、比如第二帧的 pts 为 1,那么 1 × timebase = 1/15 秒,所以 timebase 为 (AVRational){1, 15}
2、比如第二帧的 pts 为 1024,同理 1024 × timebase = 1/15 秒,所以 timebase 为 1/15 秒除以 1024,即 (AVRational){1, 15360}
(2)、改变bitrate
设置好timebase后,就可以通过改变bitrate得到想要的大小的。比如码流为900000,我改为450000,大小就接近减少一半。当然也不会无限小,也不会无限大。压缩都是有个量的。