iOS Video Composition (compositing images into a video, and merging video with audio)
The image-sequence-to-video effect feels a lot like a PowerPoint slideshow. I gathered some references and worked through the basics of video composition; the notes and code below are what came out of it.
1. Compositing images into a video:
The idea: hand each UIImage to an AVAssetWriter as a CVPixelBuffer, via an AVAssetWriterInputPixelBufferAdaptor.
@interface ViewController ()
@property (nonatomic, strong) NSMutableArray *imageArr;
@property (nonatomic, strong) NSString *theVideoPath;
@end
@implementation ViewController
- (void)viewDidLoad {
    [super viewDidLoad];
    // Load 23 numbered images (1.jpg ... 23.jpg). The original list spelled two
    // entries without the .jpg extension; a loop with a nil guard also keeps a
    // missing resource from silently truncating the array.
    self.imageArr = [NSMutableArray array];
    for (int i = 1; i <= 23; i++) {
        UIImage *img = [UIImage imageNamed:[NSString stringWithFormat:@"%d.jpg", i]];
        if (img) {
            [self.imageArr addObject:img];
        }
    }
    UIButton *button = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [button setFrame:CGRectMake(100, 100, 100, 100)];
    [button setTitle:@"Compose" forState:UIControlStateNormal];
    [button addTarget:self action:@selector(testCompressionSession) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:button];
    UIButton *button1 = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [button1 setFrame:CGRectMake(100, 200, 100, 100)];
    [button1 setTitle:@"Play" forState:UIControlStateNormal];
    [button1 addTarget:self action:@selector(playAction) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:button1];
}
- (void)testCompressionSession
{
    NSLog(@"start");
    //NSString *moviePath = [[NSBundle mainBundle] pathForResource:@"Movie" ofType:@"mov"];
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *moviePath = [[paths objectAtIndex:0] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4", @"2016全球三大超跑宣传片_超清"]];
    self.theVideoPath = moviePath;
    CGSize size = CGSizeMake(320, 400); // output video dimensions
    //[self writeImages:_imageArr ToMovieAtPath:moviePath withSize:size inDuration:4 byFPS:30]; // second approach, shown further down
    NSError *error = nil;
    unlink([moviePath UTF8String]); // remove any previous output at this path
    NSLog(@"path -> %@", moviePath);
    // Initialize the writer (the compression engine)
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:moviePath] fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);
    if (error) {
        NSLog(@"error = %@", [error localizedDescription]);
    }
    NSDictionary *videoSettings = @{ AVVideoCodecKey  : AVVideoCodecH264,
                                     AVVideoWidthKey  : @((int)size.width),
                                     AVVideoHeightKey : @((int)size.height) };
    AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    NSDictionary *sourcePixelBufferAttributesDictionary = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB) };
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                                       sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    NSLog(@"canAddInput: %@", [videoWriter canAddInput:writerInput] ? @"YES" : @"NO");
    [videoWriter addInput:writerInput];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    // Composite the image sequence into a single video file.
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    __block int frame = 0;
    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData]) {
            // 10 frames per image at a timescale of 10: each image is on screen for 1 second.
            // (The original incremented frame before the first append, which skipped
            // the sample at time zero; appending first, then incrementing, fixes that.)
            if (frame >= (int)self.imageArr.count * 10) {
                [writerInput markAsFinished];
                [videoWriter finishWriting];
                break;
            }
            int idx = frame / 10;
            NSLog(@"idx == %d", idx);
            CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[[self.imageArr objectAtIndex:idx] CGImage] size:size];
            if (buffer) {
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 10)]) {
                    NSLog(@"FAIL");
                } else {
                    NSLog(@"OK");
                }
                CFRelease(buffer); // pixelBufferFromCGImage returns a +1 buffer
            }
            frame++;
        }
    }];
}
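One caveat: the synchronous finishWriting call used above has been deprecated since iOS 6. A minimal variant of the end-of-stream handling using the completion-handler API, with the same variables as the block above, would be:

[writerInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{
    // videoWriter.status is AVAssetWriterStatusCompleted on success
    NSLog(@"finished writing, status = %ld", (long)videoWriter.status);
}];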
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size {
    NSDictionary *options = @{ (NSString *)kCVPixelBufferCGImageCompatibilityKey : @YES,
                               (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES };
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's real bytes-per-row rather than 4 * width: Core Video may
    // pad rows, and a hard-coded stride produces skewed output when it does.
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer; // caller is responsible for releasing this buffer
}
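One thing to watch in pixelBufferFromCGImage:size:: the draw rect uses the image's own pixel size, so an image larger than the target size gets cropped and a smaller one leaves a blank margin. To stretch every image to fill the video frame instead, a one-line tweak (my adjustment, not the original post's) is:

CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image); // scale to fill the buffer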
- (void)playAction {
    MPMoviePlayerViewController *theMovie = [[MPMoviePlayerViewController alloc] initWithContentURL:[NSURL fileURLWithPath:self.theVideoPath]];
    theMovie.moviePlayer.movieSourceType = MPMovieSourceTypeFile; // set the source type before presenting
    [self presentMoviePlayerViewControllerAnimated:theMovie];
    [theMovie.moviePlayer play];
}
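MPMoviePlayerViewController and presentMoviePlayerViewControllerAnimated: were deprecated in iOS 9. A minimal sketch of the same playback with AVKit's AVPlayerViewController (the method name playActionModern is mine, not the post's):

#import <AVKit/AVKit.h>

- (void)playActionModern {
    // Hypothetical replacement for playAction above, not part of the original post
    AVPlayerViewController *playerVC = [[AVPlayerViewController alloc] init];
    playerVC.player = [AVPlayer playerWithURL:[NSURL fileURLWithPath:self.theVideoPath]];
    [self presentViewController:playerVC animated:YES completion:^{
        [playerVC.player play];
    }];
}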
// Second approach: fixed total duration and FPS, driven by a plain loop
- (void)writeImages:(NSArray *)imagesArray ToMovieAtPath:(NSString *)path withSize:(CGSize)size inDuration:(float)duration byFPS:(int32_t)fps {
    // Wire up the writer:
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie error:&error];
    NSParameterAssert(videoWriter);
    NSDictionary *videoSettings = @{ AVVideoCodecKey  : AVVideoCodecH264,
                                     AVVideoWidthKey  : @((int)size.width),
                                     AVVideoHeightKey : @((int)size.height) };
    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    [videoWriter addInput:videoWriterInput];
    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    // Write the samples:
    CVPixelBufferRef buffer = NULL;
    int frameCount = 0;
    int imagesCount = (int)[imagesArray count];
    float averageTime = duration / imagesCount;  // seconds each image stays on screen
    int averageFrame = (int)(averageTime * fps); // frames each image occupies
    for (UIImage *img in imagesArray) {
        buffer = [self pixelBufferFromCGImage:[img CGImage] size:size];
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j <= 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                printf("appending %d attempt %d\n", frameCount, j);
                CMTime frameTime = CMTimeMake(frameCount, fps);
                float frameSeconds = CMTimeGetSeconds(frameTime);
                NSLog(@"frameCount:%d, fps:%d, frameSeconds:%f", frameCount, fps, frameSeconds);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                [NSThread sleepForTimeInterval:0.05];
            } else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d, attempts %d\n", frameCount, j);
        }
        if (buffer) {
            CVPixelBufferRelease(buffer); // release the +1 buffer; the original leaked one per image
        }
        frameCount += averageFrame;
    }
    // Finish the session:
    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    NSLog(@"finishWriting");
}
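For reference, the commented-out line in testCompressionSession is the call site for this method; it spreads the 23 images evenly over 4 seconds at 30 fps:

[self writeImages:self.imageArr ToMovieAtPath:moviePath withSize:CGSizeMake(320, 400) inDuration:4 byFPS:30];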
2. Merging video with audio
Here an AVMutableComposition collects one video track and one audio track from the source assets, and an AVAssetExportSession writes the combined result to disk.
// Mix background music into the video
- (void)merge {
    // MBProgressHUD prompt (optional)
    // [MBProgressHUD showMessage:@"Processing…"];
    // Paths
    NSString *documents = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    // Audio source
    NSURL *audioInputUrl = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"蓝瘦香菇" ofType:@"mp3"]];
    // Video source
    NSURL *videoInputUrl = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"2016全球三大超跑宣传片_超清" ofType:@"mp4"]];
    // Output path for the merged file
    NSString *outPutFilePath = [documents stringByAppendingPathComponent:@"merge.mp4"];
    // The export fails if a file already exists at the output path, so clear it first
    [[NSFileManager defaultManager] removeItemAtPath:outPutFilePath error:nil];
    // Output URL
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outPutFilePath];
    // Insertion start time
    CMTime nextClipStartTime = kCMTimeZero;
    // Create the mutable audio/video composition
    AVMutableComposition *composition = [AVMutableComposition composition];
    // Video asset
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoInputUrl options:nil];
    // Full time range of the video
    CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    // Video composition track (kCMPersistentTrackID_Invalid = 0 lets the framework assign an ID)
    AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    // Source video track
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    // Insert the source video into the composition track
    [videoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:nextClipStartTime error:nil];
    // Audio asset
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioInputUrl options:nil];
    // The video happens to be the shorter asset here, so its range is reused for the
    // audio; if you automate this, compare the two durations yourself (see the sketch below)
    CMTimeRange audioTimeRange = videoTimeRange;
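    // Sketch (not in the original post): to handle an audio file that may be
    // shorter than the video, clamp the range to the shorter of the two assets:
    //   CMTime shorter = CMTimeMinimum(videoAsset.duration, audioAsset.duration);
    //   CMTimeRange audioTimeRange = CMTimeRangeMake(kCMTimeZero, shorter);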
    // Audio composition track
    AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    // Source audio track
    AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    // Insert the source audio into the composition track
    [audioTrack insertTimeRange:audioTimeRange ofTrack:audioAssetTrack atTime:nextClipStartTime error:nil];
    // Create an export session
    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
    // Output file type; AVFileTypeMPEG4 matches the .mp4 extension used above
    // (the original used AVFileTypeQuickTimeMovie, which really produces a .mov)
    assetExport.outputFileType = AVFileTypeMPEG4;
    // Output URL
    assetExport.outputURL = outputFileUrl;
    // Optimize for network playback (moves the metadata to the front of the file)
    assetExport.shouldOptimizeForNetworkUse = YES;
    // Run the export
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        // The handler fires on a background queue; hop back to the main thread
        dispatch_async(dispatch_get_main_queue(), ^{
            if (assetExport.status == AVAssetExportSessionStatusCompleted) {
                // outputFileUrl now points at the video with the music mixed in
                [self playWithUrl:outputFileUrl];
            } else {
                NSLog(@"export failed: %@", assetExport.error);
            }
        });
    }];
}
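The completion handler calls -playWithUrl:, which the original post never shows. A minimal sketch of such a helper with AVKit (hypothetical, same pattern as playActionModern above):

- (void)playWithUrl:(NSURL *)url {
    // Hypothetical helper, not from the original post
    AVPlayerViewController *playerVC = [[AVPlayerViewController alloc] init];
    playerVC.player = [AVPlayer playerWithURL:url];
    [self presentViewController:playerVC animated:YES completion:^{
        [playerVC.player play];
    }];
}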