iOS AVFoundation 利用裁剪后的图片(视频帧)拼接
2016-07-27 本文已影响1979人
飞行的孤独员FG
1.初始化视频设置
// Configures the writer pipeline that turns cropped frames into a movie:
// a 720x1280 H.264 QuickTime file written to the temporary directory.
// Must be called before any frame is handed to startTakingVideo:.
- (void)initVideoSetting{
    NSDictionary *outputSettings = @{
        AVVideoWidthKey  : @720,
        AVVideoHeightKey : @1280,
        AVVideoCodecKey  : AVVideoCodecH264,
    };
    _assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                           outputSettings:outputSettings];
    // The adaptor declares 32BGRA source buffers; every pixel buffer appended
    // through it must use that format.
    _pixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
           initWithAssetWriterInput:_assetWriterInput
        sourcePixelBufferAttributes:@{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)}];

    // fix: join with a path component, not raw string concatenation, so the
    // separator is always correct. (Also fixes the "outputFielPath" typo.)
    NSString *outputFilePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"1613.mov"];
    // fix: AVAssetWriter refuses to start if the target file already exists —
    // remove any leftover from a previous run (best-effort).
    [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:NULL];
    NSURL *fileUrl = [NSURL fileURLWithPath:outputFilePath];

    // fix: the original passed error:nil and ignored failure.
    NSError *error = nil;
    _assetWriter = [[AVAssetWriter alloc] initWithURL:fileUrl
                                             fileType:AVFileTypeQuickTimeMovie
                                                error:&error];
    if (!_assetWriter) {
        NSLog(@"Failed to create AVAssetWriter: %@", error);
        return;
    }
    _assetWriterInput.expectsMediaDataInRealTime = YES;
    if ([_assetWriter canAddInput:_assetWriterInput]) {
        [_assetWriter addInput:_assetWriterInput];
    }
    [_assetWriter startWriting];
    [_assetWriter startSessionAtSourceTime:kCMTimeZero];
}
2.在视频帧输出的代理方法（AVCaptureVideoDataOutputSampleBufferDelegate）中调用裁剪合成方法
// AVCaptureVideoDataOutput sample-buffer delegate: crops each captured frame
// and feeds it to the writer. The autorelease pool drains the per-frame
// temporaries (UIImage/CGImage) created during cropping.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    @autoreleasepool {
        // NOTE(review): the original wrapped this call in @try/@catch with an
        // empty handler. Under ARC, Cocoa exceptions signal programmer error
        // and code is not exception-safe; silently swallowing them only hides
        // bugs — removed.
        // TODO: set cropRect to the region you actually want to keep; the
        // article used a non-compiling placeholder here.
        CGRect cropRect = CGRectMake(0, 0, 720, 1280);
        [self cropSampleBuffer:sampleBuffer inRect:cropRect];
    }
}
3.裁剪sampleBuffer的方法
// Crops one captured frame to `rect` and appends it to the movie.
// Retains the sample buffer while converting it, and releases the pixel
// buffer returned by pixelBufferFromUIImage: — that buffer comes back +1
// from CVPixelBufferCreate, and the original code leaked it every frame.
- (void)cropSampleBuffer:(CMSampleBufferRef)sampleBuffer inRect:(CGRect)rect{
    CFRetain(sampleBuffer);
    UIImage *originImage = [self imageFromSampleBuffer:sampleBuffer]; // standard CMSampleBuffer->UIImage helper (not shown in the article)
    UIImage *cropImage = [self cropImage:originImage atRect:rect];    // standard UIImage cropping helper (not shown in the article)
    CVPixelBufferRef pixelBuffer = [self pixelBufferFromUIImage:cropImage];
    if (pixelBuffer) {
        [self startTakingVideo:pixelBuffer];
        // fix: balance the +1 create — the original never released this buffer.
        CVPixelBufferRelease(pixelBuffer);
    }
    CFRelease(sampleBuffer);
}
4.将裁剪后的UIImage转成CVPixelBufferRef（注意:返回的buffer引用计数为+1,调用方负责释放）
// Converts a cropped UIImage into a CVPixelBuffer suitable for the writer
// adaptor. Returns a +1 (retained) buffer — the CALLER must release it with
// CVPixelBufferRelease — or NULL on failure.
// Fixes vs. the article:
//  * the method signature was missing — restored from the call site
//    ([self pixelBufferFromUIImage:cropImage]);
//  * pixel format is now 32BGRA, matching the adaptor's declared
//    sourcePixelBufferAttributes (the original created 32ARGB);
//  * buffer dimensions follow the image instead of hard-coded 720x1280;
//  * row stride uses CVPixelBufferGetBytesPerRow (Core Video may pad rows,
//    so 4*width is not guaranteed correct).
- (CVPixelBufferRef)pixelBufferFromUIImage:(UIImage *)originImage{
    CGImageRef image = [originImage CGImage];
    size_t width  = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);
    NSDictionary *options = @{
        (NSString *)kCVPixelBufferCGImageCompatibilityKey : @YES,
        (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
    };
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                                          kCVPixelFormatType_32BGRA,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        return NULL; // fix: fail gracefully instead of asserting
    }
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // BGRA in memory == 32-bit little-endian with the (skipped) alpha first.
    CGContextRef context = CGBitmapContextCreate(pxdata, width, height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst);
    if (context) {
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
        CGContextRelease(context);
    }
    CGColorSpaceRelease(rgbColorSpace);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
5.开始将裁剪后的图片合成视频
// Appends one pixel buffer to the movie at a nominal 25 fps.
// The frame counter advances even when the input is not ready (frame dropped),
// so gaps in the output timeline correspond to drops — same pacing as the
// original, which also incremented unconditionally.
- (void)startTakingVideo:(CVPixelBufferRef)sampleBuffer{
    // NOTE(review): a static counter is a crude clock shared by all instances;
    // good enough for this example, but real code should derive presentation
    // times from the capture timestamps.
    static int64_t frameNumber = 0;
    if (_assetWriterInput.readyForMoreMediaData) {
        // fix: the original ignored the append result; NO here means the
        // writer has failed and _assetWriter.error explains why.
        if (![_pixelBufferAdaptor appendPixelBuffer:sampleBuffer
                               withPresentationTime:CMTimeMake(frameNumber, 25)]) {
            NSLog(@"appendPixelBuffer failed: %@", _assetWriter.error);
        }
    }
    frameNumber++;
}
6.停止录制,并保存视频
// Stops recording and finalizes the movie file.
// fix: -finishWriting is deprecated (the article's comment asked for the
// replacement) — use the asynchronous -finishWritingWithCompletionHandler:,
// and mark the input as finished first so the writer knows no more frames
// are coming.
- (void)stopTakingVideo{
    [_assetWriterInput markAsFinished];
    [_assetWriter finishWritingWithCompletionHandler:^{
        if (_assetWriter.status == AVAssetWriterStatusCompleted) {
            NSLog(@"Movie written to %@", _assetWriter.outputURL);
        } else {
            NSLog(@"finishWriting failed: %@", _assetWriter.error);
        }
    }];
}
最后一些注意事项
请不要使用AVCaptureMovieFileOutput:
AVCaptureMovieFileOutput 无法与本文逐帧取数据所依赖的 AVCaptureVideoDataOutput 同时加入同一个 AVCaptureSession,
因此也就无法与 AVAssetWriter / AVAssetWriterInputPixelBufferAdaptor 这套逐帧写入方案共用,具体内容请参见苹果官方文档