Implementing Video Effects and Some Animation Effects on iOS

Recording and Playing Short Videos

2016-06-24  zone1026

A while back a project called for a WeChat-style short video feature: record a clip, then play it back on a custom layer over the recording view. That led me to dig into AVFoundation. I ran into a few problems along the way, so I'm writing them down here. I also referred to the source code of SBVideoCaptureDemo.

Recording is done with AVCaptureSession, AVCaptureMovieFileOutput, AVCaptureDeviceInput, and AVCaptureVideoPreviewLayer; the result is then compressed and converted to MP4 with AVAssetExportSession.

Playback is custom-built from AVPlayerLayer, AVPlayer, AVPlayerItem, and NSURL.

1. Video Recording


Checking whether the device supports video recording

1. Before recording, make sure the camera is available.

2. Make sure the app has been authorized to use the camera.
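
A minimal sketch of both checks, using UIImagePickerController for availability and the AVCaptureDevice authorization API (the exact handling is up to the app):

// 1. Is a camera present and usable on this device?
BOOL hasCamera = [UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera];
// 2. Has the user granted camera access?
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if (hasCamera && AVAuthorizationStatusAuthorized == status) {
    // safe to configure and start the capture session
}
else if (AVAuthorizationStatusNotDetermined == status) {
    // not asked yet: request permission
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        // proceed only if granted
    }];
}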

Custom video recording

A brief description of the classes involved:

AVCaptureSession: the media (audio/video) capture session. It routes captured audio and video data to its outputs; a single AVCaptureSession can have multiple inputs and outputs.

AVCaptureDevice: an input device such as the microphone or a camera. Through this object you can configure the physical device's properties (for example, camera focus).

AVCaptureDeviceInput: manages the data coming from an input device. You create one from an AVCaptureDevice and add it to the AVCaptureSession, which then manages it.

AVCaptureVideoPreviewLayer: the camera preview layer, a CALayer subclass that shows what is being recorded. It is created with a reference to the corresponding AVCaptureSession.

AVCaptureMovieFileOutput: the movie file output. Once an input or output has been added to an AVCaptureSession, the session establishes connections (AVCaptureConnection) between all compatible inputs and outputs.

// The states the video goes through while being produced
typedef NS_ENUM(NSInteger, VideoState)
{
    VideoStateFree = 0,
    VideoStateWillStartRecord,
    VideoStateDidStartRecord,
    VideoStateWillEndRecord,
    VideoStateDidEndRecord,
    VideoStateWillStartMerge,
    VideoStateDidStartMerge,
};

// Unlike VideoState, this tracks the user's action:
// e.g. recording has been started, or stopped
typedef NS_ENUM(NSInteger, RecordOptState)
{
    RecordOptStateFree = 0,
    RecordOptStateBegin,
    RecordOptStateEnd,
};

// The region the user's finger is in while recording,
// used to tell the record region from the cancel region
typedef NS_ENUM(NSInteger, CurrentRecordRegion)
{
    CurrentRecordRegionFree = 0,
    CurrentRecordRegionRecord,
    CurrentRecordRegionCancelRecord,
};
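
The getCurrentRecordRegion helper used later in the recording delegate is not listed in the article; a plausible sketch (touchPoint and recordButtonFrame are hypothetical properties kept up to date by the touch handlers) might be:

- (CurrentRecordRegion)getCurrentRecordRegion
{
    if (RecordOptStateFree == _recordOptState) {
        return CurrentRecordRegionFree;
    }
    // finger still on the record button: keep recording; anywhere else: cancel
    return CGRectContainsPoint(self.recordButtonFrame, self.touchPoint) ? CurrentRecordRegionRecord : CurrentRecordRegionCancelRecord;
}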

Initial configuration

self.captureSession = [[AVCaptureSession alloc] init];
AVCaptureDevice *frontCamera = nil;
AVCaptureDevice *backCamera = nil;
NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *camera in cameras) {
    if (AVCaptureDevicePositionFront == camera.position) {// front camera
        frontCamera = camera;
    }
    else if (AVCaptureDevicePositionBack == camera.position)
    {
        backCamera = camera;
    }
}
// use the back camera by default
[backCamera lockForConfiguration:nil];// lock the device first
if ([backCamera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
    [backCamera setExposureMode:AVCaptureExposureModeContinuousAutoExposure];// continuous auto exposure
}
if ([backCamera isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {// focus
    [backCamera setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
}
[backCamera unlockForConfiguration];
[self.captureSession beginConfiguration];
//input devices
self.videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:backCamera error:nil];
AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio] error:nil];
if ([self.captureSession canAddInput:self.videoDeviceInput]) {
    [self.captureSession addInput:self.videoDeviceInput];
}
if ([self.captureSession canAddInput:audioDeviceInput]) {
    [self.captureSession addInput:audioDeviceInput];
}
//output device
self.movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([self.captureSession canAddOutput:self.movieFileOutput]) {
    [self.captureSession addOutput:self.movieFileOutput];
}
//preset
if ([self.captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {
    self.captureSession.sessionPreset = AVCaptureSessionPreset640x480;//or AVCaptureSessionPresetLow
}
//preview layer
self.preViewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
self.preViewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[self.captureSession commitConfiguration];
[self.captureSession startRunning];// start the session
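
Note that the snippet above creates preViewLayer but never attaches it to a view; somewhere in the controller you would add something like the following (previewView is a hypothetical host view):

self.preViewLayer.frame = previewView.bounds;
[previewView.layer addSublayer:self.preViewLayer];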

Note: before changing any device property you must first call lockForConfiguration to lock the device, and call unlockForConfiguration to unlock it when you're done. When configuring the camera, also check that the device supports the setting in question, e.g. with isExposureModeSupported:, isFocusModeSupported:, and so on.
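
That rule is easy to wrap in a helper. A sketch, using the torch as an example (not from the original source):

- (void)setTorchMode:(AVCaptureTorchMode)mode forDevice:(AVCaptureDevice *)device
{
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {// lock before touching any property
        if ([device isTorchModeSupported:mode]) {// check support first
            [device setTorchMode:mode];
        }
        [device unlockForConfiguration];// always unlock afterwards
    }
    else {
        NSLog(@"lockForConfiguration failed: %@", error.localizedDescription);
    }
}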

//start recording
- (void)startRecordingToOutputFileURL
{
    _videoState = VideoStateWillStartRecord;
    _recordOptState = RecordOptStateBegin;
    // get the video connection from the movie file output
    AVCaptureConnection *captureConnection = [self.movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    // the connection carries the captured data to the output
    if (![self.movieFileOutput isRecording]) {
        // keep the recorded orientation in sync with the preview layer
        captureConnection.videoOrientation = [self.preViewLayer connection].videoOrientation;
        [self.movieFileOutput startRecordingToOutputFileURL:[NSURL fileURLWithPath:[self getVideoSaveFilePathString]] recordingDelegate:self];// start recording
    }
    else
    {
        [self stopCurrentVideoRecording];
    }
}

//stop recording
- (void)stopCurrentVideoRecording
{
    [self stopCountDurTimer];// stop the duration timer
    _videoState = VideoStateWillEndRecord;
    [self.movieFileOutput stopRecording];// stop recording
}
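
The timer helpers (startCountDurTimer, stopCountDurTimer) are referenced but not listed; a minimal sketch, assuming a repeating NSTimer stored in a countDurTimer property and TIMER_INTERVAL / MAX_VIDEO_DUR constants:

- (void)startCountDurTimer
{
    self.countDurTimer = [NSTimer scheduledTimerWithTimeInterval:TIMER_INTERVAL target:self selector:@selector(onCountDurTimer:) userInfo:nil repeats:YES];
}

- (void)onCountDurTimer:(NSTimer *)timer
{
    self.currentVideoDur += TIMER_INTERVAL;// accumulate the current clip's duration
    if (self.totalVideoDur + self.currentVideoDur >= MAX_VIDEO_DUR) {
        [self stopCurrentVideoRecording];// hit the length cap, stop automatically
    }
}

- (void)stopCountDurTimer
{
    [self.countDurTimer invalidate];
    self.countDurTimer = nil;
}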

#pragma mark - AVCaptureFileOutputRecordingDelegate

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
    _videoState = VideoStateDidStartRecord;
    self.videoSaveFilePath = [fileURL absoluteString];
    self.currentFileURL = fileURL;
    self.currentVideoDur = 0.0f;
    self.totalVideoDur = 0.0f;
    [self startCountDurTimer];// start the recording timer
    // notify the delegate that recording has started
    if (RecordOptStateEnd == _recordOptState) {// the record button was released before recording actually began, so stop the in-flight recording
        [self stopCurrentVideoRecording];
    }
}

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
    _videoState = VideoStateDidEndRecord;
    self.totalVideoDur += _currentVideoDur;
    // notify the delegate that recording has finished
    if (CurrentRecordRegionRecord == [self getCurrentRecordRegion]) {
        if (self.totalVideoDur < MIN_VIDEO_DUR) {// recording too short
            [self removeMovFile];// remove the .mov file
            _videoState = VideoStateFree;
        }
    }
    else
    {
        [self removeMovFile];// remove the .mov file
        _videoState = VideoStateFree;
    }
}
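
The file helpers are not listed either; a sketch of what getVideoSaveFilePathString and removeMovFile might look like (the directory and naming scheme are assumptions):

- (NSString *)getVideoSaveFilePathString
{
    // a unique .mov path under the temporary directory (assumed location)
    NSString *name = [NSString stringWithFormat:@"%.0f.mov", [[NSDate date] timeIntervalSince1970] * 1000];
    return [NSTemporaryDirectory() stringByAppendingPathComponent:name];
}

- (void)removeMovFile
{
    // videoSaveFilePath was stored as an absolute URL string, so strip the scheme
    NSString *path = [self.videoSaveFilePath stringByReplacingOccurrencesOfString:@"file://" withString:@""];
    [[NSFileManager defaultManager] removeItemAtPath:path error:nil];
}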

// Merge the .mov clips and export them as MP4
- (void)mergeAndExportVideosAtFileURLs:(NSArray *)fileURLArray
{
    _videoState = VideoStateWillStartMerge;
    NSError *error = nil;
    // render size
    CGSize renderSize = CGSizeMake(0, 0);
    NSMutableArray *layerInstructionArray = [NSMutableArray array];
    // the composition the clips are merged into
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    CMTime totalDuration = kCMTimeZero;
    // collect the asset tracks first, also to compute the render size
    NSMutableArray *assetTrackArray = [NSMutableArray array];
    NSMutableArray *assetArray = [NSMutableArray array];
    for (NSURL *fileURL in fileURLArray) {
        // AVAsset: a media asset
        AVAsset *asset = [AVAsset assetWithURL:fileURL];
        if (!asset) {
            continue;
        }
        [assetArray addObject:asset];
        // the asset's video track (tracksWithMediaType: returns an array of AVAssetTracks)
        AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        [assetTrackArray addObject:assetTrack];
        renderSize.width = MAX(renderSize.width, assetTrack.naturalSize.height);
        renderSize.height = MAX(renderSize.height, assetTrack.naturalSize.width);
    }
    CGFloat renderW = 320;//MIN(renderSize.width, renderSize.height);
    for (NSInteger i = 0; i < [assetArray count] && i < assetTrackArray.count; i++) {
        AVAsset *asset = [assetArray objectAtIndex:i];
        AVAssetTrack *assetTrack = [assetTrackArray objectAtIndex:i];
        // the composition's audio track, into which the source audio segments are inserted
        AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        NSArray *dataSourceArray = [asset tracksWithMediaType:AVMediaTypeAudio];// the asset's audio (microphone) tracks
        [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:((dataSourceArray.count > 0) ? [dataSourceArray objectAtIndex:0] : nil) atTime:totalDuration error:nil];
        // the composition's video track, into which the source video segments are inserted
        AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:assetTrack atTime:totalDuration error:&error];
        // a layer instruction for one clip in the video track; it can scale, rotate, etc.
        AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
        totalDuration = CMTimeAdd(totalDuration, asset.duration);
        CGFloat rate = renderW / MIN(assetTrack.naturalSize.width, assetTrack.naturalSize.height);
        CGAffineTransform layerTransform = CGAffineTransformMake(assetTrack.preferredTransform.a, assetTrack.preferredTransform.b, assetTrack.preferredTransform.c, assetTrack.preferredTransform.d, assetTrack.preferredTransform.tx * rate, assetTrack.preferredTransform.ty * rate);
        layerTransform = CGAffineTransformConcat(layerTransform, CGAffineTransformMake(1, 0, 0, 1, 0, -(assetTrack.naturalSize.width - assetTrack.naturalSize.height) / 2.0));// shift up so the middle of the frame is kept
        layerTransform = CGAffineTransformScale(layerTransform, rate, rate);// scale, so front- and back-camera footage come out the same size
        [layerInstruction setTransform:layerTransform atTime:kCMTimeZero];
        [layerInstruction setOpacity:0.0 atTime:totalDuration];
        //data
        [layerInstructionArray addObject:layerInstruction];
    }
    //get save path
    NSURL *mergeFileURL = [NSURL fileURLWithPath:[self getVideoMergeFilePathString]];
    //export
    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalDuration);
    mainInstruction.layerInstructions = layerInstructionArray;
    AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
    mainCompositionInst.instructions = @[mainInstruction];
    mainCompositionInst.frameDuration = CMTimeMake(1, 100);
    //    mainCompositionInst.renderSize = CGSizeMake(renderW, renderW * (sH/sW));
    mainCompositionInst.renderSize = CGSizeMake(renderW, renderW * 0.75);// 4:3 aspect ratio
    // export session
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    exporter.videoComposition = mainCompositionInst;
    exporter.outputURL = mergeFileURL;
    exporter.outputFileType = AVFileTypeMPEG4;// MP4 container
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            _videoState = VideoStateDidStartMerge;
            // notify the delegate that the conversion succeeded
            [self removeMovFile];// remove the .mov source files
        });
    }];
}
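
The completion handler above assumes the export succeeded. In practice it is worth checking exporter.status before deleting the source clips; a hedged variant of the handler:

[exporter exportAsynchronouslyWithCompletionHandler:^{
    dispatch_async(dispatch_get_main_queue(), ^{
        if (AVAssetExportSessionStatusCompleted == exporter.status) {
            _videoState = VideoStateDidStartMerge;
            [self removeMovFile];// only remove the sources once the MP4 is safely written
        }
        else {
            NSLog(@"export failed: %@", exporter.error.localizedDescription);
        }
    });
}];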

// Compute a file's size in KB
- (NSInteger)getFileSize:(NSString *)path
{
    path = [path stringByReplacingOccurrencesOfString:@"file://" withString:@""];
    NSFileManager *filemanager = [NSFileManager defaultManager];
    if ([filemanager fileExistsAtPath:path]) {
        NSDictionary *attributes = [filemanager attributesOfItemAtPath:path error:nil];
        NSNumber *theFileSize;
        if ((theFileSize = [attributes objectForKey:NSFileSize]))
            return [theFileSize intValue] / 1024;
        else
            return -1;
    }
    else
    {
        return -1;
    }
}
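
For example, after a successful export you might log the merged file's size (a hypothetical call site):

NSInteger sizeKB = [self getFileSize:mergeFileURL.absoluteString];
NSLog(@"merged MP4 size: %ld KB", (long)sizeKB);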

// Zoom the camera in or out
- (void)changeDeviceVideoZoomFactor
{
    AVCaptureDevice *backCamera = [self getCameraDevice:NO];
    CGFloat current = 1.0;
    if (1.0 == backCamera.videoZoomFactor) {
        current = 2.0f;
        if (current > backCamera.activeFormat.videoMaxZoomFactor) {
            current = backCamera.activeFormat.videoMaxZoomFactor;
        }
    }
    NSError *error = nil;
    if ([backCamera lockForConfiguration:&error]) {
        [backCamera rampToVideoZoomFactor:current withRate:10];
        [backCamera unlockForConfiguration];
    }
    else
    {
        NSLog(@"Failed to lock the device for configuration: %@", error.localizedDescription);
    }
}
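
A typical call site is a double-tap gesture on the preview view; the wiring below is not from the original, just one way to hook it up (previewView is assumed to host the preview layer):

// e.g. in the view controller's setup code:
- (void)setupZoomGesture
{
    UITapGestureRecognizer *doubleTap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(onDoubleTap:)];
    doubleTap.numberOfTapsRequired = 2;
    [self.previewView addGestureRecognizer:doubleTap];
}

- (void)onDoubleTap:(UITapGestureRecognizer *)gesture
{
    [self changeDeviceVideoZoomFactor];// toggles between 1x and 2x
}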

- (AVCaptureDevice *)getCameraDevice:(BOOL)isFront
{
    NSArray *cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *frontCamera;
    AVCaptureDevice *backCamera;
    for (AVCaptureDevice *camera in cameras) {
        if (AVCaptureDevicePositionFront == camera.position) {
            frontCamera = camera;
        }
        else if (AVCaptureDevicePositionBack == camera.position)
        {
            backCamera = camera;
        }
    }
    if (isFront) {
        return frontCamera;
    }
    return backCamera;
}


2. Custom Video Playback

// Note: the playback URL must be created with fileURLWithPath, i.e. it has the form "file://var
- (instancetype)initVideoFileURL:(NSURL *)videoFileURL withFrame:(CGRect)frame withView:(UIView *)view
{
    self = [super init];
    if (self) {
        self.videoFileURL = videoFileURL;
        [self registerNotificationMessage];
        [self initPlayLayer:frame withView:view];
    }
    return self;
}
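
Usage might look like this (SightVideoPlayer is a hypothetical name; the article does not name the player class):

NSURL *fileURL = [NSURL fileURLWithPath:mp4FilePath];// must be a file URL, see the note above
SightVideoPlayer *player = [[SightVideoPlayer alloc] initVideoFileURL:fileURL withFrame:view.bounds withView:view];
[player playSight];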

- (void)initPlayLayer:(CGRect)rect withView:(UIView *)view
{
    if (!_videoFileURL) {
        return;
    }
    AVAsset *asset = [AVURLAsset URLAssetWithURL:_videoFileURL options:nil];
    self.playerItem = [AVPlayerItem playerItemWithAsset:asset];
    //self.player = [AVPlayer playerWithPlayerItem:self.playerItem];
    self.player = [[AVPlayer alloc] init];
    self.playerLayer = [AVPlayerLayer playerLayerWithPlayer:self.player];
    [self.player setVolume:0.0f];// muted
    [self.player seekToTime:kCMTimeZero];
    [self.player setActionAtItemEnd:AVPlayerActionAtItemEndNone];
    [self.player replaceCurrentItemWithPlayerItem:self.playerItem];
    self.playerLayer.frame = rect;
    self.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [view.layer addSublayer:self.playerLayer];
}

- (void)playSight
{
    [self.playerItem seekToTime:kCMTimeZero];
    [self.player play];
}

- (void)pauseSight
{
    [self.playerItem seekToTime:kCMTimeZero];
    [self.player pause];
}

- (void)releaseVideoPlayer
{
    [self removeNotificationMessage];
    if (self.player) {
        [self.player pause];
        [self.player replaceCurrentItemWithPlayerItem:nil];
    }
    if (self.playerLayer) {
        [self.playerLayer removeFromSuperlayer];
    }
    self.player = nil;
    self.playerLayer = nil;
    self.playerItem = nil;
    self.videoFileURL = nil;
}

#pragma mark - notification message

- (void)registerNotificationMessage
{
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(avPlayerItemDidPlayToEnd:) name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
}

- (void)removeNotificationMessage
{
    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
}

- (void)avPlayerItemDidPlayToEnd:(NSNotification *)notification
{
    if (notification.object != self.playerItem) {
        return;
    }
    // loop: rewind to the beginning and keep playing
    [self.playerItem seekToTime:kCMTimeZero];
    [self.player play];
}

There is a lot more to AVFoundation; I'll look into the rest when other requirements come up.

Source code: https://github.com/zone1026/SightRecorder
