了解AVFoundation
2020-10-10 本文已影响0人
花开富贵_cc
AVFoundation
-
捕捉会话: AVCaptureSession
-
捕捉设备 AVCaptureDevice
-
捕捉设备输入 AVCaptureDeviceInput
-
捕捉设备输出 AVCaptureOutput 抽象类
AVCaptureStillImageOutput 输出静态图片类
AVCaptureMovieFileOutput 输出文件
AVCaptureAudioDataOutput 音频数据
AVCaptureVideoDataOutput 视频数据 -
捕捉连接类 AVCaptureConnection 根据所捕捉的输入设备的渲染媒体类型自动建立输入和输出的连接
-
捕捉预览 AVCaptureVideoPreviewLayer
设置 Session
1.初始化
// Step 1: Create the capture session — the central hub connecting inputs and outputs.
self.captureSession = [[AVCaptureSession alloc]init];
2.设置分辨率
/*
 Common session presets:
 AVCaptureSessionPresetHigh
 AVCaptureSessionPresetMedium
 AVCaptureSessionPresetLow
 AVCaptureSessionPreset640x480
 AVCaptureSessionPreset1280x720
 AVCaptureSessionPresetPhoto
 */
// Step 2: Set the session's image quality / resolution preset.
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
3.配置输入设备(注意转换为AVCaptureDeviceInput)
4.配置输入设备包括音频输入,视频输入
// Steps 3/4: Configure the capture inputs, starting with video.
// Get the default video capture device — on iOS this is the back camera.
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];//AVMediaTypeVideo = video device
// A capture device must be wrapped in an AVCaptureDeviceInput
// before it can be added to a session.
// NOTE(review): `error` is presumably the NSError ** out-parameter of the
// enclosing setup method — confirm against the full listing.
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:error];
// Only proceed if the input was created successfully.
if (videoInput)
{
// The camera is a shared system resource; canAddInput: tests whether
// this input can actually be attached to the session.
if ([self.captureSession canAddInput:videoInput])
{
// Attach the video input and remember it as the active one
// (needed later for camera switching).
[self.captureSession addInput:videoInput];
self.activeVideoInput = videoInput;
}
}else
{
return NO;
}
// Add the audio input device (the built-in microphone).
// defaultDeviceWithMediaType: returns the default audio capture device.
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
// As with video, the device must be wrapped in an AVCaptureDeviceInput
// before it can be attached to the session.
// NOTE(review): `error` is presumably the NSError ** out-parameter of the
// enclosing setup method — confirm against the full listing.
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:error];
// Only proceed if the input was created successfully.
if (audioInput) {
// canAddInput: tests whether the input can be added to the session.
if ([self.captureSession canAddInput:audioInput])
{
// Attach the microphone input to the session.
[self.captureSession addInput:audioInput];
}
}else
{
return NO;
}
5.配置输出(静态图像输出,视频文件输出)
// Step 5: Configure the outputs (still image + movie file).
// AVCaptureStillImageOutput captures still images from the camera.
// NOTE(review): deprecated since iOS 10 in favour of AVCapturePhotoOutput.
self.imageOutput = [[AVCaptureStillImageOutput alloc]init];
// Output settings dictionary: request JPEG-encoded captures.
self.imageOutput.outputSettings = @{AVVideoCodecKey:AVVideoCodecJPEG};
// Always test with canAddOutput: before adding an output.
if ([self.captureSession canAddOutput:self.imageOutput])
{
[self.captureSession addOutput:self.imageOutput];
}
// AVCaptureMovieFileOutput records QuickTime movies to the file system.
self.movieOutput = [[AVCaptureMovieFileOutput alloc]init];
// Always test with canAddOutput: before adding an output.
if ([self.captureSession canAddOutput:self.movieOutput])
{
[self.captureSession addOutput:self.movieOutput];
}
⚠️注意:为 session 添加输入/输出时,一定要先判断能否添加,因为摄像头并不隶属任何一个 App,它是公共设备。
切换摄像头
// Toggles between the front and back cameras.
// Returns NO when switching is impossible or the new input cannot be created.
- (BOOL)switchCameras {
    // Bail out early when the device has only one camera.
    if (![self canSwitchCameras]) {
        return NO;
    }

    NSError *error;
    // The camera opposite to the one currently in use.
    AVCaptureDevice *newCamera = [self inactiveCamera];
    // Wrap it in an AVCaptureDeviceInput so the session can consume it.
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newCamera error:&error];

    if (!newInput) {
        // Creating the input failed — hand the error to the delegate.
        [self.delegate deviceConfigurationFailedWithError:error];
        return NO;
    }

    // Batch the reconfiguration: begin/commitConfiguration lets the
    // session apply the input swap as a single atomic change.
    [self.captureSession beginConfiguration];
    // Remove the current video input first.
    [self.captureSession removeInput:self.activeVideoInput];
    if ([self.captureSession canAddInput:newInput]) {
        // The new camera is usable — make it the active video input.
        [self.captureSession addInput:newInput];
        self.activeVideoInput = newInput;
    } else {
        // Otherwise restore the previous input so the session keeps working.
        [self.captureSession addInput:self.activeVideoInput];
    }
    [self.captureSession commitConfiguration];
    return YES;
}
点击聚焦方法的实现
// Whether the active camera supports focusing at a point of interest.
- (BOOL)cameraSupportsTapToFocus {
    return [self activeCamera].focusPointOfInterestSupported;
}
// Focus the active camera at the given point (device coordinate space).
- (void)focusAtPoint:(CGPoint)point {
    AVCaptureDevice *device = [self activeCamera];

    // Require both point-of-interest focus and single auto-focus support.
    BOOL supported = device.isFocusPointOfInterestSupported &&
                     [device isFocusModeSupported:AVCaptureFocusModeAutoFocus];
    if (!supported) {
        return;
    }

    NSError *error;
    // Configuration changes require an exclusive lock on the device,
    // so no other object can reconfigure it mid-change.
    if ([device lockForConfiguration:&error]) {
        device.focusPointOfInterest = point;
        // Trigger a single auto-focus pass at that point.
        device.focusMode = AVCaptureFocusModeAutoFocus;
        [device unlockForConfiguration];
    } else {
        // Report lock failures to the error-handling delegate.
        [self.delegate deviceConfigurationFailedWithError:error];
    }
}
点击曝光的方法实现
// Whether the active camera supports exposure at a point of interest.
- (BOOL)cameraSupportsTapToExpose {
    return [self activeCamera].exposurePointOfInterestSupported;
}
// KVO context used to observe the device's adjustingExposure property.
static const NSString *THCameraAdjustingExposureContext;

// Expose the active camera at the given point, then lock the exposure
// once the device finishes adjusting (handled in observeValueForKeyPath:).
- (void)exposeAtPoint:(CGPoint)point {
    AVCaptureDevice *device = [self activeCamera];
    AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure;

    // Require point-of-interest exposure plus continuous auto-exposure support.
    // (The original also re-sent isExposureModeSupported: as a standalone
    // statement and discarded the result — dead code, removed.)
    if (device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode]) {
        NSError *error;
        // Lock the device before changing its configuration.
        if ([device lockForConfiguration:&error]) {
            device.exposurePointOfInterest = point;
            device.exposureMode = exposureMode;

            // If the device supports locked exposure, observe adjustingExposure
            // via KVO so we can switch to AVCaptureExposureModeLocked once the
            // adjustment settles.
            if ([device isExposureModeSupported:AVCaptureExposureModeLocked]) {
                [device addObserver:self forKeyPath:@"adjustingExposure" options:NSKeyValueObservingOptionNew context:&THCameraAdjustingExposureContext];
            }
            // Release the configuration lock.
            [device unlockForConfiguration];
        } else {
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }
}
// KVO callback: waits for the device to finish adjusting exposure, then
// locks the exposure so later scene changes don't alter it.
- (void)observeValueForKeyPath:(NSString *)keyPath
ofObject:(id)object
change:(NSDictionary *)change
context:(void *)context {
// Only handle notifications fired with our exposure context.
if (context == &THCameraAdjustingExposureContext) {
// The observed object is the capture device.
AVCaptureDevice *device = (AVCaptureDevice *)object;
// Once the device is no longer adjusting exposure — and it supports the
// locked mode — freeze the exposure at its current value.
if(!device.isAdjustingExposure && [device isExposureModeSupported:AVCaptureExposureModeLocked])
{
// Remove self as observer first so no further change notifications arrive.
[object removeObserver:self forKeyPath:@"adjustingExposure" context:&THCameraAdjustingExposureContext];
// Hop back to the main queue asynchronously before reconfiguring.
dispatch_async(dispatch_get_main_queue(), ^{
NSError *error;
if ([device lockForConfiguration:&error]) {
// Lock the exposure.
device.exposureMode = AVCaptureExposureModeLocked;
// Release the configuration lock.
[device unlockForConfiguration];
}else
{
[self.delegate deviceConfigurationFailedWithError:error];
}
});
}
}else
{
// Not our context — forward to super.
[super observeValueForKeyPath:keyPath ofObject:object change:change context:context];
}
}
闪光灯
// Whether the active camera has a flash unit.
- (BOOL)cameraHasFlash {
    return [self activeCamera].hasFlash;
}
// The active camera's current flash mode.
- (AVCaptureFlashMode)flashMode {
    return [self activeCamera].flashMode;
}
// Apply a new flash mode to the active camera.
- (void)setFlashMode:(AVCaptureFlashMode)flashMode {
    AVCaptureDevice *device = [self activeCamera];

    // Ignore modes the current camera cannot provide.
    if (![device isFlashModeSupported:flashMode]) {
        return;
    }

    NSError *error;
    // Lock the device before changing its configuration.
    if ([device lockForConfiguration:&error]) {
        device.flashMode = flashMode;
        // Done — release the device.
        [device unlockForConfiguration];
    } else {
        [self.delegate deviceConfigurationFailedWithError:error];
    }
}
手电筒
// Whether the active camera has a torch (continuous video light).
- (BOOL)cameraHasTorch {
    return [self activeCamera].hasTorch;
}
// The active camera's current torch mode.
- (AVCaptureTorchMode)torchMode {
    return [self activeCamera].torchMode;
}
// Apply a new torch mode to the active camera.
- (void)setTorchMode:(AVCaptureTorchMode)torchMode {
    AVCaptureDevice *device = [self activeCamera];

    // Ignore modes the current camera cannot provide.
    if (![device isTorchModeSupported:torchMode]) {
        return;
    }

    NSError *error;
    // Lock the device before changing its configuration.
    if ([device lockForConfiguration:&error]) {
        device.torchMode = torchMode;
        [device unlockForConfiguration];
    } else {
        [self.delegate deviceConfigurationFailedWithError:error];
    }
}
拍摄静态图片的方法实现
// Capture a single still image from the still-image output.
- (void)captureStillImage {
    // The connection carrying video frames to the still-image output.
    AVCaptureConnection *connection = [self.imageOutput connectionWithMediaType:AVMediaTypeVideo];

    // The app is portrait-only, but honour the physical device orientation
    // so the captured photo comes out the right way up.
    if (connection.isVideoOrientationSupported) {
        connection.videoOrientation = [self currentVideoOrientation];
    }

    // Capture asynchronously; the completion handler receives a sample buffer
    // holding the image data.
    [self.imageOutput captureStillImageAsynchronouslyFromConnection:connection
                                                  completionHandler:^(CMSampleBufferRef sampleBuffer, NSError *error) {
        if (sampleBuffer == NULL) {
            NSLog(@"NULL sampleBuffer:%@",[error localizedDescription]);
            return;
        }
        // Convert the buffer to JPEG data, build a UIImage, and hand it off
        // to be written to the photo library.
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        [self writeImageToAssetsLibrary:image];
    }];
}
/*
 Assets Library framework: programmatic access to the iOS photo library.
 NOTE: requires the photo-library usage permission key in the plist,
 otherwise the app will crash when access is requested.
 */
- (void)writeImageToAssetsLibrary:(UIImage *)image {
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    // Arguments: the CGImageRef backing the UIImage, the image orientation
    // cast to NSUInteger, and a completion block for success/failure.
    [library writeImageToSavedPhotosAlbum:image.CGImage
                              orientation:(NSUInteger)image.imageOrientation
                          completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            // Failure: just log the description.
            id message = [error localizedDescription];
            NSLog(@"%@",message);
        } else {
            // Success: post the captured-image notification so the UI can
            // draw the bottom-left thumbnail.
            [self postThumbnailNotifification:image];
        }
    }];
}
拍摄视频的方法实现
// Whether the movie output is currently recording.
- (BOOL)isRecording {
    return [self.movieOutput isRecording];
}
// Begin recording a QuickTime movie to a unique temporary file.
// Does nothing if a recording is already in progress.
- (void)startRecording {
    if ([self isRecording]) {
        return;
    }

    // The connection carrying video to the movie file output; used to set
    // per-recording properties before starting.
    AVCaptureConnection *videoConnection = [self.movieOutput connectionWithMediaType:AVMediaTypeVideo];

    // Record with the correct orientation when the connection allows it.
    if ([videoConnection isVideoOrientationSupported]) {
        videoConnection.videoOrientation = [self currentVideoOrientation];
    }

    // Video stabilization noticeably improves quality; it only applies
    // when recording to a movie file.
    if ([videoConnection isVideoStabilizationSupported]) {
        videoConnection.enablesVideoStabilizationWhenAvailable = YES;
    }

    AVCaptureDevice *device = [self activeCamera];

    // BUG FIX: the original tested device.isSmoothAutoFocusEnabled (whether
    // the feature was already ON) instead of whether it is SUPPORTED, so
    // smooth autofocus was never actually enabled. Smooth autofocus slows
    // the lens to avoid focus "pumping" while the user moves during capture.
    if (device.isSmoothAutoFocusSupported) {
        NSError *error;
        if ([device lockForConfiguration:&error]) {
            device.smoothAutoFocusEnabled = YES;
            [device unlockForConfiguration];
        } else {
            [self.delegate deviceConfigurationFailedWithError:error];
        }
    }

    // Unique file-system URL to write the movie to.
    self.outputURL = [self uniqueURL];

    // Start recording (hardware-accelerated encoding handled by AVFoundation);
    // completion is reported through the recording delegate.
    [self.movieOutput startRecordingToOutputFileURL:self.outputURL recordingDelegate:self];
}
// Elapsed duration of the current recording.
- (CMTime)recordedDuration {
    return [self.movieOutput recordedDuration];
}
// Build a unique file URL for the recorded movie, inside a uniquely
// named temporary directory. Returns nil if the directory can't be made.
- (NSURL *)uniqueURL {
    NSFileManager *fileManager = [NSFileManager defaultManager];
    // temporaryDirectoryWithTemplateString: (category helper) creates a
    // uniquely named directory from the mkstemp-style template.
    NSString *directoryPath = [fileManager temporaryDirectoryWithTemplateString:@"kamera.XXXXXX"];
    if (!directoryPath) {
        return nil;
    }
    NSString *moviePath = [directoryPath stringByAppendingPathComponent:@"kamera_movie.mov"];
    return [NSURL fileURLWithPath:moviePath];
}
// Stop the movie recording if one is in progress.
- (void)stopRecording {
    if (![self isRecording]) {
        return;
    }
    [self.movieOutput stopRecording];
}
#pragma mark - AVCaptureFileOutputRecordingDelegate

// Called when the movie file output finishes writing the recording.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL
      fromConnections:(NSArray *)connections
                error:(NSError *)error {
    if (error) {
        // Recording failed — report it to the delegate.
        [self.delegate mediaCaptureFailedWithError:error];
    } else {
        // Copy the URL before handing it off, since outputURL is cleared below.
        [self writeVideoToAssetsLibrary:[self.outputURL copy]];
    }
    self.outputURL = nil;
}
// Write the captured movie at videoURL into the Saved Photos album.
- (void)writeVideoToAssetsLibrary:(NSURL *)videoURL {
    // ALAssetsLibrary provides the video-writing interface.
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];

    // Check the video is compatible before attempting the write.
    if (![library videoAtPathIsCompatibleWithSavedPhotosAlbum:videoURL]) {
        return;
    }

    // Perform the write; on success generate a thumbnail for the UI,
    // on failure report the error through the delegate.
    [library writeVideoAtPathToSavedPhotosAlbum:videoURL
                                completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            [self.delegate assetLibraryWriteFailedWithError:error];
        } else {
            [self generateThumbnailForVideoAtURL:videoURL];
        }
    }];
}