Basic Usage of AVCaptureSession: Capturing the Video Stream, Previewing, and Recording
2019-04-22
码农耕
Usage steps:
1. Create the capture session
_session = [[AVCaptureSession alloc] init];
_session.sessionPreset = AVCaptureSessionPresetHigh; // quality preset for the session
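The snippets throughout assume #import <AVFoundation/AVFoundation.h>. Before the session can deliver frames, the app also needs the NSCameraUsageDescription (and, for audio, NSMicrophoneUsageDescription) keys in Info.plist and must request access. A minimal sketch; the setupSession method name is only an illustration:
// Request camera permission before configuring the session
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (granted) {
            // [self setupSession]; // hypothetical method: continue configuring inputs/outputs here
        }
    });
}];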
2. Configure the session inputs
// Video input
NSError *error = nil;
AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
if (videoInput && [_session canAddInput:videoInput]) {
    [_session addInput:videoInput];
}
_deviceInput = videoInput;

// Audio input
AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioIn = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:&error];
if (audioIn && [_session canAddInput:audioIn]) {
    [_session addInput:audioIn];
}
3. Configure the session outputs (data or file)
3.1 Data output
// Serial queue for the sample buffer callbacks (the queue label is just an example)
dispatch_queue_t captureQueue = dispatch_queue_create("com.demo.captureQueue", DISPATCH_QUEUE_SERIAL);

// Video data output
AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];
[videoOut setAlwaysDiscardsLateVideoFrames:YES];
[videoOut setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}];
[videoOut setSampleBufferDelegate:self queue:captureQueue];
if ([_session canAddOutput:videoOut]) {
    [_session addOutput:videoOut];
}
_videoOutput = videoOut;
_videoConnection = [videoOut connectionWithMediaType:AVMediaTypeVideo];

// Audio data output
AVCaptureAudioDataOutput *audioOut = [[AVCaptureAudioDataOutput alloc] init];
[audioOut setSampleBufferDelegate:self queue:captureQueue];
if ([_session canAddOutput:audioOut]) {
    [_session addOutput:audioOut];
}
_audioConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio];
#pragma mark - delegate
// Called when a frame is dropped (e.g. the delegate queue is too busy)
- (void)captureOutput:(AVCaptureOutput *)output didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // NSLog(@"%@", sampleBuffer);
}
// Called for every captured video frame and audio buffer
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    @autoreleasepool {
        // Video frames
        if (connection == _videoConnection) {
            // process / encode video sample buffers here
        }
        // Audio buffers
        if (connection == _audioConnection) {
            // process / encode audio sample buffers here
        }
    }
}
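For reference, a common way to turn a BGRA frame into a UIImage inside the video branch above. This helper is not part of the original post; it is a sketch that assumes the kCVPixelFormatType_32BGRA setting configured earlier, and the name imageFromSampleBuffer: is mine:
// Hypothetical helper: convert a 32BGRA CMSampleBufferRef into a UIImage
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);

    // BGRA layout corresponds to little-endian 32-bit with premultiplied first alpha
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef cgImage = CGBitmapContextCreateImage(context);
    UIImage *image = [UIImage imageWithCGImage:cgImage];

    CGImageRelease(cgImage);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return image;
}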
// Still image output (AVCaptureStillImageOutput has been deprecated since iOS 10; see the AVCapturePhotoOutput note below)
AVCaptureStillImageOutput *imageOutput = [[AVCaptureStillImageOutput alloc] init];
if (@available(iOS 11.0, *)) {
    imageOutput.outputSettings = @{AVVideoCodecKey: AVVideoCodecTypeJPEG};
} else {
    imageOutput.outputSettings = @{AVVideoCodecKey: AVVideoCodecJPEG};
}
if ([_session canAddOutput:imageOutput]) {
    [_session addOutput:imageOutput];
}
_imageOutput = imageOutput;
// Grab a still photo from the video stream
- (void)getVideoImage
{
    AVCaptureConnection *connection = [_imageOutput connectionWithMediaType:AVMediaTypeVideo];
    [_imageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef _Nullable imageDataSampleBuffer, NSError * _Nullable error) {
        if (error) {
            return;
        }
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        NSLog(@"%@", image);
    }];
}
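On iOS 10 and later, AVCapturePhotoOutput is the replacement for the deprecated AVCaptureStillImageOutput. A minimal sketch of the equivalent capture path; the _photoOutput ivar (added to the session instead of _imageOutput) is an assumption for illustration:
// Trigger a capture; assumes an AVCapturePhotoOutput ivar named _photoOutput already added to the session
AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
[_photoOutput capturePhotoWithSettings:settings delegate:self];

// AVCapturePhotoCaptureDelegate callback (iOS 11+)
- (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(NSError *)error
{
    if (error) { return; }
    NSData *imageData = [photo fileDataRepresentation];
    UIImage *image = [[UIImage alloc] initWithData:imageData];
    NSLog(@"%@", image);
}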
3.2 File output
// Initialize the movie file output used to write captured data to disk
AVCaptureMovieFileOutput *movieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
// Add the output to the session before configuring its connection, otherwise connectionWithMediaType: returns nil
if ([_session canAddOutput:movieFileOutput]) {
    [_session addOutput:movieFileOutput];
}
// Configure the output's video connection
AVCaptureConnection *captureConnection = [movieFileOutput connectionWithMediaType:AVMediaTypeVideo];
// Video stabilization: introduced with iOS 6 and the iPhone 4S. Starting with the iPhone 6, a stronger and smoother mode was added, known as cinematic video stabilization. The related API changed as well (at the time this was not reflected in the documentation, but it can be seen in the header files). Stabilization is not configured on the capture device but on the AVCaptureConnection, and since not every device format supports every stabilization mode, check first whether the desired mode is supported:
if ([captureConnection isVideoStabilizationSupported]) {
    captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
}
// Keep the video orientation in sync with the preview layer
captureConnection.videoOrientation = [previewLayer connection].videoOrientation;
// Start recording and save the video locally (the temporary path below is just an example)
NSURL *videoUrl = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mov"]];
[movieFileOutput startRecordingToOutputFileURL:videoUrl recordingDelegate:self];
Recording delegate callbacks (save the video once recording finishes):
// Called when recording finishes; write the file to the photo library
// (ALAssetsLibrary has been deprecated since iOS 9; see the PHPhotoLibrary sketch below)
- (void)captureOutput:(AVCaptureFileOutput *)output didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray<AVCaptureConnection *> *)connections error:(nullable NSError *)error
{
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL completionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) {
            NSLog(@"Save video fail: %@", error);
        } else {
            NSLog(@"Save video succeed.");
        }
    }];
}
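Since ALAssetsLibrary is deprecated, the Photos framework is the modern way to save the recording. A sketch of the same step using PHPhotoLibrary; it assumes #import <Photos/Photos.h> and the NSPhotoLibraryUsageDescription key in Info.plist:
// Save the recorded file with the Photos framework instead of ALAssetsLibrary
[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:outputFileURL];
} completionHandler:^(BOOL success, NSError * _Nullable error) {
    if (success) {
        NSLog(@"Save video succeed.");
    } else {
        NSLog(@"Save video fail: %@", error);
    }
}];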
// Called when recording actually starts
- (void)captureOutput:(AVCaptureFileOutput *)output didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray<AVCaptureConnection *> *)connections {}
4. Set up the video preview
previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
previewLayer.frame = self.view.bounds;
[self.view.layer addSublayer:previewLayer];
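Optionally, the layer's videoGravity controls how the video fills the preview; AVLayerVideoGravityResizeAspectFill is a common choice:
// Fill the layer while preserving the aspect ratio (crops the edges if needed)
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;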
5. Start capturing
if (!_session.isRunning){
[_session startRunning];
}
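startRunning is a blocking call that can take a noticeable amount of time, so it is usually kicked off away from the main queue. A sketch of that pattern (the global queue choice is just an example):
// Start the session off the main thread to avoid blocking the UI
dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
    if (!self->_session.isRunning) {
        [self->_session startRunning];
    }
});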
6. Miscellaneous
6.1 Switching between the front and back cameras
- (void)shiftCamera
{
    _isCameraFront = !_isCameraFront;
    AVCaptureDevicePosition position = _isCameraFront ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;

    // Find the camera at the desired position
    AVCaptureDevice *device = nil;
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *tmp in devices) {
        if (tmp.position == position) {
            device = tmp;
            break;
        }
    }
    if (!device) {
        return;
    }

    // Swap the video input inside a configuration block
    NSError *error = nil;
    AVCaptureDeviceInput *newInput = [[AVCaptureDeviceInput alloc] initWithDevice:device error:&error];
    if (!newInput) {
        return;
    }
    [_session beginConfiguration];
    [_session removeInput:_deviceInput];
    if ([_session canAddInput:newInput]) {
        [_session addInput:newInput];
        _deviceInput = newInput;
    }
    [_session commitConfiguration];
}
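Note that devicesWithMediaType: is deprecated as of iOS 10; AVCaptureDeviceDiscoverySession is the recommended way to look up cameras. A sketch of the device lookup with that API:
// iOS 10+ replacement for devicesWithMediaType:
AVCaptureDeviceDiscoverySession *discovery =
    [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                            mediaType:AVMediaTypeVideo
                                                             position:AVCaptureDevicePositionFront];
AVCaptureDevice *device = discovery.devices.firstObject;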
6.2 Flash, focus, and other device settings
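The original post stops at the heading, but the common pattern is the same for all device-level settings: lock the device for configuration, change the property, then unlock. A hedged sketch; the torch mode, focus mode, and focus point below are only example values:
// Lock the device before changing torch/focus settings
AVCaptureDevice *device = _deviceInput.device;
NSError *error = nil;
if ([device lockForConfiguration:&error]) {
    // Torch (the flash LED used as a continuous light)
    if (device.hasTorch && [device isTorchModeSupported:AVCaptureTorchModeOn]) {
        device.torchMode = AVCaptureTorchModeOn;
    }
    // Continuous autofocus at a point of interest (normalized coordinates, (0.5, 0.5) is the center)
    if (device.isFocusPointOfInterestSupported && [device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        device.focusPointOfInterest = CGPointMake(0.5, 0.5);
        device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
    }
    [device unlockForConfiguration];
}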