iOS 实时摄像头获取
2022-03-03 本文已影响0人
Yimmm
需求:在录屏的时候,需要实时获取摄像头内容并且展示于屏幕左下角。
解决方案:使用AVCaptureSession采集视频帧数据,通过回调输出摄像头内容。
1. 新建一个RealTimeCameraView(输出摄像头内容的UIView):
//
// RealTimeCameraView.h
// MyTest
//
// Created by Yim on 2021/8/2.
// Live camera feed view (shown as a picture-in-picture overlay while screen recording).
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

NS_ASSUME_NONNULL_BEGIN

/// A view that captures frames from the device camera with AVCaptureSession
/// and renders them live (both as a preview layer and as per-frame images,
/// so the content also shows up in screen recordings).
@interface RealTimeCameraView : UIView

/// Configures the capture pipeline and starts the camera.
/// @param devicePosition Which camera to use (front or back).
/// @param videoOrientation Orientation of the output video (portrait or landscape).
- (void)setupCameraWithPosition:(AVCaptureDevicePosition)devicePosition onVideoOrientation:(AVCaptureVideoOrientation)videoOrientation;

@end

NS_ASSUME_NONNULL_END
//
// RealTimeCameraView.m
// MyTest
//
// Created by Yim on 2021/8/2.
//
#import "RealTimeCameraView.h"
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
@interface RealTimeCameraView() <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVCaptureSession *captureSession; // Manages the audio/video input and output streams
@property (nonatomic, strong) UIImageView *imageView; // Renders the captured frames (visible in screen recordings)
// @property (nonatomic, strong) CALayer *customLayer; // Former frame output — disabled: setting CALayer.contents per frame caused runaway memory growth
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *prevLayer; // Live camera preview layer
@end
@implementation RealTimeCameraView
#pragma mark - setup
- (instancetype)init
{
    self = [super init];
    if (self) {
    }
    return self;
}

/// Builds the capture pipeline (device discovery -> input -> BGRA video data
/// output) and attaches both a frame-driven UIImageView and a preview layer.
- (void)setupCameraWithPosition:(AVCaptureDevicePosition)devicePosition onVideoOrientation:(AVCaptureVideoOrientation)videoOrientation
{
    // Camera device types to search, so the session picks the multi-lens
    // camera (automatic zoom/lens switching) when available.
    // Note: the microphone is NOT listed — this discovery session is for
    // AVMediaTypeVideo only. Types introduced after the deployment target are
    // guarded with @available: on older iOS the constant would be nil and
    // inserting it into the array would crash.
    NSMutableArray<AVCaptureDeviceType> *deviceTypes = [NSMutableArray arrayWithObjects:
                                                        AVCaptureDeviceTypeBuiltInWideAngleCamera,
                                                        AVCaptureDeviceTypeBuiltInTelephotoCamera, nil];
    if (@available(iOS 10.2, *)) {
        [deviceTypes addObject:AVCaptureDeviceTypeBuiltInDualCamera];
    }
    if (@available(iOS 11.1, *)) {
        [deviceTypes addObject:AVCaptureDeviceTypeBuiltInTrueDepthCamera];
    }
    if (@available(iOS 13.0, *)) {
        [deviceTypes addObject:AVCaptureDeviceTypeBuiltInTripleCamera];
        [deviceTypes addObject:AVCaptureDeviceTypeBuiltInDualWideCamera];
        [deviceTypes addObject:AVCaptureDeviceTypeBuiltInUltraWideCamera];
    }
    AVCaptureDeviceDiscoverySession *discoverySession =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes
                                                               mediaType:AVMediaTypeVideo
                                                                position:devicePosition];
    AVCaptureDevice *cameraDevice = nil;
    for (AVCaptureDevice *device in discoverySession.devices) {
        // Find the camera on the requested side.
        if (device.position == devicePosition) {
            cameraDevice = device;
            break;
        }
    }
    // No matching camera (e.g. Simulator): do nothing instead of crashing.
    if (cameraDevice == nil) {
        return;
    }

    // Surface input-creation failures (e.g. missing camera permission)
    // instead of silently adding a nil input to the session.
    NSError *inputError = nil;
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:cameraDevice
                                                                               error:&inputError];
    if (captureInput == nil) {
        NSLog(@"RealTimeCameraView: failed to create camera input: %@", inputError);
        return;
    }

    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    captureOutput.alwaysDiscardsLateVideoFrames = YES;
    // BGRA frames so the delegate can wrap them in a CGBitmapContext directly.
    captureOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    // Deliver frames on a private serial queue, off the main thread.
    dispatch_queue_t cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
    [captureOutput setSampleBufferDelegate:self queue:cameraQueue];

    self.captureSession = [[AVCaptureSession alloc] init];
    [self.captureSession beginConfiguration];
    if ([self.captureSession canAddInput:captureInput]) {
        [self.captureSession addInput:captureInput];
    }
    if ([self.captureSession canAddOutput:captureOutput]) {
        [self.captureSession addOutput:captureOutput];
    }
    [self.captureSession commitConfiguration];

    // Frame-driven image view. Screen recording captures these frames,
    // unlike the previous CALayer.contents approach (see FIXME history),
    // which also leaked memory until the app crashed.
    self.imageView = [[UIImageView alloc] init];
    self.imageView.frame = self.bounds;
    [self addSubview:self.imageView];

    // On-screen live preview.
    self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.prevLayer.frame = self.bounds;
    self.prevLayer.connection.videoOrientation = videoOrientation;
    self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.layer addSublayer:self.prevLayer];

    // NOTE(review): -startRunning blocks the calling thread until the
    // session is up; consider dispatching it to a background queue if this
    // is called from the main thread.
    [self.captureSession startRunning];
}

#pragma mark - AVCaptureSession delegate
/// Called on the camera queue for every captured frame. Converts the BGRA
/// pixel buffer into a UIImage and hands it to the image view on the main
/// queue (asynchronously, so the capture queue is never blocked).
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Per-frame autorelease pool keeps the steady stream of temporary
    // CG/UIKit objects from accumulating between main-loop drains.
    @autoreleasepool {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        if (imageBuffer == NULL) {
            return;
        }
        // Read-only lock: we never write back into the pixel buffer.
        CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);

        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        // BGRA little-endian premultiplied — matches the output's videoSettings.
        CGContextRef bitmapContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                           kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGColorSpaceRelease(colorSpace);
        CGImageRef frameImage = NULL;
        if (bitmapContext != NULL) {
            // Snapshot the pixels BEFORE unlocking: the buffer's memory may be
            // recycled by the capture pipeline once the lock is released.
            frameImage = CGBitmapContextCreateImage(bitmapContext);
            CGContextRelease(bitmapContext);
        }
        CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
        if (frameImage == NULL) {
            return;
        }
        // .Down compensates for the sensor's native landscape orientation.
        UIImage *image = [UIImage imageWithCGImage:frameImage scale:1.0 orientation:UIImageOrientationDown];
        CGImageRelease(frameImage);

        // Asynchronous hand-off: the old performSelectorOnMainThread:…
        // waitUntilDone:YES stalled the camera queue for a full main-loop
        // turn on every frame. No retain cycle — the block is not stored.
        dispatch_async(dispatch_get_main_queue(), ^{
            self.imageView.image = image;
        });
    }
}
@end
其中,在引用RealTimeCameraView录屏时,使用CALayer的setContents:方法会导致内存一直飙升,最终导致程序崩溃,因此改用在主线程执行setImage:方法来输出图像。
2. 引用RealTimeCameraView
在合适的地方引用RealTimeCameraView
{
// NOTE(review): a zero-size frame renders nothing visible — presumably the
// real code passes the intended bottom-left picture-in-picture rect here; confirm.
RealTimeCameraView * camera = [[RealTimeCameraView alloc] initWithFrame:CGRectMake(0, 0, 0, 0)];
// Recording is forced to landscape, so the camera's orientation parameter
// must use the same landscape direction.
[camera setupCameraWithPosition:AVCaptureDevicePositionFront onVideoOrientation:AVCaptureVideoOrientationLandscapeLeft];
[self addSubview:camera];
}
DEMO演示效果如下:
demo Lookin工程实机演示:
实际运用