iOS Custom Camera
2018-04-26 · 沉默着欢喜丶
(Screenshots WechatIMG2.jpeg and WechatIMG1.jpeg: Figure 1 shows the capture screen with the overlay, Figure 2 the photo-preview screen.)
A custom camera that implements the functionality shown in the two screenshots above.
First, implement the camera's basic functionality: data input and output.
Define the properties:
// The capture device: usually the front camera, the back camera, or the microphone (audio input)
@property (nonatomic) AVCaptureDevice *device;
// AVCaptureDeviceInput represents the input device; it is initialized with an AVCaptureDevice
@property (nonatomic) AVCaptureDeviceInput *input;
// Metadata output (declared here but not added to the session in this example)
@property (nonatomic) AVCaptureMetadataOutput *output;
// Still-image output used to capture the photo
@property (nonatomic) AVCaptureStillImageOutput *ImageOutPut;
// The session ties the input and output together and drives the capture device (camera)
@property (nonatomic) AVCaptureSession *session;
// Preview layer that displays the captured frames in real time
@property (nonatomic) AVCaptureVideoPreviewLayer *previewLayer;
Capturing and displaying the data:
- (void)customCamera{
    self.view.backgroundColor = [UIColor whiteColor];
    // AVMediaTypeVideo means self.device captures video; by default this returns the back camera
    self.device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // Initialize the input with the device
    self.input = [[AVCaptureDeviceInput alloc] initWithDevice:self.device error:nil];
    // Create the output objects
    self.output = [[AVCaptureMetadataOutput alloc] init];
    self.ImageOutPut = [[AVCaptureStillImageOutput alloc] init];
    // Create the session, which ties the input and output together
    self.session = [[AVCaptureSession alloc] init];
    if ([self.session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
        self.session.sessionPreset = AVCaptureSessionPresetHigh;
    }
    if ([self.session canAddInput:self.input]) {
        [self.session addInput:self.input];
    }
    if ([self.session canAddOutput:self.ImageOutPut]) {
        [self.session addOutput:self.ImageOutPut];
    }
    // Initialize the preview layer with self.session. The session drives the input to collect
    // data; the layer renders the captured frames on screen.
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    //228x362
    // self.previewLayer.frame = CGRectMake((kScreenWidth - 228.0 / 375.0) / 2, (kScreenHeight - 362.0 / 667.0) / 2, 228.0 / 375.0, 362.0 / 667.0);
    self.previewLayer.frame = CGRectMake(0, 0, kScreenWidth, kScreenHeight);
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.view.layer addSublayer:self.previewLayer];
    // Start the session
    [self.session startRunning];
    if ([_device lockForConfiguration:nil]) {
        // Automatic flash
        if ([_device isFlashModeSupported:AVCaptureFlashModeAuto]) {
            [_device setFlashMode:AVCaptureFlashModeAuto];
        }
        // Automatic white balance
        if ([_device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeAutoWhiteBalance]) {
            [_device setWhiteBalanceMode:AVCaptureWhiteBalanceModeAutoWhiteBalance];
        }
        [_device unlockForConfiguration];
    }
}
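Two things the post assumes but does not show: the app's Info.plist needs an NSCameraUsageDescription entry (iOS 10+), and camera permission should be requested before the session is built. A minimal sketch, where setupCameraIfAuthorized is a hypothetical wrapper around the customCamera method above:
// Hypothetical wrapper: ask for camera permission first, then build the session.
// (NSCameraUsageDescription must also be present in Info.plist.)
- (void)setupCameraIfAuthorized {
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        dispatch_async(dispatch_get_main_queue(), ^{
            if (!granted) {
                NSLog(@"camera access denied");
                return;
            }
            [self customCamera];
            // Apple also recommends starting the session off the main thread, since
            // -startRunning blocks until the session is up; e.g. inside customCamera:
            // dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
            //     [self.session startRunning];
            // });
        });
    }];
}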
At this point we already have a full-screen camera. To get the effect of Figure 1, add an image on top of self.previewLayer (0.7 alpha around the edges, fully transparent in the middle) plus two buttons, shoot and cancel.
_bgImage = [[UIImageView alloc] init];
[self.view addSubview:_bgImage];
[_bgImage mas_makeConstraints:^(MASConstraintMaker *make) {
    make.edges.mas_equalTo(self.view);
}];
_bgImage.image = [UIImage imageNamed:@"Mine_camera_bg"];
_bgImage.alpha = 0.7;
_bgImage.userInteractionEnabled = YES;

_PhotoButton = [UIButton buttonWithType:UIButtonTypeCustom];
[_PhotoButton setImage:[UIImage imageNamed:@"Mine_camera_button"] forState:UIControlStateNormal];
[_PhotoButton addTarget:self action:@selector(shutterCamera) forControlEvents:UIControlEventTouchUpInside];
[_bgImage addSubview:_PhotoButton];
[_PhotoButton mas_makeConstraints:^(MASConstraintMaker *make) {
    make.centerX.mas_equalTo(_bgImage.mas_centerX);
    make.bottom.mas_offset(-41);
    make.size.mas_equalTo(CGSizeMake(70, 70));
}];

UIButton *quitBtn = [UIButton buttonWithType:UIButtonTypeCustom];
[quitBtn setTitle:@"取消" forState:UIControlStateNormal];
[quitBtn setTitleColor:Colorffffff forState:UIControlStateNormal];
[quitBtn.titleLabel setFont:[UIFont systemFontOfSize:16]];
[_bgImage addSubview:quitBtn];
[quitBtn mas_makeConstraints:^(MASConstraintMaker *make) {
    make.centerY.mas_equalTo(_PhotoButton.mas_centerY);
    make.right.mas_equalTo(_PhotoButton.mas_left).offset(-60);
    make.size.mas_equalTo(CGSizeMake(60, 50));
}];
WEAKSELF
[quitBtn addTapBlock:^(id obj) {
    STRONGSELF
    [strongSelf dismissViewControllerAnimated:YES completion:nil];
}];
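Masonry's mas_makeConstraints, the Colorffffff macro, the addTapBlock: category and the WEAKSELF / STRONGSELF macros are project-level helpers that the post takes for granted. If you need stand-ins for the two macros, they typically expand to the usual weak/strong dance (an assumption, the post does not show its own definitions):
// Common definitions for the weak/strong pattern used in the tap block above (assumed)
#define WEAKSELF   __weak typeof(self) weakSelf = self;
#define STRONGSELF __strong typeof(weakSelf) strongSelf = weakSelf;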
Tapping the shoot button captures the photo.
- (void)shutterCamera
{
    AVCaptureConnection *videoConnection = [self.ImageOutPut connectionWithMediaType:AVMediaTypeVideo];
    if (!videoConnection) {
        NSLog(@"take photo failed!");
        return;
    }
    videoConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeStandard;
    [self.ImageOutPut captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (imageDataSampleBuffer == NULL) {
            return;
        }
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        self.image = [UIImage imageWithData:imageData];
        [self.session stopRunning];
        // Show the captured image on the preview screen
        [self showImageAction];
    }];
}
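AVCaptureStillImageOutput has been deprecated since iOS 10 in favour of AVCapturePhotoOutput. A minimal sketch of the equivalent capture path, where photoOutput is a hypothetical property replacing self.ImageOutPut and the view controller adopts AVCapturePhotoCaptureDelegate:
// Session setup (instead of adding self.ImageOutPut):
self.photoOutput = [[AVCapturePhotoOutput alloc] init];
if ([self.session canAddOutput:self.photoOutput]) {
    [self.session addOutput:self.photoOutput];
}

// Capturing (instead of captureStillImageAsynchronouslyFromConnection:):
AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
if ([self.photoOutput.supportedFlashModes containsObject:@(AVCaptureFlashModeAuto)]) {
    settings.flashMode = AVCaptureFlashModeAuto;
}
[self.photoOutput capturePhotoWithSettings:settings delegate:self];

// AVCapturePhotoCaptureDelegate callback (iOS 11+):
- (void)captureOutput:(AVCapturePhotoOutput *)output
didFinishProcessingPhoto:(AVCapturePhoto *)photo
                error:(NSError *)error {
    if (error || photo == nil) { return; }
    self.image = [UIImage imageWithData:[photo fileDataRepresentation]];
    [self.session stopRunning];
    [self showImageAction];
}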
This produces a full-screen photo. To build the preview screen in Figure 2, the image needs a few processing steps:
1. The session was configured with AVCaptureSessionPresetHigh, meaning the output uses the highest quality the current device supports, so the captured image does not match the screen size (on the iPhone 8 Plus I used, for example, it comes out at 1080*1992). The first step is therefore to resize the image to the screen size.
2. Next, crop out the region that corresponds to the transparent area in the middle of Figure 1.
3. Finally, rotate the image 90 degrees to get the landscape image shown in Figure 2.
// Resize the original image to the screen size
self.image = [self image:self.image scaleToSize:CGSizeMake(kScreenWidth, kScreenHeight)];
// Crop out a fixed-size region (the scaling context below draws at 3x, so work in 3x coordinates)
CGSize oldImageSize = CGSizeMake(self.image.size.width * 3, self.image.size.height * 3);
CGFloat newImageWidth = 228.0 / 375.0 * oldImageSize.width;
CGFloat newImageHeight = 362.0 / 667.0 * oldImageSize.height;
UIImage *scaleImage = [self imageFromImage:self.image inRect:CGRectMake((oldImageSize.width - newImageWidth) / 2, (oldImageSize.height - newImageHeight) / 2, newImageWidth, newImageHeight)];
// Rotate the image
self.showImage = [scaleImage imageByRotateLeft90];
The resulting self.showImage is the final image we want.
The helper methods used above:
// Scale an image to the given size
- (UIImage *)image:(UIImage *)imageI scaleToSize:(CGSize)size{
    /*
     UIGraphicsBeginImageContextWithOptions(CGSize size, BOOL opaque, CGFloat scale)
     CGSize size:   the size of the bitmap that will be created
     BOOL opaque:   YES means the bitmap is opaque, NO means it keeps an alpha channel
     CGFloat scale: the scale factor; 0 means use the device's screen scale
     The resulting bitmap backs a UIImage object.
     */
    UIGraphicsBeginImageContextWithOptions(size, NO, 3.0); // Draw into a 3x canvas so cropping on a Retina screen does not lose pixels
    [imageI drawInRect:CGRectMake(0, 0, size.width, size.height)];
    UIImage *scaledImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return scaledImage;
}
Note that the canvas here is drawn at 3x: the returned image reports the normal screen size in points, but when computing the crop rect the width and height both have to be multiplied by 3. For example, on a 375x667-point screen the pixel canvas is 1125x2001, so the crop is 228.0/375.0 * 1125 = 684 pixels wide and 362.0/667.0 * 2001 = 1086 pixels high, with its origin at ((1125-684)/2, (2001-1086)/2) = (220.5, 457.5).
// Crop a rect (in pixel coordinates of the backing CGImage) out of an image
- (UIImage *)imageFromImage:(UIImage *)imageI inRect:(CGRect)rect{
    CGImageRef sourceImageRef = [imageI CGImage];
    CGImageRef newImageRef = CGImageCreateWithImageInRect(sourceImageRef, rect);
    UIImage *newImage = [UIImage imageWithCGImage:newImageRef];
    CGImageRelease(newImageRef); // CGImageCreateWithImageInRect returns a +1 reference that ARC does not manage
    return newImage;
}
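imageByRotateLeft90 is not defined in the post; it most likely comes from a UIImage category such as YYKit's UIImage+YYAdd. If your project does not already include such a category, a minimal stand-in, assuming all it needs to do is redraw the bitmap rotated 90° counter-clockwise, could look like this:
// Minimal stand-in for the rotation helper used above (assumed implementation)
@interface UIImage (RotateLeft90Sketch)
- (UIImage *)imageByRotateLeft90;
@end

@implementation UIImage (RotateLeft90Sketch)
- (UIImage *)imageByRotateLeft90 {
    // The rotated canvas swaps width and height
    CGSize newSize = CGSizeMake(self.size.height, self.size.width);
    UIGraphicsBeginImageContextWithOptions(newSize, NO, self.scale);
    CGContextRef ctx = UIGraphicsGetCurrentContext();
    // Rotate around the centre; in UIKit's flipped coordinate system a negative
    // angle appears counter-clockwise on screen
    CGContextTranslateCTM(ctx, newSize.width / 2.0, newSize.height / 2.0);
    CGContextRotateCTM(ctx, -M_PI_2);
    [self drawInRect:CGRectMake(-self.size.width / 2.0, -self.size.height / 2.0,
                                self.size.width, self.size.height)];
    UIImage *result = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return result;
}
@end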
Finally, add the rest of the Figure 2 UI and you are done.
To wrap up, here are a few more camera features.
Switching between the front and back cameras:
- (void)changeCamera{
    NSUInteger cameraCount = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
    if (cameraCount > 1) {
        NSError *error;
        // Flip animation while the cameras swap
        CATransition *animation = [CATransition animation];
        animation.duration = .5f;
        animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut];
        animation.type = @"oglFlip";
        AVCaptureDevice *newCamera = nil;
        AVCaptureDeviceInput *newInput = nil;
        AVCaptureDevicePosition position = [[_input device] position];
        if (position == AVCaptureDevicePositionFront){
            newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
            animation.subtype = kCATransitionFromLeft;
        } else {
            newCamera = [self cameraWithPosition:AVCaptureDevicePositionFront];
            animation.subtype = kCATransitionFromRight;
        }
        // Pass &error so the failure branch below can actually report it
        newInput = [AVCaptureDeviceInput deviceInputWithDevice:newCamera error:&error];
        [self.previewLayer addAnimation:animation forKey:nil];
        if (newInput != nil) {
            [self.session beginConfiguration];
            [self.session removeInput:_input];
            if ([self.session canAddInput:newInput]) {
                [self.session addInput:newInput];
                self.input = newInput;
                // Keep self.device in sync so the focus/flash configuration below targets the active camera
                self.device = newCamera;
            } else {
                // Fall back to the previous input if the new one cannot be added
                [self.session addInput:self.input];
            }
            [self.session commitConfiguration];
        } else if (error) {
            NSLog(@"toggle camera failed, error = %@", error);
        }
    }
}
Toggling the flash on and off:
- (void)FlashOn{
    if ([_device lockForConfiguration:nil]) {
        if (_isflashOn) {
            if ([_device isFlashModeSupported:AVCaptureFlashModeOff]) {
                [_device setFlashMode:AVCaptureFlashModeOff];
                _isflashOn = NO;
                [_flashButton setTitle:@"闪光灯关" forState:UIControlStateNormal];
            }
        } else {
            if ([_device isFlashModeSupported:AVCaptureFlashModeOn]) {
                [_device setFlashMode:AVCaptureFlashModeOn];
                _isflashOn = YES;
                [_flashButton setTitle:@"闪光灯开" forState:UIControlStateNormal];
            }
        }
        [_device unlockForConfiguration];
    }
}
The helper used above to look up the camera at a given position:
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if (device.position == position) return device;
    }
    return nil;
}
Tap to focus:
- (void)focusGesture:(UITapGestureRecognizer*)gesture{
    CGPoint point = [gesture locationInView:gesture.view];
    [self focusAtPoint:point];
}
- (void)focusAtPoint:(CGPoint)point{
    CGSize size = self.view.bounds.size;
    // Convert the tap from view coordinates to the device's point-of-interest space
    // ((0,0) top-left to (1,1) bottom-right, relative to the sensor's landscape orientation)
    CGPoint focusPoint = CGPointMake(point.y / size.height, 1 - point.x / size.width);
    NSError *error;
    if ([self.device lockForConfiguration:&error]) {
        if ([self.device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
            [self.device setFocusPointOfInterest:focusPoint];
            [self.device setFocusMode:AVCaptureFocusModeAutoFocus];
        }
        if ([self.device isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
            [self.device setExposurePointOfInterest:focusPoint];
            [self.device setExposureMode:AVCaptureExposureModeAutoExpose];
        }
        [self.device unlockForConfiguration];
        // _focusView is a small indicator view that briefly pops up where the user tapped
        _focusView.center = point;
        _focusView.hidden = NO;
        [UIView animateWithDuration:0.3 animations:^{
            _focusView.transform = CGAffineTransformMakeScale(1.25, 1.25);
        } completion:^(BOOL finished) {
            [UIView animateWithDuration:0.5 animations:^{
                _focusView.transform = CGAffineTransformIdentity;
            } completion:^(BOOL finished) {
                _focusView.hidden = YES;
            }];
        }];
    }
}
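The post does not show where the tap gesture is attached, and the manual coordinate conversion in focusAtPoint: can also be delegated to the preview layer. A short sketch, assuming the gesture is wired up in customCamera or viewDidLoad:
// Attach the tap-to-focus gesture to the camera view
UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self
                                                                      action:@selector(focusGesture:)];
[self.view addGestureRecognizer:tap];

// Alternative inside focusAtPoint:, letting AVFoundation do the coordinate maths:
CGPoint focusPoint = [self.previewLayer captureDevicePointOfInterestForPoint:point];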