Swift.一个自定义蒙层界面的相机
2018-11-22 本文已影响12人
王四猫
效果图.gif
实现方式:
-
新建一个ViewController,在其中使用AVFoundation框架中AVCaptureSession,AVCaptureVideoPreviewLayer,AVCaptureDevice实现前置相机功能.
-
绘制自定义蒙层View,实现页面效果.
-
遵循AVCaptureVideoDataOutputSampleBufferDelegate.实现拍照功能.
-
新建ViewController实现photo预览
-
使用protocol实现相片回调.
1.实现前置相机功能.
/// Set up the capture session for the front-facing camera, then start configuration.
private func drawCamera() {
    /// .photo preset: output quality suitable for still captures
    captureSession.sessionPreset = AVCaptureSession.Preset.photo
    /// Discover the generic wide-angle camera for video media on the front side
    /// (change `.front` to `.back` for the rear camera)
    let availableDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .front).devices
    /// Guard against hardware without a front camera (e.g. the Simulator) —
    /// the original assigned `.first` unchecked and crashed later in AVCaptureDeviceInput(device:)
    guard let frontCamera = availableDevices.first else {
        print("No front-facing camera available")
        return
    }
    captureDevice = frontCamera
    beginSession()
}
/// Configure the session: attach the camera input, the preview layer,
/// and the video data output, then start the session.
private func beginSession() {
    captureSession.beginConfiguration()
    do {
        /// Wrap the front camera as the session's input stream
        let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
        /// Check capability first — addInput(_:) raises an exception for unsupported inputs
        /// (mirrors the canAddOutput check below)
        if captureSession.canAddInput(captureDeviceInput) {
            captureSession.addInput(captureDeviceInput)
        }
    } catch {
        print(error.localizedDescription)
    }
    /// Live camera preview layer
    let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    self.previewLayer = previewLayer
    self.view.layer.addSublayer(self.previewLayer)
    /// Use bounds, not frame: a sublayer's frame is expressed in the parent's coordinate
    /// space, and view.layer.frame may have a non-zero origin
    self.previewLayer.frame = self.view.layer.bounds
    /// Scale the preview to fill the layer, cropping as needed
    self.previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
    /// Output stream that delivers raw frames for the photo capture
    let dataOutput = AVCaptureVideoDataOutput()
    /// BGRA pixel format so frames convert easily to CGImage/UIImage
    dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)] as [String: Any]
    /// Drop frames that arrive while an older frame is still being processed
    /// (true is also the default); setting false sharply increases memory use
    dataOutput.alwaysDiscardsLateVideoFrames = true
    if captureSession.canAddOutput(dataOutput) {
        captureSession.addOutput(dataOutput)
    }
    /// Deliver sample buffers on a background queue; set the delegate BEFORE the
    /// session starts so no initial frames are missed
    let queue = DispatchQueue(label: "com.brianadvent.captureQueue")
    dataOutput.setSampleBufferDelegate(self, queue: queue)
    /// Everything between beginConfiguration() and commitConfiguration() is applied atomically here
    captureSession.commitConfiguration()
    captureSession.startRunning()
}
2.绘制自定义蒙层View,实现页面效果.
/// Add the dimmed overlay and punch a transparent window where clearView sits,
/// by masking the overlay with four opaque strips around the hole.
private func drawCoverView() {
    self.view.addSubview(overlayView)
    self.view.addSubview(clearView)
    let maskLayer = CAShapeLayer()
    let path = CGMutablePath()
    let hole = clearView.frame
    let cover = overlayView.frame
    // Left strip
    path.addRect(CGRect(x: 0, y: 0, width: hole.origin.x, height: cover.size.height))
    // Right strip
    path.addRect(CGRect(x: hole.maxX, y: 0, width: cover.size.width - hole.maxX, height: cover.size.height))
    // Top strip
    path.addRect(CGRect(x: 0, y: 0, width: cover.size.width, height: hole.origin.y))
    // Bottom strip. The original computed `height - origin.y + hole.height` (missing
    // parentheses); the space left below the hole is height - (origin.y + hole.height),
    // i.e. height - hole.maxY.
    path.addRect(CGRect(x: 0, y: hole.maxY, width: cover.size.width, height: cover.size.height - hole.maxY))
    maskLayer.path = path
    /// Masking the overlay leaves the un-covered hole transparent
    self.overlayView.layer.mask = maskLayer
}
3.遵循AVCaptureVideoDataOutputSampleBufferDelegate.实现拍照功能.
// MARK: - Camera video-output delegate
extension EWPhotoPickerViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
    /// Delegate callback invoked for every captured frame. When `takePhoto` has been
    /// set, the current frame is converted to a UIImage and pushed to the preview screen.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard takePhoto else { return }
        /// Reset the flag immediately so only one frame is captured per request
        takePhoto = false
        guard let capturedImage = self.getImageFromSampleBuffer(buffer: sampleBuffer) else { return }
        /// UI work (navigation push, session teardown trigger) happens on the main queue
        DispatchQueue.main.async {
            let showController = EWPhotoShowViewController()
            showController.photoShowImageView.image = capturedImage
            showController.delegate = self.delegate
            self.navigationController?.pushViewController(showController, animated: true)
            self.stopCaptureSession()
        }
    }
}
4. 新建ViewController实现photo预览
/// Preview screen shown after a frame has been captured; hands the photo back via delegate.
open class EWPhotoShowViewController: UIViewController {
    /// Callback target. Declared `weak` to avoid a retain cycle with the presenting
    /// controller (the protocol is @objc/NSObjectProtocol-bound, so weak is legal).
    public weak var delegate: EWPhotoFinishDelegate?
    /// Full-screen image view displaying the captured photo.
    public let photoShowImageView: UIImageView = {
        let imageView = UIImageView(frame: UIScreen.main.bounds)
        imageView.contentMode = .center
        return imageView
    }()
    override open func viewDidLoad() {
        super.viewDidLoad()
        self.view.addSubview(photoShowImageView)
        self.view.backgroundColor = UIColor.black
        self.navigationItem.rightBarButtonItem = UIBarButtonItem(title: "完成", style: .plain, target: self, action: #selector(onClickCompleteButton))
    }
    /// Confirm button handler: forward the photo through the delegate.
    /// The original force-unwrapped both `delegate!` and `image!` (crash if either is nil)
    /// and called responds(to:) for a required protocol method, which always succeeds.
    @objc private func onClickCompleteButton() {
        guard let delegate = delegate, let image = photoShowImageView.image else { return }
        delegate.photo(self, didFinished: image)
    }
}
5. 使用protocol实现相片回调.
/// Callback protocol for returning the photo after the user confirms the preview.
@objc public protocol EWPhotoFinishDelegate : NSObjectProtocol {
/// Called when the user taps the confirm button on the preview screen.
func photo(_ viewController: EWPhotoShowViewController, didFinished photo: UIImage)
}
// MARK: - EWPhotoFinishDelegate
extension ViewController: EWPhotoFinishDelegate {
    /// Receive the confirmed photo: dismiss the camera flow, then display the image.
    func photo(_ viewController: EWPhotoShowViewController, didFinished photo: UIImage) {
        let cameraNavigation = viewController.navigationController
        cameraNavigation?.dismiss(animated: true) {
            self.showImageView.image = photo
        }
    }
}
总结:
主要运用了AVFoundation中AVCaptureSession相关知识,以这种方式调用本机硬件设备,可以实现自定义相机界面,还是有很多情景需要类似功能的.
还有很多属性可以添加,例如闪光灯效果,点击聚焦,点击切换镜头等功能,在这里没有实现,如果有需要的朋友可以私信我给加上.
demo地址: EWPhotoPicker
有问题欢迎探讨.