iOS Video Recording: AVCaptureFileOutput

2024-08-01  大成小栈

References:
https://www.jianshu.com/p/e3ecec9a07cb
https://www.jianshu.com/p/1406715deabf
https://blog.51cto.com/u_16175471/10250749
https://zhuanlan.zhihu.com/p/589761635?utm_id=0
https://stackoverflow.com/questions/43681929/didoutputsamplebuffer-delegate-never-gets-called-ios-swift-3

When recording video, there are two ways to get the captured data out of the session: AVCaptureFileOutput and AVCaptureVideoDataOutput.

In general the two do not work together in the same session. For example, with the following code, recording cannot be triggered on some device models:

import Photos
import PhotosUI
import AVFoundation
import QuartzCore // for CATransition / CAAnimationDelegate
import Then       // third-party "Then" library, provides the .then { } configuration helper used below

class VideoRecorder: NSObject {
    
    // Finish/progress callback
    typealias VideoRecorderCallback = (_ didFinish: Bool, _ videoUrl: URL, _ progress: CGFloat) -> Void
    // Camera-swap callback
    typealias SwapCallback = ((_ position: AVCaptureDevice.Position) -> Void)
    // Session-error callback
    typealias ErrorCallback = () -> Void
    
    // Fired during recording and when recording finishes
    var recordAction: VideoRecorderCallback?
    // Fired when the session hits an error
    var errorAction: ErrorCallback?
    // Fired after the camera has been swapped
    var swapAction: SwapCallback?
    
    // Capture session: coordinates data flow between inputs and outputs
    let captureSession = AVCaptureSession()
    // Writes the captured video to a file
    let fileOut = AVCaptureMovieFileOutput()
    // Video input
    private var videoInput: AVCaptureDeviceInput?
    // Video data output
    private lazy var videoDataOutput = AVCaptureVideoDataOutput().then {
        $0.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
        $0.setSampleBufferDelegate(self, queue: serialQueue)
    }
    // Audio data output
    private lazy var audioDataOutput = AVCaptureAudioDataOutput().then {
        $0.setSampleBufferDelegate(self, queue: serialQueue)
    }
    // Capture device: front or back camera
    var camera: AVCaptureDevice?
    // Recording timer
    private var timer: Timer?
    // Destination file URL
    private var fileUrl: URL
    // Stop automatically once this duration is reached
    var limitDuration: CGFloat = 59.9
    // Frame rate (as a frame duration)
    var frameDuration = CMTime(value: 1, timescale: 25)
    // Bit rate
    var bitRate: Int = 2000 * 1024
    // Camera is currently being swapped
    var isSwapping: Bool = false
    // Recording is about to start
    var toStartRecording: Bool = false
    // Recording is about to stop
    var toStopRecording: Bool = false
    // Preview layer
    lazy var previewLayer: AVCaptureVideoPreviewLayer = .init(session: captureSession)
    // Serial queue
    private let serialQueue = DispatchQueue(label: "VideoRecorderQueue")

    // Whether recording is in progress
    var isRecording: Bool {
        fileOut.isRecording
    }
    
    // MARK: - LifeCycle
    
    deinit {
        sessionStopRunning()
        NotificationCenter.default.removeObserver(self)
        print("Running ☠️ \(Self.self) 💀 deinit")
    }
    
    init(fileUrl: URL) {
        self.fileUrl = fileUrl
        super.init()
        clearOldFile()
        initCaptureSession()
        sessionStartRunning()
    }
    
    private func clearOldFile() {
        if FileManager.default.fileExists(atPath: fileUrl.path) {
            try? FileManager.default.removeItem(at: fileUrl)
        }
    }
    
    private func initCaptureSession() {
        guard let newCamera = getCamera(with: .front) else {
            errorAction?()
            return
        }
        camera = newCamera
        serialQueue.async { [weak self] in
            guard let self else { return }
            captureSession.beginConfiguration()
            configureSessionPreset(for: newCamera)
            sessionAddOutput()
            sessionAddInput(for: newCamera)
            captureSession.commitConfiguration()
        }
        
        do {
            try newCamera.lockForConfiguration()
            if newCamera.isFocusPointOfInterestSupported && newCamera.isFocusModeSupported(.autoFocus) {
                newCamera.focusMode = .autoFocus
            }
            newCamera.unlockForConfiguration()
        } catch {
            errorAction?()
        }
    }
    
    private func configureSessionPreset(for device: AVCaptureDevice) {
        if device.supportsSessionPreset(.hd1280x720) {
            captureSession.sessionPreset = .hd1280x720
        } else {
            captureSession.sessionPreset = .high
        }
    }
    
    private func sessionRemoveInputs() {
        if let allInputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in allInputs {
                captureSession.removeInput(input)
            }
        }
    }
    
    private func sessionAddOutput() {
        // Movie file output
        addOutput(fileOut)
        // Bit rate for the output video (no effect yet: the video connection
        // is created only after an input is added; re-applied in sessionAddInput)
        setBitRate(fileOut: fileOut)
        // Video data stream
        addOutput(videoDataOutput)
        // Audio data stream
        addOutput(audioDataOutput)
    }
    
    private func sessionAddInput(for camera: AVCaptureDevice) {
        // Add the audio device
        if let audioDevice = AVCaptureDevice.default(for: .audio) {
            addInput(for: audioDevice)
        }
        // Add the camera
        configCamera(camera)
        addInput(for: camera)
        configConnection(for: camera)
        // Re-apply the bit-rate settings: the video connection only exists
        // once both the camera input and the movie file output are attached.
        setBitRate(fileOut: fileOut)
    }
    
    private func configConnection(for camera: AVCaptureDevice) {
        if let connection = videoDataOutput.connection(with: .video) {
            if connection.isVideoMirroringSupported {
                connection.automaticallyAdjustsVideoMirroring = false
                connection.isVideoMirrored = camera.position == .front
            }
            if connection.isVideoStabilizationSupported {
                connection.preferredVideoStabilizationMode = .cinematic
            }
        }
    }
    
    private func addInput(for device: AVCaptureDevice) {
        do {
            let input = try AVCaptureDeviceInput(device: device)
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                // Keep a reference to the camera input (skip the audio input)
                if device.hasMediaType(.video) {
                    videoInput = input
                }
            } else {
                errorAction?()
            }
        } catch {
            errorAction?()
        }
    }
    
    private func addOutput(_ output: AVCaptureOutput) {
        if captureSession.canAddOutput(output) {
            captureSession.addOutput(output)
        } else {
            errorAction?()
        }
    }
    
    private func configCamera(_ camera: AVCaptureDevice) {
        do {
            try camera.lockForConfiguration()
            camera.activeVideoMinFrameDuration = frameDuration
            camera.activeVideoMaxFrameDuration = frameDuration
            
            if camera.isSmoothAutoFocusSupported {
                camera.isSmoothAutoFocusEnabled = true
            }
            camera.unlockForConfiguration()
        } catch {
            errorAction?()
        }
    }
    
    private func setBitRate(fileOut: AVCaptureMovieFileOutput) {
        if let connection = fileOut.connection(with: .video) {
            let compressionSettings: [String: Any] = [AVVideoAverageBitRateKey: bitRate]
            let codecSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecType.h264,
                                                AVVideoCompressionPropertiesKey: compressionSettings]
            fileOut.setOutputSettings(codecSettings, for: connection)
        }
    }
    
    // MARK: - timer
    
    func resumeTimer() {
        cancelTimer()
        timer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
            guard let self = self else { return }
            let duration: CGFloat = fileOut.recordedDuration.seconds
            if duration >= limitDuration {
                stopRecording()
            } else {
                recordAction?(false, fileUrl, duration / limitDuration)
            }
        }
    }

    func cancelTimer() {
        timer?.invalidate()
        timer = nil
    }
    
    // MARK: - Actions
    
    func startRecording() {
        if !isRecording, !toStartRecording {
            if !captureSession.isRunning {
                sessionStartRunning()
            }
            toStartRecording = true
            resumeTimer()
        }
    }

    func stopRecording() {
        if isRecording, !toStopRecording {
            toStopRecording = true
            cancelTimer()
        }
    }

    func sessionStartRunning() {
        serialQueue.async { [weak self] in
            guard let self else { return }
            if !captureSession.isRunning {
                captureSession.startRunning()
            }
        }
    }

    func sessionStopRunning() {
        stopRecording()
        captureSession.stopRunning()
    }
    
    // MARK: - swap Camera
    
    private func getCamera(with position: AVCaptureDevice.Position) -> AVCaptureDevice? {
        let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                                  mediaType: .video,
                                                                   position: .unspecified)
        for item in discoverySession.devices where item.position == position {
            return item
        }
        return nil
    }
    
    private func transAnimate() {
        let transition = CATransition()
        transition.duration = 0.4
        transition.delegate = self
        transition.timingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.easeInEaseOut)
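        // Note: "flip" is an undocumented CATransition type (the documented ones are fade, moveIn, push and reveal).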
        transition.type = CATransitionType(rawValue: "flip")
        if camera?.position == .front {
            transition.subtype = .fromLeft
        } else {
            transition.subtype = .fromRight
        }
        previewLayer.add(transition, forKey: "changeCamera")
    }
    
    func swapCamera(callback: ((_ position: AVCaptureDevice.Position) -> Void)?) {
        guard !isSwapping, let videoInput = videoInput else {
            return
        }
        
        isSwapping = true
        captureSession.stopRunning()
        swapAction = callback
        
        serialQueue.sync { [weak self] in
            guard let self else { return }
            captureSession.beginConfiguration()
            sessionRemoveInputs()
            let toPosition: AVCaptureDevice.Position = videoInput.device.position == .back ? .front : .back
            if let newCamera = getCamera(with: toPosition) {
                camera = newCamera
                sessionAddInput(for: newCamera)
            }
            captureSession.commitConfiguration()
        }
        
        transAnimate()
    }
    
    // MARK: - flash Light
    
    func setFlash(callback: ((_ torchMode: AVCaptureDevice.TorchMode) -> Void)?) {
        guard let camera = getCamera(with: .back) else { return }
        
        do {
            try camera.lockForConfiguration()
            if camera.torchMode == AVCaptureDevice.TorchMode.off {
                camera.torchMode = AVCaptureDevice.TorchMode.on
                callback?(.on)
            } else {
                camera.torchMode = AVCaptureDevice.TorchMode.off
                callback?(.off)
            }
            camera.unlockForConfiguration()
        } catch let error as NSError {
            print("setFlash Error: \(error)")
        }
    }
    
}

extension VideoRecorder: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
    
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard output == videoDataOutput else { return }
        
        if toStartRecording {
            toStartRecording = false
            fileOut.startRecording(to: fileUrl, recordingDelegate: self)
            print("当前线程: \(Thread.current)")
            DispatchQueue.main.async { [weak self] in
                self?.resumeTimer()
            }
        } else if isRecording && toStopRecording {
            toStopRecording = false
            fileOut.stopRecording()
            DispatchQueue.main.async { [weak self] in
                self?.cancelTimer()
            }
        }
    }
}

// MARK: - CAAnimationDelegate
extension VideoRecorder: CAAnimationDelegate {
    
    func animationDidStart(_ anim: CAAnimation) {
        sessionStartRunning()
    }
    
    func animationDidStop(_ anim: CAAnimation, finished flag: Bool) {
        isSwapping = false
        if let position = videoInput?.device.position {
            swapAction?(position)
        }
    }
}

// MARK: - AVCaptureFileOutputRecordingDelegate

extension VideoRecorder: AVCaptureFileOutputRecordingDelegate {
    
    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
        recordAction?(false, fileURL, 0.0)
        if let connection = previewLayer.connection, connection.isVideoOrientationSupported {
            connection.videoOrientation = .portrait
        }
    }
    
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        let duration: CGFloat = fileOut.recordedDuration.seconds
        recordAction?(true, outputFileURL, duration / limitDuration)
    }
}
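
The failure mode: on some device and OS combinations, AVCaptureMovieFileOutput and AVCaptureVideoDataOutput do not cooperate in a single session. Either the session refuses the second output (canAddOutput(_:) returns false), or both outputs are accepted but the data output's sample-buffer delegate never fires; since the code above only calls fileOut.startRecording from inside that delegate, recording then never starts. Below is a small illustrative helper (not part of the class above) that at least surfaces the first case during configuration:

import AVFoundation

// Illustrative sketch: attach both outputs and log which one the session rejects.
func attachOutputs(to session: AVCaptureSession,
                   fileOut: AVCaptureMovieFileOutput,
                   dataOut: AVCaptureVideoDataOutput) {
    session.beginConfiguration()
    defer { session.commitConfiguration() }

    if session.canAddOutput(fileOut) {
        session.addOutput(fileOut)
    } else {
        print("Session rejected AVCaptureMovieFileOutput")
    }
    if session.canAddOutput(dataOut) {
        session.addOutput(dataOut)
    } else {
        // Hit on devices where the two outputs cannot share a session.
        print("Session rejected AVCaptureVideoDataOutput: it cannot coexist with the movie file output here")
    }
}

If both outputs are accepted but captureOutput(_:didOutput:from:) stays silent, you are hitting the limitation discussed in the StackOverflow link above, which is exactly why the next version drops the data output.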

So let's remove AVCaptureVideoDataOutput and keep only the AVCaptureMovieFileOutput approach:

import Photos
import PhotosUI
import AVFoundation
import QuartzCore // for CATransition / CAAnimationDelegate

class VideoRecorder: NSObject {
    
    // Finish/progress callback
    typealias VideoRecorderCallback = (_ didFinish: Bool, _ videoUrl: URL, _ progress: CGFloat) -> Void
    // Camera-swap callback
    typealias SwapCallback = ((_ position: AVCaptureDevice.Position) -> Void)
    // Session-error callback
    typealias ErrorCallback = () -> Void
    
    // Fired during recording and when recording finishes
    var recordAction: VideoRecorderCallback?
    // Fired when the session hits an error
    var errorAction: ErrorCallback?
    // Fired after the camera has been swapped
    var swapAction: SwapCallback?
    
    // Capture session: coordinates data flow between inputs and outputs
    let captureSession = AVCaptureSession()
    // Writes the captured video to a file
    let fileOut = AVCaptureMovieFileOutput()
    // Video input
    private var videoInput: AVCaptureDeviceInput?
    // Preview layer
    lazy var previewLayer: AVCaptureVideoPreviewLayer = .init(session: captureSession)
    // Serial queue
    private let serialQueue = DispatchQueue(label: "VideoRecorderQueue")
    
    // Capture device: front or back camera
    var camera: AVCaptureDevice?
    // Recording timer
    private var timer: Timer?
    // Destination file URL
    private var fileUrl: URL
    // Stop automatically once this duration is reached
    var limitDuration: CGFloat = 59.9
    // Frame rate (as a frame duration)
    var frameDuration = CMTime(value: 1, timescale: 25)
    // Bit rate
    var bitRate: Int = 2000 * 1024
    // Camera is currently being swapped
    var isSwapping: Bool = false
    
    // Whether recording is in progress
    var isRecording: Bool = false
    
    // MARK: - LifeCycle
    
    deinit {
        sessionStopRunning()
        NotificationCenter.default.removeObserver(self)
        print("Running ☠️ \(Self.self) 💀 deinit")
    }
    
    init(fileUrl: URL) {
        self.fileUrl = fileUrl
        super.init()
        clearOldFile()
        initCaptureSession()
        sessionStartRunning()
    }
    
    private func clearOldFile() {
        if FileManager.default.fileExists(atPath: fileUrl.path) {
            try? FileManager.default.removeItem(at: fileUrl)
        }
    }
    
    private func initCaptureSession() {
        guard let newCamera = getCamera(with: .front) else {
            errorAction?()
            return
        }
        camera = newCamera
        serialQueue.async { [weak self] in
            guard let self else { return }
            captureSession.beginConfiguration()
            configureSessionPreset(for: newCamera)
            sessionAddOutput()
            sessionAddInput(for: newCamera)
            captureSession.commitConfiguration()
        }
        
        do {
            try newCamera.lockForConfiguration()
            if newCamera.isFocusPointOfInterestSupported && newCamera.isFocusModeSupported(.autoFocus) {
                newCamera.focusMode = .autoFocus
            }
            newCamera.unlockForConfiguration()
        } catch {
            errorAction?()
        }
    }
    
    private func configureSessionPreset(for device: AVCaptureDevice) {
        if device.supportsSessionPreset(.hd1280x720) {
            captureSession.sessionPreset = .hd1280x720
        } else {
            captureSession.sessionPreset = .high
        }
    }
    
    private func sessionRemoveInputs() {
        if let allInputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in allInputs {
                captureSession.removeInput(input)
            }
        }
    }
    
    private func sessionAddOutput() {
        // Movie file output
        addOutput(fileOut)
        // Bit rate for the output video (no effect yet: the video connection
        // is created only after an input is added; re-applied in sessionAddInput)
        setBitRate(fileOut: fileOut)
    }
    
    private func sessionAddInput(for camera: AVCaptureDevice) {
        // Add the audio device
        if let audioDevice = AVCaptureDevice.default(for: .audio) {
            addInput(for: audioDevice)
        }
        // Add the camera
        configCamera(camera)
        addInput(for: camera)
        configConnection(for: camera)
        // Re-apply the bit-rate settings: the video connection only exists
        // once both the camera input and the movie file output are attached.
        setBitRate(fileOut: fileOut)
    }
    
    private func configConnection(for camera: AVCaptureDevice) {
        if let connection = fileOut.connection(with: .video) {
            if connection.isVideoMirroringSupported {
                connection.automaticallyAdjustsVideoMirroring = false
                connection.isVideoMirrored = camera.position == .front
            }
            if connection.isVideoStabilizationSupported {
                connection.preferredVideoStabilizationMode = .cinematic
            }
        }
    }
    
    private func addInput(for device: AVCaptureDevice) {
        do {
            let input = try AVCaptureDeviceInput(device: device)
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
                // Keep a reference to the camera input (skip the audio input)
                if device.hasMediaType(.video) {
                    videoInput = input
                }
            } else {
                errorAction?()
            }
        } catch {
            errorAction?()
        }
    }
    
    private func addOutput(_ output: AVCaptureOutput) {
        if captureSession.canAddOutput(output) {
            captureSession.addOutput(output)
        } else {
            errorAction?()
        }
    }
    
    private func configCamera(_ camera: AVCaptureDevice) {
        do {
            try camera.lockForConfiguration()
            camera.activeVideoMinFrameDuration = frameDuration
            camera.activeVideoMaxFrameDuration = frameDuration
            
            if camera.isSmoothAutoFocusSupported {
                camera.isSmoothAutoFocusEnabled = true
            }
            camera.unlockForConfiguration()
        } catch {
            errorAction?()
        }
    }
    
    private func setBitRate(fileOut: AVCaptureMovieFileOutput) {
        if let connection = fileOut.connection(with: .video) {
            let compressionSettings: [String: Any] = [AVVideoAverageBitRateKey: bitRate]
            let codecSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecType.h264,
                                                AVVideoCompressionPropertiesKey: compressionSettings]
            fileOut.setOutputSettings(codecSettings, for: connection)
        }
    }
    
    // MARK: - swap Camera
    
    private func getCamera(with position: AVCaptureDevice.Position) -> AVCaptureDevice? {
        let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                                  mediaType: .video,
                                                                   position: .unspecified)
        for item in discoverySession.devices where item.position == position {
            return item
        }
        return nil
    }
    
    private func transAnimate() {
        let transition = CATransition()
        transition.duration = 0.4
        transition.delegate = self
        transition.timingFunction = CAMediaTimingFunction(name: CAMediaTimingFunctionName.easeInEaseOut)
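        // Note: "flip" is an undocumented CATransition type (the documented ones are fade, moveIn, push and reveal).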
        transition.type = CATransitionType(rawValue: "flip")
        if camera?.position == .front {
            transition.subtype = .fromLeft
        } else {
            transition.subtype = .fromRight
        }
        previewLayer.add(transition, forKey: "changeCamera")
    }
    
    func swapCamera(callback: ((_ position: AVCaptureDevice.Position) -> Void)?) {
        guard !isSwapping, let videoInput = videoInput else {
            return
        }
        
        isSwapping = true
        captureSession.stopRunning()
        swapAction = callback
        
        serialQueue.sync { [weak self] in
            guard let self else { return }
            captureSession.beginConfiguration()
            sessionRemoveInputs()
            let toPosition: AVCaptureDevice.Position = videoInput.device.position == .back ? .front : .back
            if let newCamera = getCamera(with: toPosition) {
                camera = newCamera
                sessionAddInput(for: newCamera)
            }
            captureSession.commitConfiguration()
        }
        
        transAnimate()
    }
    
    // MARK: - flash Light
    
    func setFlash(callback: ((_ torchMode: AVCaptureDevice.TorchMode) -> Void)?) {
        guard let camera = getCamera(with: .back) else { return }
        do {
            try camera.lockForConfiguration()
            if camera.torchMode == AVCaptureDevice.TorchMode.off {
                camera.torchMode = AVCaptureDevice.TorchMode.on
                callback?(.on)
            } else {
                camera.torchMode = AVCaptureDevice.TorchMode.off
                callback?(.off)
            }
            camera.unlockForConfiguration()
        } catch let error as NSError {
            print("setFlash Error: \(error)")
        }
    }
    
    // MARK: - timer
    
    func resumeTimer() {
        cancelTimer()
        timer = Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { [weak self] _ in
            guard let self = self else { return }
            let duration: CGFloat = fileOut.recordedDuration.seconds
            if duration >= limitDuration {
                stopRecording()
            } else {
                recordAction?(false, fileUrl, duration / limitDuration)
            }
        }
    }

    func cancelTimer() {
        timer?.invalidate()
        timer = nil
    }
    
    // MARK: - Actions
    
    func startRecording() {
        if !isRecording, !isSwapping {
            isRecording = true
            if !captureSession.isRunning {
                sessionStartRunning()
            }
            serialQueue.async { [weak self] in
                guard let self = self else { return }
                fileOut.startRecording(to: fileUrl, recordingDelegate: self)
            }
            resumeTimer()
        }
    }

    func stopRecording() {
        if isRecording, !isSwapping {
            isRecording = false
            cancelTimer()
            serialQueue.async { [weak self] in
                guard let self = self else { return }
                fileOut.stopRecording()
            }
        }
    }

    func sessionStartRunning() {
        serialQueue.async { [weak self] in
            guard let self else { return }
            if !captureSession.isRunning {
                captureSession.startRunning()
            }
        }
    }

    func sessionStopRunning() {
        stopRecording()
        captureSession.stopRunning()
    }
    
}

// MARK: - CAAnimationDelegate
extension VideoRecorder: CAAnimationDelegate {
    
    func animationDidStart(_ anim: CAAnimation) {
        sessionStartRunning()
    }
    
    func animationDidStop(_ anim: CAAnimation, finished flag: Bool) {
        isSwapping = false
        if let position = videoInput?.device.position {
            swapAction?(position)
        }
    }
}

// MARK: - AVCaptureFileOutputRecordingDelegate

extension VideoRecorder: AVCaptureFileOutputRecordingDelegate {
    
    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
        recordAction?(false, fileURL, 0.0)
        if let connection = previewLayer.connection, connection.isVideoOrientationSupported {
            connection.videoOrientation = .portrait
        }
    }
    
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        let duration: CGFloat = fileOut.recordedDuration.seconds
        recordAction?(true, outputFileURL, duration / limitDuration)
    }
}

With this version the recorder runs normally: recording starts, stops, and the output file is written.
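
For reference, a minimal usage sketch of the class above (the view controller, file name and button wiring are illustrative; camera and microphone permissions are assumed to be granted):

import UIKit
import AVFoundation

final class RecordViewController: UIViewController {

    // Write the movie into a temporary file (name is illustrative).
    private let outputURL = FileManager.default.temporaryDirectory
        .appendingPathComponent("capture.mov")
    private lazy var recorder = VideoRecorder(fileUrl: outputURL)

    override func viewDidLoad() {
        super.viewDidLoad()

        // Live preview.
        recorder.previewLayer.frame = view.bounds
        recorder.previewLayer.videoGravity = .resizeAspectFill
        view.layer.addSublayer(recorder.previewLayer)

        // Callback delivers (didFinish, fileUrl, progress in 0...1).
        recorder.recordAction = { didFinish, url, progress in
            if didFinish {
                print("Recording finished: \(url)")
            } else {
                print("Recording progress: \(progress)")
            }
        }
        recorder.errorAction = {
            print("Capture session error")
        }
    }

    // Hook this up to a record button.
    @objc private func toggleRecording() {
        if recorder.isRecording {
            recorder.stopRecording()
        } else {
            recorder.startRecording()
        }
    }
}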

Going the other way, you can keep AVCaptureVideoDataOutput and drop AVCaptureMovieFileOutput instead. That requires reworking the logic above, starting from the first version, because you then have to write the sample buffers to a file yourself (typically with AVAssetWriter).
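
For that route, here is a heavily simplified sketch of what replaces AVCaptureMovieFileOutput, assuming video-only sample buffers arriving on the data output's delegate queue (audio, orientation and error handling are omitted):

import AVFoundation

// Illustrative AVAssetWriter-based sink for buffers coming from AVCaptureVideoDataOutput.
final class SampleBufferWriter {

    private let writer: AVAssetWriter
    private let videoInput: AVAssetWriterInput
    private var sessionStarted = false

    init(outputURL: URL, width: Int, height: Int) throws {
        writer = try AVAssetWriter(outputURL: outputURL, fileType: .mov)
        videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: width,
            AVVideoHeightKey: height
        ])
        videoInput.expectsMediaDataInRealTime = true
        if writer.canAdd(videoInput) {
            writer.add(videoInput)
        }
        guard writer.startWriting() else {
            throw writer.error ?? NSError(domain: "SampleBufferWriter", code: -1, userInfo: nil)
        }
    }

    // Call from captureOutput(_:didOutput:from:) for each video sample buffer.
    func append(_ sampleBuffer: CMSampleBuffer) {
        // Start the writer session at the timestamp of the first buffer.
        if !sessionStarted {
            writer.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
            sessionStarted = true
        }
        guard videoInput.isReadyForMoreMediaData, videoInput.append(sampleBuffer) else {
            return // frame dropped; in real code, inspect writer.status / writer.error
        }
    }

    func finish(completion: @escaping () -> Void) {
        videoInput.markAsFinished()
        writer.finishWriting(completionHandler: completion)
    }
}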

Common issues:

I'm having the same issue: *** Terminating app due to uncaught exception 'NSGenericException', reason: '*** -[AVCaptureSession startRunning] startRunning may not be called between calls to beginConfiguration and commitConfiguration'. It appears mainly on iOS 17 devices and on a small number of iOS 16 devices. The cause is threading: startRunning is called from a background queue (Xcode warns if you start the session on the main thread), and that call races with a configuration block that is still between beginConfiguration and commitConfiguration, so the exception is thrown inside the API. The fix that worked for me: create the AVCaptureVideoPreviewLayer instance at initialization time in the preview view controller, and synchronize access to the capture session through a dedicated dispatch queue stored as a property of the session manager, wrapping the startRunning and stopRunning calls in blocks dispatched onto that queue. In short:

  1. Initialize the capture session
  2. Create the AVCaptureVideoPreviewLayer
  3. Set up the session inputs and outputs
  4. Start the session (startRunning) on a custom dispatch queue (see the sketch below)
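
A sketch of that order, assuming a dedicated serial queue like the serialQueue already used in the recorder above:

import AVFoundation

// Steps 1-4 from the list above: configure first, create the preview layer,
// then start the session on a dedicated serial queue, never between
// beginConfiguration() and commitConfiguration(), and not on the main thread.
let sessionQueue = DispatchQueue(label: "camera.session.queue")
let session = AVCaptureSession()                                  // 1. initialize the session
let previewLayer = AVCaptureVideoPreviewLayer(session: session)   // 2. create the preview layer early

sessionQueue.async {
    session.beginConfiguration()        // 3. add inputs / outputs here
    // ... addInput / addOutput ...
    session.commitConfiguration()

    if !session.isRunning {
        session.startRunning()          // 4. start on the custom queue, after commitConfiguration()
    }
}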