基于AVCaptureSession的视频录制工具

2020-09-25  本文已影响0人  zxh123456

基于AVCaptureSession的视频录制工具封装
视频输出使用的AVAssetWriter写入沙盒

使用:

//初始化
self.cTool = [CameraTool cameraWithPreView:self.baseView.preView AndFinishShootingBlock:^(NSURL * url) {
  //url 视频输出地址
}];
//开始预览并录制(注意:block 内引用 self / self.cTool 时需防范循环引用,建议使用 weakSelf)
 [self.cTool startCapture:^{
                //开始录制
                [self.cTool startRecord];
                //......
                //结束录制        
                [self.cTool stopRecord];
}];

CameraTool.h

//
//  CameraTool.h
//  mvvm
//
//  Created by 朱鑫华 on 2020/8/16.
//  Copyright © 2020 朱鑫华. All rights reserved.
//

#import <Foundation/Foundation.h>

NS_ASSUME_NONNULL_BEGIN

typedef void (^CameraFinishedBlock)(NSURL *);

// Forward declaration: this header imports only Foundation, so UIView must be
// declared here for the method signatures below to compile on their own.
@class UIView;

@interface CameraTool : NSObject

/// Factory. Installs a live camera preview layer inside `preView` (the parent
/// container) and stores `block`, which is invoked on the main queue with the
/// file URL of the finished recording after -stopRecord completes.
+(instancetype)cameraWithPreView:(UIView *)preView AndFinishShootingBlock:(CameraFinishedBlock) block;

/// Starts the capture session; `block` runs once the preview is live.
-(void)startCapture:(void(^)(void))block;
/// Stops the capture session asynchronously.
-(void)stopCapture;
/// Begins writing captured audio/video samples to a file in NSTemporaryDirectory().
-(void)startRecord;
/// Finalizes the file, stops capturing, and fires the finish block.
-(void)stopRecord;

@end

NS_ASSUME_NONNULL_END

CameraTool.m

//
//  CameraTool.m
//  mvvm
//
//  Created by 朱鑫华 on 2020/8/16.
//  Copyright © 2020 朱鑫华. All rights reserved.
//

#import "CameraTool.h"
#import <AVFoundation/AVFoundation.h>

// Private state. The tool receives raw sample buffers from both data outputs
// and muxes them into an MP4 via AVAssetWriter.
@interface CameraTool() <AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>

// Capture pipeline: session + front-camera/mic inputs + raw data outputs.
@property (nonatomic , strong) AVCaptureSession *captureSession;
@property (nonatomic , strong) AVCaptureDeviceInput *videoInput;
@property (nonatomic , strong) AVCaptureDeviceInput *audioInput;
@property (nonatomic , strong) AVCaptureVideoDataOutput *videoOutput;
@property (nonatomic , strong) AVCaptureAudioDataOutput *audioOutput;
// Serial queue: session start/stop, writer setup, and sample delivery all run here.
@property (nonatomic , strong) dispatch_queue_t sessionQeue;
@property (nonatomic , strong)  AVCaptureVideoPreviewLayer *preViewLayer;
// YES between -startRecord and -stopRecord; gates -appendSampleBuffer:.
@property (nonatomic , assign) BOOL isRecording;
// Becomes YES once the writer session has been started by the first video frame.
@property (nonatomic , assign) BOOL canWrite;
// File writer and its two inputs (configured per recording in -startRecord).
@property (nonatomic , strong) AVAssetWriter *writer;
@property (nonatomic , strong) AVAssetWriterInput *writerAudioInput;
@property (nonatomic , strong) AVAssetWriterInput *writerVideoInput;
// NOTE(review): player/playerLayer/palyBtn are never referenced in this file —
// presumably leftovers from a playback feature; confirm before removing.
@property (nonatomic , strong) AVPlayer *player;
@property (nonatomic , strong) AVPlayerLayer *playerLayer;
@property (nonatomic , strong) UIButton *palyBtn;

// Dark overlay with a circular cut-out, re-attached to the preview on each start.
@property (nonatomic , strong) CALayer *maskLayer;

// Completion handed in by the factory; called with the finished file URL.
// NOTE(review): name is a typo for "shootingFinishBlock" — kept for compatibility.
@property (nonatomic , copy) CameraFinishedBlock shootingFinishBlcok;

@end


@implementation CameraTool

/// Factory: builds a CameraTool whose preview lives inside `preView` and
/// which reports the finished recording's file URL through `block`.
+(instancetype)cameraWithPreView:(UIView *)preView AndFinishShootingBlock:(nonnull CameraFinishedBlock)block{
    CameraTool *camera = [[self alloc] initWithPreView:preView];
    camera.shootingFinishBlcok = block;
    return camera;
}

/// Designated initializer: wires the whole capture pipeline into `preview`.
-(instancetype)initWithPreView:(UIView *)preview{
    self = [super init];
    if (self) {
        [self initializeDataWithPreView:preview];
    }
    return self;
}

/// One-time setup: serial session queue, capture session with the best
/// supported preset, preview layer inserted into `preView`, the circular
/// mask overlay, and the capture inputs/outputs.
-(void)initializeDataWithPreView:(UIView *)preView{
    _sessionQeue = dispatch_queue_create("cameraQueue", NULL);

    _captureSession = [[AVCaptureSession alloc] init];
    // Pick the highest-quality preset the current device supports.
    for (NSString *preset in @[AVCaptureSessionPresetHigh,
                               AVCaptureSessionPresetMedium,
                               AVCaptureSessionPresetLow]) {
        if ([_captureSession canSetSessionPreset:preset]) {
            [_captureSession setSessionPreset:preset];
            break;
        }
    }

    // Live preview, clipped and aspect-filled into the parent's bounds.
    _preViewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_captureSession];
    _preViewLayer.masksToBounds = YES;
    _preViewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    _preViewLayer.frame = preView.bounds;
    [preView.layer addSublayer:_preViewLayer];

    [self createMaskLayer:preView.bounds];
    [self setCaptureInput];
    [self setCaptureOutput];
}

/// Builds the dark overlay with a transparent circular hole (radius 115 pt,
/// centred in `rect`) plus a 4 pt ring stroked in the brand colour, and
/// stores it in `_maskLayer` for re-attachment to the preview layer.
-(void)createMaskLayer:(CGRect)rect{
    CGPoint center = CGPointMake(CGRectGetWidth(rect) * 0.5, CGRectGetHeight(rect) * 0.5);
    CGFloat holeRadius = 115;

    // Cut-out: full-rect path plus a counter-clockwise circular subpath.
    UIBezierPath *overlayPath = [UIBezierPath bezierPathWithRoundedRect:rect cornerRadius:0];
    [overlayPath appendPath:[UIBezierPath bezierPathWithArcCenter:center radius:holeRadius startAngle:0 endAngle:2 * M_PI clockwise:NO]];

    CAShapeLayer *overlay = [CAShapeLayer layer];
    overlay.path = overlayPath.CGPath;
    overlay.strokeColor = [UIColor clearColor].CGColor;
    overlay.fillColor = [UIColor colorWithWhite:0 alpha:0.3].CGColor;
    _maskLayer = overlay;

    // Ring outlining the hole.
    UIBezierPath *ringPath = [UIBezierPath bezierPathWithArcCenter:center radius:holeRadius startAngle:0 endAngle:2 * M_PI clockwise:NO];
    CAShapeLayer *ring = [CAShapeLayer layer];
    ring.path = ringPath.CGPath;
    ring.strokeColor = ColorHexString(@"#0C2340").CGColor;
    ring.lineWidth = 4;
    ring.fillColor = [UIColor clearColor].CGColor;
    [overlay addSublayer:ring];
}

/// Input configuration: attaches the front camera and the default microphone
/// to the capture session. Failures (missing device, permission denied) are
/// now logged instead of being silently swallowed via `error:nil`.
-(void)setCaptureInput{
    // Video input — front camera.
    AVCaptureDevice *videoDevice = [self cameraWithPosition:AVCaptureDevicePositionFront];
    NSError *videoError = nil;
    // deviceInputWithDevice: raises on a nil device, so guard it explicitly.
    self.videoInput = videoDevice ? [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&videoError] : nil;
    if (self.videoInput && [self.captureSession canAddInput:self.videoInput]) {
        [self.captureSession addInput:self.videoInput];
    } else {
        NSLog(@"CameraTool: could not add video input: %@", videoError);
    }

    // Audio input — first available microphone.
    // NOTE(review): devicesWithMediaType: is deprecated since iOS 10;
    // AVCaptureDeviceDiscoverySession is the modern replacement.
    AVCaptureDevice *audioDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
    NSError *audioError = nil;
    self.audioInput = audioDevice ? [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:&audioError] : nil;
    if (self.audioInput && [self.captureSession canAddInput:self.audioInput]) {
        [self.captureSession addInput:self.audioInput];
    } else {
        NSLog(@"CameraTool: could not add audio input: %@", audioError);
    }
}

/// Output configuration: installs a video data output (BGRA pixel buffers)
/// and an audio data output whose sample buffers are delivered to this
/// object on the session queue. (The original header comment said "input
/// configuration" — a copy/paste slip.)
-(void)setCaptureOutput{
    AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{
        (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)
    };
    [videoDataOutput setSampleBufferDelegate:self queue:self.sessionQeue];
    self.videoOutput = videoDataOutput;
    if ([self.captureSession canAddOutput:videoDataOutput]) {
        [self.captureSession addOutput:videoDataOutput];
    }

    AVCaptureAudioDataOutput *audioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
    [audioDataOutput setSampleBufferDelegate:self queue:self.sessionQeue];
    self.audioOutput = audioDataOutput;
    if ([self.captureSession canAddOutput:audioDataOutput]) {
        [self.captureSession addOutput:audioDataOutput];
    }

    // NOTE(review): the original kept disabled code here that un-mirrored the
    // front camera via connection.videoMirrored on the video output's
    // connection; restore it if front-camera mirroring is unwanted.
}

#pragma mark - 预览相关
/// Starts the capture session on the session queue (startRunning blocks, so
/// it must stay off the main thread) and invokes `block` on the main queue
/// once the session is up. Fixes two defects: `block` is now nil-checked
/// (the original crashed on a nil completion), and the completion also fires
/// when the session is already running instead of being silently dropped.
-(void)startCapture:(void (^)(void))block{
    // Re-install the circular mask above the preview content.
    [_maskLayer removeFromSuperlayer];
    [_preViewLayer addSublayer:_maskLayer];

    if ([self.captureSession isRunning]) {
        // Already previewing — still report readiness so the caller's
        // record flow continues.
        if (block) {
            dispatch_async(dispatch_get_main_queue(), block);
        }
        return;
    }

    __weak typeof(self) weakSelf = self;
    dispatch_async(self.sessionQeue, ^{
        [weakSelf.captureSession startRunning];
        if (block) {
            dispatch_async(dispatch_get_main_queue(), block);
        }
    });
}
/// Stops the running capture session asynchronously on the session queue.
/// No-op when the session is not running.
-(void)stopCapture{
    if (![self.captureSession isRunning]) {
        return;
    }
    __weak typeof(self) weakSelf = self;
    dispatch_async(self.sessionQeue, ^{
        [weakSelf.captureSession stopRunning];
    });
}

#pragma mark - 拍摄相关
/// Prepares a fresh AVAssetWriter with AAC audio and H.264 720p video inputs,
/// then arms the sample-buffer callbacks via `isRecording`. Runs on the
/// session queue. Fixes: writer-creation errors are now checked (the original
/// ignored a nil writer and carried on), the unused `res` local is gone, and
/// self is captured consistently (the original mixed self and weakSelf).
-(void)startRecord{
    // dispatch_async releases the block after running it, so a strong
    // capture of self here cannot form a lasting retain cycle; it also
    // keeps the tool alive until the writer is fully configured.
    dispatch_async(self.sessionQeue, ^{
        NSURL *fileUrl = [self createVideoPath];
        // The output path is fixed, so delete any previous take first.
        // A "file not found" failure here is expected and harmless.
        [[NSFileManager defaultManager] removeItemAtURL:fileUrl error:nil];

        NSError *error = nil;
        self.writer = [AVAssetWriter assetWriterWithURL:fileUrl fileType:AVFileTypeMPEG4 error:&error];
        if (self.writer == nil) {
            NSLog(@"CameraTool startRecord: AVAssetWriter creation failed: %@", error);
            return;
        }

        // AAC, mono, 22.05 kHz, 28 kbps per channel.
        NSDictionary *audioOutputSetting = @{
            AVEncoderBitRatePerChannelKey : @(28000),
            AVFormatIDKey : @(kAudioFormatMPEG4AAC),
            AVNumberOfChannelsKey : @(1),
            AVSampleRateKey : @(22050)
        };
        self.writerAudioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSetting];
        // Tune the input for real-time capture data.
        self.writerAudioInput.expectsMediaDataInRealTime = YES;

        // H.264 720p, 15 fps keyframe cadence, baseline profile.
        // NOTE(review): AVVideoCodecH264 is deprecated in favour of
        // AVVideoCodecTypeH264 on iOS 11+ — kept for the deployment target.
        NSDictionary *videoOutputSetting = @{
            AVVideoCodecKey : AVVideoCodecH264,
            AVVideoWidthKey : @(1280),
            AVVideoHeightKey : @(720),
            AVVideoCompressionPropertiesKey : @{
                AVVideoAverageBitRateKey : @(1280 * 720 * 3),
                AVVideoExpectedSourceFrameRateKey : @(15),
                AVVideoMaxKeyFrameIntervalKey : @(15),
                AVVideoProfileLevelKey : AVVideoProfileLevelH264BaselineAutoLevel
            }
        };
        self.writerVideoInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoOutputSetting];
        self.writerVideoInput.expectsMediaDataInRealTime = YES;
        // Captured frames arrive rotated; rotate 90° so the file plays upright.
        self.writerVideoInput.transform = CGAffineTransformMakeRotation(M_PI / 2.0);

        if ([self.writer canAddInput:self.writerVideoInput]) {
            [self.writer addInput:self.writerVideoInput];
        }
        if ([self.writer canAddInput:self.writerAudioInput]) {
            [self.writer addInput:self.writerAudioInput];
        }

        // `canWrite` stays NO until the first video frame successfully starts
        // the writer session (see -appendSampleBuffer:).
        self.isRecording = YES;
        self.canWrite = NO;
    });
}
/// Finishes the current recording: marks both writer inputs as finished,
/// asks the writer to finalize the file, then — on the main queue — stops
/// the capture session and reports the output URL via `shootingFinishBlcok`.
/// Fix: the original mixed weakSelf and strong self inside the completion;
/// a weak-only capture could let the tool deallocate mid-finish and silently
/// drop the callback. Self is now captured strongly on purpose — the writer
/// releases the handler once it runs, so no cycle persists.
-(void)stopRecord{
    dispatch_async(self.sessionQeue, ^{
        self.isRecording = NO;
        // Nothing to finish if the writer never reached the writing state
        // (e.g. stopRecord called before the first video frame arrived).
        if (self.writer.status != AVAssetWriterStatusWriting) {
            return;
        }
        [self.writerVideoInput markAsFinished];
        [self.writerAudioInput markAsFinished];
        [self.writer finishWritingWithCompletionHandler:^{
            dispatch_async(dispatch_get_main_queue(), ^{
                NSURL *outputURL = self.writer.outputURL;
                NSLog(@"文件地址:%@", outputURL.absoluteString);
                [self stopCapture];
                if (self.shootingFinishBlcok) {
                    self.shootingFinishBlcok(outputURL);
                }
            });
        }];
    });
}

#pragma mark - 视频处理
/// Shared delegate callback for both the video and audio data outputs
/// (invoked on the session queue, per -setCaptureOutput); every captured
/// sample buffer funnels into -appendSampleBuffer:.
-(void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(nonnull CMSampleBufferRef)sampleBuffer fromConnection:(nonnull AVCaptureConnection *)connection{
    [self appendSampleBuffer:sampleBuffer];
}

/// Routes one captured sample buffer into the asset writer. The first video
/// frame starts the writer session at its timestamp; subsequent video/audio
/// buffers are appended when their input is ready. Fixes: `canWrite` was set
/// YES even when startWriting failed (later appends would then target a
/// non-writing writer); audio could be appended before the session started
/// (which raises); the format-description type was misnamed
/// `CMTextFormatDescriptionRef`; NULL descriptions are now guarded.
-(void)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer{
    if (!self.isRecording) {
        return;
    }
    // Determine whether this buffer carries video or audio.
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
    if (formatDescription == NULL) {
        return;
    }
    CMMediaType mediaType = CMFormatDescriptionGetMediaType(formatDescription);

    if (mediaType == kCMMediaType_Video) {
        if (!self.canWrite) {
            // Start the writer session at the first video frame's timestamp
            // so audio and video share a common timeline origin.
            CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            if ([self.writer startWriting]) {
                [self.writer startSessionAtSourceTime:timestamp];
                self.canWrite = YES;  // only after the session actually started
            } else {
                NSLog(@"CameraTool: startWriting failed: %@", self.writer.error);
            }
        }
        if (self.canWrite && self.writerVideoInput.readyForMoreMediaData) {
            if (![self.writerVideoInput appendSampleBuffer:sampleBuffer]) {
                NSLog(@"CameraTool: video append failed: %@", self.writer.error);
            }
        }
    } else if (mediaType == kCMMediaType_Audio) {
        // Drop audio until the first video frame has started the session;
        // appending before startSessionAtSourceTime: raises an exception.
        if (self.canWrite && self.writerAudioInput.readyForMoreMediaData) {
            if (![self.writerAudioInput appendSampleBuffer:sampleBuffer]) {
                NSLog(@"CameraTool: audio append failed: %@", self.writer.error);
            }
        }
    }
}


/// File URL for the recording inside NSTemporaryDirectory().
/// The name is intentionally fixed ("faceAuth.mp4") so each take overwrites
/// the previous one; -startRecord deletes any stale file first. (A commented
/// randomized-name variant existed in the original.)
-(NSURL *)createVideoPath{
    NSString *fileName = [@"faceAuth" stringByAppendingPathExtension:@"mp4"];
    NSString *fullPath = [NSTemporaryDirectory() stringByAppendingPathComponent:fileName];
    return [NSURL fileURLWithPath:fullPath];
}


/// Returns the first video-capture device at `position`, or nil if none.
/// NOTE(review): devicesWithMediaType: is deprecated since iOS 10 in favour
/// of AVCaptureDeviceDiscoverySession; kept to preserve behavior.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position
{
    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (candidate.position == position) {
            return candidate;
        }
    }
    return nil;
}
/// Debug aid only — logs when the tool is released.
/// NOTE(review): %s with __func__ already expands to "-[CameraTool dealloc]",
/// so the trailing literal "dealloc" duplicates the word in the log output;
/// left untouched here since log text is runtime behavior.
-(void)dealloc{
    NSLog(@"%s dealloc",__func__);
}



@end
上一篇下一篇

猜你喜欢

热点阅读