
Hardware-Encoding H.264 with VideoToolbox

2017-01-12 · sxyxsp123
Introduction

Starting with iOS 8, Apple opened up VideoToolbox.framework, which lets developers use the hardware built into iOS devices to encode and decode video. The benefit of hardware codecs is that the heavy computation is done by dedicated circuitry, which is usually more efficient, faster, and lower-power than doing the same work on the CPU. H.264 is currently the most popular codec-layer video compression format; in our project the protocol layer is RTMP or HTTP, but the codec layer is H.264 in both cases.
For background on the H.264 format itself, see 深入浅出理解视频编码H264结构; it is not repeated here. On iOS there are two ways to encode video as H.264:

  1. Software encoding: use an open-source library such as FFmpeg. The computation runs on the CPU, so it is relatively inefficient, but it is general-purpose and cross-platform.
  2. Hardware encoding: use VideoToolbox, which offloads the computation to the device's dedicated encoding hardware and is far more efficient.

While getting familiar with H.264, and to understand it better, I tried hardware-encoding and hardware-decoding a raw H.264 stream with VideoToolbox.
Today we look at the encoding side: hardware-encoding H.264 with VideoToolbox.

The steps for hardware-encoding H.264 with VideoToolbox are:

  1. Initialize the camera. When configuring the output, set the delegate and the output queue; the captured frames are then handled in the delegate method.

  2. Initialize VideoToolbox and set the various session properties.

  3. Grab each frame of raw data and encode it.

  4. After each frame is encoded, check in the callback whether it is a keyframe. If so, use CMSampleBufferGetFormatDescription to get the CMFormatDescriptionRef, then call
    CMVideoFormatDescriptionGetH264ParameterSetAtIndex to extract the SPS and PPS. Finally, replace the first four bytes of every NALU in the frame (a length prefix) with the start code 0x00 00 00 01 before writing it to the file (see the byte-level sketch after this list).

  5. Repeat steps 3 and 4 for every frame.

  6. Call VTCompressionSessionCompleteFrames to finish encoding, then tear down the session with VTCompressionSessionInvalidate and release it (CFRelease).
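
Step 4 is the least obvious part, so here is a minimal byte-level sketch of that AVCC-to-Annex-B conversion on its own. The function name AnnexBFromAVCC and its arguments are hypothetical; the full listing below does the same thing inline while iterating the encoder's block buffer.

static NSData * AnnexBFromAVCC(const uint8_t *avccData, size_t totalLength) {
    static const char startCode[] = "\x00\x00\x00\x01";
    NSMutableData *annexB = [NSMutableData data];
    size_t offset = 0;
    while (offset + 4 <= totalLength) {
        uint32_t nalLength = 0;
        memcpy(&nalLength, avccData + offset, 4);        // 4-byte AVCC length prefix
        nalLength = CFSwapInt32BigToHost(nalLength);     // big-endian -> host byte order
        if (offset + 4 + nalLength > totalLength) break; // guard against a truncated buffer
        [annexB appendBytes:startCode length:4];         // write the start code instead of the length
        [annexB appendBytes:avccData + offset + 4 length:nalLength]; // then the NALU payload
        offset += 4 + nalLength;
    }
    return annexB;
}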

Full code

//
//  AVVideoToolBoxh264VC.m
//  AudioandVideo
//
//  Created by sunpeng on 17/1/10.
//  Copyright © 2017年 mac. All rights reserved.
//

#import "AVVideoToolBoxh264VC.h"
#import <AVFoundation/AVFoundation.h>
#import <AVFoundation/AVCaptureOutput.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <VideoToolbox/VideoToolbox.h>

@interface AVVideoToolBoxh264VC ()<AVCaptureVideoDataOutputSampleBufferDelegate> {

    int frameID;
    NSFileHandle *fileHandle;
    dispatch_queue_t encodeQueue;
    VTCompressionSessionRef encodingSession;
    CMFormatDescriptionRef format;
    
}



//camera
@property (nonatomic, strong) AVCaptureSession           * captureSession;
@property (nonatomic, strong) AVCaptureDeviceInput       * captureDeviceInput;
@property (nonatomic, strong) AVCaptureStillImageOutput  * captureImageOutput;
@property (nonatomic, strong) AVCaptureMovieFileOutput   * captureMovieFileOutput;
@property (nonatomic, strong) AVCaptureVideoPreviewLayer * captureVideoPreLayer;

@end

@implementation AVVideoToolBoxh264VC

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do any additional setup after loading the view.
    self.view.backgroundColor = [UIColor whiteColor];
    self.title = @"使用VideoToolbox硬编码H.264";
    
    [self initCamera];
    [self initVideoToolBox];
    [self.captureSession startRunning];
    
}

- (void)viewWillDisappear:(BOOL)animated{
    [super viewWillDisappear:animated];
    [self.captureSession stopRunning];
    [self endVideoToolBox];
}

- (void)initCamera{
    // Create the capture session
    _captureSession = [[AVCaptureSession alloc] init];
    if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
        _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
    }
    
    AVCaptureDevice * captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
    if (!captureDevice) {
        NSLog(@"获取后置摄像头失败");
        return;
    }
    
    
    NSError * error = nil;
    _captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:&error];
    if (error) {
        NSLog(@"获取输入设备出错--%@",error.localizedDescription);
        return;
    }
    
    
    // Add the device input to the session
    if ([_captureSession canAddInput:_captureDeviceInput]) {
        [_captureSession addInput:_captureDeviceInput];
    }
    
    // Create the video data output and add it to the session
    AVCaptureVideoDataOutput * output = [[AVCaptureVideoDataOutput alloc] init];
    if ([_captureSession canAddOutput:output]) {
        [_captureSession addOutput:output];
    }
    dispatch_queue_t queue = dispatch_queue_create("myQueue", NULL);
    [output setSampleBufferDelegate:self queue:queue];
    output.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
    
    // Enable video stabilization on the video connection when supported
    AVCaptureConnection *captureConnection = [output connectionWithMediaType:AVMediaTypeVideo];
    if ([captureConnection isVideoStabilizationSupported]) {
        captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
    }
    
    
    // Create the preview layer to show the live camera feed
    _captureVideoPreLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
    CALayer * layer = self.view.layer;
    layer.masksToBounds = YES;
    
    _captureVideoPreLayer.frame = layer.bounds;
    _captureVideoPreLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    
    [self.view.layer addSublayer:_captureVideoPreLayer];


    // Dump the encoded stream to Documents/abc.h264
    NSString *file = [[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject] stringByAppendingPathComponent:@"abc.h264"];
    [[NSFileManager defaultManager] removeItemAtPath:file error:nil];
    [[NSFileManager defaultManager] createFileAtPath:file contents:nil attributes:nil];
    fileHandle = [NSFileHandle fileHandleForWritingAtPath:file];
    

}

// Get the camera at the given position
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition) position{
    
    NSArray * cameras = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice * camera in cameras) {
        if ([camera position] == position) {
            return camera;
        }
    }
    
    return nil;
}

// Initialize VideoToolbox
- (void)initVideoToolBox{
   
    encodeQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0);
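    // Note: this is the shared concurrent global queue; frames are still encoded in
    // order because the capture callback dispatches onto it synchronously from a serial queue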
    dispatch_sync(encodeQueue, ^{
       
        frameID = 0;
        int width = 1280, height = 720;
        // Create the compression session; encodedPerFrameH264 is the callback invoked after each frame is encoded
        OSStatus status = VTCompressionSessionCreate(NULL, width, height, kCMVideoCodecType_H264, NULL, NULL, NULL, encodedPerFrameH264, (__bridge void * )(self), &encodingSession);
        NSLog(@"H264: VTCompressionSessionCreate %d", (int)status);
        if (status != 0) {
            NSLog(@"H264: unable to create a H264 Session");
            return ;
        }
        //----------------------------------------- Configure the session properties
        // Real-time encoding output, to avoid latency
        VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
        VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_AutoLevel);
        
        // Keyframe interval (GOP size)
        int frameInterval = 10;
        CFNumberRef frameIntervalRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &frameInterval);
        VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, frameIntervalRef);
        
        // Expected frame rate
        int fps = 10;
        CFNumberRef fpsRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &fps);
        VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_ExpectedFrameRate, fpsRef);
        
        // Average bit rate, in bits per second
        int bitRate = width * height * 3 * 4 * 8;
        CFNumberRef bitRateRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bitRate);
        VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_AverageBitRate, bitRateRef);
        
        // Hard upper limit on the data rate; DataRateLimits takes an array of byte-count/second pairs
        int bitRateLimit = width * height * 3 * 4;
        NSArray *dataRateLimits = @[ @(bitRateLimit), @1 ];
        VTSessionSetProperty(encodingSession, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFArrayRef)dataRateLimits);
        
        //tell the encoder to start encoding
        VTCompressionSessionPrepareToEncodeFrames(encodingSession);
        
        
    });
}

// Encode one raw captured frame
- (void)encodePerFrame:(CMSampleBufferRef) sampleBuffer{

    CVImageBufferRef imageBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
    // Presentation timestamp for this frame; if omitted, the timeline ends up too long
    CMTime presentationTimeStamp = CMTimeMake(frameID++, 1000);
    VTEncodeInfoFlags flags;
    
    // Submit the frame to the encoder
    OSStatus statusCode = VTCompressionSessionEncodeFrame(encodingSession, imageBuffer, presentationTimeStamp, kCMTimeInvalid, NULL, NULL, &flags);
    if (statusCode != noErr) {
        NSLog(@"H264: VTCompressionSessionEncodeFrame failed with %d", (int) statusCode);
        
        VTCompressionSessionInvalidate(encodingSession);
        CFRelease(encodingSession);
        encodingSession = NULL;
        return;
    }
    
    NSLog(@"H264: VTCompressionSessionEncodeFrame Success");
}

// Callback invoked after each frame has been encoded
void encodedPerFrameH264(void * outputCallbackRefCon, void * sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer){

    NSLog(@"encodedPerFrameH264 called with status %d infoFlags %d", (int)status, (int)infoFlags);
    if (status != 0) {
        return;
    }
    
    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        NSLog(@"encodedPerFrameH264 data is not ready");
        return;
    }
    
    AVVideoToolBoxh264VC * encoder = (__bridge AVVideoToolBoxh264VC *)outputCallbackRefCon;
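    // A sample whose attachments lack kCMSampleAttachmentKey_NotSync is a sync frame (keyframe)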
    bool keyframe = !CFDictionaryContainsKey((CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0) ), kCMSampleAttachmentKey_NotSync);
    // For a keyframe, extract the SPS and PPS from the format description
    if (keyframe) {
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
        size_t sparameterSetSize, sparameterSetCount;
        const uint8_t * sparameterSet;
        OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0);
        if (statusCode == noErr) {
            size_t pparameterSetSize, pparameterSetCount;
            const uint8_t * pparameterSet;
            OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);
            if (statusCode == noErr) {
                // Found both SPS (index 0) and PPS (index 1)
                NSData * sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
                NSData * pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];
                if (encoder) {
                    [encoder setSpsAndPps:sps pps:pps];
                }
            }
        }
    }
    
    CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    size_t length , totalLength;
    char * dataPointer;
    OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
    if (statusCodeRet == noErr) {
        size_t bufferOffset = 0;
        // The returned NALU data has no 00 00 00 01 start code; the first four bytes
        // are the NALU length in big-endian byte order (AVCC format)
        static const int AVCCHeaderLength = 4;
        // Loop over each NALU in the block buffer
        while (bufferOffset < totalLength - AVCCHeaderLength) {
            uint32_t NALUnitLength = 0;
            // Read the NAL unit length
            memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
            
            // Convert the length from big-endian to host byte order
            NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
            
            NSData* data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + AVCCHeaderLength) length:NALUnitLength];
            [encoder setEncodedData:data isKeyFrame:keyframe];
            
            // Move to the next NAL unit in the block buffer
            bufferOffset += AVCCHeaderLength + NALUnitLength;
            
        }
    }

}

// Write the SPS and PPS to the file, each preceded by a start code
- (void)setSpsAndPps:(NSData *) sps pps: (NSData *) pps{

    NSLog(@"setSpsAndPps %d %d", (int)[sps length], (int)[pps length] );
    
    const char bytes[] = "\x00\x00\x00\x01";
    //string literals have an implicit trailing '\0'
    size_t length = (sizeof(bytes) - 1);
    NSData * ByteHeader = [NSData dataWithBytes:bytes length:length];
    
    [fileHandle writeData:ByteHeader];
    [fileHandle writeData:sps];
    [fileHandle writeData:ByteHeader];
    [fileHandle writeData:pps];
}

// Write one encoded NALU to the file, preceded by a start code
- (void)setEncodedData: (NSData *)data isKeyFrame:(BOOL) isKeyFrame{
    NSLog(@"setEncodedData %d", (int)[data length]);
    if (fileHandle != nil) {
        const char bytes[] = "\x00\x00\x00\x01";
        //string literals have an implicit trailing '\0'
        size_t length = (sizeof(bytes) - 1);
        NSData * ByteHeader = [NSData dataWithBytes:bytes length:length];
        
        [fileHandle writeData:ByteHeader];
        [fileHandle writeData:data];
    }
}

- (void)endVideoToolBox{
    if (encodingSession) {
        VTCompressionSessionCompleteFrames(encodingSession, kCMTimeInvalid);
        VTCompressionSessionInvalidate(encodingSession);
        CFRelease(encodingSession);
        encodingSession = NULL;
    }
    [fileHandle closeFile];
    fileHandle = nil;
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
// Called for every frame captured from the video stream
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
  
    dispatch_sync(encodeQueue, ^{
        [self encodePerFrame:sampleBuffer];
    });
 
    
}

@end
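
The dump written to Documents/abc.h264 is a plain Annex B byte stream, so once the app has captured for a while you can pull the file off the device (for example through Xcode's Devices window), and it should play in any player that accepts raw H.264, e.g. ffplay abc.h264.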

To be continued.
