
iOS Short Video: Segmented Recording with Pause/Resume and Segment Back-Deletion

2019-11-30 · 夜半敲门话不语

Preface

I'm currently working on a beauty-camera style project, and this post walks through some ideas for implementing video recording.
The code has been uploaded to MagicCamera; your star and fork are the best support and motivation for me.

Approaches

Broadly, there are two:

Approach 1
Idea: keep track of how long recording has been paused.
This is what GPUImageMovieWriter does; the rough idea is as follows:

// Toggle the paused state
- (void)setPaused:(BOOL)newValue {
    if (_paused != newValue) {
        _paused = newValue;
        
        if (_paused) {
            discont = YES;
        }
    }
}

// When a new video frame arrives (in the frame-writing callback):

if (!isRecording || _paused)    // drop the frame while paused
{
    [firstInputFramebuffer unlock];
    return;
}

if (discont) {                  // on resume, if we were paused, recompute the time offset
    discont = NO;
    CMTime current;
    
    if (offsetTime.value > 0) {
        current = CMTimeSubtract(frameTime, offsetTime);
    } else {
        current = frameTime;
    }
    
    CMTime offset  = CMTimeSubtract(current, previousFrameTime);
    
    if (offsetTime.value == 0) {
        offsetTime = offset;
    } else {
        offsetTime = CMTimeAdd(offsetTime, offset);
    }
}

if (offsetTime.value > 0) {
    frameTime = CMTimeSubtract(frameTime, offsetTime);
}
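In effect, each pause adds its gap to offsetTime: if recording pauses at 5 s and resumes at 8 s, the 3 s gap is accumulated, and every subsequent frame timestamp is shifted back by that amount, so the single output file plays back without any hole.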

Approach 2
Idea: use AVAssetWriter to produce a separate video file per segment; if you don't need to process the frame data, AVCaptureMovieFileOutput also works.

Merging: the segment files are concatenated into a single video afterwards (for example with AVMutableComposition), so back-deleting a segment only means removing its file, as in the sketch below.
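A minimal merging sketch, assuming the per-segment file URLs are collected in an array named segmentURLs and the result is exported to mergedURL (both names are illustrative, not MagicCamera API):

AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                  preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                  preferredTrackID:kCMPersistentTrackID_Invalid];

// Append each segment end-to-end on the composition's timeline
CMTime cursor = kCMTimeZero;
for (NSURL *segmentURL in segmentURLs) {
    AVAsset *asset = [AVAsset assetWithURL:segmentURL];
    CMTimeRange range = CMTimeRangeMake(kCMTimeZero, asset.duration);
    
    [videoTrack insertTimeRange:range
                        ofTrack:[asset tracksWithMediaType:AVMediaTypeVideo].firstObject
                         atTime:cursor
                          error:nil];
    
    AVAssetTrack *sourceAudio = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    if (sourceAudio) {
        [audioTrack insertTimeRange:range ofTrack:sourceAudio atTime:cursor error:nil];
    }
    
    cursor = CMTimeAdd(cursor, asset.duration);
}

// Export the stitched composition to a single movie file
AVAssetExportSession *export = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                presetName:AVAssetExportPresetHighestQuality];
export.outputURL = mergedURL;
export.outputFileType = AVFileTypeQuickTimeMovie;
[export exportAsynchronouslyWithCompletionHandler:^{
    // check export.status / export.error here
}];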

Basics

This post adopts Approach 2.

The main classes involved in recording: AVCaptureSession with AVCaptureVideoDataOutput / AVCaptureAudioDataOutput on the capture side, CVOpenGLESTextureCache for turning camera frames into OpenGL ES textures, and AVAssetWriter / AVAssetWriterInput / AVAssetWriterInputPixelBufferAdaptor for file writing.

Framework
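For reference, a minimal sketch of how the capture side is wired up (MagicCamera wraps this in its own classes; the preset and queue names here are just illustrative):

AVCaptureSession *session = [[AVCaptureSession alloc] init];
session.sessionPreset = AVCaptureSessionPreset1280x720;

// Camera and microphone inputs
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
if ([session canAddInput:videoInput]) [session addInput:videoInput];

AVCaptureDevice *mic = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:mic error:nil];
if ([session canAddInput:audioInput]) [session addInput:audioInput];

// Video frames arrive as CMSampleBufferRef on the delegate,
// which forwards them to processVideoSampleBuffer: shown below
AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey :
                                   @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
[videoOutput setSampleBufferDelegate:self queue:dispatch_queue_create("video.capture", NULL)];
if ([session canAddOutput:videoOutput]) [session addOutput:videoOutput];

// Audio sample buffers feed the writer's audio input
AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
[audioOutput setSampleBufferDelegate:self queue:dispatch_queue_create("audio.capture", NULL)];
if ([session canAddOutput:audioOutput]) [session addOutput:audioOutput];

[session startRunning];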

Implementation

Camera capture and rendering are borrowed from GPUImage (you can use GPUImage directly for this part); the file-writing side has to be customized, either written from scratch or by reimplementing GPUImageMovieWriter on top of GPUImage.

- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    
    int bufferWidth = (int) CVPixelBufferGetWidth(pixelBuffer);
    int bufferHeight = (int) CVPixelBufferGetHeight(pixelBuffer);

    CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    
    CFTypeRef colorAttachments = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
    
    // Compare with CFStringCompare rather than pointer equality, and guard against NULL
    if (colorAttachments != NULL && CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo) {
        if (isFullYUVRange) {
            _preferredConversion = kMKColorConversion601FullRange;
        }
        else {
            _preferredConversion = kMKColorConversion601;
        }
    }
    else {
        _preferredConversion = kMKColorConversion709;
    }
    
    // This part creates the capture textures; see GPUImage. (Implementing it yourself can produce a black screen or errors such as CVOpenGLESTextureCacheCreateTextureFromImage failed (error: -6683).)
    [_myContext useAsCurrentContext];
    
    if ([MKGPUImageContext supportsFastTextureUpload]) {
        
        if (CVPixelBufferGetPlaneCount(pixelBuffer) > 0) { // Check for YUV planar inputs to do RGB conversion
            CVPixelBufferLockBaseAddress(pixelBuffer, 0);
            
            CVOpenGLESTextureRef _luminanceTextureRef;
            CVOpenGLESTextureRef _chrominanceTextureRef;
            
            if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
            {
                imageBufferWidth = bufferWidth;
                imageBufferHeight = bufferHeight;
            }
            
            CVReturn err;
            
            // Y-plane
            glActiveTexture(GL_TEXTURE4);
            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [_myContext coreVideoTextureCache], pixelBuffer, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &_luminanceTextureRef);
            
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }
            
            luminanceTexture = CVOpenGLESTextureGetName(_luminanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, luminanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            
            // UV-plane
            glActiveTexture(GL_TEXTURE5);
            err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [_myContext coreVideoTextureCache], pixelBuffer, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &_chrominanceTextureRef);
            
            if (err)
            {
                NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
            }
            
            chrominanceTexture = CVOpenGLESTextureGetName(_chrominanceTextureRef);
            glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
            
            [self convertYUVToRGBOutput];
            
            if (MKGPUImageRotationSwapsWidthAndHeight(internalRotation))
            {
                imageBufferWidth = bufferHeight;
                imageBufferHeight = bufferWidth;
            }
            
            CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
            CFRelease(_luminanceTextureRef);
            CFRelease(_chrominanceTextureRef);
            textureId = [_outputFramebuffer texture];
        }
    
    } else {
        
        CVPixelBufferLockBaseAddress(pixelBuffer, 0);
        int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(pixelBuffer);
        MKGPUTextureOptions options;
        options.minFilter = GL_LINEAR;
        options.magFilter = GL_LINEAR;
        options.wrapS = GL_CLAMP_TO_EDGE;
        options.wrapT = GL_CLAMP_TO_EDGE;
        options.internalFormat = GL_RGBA;
        options.format = GL_BGRA;
        options.type = GL_UNSIGNED_BYTE;

        _outputFramebuffer = [[_myContext framebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow/4, bufferHeight) textureOptions:options missCVPixelBuffer:YES];
        [_outputFramebuffer activateFramebuffer];

        glBindTexture(GL_TEXTURE_2D, [_outputFramebuffer texture]);

        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(pixelBuffer));
        textureId = [_outputFramebuffer texture];

        imageBufferWidth = bytesPerRow / 4;
        imageBufferHeight = bufferHeight;
        
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    }
    
    int rotatedImageBufferWidth = bufferWidth, rotatedImageBufferHeight = bufferHeight;
    if (MKGPUImageRotationSwapsWidthAndHeight(internalRotation))
    {
        rotatedImageBufferWidth = bufferHeight;
        rotatedImageBufferHeight = bufferWidth;
    }
    
    // Effects processing via the delegate
    if ([self.delegate respondsToSelector:@selector(effectsProcessingTexture:inputSize:rotateMode:)]) {
        [self.delegate effectsProcessingTexture:textureId inputSize:CGSizeMake(imageBufferWidth, imageBufferHeight) rotateMode:outputRotation];
    }
    
    // Write the processed video frame to the segment writer
    [_segmentMovieWriter processVideoTextureId:textureId AtRotationMode:outputRotation AtTime:currentTime];
    
    // Render to the screen
    if ([self.delegate respondsToSelector:@selector(renderTexture:inputSize:rotateMode:)]) {
        [self.delegate renderTexture:textureId inputSize:CGSizeMake(rotatedImageBufferWidth, rotatedImageBufferHeight) rotateMode:outputRotation];
    }

    [_outputFramebuffer unlock];
    _outputFramebuffer = nil;
}
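The segment writer's processVideoTextureId:AtRotationMode:AtTime: is where the processed texture reaches the AVAssetWriter. A hedged sketch of the idea (not the exact MagicCamera implementation; the FBO rendering step is elided):

- (void)processVideoTextureId:(GLuint)textureId AtRotationMode:(MKGPUImageRotationMode)rotationMode AtTime:(CMTime)frameTime {
    if (!self.isWriting || !self.assetWriterVideoInput.isReadyForMoreMediaData) {
        return;
    }
    
    // The writing session must start at the first frame's capture timestamp
    if (self.firstSample) {
        [self.assetWriter startSessionAtSourceTime:frameTime];
        self.firstSample = NO;
    }
    
    // Fetch a pixel buffer from the adaptor's pool ...
    CVPixelBufferRef renderTarget = NULL;
    CVPixelBufferPoolCreatePixelBuffer(NULL, [self.assetWriterInputPixelBufferAdaptor pixelBufferPool], &renderTarget);
    
    // ... back an OpenGL ES texture with it via CVOpenGLESTextureCacheCreateTextureFromImage,
    // attach that texture to an FBO, and draw textureId into it (applying rotationMode) ...
    
    // ... then append it with the original capture timestamp
    [self.assetWriterInputPixelBufferAdaptor appendPixelBuffer:renderTarget withPresentationTime:frameTime];
    CVPixelBufferRelease(renderTarget);
}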

File writing

- (void)startWriting {

    dispatch_async(dispatch_get_global_queue(0, 0), ^{
        
        NSError *error = nil;

        NSString *fileType = AVFileTypeQuickTimeMovie;
        self.assetWriter = [AVAssetWriter assetWriterWithURL:[self outputURL]
                                                    fileType:fileType
                                                       error:&error];
        
        if (!self.assetWriter || error) {
            NSString *formatString = @"Could not create AVAssetWriter: %@";
            NSLog(@"%@", [NSString stringWithFormat:formatString, error]);
            return;
        }
        
        // use default output settings if none specified
        if (_videoSettings == nil) {
            NSMutableDictionary *settings = [[NSMutableDictionary alloc] init];
            [settings setObject:AVVideoCodecH264 forKey:AVVideoCodecKey];
            [settings setObject:[NSNumber numberWithInt:videoSize.width] forKey:AVVideoWidthKey];
            [settings setObject:[NSNumber numberWithInt:videoSize.height] forKey:AVVideoHeightKey];
            _videoSettings = settings;
        } else {    // custom output settings specified
            __unused NSString *videoCodec = [_videoSettings objectForKey:AVVideoCodecKey];
            __unused NSNumber *width = [_videoSettings objectForKey:AVVideoWidthKey];
            __unused NSNumber *height = [_videoSettings objectForKey:AVVideoHeightKey];
            
            NSAssert(videoCodec && width && height, @"OutputSettings is missing required parameters.");
            
            if( [_videoSettings objectForKey:@"EncodingLiveVideo"] ) {
                NSMutableDictionary *tmp = [_videoSettings mutableCopy];
                [tmp removeObjectForKey:@"EncodingLiveVideo"];
                _videoSettings = tmp;
            }
        }
        
        self.assetWriterVideoInput =  [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                                                     outputSettings:self.videoSettings];
        self.assetWriterVideoInput.expectsMediaDataInRealTime = YES;
        
        NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                                               [NSNumber numberWithInt:videoSize.width], kCVPixelBufferWidthKey,
                                                               [NSNumber numberWithInt:videoSize.height], kCVPixelBufferHeightKey,
                                                               nil];
        self.assetWriterInputPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:self.assetWriterVideoInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
        
        if ([self.assetWriter canAddInput:self.assetWriterVideoInput]) {
            [self.assetWriter addInput:self.assetWriterVideoInput];
        } else {
            NSLog(@"Unable to add video input.");
            return;
        }
        
        self.assetWriterAudioInput =
        [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio
                                       outputSettings:self.audioSettings];
        
        self.assetWriterAudioInput.expectsMediaDataInRealTime = YES;
        
        if ([self.assetWriter canAddInput:self.assetWriterAudioInput]) {
            [self.assetWriter addInput:self.assetWriterAudioInput];
        } else {
            NSLog(@"Unable to add audio input.");
        }
        
        runMSynchronouslyOnContextQueue(myContext, ^{
            [self.assetWriter startWriting];
        });
        self.isWriting = YES;
        self.firstSample = YES;
    });
}
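Ending a segment finishes the current writer and remembers the file URL; back-deleting then only has to drop the newest file. A hedged sketch (the segmentURLs array is illustrative, not MagicCamera API):

- (void)stopWriting {
    if (!self.isWriting) {
        return;
    }
    self.isWriting = NO;
    
    [self.assetWriterVideoInput markAsFinished];
    [self.assetWriterAudioInput markAsFinished];
    [self.assetWriter finishWritingWithCompletionHandler:^{
        if (self.assetWriter.status == AVAssetWriterStatusCompleted) {
            [self.segmentURLs addObject:[self outputURL]];   // remember this segment for merging
        }
    }];
}

- (void)deleteLastSegment {
    NSURL *last = [self.segmentURLs lastObject];
    if (last) {
        [[NSFileManager defaultManager] removeItemAtURL:last error:nil];
        [self.segmentURLs removeLastObject];
    }
}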

Result
