图像处理

GPUImage Filter链(2)技术实现

2017-02-21  本文已影响373人  南风无影

我要实现:把摄像头采集的数据 (CVPixelBufferRef)pixel_buffer,通过 _rawDataInputFilter 与 _rawDataOutputFilter 加入到 filter 链,处理后再把数据传回来,从而实现接入第三方的 filter。

init初始化
self.Process = [[ProcessPixelBuffer alloc]init];

转化接口:
CVPixelBufferRef output = NULL;
[self.Process processWithCVPixelBuffer:pixel_buffer];
output = pixel_buffer; //output转化后输出的数据

转化工具processWithCVPixelBuffer代码:

ProcessPixelBuffer.m

#import "ProcessPixelBuffer.h"
#import "YFGPUImageCameraBeautyFilters.h"

// Private class extension: keeps the filter-chain plumbing out of the public header.
@interface ProcessPixelBuffer (){
    GPUImageFilter                  *_beautityFilter;   // beauty filter that renders the processed frame
    GPUImageRawDataInput *_rawDataInputFilter;          // feeds raw camera bytes into the filter chain
    CVPixelBufferRef imageBuffer;                       // render target written by the completion block, read back in processWithCVPixelBuffer:

}
// Signaled by the filter's frameProcessingCompletionBlock once rendering has finished.
@property (nonatomic, strong) dispatch_semaphore_t semaphore;

@end

@implementation ProcessPixelBuffer
// Sets up the rawDataInput -> beauty filter chain and installs a completion
// block that captures the rendered CVPixelBufferRef and signals the waiter.
- (instancetype)init
{
    self = [super init];
    if (self) {
        
        _semaphore = dispatch_semaphore_create(0);

        _rawDataInputFilter = [[GPUImageRawDataInput alloc] initWithBytes:nil size:CGSizeMake(0, 0)];
        

         _beautityFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromString:kGPUImageBeautifyFragmentShaderString_Level_5];
        
        __weak typeof(self) ws = self;
        
        [_beautityFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime time) {
            GPUImageFramebuffer *imageFramebuffer = output.framebufferForOutput;
            // Wait for the GPU to finish rendering before the target is read back.
            glFinish();
            
            // NOTE(review): `imageBuffer` is an ivar, so this assignment
            // implicitly captures `self` strongly inside the block despite the
            // __weak `ws` above — the filter retains the block, the block
            // retains self, and dealloc never runs. The revised version later
            // in this article fixes it by going through `ws.imageBuffer`.
            imageBuffer = [imageFramebuffer getRenderTarget];
            
            dispatch_semaphore_signal(ws.semaphore);
            
        }];

        [_rawDataInputFilter addTarget:_beautityFilter];

    }
    return self;
}

// Processes `pixelBuffer` in place: uploads its bytes into the filter chain,
// blocks until the completion block fires, then copies the result back.
- (void)processWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer{
    
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    
    // Read the layout details of the incoming CVImageBuffer.
    uint8_t *base;
    size_t width, height, bytesPerRow, size;
    base = CVPixelBufferGetBaseAddress(pixelBuffer);
    width = CVPixelBufferGetWidth(pixelBuffer);
    height = CVPixelBufferGetHeight(pixelBuffer);
    bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    
    // Width is taken as bytesPerRow/4 (BGRA = 4 bytes/pixel) so row padding is
    // included and the upload stride matches the buffer's actual stride.
    [_rawDataInputFilter updateDataFromBytes:base size:CGSizeMake(bytesPerRow/4, height)];
    [_rawDataInputFilter processData];
    
    // Block until the frameProcessingCompletionBlock signals completion.
    dispatch_semaphore_wait(self.semaphore, DISPATCH_TIME_FOREVER);
    
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    
    uint8_t *data = CVPixelBufferGetBaseAddress(imageBuffer);
    size = CVPixelBufferGetDataSize(imageBuffer);

    // NOTE(review): copies the render target's full data size into `base`
    // without checking the destination's capacity — overruns `pixelBuffer`
    // if the two buffers ever differ in size.
    memcpy(base,data,size);
    
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}

-(void)dealloc{
    // Break GPUImage's chain links. NOTE(review): never reached in this
    // version because of the retain cycle described in init.
    [_beautityFilter removeAllTargets];
}
@end




ProcessPixelBuffer.h


#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>

/// Runs a camera CVPixelBuffer through a GPUImage filter chain and writes the
/// filtered pixels back into the same buffer (in-place processing).
@interface ProcessPixelBuffer : NSObject

/// Processes `pixelBuffer` in place: uploads its bytes into the filter chain,
/// blocks until rendering completes, then copies the result back into the buffer.
- (void)processWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer;

@end
修改GPUImage源码:
GPUImageRawDataInput.m

// Uploads raw bytes into the output framebuffer's texture.
// Fix: the pasted patch left a stray leading `+` diff marker on the
// glTexImage2D line, which is not valid Objective-C; it is removed here.
- (void)uploadBytes:(GLubyte *)bytesToUpload;
{
    [GPUImageContext useImageProcessingContext];

    // TODO: This probably isn't right, and will need to be corrected
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:uploadedImageSize textureOptions:self.outputTextureOptions onlyTexture:YES];
    
    glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
//  glTexImage2D(GL_TEXTURE_2D, 0, _pixelFormat, (int)uploadedImageSize.width, (int)uploadedImageSize.height, 0, (GLint)_pixelFormat, (GLenum)_pixelType, bytesToUpload); //gongjia mask
    // Force an RGBA internal format with BGRA source ordering so camera BGRA
    // frames upload correctly regardless of the configured _pixelFormat.
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)uploadedImageSize.width, (int)uploadedImageSize.height, 0, (GLint)GL_BGRA, (GLenum)_pixelType, bytesToUpload); //gongjia add
}

- (void)processData;
{
    if (dispatch_semaphore_wait(dataUpdateSemaphore, DISPATCH_TIME_NOW) != 0)
    {
        return;
    }
    
    runAsynchronouslyOnVideoProcessingQueue(^{
  +      [GPUImageContext useImageProcessingContext]; //gongjia add
        CGSize pixelSizeOfImage = [self outputImageSize];

GPUImageFrameBuffer.m新增
// modified by gongjia
- (CVPixelBufferRef)getRenderTarget {
    return renderTarget;
}

GPUImageFrameBuffer.h新增
/// Returns the framebuffer's backing CVPixelBufferRef (not retained by the caller).
- (CVPixelBufferRef)getRenderTarget;

这样改 ProcessPixelBuffer.m 还是有问题:对象始终进不去析构函数 dealloc。原因出在 setFrameProcessingCompletionBlock 上:filter 会持有这个 block,而 block 里直接给实例变量 imageBuffer 赋值,相当于隐式强引用了 self,即使外面声明了 __weak 的 ws 也依然会造成 retain cycle。
解决办法是把 imageBuffer 改成属性,并在 block 里一律通过 weak 引用访问:
__weak typeof(self) ws = self;

#import "ProcessPixelBuffer.h"
#import "YFGPUImageCameraBeautyFilters.h"

// Private class extension for the revised implementation.
@interface ProcessPixelBuffer (){
    GPUImageFilter                  *_beautityFilter;   // beauty filter rendering the processed frame
    GPUImageRawDataInput *_rawDataInputFilter;          // feeds raw camera bytes into the filter chain
}
// Signaled by the filter's frameProcessingCompletionBlock when a frame is rendered.
@property (nonatomic, strong) dispatch_semaphore_t semaphore;
// Render target of the last processed frame. Declared as a property (instead of
// an ivar) so the completion block can set it through the weak `ws` reference
// without implicitly retaining self — this is what breaks the retain cycle.
@property (nonatomic) CVPixelBufferRef imageBuffer;

@end

@implementation ProcessPixelBuffer
/// Builds the rawDataInput -> beauty filter chain and installs a completion
/// block that captures the rendered CVPixelBufferRef and signals the waiter.
- (instancetype)init
{
    self = [super init];
    if (self) {

        _semaphore = dispatch_semaphore_create(0);

        // GPUImageRawDataInput has no zero-argument initializer; create it
        // empty and feed real bytes per frame via updateDataFromBytes:size:.
        _rawDataInputFilter = [[GPUImageRawDataInput alloc] initWithBytes:nil size:CGSizeMake(0, 0)];

        _beautityFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromString:kGPUImageBeautifyFragmentShaderString_Level_5];

        // The filter retains this block; capturing self weakly avoids the
        // retain cycle that kept dealloc from ever running.
        __weak typeof(self) ws = self;

        [_beautityFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime time) {
            // Weak/strong dance: if self died mid-render, bail out instead of
            // calling dispatch_semaphore_signal(NULL), which would crash.
            __strong typeof(ws) strongSelf = ws;
            if (!strongSelf) {
                return;
            }

            GPUImageFramebuffer *imageFramebuffer = output.framebufferForOutput;
            // Ensure the GPU has finished rendering before the pixels are read back.
            glFinish();

            strongSelf.imageBuffer = [imageFramebuffer getRenderTarget];

            // Wake the thread blocked in processWithCVPixelBuffer:.
            dispatch_semaphore_signal(strongSelf.semaphore);
        }];

        [_rawDataInputFilter addTarget:_beautityFilter];
    }
    return self;
}

/// Processes `pixelBuffer` in place: uploads its bytes into the filter chain,
/// waits for rendering to finish, then copies the filtered pixels back.
- (void)processWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer{

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    // Read the layout details of the incoming CVImageBuffer.
    uint8_t *base = CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);

    // Width is taken as bytesPerRow/4 (BGRA = 4 bytes/pixel) so row padding is
    // included and the upload stride matches the buffer's actual stride.
    [_rawDataInputFilter updateDataFromBytes:base size:CGSizeMake(bytesPerRow/4, height)];
    [_rawDataInputFilter processData];

    // Block until the frameProcessingCompletionBlock signals completion.
    dispatch_semaphore_wait(self.semaphore, DISPATCH_TIME_FOREVER);

    CVPixelBufferLockBaseAddress(self.imageBuffer, 0);

    uint8_t *data = CVPixelBufferGetBaseAddress(self.imageBuffer);

    // Bug fix: clamp the copy to the smaller of the two buffers. The original
    // copied CVPixelBufferGetDataSize(imageBuffer) bytes unconditionally,
    // which overruns `pixelBuffer` whenever the render target is larger.
    size_t sourceSize = CVPixelBufferGetDataSize(self.imageBuffer);
    size_t destinationSize = CVPixelBufferGetDataSize(pixelBuffer);
    if (base && data) {
        memcpy(base, data, MIN(sourceSize, destinationSize));
    }

    CVPixelBufferUnlockBaseAddress(self.imageBuffer, 0);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}

-(void)dealloc{
    // Break GPUImage's chain links so the filters can be released.
    [_beautityFilter removeAllTargets];
}
@end


参考的是GPUImageHistogramEqualizationFilter.m源码

// Reference source (GPUImageHistogramEqualizationFilter.m): demonstrates the
// RawDataOutput -> CPU processing -> RawDataInput round trip this article uses.
- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType
{
    if (!(self = [super init]))
    {
        return nil;
    }
    
    histogramFilter = [[GPUImageHistogramFilter alloc] initWithHistogramType:newHistogramType];
    [self addFilter:histogramFilter];
    
    GLubyte dummyInput[4 * 256]; // NB: No way to initialise GPUImageRawDataInput without providing bytes

    // GPUImageRawDataInput pushes raw bytes (here, the computed equalization
    // LUT) back into the filter chain as a 256x1 BGRA texture.
    rawDataInputFilter = [[GPUImageRawDataInput alloc] initWithBytes:dummyInput size:CGSizeMake(256.0, 1.0) pixelFormat:GPUPixelFormatBGRA type:GPUPixelTypeUByte];

    // GPUImageRawDataOutput implements GPUImageInput: it receives rendered
    // frames from the chain and exposes them as raw bytes for CPU-side work.
    rawDataOutputFilter = [[GPUImageRawDataOutput alloc] initWithImageSize:CGSizeMake(256.0, 3.0) resultsInBGRAFormat:YES];
    
    // __unsafe_unretained breaks the retain cycle: the output filter retains
    // the block, so the block must not retain the filters back.
    __unsafe_unretained GPUImageRawDataOutput *_rawDataOutputFilter = rawDataOutputFilter;
    __unsafe_unretained GPUImageRawDataInput *_rawDataInputFilter = rawDataInputFilter;
    [rawDataOutputFilter setNewFrameAvailableBlock:^{
        
        unsigned int histogramBins[3][256];
        
        [_rawDataOutputFilter lockFramebufferForReading];

        // Pointer to the raw histogram bytes; skip the first output row.
        GLubyte *data  = [_rawDataOutputFilter rawBytesForImage];
        data += [_rawDataOutputFilter bytesPerRowInOutput];

        histogramBins[0][0] = *data++;
        histogramBins[1][0] = *data++;
        histogramBins[2][0] = *data++;
        data++;
        
        // Build cumulative (prefix-sum) histograms per channel; the fourth
        // byte of each BGRA pixel is skipped.
        for (unsigned int x = 1; x < 256; x++) {
            histogramBins[0][x] = histogramBins[0][x-1] + *data++;
            histogramBins[1][x] = histogramBins[1][x-1] + *data++;
            histogramBins[2][x] = histogramBins[2][x-1] + *data++;
            data++;
        }
        
        [_rawDataOutputFilter unlockFramebufferAfterReading];

        GLubyte colorMapping[4 * 256];
        GLubyte *_colorMapping = colorMapping;
        
        // Normalize each cumulative histogram into a 0-255 equalization LUT.
        for (unsigned int x = 0; x < 256; x++) {
            *_colorMapping++ = (GLubyte) (((histogramBins[0][x] - histogramBins[0][0]) * 255) / histogramBins[0][255]);
            *_colorMapping++ = (GLubyte) (((histogramBins[1][x] - histogramBins[1][0]) * 255) / histogramBins[1][255]);
            *_colorMapping++ = (GLubyte) (((histogramBins[2][x] - histogramBins[2][0]) * 255) / histogramBins[2][255]);
            *_colorMapping++ = 255;
        }
        
        _colorMapping = colorMapping;
        // Feed the LUT back into the chain for the equalization shader to sample.
        [_rawDataInputFilter updateDataFromBytes:_colorMapping size:CGSizeMake(256.0, 1.0)];
        [_rawDataInputFilter processData];
    }];
    [histogramFilter addTarget:rawDataOutputFilter];
    
    // Pick the equalization shader matching the requested histogram type.
    NSString *fragmentShader = nil;
    switch (newHistogramType) {
        case kGPUImageHistogramRed:
            fragmentShader = kGPUImageRedHistogramEqualizationFragmentShaderString;
            break;
        case kGPUImageHistogramGreen:
            fragmentShader = kGPUImageGreenHistogramEqualizationFragmentShaderString;
            break;
        case kGPUImageHistogramBlue:
            fragmentShader = kGPUImageBlueHistogramEqualizationFragmentShaderString;
            break;
        default:
        case kGPUImageHistogramRGB:
            fragmentShader = kGPUImageRGBHistogramEqualizationFragmentShaderString;
            break;
        case kGPUImageHistogramLuminance:
            fragmentShader = kGPUImageLuminanceHistogramEqualizationFragmentShaderString;
            break;
    }
    // The two-input filter samples the original image plus the LUT texture.
    GPUImageFilter *equalizationFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:fragmentShader];
    [rawDataInputFilter addTarget:equalizationFilter atTextureLocation:1];
    
    [self addFilter:equalizationFilter];
    
    self.initialFilters = [NSArray arrayWithObjects:histogramFilter, equalizationFilter, nil];
    self.terminalFilter = equalizationFilter;
    
    self.downsamplingFactor = 16;
    
    return self;
}

上一篇下一篇

猜你喜欢

热点阅读