
Converting CMSampleBufferRef to UIImage, and Mirroring the Result

2021-01-05  泛坷

Way 1 (doesn't work well)

- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get the CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the number of bytes per row of the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data
    // (these parameters assume the pixel buffer holds packed BGRA data)
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);

    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Release the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create a UIImage from the Quartz image
    UIImage *image = [UIImage imageWithCGImage:quartzImage];

    // Release the Quartz image
    CGImageRelease(quartzImage);

    return image;
}
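
Way 1 (and Way 2 below) reads the pixel bytes directly and builds an RGB bitmap context from them, so it only stands a chance if the capture output actually delivers packed BGRA frames; with the default biplanar YCbCr format the bytes don't match the context and the result is garbage, which is likely why these are marked as not usable here. If you want to go this route, request BGRA output on the AVCaptureVideoDataOutput. A minimal sketch, assuming a standard AVCaptureSession pipeline:

AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
// Ask for packed 32-bit BGRA frames so CVPixelBufferGetBaseAddress()
// returns data that the RGB bitmap context above can interpret directly.
videoDataOutput.videoSettings = @{
    (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)
};
// The output is then added to your AVCaptureSession as usual.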

Way 2 (doesn't work well)

- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef buffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    CVPixelBufferLockBaseAddress(buffer, 0);
    uint8_t *base = (uint8_t *)CVPixelBufferGetBaseAddress(buffer);
    // Use the pixel buffer's own dimensions, not a view's frame size
    size_t width = CVPixelBufferGetWidth(buffer);
    size_t height = CVPixelBufferGetHeight(buffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(buffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef cgContext = CGBitmapContextCreate(base, width, height, 8, bytesPerRow, colorSpace,
                                                   kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(colorSpace);

    CGImageRef cgImage = CGBitmapContextCreateImage(cgContext);
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    CGContextRelease(cgContext);

    CVPixelBufferUnlockBaseAddress(buffer, 0);

    return image;
}

Way 3 (grayscale; switching it to RGB doesn't work)

- (UIImage *)imageFromSamplePlanerPixelBuffer:(CMSampleBufferRef)sampleBuffer {

    @autoreleasepool {
        // Get the CMSampleBuffer's Core Video image buffer for the media data
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        // Lock the base address of the pixel buffer
        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        // Get the base address of plane 0 (the luma plane for YCbCr formats)
        void *baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
        // Get the number of bytes per row of plane 0
        size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
        // Get the pixel buffer width and height
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);

        // Create a device-dependent gray color space
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();

        // Create a bitmap graphics context with the sample buffer data
        CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                     bytesPerRow, colorSpace, kCGImageAlphaNone);
        // Create a Quartz image from the pixel data in the bitmap graphics context
        CGImageRef quartzImage = CGBitmapContextCreateImage(context);
        // Unlock the pixel buffer
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

        // Free up the context and color space
        CGContextRelease(context);
        CGColorSpaceRelease(colorSpace);

        // Create an image object from the Quartz image
        UIImage *image = [UIImage imageWithCGImage:quartzImage];

        // Release the Quartz image
        CGImageRelease(quartzImage);

        return image;
    }
}
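
Way 3 works because, with the default '420v'/'420f' capture formats, the buffer is biplanar YCbCr: plane 0 holds only the luma channel, which maps cleanly onto a grayscale context, and the same bytes can never satisfy an RGB context, hence the note above. A small sketch (the helper name imageForSampleBuffer: is made up here) of branching on the buffer's actual pixel format and reusing the methods from Way 1 and Way 4 below:

- (UIImage *)imageForSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    OSType format = CVPixelBufferGetPixelFormatType(imageBuffer);

    if (format == kCVPixelFormatType_32BGRA) {
        // Packed BGRA: the direct bitmap-context approach (Way 1) is usable.
        return [self imageFromSampleBuffer:sampleBuffer];
    }
    // Biplanar YCbCr: plane 0 is luma only, so hand the buffer to Core Image
    // instead (Way 4 below), which performs the conversion itself.
    return [self imageFromSampleBuffer2:sampleBuffer];
}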

Way 4 (tested, works)

- (UIImage *)imageFromSampleBuffer2:(CMSampleBufferRef)sampleBuffer {

    // Wrap the pixel buffer in a CIImage and let Core Image handle the pixel-format conversion
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext createCGImage:ciImage
                                                   fromRect:CGRectMake(0, 0,
                                                                       CVPixelBufferGetWidth(imageBuffer),
                                                                       CVPixelBufferGetHeight(imageBuffer))];

    UIImage *image = [[UIImage alloc] initWithCGImage:videoImage];
    CGImageRelease(videoImage);

    return image;
}
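
For context, here is how this might be called from the capture delegate; the delegate wiring and the previewImageView property are assumptions, not part of the original post. Also note that contextWithOptions: creates a new CIContext on every frame, which is expensive, so caching one in a property is usually worthwhile.

// Assumes the class adopts AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    UIImage *frame = [self imageFromSampleBuffer2:sampleBuffer];
    dispatch_async(dispatch_get_main_queue(), ^{
        // mirrorImage: is the method in the next section; previewImageView is hypothetical
        self.previewImageView.image = [self mirrorImage:frame];
    });
}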

Mirroring

- (UIImage *)mirrorImage:(UIImage *)originImage {
    CGRect rect = CGRectMake(0, 0, originImage.size.width, originImage.size.height);
    UIGraphicsBeginImageContextWithOptions(rect.size, NO, 2);
    CGContextRef currentContext = UIGraphicsGetCurrentContext();
    CGContextClipToRect(currentContext, rect);
    // Rotate 180° and translate back so the image is drawn flipped within the rect
    CGContextRotateCTM(currentContext, M_PI);
    CGContextTranslateCTM(currentContext, -rect.size.width, -rect.size.height);
    CGContextDrawImage(currentContext, rect, originImage.CGImage);
    UIImage *drawImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return [UIImage imageWithCGImage:drawImage.CGImage
                               scale:originImage.scale
                         orientation:originImage.imageOrientation];
}
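
If the goal is just a horizontal mirror and the source image's orientation is UIImageOrientationUp, re-wrapping the CGImage with a mirrored orientation avoids redrawing entirely. A minimal alternative sketch:

// Mirror via orientation metadata only; no pixels are copied.
UIImage *mirrored = [UIImage imageWithCGImage:originImage.CGImage
                                        scale:originImage.scale
                                  orientation:UIImageOrientationUpMirrored];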