Flutter Video Rendering: Converting YUV Data to BGRA

2020-04-21  佐鼬_1282

Flutter's Skia engine only supports BGRA-format pixel data, so video frames must be converted before they can be rendered. This post focuses on converting YUV420 data to BGRA.

Note: the conversion below uses the libyuv library, which you can import yourself: https://github.com/lemenkov/libyuv

First, a brief overview of the pixel formats involved.

/*
 What a kCVPixelFormatType name means:

 kCVPixelFormatType_{bit depth|sequence}{color space}{Planar|BiPlanar}{VideoRange|FullRange}

 Planar: planar; BiPlanar: bi-planar.

 The Planar/BiPlanar distinction mainly matters for YUV: when U and V are
 stored in separate planes the format is Planar; when they are interleaved
 into a single plane it is BiPlanar. So kCVPixelFormatType_420YpCbCr8PlanarFullRange
 is 420p (I420), while kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is NV12.
 */

static OSType KVideoPixelFormatType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
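
To make the Planar/BiPlanar distinction concrete, here is a sketch of how one tiny 4×2 frame lays out in each format (illustrative values only):

// I420 (Planar): three separate planes; U and V are each quarter-size.
//   Y plane  (4x2):  Y Y Y Y
//                    Y Y Y Y
//   U plane  (2x1):  U U
//   V plane  (2x1):  V V
//
// NV12 (BiPlanar): two planes, with U and V interleaved in the second.
//   Y plane  (4x2):  Y Y Y Y
//                    Y Y Y Y
//   UV plane (2x1):  U V U V
//
// Either way, a w x h 4:2:0 frame occupies w * h * 3 / 2 bytes.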

Next, the overall conversion and usage flow:

- (void)yuvPixelBufferWithData:(void *)dataFrame width:(int)w height:(int)h {
    // Raw YUV bytes
    unsigned char *buffer = (unsigned char *)dataFrame;

    /******
    // Converting this way produced a black-and-white image; the libyuv path
    // below works correctly.
    // CVPixelBufferRef getCroppedPixelBuffer = [self copyDataFromBuffer:buffer toYUVPixelBufferWithWidth:w Height:h];
     ******/

    // Converting with libyuv's I420ToNV12() works correctly: turn the raw
    // YUV420 bytes into a CVPixelBuffer frame.
    CVPixelBufferRef getOriginalPixelBuffer = [self yuv420FrameToPixelBuffer:buffer width:w height:h];
    if (!getOriginalPixelBuffer) {
        return;
    }

    // Wrap the pixel buffer in a CMSampleBuffer.
    // Note: pixelBufferToSampleBuffer: takes ownership of (and releases)
    // getOriginalPixelBuffer, so it must not be released again here; the
    // original code double-released it.
    CMSampleBufferRef sampleBuffer = [self pixelBufferToSampleBuffer:getOriginalPixelBuffer];
    if (!sampleBuffer) {
        return;
    }

    // Use libyuv to convert the sample buffer into a 32BGRA CVPixelBuffer.
    CVPixelBufferRef finalPixelBuffer = [self convertVideoSampleBufferToBGRAData:sampleBuffer];
    CFRelease(sampleBuffer); // the data has been copied out; drop the wrapper
    if (!finalPixelBuffer) {
        return;
    }

    // Swap in the new frame: release the previous frame's memory and point
    // _target at the new frame.
    if (_target) {
        CFRelease(_target);
    }
    _target = finalPixelBuffer;

    // Notify in real time so the Flutter side picks up the latest texture.
    _callback();
}
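
For context: `_target` and `_callback` above are assumed to live in a class that adopts Flutter's `FlutterTexture` protocol and was registered with a `FlutterTextureRegistry`; that wiring is not shown in this post, so the sketch below is an assumption about the setup. Flutter's raster thread then pulls the latest BGRA frame through `copyPixelBuffer`:

// Minimal sketch of the FlutterTexture side (assumed setup: this object was
// registered via [registry registerTexture:self], and _callback triggers
// [registry textureFrameAvailable:myTextureId]).
- (CVPixelBufferRef _Nullable)copyPixelBuffer {
    CVPixelBufferRef buffer = _target;
    if (buffer) {
        // Flutter releases the returned buffer, so hand back a +1 reference.
        CVPixelBufferRetain(buffer);
    }
    return buffer;
}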

The conversion itself takes three steps.

Step 1: YUV420 data to a CVPixelBufferRef (still YUV420 underneath)

/********************  Convert raw YUV420 data to an NV12 CVPixelBufferRef (still YUV420 underneath)  **********************/

- (CVPixelBufferRef)yuv420FrameToPixelBuffer:(const unsigned char *)yuv420Frame width:(int)frameWidth height:(int)frameHeight
{
    if (yuv420Frame == NULL) {
        return NULL;
    }

    CVPixelBufferRef pixelBuffer = NULL;
    NSDictionary *pixelBufferAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};

    // Allocate memory for the NV12 CVPixelBuffer.
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, (__bridge CFDictionaryRef)pixelBufferAttributes, &pixelBuffer);
    if (result != kCVReturnSuccess) {
        NSLog(@"[yuv420FrameToPixelBuffer] Failed to create pixel buffer: %d", result);
        return NULL;
    }

    // Lock the buffer before touching its memory.
    result = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    if (result != kCVReturnSuccess) {
        CFRelease(pixelBuffer);
        NSLog(@"[yuv420FrameToPixelBuffer] Failed to lock base address: %d", result);
        return NULL;
    }

    // Destination Y plane in the pixel buffer
    uint8_t *dstY = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    int dstStrideY = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    // Destination interleaved UV plane in the pixel buffer
    uint8_t *dstUV = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    int dstStrideUV = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);

    // Source I420 planes: Y is w*h bytes, U and V are w*h/4 bytes each.
    uint8_t *_planeData[3];
    NSUInteger _stride[3];
    uint8_t *_data = (uint8_t *)yuv420Frame;
    _planeData[0] = _data;                                        // Y
    _planeData[1] = _planeData[0] + frameWidth * frameHeight;     // U
    _planeData[2] = _planeData[1] + frameWidth * frameHeight / 4; // V
    _stride[0] = frameWidth;
    _stride[1] = frameWidth >> 1;
    _stride[2] = frameWidth >> 1;

    // Convert with libyuv.
    int ret = I420ToNV12(_planeData[0], (int)_stride[0],
                         _planeData[1], (int)_stride[1],
                         _planeData[2], (int)_stride[2],
                         dstY, dstStrideY,
                         dstUV, dstStrideUV,
                         frameWidth, frameHeight);

    // Unlock.
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    if (ret) {
        NSLog(@"[yuv420FrameToPixelBuffer] Error converting YUV420 frame to NV12: %d", ret);
        CFRelease(pixelBuffer);
        return NULL;
    }
    return pixelBuffer;
}
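
A hedged usage sketch: a full I420 frame for a w×h video is w*h*3/2 bytes (the Y plane plus quarter-size U and V planes). `readFrameOfLength:` below is a hypothetical stand-in for whatever produces the raw frames (decoder callback, file read, network packet):

// Hypothetical caller of the conversion above.
int w = 640, h = 360;
NSData *frame = [self readFrameOfLength:(w * h * 3 / 2)]; // I420 frame size
CVPixelBufferRef nv12Buffer = [self yuv420FrameToPixelBuffer:(const unsigned char *)frame.bytes
                                                       width:w
                                                      height:h];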


Step 2: Wrap the CVPixelBuffer in a CMSampleBuffer

// Wrap the pixel buffer in a CMSampleBuffer (no data is copied or compressed;
// this just attaches timing and format metadata).
// Note: this method takes ownership of pixelBuffer and releases it.
- (CMSampleBufferRef)pixelBufferToSampleBuffer:(CVPixelBufferRef)pixelBuffer {
    CMSampleBufferRef sampleBuffer = NULL;
    CMTime frameTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSince1970], 1000000000);
    // CMSampleTimingInfo is {duration, presentationTimeStamp, decodeTimeStamp};
    // the duration of a live frame is unknown, so mark it invalid.
    CMSampleTimingInfo timing = {kCMTimeInvalid, frameTime, kCMTimeInvalid};
    CMVideoFormatDescriptionRef videoInfo = NULL;
    CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &videoInfo);
    OSStatus status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &timing, &sampleBuffer);
    // Release what we own.
    CFRelease(pixelBuffer);
    if (videoInfo) {
        CFRelease(videoInfo);
    }
    if (status != noErr) {
        NSLog(@"[pixelBufferToSampleBuffer] Failed to create sample buffer with error %d.", (int)status);
        return NULL;
    }
    return sampleBuffer;
}
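
Nothing is actually compressed in this step; `CMSampleBufferCreateForImageBuffer` wraps the existing pixel buffer without copying it. A hedged sanity sketch (assuming `somePixelBuffer` is an NV12 buffer produced by step 1):

// Hypothetical check: the wrapped sample buffer hands back the very same
// (non-copied) pixel buffer, still in NV12.
CMSampleBufferRef sample = [self pixelBufferToSampleBuffer:somePixelBuffer];
if (sample) {
    CVImageBufferRef img = CMSampleBufferGetImageBuffer(sample);
    OSType fmt = CVPixelBufferGetPixelFormatType(img);
    NSAssert(fmt == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, @"unexpected format");
    CFRelease(sample); // also drops the sample buffer's retain on img
}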

Step 3: Convert the CVPixelBuffer to BGRA

// Convert to kCVPixelFormatType_32BGRA data (so Flutter's Skia engine can draw it).
- (CVPixelBufferRef)convertVideoSampleBufferToBGRAData:(CMSampleBufferRef)videoSample {
    // CVPixelBufferRef is an alias of CVImageBufferRef; the two are used
    // almost interchangeably.
    // Get the image buffer backing the CMSampleBuffer.
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(videoSample);

    // Decoded image data (e.g. from VideoToolbox) is not directly accessible
    // to the CPU: the address must be locked with CVPixelBufferLockBaseAddress()
    // before accessing it from main memory, otherwise calls such as
    // CVPixelBufferGetBaseAddressOfPlane return NULL or invalid values.
    // Locking itself costs little; the memory copies out of the CVPixelBuffer
    // after locking are the relatively expensive part.
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    // Image width and height in pixels
    size_t pixelWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t pixelHeight = CVPixelBufferGetHeight(pixelBuffer);
    // Source Y plane and its stride (bytes per row may include padding,
    // so use the real stride rather than the pixel width)
    uint8_t *y_frame = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    int y_stride = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    // Source interleaved UV plane and its stride
    uint8_t *uv_frame = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    int uv_stride = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);

    // Create an empty 32BGRA CVPixelBufferRef.
    NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferRef pixelBuffer1 = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault, pixelWidth, pixelHeight, kCVPixelFormatType_32BGRA, (__bridge CFDictionaryRef)pixelAttributes, &pixelBuffer1);
    if (result != kCVReturnSuccess) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        NSLog(@"[convertVideoSampleBufferToBGRAData] Unable to create cvpixelbuffer %d", result);
        return NULL;
    }
    result = CVPixelBufferLockBaseAddress(pixelBuffer1, 0);
    if (result != kCVReturnSuccess) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        CFRelease(pixelBuffer1);
        NSLog(@"[convertVideoSampleBufferToBGRAData] Failed to lock base address: %d", result);
        return NULL;
    }

    // Base address of the new buffer's BGRA data.
    uint8_t *rgb_data = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer1);
    int rgb_stride = (int)CVPixelBufferGetBytesPerRow(pixelBuffer1);

    // Write into rgb_data with libyuv, converting NV12 to BGRA.
    int ret = NV12ToARGB(y_frame, y_stride, uv_frame, uv_stride, rgb_data, rgb_stride, (int)pixelWidth, (int)pixelHeight);

    // Keep the source locked until the conversion has finished reading it.
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CVPixelBufferUnlockBaseAddress(pixelBuffer1, 0);
    if (ret) {
        NSLog(@"[convertVideoSampleBufferToBGRAData] Error converting NV12 frame to BGRA: %d", ret);
        CFRelease(pixelBuffer1);
        return NULL;
    }
    return pixelBuffer1;
}
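
One naming pitfall the code above depends on: libyuv names its functions after the 32-bit word order, so `NV12ToARGB` writes bytes to memory in B, G, R, A order on little-endian CPUs, which is exactly the layout `kCVPixelFormatType_32BGRA` expects. A one-pixel sanity sketch:

// Convert a single white NV12 pixel and inspect the byte order.
uint8_t y[1]   = {255};       // white luma
uint8_t uv[2]  = {128, 128};  // neutral chroma
uint8_t out[4] = {0};
NV12ToARGB(y, 1, uv, 2, out, 4, 1, 1);
// out is now {255, 255, 255, 255}: B, G, R, A in memory order, i.e. white
// with full alpha. (White stays white under either the limited-range or
// full-range conversion matrix, so this check is matrix-agnostic.)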
