Flutter Texture iOS Video Rendering

2019-08-21 · janiokq

Wrapping the NetEase Yunxin (网易云信) Player for Flutter

Here's the situation: our company's product includes video playback, built on top of NetEase Yunxin's (网易云信) video service, and the app is written in Flutter. So how does Flutter play video? The official answer is the video_player plugin. Its implementation decodes video on the native side and maps the frames into Flutter through a Texture. For decoding, iOS uses Apple's own AVPlayer (good at just about everything, except it can't play the streams we need), while Android uses ExoPlayer, which works very nicely.

But:

Yunxin's video is encrypted, and only their own player SDK can decode and play it; the SDK supports streaming on both Android and iOS. So the only option was to wrap it ourselves.

On Android, hooking the video stream up through a SurfaceTexture works fine. On iOS, though, the Yunxin player's raw-data callback delivers frames as NELP_YUV420, i.e. plain YUV420, and mapping those straight into Flutter gives a black screen with working audio:

```objc
// Register for raw video frame callbacks in NELP_YUV420 format.
[_player registerGetVideoRawDataCB:NELP_YUV420 and:^(NELPVideoRawData *frame) {
    Videodata = frame;
}];
```
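One thing to watch: the callback hands over a raw NELPVideoRawData pointer, and how long `frame` stays valid after the callback returns is up to the SDK, so it's safest to copy the bytes out (as the usage code at the end does with an NSData) before touching them from another thread.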

This happens because the Skia engine underneath Flutter only supports BGRA-format video data, so the YUV frames come out as a black screen.
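For context on where that BGRA buffer ends up: on iOS, a Flutter Texture widget is backed by a native object implementing the FlutterTexture protocol, and the engine calls its copyPixelBuffer whenever it wants a frame. Here is a minimal sketch of that contract (the class name NEPlayerTexture and the _latestPixelBuffer ivar are illustrative, not from the original post):

```objc
#import <Flutter/Flutter.h>

// Minimal sketch: the object Flutter polls for frames. Names here are
// illustrative; only the FlutterTexture protocol itself is fixed.
@interface NEPlayerTexture : NSObject <FlutterTexture>
@end

@implementation NEPlayerTexture {
    CVPixelBufferRef _latestPixelBuffer; // most recent BGRA frame
}

// Called by the engine on its raster thread. Must return a retained
// kCVPixelFormatType_32BGRA buffer, or NULL if there is no frame.
- (CVPixelBufferRef)copyPixelBuffer {
    CVPixelBufferRef buffer = _latestPixelBuffer;
    if (buffer) {
        CVPixelBufferRetain(buffer);
    }
    return buffer;
}
@end
```

The texture is registered through the plugin registrar, e.g. `int64_t textureId = [registrar.textures registerTexture:texture];`, and the Dart side displays it with `Texture(textureId: textureId)`.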

First, we convert the YUV420 (I420) data into a CVPixelBufferRef with the method below. It depends on libyuv, which you'll need to import yourself:

```objc
// Convert a packed I420 frame (Y plane, then U, then V) into an NV12
// CVPixelBufferRef. Uses the libyuv:: C++ namespace, so this file must be
// compiled as Objective-C++ (.mm).
+ (CVPixelBufferRef)i420FrameToPixelBuffer:(NSData *)i420Frame
                                     width:(int)frameWidth
                                    height:(int)frameHeight
{
    int width = frameWidth;
    int height = frameHeight;

    if (i420Frame == nil) {
        return NULL;
    }

    CVPixelBufferRef pixelBuffer = NULL;
    // IOSurface backing lets the buffer be shared with the Flutter engine.
    NSDictionary *pixelBufferAttributes = @{
        (id)kCVPixelBufferIOSurfacePropertiesKey : @{}
    };

    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                          width,
                                          height,
                                          kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                                          (__bridge CFDictionaryRef)pixelBufferAttributes,
                                          &pixelBuffer);
    if (result != kCVReturnSuccess) {
        NSLog(@"Failed to create pixel buffer: %d", result);
        return NULL;
    }

    result = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    if (result != kCVReturnSuccess) {
        CFRelease(pixelBuffer);
        NSLog(@"Failed to lock base address: %d", result);
        return NULL;
    }

    // Destination NV12 planes: Y, then interleaved UV.
    uint8_t *dstY = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    int dstStrideY = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    uint8_t *dstUV = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    int dstStrideUV = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);

    // Source I420 planes, laid out back to back inside the NSData.
    UInt8 *_planeData[3];
    NSUInteger _stride[3];

    CFDataRef dataref = (__bridge CFDataRef)i420Frame;
    uint8_t *_data = (UInt8 *)CFDataGetBytePtr(dataref);

    _planeData[NEI420FramePlaneY] = _data;
    _planeData[NEI420FramePlaneU] = _planeData[NEI420FramePlaneY] + width * height;
    _planeData[NEI420FramePlaneV] = _planeData[NEI420FramePlaneU] + width * height / 4;

    _stride[NEI420FramePlaneY] = width;
    _stride[NEI420FramePlaneU] = width >> 1;
    _stride[NEI420FramePlaneV] = width >> 1;

#ifndef KLSMediaCaptureDemoCondense
    int ret = libyuv::I420ToNV12(_planeData[NEI420FramePlaneY], (int)_stride[NEI420FramePlaneY],
                                 _planeData[NEI420FramePlaneU], (int)_stride[NEI420FramePlaneU],
                                 _planeData[NEI420FramePlaneV], (int)_stride[NEI420FramePlaneV],
                                 dstY, dstStrideY,
                                 dstUV, dstStrideUV,
                                 width, height);
#endif
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
#ifndef KLSMediaCaptureDemoCondense
    if (ret) {
        NSLog(@"Error converting I420 VideoFrame to NV12: %d", ret);
        CFRelease(pixelBuffer);
        return NULL;
    }
#endif
    return pixelBuffer;
}
```
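The plane-pointer arithmetic above is just the standard I420 layout: a full-resolution Y plane followed by quarter-resolution U and V planes, which is also where the `width * height * 3 / 2` buffer size used later comes from. For a hypothetical 1280×720 frame:

```objc
// I420 plane sizes and offsets for a 1280x720 frame (illustrative numbers):
int width = 1280, height = 720;
int ySize = width * height;              // 921,600 bytes, stride = width
int uSize = (width / 2) * (height / 2);  // 230,400 bytes, stride = width / 2
int vSize = uSize;                       // 230,400 bytes, stride = width / 2
int total = ySize + uSize + vSize;       // 1,382,400 = width * height * 3 / 2
// Offsets match the pointers in the method above:
// Y at 0, U at ySize, V at ySize + uSize.
```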



Next, wrap the pixel buffer in a CMSampleBuffer:


```objc
// Wrap a CVPixelBufferRef in a CMSampleBufferRef stamped with the current
// time. The caller keeps ownership of pixelBuffer (the original released it
// here, but the call site also releases it, which would double-free).
+ (CMSampleBufferRef)pixelBufferToSampleBuffer:(CVPixelBufferRef)pixelBuffer
{
    CMSampleBufferRef sampleBuffer = NULL;
    CMTime frameTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSince1970], 1000000000);
    CMSampleTimingInfo timing = {frameTime, frameTime, kCMTimeInvalid};
    CMVideoFormatDescriptionRef videoInfo = NULL;
    CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &videoInfo);

    OSStatus status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault,
                                                         pixelBuffer, true, NULL, NULL,
                                                         videoInfo, &timing, &sampleBuffer);
    if (status != noErr) {
        NSLog(@"Failed to create sample buffer with error %d.", (int)status);
    }

    if (videoInfo)
        CFRelease(videoInfo);

    return sampleBuffer;
}
```
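Strictly speaking, this wrapper only attaches timing metadata: the next method immediately pulls the CVPixelBufferRef back out with CMSampleBufferGetImageBuffer. If nothing else in your pipeline needs a CMSampleBuffer, you could skip this step and feed the NV12 pixel buffer straight into the BGRA conversion.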


Finally, convert the sample buffer to BGRA:
```objc
// Convert the NV12 data inside a CMSampleBuffer into a new 32BGRA
// CVPixelBufferRef. Note: libyuv's "ARGB" is B,G,R,A in memory on
// little-endian, which is exactly what kCVPixelFormatType_32BGRA expects.
- (CVPixelBufferRef)convertVideoSampleBufferToBGRAData:(CMSampleBufferRef)videoSample
{
    // CVPixelBufferRef is an alias of CVImageBufferRef; the two are used
    // almost interchangeably. Get the image buffer backing the sample.
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(videoSample);

    // Decoded frames can't be read by the CPU until the base address is
    // locked; without the lock, CVPixelBufferGetBaseAddressOfPlane returns
    // NULL or invalid values. The lock itself is cheap - it's the copying
    // done afterwards that costs.
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    // Frame dimensions in pixels.
    size_t pixelWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t pixelHeight = CVPixelBufferGetHeight(pixelBuffer);
    // Source NV12 planes and their strides. Rows may be padded, so use the
    // real bytes-per-row rather than the pixel width.
    uint8_t *y_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    int y_stride = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    uint8_t *uv_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    int uv_stride = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);

    // Create an empty IOSurface-backed 32BGRA CVPixelBufferRef.
    NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferRef pixelBuffer1 = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                          pixelWidth, pixelHeight, kCVPixelFormatType_32BGRA,
                                          (__bridge CFDictionaryRef)pixelAttributes, &pixelBuffer1);
    if (result != kCVReturnSuccess) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        NSLog(@"Unable to create cvpixelbuffer %d", result);
        return NULL;
    }

    result = CVPixelBufferLockBaseAddress(pixelBuffer1, 0);
    if (result != kCVReturnSuccess) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        CFRelease(pixelBuffer1);
        NSLog(@"Failed to lock base address: %d", result);
        return NULL;
    }

    // First byte of the BGRA data in the new buffer.
    uint8_t *rgb_data = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer1);
    int rgb_stride = (int)CVPixelBufferGetBytesPerRow(pixelBuffer1);

    // Let libyuv fill rgb_data, converting NV12 to BGRA. Both buffers must
    // stay locked until the conversion is done.
    int ret = libyuv::NV12ToARGB(y_frame, y_stride,
                                 uv_frame, uv_stride,
                                 rgb_data, rgb_stride,
                                 (int)pixelWidth, (int)pixelHeight);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CVPixelBufferUnlockBaseAddress(pixelBuffer1, 0);

    if (ret) {
        NSLog(@"Error converting NV12 VideoFrame to BGRA: %d", ret);
        CFRelease(pixelBuffer1);
        return NULL;
    }
    return pixelBuffer1;
}
```

And here's how the three methods are used together:


```objc
if (Videodata) {
    int width = Videodata->width;
    int height = Videodata->height;
    // A packed I420 frame occupies width * height * 3/2 bytes (Y + U + V).
    int len = width * height * 3 / 2;
    NSData *data = [[NSData alloc] initWithBytes:Videodata->UsrData length:len];
    CVPixelBufferRef originalPixelBuffer =
        [NEYUVConverter i420FrameToPixelBuffer:data width:width height:height];
    CMSampleBufferRef sampleBuffer =
        [NEYUVConverter pixelBufferToSampleBuffer:originalPixelBuffer];
    CVPixelBufferRef finalPixelBuffer =
        [self convertVideoSampleBufferToBGRAData:sampleBuffer];
    CVPixelBufferRelease(originalPixelBuffer);
    if (sampleBuffer) CFRelease(sampleBuffer);
    return finalPixelBuffer;
}
```
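The last piece of wiring (a sketch under the same assumptions as the FlutterTexture example earlier; _textures and _textureId are illustrative names) is to tell Flutter that a frame is ready each time the SDK callback fires, which is what triggers the copyPixelBuffer call containing the snippet above:

```objc
// In the NELP_YUV420 callback: remember the frame, then poke Flutter.
// The conversion chain runs later, inside copyPixelBuffer.
[_player registerGetVideoRawDataCB:NELP_YUV420 and:^(NELPVideoRawData *frame) {
    Videodata = frame;
    [_textures textureFrameAvailable:_textureId]; // schedules a copyPixelBuffer
}];
```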


Reposted from: 姜姜和张张 (original article).
Copyright belongs to the author. For commercial reproduction, please contact the author for permission; for non-commercial reproduction, please credit the source.
