Rendering iOS NV12 Video Data in Unity --- (Part 1)

2018-12-30  huqigu

In our project, video playback on iOS is handled by the third-party open-source framework ijkplayer. With hardware decoding, the decoded frames come out in NV12 format, but Unity can only render RGB, so a conversion is needed.

Solution:

On the iOS side, use FFmpeg to convert NV12 to YUV420P, pass the three Y/U/V planes to Unity separately, and then convert YUV420P to RGB in Unity for rendering.
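For context, the two formats carry the same information and differ only in how the chroma is laid out: NV12 stores a full-resolution Y plane followed by a single plane of interleaved U/V samples (each chroma component at half resolution in both dimensions), while YUV420P (I420) stores those same chroma samples in two separate planes. The sws_scale call used below performs this conversion (plus stride handling); purely as an illustration, a hand-rolled de-interleave assuming tightly packed rows and even dimensions could look like this:

#include <stdint.h>
#include <string.h>

// Illustration only: NV12 -> YUV420P by copying Y and de-interleaving the UV plane.
// Assumes stride == width and even width/height; the actual code below uses sws_scale.
static void nv12_to_yuv420p(const uint8_t *srcY, const uint8_t *srcUV,
                            uint8_t *dstY, uint8_t *dstU, uint8_t *dstV,
                            int width, int height)
{
    memcpy(dstY, srcY, (size_t)width * height);   // the Y plane is identical in both formats
    int chromaPixels = (width / 2) * (height / 2);
    for (int i = 0; i < chromaPixels; i++) {
        dstU[i] = srcUV[2 * i];                    // U sample
        dstV[i] = srcUV[2 * i + 1];                // V sample
    }
}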

Implementation:
// Get the two NV12 plane pointers: one for the Y plane, one for the interleaved UV plane.
// (imageBuffer must already be locked with CVPixelBufferLockBaseAddress.)
UInt8 *bufferPtrY = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
UInt8 *bufferPtrUV = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
if (!bufferPtrY || !bufferPtrUV) {
    printf("!bufferPtrY || !bufferPtrUV\n");
    return;
}
// Note: the Y-plane stride (bytes per row) is used as the width so that it matches srcStride below;
// it may be larger than the visible width if the rows are padded.
size_t width = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
size_t height = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
// Lazily allocate the destination YUV420P frame on first use (dest_frame is an AVFrame **).
AVFrame *pFrame = *dest_frame;
if (pFrame == NULL)
{
     pFrame = av_frame_alloc();
     av_image_alloc(pFrame->data, pFrame->linesize, (int)width, (int)height, AV_PIX_FMT_YUV420P, 1);
     *dest_frame = pFrame;
}
static int sws_flags = SWS_FAST_BILINEAR;

// Source planes and strides for NV12: plane 0 = Y, plane 1 = interleaved UV (same byte stride as the Y plane).
uint8_t *srcSlice[8] = {bufferPtrY, bufferPtrUV, NULL, NULL, NULL, NULL, NULL, NULL};
int srcStride[8] = {(int)width, (int)width, 0, 0, 0, 0, 0, 0};
    
// Create the NV12 -> YUV420P scaling context once and reuse it for subsequent frames.
if (img_convert_ctx == NULL)
{
    img_convert_ctx = sws_getContext((int)width,
                                     (int)height,
                                     AV_PIX_FMT_NV12,
                                     (int)width,
                                     overlay->h,        // destination height taken from the player overlay
                                     AV_PIX_FMT_YUV420P,
                                     sws_flags,
                                     NULL, NULL, NULL);
}
int ret = sws_scale(
              img_convert_ctx,
              srcSlice,
              srcStride,
              0,
              overlay->h,
              pFrame->data,
              pFrame->linesize);

At this point, pFrame holds the image data converted to YUV420P.
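The plane pointers and sizes that the texture upload below expects can be taken straight from the converted frame. A minimal sketch, using the variable names (pData0/pData1/pData2, w, h) that appear in the snippet that follows:

// For YUV420P, data[0]/data[1]/data[2] are the Y, U, and V planes.
uint8_t *pData0 = pFrame->data[0];   // Y plane, w x h bytes
uint8_t *pData1 = pFrame->data[1];   // U plane, (w/2) x (h/2) bytes
uint8_t *pData2 = pFrame->data[2];   // V plane, (w/2) x (h/2) bytes
int w = (int)width;                  // note: this is the Y-plane stride used as the width above
int h = (int)height;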

// Texture names for the Y, U, and V planes; static and zero-initialized so the creation check below is valid across calls.
static GLuint tex[3] = {0, 0, 0};
// If tex[0] is 0, none of the textures have been created yet.
if (0 == tex[0]) {
      // Create and configure all three textures once.
      for (int i = 0; i < 3; i++)
      {
          glGenTextures(1, &tex[i]);
          glBindTexture(GL_TEXTURE_2D, tex[i]);
          glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
          glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
          glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
          glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
      }
}
glBindTexture(GL_TEXTURE_2D, tex[0]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, w, h, 0, GL_ALPHA, GL_UNSIGNED_BYTE, pData0);
            
glBindTexture(GL_TEXTURE_2D, tex[1]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, w/2, h/2, 0, GL_ALPHA, GL_UNSIGNED_BYTE, pData1);
            
glBindTexture(GL_TEXTURE_2D, tex[2]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_ALPHA, w/2, h/2, 0, GL_ALPHA, GL_UNSIGNED_BYTE, pData2);

// Send the three texture names to Unity as a "tex0|tex1|tex2" string.
UnitySendMessage(VRController::Instance()->GetRenderCallbackGoName(), VRController::Instance()->GetRenderCallbackFuncName(), [[NSString stringWithFormat:@"%u|%u|%u", tex[0], tex[1], tex[2]] UTF8String]);

Here pData0, pData1, and pData2 are the pointers to the Y, U, and V planes respectively. On the Unity side, the message is received by the following C# method, which parses the native texture names and wraps them as external Unity textures:

public void display(string texString) {
    string[] tex = texString.Split ('|');
    IntPtr nativeTex1 = (IntPtr)long.Parse (tex [0]);
    IntPtr nativeTex2 = (IntPtr)long.Parse (tex [1]);
    IntPtr nativeTex3 = (IntPtr)long.Parse (tex [2]);
    if (_videoTextureY == null) {
        _videoTextureY = Texture2D.CreateExternalTexture ((int)videoLineW, (int)videoH, TextureFormat.Alpha8,false, false, (IntPtr)nativeTex1);
        _videoTextureY.filterMode = FilterMode.Bilinear;
        _videoTextureY.wrapMode = TextureWrapMode.Clamp;
    }
    if (_videoTextureU == null) {
        _videoTextureU = Texture2D.CreateExternalTexture ((int)videoLineW / 2, (int)videoH / 2, TextureFormat.Alpha8,false, false, (IntPtr)nativeTex2);
        _videoTextureU.filterMode = FilterMode.Point;
        _videoTextureU.wrapMode = TextureWrapMode.Clamp;
    }
    if (_videoTextureV == null) {
        _videoTextureV = Texture2D.CreateExternalTexture ((int)videoLineW / 2, (int)videoH / 2, TextureFormat.Alpha8,false, false, (IntPtr)nativeTex3);
        _videoTextureV.filterMode = FilterMode.Point;
        _videoTextureV.wrapMode = TextureWrapMode.Clamp;
    }

    _videoTextureY.UpdateExternalTexture ((IntPtr)nativeTex1);
    _videoTextureU.UpdateExternalTexture ((IntPtr)nativeTex2);
    _videoTextureV.UpdateExternalTexture ((IntPtr)nativeTex3);
}
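display() only creates and refreshes the three textures; they still need to be assigned to the material that runs the YUV shader. A minimal sketch, written as a method added to the same script as display(), assuming the script sits on the rendered object and the material's shader exposes the _MainTexY/_MainTexU/_MainTexV properties used below:

// Bind the plane textures to the material, e.g. right after they are created in display().
void BindTexturesToMaterial() {
    Material mat = GetComponent<Renderer>().material;
    mat.SetTexture("_MainTexY", _videoTextureY);   // Y plane (full resolution)
    mat.SetTexture("_MainTexU", _videoTextureU);   // U plane (half resolution)
    mat.SetTexture("_MainTexV", _videoTextureV);   // V plane (half resolution)
}

The fragment shader then samples the three Alpha8 textures and converts each pixel from YUV to RGB: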
fixed4 frag (v2f i) : SV_Target
{
    fixed4 yuv;
    fixed4 c = fixed4(0, 0, 0, 1);
    yuv.x = tex2D(_MainTexY, i.uv).a;
    yuv.y = tex2D(_MainTexU, i.uv).a - 0.5;
    yuv.z = tex2D(_MainTexV, i.uv).a - 0.5;
    // BT.601 YUV -> RGB
    c.r = yuv.x + 1.403 * yuv.z;
    c.g = yuv.x - 0.344 * yuv.y - 0.714 * yuv.z;
    c.b = yuv.x + 1.770 * yuv.y;
    return c;
}
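For completeness, the three samplers referenced above would be declared in the shader roughly as follows (the ShaderLab property display names and defaults here are assumptions; only the property names need to match the SetTexture calls above):

Properties {
    _MainTexY ("Texture Y", 2D) = "white" {}
    _MainTexU ("Texture U", 2D) = "gray" {}
    _MainTexV ("Texture V", 2D) = "gray" {}
}
// ...and inside the CGPROGRAM block:
sampler2D _MainTexY;
sampler2D _MainTexU;
sampler2D _MainTexV;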