ijkplayer Tips

IJKPlayer: Getting Real-Time Data (Part 2) - Adding a Texture Output for the Software Decoder

2017-07-25  ARVRSchool
1. Add the YUVRotate library

YUVRotate download address: ###
1-1. Add the YUVRotate library to the project



1-2. Header search path: in IJKMediaPlayer.xcodeproj -> IJKMediaFramework (TARGETS) -> Build Settings -> Header Search Paths

add "$(SRCROOT)/YUVRotate/include"

1-3. In ff_ffplay.c, include the header:

    #include "libyuv.h"
2. Edit ff_ffplay.c

Find the following function:

static int decoder_decode_frame(FFPlayer *ffp, Decoder *d, AVFrame *frame, AVSubtitle *sub)

Inside it, add the following code at the point where the decoded video frame is available:


ffp_pixelbuffer_lock(ffp);
CVPixelBufferRef cvImage = NULL;
if (ffp->szt_pixelbuffer) {
    // Reuse the CVPixelBuffer created for an earlier frame.
    cvImage = ffp->szt_pixelbuffer;
}
int ret = createCVPixelBuffer(ffp, ffp->is->video_st->codec, frame, &cvImage);
if (!ret) {
    // Conversion succeeded: keep the latest frame around for the app layer.
    ffp->szt_pixelbuffer = cvImage;
}
ffp_pixelbuffer_unlock(ffp);
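This snippet relies on ffp->szt_pixelbuffer and the ffp_pixelbuffer_lock/unlock helpers, which are not part of stock ijkplayer; they come from the earlier part of this series. If you are wiring this up from scratch, a minimal sketch of what they could look like, assuming a pthread mutex added to FFPlayer alongside the pixel-buffer field (both member names are assumptions here), is:

#include <pthread.h>

// Assumed additions to the FFPlayer struct in ff_ffplay_def.h:
//     CVPixelBufferRef szt_pixelbuffer;        // latest software-decoded frame
//     pthread_mutex_t  szt_pixelbuffer_mutex;  // guards szt_pixelbuffer
// The mutex must be initialized once, e.g. when the player is created.

static void ffp_pixelbuffer_lock(FFPlayer *ffp)
{
    pthread_mutex_lock(&ffp->szt_pixelbuffer_mutex);
}

static void ffp_pixelbuffer_unlock(FFPlayer *ffp)
{
    pthread_mutex_unlock(&ffp->szt_pixelbuffer_mutex);
}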

Add the following code (both functions must be placed above decoder_decode_frame):

static int copyAVFrameToPixelBuffer(FFPlayer *ffp, AVCodecContext *avctx, const AVFrame *frame, CVPixelBufferRef cv_img, const size_t *plane_strides, const size_t *plane_rows)
{
    // plane_strides / plane_rows are unused here: the conversion reads the
    // plane geometry directly from the AVFrame.
    int status;

    status = CVPixelBufferLockBaseAddress(cv_img, 0);
    if (status) {
        av_log(avctx, AV_LOG_ERROR,
               "Error: Could not lock base address of CVPixelBuffer: %d.\n", status);
        return AVERROR_EXTERNAL;
    }

    // Source planes of the decoded I420 (YUV420P) frame.
    const uint8_t *src_y = frame->data[0];
    const uint8_t *src_u = frame->data[1];
    const uint8_t *src_v = frame->data[2];
    int src_stride_y = frame->linesize[0];
    int src_stride_u = frame->linesize[1];
    int src_stride_v = frame->linesize[2];

    // Destination: the packed BGRA plane of the CVPixelBuffer.
    uint8_t *dst_argb = (uint8_t *)CVPixelBufferGetBaseAddress(cv_img);
    int dst_stride_argb = (int)CVPixelBufferGetBytesPerRow(cv_img);

    int src_width  = frame->width;
    int src_height = frame->height;

    // libyuv's "ARGB" is BGRA in memory order on little-endian devices,
    // which matches kCVPixelFormatType_32BGRA.
    I420ToARGB(src_y, src_stride_y,
               src_u, src_stride_u,
               src_v, src_stride_v,
               dst_argb, dst_stride_argb,
               src_width, src_height);

    status = CVPixelBufferUnlockBaseAddress(cv_img, 0);
    if (status) {
        av_log(avctx, AV_LOG_ERROR, "Error: Could not unlock CVPixelBuffer base address: %d.\n", status);
        return AVERROR_EXTERNAL;
    }

    return 0;
}
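The article converts to BGRA, which is easy to upload as a single GL texture. If you would rather keep the data planar, for example to feed CoreVideo's OpenGL ES texture cache with an NV12 buffer, libyuv's I420ToNV12 does the equivalent copy. The sketch below is not part of the original article: it assumes the same libyuv.h and CoreVideo includes as above and a pixel buffer created with kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange.

// Hypothetical NV12 variant: copies an I420 AVFrame into a bi-planar CVPixelBuffer.
static int copyAVFrameToNV12PixelBuffer(const AVFrame *frame, CVPixelBufferRef cv_img)
{
    if (CVPixelBufferLockBaseAddress(cv_img, 0) != kCVReturnSuccess)
        return AVERROR_EXTERNAL;

    uint8_t *dst_y    = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(cv_img, 0);
    int dst_stride_y  = (int)CVPixelBufferGetBytesPerRowOfPlane(cv_img, 0);
    uint8_t *dst_uv   = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(cv_img, 1);
    int dst_stride_uv = (int)CVPixelBufferGetBytesPerRowOfPlane(cv_img, 1);

    // Interleave the separate U and V planes into the single UV plane of NV12.
    I420ToNV12(frame->data[0], frame->linesize[0],
               frame->data[1], frame->linesize[1],
               frame->data[2], frame->linesize[2],
               dst_y, dst_stride_y,
               dst_uv, dst_stride_uv,
               frame->width, frame->height);

    CVPixelBufferUnlockBaseAddress(cv_img, 0);
    return 0;
}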
int createCVPixelBuffer(FFPlayer *ffp, AVCodecContext *avctx, AVFrame *frame, CVPixelBufferRef *cvImage)
{
    size_t widths [AV_NUM_DATA_POINTERS];
    size_t heights[AV_NUM_DATA_POINTERS];
    size_t strides[AV_NUM_DATA_POINTERS];
    int status;

    memset(widths,  0, sizeof(widths));
    memset(heights, 0, sizeof(heights));
    memset(strides, 0, sizeof(strides));

    // Plane geometry of the decoded YUV420P frame: full-size Y, half-size U and V.
    widths[0]  = avctx->width;
    heights[0] = avctx->height;
    strides[0] = frame ? frame->linesize[0] : avctx->width;

    widths[1]  = (avctx->width + 1) / 2;
    heights[1] = (avctx->height + 1) / 2;
    strides[1] = frame ? frame->linesize[1] : (avctx->width + 1) / 2;

    widths[2]  = (avctx->width + 1) / 2;
    heights[2] = (avctx->height + 1) / 2;
    strides[2] = frame ? frame->linesize[2] : (avctx->width + 1) / 2;

    // Create the BGRA pixel buffer once and keep reusing it for later frames.
    if (!ffp->szt_pixelbuffer) {
        status = CVPixelBufferCreate(kCFAllocatorDefault,
                                     frame->width,
                                     frame->height,
                                     kCVPixelFormatType_32BGRA,
                                     NULL,
                                     cvImage);
        if (status) {
            return AVERROR_EXTERNAL;
        }
    }

    status = copyAVFrameToPixelBuffer(ffp, avctx, frame, *cvImage, strides, heights);
    if (status) {
        // Only release the buffer if it was created here; a reused
        // ffp->szt_pixelbuffer must not be left dangling.
        if (!ffp->szt_pixelbuffer) {
            CFRelease(*cvImage);
            *cvImage = NULL;
        }
        return status;
    }

    return 0;
}
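createCVPixelBuffer only fills ffp->szt_pixelbuffer; the app layer still needs a way to read it. The getter below is a hypothetical sketch (the function name is not part of ijkplayer's API), showing one way to hand the latest frame out under the same lock used in decoder_decode_frame:

// Hypothetical accessor in ff_ffplay.c: returns a retained reference to the
// latest software-decoded frame. The caller must CFRelease() it when done.
CVPixelBufferRef ffp_get_latest_pixelbuffer(FFPlayer *ffp)
{
    CVPixelBufferRef pixelBuffer = NULL;

    ffp_pixelbuffer_lock(ffp);
    if (ffp->szt_pixelbuffer) {
        pixelBuffer = (CVPixelBufferRef)CFRetain(ffp->szt_pixelbuffer);
    }
    ffp_pixelbuffer_unlock(ffp);

    return pixelBuffer;
}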
