The Simplest FFmpeg-Based Video Player on iOS (Part 3)

2017-10-31  Aiewing

If decoding is the most essential step of a video player, then displaying the video is the most complex step, and the hardest one as well.
Picking up from the previous article, this one focuses on how the decoded data ends up on the phone screen in the right order and at a steady rhythm.

The Simplest FFmpeg-Based Video Player on iOS (Part 1)
The Simplest FFmpeg-Based Video Player on iOS (Part 2)
The Simplest FFmpeg-Based Video Player on iOS (Part 3)

Getting Started

1. Preparation

1.1 Initializing the OpenGL view

- (void)setupPresentView
{
    // A 300x200 preview pinned to the bottom-left corner
    // (the size is hard-coded for this demo)
    _glView = [[AieGLView alloc] initWithFrame:CGRectMake(0, self.view.frame.size.height - 200, 300, 200) decoder:_decoder];
    [self.view addSubview:_glView];
    
    self.view.backgroundColor = [UIColor clearColor];
}
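For reference, this is the surface of AieGLView that the rest of the article relies on. Both methods below are exactly the ones called in these excerpts; the parameter types are assumptions, since the class itself is not shown here:

#import <UIKit/UIKit.h>

@class AieVideoFrame;

@interface AieGLView : UIView

// Built around the two calls seen in this article: created with the
// decoder, then handed one frame at a time to draw via OpenGL ES.
- (instancetype)initWithFrame:(CGRect)frame decoder:(id)decoder;
- (void)render:(AieVideoFrame *)frame;

@end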

1.2 Handling the decoded data

- (AieVideoFrame *)handleVideoFrame
{
    if (!_videoFrame->data[0]) {
        return nil;
    }
    
    AieVideoFrame * frame;
    if (_videoFrameFormat == AieVideoFrameFormatYUV) {
        AieVideoFrameYUV * yuvFrame = [[AieVideoFrameYUV alloc] init];
        
        yuvFrame.luma = copyFrameData(_videoFrame->data[0],
                                      _videoFrame->linesize[0],
                                      _videoCodecCtx->width,
                                      _videoCodecCtx->height);
        
        yuvFrame.chromaB = copyFrameData(_videoFrame->data[1],
                                         _videoFrame->linesize[1],
                                         _videoCodecCtx->width / 2,
                                         _videoCodecCtx->height / 2);
        
        yuvFrame.chromaR = copyFrameData(_videoFrame->data[2],
                                         _videoFrame->linesize[2],
                                         _videoCodecCtx->width / 2,
                                         _videoCodecCtx->height / 2);
        
        frame = yuvFrame;
    }
    
    frame.width = _videoCodecCtx->width;
    frame.height = _videoCodecCtx->height;
    // Best-effort timestamp, estimated from the stream's own clock
    frame.position = av_frame_get_best_effort_timestamp(_videoFrame) * _videoTimeBase;
    
    // Duration of the current frame
    const int64_t frameDuration = av_frame_get_pkt_duration(_videoFrame);
    
    if (frameDuration) {
        frame.duration = frameDuration * _videoTimeBase;
        frame.duration += _videoFrame->repeat_pict * _videoTimeBase * 0.5;
    }
    else {
        // No duration in the packet: fall back to the stream's frame rate
        frame.duration = 1.0 / _fps;
    }
    return frame;
}
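For orientation, -handleVideoFrame is invoked once per successfully decoded picture inside the decoder loop from the previous article. A minimal sketch of that call site, assuming the FFmpeg 3.x-era avcodec_decode_video2 API that matches the accessor functions used above (packet and result belong to the assumed surrounding loop):

int gotFrame = 0;
int len = avcodec_decode_video2(_videoCodecCtx, _videoFrame, &gotFrame, &packet);
if (len >= 0 && gotFrame) {
    // Copy the planes out of _videoFrame before the next decode overwrites them
    AieVideoFrame *frame = [self handleVideoFrame];
    if (frame) {
        [result addObject:frame];   // result: the array of frames handed to playback
    }
}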

1.2.1 Examining the AVFrame data

For planar YUV 4:2:0 output, data[0], data[1] and data[2] hold the Y (luma), Cb and Cr planes respectively. If data[0] is NULL, the frame carries no picture at all, so we bail out immediately:

if (!_videoFrame->data[0]) {
    return nil;
}
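That plane layout only holds for planar YUV formats. As a defensive variant (a sketch, not the article's code: the decoder here has already established the format when it set _videoFrameFormat), the guard could also verify the pixel format itself:

if (_videoFrame->format != AV_PIX_FMT_YUV420P || !_videoFrame->data[0]) {
    return nil;   // no picture yet, or a plane layout this path cannot handle
}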

1.2.2 Wrapping the data in our own format

AieVideoFrame * frame;
if (_videoFrameFormat == AieVideoFrameFormatYUV) {
    AieVideoFrameYUV * yuvFrame = [[AieVideoFrameYUV alloc] init];
    
    yuvFrame.luma = copyFrameData(_videoFrame->data[0],
                                  _videoFrame->linesize[0],
                                  _videoCodecCtx->width,
                                  _videoCodecCtx->height);
    
    yuvFrame.chromaB = copyFrameData(_videoFrame->data[1],
                                     _videoFrame->linesize[1],
                                     _videoCodecCtx->width / 2,
                                     _videoCodecCtx->height / 2);
    
    yuvFrame.chromaR = copyFrameData(_videoFrame->data[2],
                                     _videoFrame->linesize[2],
                                     _videoCodecCtx->width / 2,
                                     _videoCodecCtx->height / 2);
    
    frame = yuvFrame;
}
Note that the two chroma planes are copied at half the luma's width and height, because YUV420P subsamples chroma 2:1 in both directions. copyFrameData is where the row stride is dealt with:

static NSData * copyFrameData(UInt8 *src, int linesize, int width, int height)
{
    // linesize may be larger than width because of alignment padding,
    // so copy row by row and keep only the visible pixels of each row
    width = MIN(linesize, width);
    NSMutableData *md = [NSMutableData dataWithLength: width * height];
    Byte *dst = md.mutableBytes;
    for (NSUInteger i = 0; i < height; ++i)
    {
        memcpy(dst, src, width);
        dst += width;
        src += linesize;
    }
    return md;
}
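A concrete illustration with made-up numbers: for a 480x360 YUV420P stream, FFmpeg may pad each luma row out to a 512-byte linesize for alignment, and the call below then keeps only the 480 visible bytes of each of the 360 rows:

NSData *luma = copyFrameData(_videoFrame->data[0],
                             _videoFrame->linesize[0],   // e.g. 512, with padding
                             _videoCodecCtx->width,      // 480 visible pixels per row
                             _videoCodecCtx->height);    // 360 rows copied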

1.2.3 Timing information for the decoded frame

frame.width = _videoCodecCtx->width;
frame.height = _videoCodecCtx->height;
// Best-effort timestamp, estimated from the stream's own clock
frame.position = av_frame_get_best_effort_timestamp(_videoFrame) * _videoTimeBase;

// Duration of the current frame
const int64_t frameDuration = av_frame_get_pkt_duration(_videoFrame);

if (frameDuration) {
    frame.duration = frameDuration * _videoTimeBase;
    // repeat_pict asks us to extend the display time in half-frame increments
    frame.duration += _videoFrame->repeat_pict * _videoTimeBase * 0.5;
}
else {
    // No duration in the packet: fall back to the stream's frame rate
    frame.duration = 1.0 / _fps;
}
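position and duration both come out in seconds because the integer tick counts are scaled by _videoTimeBase. A worked example with illustrative numbers (90 kHz is a time base commonly seen in MPEG streams):

double videoTimeBase = 1.0 / 90000.0;          // 90 kHz stream time base
double position = 270000 * videoTimeBase;      // 270000 ticks -> 3.0 s into the stream
double duration =   3000 * videoTimeBase;      // 3000 ticks   -> ~0.033 s, i.e. ~30 fps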

2. Starting Video Playback

2.1 Playback scheduling logic

// Give decoding a 0.1 s head start, then fire the first tick on the main queue
dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, 0.1 * NSEC_PER_SEC);
dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
    [self tick];
});
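In context, this snippet lives in whatever method starts playback; the delay lets the first tick find frames already waiting. A sketch of such a method (the name -play and the call order are assumptions, only the dispatch_after body appears in the article):

- (void)play
{
    // Start filling _videoFrames before the first tick fires
    [self asyncDecodeFrames];
    
    dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW,
                                            (int64_t)(0.1 * NSEC_PER_SEC));
    dispatch_after(popTime, dispatch_get_main_queue(), ^{
        [self tick];
    });
}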

2.2 Playing the video

- (void)tick
{
    // Render one frame and get back how long it should stay on screen
    CGFloat interval = [self presentFrame];
    const NSUInteger leftFrames = _videoFrames.count;
    
    // If _videoFrames has run out of decoded frames, or the buffered duration
    // has dropped below _minBufferedDuration, kick off another round of decoding
    if (!leftFrames ||
        !(_bufferedDuration > _minBufferedDuration))  {
        [self asyncDecodeFrames];
    }
    
    // Schedule the next frame; two frames are never less than 0.01 s apart
    const NSTimeInterval time = MAX(interval, 0.01);
    dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, time * NSEC_PER_SEC);
    dispatch_after(popTime, dispatch_get_main_queue(), ^{
        [self tick];
    });
}

2.2.1 Rendering a frame

- (CGFloat)presentFrame
{
    CGFloat interval = 0;
    AieVideoFrame * frame;
    
    // Pop the oldest decoded frame; @synchronized guards against the
    // decoding thread appending to _videoFrames at the same time
    @synchronized (_videoFrames) {
        if (_videoFrames.count > 0) {
            frame = _videoFrames[0];
            [_videoFrames removeObjectAtIndex:0];
            _bufferedDuration -= frame.duration;
        }
    }
    
    if (frame) {
        if (_glView) {
            [_glView render:frame];
        }
        interval = frame.duration;
    }
    return interval;
}
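presentFrame is the consumer half of a producer-consumer pair. For context, here is what the producer half might look like (-asyncDecodeFrames is called throughout the article but never shown; the queue, the -decodeFrames: batch API and everything else below are assumptions):

- (void)asyncDecodeFrames
{
    dispatch_async(_dispatchQueue, ^{
        // Decode a small batch of frames off the main thread
        NSArray *frames = [_decoder decodeFrames:0.1];
        
        // Append under the same lock that presentFrame pops under,
        // and grow the buffered duration that tick checks against
        @synchronized (_videoFrames) {
            for (AieVideoFrame *frame in frames) {
                [_videoFrames addObject:frame];
                _bufferedDuration += frame.duration;
            }
        }
    });
}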

2.2.2 Triggering another round of decoding

This is the same guard that appears in tick: when the queue of decoded frames has run dry, or the amount of buffered playback time is no longer above _minBufferedDuration, another asynchronous decoding pass is started.

const NSUInteger leftFrames = _videoFrames.count;
if (!leftFrames ||
    !(_bufferedDuration > _minBufferedDuration))
{
    [self asyncDecodeFrames];
}

2.2.3 Scheduling the next frame

After a frame has been shown, the next tick fires after that frame's own duration, clamped so that two frames are never less than 0.01 s apart (i.e. playback is capped at 100 fps):

const NSTimeInterval time = MAX(interval, 0.01);
dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, time * NSEC_PER_SEC);
dispatch_after(popTime, dispatch_get_main_queue(), ^{
    [self tick];
});

Wrap-up

With that, the pipeline is complete: decoded AVFrames are wrapped into our own frame objects, stamped with a position and duration, buffered, and paced onto the screen by tick.
