
A Simple Video Player Based on Qt and FFmpeg

2019-07-31  saltcc

1. Introduction

This post implements simple video decoding only; follow-up posts on audio/video synchronization and related topics will be published later.

2. Video decoding and playback flow

Flow of decoding video with FFmpeg and displaying it with Qt:
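In outline (mirroring the code in section 5):

1. avformat_open_input and avformat_find_stream_info open the file and probe its streams;
2. the video stream is located, and its decoder is found and opened with avcodec_find_decoder and avcodec_open2;
3. in a loop, av_read_frame reads a compressed AVPacket, avcodec_decode_video2 decodes it into an AVFrame, and sws_scale converts the frame to RGB32;
4. the RGB data is wrapped in a QImage and delivered to the GUI thread through a Qt signal;
5. MainWindow::paintEvent scales, centers, and draws the image.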


3. Overview of the main structures
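Briefly, the FFmpeg structures used in the player below:

- AVFormatContext: describes the media file and its streams;
- AVCodecContext: the decoder context of the video stream;
- AVCodec: describes the decoder itself;
- AVPacket: holds compressed data before decoding;
- AVFrame: holds the raw picture data decoded from a packet;
- SwsContext: the libswscale context used to convert decoded frames to RGB32.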
4. Overview of the main functions
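The key FFmpeg calls, in the order they appear in the code:

- avformat_open_input / avformat_find_stream_info: open the file and read stream information;
- avcodec_find_decoder / avcodec_open2: find and open the decoder for the video stream;
- av_read_frame: read one compressed packet from the container;
- avcodec_decode_video2: decode a packet into a frame;
- sws_getContext / sws_scale: convert the decoded frame to RGB32;
- av_free_packet, av_frame_free, sws_freeContext, avcodec_close, avformat_close_input: release resources.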
5. Implementation of the simple player

Project structure: main.cpp, mainwindow.h / mainwindow.cpp, SimpleVideoPlayer.h / SimpleVideoPlayer.cpp.

main.cpp:
#include "mainwindow.h"
#include <QApplication>
#include <iostream>
#include <QDebug>
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    MainWindow w;
    w.show();

    return a.exec();
}

mainwindow.h:

#ifndef MAINWINDOW_H
#define MAINWINDOW_H

#include <QMainWindow>
#include <QPaintEvent>
#include <QImage>

#include "SimpleVideoPlayer.h"

namespace Ui {
class MainWindow;
}

class MainWindow : public QMainWindow
{
    Q_OBJECT

public:
    explicit MainWindow(QWidget *parent = 0);
    ~MainWindow();

protected:
    void paintEvent(QPaintEvent *event);

private:
    Ui::MainWindow *ui;

    SimpleVideoPlayer _simpleVp;
    QImage _Qimg;

private slots:
    void sigGetVideoFrame(QImage img);
};

#endif // MAINWINDOW_H
#include "mainwindow.h"
#include "ui_mainwindow.h"

#include <QPainter>

MainWindow::MainWindow(QWidget *parent) :
    QMainWindow(parent),
    ui(new Ui::MainWindow)
{
    ui->setupUi(this);

    //connect the decoder's frame-ready signal to the display slot
    connect(&_simpleVp,SIGNAL(sigCreateVideoFrame(QImage)),this,SLOT(sigGetVideoFrame(QImage)));
    //path of the video file to play
    _simpleVp.setVideo("D:/Qt/testffmpeg/testffmpeg/in2.mp4");
    _simpleVp.start();

}

MainWindow::~MainWindow()
{
    delete ui;
}

void MainWindow::paintEvent(QPaintEvent *event)
{
    QPainter painter(this);
    painter.setBrush(Qt::black);
    painter.drawRect(0, 0, this->width(), this->height());

    if (_Qimg.size().width() <= 0)
        return;

    //scale the frame to the window size, keeping the aspect ratio
    QImage img = _Qimg.scaled(this->size(),Qt::KeepAspectRatio);
    int x = this->width() - img.width();
    int y = this->height() - img.height();

    x /= 2;
    y /= 2;

    //(x, y) is the top-left corner that centers the scaled image in the window
    painter.drawImage(QPoint(x,y),img);
}

void MainWindow::sigGetVideoFrame(QImage img)
{
    _Qimg = img;
    //request a repaint; paintEvent() will draw the new frame
    update();
}
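
Note: sigCreateVideoFrame is emitted from the decoder thread, so the connection made in the constructor resolves to a queued connection and sigGetVideoFrame runs in the GUI thread. The QImage delivered through the queue must own its pixel data, which is why run() below takes a deep copy of the frame buffer.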

SimpleVideoPlayer.h:

#ifndef SIMPLEVIDEOPLAYER_H
#define SIMPLEVIDEOPLAYER_H

#include <QThread>
#include <QImage>

class SimpleVideoPlayer : public QThread
{
    Q_OBJECT

public:
    explicit SimpleVideoPlayer();
    ~SimpleVideoPlayer();

    void setVideo(const QString &videoPath)
    {
        this->_videoPath = videoPath;
    }

    void play();

signals:
    void sigCreateVideoFrame(QImage image);

protected:
    void run();

private:
    QString _videoPath;
};

#endif // SIMPLEVIDEOPLAYER_H

#include "SimpleVideoPlayer.h"
#include <iostream>

#include <QDebug>

extern "C"
{
    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"
    #include "libavutil/pixfmt.h"
    #include "libswscale/swscale.h"
}

SimpleVideoPlayer::SimpleVideoPlayer()
{

}

SimpleVideoPlayer::~SimpleVideoPlayer()
{

}

void SimpleVideoPlayer::play()
{
    start();
}

void SimpleVideoPlayer::run()
{
    if (_videoPath.isNull())
    {
        return;
    }

    //keep the QByteArray alive for the whole function; data() on the temporary from toUtf8() would dangle
    QByteArray filePathUtf8 = _videoPath.toUtf8();
    const char *file = filePathUtf8.constData();

    //register all formats and codecs (deprecated and no longer needed in recent FFmpeg versions)
    av_register_all();

    //describes the media file's composition and basic information
    AVFormatContext *pAVFormatCtx = avformat_alloc_context();

    if (avformat_open_input(&pAVFormatCtx, file, NULL, NULL) != 0)
    {
        std::cout<<"open file fail"<<std::endl;
        avformat_free_context(pAVFormatCtx);
        return;
    }

    //read part of the stream data to obtain stream information
    if (avformat_find_stream_info(pAVFormatCtx, NULL) < 0)
    {
        std::cout<<"avformat find stream fail"<<std::endl;
        avformat_close_input(&pAVFormatCtx);
        return;
    }

    int iVideoIndex = -1;

    for (uint32_t i = 0; i < pAVFormatCtx->nb_streams; ++i)
    {
        //find the first video stream
        if (pAVFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            iVideoIndex = i;
            break;
        }
    }

    if (iVideoIndex == -1)
    {
        std::cout<<"video stream not find"<<std::endl;
        avformat_close_input(&pAVFormatCtx);
        return;
    }

    //codec context (decoder parameters) of the video stream
    AVCodecContext *pAVCodecCtx = pAVFormatCtx->streams[iVideoIndex]->codec;

    if (pAVCodecCtx == NULL)
    {
        std::cout<<"get codec fail"<<std::endl;
        avformat_close_input(&pAVFormatCtx);
        return;
    }

    //structure describing the decoder
    AVCodec *pAVCodec = avcodec_find_decoder(pAVCodecCtx->codec_id);

    if (pAVCodec == NULL)
    {
        std::cout<<"decoder not found"<<std::endl;
        avformat_close_input(&pAVFormatCtx);
        return;
    }

    //open the decoder
    if (avcodec_open2(pAVCodecCtx, pAVCodec, NULL) < 0)
    {
        std::cout<<"avcodec_open2 fail"<<std::endl;
        avformat_close_input(&pAVFormatCtx);
        return;
    }

    //AVFrame holds the raw picture data decoded from an AVPacket
    AVFrame *pAVFrame = av_frame_alloc();
    AVFrame *pAVFrameRGB = av_frame_alloc();

    //conversion context: convert decoded frames from the source format to RGB32
    SwsContext *pSwsCtx = sws_getContext(pAVCodecCtx->width, pAVCodecCtx->height, pAVCodecCtx->pix_fmt,
                                         pAVCodecCtx->width, pAVCodecCtx->height, AV_PIX_FMT_RGB32,
                                         SWS_BICUBIC, NULL, NULL, NULL);
    int iNumBytes = avpicture_get_size(AV_PIX_FMT_RGB32, pAVCodecCtx->width, pAVCodecCtx->height);

    uint8_t *pRgbBuffer = (uint8_t *)(av_malloc(iNumBytes * sizeof(uint8_t)));
    //attach the RGB buffer to pAVFrameRGB's data/linesize pointers (same pixel format as the sws output)
    avpicture_fill((AVPicture *)pAVFrameRGB, pRgbBuffer, AV_PIX_FMT_RGB32, pAVCodecCtx->width, pAVCodecCtx->height);

    //AVPacket holds the compressed data read from the file
    AVPacket packet;
    av_init_packet(&packet);
    packet.data = NULL;
    packet.size = 0;

    //read packets from the stream
    while (av_read_frame(pAVFormatCtx, &packet) >= 0)
    {
        if (packet.stream_index != iVideoIndex)
        {
            //release the packet's data/buf (the packet struct itself is reused)
            av_free_packet(&packet);
            continue;
        }

        int iGotPic = -1;
        //decode one video frame
        if (avcodec_decode_video2(pAVCodecCtx, pAVFrame, &iGotPic, &packet) < 0)
        {
            av_free_packet(&packet);
            std::cout<<"avcodec_decode_video2 fail"<<std::endl;
            break;
        }

        if (iGotPic > 0)
        {
            //convert the decoded frame to RGB32
            sws_scale(pSwsCtx, (uint8_t const * const *)pAVFrame->data, pAVFrame->linesize, 0, pAVCodecCtx->height, pAVFrameRGB->data, pAVFrameRGB->linesize);

            //wrap the RGB buffer in a QImage and deep-copy it, because pRgbBuffer is overwritten by the next frame
            QImage img = QImage(pRgbBuffer, pAVCodecCtx->width, pAVCodecCtx->height, QImage::Format_RGB32).copy();

            //hand the frame to the GUI thread for drawing
            emit sigCreateVideoFrame(img);
        }

        av_free_packet(&packet);
        //crude fixed pacing between frames (see the pacing sketch after this listing)
        msleep(15);
    }

    //release resources
    av_free(pRgbBuffer);
    av_frame_free(&pAVFrame);
    av_frame_free(&pAVFrameRGB);
    sws_freeContext(pSwsCtx);
    avcodec_close(pAVCodecCtx);
    avformat_close_input(&pAVFormatCtx);
}
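
The fixed msleep(15) above plays every file at roughly the same speed regardless of its actual frame rate. A minimal sketch of deriving the delay from the stream's average frame rate instead (the variable names here are illustrative, not part of the original project):

    //inside run(), once the video stream index (iVideoIndex) is known
    AVRational frameRate = pAVFormatCtx->streams[iVideoIndex]->avg_frame_rate;

    //fall back to 25 fps if the container does not report a usable rate
    double fps = (frameRate.num > 0 && frameRate.den > 0) ? av_q2d(frameRate) : 25.0;
    unsigned long delayMs = (unsigned long)(1000.0 / fps);

    //then, in the decode loop, replace msleep(15) with:
    msleep(delayMs);

Proper audio/video synchronization based on packet timestamps is left to the follow-up articles mentioned in the introduction.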