[Camera Series] Qcom - Asynchronous Processing and Callback of the Video Stream

2021-01-27  c枫_撸码的日子

1. Introduction

When integrating an EIS (electronic image stabilization) algorithm, the preview or video stream has to go through asynchronous processing in the algorithm. This is somewhat tricky to implement on the HAL1 framework; on HAL3 or CamX it is comparatively easier.
So how do we implement the asynchronous data callback elegantly?


2. The interface that delivers video data

hardware/qcom/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp

void QCamera2HardwareInterface::video_stream_cb_routine(mm_camera_super_buf_t *super_frame,
                                                        QCameraStream *stream,
                                                        void *userdata)
{

  //1. Get the QCamera2HardwareInterface pointer
  QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
  //2. Get the video frame
  mm_camera_buf_def_t *frame = super_frame->bufs[0];
  //3. Process the video frame
  if (frame->buf_type == CAM_STREAM_BUF_TYPE_MPLANE) {
    if (pme->mParameters.getVideoBatchSize() == 0) {// batch size is 0 here, so this branch is always taken
        // compute the video frame timestamp
        timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL + frame->ts.tv_nsec;
        pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
        videoMemObj = (QCameraVideoMemory *)frame->mem_info;
        video_mem = NULL;
        if (NULL != videoMemObj && !(pme->m_bNeedVideoCb)) {
          video_mem = videoMemObj->getMemory(frame->buf_idx,
              (pme->mStoreMetaDataInFrame > 0)? true : false);
          triggerTCB = TRUE;
          LOGH("Video frame TimeStamp : %lld batch = 0 idx = %d",
              timeStamp, frame->frame_idx);
        }
        if (pme->m_bNeedVideoCb) {
          video_mem = pme->videoMemFb->getMemory(frame->buf_idx,
              (pme->mStoreMetaDataInFrame > 0)? true : false);
          triggerTCB = TRUE;
        }

    }else{
      ···
    }
  
  }else{
    ···
  }
    //4. Hand the data to the video encoder
    if ((NULL != video_mem) && (triggerTCB == TRUE)) {
        if ((pme->mDataCbTimestamp != NULL) &&
            pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) { 
            qcamera_callback_argm_t cbArg;
            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
            cbArg.cb_type = QCAMERA_DATA_TIMESTAMP_CALLBACK;
            cbArg.msg_type = CAMERA_MSG_VIDEO_FRAME;
            cbArg.data = video_mem;

            // For VT usecase, ISP uses AVtimer not CLOCK_BOOTTIME as time source.
            // So do not change video timestamp.
            if (!pme->mParameters.isAVTimerEnabled()) {
                // Convert Boottime from camera to Monotime for video if needed.
                // Otherwise, mBootToMonoTimestampOffset value will be 0.
                timeStamp = timeStamp - pme->mBootToMonoTimestampOffset;
            }    
            LOGD("Final video buffer TimeStamp : %lld ", timeStamp);
            cbArg.timestamp = timeStamp;
            int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
            if (rc != NO_ERROR) {
                LOGE("fail sending data notify");
                stream->bufDone(frame->buf_idx);
            }    
        }    
    }    
    if (!pme->mParameters.isVideoFaceBeautification()) {
        free(super_frame);
    }
}

Purpose of the function: process the video frames of the video stream.

The video frame is sent to the video encoder. Once the encoder has finished processing the frame, it calls another API (release_recording_frame) to return it; note that super_frame still needs to be freed.
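For orientation, the sketch below shows roughly what that return path looks like. It is a simplified sketch based on the usual HAL1 pattern (device->priv holding the hardware-interface pointer), not the full vendor implementation.

void QCamera2HardwareInterface::release_recording_frame(
        struct camera_device *device, const void *opaque)
{
    // The framework hands back the same opaque pointer (the video_mem data)
    // that was delivered in the CAMERA_MSG_VIDEO_FRAME callback.
    QCamera2HardwareInterface *hw =
            reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
    if (hw == NULL || opaque == NULL) {
        LOGE("NULL camera device or frame");
        return;
    }
    // The real implementation routes this through the HAL state machine; the
    // matching buffer is eventually re-queued to the video stream (bufDone)
    // so the driver can refill it.
}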

3. Implementing the asynchronous callback

To implement the asynchronous callback elegantly, we can use the new features of C++11 (std::function and lambdas), which are almost absurdly convenient.
Here we write a simple demo that implements the asynchronous callback.
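Before touching the HAL code, here is a tiny self-contained illustration of the idea (nothing HAL-specific in it): a lambda captures everything needed to finish the work, it is stored in a std::function, queued, and invoked later.

#include <functional>
#include <iostream>
#include <queue>

int main() {
    std::queue<std::function<void(int)>> pending;

    int frameIdx = 42;
    // The lambda captures frameIdx by value, so it stays valid even if the
    // original variable goes out of scope before the callback runs.
    pending.push([frameIdx](int result) {
        std::cout << "frame " << frameIdx << " done, result=" << result << std::endl;
    });

    // ... later, e.g. after the algorithm has finished ...
    while (!pending.empty()) {
        pending.front()(0);   // invoke the stored callback
        pending.pop();
    }
    return 0;
}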

3.1 Writing the demo class

3.1.1 DemoCamera.h

#ifndef DEMOCAMERA_H_
#define DEMOCAMERA_H_

#include <functional>
#include <queue>
#include <pthread.h>

#include "QCamera2HWI.h"
#include "QCameraStream.h"

// custom namespace
namespace democamera {
namespace hal1 {

// wraps one video frame and its callback
struct VideoData{
        mm_camera_super_buf_t *super_frame;
        qcamera::QCameraStream *stream;
        qcamera::QCamera2HardwareInterface *pme;
        std::function<void(int)> cbHandler;
};
// demo class
class DemoCamera{

public:
    DemoCamera(int);// constructor
    virtual ~DemoCamera();// destructor
    // simulates the algorithm's processing of the data
    void processVideo(
        mm_camera_super_buf_t *super_frame,
        qcamera::QCameraStream *stream,
        qcamera::QCamera2HardwareInterface *pme,
        std::function<void(int)> cbHandler);
    void callBack(void);
public:
    int videoFrame_Size;// number of video frames the algorithm needs
    std::queue<VideoData> videoDataQ;// queue buffering the video data
    bool isStartVideo;// recording started
    bool isStopVideo;// recording stopped
};

}
}
#endif

3.1.2 DemoCamera.cpp

#define LOG_TAG "DemoCamera"

#include "DemoCamera.h"

extern "C" {
#include "mm_camera_dbg.h"
}

using namespace qcamera;
namespace democamera {
namespace hal1 {

DemoCamera::DemoCamera(int size)
{
    videoFrame_Size = size;// number of video frames the algorithm needs
    isStartVideo = 0;// whether recording has started
    isStopVideo = 0;// whether recording has stopped
    LOGE("videoFrame_Size =%d ",videoFrame_Size );
}
DemoCamera::~DemoCamera()
{
    videoFrame_Size = 0;// release resources here
    LOGE("videoFrame_Size=%d ",videoFrame_Size);
}

void DemoCamera::processVideo(mm_camera_super_buf_t *super_frame,QCameraStream *stream,
    QCamera2HardwareInterface *pme,std::function<void(int)> cbHandler)
{
    VideoData d;
    d.super_frame = super_frame;
    d.stream = stream;
    d.pme = pme;
    d.cbHandler = cbHandler;
    videoDataQ.push(d);// store the video data in the queue
    // run the algorithm
    // (the xxx algorithm processes the video frames here)
    // then invoke the callback
    callBack();
}

void DemoCamera::callBack()
{
    if(!videoDataQ.empty())
    {
        // recording has started and we have buffered as many frames as the algorithm needs
        if(isStartVideo && ((size_t)videoFrame_Size == videoDataQ.size())){
            VideoData d = videoDataQ.front();
            mm_camera_buf_def_t *frame = d.super_frame->bufs[0];
            LOGE("zcf_cb:call cbHandler!videoDataQ.size = %zu frame->idx=%d ",videoDataQ.size(),frame->frame_idx);
            std::function<void(int)> cbHandler = d.cbHandler;// take out the stored callback
            cbHandler(0);// call back into the framework
            videoDataQ.pop();// pop the data off the queue
        }
        // when recording stops, flush all buffered frames back
        if(isStopVideo){
            LOGE("zcf_cb:recording stopped, flushing all buffered frames");
            while(!videoDataQ.empty()){
                VideoData d = videoDataQ.front();
                mm_camera_buf_def_t *frame = d.super_frame->bufs[0];
                LOGE("zcf_cb:call cbHandler!videoDataQ.size = %zu frame->idx=%d ",videoDataQ.size(),frame->frame_idx);
                std::function<void(int)> cbHandler = d.cbHandler;
                cbHandler(0);
                videoDataQ.pop();
            }
        }
    }
    }
}

}
}
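One thing the demo glosses over is locking: processVideo() runs on the video stream's callback thread, while isStartVideo/isStopVideo will be flipped from the camera API thread (in start_recording/stop_recording below). If that ever becomes an issue, a minimal sketch of a guarded variant could look like this; it assumes an extra std::mutex mLock member and #include <mutex>, which the demo above does not have.

void DemoCamera::processVideo(mm_camera_super_buf_t *super_frame, QCameraStream *stream,
    QCamera2HardwareInterface *pme, std::function<void(int)> cbHandler)
{
    std::lock_guard<std::mutex> guard(mLock); // serialize with start/stop_recording
    VideoData d;
    d.super_frame = super_frame;
    d.stream = stream;
    d.pme = pme;
    d.cbHandler = cbHandler;
    videoDataQ.push(d);
    callBack(); // still invoked with the lock held in this sketch
}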

3.2 Adding DemoCamera to the build

hardware/qcom/camera/QCamera2/Android.mk

-LOCAL_CFLAGS := -Wall -Wextra -Werror
+LOCAL_CFLAGS := -Wall -Wextra   # drop -Werror so warnings about unused variables do not break the build
LOCAL_SRC_FILES += \
         util/QCameraExtZoomTranslator.cpp \
         util/QCameraPprocManager.cpp \
         util/QCameraBokeh.cpp \
         util/QCameraClearSight.cpp \
+        HAL/DemoCamera.cpp

3.3 Initializing DemoCamera

hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.h

 #include "QCameraTrace.h"
 // forward-declare the DemoCamera class
+namespace democamera {
+namespace hal1 {
+  class DemoCamera;
+}
+}
 namespace qcamera {
···
 private:
      // use a smart pointer to prevent memory leaks
+    std::unique_ptr<democamera::hal1::DemoCamera> mDemoCamera;
     camera_device_t   mCameraDevice;
     uint32_t          mCameraId;
     mm_camera_vtbl_t *mCameraHandle;

hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp

 #include "QCameraTrace.h"
 #include "QCameraDisplay.h"
 
+#include "DemoCamera.h"

int QCamera2HardwareInterface::start_recording(struct camera_device *device)
{
+       hw->mDemoCamera->isStartVideo = 1;
+       hw->mDemoCamera->isStopVideo = 0;
}

int QCamera2HardwareInterface::stop_recording(struct camera_device *device)
{
+       hw->mDemoCamera->isStartVideo = 0;
+       hw->mDemoCamera->isStopVideo = 1;
}
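The diffs above show only the added lines. For context, a rough sketch of how they sit inside start_recording is given below; the hw pointer comes from device->priv, as elsewhere in this HAL, and the surrounding logic is abridged. stop_recording mirrors it with the two flags inverted.

int QCamera2HardwareInterface::start_recording(struct camera_device *device)
{
    QCamera2HardwareInterface *hw =
            reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
    if (hw == NULL) {
        LOGE("NULL camera device");
        return BAD_VALUE;
    }
    // tell the demo that recording has started, before the existing logic runs
    hw->mDemoCamera->isStartVideo = 1;
    hw->mDemoCamera->isStopVideo = 0;
    // ... existing start_recording handling (state-machine call etc.) ...
    return NO_ERROR; // placeholder; the real function returns the API result
}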

int QCamera2HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    // initialize the smart pointer mDemoCamera
+   mDemoCamera.reset(new democamera::hal1::DemoCamera(5));// the algorithm needs 5 frames here
    // Init params in the background
    // 1. It's safe to queue init job, even if alloc job is not yet complete.
    // It will be queued to the same thread, so the alloc is guaranteed to
    // finish first.
    // 2. However, it is not safe to begin param init until after camera is
    // open. That is why we wait until after camera open completes to schedule
    // this task.
    memset(&args, 0, sizeof(args));
    mParamInitJob = queueDeferredWork(CMD_DEF_PARAM_INIT, args);
}

3.4 Invoking the algorithm to process the video stream

hardware/qcom/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp

 #include <stdlib.h>
+#include <functional>
···
+#include "DemoCamera.h"

void QCamera2HardwareInterface::video_stream_cb_routine(mm_camera_super_buf_t *super_frame,
                                                        QCameraStream *stream,
                                                        void *userdata)
{
···
  //1. Get the QCamera2HardwareInterface pointer
  QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
  //2. Get the video frame
  mm_camera_buf_def_t *frame = super_frame->bufs[0];
···
    // print frame info
+     LOGE("zcf_c:Stream(%d), Timestamp: %ld %ld frame->idx=%d frame->buf_type=%d,getVideoBatchSize=%d",
          frame->stream_id,
          frame->ts.tv_sec,
          frame->ts.tv_nsec,
          frame->frame_idx,
          frame->buf_type,
          pme->mParameters.getVideoBatchSize());
    //3. Process the video frame
    if (frame->buf_type == CAM_STREAM_BUF_TYPE_MPLANE) {
        if (pme->mParameters.getVideoBatchSize() == 0) { 

        // wrap the handling code in a lambda expression and assign it to the std::function<void(int)> wrapper callBackHandler
+           std::function<void(int)> callBackHandler = [=](int result) mutable {

            timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
                    + frame->ts.tv_nsec;
            pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
            videoMemObj = (QCameraVideoMemory *)frame->mem_info;
            video_mem = NULL;
            if (NULL != videoMemObj && !(pme->m_bNeedVideoCb)) {
                video_mem = videoMemObj->getMemory(frame->buf_idx,
                        (pme->mStoreMetaDataInFrame > 0)? true : false);
                triggerTCB = TRUE;
                LOGH("ideo frame TimeStamp : %lld batch = 0 idx = %d",
                        timeStamp, frame->frame_idx);
            }
            if (pme->m_bNeedVideoCb) {
                video_mem = pme->videoMemFb->getMemory(frame->buf_idx,
                    (pme->mStoreMetaDataInFrame > 0)? true : false);
                triggerTCB = TRUE;
            }
              //4. Hand the data to the video encoder; this step is wrapped directly inside callBackHandler
                if (!result) {
                    /**
                        Code is copied from bottom of the outer function (QCamera2HardwareInterface::video_stream_cb_routine)
                        so the code will be executed here in this callback instead
                    */
                    LOGE("zcf_cf: result =%d frame->idx=%d video_mem=%p triggerTCB=%d",
                        result,frame->frame_idx,video_mem,triggerTCB);
                    if ((NULL != video_mem) && (triggerTCB == TRUE)) {
                        if ((pme->mDataCbTimestamp != NULL) && pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
                            qcamera_callback_argm_t cbArg;
                            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
                            cbArg.cb_type = QCAMERA_DATA_TIMESTAMP_CALLBACK;
                            cbArg.msg_type = CAMERA_MSG_VIDEO_FRAME;
                            cbArg.data = video_mem;

                            // For VT usecase, ISP uses AVtimer not CLOCK_BOOTTIME as time source.
                            // So do not change video timestamp.
                            if (!pme->mParameters.isAVTimerEnabled()) {
                                // Convert Boottime from camera to Monotime for video if needed.
                                // Otherwise, mBootToMonoTimestampOffset value will be 0.
                                timeStamp = timeStamp - pme->mBootToMonoTimestampOffset;
                            }
                            LOGE("zcf_cf: Final video buffer TimeStamp : %lld frame->idx=%d", timeStamp,frame->frame_idx);
                            cbArg.timestamp = timeStamp;
                            int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
                            if (rc != NO_ERROR) {
                                LOGE("fail sending data notify");
                                stream->bufDone(frame->buf_idx);
                            }
                        }
                    }
                }
                free(super_frame);// release the super frame
            };
            pme->mDemoCamera->processVideo(super_frame,stream,pme,callBackHandler);
            return;// return here so the original step 4 (handing the data to the encoder) below is not executed
        }

}

3.5 Build and run

The log after rebuilding and running shows that the first 5 accumulated frames are not called back to the framework directly; only after the algorithm has processed them does the callback happen, starting with frame 3.

3.6 Problem: the last few frames are not called back correctly

// Step 4: hand the data to the video encoder
 if ((NULL != video_mem) && (triggerTCB == TRUE)) {
    if ((pme->mDataCbTimestamp != NULL) && pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
    ···
  }
}

Adding logs and rebuilding for analysis shows that for the last few frames
pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) returns 0,
so they are never called back.


Solution

The simplest fix is to drop the check pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0.
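Concretely, step 4 inside callBackHandler then becomes the following; only that one check is removed, everything else stays the same.

    if ((NULL != video_mem) && (triggerTCB == TRUE)) {
        if (pme->mDataCbTimestamp != NULL) { // msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) check removed
            // ... build cbArg and call pme->m_cbNotifier.notifyCallback(cbArg) as before ...
        }
    }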

The log then shows the frames being called back normally.
Of course, this is not necessarily the best solution, since it changes the system's original logic.
Is there a better way?

Stay hungry, Stay foolish!
