嵌牛IT观察

Decoding and Processing Video with FFmpeg on Android

2018-06-18  小怪兽大作战

[嵌牛 Intro]: OpenCV on Android is a trimmed-down build; some features, such as VideoCapture, are unavailable. This article first packages FFmpeg into .so libraries, then uses JNI to call C code that decodes video.

[嵌牛 Keywords]: FFmpeg, Android, JNI, video decoding

[嵌牛 Question]: How do you decode video with FFmpeg on Android?

[嵌牛 Main Text]:

Calling native FFmpeg via JNI

OpenCV on Android cannot decode video with VideoCapture because the Android build of OpenCV does not bundle FFmpeg, the library that performs the decoding. So we compile the FFmpeg sources into .so files ourselves, call those .so libraries from Android through JNI, and in C++ read a video from the phone, decode it, and convert each frame into an OpenCV Mat for later processing. The steps are as follows.

I. Compile the FFmpeg sources into .so libraries. I used libraries that someone else had already built; the build procedure is described at https://www.2cto.com/kf/201804/739639.html

II. Write the native functions

1. Create a class named FFmpegDecode and declare two native methods in it, as sketched below.
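A minimal sketch of the class, assuming a static DecodeFile method (to match the jclass parameter in the native signature used later); the article mentions a second native method but never names it, so only DecodeFile is declared here. The static block loads the prebuilt libraries configured in Part III before our own module:

package com.tinymonster.ffmpegstudy1;

public class FFmpegDecode {

    static {
        // Load the prebuilt dependencies first; ffmdecode links against
        // all of them, so it must be loaded last.
        System.loadLibrary("avutil-54");
        System.loadLibrary("swresample-1");
        System.loadLibrary("avcodec-56");
        System.loadLibrary("avformat-56");
        System.loadLibrary("swscale-3");
        System.loadLibrary("postproc-53");
        System.loadLibrary("avfilter-5");
        System.loadLibrary("avdevice-56");
        System.loadLibrary("yuv");
        System.loadLibrary("opencv_java");
        System.loadLibrary("ffmdecode");
    }

    // Decodes the video file at the given path; returns 0 on success.
    public static native int DecodeFile(String path);
}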

2. Open the terminal in Android Studio and change into the app\build\intermediates\classes\debug folder.

3. Generate the C++ header file corresponding to the Java native methods, typically by running javah from that directory, e.g. javah -jni com.tinymonster.ffmpegstudy1.FFmpegDecode.

4. The generated .h header file then appears under the same app\build\intermediates\classes\debug directory.

III. Configure the NDK in the Android project

1. Modify gradle.properties in the project root; for this legacy ndk-build workflow this typically means adding android.useDeprecatedNdk=true.

Then modify build.gradle under app:

sourceSets.main.jni.srcDirs = []  // disable the built-in NDK build
sourceSets.main.jniLibs.srcDirs = ['src/main/libs', 'src/main/jniLibs']

task ndkBuild(type: Exec, description: 'Compile JNI source with NDK') {
    Properties properties = new Properties()
    properties.load(project.rootProject.file('local.properties').newDataInputStream())
    def ndkDir = properties.getProperty('ndk.dir')
    if (org.apache.tools.ant.taskdefs.condition.Os.isFamily(org.apache.tools.ant.taskdefs.condition.Os.FAMILY_WINDOWS)) {
        commandLine "$ndkDir/ndk-build.cmd", '-C', file('src/main/jni').absolutePath
    } else {
        commandLine "$ndkDir/ndk-build", '-C', file('src/main/jni').absolutePath
    }
}

tasks.withType(JavaCompile) {
    compileTask -> compileTask.dependsOn ndkBuild
}

task ndkClean(type: Exec, description: 'Clean NDK Binaries') {
    Properties properties = new Properties()
    properties.load(project.rootProject.file('local.properties').newDataInputStream())
    def ndkDir = properties.getProperty('ndk.dir')
    if (org.apache.tools.ant.taskdefs.condition.Os.isFamily(org.apache.tools.ant.taskdefs.condition.Os.FAMILY_WINDOWS)) {
        commandLine "$ndkDir/ndk-build.cmd", 'clean', '-C', file('src/main/jni').absolutePath
    } else {
        commandLine "$ndkDir/ndk-build", 'clean', '-C', file('src/main/jni').absolutePath
    }
}

defaultConfig {
    multiDexEnabled true
}

clean.dependsOn 'ndkClean'

2. Modify local.properties in the project root, adding an ndk.dir entry that points to your local NDK installation; the ndkBuild task above reads ndk.dir from this file.

3. Create a jni folder under app/src/main, and inside it create two files: Android.mk and Application.mk.

4. Copy the header file generated earlier into the jni folder, and create the matching .cpp file next to it.

5. Create an include folder under the jni folder and copy the FFmpeg sources into it; Android.mk below expects them under include/ffmpeg, with the libyuv headers under include/libyuv.

6. Put the .so files built earlier into the jni folder.

7. Write the Android.mk file:

LOCAL_PATH := $(call my-dir)

# Prebuilt FFmpeg shared libraries
include $(CLEAR_VARS)
LOCAL_MODULE := avcodec
LOCAL_SRC_FILES := libavcodec-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avdevice
LOCAL_SRC_FILES := libavdevice-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avfilter
LOCAL_SRC_FILES := libavfilter-5.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avformat
LOCAL_SRC_FILES := libavformat-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avutil
LOCAL_SRC_FILES := libavutil-54.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := postproc
LOCAL_SRC_FILES := libpostproc-53.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swresample
LOCAL_SRC_FILES := libswresample-1.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swscale
LOCAL_SRC_FILES := libswscale-3.so
include $(PREBUILT_SHARED_LIBRARY)

# Prebuilt libyuv and OpenCV Java libraries
include $(CLEAR_VARS)
LOCAL_MODULE := yuv
LOCAL_SRC_FILES := libyuv.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := opencv_java
LOCAL_SRC_FILES := libopencv_java.so
include $(PREBUILT_SHARED_LIBRARY)

# Our JNI decoding module
include $(CLEAR_VARS)
#OPENCV_CAMERA_MODULES := on
#OPENCV_INSTALL_MODULES := off
include ../../../../native/jni/OpenCV.mk
LOCAL_MODULE := ffmdecode
LOCAL_SRC_FILES := com_tinymonster_ffmpegstudy1_FFmpegDecode.cpp
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include/ffmpeg
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include/libyuv
LOCAL_LDLIBS += -llog -ldl
LOCAL_SHARED_LIBRARIES := avcodec avdevice avfilter avformat avutil postproc swresample swscale yuv
include $(BUILD_SHARED_LIBRARY)

8. Write the Application.mk file; note that APP_ABI must match the ABI the prebuilt .so files were built for:

APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
APP_ABI := armeabi

9. Copy OpenCV's native library directory into the project; the OpenCV.mk include in Android.mk above points to it.

10. Click ndkBuild in the Gradle panel on the right side of the screen.

11. Two folders, libs and obj, then appear under jni; they contain the generated .so libraries.

12. Write the C++ code that reads a video from the phone, decodes it, and converts each frame to an OpenCV Mat:

#include "com_tinymonster_ffmpegstudy1_FFmpegDecode.h"
#include <android/log.h>
#include <opencv2/opencv.hpp>
#include <cstdlib>
#include <cstring>

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
}

#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO,"ccj",FORMAT,##__VA_ARGS__);
#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR,"ccj",FORMAT,##__VA_ARGS__);

using namespace cv;

JNIEXPORT jint JNICALL Java_com_tinymonster_ffmpegstudy1_FFmpegDecode_DecodeFile
  (JNIEnv* env, jclass obj, jstring input_)
{
    const char* filename = env->GetStringUTFChars(input_, 0);

    AVCodec* pCodec;             // decoder
    AVCodecContext* pCodecCtx;   // decoder context
    AVFrame* pAvFrame;           // holds one decoded frame
    AVFormatContext* pFormatCtx; // holds the container/stream information

    av_register_all(); // register all available formats and codecs

    pFormatCtx = avformat_alloc_context();
    if (avformat_open_input(&pFormatCtx, filename, NULL, NULL) != 0) { // read the file header
        LOGE("%s", "Can't open the input file!");
        return -1;
    }
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) { // read the stream information
        LOGE("%s", "Can't find the stream information!");
        return -1;
    }

    // Walk the streams and remember the first video stream
    int videoindex = -1;
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; ++i) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoindex = i;
            break;
        }
    }
    if (videoindex == -1) {
        LOGE("%s", "Can't find a video stream!");
        return 1;
    }

    pCodecCtx = pFormatCtx->streams[videoindex]->codec; // codec context of the video stream
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id); // find a decoder for this codec
    if (pCodec == NULL) {
        LOGE("%s", "Can't find the decoder!");
        return 2;
    }
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) { // open the decoder
        LOGE("%s", "Can't open the decoder!");
        return 3;
    }

    pAvFrame = avcodec_alloc_frame();           // decoded frame
    AVFrame* pFrameBGR = avcodec_alloc_frame(); // frame converted to BGR
    // OpenCV stores pixels in BGR order, so allocate a BGR24 buffer
    int size = avpicture_get_size(AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height);
    uint8_t* out_buffer = (uint8_t*)av_malloc(size);
    avpicture_fill((AVPicture*)pFrameBGR, out_buffer, AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height);

    AVPacket* packet = (AVPacket*)malloc(sizeof(AVPacket));

    LOGI("Container format: %s", pFormatCtx->iformat->name);
    LOGI("Duration (s): %lld", (long long)(pFormatCtx->duration / AV_TIME_BASE));
    LOGI("Video size: %d x %d", pCodecCtx->width, pCodecCtx->height);
    LOGI("Decoder name: %s", pCodec->name);

    struct SwsContext* img_convert_ctx = sws_getContext(
            pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
            pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_BGR24,
            SWS_BICUBIC, NULL, NULL, NULL);

    // OpenCV matrix that receives each converted frame
    cv::Mat pCvMat;
    pCvMat.create(cv::Size(pCodecCtx->width, pCodecCtx->height), CV_8UC3);

    int ret;
    int got_picture;
    int frame_count = 0;

    // Read packets one by one and decode the ones from the video stream
    while (av_read_frame(pFormatCtx, packet) >= 0) {
        if (packet->stream_index == videoindex) {
            ret = avcodec_decode_video2(pCodecCtx, pAvFrame, &got_picture, packet);
            if (ret < 0) {
                LOGE("%s", "Decode error.");
                return 4;
            }
            if (got_picture) {
                // Convert YUV to BGR
                sws_scale(img_convert_ctx, (const uint8_t* const*)pAvFrame->data, pAvFrame->linesize,
                          0, pCodecCtx->height, pFrameBGR->data, pFrameBGR->linesize);
                memcpy(pCvMat.data, out_buffer, size); // copy the BGR pixels into the Mat
                frame_count++;
                LOGI("Decoded frame %d", frame_count);
            }
        }
        av_free_packet(packet);
    }

    free(packet);
    av_free(out_buffer);
    av_free(pFrameBGR);
    av_free(pAvFrame);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
    sws_freeContext(img_convert_ctx);
    env->ReleaseStringUTFChars(input_, filename);
    return 0;
}

13. With that in place, Java code can call into the C++ code to process video.
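A minimal usage sketch of the Java side; the video path here is a hypothetical example and reading it requires storage access on a real device:

import android.util.Log;

public class DecodeDemo {
    public static void run() {
        // Hypothetical path; point this at a real video on the phone.
        int ret = FFmpegDecode.DecodeFile("/sdcard/test.mp4");
        if (ret != 0) {
            Log.e("ccj", "DecodeFile failed with code " + ret);
        }
    }
}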
