Rendering the Picture
To render the picture on screen, we start from the Activity.
1. Create the layout. We use a SurfaceView:
<SurfaceView
    android:id="@+id/surfaceView"
    android:layout_width="match_parent"
    android:layout_height="200dp">
</SurfaceView>
The width matches the parent (the full screen width) and the height is 200dp.
2. Put a test file in the device's external storage directory (reading it requires the storage permission) and start playback with its path, where player is the FFmpegdiaPlayer instance created in the next step:
String s = Environment.getExternalStorageDirectory().getAbsolutePath() + "/input.mp4";
player.start(s);
3. Create the player class.
package com.kangwang.ffmpeddemo;

import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import androidx.annotation.NonNull;

public class FFmpegdiaPlayer implements SurfaceHolder.Callback {

    private SurfaceHolder surfaceHolder;

    // load the native library
    static {
        System.loadLibrary("play");
    }

    public FFmpegdiaPlayer(SurfaceView surfaceView) {
        if (surfaceHolder != null) {
            surfaceHolder.removeCallback(this);
        }
        surfaceHolder = surfaceView.getHolder();
        surfaceHolder.addCallback(this);
    }

    public native String stringFromJNI();

    @Override
    public void surfaceCreated(@NonNull SurfaceHolder holder) {
    }

    @Override
    public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int width, int height) {
        this.surfaceHolder = holder;
    }

    @Override
    public void surfaceDestroyed(@NonNull SurfaceHolder holder) {
    }

    public void start(String absolutePath) {
        try {
            natvie_start(absolutePath, surfaceHolder.getSurface());
        } catch (Exception e) {
            System.out.println("------------call error");
        }
    }

    public native void natvie_start(String absolutePath, Surface surface);
}
4. The C++ (JNI) side.
4.1. Create the render window.
ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
4.2. Convert the jstring to a C string.
// get the file path as a C string
const char *path = env->GetStringUTFChars(absolute_path, 0);
4.3. Initialize networking support (only needed if you also want to open network streams).
avformat_network_init();
4.4. Create the top-level format context.
// the top-level (demuxer) context
AVFormatContext *formatContext = avformat_alloc_context();
4.5. Pass options through a dictionary and open the input.
// options handed to the demuxer
AVDictionary *opts = NULL;
// if the input has not opened within this time (in microseconds), treat it as a failure
av_dict_set(&opts, "timeout", "30000000", 0);
// open the input file
int ret = avformat_open_input(&formatContext, path, NULL, &opts);
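avformat_open_input() returns 0 on success and a negative AVERROR on failure, which the complete listing at the end checks. A minimal sketch of that check, plus releasing the options dictionary afterwards (the av_strerror and av_dict_free calls are my additions, not part of the original code):
if (ret != 0) {
    char errbuf[128];
    av_strerror(ret, errbuf, sizeof(errbuf));  // human-readable reason for the failure
    LOGE("avformat_open_input failed");
    av_dict_free(&opts);
    return;
}
// entries the demuxer consumed are removed from opts; free whatever is left
av_dict_free(&opts);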
4.6. Read the stream information.
int video_stream_index = 0;
// after a successful open, probe the file so the streams can be parsed
avformat_find_stream_info(formatContext, NULL);
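avformat_find_stream_info() returns a value >= 0 on success; the original ignores the result, but the same call with a minimal check (my addition) would look like:
if (avformat_find_stream_info(formatContext, NULL) < 0) {
    LOGE("failed to probe the streams");
    avformat_close_input(&formatContext);
    return;
}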
4.7. Find the video stream.
// iterate over the streams and remember the index of the video stream
for (int i = 0; i < formatContext->nb_streams; i++) {
    // video stream
    if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
        video_stream_index = i;
        break;
    }
}
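If the file contains no video stream at all, the loop above leaves video_stream_index at 0, which then points at whatever stream happens to be first. As an alternative (a sketch of my own, not the original approach), FFmpeg can pick the stream for you:
// av_find_best_stream returns the stream index, or a negative AVERROR if none is found.
// On FFmpeg 4.x the fifth argument is AVCodec **, on 5.x and later it is const AVCodec **;
// passing NULL works for both.
int best = av_find_best_stream(formatContext, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
if (best < 0) {
    LOGE("no video stream in this file");
    avformat_close_input(&formatContext);
    return;
}
video_stream_index = best;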
4.8. Set up the decoder from the stream parameters.
// get the stream's codec parameters
AVCodecParameters *parameters = formatContext->streams[video_stream_index]->codecpar;
// look up the decoder by its codec id
AVCodec *vcode = avcodec_find_decoder(parameters->codec_id);
// create the decoder context
AVCodecContext *avCodecContex = avcodec_alloc_context3(vcode);
// copy the codec parameters into the context
avcodec_parameters_to_context(avCodecContex, parameters);
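None of these calls is checked in the original. As a hedged sketch (my addition, with its own illustrative variable names), the same lookups from step 4.8 with their results checked might look like:
AVCodec *decoder = avcodec_find_decoder(parameters->codec_id);
if (decoder == NULL) {
    LOGE("no decoder found for this stream");  // codec not compiled into this FFmpeg build
    avformat_close_input(&formatContext);
    return;
}
AVCodecContext *decoderContext = avcodec_alloc_context3(decoder);
if (decoderContext == NULL || avcodec_parameters_to_context(decoderContext, parameters) < 0) {
    LOGE("failed to set up the decoder context");
    return;
}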
4.9. Decode and render.
// open the decoder
avcodec_open2(avCodecContex, vcode, NULL);
// packet that will carry the compressed data
AVPacket *avPacket = av_packet_alloc();
// scaler/converter from the decoder's pixel format to RGBA
SwsContext *swsContext = sws_getContext(avCodecContex->width, avCodecContex->height,
                                        avCodecContex->pix_fmt, avCodecContex->width,
                                        avCodecContex->height, AV_PIX_FMT_RGBA,
                                        SWS_BILINEAR, 0, 0, 0);
ANativeWindow_setBuffersGeometry(nativeWindow, avCodecContex->width, avCodecContex->height, WINDOW_FORMAT_RGBA_8888);
ANativeWindow_Buffer outBuffer;
while (av_read_frame(formatContext, avPacket) >= 0) {
    avcodec_send_packet(avCodecContex, avPacket);
    AVFrame *frame = av_frame_alloc();
    ret = avcodec_receive_frame(avCodecContex, frame);
    if (ret == AVERROR(EAGAIN)) {
        continue;
    } else if (ret < 0) {
        break;
    }
    // convert YUV to RGBA
    uint8_t *dst_data[4];
    int dst_linesize[4];
    av_image_alloc(dst_data, dst_linesize, avCodecContex->width, avCodecContex->height,
                   AV_PIX_FMT_RGBA, 1);
    sws_scale(swsContext, frame->data, frame->linesize, 0, frame->height, dst_data, dst_linesize);
    ANativeWindow_lock(nativeWindow, &outBuffer, NULL);
    uint8_t *firstWindow = static_cast<uint8_t *>(outBuffer.bits);
    uint8_t *src_data = dst_data[0];
    int destStride = outBuffer.stride * 4;
    int src_linesize = dst_linesize[0];
    // copy row by row; the window's stride may be wider than the frame, so copy only the frame's bytes
    for (int i = 0; i < outBuffer.height; i++) {
        memcpy(firstWindow + i * destStride, src_data + i * src_linesize, src_linesize);
    }
    ANativeWindow_unlockAndPost(nativeWindow);
    LOGI("frame rendered");
}
LOGE("playback finished");
env->ReleaseStringUTFChars(absolute_path, path);
}
The complete JNI function:
extern "C"
JNIEXPORT void JNICALL
Java_com_kangwang_ffmpeddemo_FFmpegdiaPlayer_natvie_1start(JNIEnv *env, jobject thiz,
                                                           jstring absolute_path, jobject surface) {
    // window to render into
    ANativeWindow *nativeWindow = ANativeWindow_fromSurface(env, surface);
    // get the file path as a C string
    LOGE("start_path");
    const char *path = env->GetStringUTFChars(absolute_path, 0);
    // video / audio / rendering
    LOGE("init_net");
    avformat_network_init();
    // the top-level format context
    AVFormatContext *formatContext = avformat_alloc_context();
    // options handed to the demuxer
    AVDictionary *opts = NULL;
    // if the input has not opened within this time (in microseconds), treat it as a failure
    av_dict_set(&opts, "timeout", "30000000", 0);
    // open the input file
    int ret = avformat_open_input(&formatContext, path, NULL, &opts);
    if (ret) { // 0 means success
        LOGI("----------------> open failed");
        return;
    } else {
        LOGI("------------->> success");
    }
    int video_stream_index = 0;
    // after a successful open, probe the file so the streams can be parsed
    avformat_find_stream_info(formatContext, NULL);
    // iterate over the streams and remember the index of the video stream
    for (int i = 0; i < formatContext->nb_streams; i++) {
        // video stream
        if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_index = i;
            break;
        }
    }
    // get the stream's codec parameters
    AVCodecParameters *parameters = formatContext->streams[video_stream_index]->codecpar;
    // look up the decoder by its codec id
    AVCodec *vcode = avcodec_find_decoder(parameters->codec_id);
    // create the decoder context
    AVCodecContext *avCodecContex = avcodec_alloc_context3(vcode);
    // copy the codec parameters into the context
    avcodec_parameters_to_context(avCodecContex, parameters);
    // open the decoder
    avcodec_open2(avCodecContex, vcode, NULL);
    // packet that will carry the compressed data
    AVPacket *avPacket = av_packet_alloc();
    // scaler/converter from the decoder's pixel format to RGBA
    SwsContext *swsContext = sws_getContext(avCodecContex->width, avCodecContex->height,
                                            avCodecContex->pix_fmt, avCodecContex->width,
                                            avCodecContex->height, AV_PIX_FMT_RGBA,
                                            SWS_BILINEAR, 0, 0, 0);
    ANativeWindow_setBuffersGeometry(nativeWindow, avCodecContex->width, avCodecContex->height, WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer outBuffer;
    while (av_read_frame(formatContext, avPacket) >= 0) {
        avcodec_send_packet(avCodecContex, avPacket);
        AVFrame *frame = av_frame_alloc();
        ret = avcodec_receive_frame(avCodecContex, frame);
        if (ret == AVERROR(EAGAIN)) {
            continue;
        } else if (ret < 0) {
            break;
        }
        // convert YUV to RGBA
        uint8_t *dst_data[4];
        int dst_linesize[4];
        av_image_alloc(dst_data, dst_linesize, avCodecContex->width, avCodecContex->height,
                       AV_PIX_FMT_RGBA, 1);
        sws_scale(swsContext, frame->data, frame->linesize, 0, frame->height, dst_data, dst_linesize);
        ANativeWindow_lock(nativeWindow, &outBuffer, NULL);
        uint8_t *firstWindow = static_cast<uint8_t *>(outBuffer.bits);
        uint8_t *src_data = dst_data[0];
        int destStride = outBuffer.stride * 4;
        int src_linesize = dst_linesize[0];
        // copy row by row; the window's stride may be wider than the frame, so copy only the frame's bytes
        for (int i = 0; i < outBuffer.height; i++) {
            memcpy(firstWindow + i * destStride, src_data + i * src_linesize, src_linesize);
        }
        ANativeWindow_unlockAndPost(nativeWindow);
        LOGI("frame rendered");
    }
    LOGE("playback finished");
    env->ReleaseStringUTFChars(absolute_path, path);
}
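One caveat about the loop: it allocates a fresh AVFrame and a fresh RGBA buffer (av_image_alloc) on every iteration without freeing them, the packet is never unreferenced, and nothing is released when playback ends, so memory grows quickly on longer videos. A rough cleanup sketch (my addition, not part of the original code):
// Inside the while loop, after ANativeWindow_unlockAndPost():
av_freep(&dst_data[0]);      // buffer created by av_image_alloc
av_frame_free(&frame);       // frame created by av_frame_alloc
av_packet_unref(avPacket);   // drop the packet payload so the packet can be reused

// After the loop, before returning:
av_packet_free(&avPacket);
sws_freeContext(swsContext);
avcodec_free_context(&avCodecContex);
avformat_close_input(&formatContext);
ANativeWindow_release(nativeWindow);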