Android FFmpeg04 --- Playing Local Audio (OpenSL ES)
2022-04-15 · 沪漂意哥哥
1. CMakeLists.txt
cmake_minimum_required(VERSION 3.4.1)

# Headers for the prebuilt FFmpeg libraries
include_directories(include)

# Add the prebuilt FFmpeg .so directory for the current ABI to the linker search path
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -L${CMAKE_SOURCE_DIR}/libs/${ANDROID_ABI}")

# Collect every source file in the current directory
aux_source_directory(. SOURCE)

add_library(
        native-lib
        SHARED
        ${SOURCE})

find_library(
        log-lib
        log)

target_link_libraries(
        native-lib
        avcodec
        avdevice
        avfilter
        avformat
        avutil
        swresample
        swscale
        ${log-lib}
        OpenSLES
        android)
2. LYPlayer
public class LYPlayer {

    private static final String TAG = "LYPlayer";

    private String source; // data source (file path or URL)
    private WlOnParparedListener mnOnParparedListener;

    public LYPlayer() {
    }

    /**
     * Set the data source.
     * @param source file path or URL
     */
    public void setSource(String source) {
        this.source = source;
    }

    /**
     * Set the "prepared" callback.
     * @param mnOnParparedListener
     */
    public void setWlOnParparedListener(WlOnParparedListener mnOnParparedListener) {
        this.mnOnParparedListener = mnOnParparedListener;
    }

    public void parpared() {
        Log.i(TAG, "parpared");
        if (TextUtils.isEmpty(source)) {
            MyLog.d("source must not be empty");
            return;
        }
        // Prepare on a worker thread so the UI thread is never blocked
        new Thread(new Runnable() {
            @Override
            public void run() {
                n_parpared(source);
            }
        }).start();
    }

    public void start() {
        if (TextUtils.isEmpty(source)) {
            MyLog.d("source is empty");
            return;
        }
        // Start demuxing/decoding on a worker thread
        new Thread(new Runnable() {
            @Override
            public void run() {
                n_start();
            }
        }).start();
    }

    /**
     * Called back from C++ once the native player is prepared.
     */
    public void onCallParpared() {
        if (mnOnParparedListener != null) {
            mnOnParparedListener.onParpared();
        }
    }

    private IPlayerListener playerListener;

    public void setPlayerListener(IPlayerListener playerListener) {
        this.playerListener = playerListener;
    }

    public void pause() {
        n_pause();
    }

    public void resume() {
        n_resume();
    }

    /**
     * Called back from C++ with the current position and total duration (in seconds).
     */
    public void onCallTimeInfo(int currentTime, int totalTime) {
        if (playerListener == null) {
            return;
        }
        playerListener.onCurrentTime(currentTime, totalTime);
    }

    public void setMute(int mute) {
        n_mute(mute);
    }

    public void setVolume(int percent) {
        if (percent >= 0 && percent <= 100) {
            n_volume(percent);
        }
    }

    public void seek(int secds) {
        n_seek(secds);
    }

    public native void n_parpared(String source);
    public native void n_start();
    private native void n_resume();
    private native void n_pause();
    private native void n_mute(int mute);
    private native void n_volume(int percent);
    private native void n_seek(int secds);
}
3. native-lib.cpp
JavaVM *javaVM = NULL;
LYCallJava *callJava = NULL;
LYFFmpeg *ffmpeg = NULL;
LYPlaystatus *playstatus = NULL;

extern "C"
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
    jint result = -1;
    javaVM = vm;
    JNIEnv *env;
    if (vm->GetEnv((void **) &env, JNI_VERSION_1_4) != JNI_OK) {
        return result;
    }
    return JNI_VERSION_1_4;
}

extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_ffmpeg_player_LYPlayer_n_1parpared(JNIEnv *env, jobject instance,
                                                           jstring source_) {
    // Note: the UTF chars are not released here because LYFFmpeg keeps the pointer.
    const char *source = env->GetStringUTFChars(source_, 0);
    if (ffmpeg == NULL) {
        if (callJava == NULL) {
            callJava = new LYCallJava(javaVM, env, &instance);
        }
        playstatus = new LYPlaystatus();
        ffmpeg = new LYFFmpeg(playstatus, callJava, source);
        ffmpeg->parpared();
    }
}

extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_ffmpeg_player_LYPlayer_n_1start(JNIEnv *env, jobject thiz) {
    if (ffmpeg != NULL) {
        ffmpeg->start();
    }
}

extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_ffmpeg_player_LYPlayer_n_1resume(JNIEnv *env, jobject thiz) {
    if (ffmpeg != NULL) {
        ffmpeg->resume();
    }
}

extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_ffmpeg_player_LYPlayer_n_1pause(JNIEnv *env, jobject thiz) {
    if (ffmpeg != NULL) {
        ffmpeg->pause();
    }
}

extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_ffmpeg_player_LYPlayer_n_1mute(JNIEnv *env, jobject thiz, jint mute) {
    if (ffmpeg != NULL) {
        ffmpeg->setMute(mute);
    }
}

extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_ffmpeg_player_LYPlayer_n_1volume(JNIEnv *env, jobject thiz, jint percent) {
    if (ffmpeg != NULL) {
        ffmpeg->setVolume(percent);
    }
}

extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_ffmpeg_player_LYPlayer_n_1seek(JNIEnv *env, jobject thiz, jint secds) {
    if (ffmpeg != NULL) {
        ffmpeg->seek(secds);
    }
}
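The LYCallJava bridge created above is not listed in this post (it lives in the repository). As a rough, hypothetical sketch of how such a C++-to-Java callback helper is commonly written — the member names and the CHILD_THREAD / MAIN_THREAD constants are assumptions inferred from the calls onCallParpared(CHILD_THREAD) and onCallTimeInfo(CHILD_THREAD, ...) used later — it might look like this:

// LYCallJava -- hypothetical sketch, not the repository's actual file.
#include <jni.h>

#define MAIN_THREAD  0   // assumed values
#define CHILD_THREAD 1

class LYCallJava {
public:
    LYCallJava(JavaVM *vm, JNIEnv *env, jobject *obj) {
        javaVM = vm;
        jniEnv = env;
        jobj = env->NewGlobalRef(*obj);                  // keep the LYPlayer instance alive
        jclass clazz = env->GetObjectClass(jobj);
        jmid_parpared = env->GetMethodID(clazz, "onCallParpared", "()V");
        jmid_timeinfo = env->GetMethodID(clazz, "onCallTimeInfo", "(II)V");
    }

    void onCallParpared(int threadType) {
        if (threadType == CHILD_THREAD) {
            JNIEnv *env = NULL;
            if (javaVM->AttachCurrentThread(&env, NULL) != JNI_OK) {
                return;                                  // cannot attach this thread to the VM
            }
            env->CallVoidMethod(jobj, jmid_parpared);
            javaVM->DetachCurrentThread();
        } else {
            jniEnv->CallVoidMethod(jobj, jmid_parpared);
        }
    }

    void onCallTimeInfo(int threadType, int currentTime, int totalTime) {
        if (threadType == CHILD_THREAD) {
            JNIEnv *env = NULL;
            if (javaVM->AttachCurrentThread(&env, NULL) != JNI_OK) {
                return;
            }
            env->CallVoidMethod(jobj, jmid_timeinfo, currentTime, totalTime);
            javaVM->DetachCurrentThread();
        } else {
            jniEnv->CallVoidMethod(jobj, jmid_timeinfo, currentTime, totalTime);
        }
    }

private:
    JavaVM *javaVM = NULL;
    JNIEnv *jniEnv = NULL;      // only valid on the thread that created the player
    jobject jobj = NULL;
    jmethodID jmid_parpared = NULL;
    jmethodID jmid_timeinfo = NULL;
};

The important detail is that decode and playback run on worker threads, so the callback must attach the current thread to the JVM before calling into Java and detach it afterwards.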
4. LYFFmpeg.cpp
#include "LYFFmpeg.h"
LYFFmpeg::LYFFmpeg(LYPlaystatus *playstatus, LYCallJava *callJava, const char *url) {
this->playstatus = playstatus;
this->callJava = callJava;
this->url = url;
pthread_mutex_init(&seek_mutex, NULL);
}
LYFFmpeg::~LYFFmpeg() {
pthread_mutex_destroy(&seek_mutex);
}
void *decodeFFmpeg(void *data) {
LYFFmpeg *wlFFmpeg = (LYFFmpeg *) data;
wlFFmpeg->decodeFFmpegThread();
pthread_exit(&wlFFmpeg->decodeThread);
}
void LYFFmpeg::parpared() {
pthread_create(&decodeThread, NULL, decodeFFmpeg, this);
}
void LYFFmpeg::decodeFFmpegThread() {
LOGE("decodeFFmpegThread");
av_register_all();
avformat_network_init();
pFormatCtx = avformat_alloc_context();
if(avformat_open_input(&pFormatCtx, url, NULL, NULL) != 0) {
if(LOG_DEBUG) {
LOGE("can not open url :%s", url);
}
return;
}
if(avformat_find_stream_info(pFormatCtx, NULL) < 0) {
if(LOG_DEBUG) {
LOGE("can not find streams from %s", url);
}
return;
}
for(int i = 0; i < pFormatCtx->nb_streams; i++) {
if(pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
if(audio == NULL) {
audio = new LYAudio(playstatus, pFormatCtx->streams[i]->codecpar->sample_rate, callJava);
audio->streamIndex = i;
audio->duration = pFormatCtx->duration / AV_TIME_BASE;
audio->time_base = pFormatCtx->streams[i]->time_base;
audio->codecpar = pFormatCtx->streams[i]->codecpar;
duration = audio->duration;
}
}
}
AVCodec *dec = avcodec_find_decoder(audio->codecpar->codec_id);
if(!dec) {
if(LOG_DEBUG) {
LOGE("can not find decoder");
}
return;
}
audio->avCodecContext = avcodec_alloc_context3(dec);
if(!audio->avCodecContext) {
if(LOG_DEBUG) {
LOGE("can not alloc new decodecctx");
}
return;
}
if(avcodec_parameters_to_context(audio->avCodecContext, audio->codecpar) < 0) {
if(LOG_DEBUG) {
LOGE("can not fill decodecctx");
}
return;
}
if(avcodec_open2(audio->avCodecContext, dec, 0) != 0) {
if(LOG_DEBUG) {
LOGE("cant not open audio strames");
}
return;
}
LOGE("decodeFFmpegThread end");
callJava->onCallParpared(CHILD_THREAD);
}
void LYFFmpeg::start() {
    if (audio == NULL) {
        if (LOG_DEBUG) {
            LOGE("audio is null");
        }
        return;          // must return even when LOG_DEBUG is off
    }
    audio->play();

    // Demux loop: read packets and push audio packets into the queue
    while (playstatus != NULL && !playstatus->exit) {
        if (playstatus->seek) {
            continue;    // busy-wait while a seek is in progress
        }
        if (audio->queue->getQueueSize() > 40) {
            continue;    // simple back-pressure: keep at most ~40 packets queued
        }
        AVPacket *avPacket = av_packet_alloc();
        if (av_read_frame(pFormatCtx, avPacket) == 0) {
            if (avPacket->stream_index == audio->streamIndex) {
                audio->queue->putAvpacket(avPacket);
            } else {
                av_packet_free(&avPacket);
                av_free(avPacket);
            }
        } else {
            // End of file: wait for the queue to drain, then exit
            av_packet_free(&avPacket);
            av_free(avPacket);
            while (playstatus != NULL && !playstatus->exit) {
                if (audio->queue->getQueueSize() > 0) {
                    continue;
                } else {
                    playstatus->exit = true;
                    break;
                }
            }
        }
    }
    if (LOG_DEBUG) {
        LOGD("decoding finished");
    }
}
void LYFFmpeg::pause() {
    if (audio != NULL) {
        audio->pause();
    }
}

void LYFFmpeg::resume() {
    if (audio != NULL) {
        audio->resume();
    }
}

void LYFFmpeg::setMute(jint mute) {
    if (audio != NULL) {
        audio->setMute(mute);
    }
}

void LYFFmpeg::setVolume(int percent) {
    if (audio != NULL) {
        audio->setVolume(percent);
    }
}

void LYFFmpeg::seek(jint secds) {
    if (duration <= 0) {
        return;
    }
    if (secds >= 0 && secds <= duration) {
        if (audio != NULL) {
            playstatus->seek = true;
            audio->queue->clearAvpacket();
            audio->clock = 0;
            audio->last_time = 0;
            pthread_mutex_lock(&seek_mutex);
            int64_t rel = secds * AV_TIME_BASE;
            avformat_seek_file(pFormatCtx, -1, INT64_MIN, rel, INT64_MAX, 0);
            pthread_mutex_unlock(&seek_mutex);
            playstatus->seek = false;
        }
    }
}
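LYPlaystatus is also used throughout but not listed in this post. Judging from the two flags the code reads and writes (exit and seek), a minimal hypothetical sketch could be as simple as:

// LYPlaystatus.h -- hypothetical sketch based on the flags used above.
#ifndef LY_PLAYSTATUS_H
#define LY_PLAYSTATUS_H

class LYPlaystatus {
public:
    bool exit = false;   // set to true to stop the demux/decode loops
    bool seek = false;   // set to true while a seek is in progress
};

#endif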
5. LYAudio.cpp
#include "LYAudio.h"
LYAudio::LYAudio(LYPlaystatus *playstatus, int sample_rate, LYCallJava *callJava) {
this->playstatus = playstatus;
this->sample_rate = sample_rate;
queue = new LYQueue(playstatus);
buffer = (uint8_t *) av_malloc(sample_rate * 2 * 2);
this->callJava = callJava;
}
LYAudio::~LYAudio() {
}
void *decodPlay(void *data) {
LYAudio *wlAudio = (LYAudio *) data;
wlAudio->initOpenSLES();
pthread_exit(&wlAudio->thread_play);
}
void LYAudio::play() {
pthread_create(&thread_play, NULL, decodPlay, this);
}
// Called by OpenSL ES whenever it needs the next buffer of PCM data
void pcmBufferCallBack(SLAndroidSimpleBufferQueueItf bf, void *context) {
    LYAudio *wlAudio = (LYAudio *) context;
    if (wlAudio != NULL) {
        int buffersize = wlAudio->resampleAudio();
        if (buffersize > 0) {
            // Advance the audio clock by the duration of this buffer (16-bit stereo)
            wlAudio->clock += buffersize / ((double) (wlAudio->sample_rate * 2 * 2));
            // Report the position to Java at most every 0.25 s
            if (wlAudio->clock - wlAudio->last_time >= 0.25) {
                wlAudio->last_time = wlAudio->clock;
                wlAudio->callJava->onCallTimeInfo(CHILD_THREAD, wlAudio->clock, wlAudio->duration);
            }
            (*wlAudio->pcmBufferQueue)->Enqueue(wlAudio->pcmBufferQueue, (char *) wlAudio->buffer, buffersize);
        }
    }
}
void LYAudio::initOpenSLES() {
    LOGE("initOpenSLES begin");
    SLresult result;

    // Step 1: create and realize the engine
    result = slCreateEngine(&engineObject, 0, 0, 0, 0, 0);
    result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
    result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);

    // Step 2: create the output mix
    const SLInterfaceID mids[1] = {SL_IID_ENVIRONMENTALREVERB};
    const SLboolean mreq[1] = {SL_BOOLEAN_FALSE};
    result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, mids, mreq);
    (void) result;
    result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
    (void) result;
    result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB, &outputMixEnvironmentalReverb);
    if (SL_RESULT_SUCCESS == result) {
        result = (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
                outputMixEnvironmentalReverb, &reverbSettings);
        (void) result;
    }
    SLDataLocator_OutputMix outputMix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink audioSnk = {&outputMix, 0};

    // Step 3: describe the PCM source (buffer queue + format)
    SLDataLocator_AndroidSimpleBufferQueue android_queue = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
    SLDataFormat_PCM pcm = {
            SL_DATAFORMAT_PCM,                              // PCM data
            2,                                              // 2 channels (stereo)
            static_cast<SLuint32>(getCurrentSampleRateForOpensles(sample_rate)), // sample rate as an OpenSL ES constant
            SL_PCMSAMPLEFORMAT_FIXED_16,                    // 16 bits per sample
            SL_PCMSAMPLEFORMAT_FIXED_16,                    // container size, same as bits per sample
            SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT, // stereo (front left | front right)
            SL_BYTEORDER_LITTLEENDIAN                       // little-endian byte order
    };
    SLDataSource slDataSource = {&android_queue, &pcm};

    // Step 4: create the audio player, requesting buffer-queue, volume and mute/solo interfaces
    const SLInterfaceID ids[3] = {SL_IID_BUFFERQUEUE, SL_IID_VOLUME, SL_IID_MUTESOLO};
    const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
    (*engineEngine)->CreateAudioPlayer(engineEngine, &pcmPlayerObject, &slDataSource, &audioSnk, 3, ids, req);
    (*pcmPlayerObject)->Realize(pcmPlayerObject, SL_BOOLEAN_FALSE);

    // Get the play interface
    (*pcmPlayerObject)->GetInterface(pcmPlayerObject, SL_IID_PLAY, &pcmPlayerPlay);
    if (pcmPlayerPlay == NULL) {
        LOGE("pcmPlayerPlay == NULL");
    }
    // Get the mute/solo interface (per-channel mute)
    (*pcmPlayerObject)->GetInterface(pcmPlayerObject, SL_IID_MUTESOLO, &pcmMutePlay);
    if (pcmMutePlay == NULL) {
        LOGE("pcmMutePlay == NULL");
    }
    // Get the volume interface
    (*pcmPlayerObject)->GetInterface(pcmPlayerObject, SL_IID_VOLUME, &pcmVolumePlay);
    if (pcmVolumePlay == NULL) {
        LOGE("pcmVolumePlay == NULL");
    }
    // Get the buffer queue interface and register the callback
    (*pcmPlayerObject)->GetInterface(pcmPlayerObject, SL_IID_BUFFERQUEUE, &pcmBufferQueue);
    if (pcmBufferQueue == NULL) {
        LOGE("pcmBufferQueue == NULL");
    }
    (*pcmBufferQueue)->RegisterCallback(pcmBufferQueue, pcmBufferCallBack, this);

    // Start playing and prime the queue by invoking the callback once
    (*pcmPlayerPlay)->SetPlayState(pcmPlayerPlay, SL_PLAYSTATE_PLAYING);
    pcmBufferCallBack(pcmBufferQueue, this);
    LOGE("initOpenSLES end");
}
int LYAudio::getCurrentSampleRateForOpensles(int sample_rate) {
    int rate = 0;
    switch (sample_rate) {
        case 8000:
            rate = SL_SAMPLINGRATE_8;
            break;
        case 11025:
            rate = SL_SAMPLINGRATE_11_025;
            break;
        case 12000:
            rate = SL_SAMPLINGRATE_12;
            break;
        case 16000:
            rate = SL_SAMPLINGRATE_16;
            break;
        case 22050:
            rate = SL_SAMPLINGRATE_22_05;
            break;
        case 24000:
            rate = SL_SAMPLINGRATE_24;
            break;
        case 32000:
            rate = SL_SAMPLINGRATE_32;
            break;
        case 44100:
            rate = SL_SAMPLINGRATE_44_1;
            break;
        case 48000:
            rate = SL_SAMPLINGRATE_48;
            break;
        case 64000:
            rate = SL_SAMPLINGRATE_64;
            break;
        case 88200:
            rate = SL_SAMPLINGRATE_88_2;
            break;
        case 96000:
            rate = SL_SAMPLINGRATE_96;
            break;
        case 192000:
            rate = SL_SAMPLINGRATE_192;
            break;
        default:
            rate = SL_SAMPLINGRATE_44_1;
    }
    return rate;
}
int LYAudio::resampleAudio() {
    while (playstatus != NULL && !playstatus->exit) {
        // Take the next packet from the queue
        avPacket = av_packet_alloc();
        if (queue->getAvpacket(avPacket) != 0) {
            av_packet_free(&avPacket);
            av_free(avPacket);
            avPacket = NULL;
            continue;
        }
        ret = avcodec_send_packet(avCodecContext, avPacket);
        if (ret != 0) {
            av_packet_free(&avPacket);
            av_free(avPacket);
            avPacket = NULL;
            continue;
        }
        avFrame = av_frame_alloc();
        ret = avcodec_receive_frame(avCodecContext, avFrame);
        if (ret == 0) {
            // Make sure channels and channel_layout are both filled in
            if (avFrame->channels > 0 && avFrame->channel_layout == 0) {
                avFrame->channel_layout = av_get_default_channel_layout(avFrame->channels);
            } else if (avFrame->channels == 0 && avFrame->channel_layout > 0) {
                avFrame->channels = av_get_channel_layout_nb_channels(avFrame->channel_layout);
            }

            // Resample the decoded frame to 16-bit stereo at the original sample rate
            SwrContext *swr_ctx;
            swr_ctx = swr_alloc_set_opts(
                    NULL,
                    AV_CH_LAYOUT_STEREO,                 // output channel layout
                    AV_SAMPLE_FMT_S16,                   // output sample format
                    avFrame->sample_rate,                // output sample rate
                    avFrame->channel_layout,             // input channel layout
                    (AVSampleFormat) avFrame->format,    // input sample format
                    avFrame->sample_rate,                // input sample rate
                    0, NULL);
            if (!swr_ctx || swr_init(swr_ctx) < 0) {
                av_packet_free(&avPacket);
                av_free(avPacket);
                avPacket = NULL;
                av_frame_free(&avFrame);
                av_free(avFrame);
                avFrame = NULL;
                swr_free(&swr_ctx);
                continue;
            }
            int nb = swr_convert(
                    swr_ctx,
                    &buffer,
                    avFrame->nb_samples,
                    (const uint8_t **) avFrame->data,
                    avFrame->nb_samples);

            // Number of bytes actually written into buffer
            int out_channels = av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO);
            data_size = nb * out_channels * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16);

            // Update the audio clock from the frame's pts (never going backwards)
            now_time = avFrame->pts * av_q2d(time_base);
            if (now_time < clock) {
                now_time = clock;
            }
            clock = now_time;

            av_packet_free(&avPacket);
            av_free(avPacket);
            avPacket = NULL;
            av_frame_free(&avFrame);
            av_free(avFrame);
            avFrame = NULL;
            swr_free(&swr_ctx);
            break;
        } else {
            av_packet_free(&avPacket);
            av_free(avPacket);
            avPacket = NULL;
            av_frame_free(&avFrame);
            av_free(avFrame);
            avFrame = NULL;
            continue;
        }
    }
    return data_size;
}
void LYAudio::pause() {
    if (pcmPlayerPlay != NULL) {
        (*pcmPlayerPlay)->SetPlayState(pcmPlayerPlay, SL_PLAYSTATE_PAUSED);
    }
}

void LYAudio::resume() {
    if (pcmPlayerPlay != NULL) {
        (*pcmPlayerPlay)->SetPlayState(pcmPlayerPlay, SL_PLAYSTATE_PLAYING);
    }
}

// mute == 0: right channel only, mute == 1: left channel only, mute == 2: both channels
void LYAudio::setMute(jint mute) {
    if (pcmMutePlay == NULL) {
        return;
    }
    this->mute = mute;
    if (mute == 0) {
        // right channel only: mute channel 0, unmute channel 1
        (*pcmMutePlay)->SetChannelMute(pcmMutePlay, 1, false);
        (*pcmMutePlay)->SetChannelMute(pcmMutePlay, 0, true);
    } else if (mute == 1) {
        // left channel only: mute channel 1, unmute channel 0
        (*pcmMutePlay)->SetChannelMute(pcmMutePlay, 1, true);
        (*pcmMutePlay)->SetChannelMute(pcmMutePlay, 0, false);
    } else if (mute == 2) {
        // both channels (center/stereo)
        (*pcmMutePlay)->SetChannelMute(pcmMutePlay, 1, false);
        (*pcmMutePlay)->SetChannelMute(pcmMutePlay, 0, false);
    }
}
void LYAudio::setVolume(int percent) {
    LOGE("setVolume %d", percent);
    if (pcmVolumePlay != NULL) {
        // Map 0-100% to an attenuation in millibels (0 mB = full volume);
        // lower percentages use a steeper factor so the change stays audible.
        if (percent > 30) {
            (*pcmVolumePlay)->SetVolumeLevel(pcmVolumePlay, (100 - percent) * -20);
        } else if (percent > 25) {
            (*pcmVolumePlay)->SetVolumeLevel(pcmVolumePlay, (100 - percent) * -22);
        } else if (percent > 20) {
            (*pcmVolumePlay)->SetVolumeLevel(pcmVolumePlay, (100 - percent) * -25);
        } else if (percent > 15) {
            (*pcmVolumePlay)->SetVolumeLevel(pcmVolumePlay, (100 - percent) * -28);
        } else if (percent > 10) {
            (*pcmVolumePlay)->SetVolumeLevel(pcmVolumePlay, (100 - percent) * -30);
        } else if (percent > 5) {
            (*pcmVolumePlay)->SetVolumeLevel(pcmVolumePlay, (100 - percent) * -34);
        } else if (percent > 3) {
            (*pcmVolumePlay)->SetVolumeLevel(pcmVolumePlay, (100 - percent) * -37);
        } else if (percent > 0) {
            (*pcmVolumePlay)->SetVolumeLevel(pcmVolumePlay, (100 - percent) * -40);
        } else {
            (*pcmVolumePlay)->SetVolumeLevel(pcmVolumePlay, (100 - percent) * -100);
        }
    }
}
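Finally, LYQueue — the packet queue shared by the demux thread (putAvpacket) and the OpenSL ES callback thread (getAvpacket) — is only used, never shown, in this post. A minimal, hypothetical sketch of a mutex/condition-variable implementation with the same method names (not the repository's actual code) might look like:

// LYQueue.h -- hypothetical sketch of the packet queue used above.
#include <queue>
#include <pthread.h>
#include "LYPlaystatus.h"
extern "C" {
#include <libavcodec/avcodec.h>
}

class LYQueue {
public:
    LYQueue(LYPlaystatus *playstatus) : playstatus(playstatus) {
        pthread_mutex_init(&mutexPacket, NULL);
        pthread_cond_init(&condPacket, NULL);
    }

    ~LYQueue() {
        clearAvpacket();
        pthread_mutex_destroy(&mutexPacket);
        pthread_cond_destroy(&condPacket);
    }

    // Producer side: called from the demux loop in LYFFmpeg::start()
    int putAvpacket(AVPacket *packet) {
        pthread_mutex_lock(&mutexPacket);
        queuePacket.push(packet);
        pthread_cond_signal(&condPacket);               // wake a waiting consumer
        pthread_mutex_unlock(&mutexPacket);
        return 0;
    }

    // Consumer side: called from the OpenSL ES callback via resampleAudio()
    int getAvpacket(AVPacket *packet) {
        pthread_mutex_lock(&mutexPacket);
        while (playstatus != NULL && !playstatus->exit) {
            if (!queuePacket.empty()) {
                AVPacket *src = queuePacket.front();
                if (av_packet_ref(packet, src) == 0) {  // copy into the caller's packet
                    queuePacket.pop();
                    av_packet_free(&src);
                }
                pthread_mutex_unlock(&mutexPacket);
                return 0;
            }
            pthread_cond_wait(&condPacket, &mutexPacket);   // block until a packet arrives
        }
        pthread_mutex_unlock(&mutexPacket);
        return -1;
    }

    int getQueueSize() {
        pthread_mutex_lock(&mutexPacket);
        int size = (int) queuePacket.size();
        pthread_mutex_unlock(&mutexPacket);
        return size;
    }

    void clearAvpacket() {
        pthread_mutex_lock(&mutexPacket);
        while (!queuePacket.empty()) {
            AVPacket *packet = queuePacket.front();
            queuePacket.pop();
            av_packet_free(&packet);
        }
        pthread_mutex_unlock(&mutexPacket);
    }

private:
    std::queue<AVPacket *> queuePacket;
    pthread_mutex_t mutexPacket;
    pthread_cond_t condPacket;
    LYPlaystatus *playstatus = NULL;
};

The blocking getAvpacket is what keeps the OpenSL ES callback waiting while the demux thread catches up, which is why LYFFmpeg::start() only needs the simple "queue size > 40" back-pressure check.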
6. Source code
https://gitee.com/luisliuyi/android-ffmpeg03.git