Music Visualization

2022-09-26  DarcyZhou

1. Demo

1.1 Implementation flow

  1. Use MediaPlayer to play the given audio and obtain its mediaPlayerId (the audio session id)
  2. Use the Visualizer class to capture the audio data (waveform/FFT) from the playing MediaPlayer
  3. Render the data with a custom view (see the wiring sketch below)
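
How the three steps fit together, as a rough sketch (R.raw.demo_music is a hypothetical resource; doPlay() and initVisualizer() are defined in the sections below):

    doPlay(R.raw.demo_music);  // 1: create the MediaPlayer and start playback
    // 2: once prepared, initVisualizer() attaches a Visualizer to the
    //    player's audio session and registers an OnDataCaptureListener
    // 3: the listener pushes the captured wave/fft bytes into the custom
    //    AudioView controls for drawing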

1.2 Preparation

Visualizer requires the RECORD_AUDIO runtime permission; if you play audio from the SD card, the storage permissions are needed as well.

    private static final String[] PERMISSIONS = new String[]{
          Manifest.permission.RECORD_AUDIO,
          Manifest.permission.MODIFY_AUDIO_SETTINGS
    };

    ActivityCompat.requestPermissions(MainActivity.this, PERMISSIONS, 1);

Then declare the permissions in AndroidManifest.xml:

    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
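
A minimal sketch (an assumption, not part of the original demo) of handling the permission callback before creating the Visualizer:

    // Hypothetical callback in MainActivity: only create the Visualizer once
    // RECORD_AUDIO has actually been granted.
    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions,
                                           int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == 1 && grantResults.length > 0
                && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
            // safe to call initVisualizer() after playback starts
        }
    }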

1.3 Start playback

    private MediaPlayer.OnPreparedListener preparedListener = new MediaPlayer.OnPreparedListener() {...}
    /**
     * Play the audio
     *
     * @param raw raw resource file id
     */
    private void doPlay(final int raw) {
        try {
            mediaPlayer = MediaPlayer.create(MyApp.getInstance(), raw);
            if (mediaPlayer == null) {
                Logger.e(TAG, "mediaPlayer is null");
                return;
            }

            mediaPlayer.setOnErrorListener(errorListener);
            mediaPlayer.setOnPreparedListener(preparedListener);
        } catch (Exception e) {
            Logger.e(e, TAG, e.getMessage());
        }
    }

    /**
     * Get the MediaPlayerId.
     * The Visualizer class needs this parameter.
     * @return MediaPlayerId
     */
    public int getMediaPlayerId() {
        return mediaPlayer.getAudioSessionId();
    }
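
The listener bodies are elided above; a minimal sketch of what they might contain, assuming the flow from section 1.1 (not the original code):

    private MediaPlayer.OnPreparedListener preparedListener =
            new MediaPlayer.OnPreparedListener() {
                @Override
                public void onPrepared(MediaPlayer mp) {
                    mp.start();
                    initVisualizer(); // attach the Visualizer, see section 1.4
                }
            };

    private MediaPlayer.OnErrorListener errorListener =
            new MediaPlayer.OnErrorListener() {
                @Override
                public boolean onError(MediaPlayer mp, int what, int extra) {
                    Logger.e(TAG, "MediaPlayer error what=" + what + " extra=" + extra);
                    return true; // handled; suppresses the completion callback
                }
            };

Note that MediaPlayer.create() prepares the player synchronously, so with create() one could equally call start() right after it returns; the prepared callback matters when using setDataSource() with prepareAsync().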

1.4 Capturing audio data

  1. Set the capture size, within the range [Visualizer.getCaptureSizeRange()[0], Visualizer.getCaptureSizeRange()[1]]
  2. Set the capture rate, within the range [0, Visualizer.getMaxCaptureRate()]

private Visualizer.OnDataCaptureListener dataCaptureListener = new Visualizer.OnDataCaptureListener() {
        @Override
        public void onWaveFormDataCapture(Visualizer visualizer, final byte[] waveform, int samplingRate) {
            audioView.post(new Runnable() {
                @Override
                public void run() {
                    audioView.setWaveData(waveform);
                }
            });
        }

        @Override
        public void onFftDataCapture(Visualizer visualizer, final byte[] fft, int samplingRate) {
            audioView2.post(new Runnable() {
                @Override
                public void run() {
                    audioView2.setWaveData(fft);
                }
            });
        }
    };

private void initVisualizer() {
    try {
        // 1: get the AudioSessionId
        int mediaPlayerId = mediaPlayer.getAudioSessionId();
        if (visualizer != null) {
            visualizer.release();
        }
        visualizer = new Visualizer(mediaPlayerId); // 2: instantiate

        // Capture size: getCaptureSizeRange()[0] is the minimum,
        // getCaptureSizeRange()[1] the maximum
        int captureSize = Visualizer.getCaptureSizeRange()[1];
        // The capture rate is in milli-Hz; getMaxCaptureRate() is typically
        // 20000 (20 captures per second), so this requests 15 per second
        int captureRate = Visualizer.getMaxCaptureRate() * 3 / 4;
        // 3: set the parameters
        visualizer.setCaptureSize(captureSize);
        visualizer.setDataCaptureListener(dataCaptureListener, captureRate, true, true);
        visualizer.setScalingMode(Visualizer.SCALING_MODE_NORMALIZED);
        visualizer.setEnabled(true); // 4: start capturing
    } catch (Exception e) {
        Logger.e(TAG, "initVisualizer failed; check the RECORD_AUDIO permission");
    }
}
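
A matching teardown sketch (an assumption, not shown in the original demo): release the Visualizer together with the MediaPlayer so the effect engine is freed:

    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (visualizer != null) {
            visualizer.setEnabled(false); // stop the capture thread
            visualizer.release();
            visualizer = null;
        }
        if (mediaPlayer != null) {
            mediaPlayer.release();
            mediaPlayer = null;
        }
    }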

1.5 Custom view

  1. Process the data: the bytes delivered in the Visualizer callbacks can be negative (a byte ranges from -128 to 127), so they must be converted before they can be displayed.
    /**
     * Pre-process the data
     *
     * @return non-negative bytes ready for rendering
     */
    private static byte[] readyData(byte[] fft) {
        byte[] newData = new byte[LUMP_COUNT];
        byte abs;
        for (int i = 0; i < LUMP_COUNT; i++) {
            abs = (byte) Math.abs(fft[i]);
            // Math.abs overflows for -128: (byte) Math.abs(-128) is still -128
            newData[i] = abs < 0 ? 127 : abs;
        }
        return newData;
    }
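
The overflow can be verified directly, which is why the abs < 0 guard above is needed:

    byte b = -128;
    byte abs = (byte) Math.abs(b); // Math.abs(-128) == 128 as an int,
                                   // but the cast back to byte wraps to -128
    System.out.println(abs);       // prints -128
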
  2. Next, draw the graphics based on the data:
    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        wavePath.reset();

        for (int i = 0; i < LUMP_COUNT; i++) {
            if (waveData == null) {
                canvas.drawRect((LUMP_WIDTH + LUMP_SPACE) * i,
                        LUMP_MAX_HEIGHT - LUMP_MIN_HEIGHT,
                        (LUMP_WIDTH + LUMP_SPACE) * i + LUMP_WIDTH,
                        LUMP_MAX_HEIGHT,
                        lumpPaint);
                continue;
            }

            switch (upShowStyle) {
                case STYLE_HOLLOW_LUMP:
                    drawLump(canvas, i, false);
                    break;
                case STYLE_WAVE:
                    drawWave(canvas, i, false);
                    break;
                default:
                    break;
            }

            switch (downShowStyle) {
                case STYLE_HOLLOW_LUMP:
                    drawLump(canvas, i, true);
                    break;
                case STYLE_WAVE:
                    drawWave(canvas, i, true);
                    break;
                default:
                    break;
            }
        }
    }

    /**
     * Draw a rectangular bar
     */
    private void drawLump(Canvas canvas, int i, boolean reversal) {
        int minus = reversal ? -1 : 1;

        if (waveData[i] < 0) {
            Logger.w("waveData", "waveData[i] < 0 data: %s", waveData[i]);
        }
        float top = (LUMP_MAX_HEIGHT - (LUMP_MIN_HEIGHT + waveData[i] * SCALE) * minus);

        canvas.drawRect(LUMP_SIZE * i,
                top,
                LUMP_SIZE * i + LUMP_WIDTH,
                LUMP_MAX_HEIGHT,
                lumpPaint);
    }

    /**
     * Draw the curve
     * Bézier curves are used for the drawing here
     */
    private void drawWave(Canvas canvas, int i, boolean reversal) {
        if (pointList == null || pointList.size() < 2) {
            return;
        }
        float ratio = SCALE * (reversal ? -1 : 1);
        if (i < pointList.size() - 2) {
            Point point = pointList.get(i);
            Point nextPoint = pointList.get(i + 1);
            int midX = (point.x + nextPoint.x) >> 1;
            if (i == 0) {
                wavePath.moveTo(point.x, LUMP_MAX_HEIGHT - point.y * ratio);
            }
            wavePath.cubicTo(midX, LUMP_MAX_HEIGHT - point.y * ratio,
                    midX, LUMP_MAX_HEIGHT - nextPoint.y * ratio,
                    nextPoint.x, LUMP_MAX_HEIGHT - nextPoint.y * ratio);

            canvas.drawPath(wavePath, lumpPaint);
        }
    }
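
setWaveData(), the entry point called from the capture listener in section 1.4, is not shown above; a plausible sketch (genSamplingPoint() is a hypothetical helper that rebuilds pointList for the wave style):

    public void setWaveData(byte[] data) {
        this.waveData = readyData(data);  // strip negative values, see above
        genSamplingPoint(waveData);       // hypothetical: rebuild pointList
        invalidate();                     // schedule onDraw()
    }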

2. Source code analysis

The music visualization feature uses Visualizer to capture the time-domain and frequency-domain data of the music being played and then renders that data, which produces the rhythm effect. The key steps are those in section 1.4, Capturing audio data, analyzed below:

2.1 Getting the AudioSessionId

Whenever MediaPlayer plays audio, an audio session id is set; it identifies the player so that the corresponding audio data can be captured. MediaPlayer's create() method contains the following code to set it (if it is not set, the system assigns one by default).

  1. Setting it:

    mp.setAudioSessionId(audioSessionId);

  2. Getting it:

    public native int getAudioSessionId();
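
For reference, an app can also generate and assign a session id itself (a sketch; AudioManager.generateAudioSessionId() exists from API 21):

    AudioManager am = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    int sessionId = am.generateAudioSessionId();
    MediaPlayer mp = new MediaPlayer();
    mp.setAudioSessionId(sessionId); // must be called before setDataSource()
    // the same sessionId can later be passed to new Visualizer(sessionId)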

2.2 Instantiation

Instantiating a Visualizer only requires the audioSession parameter.

    public Visualizer(int audioSession)
    throws UnsupportedOperationException, RuntimeException {
        int[] id = new int[1];

        synchronized (mStateLock) {
            mState = STATE_UNINITIALIZED;
            // native initialization
            int result = native_setup(new WeakReference<Visualizer>(this), audioSession, id,
                    ActivityThread.currentOpPackageName());
            if (result != SUCCESS && result != ALREADY_EXISTS) {
                Log.e(TAG, "Error code "+result+" when initializing Visualizer.");
                switch (result) {
                case ERROR_INVALID_OPERATION:
                    throw (new UnsupportedOperationException("Effect library not loaded"));
                default:
                    throw (new RuntimeException("Cannot initialize Visualizer engine, error: "
                            +result));
                }
            }
            mId = id[0];
            if (native_getEnabled()) {
                mState = STATE_ENABLED;
            } else {
                mState = STATE_INITIALIZED;
            }
        }
    }

On the native side, the corresponding JNI native_setup() creates the native Visualizer:

lpVisualizer = new Visualizer(String16(opPackageNameStr.c_str()),
                              0,
                              android_media_visualizer_effect_callback,
                              lpJniStorage,
                              (audio_session_t) sessionId);

The native Visualizer constructor simply calls initCaptureSize() to initialize the capture size:

uint32_t Visualizer::initCaptureSize()
{
    uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 2];
    effect_param_t *p = (effect_param_t *)buf32;

    p->psize = sizeof(uint32_t);
    p->vsize = sizeof(uint32_t);
    *(int32_t *)p->data = VISUALIZER_PARAM_CAPTURE_SIZE;
    status_t status = getParameter(p);

    if (status == NO_ERROR) {
        status = p->status;
    }

    uint32_t size = 0;
    if (status == NO_ERROR) {
        size = *((int32_t *)p->data + 1);
    }
    mCaptureSize = size;

    ALOGV("initCaptureSize size %d status %d", mCaptureSize, status);

    return size;
}

2.3 Setting parameters

  1. setCaptureSize

Call chain: Visualizer.java#setCaptureSize() -> (JNI) native_setCaptureSize() -> Visualizer.cpp#setCaptureSize()

status_t Visualizer::setCaptureSize(uint32_t size)
{
    // 1: validate the parameter (must be a power of two within range)
    if (size > VISUALIZER_CAPTURE_SIZE_MAX ||
        size < VISUALIZER_CAPTURE_SIZE_MIN ||
        popcount(size) != 1) {
        return BAD_VALUE;
    }

    Mutex::Autolock _l(mCaptureLock);
    // 2: check the state
    if (mEnabled) {
        return INVALID_OPERATION;
    }

    uint32_t buf32[sizeof(effect_param_t) / sizeof(uint32_t) + 2];
    effect_param_t *p = (effect_param_t *)buf32;

    p->psize = sizeof(uint32_t);
    p->vsize = sizeof(uint32_t);
    *(int32_t *)p->data = VISUALIZER_PARAM_CAPTURE_SIZE;
    *((int32_t *)p->data + 1)= size;
    status_t status = setParameter(p); // 3: set the parameter

    ALOGV("setCaptureSize size %d  status %d p->status %d", size, status, p->status);

    if (status == NO_ERROR) {
        status = p->status;
        if (status == NO_ERROR) {
            mCaptureSize = size; // 4: store the new capture size
        }
    }

    return status;
}
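
Note the popcount(size) != 1 check: the capture size must be a power of two within the supported range. At the Java level that means, for example:

    int[] range = Visualizer.getCaptureSizeRange(); // typically [128, 1024]
    visualizer.setCaptureSize(range[1]);  // 1024: valid, a power of two
    // visualizer.setCaptureSize(1000);   // rejected: not a power of two
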
  2. setDataCaptureListener
    public int setDataCaptureListener(OnDataCaptureListener listener,
            int rate, boolean waveform, boolean fft) {
        synchronized (mListenerLock) {
            mCaptureListener = listener;
        }
        if (listener == null) {
            // make sure capture callback is stopped in native code
            waveform = false;
            fft = false;
        }
        int status = native_setPeriodicCapture(rate, waveform, fft);// 1
        if (status == SUCCESS) {
            if ((listener != null) && (mNativeEventHandler == null)) {
                Looper looper;
                if ((looper = Looper.myLooper()) != null) {
                    mNativeEventHandler = new NativeEventHandler(this, looper); // 2
                } else if ((looper = Looper.getMainLooper()) != null) {
                    mNativeEventHandler = new NativeEventHandler(this, looper);
                } else {
                    mNativeEventHandler = null;
                    status = ERROR_NO_INIT;
                }
            }
        }
        return status;
    }
  3. Passing the parameters down to the native layer

Call chain: Visualizer.java#setDataCaptureListener() -> (JNI) native_setPeriodicCapture() -> android_media_Visualizer.cpp#android_media_setPeriodicCapture() -> Visualizer.cpp#setCaptureCallBack()

static jint
android_media_setPeriodicCapture(JNIEnv *env, jobject thiz, jint rate, jboolean jWaveform, jboolean jFft)
{
    sp<Visualizer> lpVisualizer = getVisualizer(env, thiz);
    if (lpVisualizer == 0) {
        return VISUALIZER_ERROR_NO_INIT;
    }
    VisualizerJniStorage* lpJniStorage = (VisualizerJniStorage *)env->GetLongField(thiz,
            fields.fidJniData);
    if (lpJniStorage == NULL) {
        return VISUALIZER_ERROR_NO_INIT;
    }

    ALOGV("setPeriodicCapture: rate %d, jWaveform %d jFft %d",
            rate,
            jWaveform,
            jFft);

    uint32_t flags = Visualizer::CAPTURE_CALL_JAVA;
    if (jWaveform) flags |= Visualizer::CAPTURE_WAVEFORM;
    if (jFft) flags |= Visualizer::CAPTURE_FFT;
    Visualizer::capture_cbk_t cbk = captureCallback;
    if (!jWaveform && !jFft) cbk = NULL;

    return translateError(lpVisualizer->setCaptureCallBack(cbk,
                                                &lpJniStorage->mCallbackData,
                                                flags,
                                                rate));
}
status_t Visualizer::setCaptureCallBack(capture_cbk_t cbk, void* user, uint32_t flags,
        uint32_t rate)
{
    if (rate > CAPTURE_RATE_MAX) {
        return BAD_VALUE;
    }
    Mutex::Autolock _l(mCaptureLock);

    if (mEnabled) {
        return INVALID_OPERATION;
    }

    if (mCaptureThread != 0) {
        mCaptureLock.unlock();
        mCaptureThread->requestExitAndWait();
        mCaptureLock.lock();
    }

    mCaptureThread.clear();
    mCaptureCallBack = cbk;
    mCaptureCbkUser = user;
    mCaptureFlags = flags;
    mCaptureRate = rate;

    if (cbk != NULL) {
        mCaptureThread = new CaptureThread(*this, rate, ((flags & CAPTURE_CALL_JAVA) != 0));
    }
    ALOGV("setCaptureCallBack() rate: %d thread %p flags 0x%08x",
            rate, mCaptureThread.get(), mCaptureFlags);
    return NO_ERROR;
}
  4. NativeEventHandler: delivering the time-domain and frequency-domain data back

After the native layer obtains the data, it calls the Java-layer postEventFromNative() method to pass the data back. The detailed flow is shown in section 2.4.

    //---------------------------------------------------------
    // Java methods called from the native side
    //--------------------
    @SuppressWarnings("unused")
    private static void postEventFromNative(Object effect_ref,
            int what, int arg1, int arg2, Object obj) {
        Visualizer visu = (Visualizer)((WeakReference)effect_ref).get();
        if (visu == null) {
            return;
        }

        if (visu.mNativeEventHandler != null) {
            Message m = visu.mNativeEventHandler.obtainMessage(what, arg1, arg2, obj);
            visu.mNativeEventHandler.sendMessage(m);
        }

    }
        private void handleCaptureMessage(Message msg) {
            OnDataCaptureListener l = null;
            synchronized (mListenerLock) {
                l = mVisualizer.mCaptureListener;
            }

            if (l != null) {
                byte[] data = (byte[])msg.obj;
                int samplingRate = msg.arg1;

                switch(msg.what) {
                case NATIVE_EVENT_PCM_CAPTURE:
                    l.onWaveFormDataCapture(mVisualizer, data, samplingRate);
                    break;
                case NATIVE_EVENT_FFT_CAPTURE:
                    l.onFftDataCapture(mVisualizer, data, samplingRate);
                    break;
                default:
                    Log.e(TAG,"Unknown native event in handleCaptureMessge: "+msg.what);
                    break;
                }
            }
        }

2.4 Starting capture

After the parameters are set, setEnabled(true) starts capturing data.


(Figure: capture flow, 捕获流程.png)

status_t Visualizer::setEnabled(bool enabled)
{
    Mutex::Autolock _l(mCaptureLock);

    sp<CaptureThread> t = mCaptureThread;
    if (t != 0) {
        if (enabled) {
            if (t->exitPending()) {
                if (t->requestExitAndWait() == WOULD_BLOCK) {
                    ALOGE("Visualizer::enable() called from thread");
                    return INVALID_OPERATION;
                }
            }
        }
        t->mLock.lock();
    }

    status_t status = AudioEffect::setEnabled(enabled);

    if (t != 0) {
        if (enabled && status == NO_ERROR) {
            t->run("Visualizer"); // 1: 线程开始捕获数据
        } else {
            t->requestExit(); // 2: the thread stops capturing
        }
    }

    if (t != 0) {
        t->mLock.unlock();
    }

    return status;
}
bool Visualizer::CaptureThread::threadLoop()
{
    ALOGV("CaptureThread %p enter", this);
    while (!exitPending()) // checks whether requestExit() has been called
    {
        usleep(mSleepTimeUs);
        mReceiver.periodicCapture();
    }
    ALOGV("CaptureThread %p exiting", this);
    return false;
}
void Visualizer::periodicCapture()
{
    Mutex::Autolock _l(mCaptureLock);
    ALOGV("periodicCapture() %p mCaptureCallBack %p mCaptureFlags 0x%08x",
            this, mCaptureCallBack, mCaptureFlags);
    if (mCaptureCallBack != NULL &&
        (mCaptureFlags & (CAPTURE_WAVEFORM|CAPTURE_FFT)) &&
        mCaptureSize != 0) {
        uint8_t waveform[mCaptureSize];
        status_t status = getWaveForm(waveform); // 1: get the time-domain data
        if (status != NO_ERROR) {
            return;
        }
        uint8_t fft[mCaptureSize];
        if (mCaptureFlags & CAPTURE_FFT) {
            status = doFft(fft, waveform); // 2: get the frequency-domain data
        }
        if (status != NO_ERROR) {
            return;
        }
        uint8_t *wavePtr = NULL;
        uint8_t *fftPtr = NULL;
        uint32_t waveSize = 0;
        uint32_t fftSize = 0;
        if (mCaptureFlags & CAPTURE_WAVEFORM) {
            wavePtr = waveform;
            waveSize = mCaptureSize;
        }
        if (mCaptureFlags & CAPTURE_FFT) {
            fftPtr = fft;
            fftSize = mCaptureSize;
        }
        mCaptureCallBack(mCaptureCbkUser, waveSize, wavePtr, fftSize, fftPtr, mSampleRate); // 3: deliver the data back to the Java layer
    }
}
status_t Visualizer::getWaveForm(uint8_t *waveform)
{
    if (waveform == NULL) {
        return BAD_VALUE;
    }
    if (mCaptureSize == 0) {
        return NO_INIT;
    }

    status_t status = NO_ERROR;
    if (mEnabled) {
        uint32_t replySize = mCaptureSize;
        status = command(VISUALIZER_CMD_CAPTURE, 0, NULL, &replySize, waveform); // grab the live PCM stream
        ALOGV("getWaveForm() command returned %d", status);
        if ((status == NO_ERROR) && (replySize == 0)) {
            status = NOT_ENOUGH_DATA;
        }
    } else {
        ALOGV("getWaveForm() disabled");
        memset(waveform, 0x80, mCaptureSize);
    }
    return status;
}

The command lands in the visualizer effect's command handler, in its VISUALIZER_CMD_CAPTURE case:

    case VISUALIZER_CMD_CAPTURE: {
        uint32_t captureSize = pContext->mCaptureSize;
        if (pReplyData == NULL || replySize == NULL || *replySize != captureSize) {
            ALOGV("VISUALIZER_CMD_CAPTURE() error *replySize %" PRIu32 " captureSize %" PRIu32,
                    *replySize, captureSize);
            return -EINVAL;
        }
        if (pContext->mState == VISUALIZER_STATE_ACTIVE) {
            const uint32_t deltaMs = Visualizer_getDeltaTimeMsFromUpdatedTime(pContext);

            // if audio framework has stopped playing audio although the effect is still
            // active we must clear the capture buffer to return silence
            if ((pContext->mLastCaptureIdx == pContext->mCaptureIdx) &&
                    (pContext->mBufferUpdateTime.tv_sec != 0) &&
                    (deltaMs > MAX_STALL_TIME_MS)) {
                    ALOGV("capture going to idle");
                    pContext->mBufferUpdateTime.tv_sec = 0;
                    memset(pReplyData, 0x80, captureSize);
            } else {
                int32_t latencyMs = pContext->mLatency;
                latencyMs -= deltaMs;
                if (latencyMs < 0) {
                    latencyMs = 0;
                }
                uint32_t deltaSmpl = captureSize
                        + pContext->mConfig.inputCfg.samplingRate * latencyMs / 1000;

                // large sample rate, latency, or capture size, could cause overflow.
                // do not offset more than the size of buffer.
                if (deltaSmpl > CAPTURE_BUF_SIZE) {
                    android_errorWriteLog(0x534e4554, "31781965");
                    deltaSmpl = CAPTURE_BUF_SIZE;
                }

                int32_t capturePoint = pContext->mCaptureIdx - deltaSmpl;
                // a negative capturePoint means we wrap the buffer.
                if (capturePoint < 0) {
                    uint32_t size = -capturePoint;
                    if (size > captureSize) {
                        size = captureSize;
                    }
                    memcpy(pReplyData,
                           pContext->mCaptureBuf + CAPTURE_BUF_SIZE + capturePoint,
                           size);
                    pReplyData = (char *)pReplyData + size;
                    captureSize -= size;
                    capturePoint = 0;
                }
                memcpy(pReplyData,
                       pContext->mCaptureBuf + capturePoint,
                       captureSize);
            }

            pContext->mLastCaptureIdx = pContext->mCaptureIdx;
        } else {
            memset(pReplyData, 0x80, captureSize);
        }

        } break;

Silence, incidentally, is written as 0x80 because the captured waveform is unsigned 8-bit PCM centered on 128. The frequency-domain data is then obtained by running an FFT over the time-domain data:

status_t Visualizer::doFft(uint8_t *fft, uint8_t *waveform)
{
    int32_t workspace[mCaptureSize >> 1];
    int32_t nonzero = 0;

    for (uint32_t i = 0; i < mCaptureSize; i += 2) {
        workspace[i >> 1] =
                ((waveform[i] ^ 0x80) << 24) | ((waveform[i + 1] ^ 0x80) << 8);
        nonzero |= workspace[i >> 1];
    }

    if (nonzero) {
        fixed_fft_real(mCaptureSize >> 1, workspace);
    }

    for (uint32_t i = 0; i < mCaptureSize; i += 2) {
        short tmp = workspace[i >> 1] >> 21;
        while (tmp > 127 || tmp < -128) tmp >>= 1;
        fft[i] = tmp;
        tmp = workspace[i >> 1];
        tmp >>= 5;
        while (tmp > 127 || tmp < -128) tmp >>= 1;
        fft[i + 1] = tmp;
    }

    return NO_ERROR;
}
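
For completeness: per the Visualizer.getFft() documentation, the fft bytes produced above are laid out as fft[0] = real part at DC, fft[1] = real part at the Nyquist frequency, followed by (real, imaginary) pairs for ascending frequencies. A sketch of converting them into per-band magnitudes for display (app-layer code, not from the original demo):

    private static float[] toMagnitudes(byte[] fft) {
        int n = fft.length;
        float[] magnitudes = new float[n / 2 + 1];
        magnitudes[0] = Math.abs(fft[0]);     // DC, real only
        magnitudes[n / 2] = Math.abs(fft[1]); // Nyquist, real only
        for (int k = 1; k < n / 2; k++) {
            int re = fft[2 * k];
            int im = fft[2 * k + 1];
            magnitudes[k] = (float) Math.hypot(re, im);
        }
        return magnitudes;
    }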

mCaptureCallBack is the captureCallback() in android_media_Visualizer.cpp that was installed while setDataCaptureListener() passed the parameters down; its main code is:

    if (waveformSize != 0 && waveform != NULL) {
        jbyteArray jArray;

        ensureArraySize(env, &callbackInfo->waveform_data, waveformSize);
        jArray = callbackInfo->waveform_data;

        if (jArray != NULL) {
            jbyte *nArray = env->GetByteArrayElements(jArray, NULL);
            memcpy(nArray, waveform, waveformSize);
            env->ReleaseByteArrayElements(jArray, nArray, 0);
            env->CallStaticVoidMethod(
                callbackInfo->visualizer_class,
                fields.midPostNativeEvent,
                callbackInfo->visualizer_ref,
                NATIVE_EVENT_PCM_CAPTURE,
                samplingrate,
                0,
                jArray);
        }
    }

    if (fftSize != 0 && fft != NULL) {
        jbyteArray jArray;

        ensureArraySize(env, &callbackInfo->fft_data, fftSize);
        jArray = callbackInfo->fft_data;

        if (jArray != NULL) {
            jbyte *nArray = env->GetByteArrayElements(jArray, NULL);
            memcpy(nArray, fft, fftSize);
            env->ReleaseByteArrayElements(jArray, nArray, 0);
            env->CallStaticVoidMethod(
                callbackInfo->visualizer_class,
                fields.midPostNativeEvent,
                callbackInfo->visualizer_ref,
                NATIVE_EVENT_FFT_CAPTURE,
                samplingrate,
                0,
                jArray);
        }
    }

This corresponds to calling the Java-layer postEventFromNative() method, whose method id is resolved as:

// Get the postEvent method
fields.midPostNativeEvent = env->GetStaticMethodID(
        fields.clazzEffect,
        "postEventFromNative", "(Ljava/lang/Object;IIILjava/lang/Object;)V");

2.5 The role of the AudioSessionId

As shown in section 2.2, instantiating a Visualizer requires the audioSession parameter. The sessionId is first used to create an AudioEffect, which provides the audio effect processing:

Visualizer::Visualizer (const String16& opPackageName,
         int32_t priority,
         effect_callback_t cbf,
         void* user,
         audio_session_t sessionId)
    :   AudioEffect(SL_IID_VISUALIZATION, opPackageName, NULL, priority, cbf, user, sessionId),
        mCaptureRate(CAPTURE_RATE_DEF),
        mCaptureSize(CAPTURE_SIZE_DEF),
        mSampleRate(44100000),
        mScalingMode(VISUALIZER_SCALING_MODE_NORMALIZED),
        mMeasurementMode(MEASUREMENT_MODE_NONE),
        mCaptureCallBack(NULL),
        mCaptureCbkUser(NULL)

The Effect is then created through audioFlinger:

    iEffect = audioFlinger->createEffect((effect_descriptor_t *)&mDescriptor,
            mIEffectClient, priority, io, mSessionId, mOpPackageName, mClientPid,
            &mStatus, &mId, &enabled);
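
One practical note: per the Visualizer documentation, passing session id 0 attaches the visualizer to the global audio output mix instead of a single player, which is also why the RECORD_AUDIO permission from section 1.2 is needed. A sketch:

    Visualizer mixVisualizer = new Visualizer(0); // 0 == output mix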