Notes on Mobile Streaming Development for Android

Android Camera Real-Time Data Capture and Encoding via MediaCodec

2016-12-13

A rant:

The usual flow is much the same everywhere, but sometimes I still forget the details of a particular step. Also note, friends, that the Camera class is no longer officially recommended (it is android.hardware.camera2 now). Unfortunately our company's project was built on Camera, and Camera2 appears to require API 21+. Word of Android 7 is already in the air, yet 6.0, out for more than three months now, seems to hold under 3% market share. When will we get standardization and a unified spec? Being an Android developer is not easy. Sigh~

The flow for capturing real-time camera data on Android and hardware-encoding it

/*
 * The rough flow, from grabbing frames, through encoding, to handling the
 * encoded output:
 */

/* 1. Grab the raw frame */
@Override
public void onPreviewFrame(byte[] onPreviewData, Camera camera) {
    /* Here you can rotate or scale onPreviewData, or convert between YUV
     * formats, e.g. YUV420P (YV12) or YUV420SP (NV21/NV12); open-source
     * libraries such as libyuv or ffmpeg can help with this.
     */
    getRawFrame(onPreviewData);
    /* Then hand onPreviewData back to the camera's callback buffer queue */
    camera.addCallbackBuffer(onPreviewData);
}

private void getRawFrame(byte[] rawFrame) { encodeFrame(rawFrame); }

/* 2. Encode */
private byte[] encodeFrame(byte[] inputData) { /* feed inputData to the encoder (see the MediaCodec section below) */ return encodedData; }

/* 3. With the encoded data in hand, do whatever the task needs: save it to a
 * local file, or push it to a stream (pseudocode):
 */
Operation ? send(encodedData) : save(encodedData)
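For the YUV conversion mentioned in step 1, here is a minimal sketch in plain Java (libyuv is much faster for production use), assuming NV21 preview frames and an encoder that wants NV12; the two formats share the Y plane and differ only in the order of the interleaved chroma bytes:

// Minimal sketch: convert an NV21 frame to NV12 in place by swapping
// each interleaved V/U chroma pair; the Y plane needs no change.
private static void nv21ToNv12InPlace(byte[] frame, int width, int height) {
    int ySize = width * height;
    for (int i = ySize; i + 1 < frame.length; i += 2) {
        byte v = frame[i];
        frame[i] = frame[i + 1]; // NV12 stores U first,
        frame[i + 1] = v;        // then V
    }
}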

In the flow sketch above, onPreviewFrame is the preview callback of the Camera class. Before using Camera, initialize the SurfaceView and SurfaceHolder and implement the corresponding interfaces:

// init the preview surface
private void initView() {
    SurfaceView surfaceView = (SurfaceView) findViewById(R.id.record_surface);
    SurfaceHolder surfaceHolder = surfaceView.getHolder();
    surfaceHolder.addCallback(this);
    // deprecated and ignored since API 11; kept only for pre-Honeycomb devices
    surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}

@Override

public void surfaceCreated(SurfaceHolder holder) {

openCamera(holder); // open the camera

}

@Override

public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {

}

@Override

public void surfaceDestroyed(SurfaceHolder holder) {

releaseCamera(); // remember to release the camera in surfaceDestroyed

}

private void openCamera(SurfaceHolder holder) {
    releaseCamera();
    try {
        camera = getCamera(Camera.CameraInfo.CAMERA_FACING_BACK); // pick the front or back camera as needed
    } catch (Exception e) {
        camera = null;
        if (AppContext.isDebugMode) {
            e.printStackTrace();
        }
    }
    if (camera != null) {
        try {
            // Note: addCallbackBuffer (see the flow sketch above) only takes
            // effect if you register with setPreviewCallbackWithBuffer(this)
            // and pre-add a buffer of the right size.
            camera.setPreviewCallback(this);
            // Official method to rotate the displayed preview; it does not
            // affect the raw data delivered to onPreviewFrame.
            camera.setDisplayOrientation(90);
            if (parameters == null) {
                parameters = camera.getParameters();
            }
            parameters.setPreviewFormat(ImageFormat.NV21); // common formats: NV21 / YV12
            // Many other parameters can be set too, but first check that the
            // current camera supports the value (see the helper sketched
            // below), or setParameters may fail.
            parameters.setPreviewSize(width, height);
            camera.setParameters(parameters);
            camera.setPreviewDisplay(holder);
            camera.startPreview();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
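The "check what the camera supports" comment above deserves a concrete example. A minimal sketch (the helper name is mine, not from the original) that picks the supported preview size closest to the request:

// Minimal sketch: do not assume arbitrary preview sizes are accepted;
// pick the supported size closest to the requested one.
private static Camera.Size chooseBestPreviewSize(Camera.Parameters params, int reqWidth, int reqHeight) {
    Camera.Size best = null;
    int bestDiff = Integer.MAX_VALUE;
    for (Camera.Size s : params.getSupportedPreviewSizes()) {
        int diff = Math.abs(s.width - reqWidth) + Math.abs(s.height - reqHeight);
        if (diff < bestDiff) {
            bestDiff = diff;
            best = s;
        }
    }
    return best;
}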

@TargetApi(9)

private Camera getCamera(int cameraType) {

Camera camera = null;

try {

camera = Camera.open(cameraType);

} catch (Exception e) {

e.printStackTrace();

}

return camera; // returns null if camera is unavailable

}

private synchronized void releaseCamera() {

if (camera != null) {

try {

camera.setPreviewCallback(null);

} catch (Exception e) {

e.printStackTrace();

}

try {

camera.stopPreview();

} catch (Exception e) {

e.printStackTrace();

}

try {

camera.release();

} catch (Exception e) {

e.printStackTrace();

}

camera = null;

}

}

The MediaCodec hardware-encoding part:

For this part I recommend the implementation in the SRS open-source project: https://github.com/ossrs/srs-sea.git

// video device.

private Camera camera;

private MediaCodec vencoder;

private MediaCodecInfo vmci;

private MediaCodec.BufferInfo vebi;

private byte[] vbuffer;

// video camera settings.

private Camera.Size vsize;

private int vcolor;

private int vbitrate_kbps = 300;

private final static int VFPS = 20;

private final static int VGOP = 5;

private final static int VWIDTH = 640;

private final static int VHEIGHT = 480;

// referenced by the excerpts below:
private final static String VCODEC = "video/avc";
private final static String TAG = "VideoEncoder"; // log tag (name is ours, not SRS's)

/* First, initialize the MediaCodec configuration */

private void initMediaCodec() {
    // choose the right vencoder, prefer qcom then google.
    vcolor = chooseVideoEncoder();
    // the vencoder turns yuv frames into an h.264 es stream.
    // requires sdk level 16+, Android 4.1, 4.1.1, the JELLY_BEAN
    try {
        vencoder = MediaCodec.createByCodecName(vmci.getName());
    } catch (IOException e) {
        Log.e(TAG, "create vencoder failed.");
        e.printStackTrace();
        return;
    }
    vebi = new MediaCodec.BufferInfo();
    // setup the vencoder.
    // @see https://developer.android.com/reference/android/media/MediaCodec.html
    // (MediaFormat.MIMETYPE_VIDEO_AVC is API 21+, so use the string constant here)
    MediaFormat vformat = MediaFormat.createVideoFormat(VCODEC, vsize.width, vsize.height);
    vformat.setInteger(MediaFormat.KEY_COLOR_FORMAT, vcolor);
    vformat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0);
    vformat.setInteger(MediaFormat.KEY_BIT_RATE, 1000 * vbitrate_kbps);
    vformat.setInteger(MediaFormat.KEY_FRAME_RATE, VFPS);
    vformat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, VGOP);
    Log.i(TAG, String.format("vencoder %s, color=%d, bitrate=%d, fps=%d, gop=%d, size=%dx%d",
            vmci.getName(), vcolor, vbitrate_kbps, VFPS, VGOP, vsize.width, vsize.height));
    // the following error can be ignored:
    // 1. the storeMetaDataInBuffers error:
    //      [OMX.qcom.video.encoder.avc] storeMetaDataInBuffers (output) failed w/ err -2147483648
    //      @see http://bigflake.com/mediacodec/#q12
    vencoder.configure(vformat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    vencoder.start();
}
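The excerpt stops before the actual encode call. As a rough idea of what step 2 of the flow looks like with these fields, here is a minimal sketch in the pre-API-21 synchronous buffer-array style (the method and handler names are mine, not SRS's):

// Minimal sketch: push one YUV frame into vencoder and drain any ready
// H.264 output (pre-API-21 synchronous buffer-array style).
private void encodeYuvFrame(byte[] yuvFrame, long ptsUs) {
    ByteBuffer[] inBuffers = vencoder.getInputBuffers();
    ByteBuffer[] outBuffers = vencoder.getOutputBuffers();

    int inIndex = vencoder.dequeueInputBuffer(-1); // block until an input buffer is free
    if (inIndex >= 0) {
        ByteBuffer bb = inBuffers[inIndex];
        bb.clear();
        bb.put(yuvFrame, 0, yuvFrame.length);
        vencoder.queueInputBuffer(inIndex, 0, yuvFrame.length, ptsUs, 0);
    }

    for (int outIndex = vencoder.dequeueOutputBuffer(vebi, 0); outIndex >= 0;
            outIndex = vencoder.dequeueOutputBuffer(vebi, 0)) {
        ByteBuffer bb = outBuffers[outIndex];
        onEncodedFrame(bb, vebi); // hypothetical handler: save to .h264 or push to a stream
        vencoder.releaseOutputBuffer(outIndex, false);
    }
}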

// size in bytes of one YV12 frame (android YUV) for vbuffer, @see below:

// https://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat(int)

// https://developer.android.com/reference/android/graphics/ImageFormat.html#YV12

private int getYuvBuffer(int width, int height) {

// stride = ALIGN(width, 16)

int stride = (int) Math.ceil(width / 16.0) * 16;

// y_size = stride * height

int y_size = stride * height;

// c_stride = ALIGN(stride/2, 16)

int c_stride = (int) Math.ceil(width / 32.0) * 16;

// c_size = c_stride * height/2

int c_size = c_stride * height / 2;

// size = y_size + c_size * 2

return y_size + c_size * 2;

}
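A quick check with the constants above: for 640x480, stride = 640, y_size = 307200, c_stride = 320 and c_size = 76800, so the method returns 460800 bytes, i.e. 640 * 480 * 3 / 2. That size is what you would hand to addCallbackBuffer when using the with-buffer preview callback; a minimal sketch:

// Minimal sketch: allocate one reusable preview buffer of the computed
// size and register the with-buffer callback so addCallbackBuffer works.
vbuffer = new byte[getYuvBuffer(VWIDTH, VHEIGHT)];
camera.addCallbackBuffer(vbuffer);
camera.setPreviewCallbackWithBuffer(this);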

// choose the video encoder by name.

private MediaCodecInfo chooseVideoEncoder(String name, MediaCodecInfo def) {

int nbCodecs = MediaCodecList.getCodecCount();

for (int i = 0; i < nbCodecs; i++) {

MediaCodecInfo mci = MediaCodecList.getCodecInfoAt(i);

if (!mci.isEncoder()) {

continue;

}

String[] types = mci.getSupportedTypes();

for (int j = 0; j < types.length; j++) {

if (types[j].equalsIgnoreCase(VCODEC)) {

//Log.i(TAG, String.format("vencoder %s types: %s", mci.getName(), types[j]));

if (name == null) {

return mci;

}

if (mci.getName().contains(name)) {

return mci;

}

}

}

}

return def;

}

// choose the right supported color format. @see below:

// https://developer.android.com/reference/android/media/MediaCodecInfo.html

// https://developer.android.com/reference/android/media/MediaCodecInfo.CodecCapabilities.html

private int chooseVideoEncoder() {

// choose the encoder "video/avc":

//      1. select one whose type matches.

//      2. prefer google avc.

//      3. prefer qcom avc.

vmci = chooseVideoEncoder(null, null);

//vmci = chooseVideoEncoder("google", vmci);

//vmci = chooseVideoEncoder("qcom", vmci);

int matchedColorFormat = 0;

MediaCodecInfo.CodecCapabilities cc = vmci.getCapabilitiesForType(VCODEC);

for (int i = 0; i < cc.colorFormats.length; i++) {

int cf = cc.colorFormats[i];

Log.i(TAG, String.format("vencoder %s supports color fomart 0x%x(%d)", vmci.getName(), cf, cf));

// choose YUV for h.264, prefer the bigger one.

// corresponding to the color space transform in onPreviewFrame

if (cf >= MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar && cf <= MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {

if (cf > matchedColorFormat) {

matchedColorFormat = cf;

}

}

}

for (int i = 0; i < cc.profileLevels.length; i++) {

MediaCodecInfo.CodecProfileLevel pl = cc.profileLevels[i];

Log.i(TAG, String.format("vencoder %s support profile %d, level %d", vmci.getName(), pl.profile, pl.level));

}

Log.i(TAG, String.format("vencoder %s choose color format 0x%x(%d)", vmci.getName(), matchedColorFormat, matchedColorFormat));

return matchedColorFormat;

}
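Since the loop above prefers the larger constant, it picks COLOR_FormatYUV420SemiPlanar (21) over COLOR_FormatYUV420Planar (19) when both are offered. If only the planar format is available, the NV21 preview data needs deinterleaving rather than the in-place byte swap shown earlier; a minimal sketch (the helper name is mine):

// Minimal sketch: convert NV21 preview data to I420
// (COLOR_FormatYUV420Planar) by deinterleaving the VU plane.
private static void nv21ToI420(byte[] nv21, byte[] i420, int width, int height) {
    int ySize = width * height;
    int qSize = ySize / 4;
    System.arraycopy(nv21, 0, i420, 0, ySize);         // Y plane is shared
    for (int i = 0; i < qSize; i++) {
        i420[ySize + i] = nv21[ySize + 2 * i + 1];     // U plane
        i420[ySize + qSize + i] = nv21[ySize + 2 * i]; // V plane
    }
}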

The code above is excerpted from SRS (apart from the sketches marked as mine) and is for reference only.

One more project worth recommending: it saves the encoded data to a local .h264 file, which makes analysis easy. My fork: https://github.com/eterrao/MediaCodecEncodeH264.git

The original author's repo: https://github.com/sszhangpengfei/MediaCodecEncodeH264.git

(My thanks to everyone with an open-source spirit; because of you, the road of learning and growing has had far fewer potholes!)

In practice the MediaCodec steps are all much the same, but note that the encoder's data-handling mechanism changed after API 20. The official guidance is as follows:

Link: developer.android.com/reference/android/media/MediaCodec.html

Excerpted from the official API documentation:

Depending on the API version, you can process data in three ways:

Asynchronous Processing using Buffers

Since LOLLIPOP, the preferred method is to process data asynchronously by setting a callback before calling configure. Asynchronous mode changes the state transitions slightly, because you must call start() after flush() to transition the codec to the Running sub-state and start receiving input buffers. Similarly, upon an initial call to start the codec will move directly to the Running sub-state and start passing available input buffers via the callback.

MediaCodec is typically used like this in asynchronous mode:
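(The documentation's code sample did not survive in this excerpt; below is a condensed sketch of the same callback-driven pattern, where fillInputBuffer, consumeOutputBuffer and computePtsUs are hypothetical helpers:)

// Condensed sketch of LOLLIPOP+ asynchronous use: install the callback
// before configure(); the framework then pushes buffer indices to you.
codec.setCallback(new MediaCodec.Callback() {
    @Override
    public void onInputBufferAvailable(MediaCodec mc, int index) {
        ByteBuffer in = mc.getInputBuffer(index);
        int bytesFilled = fillInputBuffer(in); // hypothetical: copy one frame in
        mc.queueInputBuffer(index, 0, bytesFilled, computePtsUs(), 0);
    }
    @Override
    public void onOutputBufferAvailable(MediaCodec mc, int index, MediaCodec.BufferInfo info) {
        ByteBuffer out = mc.getOutputBuffer(index);
        consumeOutputBuffer(out, info); // hypothetical: save or send
        mc.releaseOutputBuffer(index, false);
    }
    @Override
    public void onOutputFormatChanged(MediaCodec mc, MediaFormat format) { }
    @Override
    public void onError(MediaCodec mc, MediaCodec.CodecException e) { }
});
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
codec.start();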

Synchronous Processing using Buffers

Since LOLLIPOP, you should retrieve input and output buffers using getInput/OutputBuffer(int) and/or getInput/OutputImage(int) even when using the codec in synchronous mode. This allows certain optimizations by the framework, e.g. when processing dynamic content. This optimization is disabled if you call getInput/OutputBuffers().

Note: do not mix the methods of using buffers and buffer arrays at the same time. Specifically, only call getInput/OutputBuffers directly after start() or after having dequeued an output buffer ID with the value of INFO_OUTPUT_FORMAT_CHANGED.

MediaCodec is typically used like this in synchronous mode:
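(Again the sample is missing from this excerpt; a condensed sketch of the LOLLIPOP+ synchronous loop, with TIMEOUT_US and the same hypothetical helpers as above:)

// Condensed sketch of LOLLIPOP+ synchronous use: dequeue an index, then
// look the buffer up with getInputBuffer(int)/getOutputBuffer(int)
// instead of the deprecated buffer arrays.
int inIndex = codec.dequeueInputBuffer(TIMEOUT_US);
if (inIndex >= 0) {
    ByteBuffer in = codec.getInputBuffer(inIndex);
    int bytesFilled = fillInputBuffer(in); // hypothetical helper
    codec.queueInputBuffer(inIndex, 0, bytesFilled, computePtsUs(), 0);
}
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int outIndex = codec.dequeueOutputBuffer(info, TIMEOUT_US);
if (outIndex >= 0) {
    ByteBuffer out = codec.getOutputBuffer(outIndex);
    consumeOutputBuffer(out, info); // hypothetical helper
    codec.releaseOutputBuffer(outIndex, false);
}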

Synchronous Processing using Buffer Arrays (deprecated)

In versions KITKAT_WATCH and before, the set of input and output buffers is represented by ByteBuffer[] arrays. After a successful call to start(), retrieve the buffer arrays using getInput/OutputBuffers(). Use the buffer IDs as indices into these arrays (when non-negative), as demonstrated in the sample below [not reproduced here; the pre-API-21 loop sketched after initMediaCodec above uses this style]. Note that there is no inherent correlation between the size of the arrays and the number of input and output buffers used by the system, although the array size provides an upper bound.

Related references:

Android real-time streaming: a thousand lines of Java, no JNI, 0.8-3 s latency, a strong mobile showing

Android hardware-encoding H.264 with MediaCodec

MediaMuxer and MediaCodec examples on Android - audio+video

http://bigflake.com/mediacodec/

Original post: http://www.cnblogs.com/raomengyang/p/5138023.html
