A Summary of Asynchronous Camera Optimization Issues on Android
2020-09-25 · PC8067
1. onPreviewFrame() runs on the same thread that called open(int)

First, look at the declaration of the PreviewCallback interface:
public interface PreviewCallback
{
    /**
     * Called as preview frames are displayed. This callback is invoked
     * on the event thread {@link #open(int)} was called from.
     *
     * <p>If using the {@link android.graphics.ImageFormat#YV12} format,
     * refer to the equations in {@link Camera.Parameters#setPreviewFormat}
     * for the arrangement of the pixel data in the preview callback
     * buffers.
     *
     * @param data the contents of the preview frame in the format defined
     *  by {@link android.graphics.ImageFormat}, which can be queried
     *  with {@link android.hardware.Camera.Parameters#getPreviewFormat()}.
     *  If {@link android.hardware.Camera.Parameters#setPreviewFormat(int)}
     *  is never called, the default will be the YCbCr_420_SP
     *  (NV21) format.
     * @param camera the Camera service object.
     */
    void onPreviewFrame(byte[] data, Camera camera);
};
Reading the framework code shows that onPreviewFrame is invoked when EventHandler handles the CAMERA_MSG_PREVIEW_FRAME message:
private class EventHandler extends Handler
{
    private final Camera mCamera;

    @UnsupportedAppUsage
    public EventHandler(Camera c, Looper looper) {
        super(looper);
        mCamera = c;
    }

    @Override
    public void handleMessage(Message msg) {
        switch(msg.what) {
        case CAMERA_MSG_PREVIEW_FRAME:
            PreviewCallback pCb = mPreviewCallback;
            if (pCb != null) {
                if (mOneShot) {
                    // Clear the callback variable before the callback
                    // in case the app calls setPreviewCallback from
                    // the callback function
                    mPreviewCallback = null;
                } else if (!mWithBuffer) {
                    // We're faking the camera preview mode to prevent
                    // the app from being flooded with preview frames.
                    // Set to oneshot mode again.
                    setHasPreviewCallback(true, false);
                }
                pCb.onPreviewFrame((byte[])msg.obj, mCamera);
            }
            return;
        // ... handling of other camera messages elided ...
        }
    }
}
Next, look at how EventHandler mEventHandler is initialized:
public static Camera open(int cameraId) {
    return new Camera(cameraId);
}

Camera(int cameraId) {
    int err = cameraInitNormal(cameraId);
    if (checkInitErrors(err)) {
        if (err == -EACCES) {
            throw new RuntimeException("Fail to connect to camera service");
        } else if (err == -ENODEV) {
            throw new RuntimeException("Camera initialization failed");
        }
        // Should never hit this.
        throw new RuntimeException("Unknown camera error");
    }
    initAppOps();
}

private int cameraInitNormal(int cameraId) {
    return cameraInitVersion(cameraId, CAMERA_HAL_API_VERSION_NORMAL_CONNECT);
}

private int cameraInitVersion(int cameraId, int halVersion) {
    mShutterCallback = null;
    mRawImageCallback = null;
    mJpegCallback = null;
    mPreviewCallback = null;
    mPostviewCallback = null;
    mUsingPreviewAllocation = false;
    mZoomListener = null;

    Looper looper;
    if ((looper = Looper.myLooper()) != null) {
        mEventHandler = new EventHandler(this, looper);
    } else if ((looper = Looper.getMainLooper()) != null) {
        mEventHandler = new EventHandler(this, looper);
    } else {
        mEventHandler = null;
    }

    return native_setup(new WeakReference<Camera>(this), cameraId, halVersion,
            ActivityThread.currentOpPackageName());
}
Therefore, onPreviewFrame() is not delivered on an arbitrary Thread but on a Looper thread: if open(int) is called from a thread that has its own Looper (for example a HandlerThread), the callback arrives on that thread; if it is called from a plain Thread with no Looper, the callback falls back to the main Looper. Note that the render thread inside GLSurfaceView is a plain Thread without a Looper, so opening the camera there still delivers callbacks on the main thread.
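A minimal sketch of moving the callback off the main thread by opening the camera from a HandlerThread (legacy android.hardware.Camera API; error handling omitted):

HandlerThread cameraThread = new HandlerThread("CameraThread");
cameraThread.start();
Handler cameraHandler = new Handler(cameraThread.getLooper());
cameraHandler.post(new Runnable() {
    @Override
    public void run() {
        // open(int) runs on a Looper thread here, so cameraInitVersion()
        // binds mEventHandler to this Looper instead of the main one.
        Camera camera = Camera.open(0);
        camera.setPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] data, Camera cam) {
                // Invoked on "CameraThread", not on the main thread.
            }
        });
    }
});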
2. setPreviewDisplay(SurfaceHolder holder) vs. setPreviewTexture(SurfaceTexture surfaceTexture)

One of the two must be set; otherwise onPreviewFrame() never receives any data.
- setPreviewDisplay(SurfaceHolder holder) is for SurfaceView;
- setPreviewTexture(SurfaceTexture surfaceTexture) is for GLSurfaceView and TextureView;
- with a standalone SurfaceTexture, frames can be captured with no on-screen preview at all (see the sketch after this list):

private static final int MAGIC_TEXTURE_ID = 10; // Trick used by OpenCV. The value appears to be arbitrary, since this SurfaceTexture is never attached to a GL context and the texture is never sampled.
SurfaceTexture surfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
mCameraDevice.setPreviewTexture(surfaceTexture);
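Building on that snippet, a minimal sketch of the no-preview setup combined with setPreviewCallbackWithBuffer(), which reuses one buffer instead of allocating per frame (the name mCameraDevice is illustrative; the IOException from setPreviewTexture must be handled in real code):

Camera mCameraDevice = Camera.open(0);
SurfaceTexture dummyTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
mCameraDevice.setPreviewTexture(dummyTexture); // throws IOException
Camera.Size size = mCameraDevice.getParameters().getPreviewSize();
// NV21 is 12 bits per pixel: width * height * 3 / 2 bytes per frame.
byte[] buffer = new byte[size.width * size.height * 3 / 2];
mCameraDevice.addCallbackBuffer(buffer);
mCameraDevice.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
    @Override
    public void onPreviewFrame(byte[] data, Camera cam) {
        // Process the frame, then return the buffer for reuse.
        cam.addCallbackBuffer(data);
    }
});
mCameraDevice.startPreview();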
3. OpenGL shader implementations for NV21 and I420
3.1 Background: glTexImage2D
/**
 * @param target
 *     The target texture of the active texture unit. Must be GL_TEXTURE_2D,
 *     GL_TEXTURE_CUBE_MAP_POSITIVE_X, GL_TEXTURE_CUBE_MAP_NEGATIVE_X,
 *     GL_TEXTURE_CUBE_MAP_POSITIVE_Y, GL_TEXTURE_CUBE_MAP_NEGATIVE_Y,
 *     GL_TEXTURE_CUBE_MAP_POSITIVE_Z, or GL_TEXTURE_CUBE_MAP_NEGATIVE_Z.
 * @param level
 *     Level-of-detail number. Level 0 is the base image level; level n is
 *     the nth mipmap reduction.
 * @param internalformat
 *     Internal format of the texture, i.e. which color components it stores
 *     (e.g. RGB stores red, green, and blue; RGBA additionally stores alpha).
 *     Must be one of the following symbolic constants:
 *     GL_ALPHA: a single alpha value
 *     GL_LUMINANCE: a single luminance value
 *     GL_LUMINANCE_ALPHA: a luminance component followed by an alpha value
 *     GL_RGB: red, then green, then blue
 *     GL_RGBA: red, then green, then blue, then alpha
 * @param width
 *     Width of the texture image. All implementations support 2D textures
 *     at least 64 texels wide and cube-map textures at least 16 texels wide.
 * @param height
 *     Height of the texture image. All implementations support 2D textures
 *     at least 64 texels high and cube-map textures at least 16 texels high.
 * @param border
 *     Border width. Must be 0.
 * @param format
 *     Format of the pixel data as stored in memory. Must match
 *     internalformat. The following symbolic values are accepted:
 *     GL_ALPHA, GL_LUMINANCE, GL_LUMINANCE_ALPHA, GL_RGB, GL_RGBA
 *     (component order as described for internalformat).
 * @param type
 *     Data type of the pixel data. The following symbolic values are accepted:
 *     GL_UNSIGNED_BYTE: unsigned 8-bit integer
 *     GL_UNSIGNED_SHORT_5_6_5: unsigned 16-bit integer, 5_6_5 layout
 *     GL_UNSIGNED_SHORT_4_4_4_4: unsigned 16-bit integer, 4_4_4_4 layout
 *     GL_UNSIGNED_SHORT_5_5_5_1: unsigned 16-bit integer, 5_5_5_1 layout
 * @param pixels
 *     Pointer to the image data in memory. May be null, in which case
 *     texture storage is allocated for the given width and height without
 *     uploading any data (e.g. when rendering into a FrameBuffer).
 */
public static native void glTexImage2D(
        int target,
        int level,
        int internalformat,
        int width,
        int height,
        int border,
        int format,
        int type,
        java.nio.Buffer pixels
);
The YUV/RGBA frame data must be uploaded as textures before the shaders below can sample it.
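The code in the following sections assumes the texture names in mInputTextures already exist; their creation is not shown in the original, so here is a minimal sketch (two planes suffice for NV21, three for I420):

// Generate texture names once, e.g. in onSurfaceCreated().
int[] mInputTextures = new int[3];
GLES20.glGenTextures(mInputTextures.length, mInputTextures, 0);
// Filtering and wrap modes are set per upload in the code below.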
3.2 Using NV21 with OpenGL
3.2.1 The NV21 shaders
public static final String SHADER_VERTEX = "" +
        "attribute lowp vec4 position;\n" +
        "attribute lowp vec4 inCoordinate;\n" +
        " \n" +
        "varying lowp vec2 textureCoordinate;\n" +
        " \n" +
        "void main()\n" +
        "{\n" +
        "    gl_Position = position;\n" +
        "    textureCoordinate = inCoordinate.xy;\n" +
        "}";

public static final String SHADER_FRAGMENT = "precision highp float; \n" +
        "varying vec2 textureCoordinate; \n" +
        "uniform sampler2D y_texture; \n" +
        "uniform sampler2D uv_texture; \n" +
        "void main (void){ \n" +
        "    float r, g, b, y, u, v; \n" +
        "    y = texture2D(y_texture, textureCoordinate).r; \n" +
        "    u = texture2D(uv_texture, textureCoordinate).a - 0.5;\n" +
        "    v = texture2D(uv_texture, textureCoordinate).r - 0.5;\n" +
        "    r = y + 1.13983*v; \n" +
        "    g = y - 0.39465*u - 0.58060*v; \n" +
        "    b = y + 2.03211*u; \n" +
        "    gl_FragColor = vec4(r, g, b, 1.0); \n" +
        "} ";
3.2.2 Using the NV21 shaders
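The code below references mProgramId, mAttribPosition, mAttribTexCoord, mUniformTextureY, and mUniformTextureUV without showing their setup; a minimal sketch of compiling the shaders and looking up the handles (no compile/link error checking):

int vs = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vs, SHADER_VERTEX);
GLES20.glCompileShader(vs);
int fs = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fs, SHADER_FRAGMENT);
GLES20.glCompileShader(fs);
int mProgramId = GLES20.glCreateProgram();
GLES20.glAttachShader(mProgramId, vs);
GLES20.glAttachShader(mProgramId, fs);
GLES20.glLinkProgram(mProgramId);
int mAttribPosition = GLES20.glGetAttribLocation(mProgramId, "position");
int mAttribTexCoord = GLES20.glGetAttribLocation(mProgramId, "inCoordinate");
int mUniformTextureY = GLES20.glGetUniformLocation(mProgramId, "y_texture");
int mUniformTextureUV = GLES20.glGetUniformLocation(mProgramId, "uv_texture");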
// nv21Buffer holds one NV21 frame from onPreviewFrame():
// width * height luma bytes followed by width * height / 2 interleaved VU bytes.
ByteBuffer mYBuffer = ByteBuffer.allocate(width * height).order(ByteOrder.nativeOrder());
mYBuffer.put(nv21Buffer, 0, width * height);
mYBuffer.position(0);
ByteBuffer mUVBuffer = ByteBuffer.allocate(width * height / 2).order(ByteOrder.nativeOrder());
mUVBuffer.put(nv21Buffer, width * height, width * height / 2);
mUVBuffer.position(0);

GLES20.glClearColor(0, 0, 0, 0);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glViewport(0, 0, mSurfaceViewWidth, mSurfaceViewHeight);
GLES20.glUseProgram(mProgramId);

GLES20.glVertexAttribPointer(mAttribPosition, 2, GLES20.GL_FLOAT, false, 0, mVertexBuffer);
GLES20.glEnableVertexAttribArray(mAttribPosition);
GLES20.glVertexAttribPointer(mAttribTexCoord, 2, GLES20.GL_FLOAT, false, 0, mTextureBuffer);
GLES20.glEnableVertexAttribArray(mAttribTexCoord);

// Upload the Y plane as a full-resolution single-channel texture.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mInputTextures[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width, height, 0,
        GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, mYBuffer);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

// Upload the interleaved VU plane at half resolution as LUMINANCE_ALPHA;
// in the fragment shader, .r carries V and .a carries U.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mInputTextures[1]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE_ALPHA, width / 2, height / 2, 0,
        GLES20.GL_LUMINANCE_ALPHA, GLES20.GL_UNSIGNED_BYTE, mUVBuffer);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

// y
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mInputTextures[0]);
GLES20.glUniform1i(mUniformTextureY, 0);
// uv
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mInputTextures[1]);
GLES20.glUniform1i(mUniformTextureUV, 1);

GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glDisableVertexAttribArray(mAttribPosition);
GLES20.glDisableVertexAttribArray(mAttribTexCoord);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
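mVertexBuffer and mTextureBuffer above are assumed to hold a full-screen quad for GL_TRIANGLE_STRIP; a typical setup looks like this (the exact texture coordinates depend on the rotation/mirroring your preview needs):

float[] vertices = { -1f, -1f,  1f, -1f,  -1f, 1f,  1f, 1f };
float[] texCoords = { 0f, 1f,  1f, 1f,  0f, 0f,  1f, 0f };
FloatBuffer mVertexBuffer = ByteBuffer.allocateDirect(vertices.length * 4)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
mVertexBuffer.put(vertices).position(0);
FloatBuffer mTextureBuffer = ByteBuffer.allocateDirect(texCoords.length * 4)
        .order(ByteOrder.nativeOrder()).asFloatBuffer();
mTextureBuffer.put(texCoords).position(0);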
3.3 Using I420 with OpenGL
3.3.1 The I420 shaders
public static final String SHADER_VERTEX = "" +
        "attribute vec4 position;\n" +
        "attribute vec4 inCoordinate;\n" +
        " \n" +
        "varying vec2 textureCoordinate;\n" +
        " \n" +
        "void main()\n" +
        "{\n" +
        "    gl_Position = position;\n" +
        "    textureCoordinate = inCoordinate.xy;\n" +
        "}";

public static final String SHADER_FRAGMENT =
        "precision highp float;\n" +
        "uniform sampler2D tex_y;\n" +
        "uniform sampler2D tex_u;\n" +
        "uniform sampler2D tex_v;\n" +
        "varying vec2 textureCoordinate;\n" +
        "void main() {\n" +
        "    vec4 c = vec4((texture2D(tex_y, textureCoordinate).r - 16./255.) * 1.164);\n" +
        "    vec4 U = vec4(texture2D(tex_u, textureCoordinate).r - 128./255.);\n" +
        "    vec4 V = vec4(texture2D(tex_v, textureCoordinate).r - 128./255.);\n" +
        "    c += V * vec4(1.596, -0.813, 0, 0);\n" +
        "    c += U * vec4(0, -0.392, 2.017, 0);\n" +
        "    c.a = 1.0;\n" +
        "    gl_FragColor = c;\n" +
        "}\n";
3.3.2 Using the I420 shaders

Only the texture-upload step is shown here; the draw step mirrors the NV21 code above (see the sketch after the upload code).
private void convertYUVTextures(ByteBuffer data, int width, int height) {
    // Y plane: full resolution, the first width * height bytes.
    data.position(0);
    data.limit(width * height);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mInputTextures[0]);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width, height, 0,
            GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, data);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

    // U plane: quarter size, immediately after the Y plane.
    data.clear();
    data.position(width * height);
    ByteBuffer uBuffer = data.slice();
    uBuffer.limit(width * height / 4);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mInputTextures[1]);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width / 2, height / 2, 0,
            GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, uBuffer);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);

    // V plane: quarter size, immediately after the U plane.
    data.clear();
    data.position(width * height * 5 / 4);
    ByteBuffer vBuffer = data.slice();
    vBuffer.limit(width * height / 4);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mInputTextures[2]);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, width / 2, height / 2, 0,
            GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, vBuffer);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
}
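For completeness, a sketch of the corresponding draw step, binding the three planes to the tex_y/tex_u/tex_v samplers (the uniform handles are assumptions, looked up with glGetUniformLocation as in the NV21 sketch):

GLES20.glUseProgram(mProgramId);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mInputTextures[0]);
GLES20.glUniform1i(mUniformTextureY, 0); // tex_y
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mInputTextures[1]);
GLES20.glUniform1i(mUniformTextureU, 1); // tex_u
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mInputTextures[2]);
GLES20.glUniform1i(mUniformTextureV, 2); // tex_v
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);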