OpenGL ES摄像头预览加立方体叠加
摄像头预览可以使用SurfaceView或者TextureView,在Android中也可以使用OpenGL ES中的GLSurfaceView来预览摄像头画面,并在此基础上绘制2维/3维图形。本文实现在摄像头预览时叠加一个3D立方体。
一、Camera预览
GLSurfaceView代码如下:
public class Camera3DGLSurface extends GLSurfaceView implements SurfaceTexture.OnFrameAvailableListener {
    private BaseCameraRenderer render;

    public Camera3DGLSurface(Context context) {
        super(context);
        // Bug fix: this constructor previously performed no renderer setup at all,
        // so a view created in code (not inflated from XML) stayed blank.
        init(context);
    }

    public Camera3DGLSurface(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    /**
     * Shared setup for both constructors: request a GLES 3 context, install the
     * camera renderer, switch to on-demand rendering, and attach the default cube.
     */
    private void init(Context context) {
        setEGLContextClientVersion(3);
        render = new Camera3DRender(context, this);
        setRenderer(render);
        // Only render when requestRender() is called (i.e. when a camera frame arrives).
        setRenderMode(RENDERMODE_WHEN_DIRTY);
        render.setObjectRender(new CubicRender());
    }

    /** Replaces the overlay object drawn on top of the camera preview. */
    public void setObjectRender(AbsObjectRender absObjectRender) {
        if (render != null) {
            render.setObjectRender(absObjectRender);
        }
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        // A new camera frame is ready: schedule one render pass.
        requestRender();
    }
}
BaseCameraRenderer实现如下:
public abstract class BaseCameraRenderer implements GLSurfaceView.Renderer {
    private static final String TAG = "BaseCameraRenderer";
    // Additional renderables (e.g. the cube) drawn on top of the camera preview.
    private List<AbsObjectRender> objectRenders = new ArrayList<>();
    // Projection and camera (view) matrices, shared with every attached object renderer.
    protected float[] mProjectMatrix = new float[16];
    protected float[] mCameraMatrix = new float[16];

    /** Replaces the current overlay renderers with a single one. Null is ignored. */
    public void setObjectRender(AbsObjectRender absObjectRender) {
        objectRenders.clear();
        if (absObjectRender != null) {  // robustness: avoid a deferred NPE in onDrawFrame
            objectRenders.add(absObjectRender);
        }
    }

    /** Replaces the current overlay renderers with the given list. Null is ignored. */
    public void setObjectRenders(List<AbsObjectRender> absObjectRenders) {
        objectRenders.clear();
        if (absObjectRenders != null) {
            objectRenders.addAll(absObjectRenders);
        }
    }

    @Override
    public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
        // GL context is current here: programs can only be created successfully now.
        for (AbsObjectRender objRender : objectRenders) {
            objRender.initProgram();
        }
    }

    @Override
    public void onSurfaceChanged(GL10 gl10, int i, int i1) {
        // Hand each overlay the shared matrices and the new surface size (i = width, i1 = height).
        for (AbsObjectRender objRender : objectRenders) {
            objRender.setProjAndCamMatrix(mProjectMatrix, mCameraMatrix);
            objRender.setScreenWidthHeight(i, i1);
        }
    }

    @Override
    public void onDrawFrame(GL10 gl10) {
        for (AbsObjectRender objRender : objectRenders) {
            if (!objRender.isAlreadyInited()) { // init failed earlier, retry once per frame
                Log.e(TAG, "onDrawFrame: 初始化不成功,重新初始化");
                objRender.initProgram();
                objRender.setProjAndCamMatrix(mProjectMatrix, mCameraMatrix);
                if (!objRender.isAlreadyInited()) {
                    // Bug fix: still not initialised — skip instead of drawing with program 0.
                    continue;
                }
            }
            objRender.onDrawFrame();
        }
    }
}
Camera3DRender代码如下:
class Camera3DRender(ctx: Context?, listener: OnFrameAvailableListener?) : BaseCameraRenderer() {

    private val TAG = "Camera3DRender"
    private var mContext: Context? = null
    private var mCameraManeger: CameraManeger? = null
    private var mCameraTexture: SurfaceTexture? = null
    private var listener: OnFrameAvailableListener? = null
    private var mProgram = 0
    private var uPosHandle = 0
    private var aTexHandle = 0
    private var mMVPMatrixHandle = 0
    // The projection and camera matrices live in the base class so that the
    // other renderables (e.g. the cube) can share them.
    private val mMVPMatrix = FloatArray(16)
    private val mTempMatrix = FloatArray(16)
    // Full-screen quad: 4 vertices, 3 components (x, y, z) each. z = 1 combines
    // with the shader's pos.xyww trick to pin the preview at maximum depth.
    private val mPosCoordinate = floatArrayOf(
        -1f, -1f, 1f,
        -1f, 1f, 1f,
        1f, -1f, 1f,
        1f, 1f, 1f)
    private val mTexCoordinate = floatArrayOf(0f, 1f, 1f, 1f, 0f, 0f, 1f, 0f)
    private var mPosBuffer: FloatBuffer? = null
    private var mTexBuffer: FloatBuffer? = null

    init {
        this.mContext = ctx
        Matrix.setIdentityM(mProjectMatrix, 0)
        Matrix.setIdentityM(mCameraMatrix, 0)
        Matrix.setIdentityM(mMVPMatrix, 0)
        Matrix.setIdentityM(mTempMatrix, 0)
        this.listener = listener
        mCameraManeger = CameraManeger()
        mPosBuffer = GLDataUtil.createFloatBuffer(mPosCoordinate)
        mTexBuffer = GLDataUtil.createFloatBuffer(mTexCoordinate)
    }

    override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
        // Compile the vertex shader.
        val vertexShaderStr = ResReadUtils.readResource(R.raw.vertex_camera3d_texture)
        val vertexShaderId = ShaderUtils.compileVertexShader(vertexShaderStr)
        // Compile the fragment shader.
        val fragmentShaderStr = ResReadUtils.readResource(R.raw.fragment_camera_shade)
        val fragmentShaderId = ShaderUtils.compileFragmentShader(fragmentShaderStr)
        // Link the program.
        mProgram = ShaderUtils.linkProgram(vertexShaderId, fragmentShaderId)
        // Locations are fixed once the program is linked — resolve them here once
        // instead of on every frame (the original looked them up in onDrawFrame).
        uPosHandle = GLES30.glGetAttribLocation(mProgram, "position")
        aTexHandle = GLES30.glGetAttribLocation(mProgram, "inputTextureCoordinate")
        mMVPMatrixHandle = GLES30.glGetUniformLocation(mProgram, "textureTransform")
        createAndBindVideoTexture()
        mCameraManeger!!.OpenCamera(mCameraTexture)
        // Let the base class initialise the additional overlay renderers.
        super.onSurfaceCreated(gl, config)
    }

    override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
        super.onSurfaceChanged(gl, width, height)
    }

    override fun onDrawFrame(gl: GL10?) {
        // Enable depth testing, then clear both color and depth buffers.
        GLES30.glEnable(GLES30.GL_DEPTH_TEST)
        GLES30.glClear(GLES30.GL_COLOR_BUFFER_BIT or GLES30.GL_DEPTH_BUFFER_BIT)
        /********** Draw the camera preview **********/
        GLES30.glUseProgram(mProgram)
        // Upload the MVP matrix (identity here; the shader pins depth via pos.xyww anyway).
        GLES30.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mMVPMatrix, 0)
        mCameraTexture!!.updateTexImage() // pull the latest camera frame into the OES texture
        GLES30.glVertexAttribPointer(uPosHandle, COORDS_PER_VERTEX, GLES30.GL_FLOAT, false, 0, mPosBuffer)
        GLES30.glVertexAttribPointer(aTexHandle, 2, GLES30.GL_FLOAT, false, 0, mTexBuffer)
        GLES30.glEnableVertexAttribArray(uPosHandle)
        GLES30.glEnableVertexAttribArray(aTexHandle)
        // Bug fix: each vertex has 3 components, so the vertex count is size / 3 (= 4).
        // The original passed size / 2 (= 6), over-running the 4-vertex buffer.
        GLES30.glDrawArrays(GLES30.GL_TRIANGLE_STRIP, 0, mPosCoordinate.size / COORDS_PER_VERTEX)
        GLES30.glDisableVertexAttribArray(uPosHandle)
        GLES30.glDisableVertexAttribArray(aTexHandle)
        GLES30.glUseProgram(0)
        // The base class draws the 3D objects on top of the preview.
        super.onDrawFrame(gl)
    }

    private fun createAndBindVideoTexture() {
        val texture = IntArray(1)
        GLES30.glGenTextures(1, texture, 0) // generate one GL texture
        // Bind it as an external OES texture and configure filtering / wrapping.
        GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture[0])
        GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR.toFloat())
        GLES30.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR.toFloat())
        GLES30.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE)
        GLES30.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE)
        // Wrap the texture in a SurfaceTexture; camera frames delivered to it land in this texture.
        mCameraTexture = SurfaceTexture(texture[0])
        // Notify the listener whenever a new preview frame is available.
        mCameraTexture!!.setOnFrameAvailableListener(listener)
    }

    companion object {
        // x, y, z per vertex.
        private const val COORDS_PER_VERTEX = 3
    }
}
Camera3DRender继承了父类BaseCameraRenderer,便于添加其他绘图元素。
顶点着色器vertex_camera3d_texture.glsl
// Vertex shader for the full-screen camera-preview quad.
uniform mat4 textureTransform;
attribute vec2 inputTextureCoordinate;
attribute vec3 position; // vertex position in normalized device coordinates (NDC)
varying vec2 textureCoordinate; // texture coordinate passed through to the fragment shader
void main() {
// gl_Position = position;
vec4 pos = vec4(position, 1.0);
// Replace z with w: after perspective division depth becomes w/w = 1.0, the
// farthest possible value, so the preview never occludes the 3D objects.
gl_Position = pos.xyww;
textureCoordinate = inputTextureCoordinate;
}
片元着色器fragment_camera_shade.glsl
// Fragment shader for the camera preview: samples the external OES texture
// fed by the camera's SurfaceTexture.
#extension GL_OES_EGL_image_external : require
precision mediump float;
uniform samplerExternalOES videoTex;
varying vec2 textureCoordinate;
void main() {
    vec4 tc = texture2D(videoTex, textureCoordinate);
    // Optional grayscale (luma) output — kept for reference. The original
    // computed `color` on every fragment but never used it (dead work).
    // float color = tc.r * 0.3 + tc.g * 0.59 + tc.b * 0.11;
    // gl_FragColor = vec4(color, color, color, 1.0);
    gl_FragColor = tc;
}
二、如何保证摄像头预览在所有绘制的3D物体的后面
关键看顶点着色器代码:
vec4 pos = vec4(position, 1.0);
gl_Position = pos.xyww;
可以看到赋值给gl_Position的不是通常的pos.xyzw。而是xyww。
正常的gl_Position 应该等于 pos.xyzw。此处将z替换成了w。在典型情况下,w坐标设为1.0。x,y,z值通过除以w,来进行缩放。而除以1.0则本质上不改变x,y,z值。
如何理解呢?
透视投影想要模仿的效果,它是使用透视投影矩阵来完成的。这个投影矩阵将给定的平截头体范围映射到裁剪空间,除此之外还修改了每个顶点坐标的w值,从而使得离观察者越远的顶点坐标w分量越大。 被变换到裁剪空间的坐标都会在-w到w的范围之间(任何大于这个范围的坐标都会被裁剪掉)。OpenGL要求所有可见的坐标都落在-1.0到1.0范围内,作为顶点着色器最后的输出,因此,一旦坐标在裁剪空间内之后,透视除法就会被应用到裁剪空间坐标上:
透视除法是在顶点着色器运行之后执行的,将gl_Position的xyz坐标除以w分量。
1--所以每个点x,y,z坐标最终都会除以w。并且最终得到的值在-1到1之间。z可取的最大值就是w
深度缓冲包含了一个介于0.0和1.0之间的深度值,它将会与观察者视角所看见的场景中所有物体的z值进行比较。观察空间的z值可能是投影平截头体的近平面(Near)和远平面(Far)之间的任何值。
2--(见上图公式)分子分母都是负数。所以z值越大,最终得到的Fdepth值越大,即深度值越大。所以将z取值为w,则能保证摄像头的预览画面深度最大,不会遮挡其他绘制的物体。
(见上图)深度值很大一部分是由很小的z值所决定的,这给了近处的物体很大的深度精度。
所以通过顶点着色器可以知道,虽然顶点中传了z值,但是实际并没有用z值,而是用了w值代替了z值。所以代码还可以修改为。
Camera3DRender代码修改
private val mPosCoordinate = floatArrayOf(
-1f, -1f,
-1f, 1f,
1f, -1f,
1f, 1f)
GLES30.glVertexAttribPointer(uPosHandle, 2, GLES30.GL_FLOAT, false, 0, mPosBuffer)
顶点着色器代码也做相应修改
// Modified vertex shader: the quad's z is never used (it is replaced by w),
// so the position attribute can be reduced to 2 components.
uniform mat4 textureTransform;
attribute vec2 inputTextureCoordinate;
attribute vec2 position; // vertex position in NDC; z is synthesized below
varying vec2 textureCoordinate; // texture coordinate passed through to the fragment shader
void main() {
// gl_Position = position;
vec4 pos = vec4(position,1.0, 1.0);
// pos.xyww pins the preview at maximum depth (z/w = 1.0 after perspective division).
gl_Position = pos.xyww;
textureCoordinate = inputTextureCoordinate;
}
可以直接传2维坐标,因为坐标z实际不用。
三、绘制立方体
AbsObjectRender代码如下:
/**
 * Base class for an object rendered on top of the camera preview.
 * Subclasses implement {@link #initProgram()} and {@link #onDrawFrame()}.
 */
public abstract class AbsObjectRender {
    // Projection matrix, supplied by the host renderer in onSurfaceChanged.
    protected float[] projectMatrix = new float[16];
    // Camera (view) matrix, supplied by the host renderer in onSurfaceChanged.
    protected float[] cameraMatrix = new float[16];
    // Shader program handle; 0 means "not initialised yet".
    // (The original also declared private vertexBuffer/colorBuffer fields that
    // were never used anywhere — removed as dead code.)
    public int mProgram = 0;
    // Surface size in pixels, set via setScreenWidthHeight().
    protected int mWidth = 0;
    protected int mHeight = 0;

    /**
     * Called from onSurfaceCreated: GL programs can only be created once a GL
     * context is current on the rendering thread.
     */
    abstract public void initProgram();

    /**
     * Called from onSurfaceChanged to hand over the projection and camera matrices.
     * Note: stores the references, not copies — later in-place updates made by the
     * host renderer are visible here too.
     */
    public void setProjAndCamMatrix(float[] projectMatrix, float[] cameraMatrix) {
        this.projectMatrix = projectMatrix;
        this.cameraMatrix = cameraMatrix;
    }

    /** @return true once initProgram() has produced a valid (non-zero) program. */
    public boolean isAlreadyInited() {
        return mProgram != 0;
    }

    /** Called from onDrawFrame to render this object. */
    abstract public void onDrawFrame();

    /** Records the surface dimensions for projection setup. */
    public void setScreenWidthHeight(int width, int height) {
        this.mWidth = width;
        this.mHeight = height;
    }
}
CubicRender代码如下:
主要进行立方体的绘制,常规操作不细讲。
/**
 * Draws a rotating textured cube on top of the camera preview.
 */
class CubicRender : AbsObjectRender() {

    private val TAG = "CubicRender"

    // 36 vertices (12 triangles, 6 faces), interleaved as x, y, z, u, v — stride 5 floats.
    private val cubeVertices = floatArrayOf(
        // positions          // texture coords
        -0.5f, -0.5f, -0.5f, 0.0f, 0.0f,
        0.5f, -0.5f, -0.5f, 1.0f, 0.0f,
        0.5f, 0.5f, -0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, -0.5f, 1.0f, 1.0f,
        -0.5f, 0.5f, -0.5f, 0.0f, 1.0f,
        -0.5f, -0.5f, -0.5f, 0.0f, 0.0f,
        -0.5f, -0.5f, 0.5f, 0.0f, 0.0f,
        0.5f, -0.5f, 0.5f, 1.0f, 0.0f,
        0.5f, 0.5f, 0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 0.5f, 1.0f, 1.0f,
        -0.5f, 0.5f, 0.5f, 0.0f, 1.0f,
        -0.5f, -0.5f, 0.5f, 0.0f, 0.0f,
        -0.5f, 0.5f, 0.5f, 1.0f, 0.0f,
        -0.5f, 0.5f, -0.5f, 1.0f, 1.0f,
        -0.5f, -0.5f, -0.5f, 0.0f, 1.0f,
        -0.5f, -0.5f, -0.5f, 0.0f, 1.0f,
        -0.5f, -0.5f, 0.5f, 0.0f, 0.0f,
        -0.5f, 0.5f, 0.5f, 1.0f, 0.0f,
        0.5f, 0.5f, 0.5f, 1.0f, 0.0f,
        0.5f, 0.5f, -0.5f, 1.0f, 1.0f,
        0.5f, -0.5f, -0.5f, 0.0f, 1.0f,
        0.5f, -0.5f, -0.5f, 0.0f, 1.0f,
        0.5f, -0.5f, 0.5f, 0.0f, 0.0f,
        0.5f, 0.5f, 0.5f, 1.0f, 0.0f,
        -0.5f, -0.5f, -0.5f, 0.0f, 1.0f,
        0.5f, -0.5f, -0.5f, 1.0f, 1.0f,
        0.5f, -0.5f, 0.5f, 1.0f, 0.0f,
        0.5f, -0.5f, 0.5f, 1.0f, 0.0f,
        -0.5f, -0.5f, 0.5f, 0.0f, 0.0f,
        -0.5f, -0.5f, -0.5f, 0.0f, 1.0f,
        -0.5f, 0.5f, -0.5f, 0.0f, 1.0f,
        0.5f, 0.5f, -0.5f, 1.0f, 1.0f,
        0.5f, 0.5f, 0.5f, 1.0f, 0.0f,
        0.5f, 0.5f, 0.5f, 1.0f, 0.0f,
        -0.5f, 0.5f, 0.5f, 0.0f, 0.0f,
        -0.5f, 0.5f, -0.5f, 0.0f, 1.0f
    )

    protected var vertexShaderCode: String? = null
    protected var fragmentShaderCode: String? = null
    private var textureRenderer: TextureRenderer? = null
    private val modelMatrix = FloatArray(16)
    private val mMVPMatrix = FloatArray(16)
    private val viewMatrix = FloatArray(16)
    private val projectionMatrix = FloatArray(16)
    private var cubeTexture: Int = 0
    // Vertex data buffer, created once in initProgram() — the original allocated
    // a fresh FloatBuffer on every frame.
    private var vertexBuffer: FloatBuffer? = null
    // Accumulated rotation angle in degrees.
    private var angle = 0f

    /**
     * Called from onSurfaceCreated: compiles/links the program and loads the
     * cube texture. Must run on the GL thread with a current context.
     */
    override fun initProgram() {
        vertexShaderCode = ResReadUtils.readResource(R.raw.texture_vertext)
        fragmentShaderCode = ResReadUtils.readResource(R.raw.texture_fragment)
        cubeTexture = TextureUtils.loadTexture(AppCore.getInstance().context, R.drawable.hzw5)
        vertexBuffer = GLDataUtil.createFloatBuffer(cubeVertices)
        textureRenderer = TextureRenderer()
        mProgram = textureRenderer!!.shaderProgram
        if (mProgram == 0) {
            Log.e(TAG, "initProgram: cubic 初始化失败")
        }
    }

    // Rebuilds the perspective and view matrices from the current surface size.
    private fun initProjectViewMatrix(aWidth: Int, aHeight: Int) {
        val ratio: Float = (aWidth + 0.0f) / aHeight
        Matrix.frustumM(projectionMatrix, 0, -ratio, ratio, -1f, 1f, 1f, 7f)
        Matrix.setLookAtM(viewMatrix, 0, 0f, 0f, 2f, 0f, 0f, 0.0f, 0f, 1f, 0f)
    }

    override fun onDrawFrame() {
        drawTexture()
    }

    private fun drawTexture() {
        initProjectViewMatrix(mWidth, mHeight)
        val renderer = textureRenderer ?: return
        renderer.start()
        val buffer = vertexBuffer ?: return
        // Interleaved layout: positions start at float 0, tex coords at float 3; stride = 5 floats.
        buffer.position(0)
        GLES30.glVertexAttribPointer(renderer.positionHandle, 3, GLES30.GL_FLOAT,
            false, 5 * 4, buffer)
        buffer.position(3)
        GLES30.glVertexAttribPointer(renderer.textCoordsHandle, 2, GLES30.GL_FLOAT,
            false, 5 * 4, buffer)
        // model -> view -> projection
        Matrix.setIdentityM(modelMatrix, 0)
        // Matrix.translateM(modelMatrix, 0, 0.5f, 0.5f, -2f)
        Matrix.scaleM(modelMatrix, 0, 0.5f, 0.5f, 0.5f)
        Matrix.rotateM(modelMatrix, 0, angle, 1.0f, 1.0f, 0f)
        Matrix.multiplyMM(mMVPMatrix, 0, viewMatrix, 0, modelMatrix, 0)
        Matrix.multiplyMM(mMVPMatrix, 0, projectionMatrix, 0, mMVPMatrix, 0)
        GLES30.glUniformMatrix4fv(renderer.mMVPMatrixHandle, 1, false, mMVPMatrix, 0)
        GLES30.glActiveTexture(GLES30.GL_TEXTURE0)
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, cubeTexture)
        GLES30.glUniform1i(renderer.texturePosHandle, 0)
        GLES30.glDrawArrays(GLES30.GL_TRIANGLES, 0, 36)
        renderer.end()
        // Advance the rotation by 2 degrees per frame, wrapping at 360.
        angle += 2
        if (angle >= 360) {
            angle = 0F
        }
    }

    /** Wraps the linked program and its attribute/uniform locations. */
    inner class TextureRenderer {
        var shaderProgram: Int = 0
        var positionHandle: Int = 0
        var textCoordsHandle: Int = 0
        var mMVPMatrixHandle: Int = 0
        var texturePosHandle: Int = 0

        init {
            val vertexShaderId: Int = ShaderUtils.compileVertexShader(vertexShaderCode)
            val fragmentShaderId: Int = ShaderUtils.compileFragmentShader(fragmentShaderCode)
            shaderProgram = ShaderUtils.linkProgram(vertexShaderId, fragmentShaderId)
            positionHandle = GLES30.glGetAttribLocation(shaderProgram, "aPosition")
            textCoordsHandle = GLES30.glGetAttribLocation(shaderProgram, "aTexCoords")
            mMVPMatrixHandle = GLES30.glGetUniformLocation(shaderProgram, "uMVPMatrix")
            texturePosHandle = GLES30.glGetUniformLocation(shaderProgram, "texture")
        }

        fun start() {
            GLES30.glUseProgram(shaderProgram)
            GLES30.glEnableVertexAttribArray(positionHandle)
            // Bug fix: the original enabled texturePosHandle, which is a *uniform*
            // location — the texture-coordinate *attribute* array is textCoordsHandle.
            GLES30.glEnableVertexAttribArray(textCoordsHandle)
        }

        fun end() {
            GLES30.glDisableVertexAttribArray(positionHandle)
            GLES30.glDisableVertexAttribArray(textCoordsHandle) // same fix as start()
            GLES30.glUseProgram(0)
        }
    }
}
顶点着色器texture_vertext.glsl
// Vertex shader for the textured cube: standard MVP transform.
uniform mat4 uMVPMatrix;
attribute vec3 aPosition;
attribute vec2 aTexCoords;
varying vec2 TexCoord; // texture coordinate passed through to the fragment shader
void main() {
gl_Position = uMVPMatrix * vec4(aPosition, 1.0);
TexCoord = aTexCoords;
}
片元着色器texture_fragment.glsl
// Fragment shader for the textured cube: plain texture sampling.
precision mediump float;
// NOTE(review): "texture" is a reserved word in GLSL ES 3.00+; it is legal here
// only because this shader compiles as GLSL ES 1.00 — consider renaming.
uniform sampler2D texture;
varying vec2 TexCoord;
void main() {
gl_FragColor = texture2D(texture, TexCoord);
}
四、效果
参考:
https://learnopengl-cn.github.io/04%20Advanced%20OpenGL/01%20Depth%20testing/
https://learnopengl-cn.github.io/01%20Getting%20started/08%20Coordinate%20Systems/