Android camera acquisition and preview - the second method

Android camera acquisition and preview - several methods
http://blog.csdn.net/f562230829/article/details/78905530

The first method
http://blog.csdn.net/f562230829/article/details/78905652

Demo address

git clone git@gitee.com:audiocn/androidCamera.git
https://gitee.com/audiocn/androidCamera.git


GLSurfaceView + setPreviewTexture + OpenGL ES

This is the approach most commonly used today.
Create a SurfaceTexture in the onSurfaceCreated callback of the GLSurfaceView, then pass it to the camera via setPreviewTexture so the camera renders its frames into it. Whenever a new frame arrives, the onFrameAvailable callback fires and the frame can be latched into the texture id associated with the SurfaceTexture;
OpenGL ES then renders that texture to the screen to complete the preview.
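
Condensed, the whole flow looks like the sketch below (a minimal skeleton under assumed class and field names, with the actual drawing omitted; the full demo code follows in the next sections):

import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class PreviewFlow implements GLSurfaceView.Renderer,
        SurfaceTexture.OnFrameAvailableListener {
    private final GLSurfaceView mView;
    private SurfaceTexture mSurfaceTexture;
    private Camera mCamera;
    private int mTexId;

    public PreviewFlow(GLSurfaceView view) {
        mView = view;
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        //1. Create a texture on the GL thread and wrap it in a SurfaceTexture
        int[] tex = new int[1];
        GLES20.glGenTextures(1, tex, 0);
        mTexId = tex[0];
        mSurfaceTexture = new SurfaceTexture(mTexId);
        mSurfaceTexture.setOnFrameAvailableListener(this);
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        try {
            //2. Route the camera preview into the SurfaceTexture
            mCamera = Camera.open();
            mCamera.setPreviewTexture(mSurfaceTexture);
            mCamera.startPreview();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        //3. A new frame arrived: ask the GL thread for a render pass
        mView.requestRender();
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        //4. Latch the frame into mTexId, then draw it with OpenGL ES
        mSurfaceTexture.updateTexImage();
        //...render mTexId to the screen here...
    }
}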

Main code

Camera permission

To open the camera, declare the camera permission in AndroidManifest.xml:
<uses-permission android:name="android.permission.CAMERA"/>
Note: on Android 6.0 (API 23) and above, the permission must also be requested at runtime.
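
A minimal sketch of that runtime request using the AndroidX compat helpers (the method name and request code are illustrative, and the code is assumed to sit inside the Activity):

    import android.Manifest;
    import android.content.pm.PackageManager;
    import androidx.core.app.ActivityCompat;
    import androidx.core.content.ContextCompat;

    private static final int REQUEST_CAMERA = 1;

    //Call this before opening the camera, e.g. at the start of initView()
    private void requestCameraPermissionIfNeeded() {
        if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA)
                != PackageManager.PERMISSION_GRANTED) {
            ActivityCompat.requestPermissions(this,
                    new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA);
        }
    }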

GLSurfaceView

    <android.opengl.GLSurfaceView
        android:id="@+id/glSurfaceView"
        android:layout_width="fill_parent"
        android:layout_height="fill_parent" />

Setting up the GLSurfaceView and camera on the UI thread

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_demo2);
        setTitle("Demo2");

        initView();
    }


    private void initView(){
        try {
            mGlSurfaceView = (GLSurfaceView) findViewById(R.id.glSurfaceView);

            CameraController.getInstance().openCamera(mGlSurfaceView);
        }catch (Exception e){
            e.printStackTrace();
        }

    }

Configure the GLSurfaceView

    public boolean openCamera(GLSurfaceView glSurfaceView){
        boolean b = true;
        try{
            mGlsurfaceView = glSurfaceView;

            //Using opengl-es 2.0
            mGlsurfaceView.setEGLContextClientVersion(2);

            //Set the Renderer; the OpenGL thread calls its onSurfaceCreated,
            //onSurfaceChanged and onDrawFrame callbacks
            mGlsurfaceView.setRenderer(this);
            //RENDERMODE_WHEN_DIRTY triggers onDrawFrame only when requestRender() is called explicitly;
            //RENDERMODE_CONTINUOUSLY has the OpenGL thread trigger onDrawFrame continuously
            mGlsurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
            //Track the surface lifecycle so the camera is released with the view
            mGlsurfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
                @Override
                public void surfaceCreated(SurfaceHolder holder) {

                }

                @Override
                public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
                }

                @Override
                public void surfaceDestroyed(SurfaceHolder holder) {
                    closeCamera();
                }
            });
        }catch (Exception e){
            e.printStackTrace();
            b = false;
        }
        return b;
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        Log.i(TAG,"onFrameAvailable");
        synchronized(this) {
            updateSurface = true;
        }
        //Wake the OpenGL thread: it will call onDrawFrame, which renders the
        //frame held by the texture id to the screen
        mGlsurfaceView.requestRender();
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        Log.i(TAG,"onSurfaceCreated");

        initSurfaceTexture();
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        Log.i(TAG,"onSurfaceChanged");
        try {
            //Open the camera
            int cameraId = openCommonCamera();
            //Route the camera preview frames into the SurfaceTexture
            mCamera.setPreviewTexture(mSurfaceTexture);
            setPameras();
            mCamera.startPreview();

            //Hand the texture id bound to the SurfaceTexture to the OpenGL rendering class
            mRenderScreen = new RenderScreen(mSurfaceTextureId);
            mRenderScreen.setSreenSize(width,height);

        }catch (Exception e){
            e.printStackTrace();
        }
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        try {
            Log.i(TAG,"onDrawFrame");
            synchronized(this) {
                if (updateSurface) {
                    //Latch the newest camera frame into mSurfaceTextureId
                    mSurfaceTexture.updateTexImage();
                    mSurfaceTexture.getTransformMatrix(mTexMtx);
                    updateSurface = false;
                }
            }

            //Render to screen
            mRenderScreen.draw(mTexMtx);

        }catch (Exception e){
            e.printStackTrace();
        }
    }

    private void initSurfaceTexture(){

        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        //Camera texture
        mSurfaceTextureId = textures[0];
        mSurfaceTexture = new SurfaceTexture(mSurfaceTextureId);
        //When camera data is updated, the onFrameAvailable function is triggered
        mSurfaceTexture.setOnFrameAvailableListener(this);
        GLES20.glDisable(GLES20.GL_DEPTH_TEST);
        GLES20.glDisable(GLES20.GL_CULL_FACE);
        GLES20.glDisable(GLES20.GL_BLEND);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mSurfaceTextureId);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    }
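
openCommonCamera(), setPameras() and closeCamera() belong to the demo's CameraController and are not shown above, along with the getters used by RenderScreen. Below is a minimal sketch of what they need to do with the legacy android.hardware.Camera API; the field names and the fixed 1280x720 preview size are assumptions, not taken from the demo:

    private Camera mCamera;
    private Camera.Size mPreviewSize;

    //Open the first back-facing camera and return its id
    private int openCommonCamera() {
        int cameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
        mCamera = Camera.open(cameraId);
        return cameraId;
    }

    //Pick a preview size and apply it; a real implementation should choose
    //one of getSupportedPreviewSizes() instead of a hard-coded value
    private void setPameras() {
        Camera.Parameters params = mCamera.getParameters();
        params.setPreviewSize(1280, 720);
        mCamera.setParameters(params);
        mPreviewSize = params.getPreviewSize();
    }

    //Stop the preview and release the camera when the surface is destroyed
    public void closeCamera() {
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    public Camera.Size getmPreviewSize() {
        return mPreviewSize;
    }

    //Assumed to reflect the activity's orientation in the real demo
    public boolean isLandscape() {
        return false;
    }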

OpenGL ES rendering

public class RenderScreen {
    private final FloatBuffer mNormalVtxBuf = GlUtil.createVertexBuffer();  //Vertex coordinates
    private final FloatBuffer mNormalVtxBufImage = GlUtil.createVertexBufferImage();  //Vertex coordinates
    private FloatBuffer mCameraTexCoordBuffer; //Texture coordinates, generated based on window size and image size
    private final float[] mPosMtx = GlUtil.createIdentityMtx();

    private int mFboTexId;

    private int mProgram = -1;
    private int maPositionHandle = -1;
    private int maTexCoordHandle = -1;
    private int muPosMtxHandle = -1;
    private int muTexMtxHandle = -1;
    private int muSamplerHandle = -1;

    private int mScreenW = -1;
    private int mScreenH = -1;

    private boolean mirrorImage; //Whether to mirror the preview image

    public RenderScreen(int id) {
        initGL();

        mFboTexId = id;
        mirrorImage = false;
    }

    public void setSreenSize(int width, int height) {
        mScreenW = width;
        mScreenH = height;

        initCameraTexCoordBuffer();
    }

    public void setTextureId(int textureId) {
        //Update the camera texture id
        mFboTexId = textureId;
    }

    private void initCameraTexCoordBuffer() {
        int cameraWidth, cameraHeight;
        Camera.Size size = CameraController.getInstance().getmPreviewSize();
        int width = size.width;
        int height = size.height;
        //TODO: adjust width/height for landscape vs. portrait orientation
        if(CameraController.getInstance().isLandscape()) {
            cameraWidth = Math.max(width, height);
            cameraHeight = Math.min(width, height);
        } else {
            cameraWidth = Math.min(width, height);
            cameraHeight = Math.max(width, height);
        }

        //Scale factors from the camera frame to the screen in each dimension
        float hRatio = mScreenW / ((float)cameraWidth);
        float vRatio = mScreenH / ((float)cameraHeight);

        float ratio;
        if(hRatio > vRatio) {
            //The width fills the screen, so sample only a central band of the
            //texture's V range to avoid stretching the preview vertically
            ratio = mScreenH / (cameraHeight * hRatio);
            final float vtx[] = {
                    //UV
                    0f, 0.5f + ratio/2,
                    0f, 0.5f - ratio/2,
                    1f, 0.5f + ratio/2,
                    1f, 0.5f - ratio/2,
            };
            ByteBuffer bb = ByteBuffer.allocateDirect(4 * vtx.length);
            bb.order(ByteOrder.nativeOrder());
            mCameraTexCoordBuffer = bb.asFloatBuffer();
            mCameraTexCoordBuffer.put(vtx);
            mCameraTexCoordBuffer.position(0);
        } else {
            ratio = mScreenW/ (cameraWidth * vRatio);
            //Landscape display
//            final float vtx[] = {
//                    //UV
//                    0.5f - ratio/2, 1f,
//                    0.5f - ratio/2, 0f,
//                    0.5f + ratio/2, 1f,
//                    0.5f + ratio/2, 0f,
//            };
            //Portrait display, enlarged (crops the frame)
//            final float vtx[] = {
//                    //UV
//                    0.5f - ratio/2, 1f,
//                    0.5f + ratio/2, 1f,
//                    0.5f - ratio/2, 0f,
//                    0.5f + ratio/2, 0f,
//            };
            //Portrait display without enlarging
            final float vtx[] = {
                    //UV
                    0f, 0.5f + ratio/2,
                    1f, 0.5f + ratio/2,
                    0f, 0.5f - ratio/2,
                    1f, 0.5f - ratio/2,
            };
            ByteBuffer bb = ByteBuffer.allocateDirect(4 * vtx.length);
            bb.order(ByteOrder.nativeOrder());
            mCameraTexCoordBuffer = bb.asFloatBuffer();
            mCameraTexCoordBuffer.put(vtx);
            mCameraTexCoordBuffer.position(0);
        }
    }

    public void draw(final float[] tex_mtx) {
        if (mScreenW <= 0 || mScreenH <= 0) {
            return;
        }

        //Set viewport size
        GLES20.glViewport(0, 0, mScreenW, mScreenH);
        GLES20.glClearColor(0f, 0f, 0f, 1f);
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

        GLES20.glUseProgram(mProgram);

        //Set vertex coordinates
        if(mirrorImage){
            mNormalVtxBuf.position(0);
            GLES20.glVertexAttribPointer(maPositionHandle,
                    3, GLES20.GL_FLOAT, false, 4 * 3, mNormalVtxBuf);
        }else{
            mNormalVtxBufImage.position(0);
            GLES20.glVertexAttribPointer(maPositionHandle,
                    3, GLES20.GL_FLOAT, false, 4 * 3, mNormalVtxBufImage);
        }
        GLES20.glEnableVertexAttribArray(maPositionHandle);

        //Set texture coordinates
        mCameraTexCoordBuffer.position(0);
        GLES20.glVertexAttribPointer(maTexCoordHandle,
                2, GLES20.GL_FLOAT, false, 4 * 2, mCameraTexCoordBuffer);
        GLES20.glEnableVertexAttribArray(maTexCoordHandle);

        //Set transformation matrix
        if(muPosMtxHandle>= 0)
            GLES20.glUniformMatrix4fv(muPosMtxHandle, 1, false, mPosMtx, 0);

        if(muTexMtxHandle>= 0)
            GLES20.glUniformMatrix4fv(muTexMtxHandle, 1, false, tex_mtx, 0);

        //Bind texture, render texture
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mFboTexId);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    }

    private void initGL() {
        GlUtil.checkGlError("initGL_S");

        final String vertexShader =
                //
                "attribute vec4 position;\n" +
                        "attribute vec4 inputTextureCoordinate;\n" +
                        "uniform   mat4 uPosMtx;\n" +
                        "varying   vec2 textureCoordinate;\n" +
                        "void main() {\n" +
                        "  gl_Position = uPosMtx * position;\n" +
                        "  textureCoordinate   = inputTextureCoordinate.xy;\n" +
                        "}\n";
        final String fragmentShader =
                //
                "precision mediump float;\n" +
                        "uniform sampler2D uSampler;\n" +
                        "varying vec2  textureCoordinate;\n" +
                        "void main() {\n" +
                        "  gl_FragColor = texture2D(uSampler, textureCoordinate);\n" +
                        "}\n";


         String SHARDE_NULL_VERTEX = "attribute vec4 position;\n" +
                "attribute vec4 inputTextureCoordinate;\n" +
                "\n" +
                "uniform   mat4 uPosMtx;\n" +
                "uniform   mat4 uTexMtx;\n" +
                "varying   vec2 textureCoordinate;\n" +
                "void main() {\n" +
                "  gl_Position = uPosMtx * position;\n" +
                "  textureCoordinate   = (uTexMtx * inputTextureCoordinate).xy;\n" +
                "}";

         String SHARDE_NULL_FRAGMENT = "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +
                "varying vec2 textureCoordinate;\n" +
                "uniform samplerExternalOES uSampler;\n" +
                "void main() {\n" +
                "    vec4 tc = texture2D(uSampler, textureCoordinate);\n" +
                "    gl_FragColor = vec4(tc.r, tc.g, tc.b, 1.0);\n" +
                "}";

//        mProgram = GlUtil.createProgram(vertexShader, fragmentShader);
        mProgram = GlUtil.createProgram(SHARDE_NULL_VERTEX, SHARDE_NULL_FRAGMENT);
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "position");
        maTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
        muPosMtxHandle = GLES20.glGetUniformLocation(mProgram, "uPosMtx");
        muTexMtxHandle = GLES20.glGetUniformLocation(mProgram, "uTexMtx");
        muSamplerHandle = GLES20.glGetUniformLocation(mProgram, "uSampler");

        GlUtil.checkGlError("initGL_E");
    }
}
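
RenderScreen also depends on a small GlUtil helper class that the listing does not show. The sketch below covers just the members used above; the exact vertex layouts and error handling are assumptions about the demo's implementation (createVertexBufferImage() is taken to be the horizontally mirrored full-screen quad):

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import android.opengl.GLES20;
import android.opengl.Matrix;

public class GlUtil {
    //Full-screen quad as a triangle strip: (x, y, z) per vertex
    private static final float[] VERTEX = {
            -1f, -1f, 0f,
             1f, -1f, 0f,
            -1f,  1f, 0f,
             1f,  1f, 0f,
    };
    //The same quad mirrored horizontally (assumed meaning of the "Image" variant)
    private static final float[] VERTEX_MIRRORED = {
             1f, -1f, 0f,
            -1f, -1f, 0f,
             1f,  1f, 0f,
            -1f,  1f, 0f,
    };

    public static FloatBuffer createVertexBuffer() {
        return toFloatBuffer(VERTEX);
    }

    public static FloatBuffer createVertexBufferImage() {
        return toFloatBuffer(VERTEX_MIRRORED);
    }

    private static FloatBuffer toFloatBuffer(float[] data) {
        FloatBuffer fb = ByteBuffer.allocateDirect(4 * data.length)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        fb.put(data).position(0);
        return fb;
    }

    public static float[] createIdentityMtx() {
        float[] m = new float[16];
        Matrix.setIdentityM(m, 0);
        return m;
    }

    //Compile both shaders and link them into a program
    public static int createProgram(String vertexSrc, String fragmentSrc) {
        int vs = loadShader(GLES20.GL_VERTEX_SHADER, vertexSrc);
        int fs = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSrc);
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vs);
        GLES20.glAttachShader(program, fs);
        GLES20.glLinkProgram(program);
        return program;
    }

    private static int loadShader(int type, String source) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        return shader;
    }

    public static void checkGlError(String op) {
        int error = GLES20.glGetError();
        if (error != GLES20.GL_NO_ERROR) {
            throw new RuntimeException(op + ": glError 0x" + Integer.toHexString(error));
        }
    }
}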
