Using SurfaceTexture in Android

Asked 2020-12-24 03:51

I want to play a video into an OpenGL texture on a XOOM running Android 3.0. I have come across SurfaceTexture in the Google developer docs, which was added in API 11: http://

4 Answers
  • 2020-12-24 04:05

    NVIDIA ships a full, working sample in its Tegra Android Developer Pack. The sample is written in pure Java and runs in a standard Eclipse + Android SDK setup, so you just need to install the samples. The project is named surfacetexture (or something similar). It works nicely; hope it helps.

  • 2020-12-24 04:07

    If you are targeting API levels 11 to 14, you can simply define GL_TEXTURE_EXTERNAL_OES yourself by placing

    private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
    

    in your code. This seems to work just fine for me.
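    The hand-defined constant can then be used anywhere the GLES11Ext constant would be. A minimal sketch of binding a texture for a SurfaceTexture with it, mirroring the setup in the other answers:

    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);
    // Same call as with GLES11Ext.GL_TEXTURE_EXTERNAL_OES, just using the local constant.
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, textures[0]);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    SurfaceTexture surfaceTexture = new SurfaceTexture(textures[0]);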

  • 2020-12-24 04:11

    Example code. This creates a new external texture suitable for use in a SurfaceTexture, then wraps it in said SurfaceTexture and passes it to the camera as a surface to write the preview into.

    int[] textures = new int[1];
    // generate one texture pointer and bind it as an external texture.
    GLES20.glGenTextures(1, textures, 0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
    // No mip-mapping with camera source.
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
    // Clamp to edge is only option.
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
    
    
    int texture_id = textures[0];
    SurfaceTexture mTexture = new SurfaceTexture(texture_id);
    mTexture.setOnFrameAvailableListener(this);
    
    Camera cam = Camera.open();
    cam.setPreviewTexture(mTexture);  // throws IOException; wrap in try/catch in real code
    

    Note that if you render this texture, you'll need to be careful: it is NOT a regular 2D texture, so it has to be sampled as an external texture in the shader (samplerExternalOES, with the GL_OES_EGL_image_external extension enabled), as sketched below.
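    A minimal fragment shader for that, written as a Java string constant in the same style as the demo in the next answer (the uniform and varying names are just illustrative):

    // Requires the OES extension; sample through samplerExternalOES, not sampler2D.
    private static final String EXTERNAL_FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "varying vec2 vTextureCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
            "}\n";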

  • 2020-12-24 04:23

    A demo is located at https://github.com/crossle/MediaPlayerSurface

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;
    import java.nio.FloatBuffer;
    
    import javax.microedition.khronos.egl.EGLConfig;
    import javax.microedition.khronos.opengles.GL10;
    
    import android.content.Context;
    import android.graphics.SurfaceTexture;
    import android.media.MediaPlayer;
    import android.opengl.GLES20;
    import android.opengl.GLSurfaceView;
    import android.opengl.Matrix;
    import android.util.Log;
    import android.view.Surface;
    
    class VideoSurfaceView extends GLSurfaceView {
    
        VideoRender mRenderer;
        private MediaPlayer mMediaPlayer = null;
    
        public VideoSurfaceView(Context context, MediaPlayer mp) {
            super(context);
    
            setEGLContextClientVersion(2);
            mMediaPlayer = mp;
            mRenderer = new VideoRender(context);
            setRenderer(mRenderer);
        }
    
        @Override
        public void onResume() {
            queueEvent(new Runnable(){
                    public void run() {
                        mRenderer.setMediaPlayer(mMediaPlayer);
                    }});
    
            super.onResume();
        }
    
        private static class VideoRender
            implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
            private static String TAG = "VideoRender";
    
            private static final int FLOAT_SIZE_BYTES = 4;
            private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
            private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
            private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
            private final float[] mTriangleVerticesData = {
                // X, Y, Z, U, V
                -1.0f, -1.0f, 0, 0.f, 0.f,
                1.0f, -1.0f, 0, 1.f, 0.f,
                -1.0f,  1.0f, 0, 0.f, 1.f,
                1.0f,  1.0f, 0, 1.f, 1.f,
            };
    
            private FloatBuffer mTriangleVertices;
    
            private final String mVertexShader =
                    "uniform mat4 uMVPMatrix;\n" +
                    "uniform mat4 uSTMatrix;\n" +
                    "attribute vec4 aPosition;\n" +
                    "attribute vec4 aTextureCoord;\n" +
                    "varying vec2 vTextureCoord;\n" +
                    "void main() {\n" +
                    "  gl_Position = uMVPMatrix * aPosition;\n" +
                    "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                    "}\n";
    
            private final String mFragmentShader =
                    "#extension GL_OES_EGL_image_external : require\n" +
                    "precision mediump float;\n" +
                    "varying vec2 vTextureCoord;\n" +
                    "uniform samplerExternalOES sTexture;\n" +
                    "void main() {\n" +
                    "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                    "}\n";
    
            private float[] mMVPMatrix = new float[16];
            private float[] mSTMatrix = new float[16];
    
            private int mProgram;
            private int mTextureID;
            private int muMVPMatrixHandle;
            private int muSTMatrixHandle;
            private int maPositionHandle;
            private int maTextureHandle;
    
            private SurfaceTexture mSurface;
            private boolean updateSurface = false;
    
            private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
    
            private MediaPlayer mMediaPlayer;
    
            public VideoRender(Context context) {
                mTriangleVertices = ByteBuffer.allocateDirect(
                    mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
                        .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mTriangleVertices.put(mTriangleVerticesData).position(0);
    
                Matrix.setIdentityM(mSTMatrix, 0);
            }
    
            public void setMediaPlayer(MediaPlayer player) {
                mMediaPlayer = player;
            }
    
            public void onDrawFrame(GL10 glUnused) {
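                // onFrameAvailable() can fire on an arbitrary thread, but updateTexImage()
                // must run on the GL thread, so the listener only sets a flag that is
                // consumed here.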
                synchronized(this) {
                    if (updateSurface) {
                        mSurface.updateTexImage();
                        mSurface.getTransformMatrix(mSTMatrix);
                        updateSurface = false;
                    }
                }
    
                GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
                GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
    
                GLES20.glUseProgram(mProgram);
                checkGlError("glUseProgram");
    
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
    
                mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
                GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
                checkGlError("glVertexAttribPointer maPosition");
                GLES20.glEnableVertexAttribArray(maPositionHandle);
                checkGlError("glEnableVertexAttribArray maPositionHandle");
    
                mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
                // Only two UV components are stored per vertex (see mTriangleVerticesData).
                GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
                checkGlError("glVertexAttribPointer maTextureHandle");
                GLES20.glEnableVertexAttribArray(maTextureHandle);
                checkGlError("glEnableVertexAttribArray maTextureHandle");
    
                Matrix.setIdentityM(mMVPMatrix, 0);
                GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
                GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
    
                GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
                checkGlError("glDrawArrays");
                GLES20.glFinish();
    
            }
    
            public void onSurfaceChanged(GL10 glUnused, int width, int height) {
    
            }
    
            public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
                mProgram = createProgram(mVertexShader, mFragmentShader);
                if (mProgram == 0) {
                    return;
                }
                maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
                checkGlError("glGetAttribLocation aPosition");
                if (maPositionHandle == -1) {
                    throw new RuntimeException("Could not get attrib location for aPosition");
                }
                maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
                checkGlError("glGetAttribLocation aTextureCoord");
                if (maTextureHandle == -1) {
                    throw new RuntimeException("Could not get attrib location for aTextureCoord");
                }
    
                muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
                checkGlError("glGetUniformLocation uMVPMatrix");
                if (muMVPMatrixHandle == -1) {
                    throw new RuntimeException("Could not get attrib location for uMVPMatrix");
                }
    
                muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
                checkGlError("glGetUniformLocation uSTMatrix");
                if (muSTMatrixHandle == -1) {
                    throw new RuntimeException("Could not get attrib location for uSTMatrix");
                }
    
    
                int[] textures = new int[1];
                GLES20.glGenTextures(1, textures, 0);
    
                mTextureID = textures[0];
                GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
                checkGlError("glBindTexture mTextureID");
    
                GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                                       GLES20.GL_NEAREST);
                GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                                       GLES20.GL_LINEAR);
    
                /*
                 * Create the SurfaceTexture that will feed this textureID,
                 * and pass it to the MediaPlayer
                 */
                mSurface = new SurfaceTexture(mTextureID);
                mSurface.setOnFrameAvailableListener(this);
    
                Surface surface = new Surface(mSurface);
                mMediaPlayer.setSurface(surface);
                surface.release();
    
                try {
                    mMediaPlayer.prepare();
                } catch (IOException t) {
                    Log.e(TAG, "media player prepare failed");
                }
    
                synchronized(this) {
                    updateSurface = false;
                }
    
                mMediaPlayer.start();
            }
    
            // Called from an arbitrary thread; just record that a new frame is ready.
            public synchronized void onFrameAvailable(SurfaceTexture surface) {
                updateSurface = true;
            }
    
            private int loadShader(int shaderType, String source) {
                int shader = GLES20.glCreateShader(shaderType);
                if (shader != 0) {
                    GLES20.glShaderSource(shader, source);
                    GLES20.glCompileShader(shader);
                    int[] compiled = new int[1];
                    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
                    if (compiled[0] == 0) {
                        Log.e(TAG, "Could not compile shader " + shaderType + ":");
                        Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
                        GLES20.glDeleteShader(shader);
                        shader = 0;
                    }
                }
                return shader;
            }
    
            private int createProgram(String vertexSource, String fragmentSource) {
                int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
                if (vertexShader == 0) {
                    return 0;
                }
                int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
                if (pixelShader == 0) {
                    return 0;
                }
    
                int program = GLES20.glCreateProgram();
                if (program != 0) {
                    GLES20.glAttachShader(program, vertexShader);
                    checkGlError("glAttachShader");
                    GLES20.glAttachShader(program, pixelShader);
                    checkGlError("glAttachShader");
                    GLES20.glLinkProgram(program);
                    int[] linkStatus = new int[1];
                    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
                    if (linkStatus[0] != GLES20.GL_TRUE) {
                        Log.e(TAG, "Could not link program: ");
                        Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                        GLES20.glDeleteProgram(program);
                        program = 0;
                    }
                }
                return program;
            }
    
            private void checkGlError(String op) {
                int error;
                while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
                    Log.e(TAG, op + ": glError " + error);
                    throw new RuntimeException(op + ": glError " + error);
                }
            }
    
        }  // End of class VideoRender.
    
    }  // End of class VideoSurfaceView.
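
    For completeness, a rough sketch of how an Activity might wire this view up. The file path is a placeholder and error handling is minimal; the renderer above calls prepare() and start() itself once its GL surface exists, so the data source just needs to be set first:

    import java.io.IOException;

    import android.app.Activity;
    import android.media.MediaPlayer;
    import android.os.Bundle;
    import android.util.Log;

    public class VideoActivity extends Activity {

        private VideoSurfaceView mVideoView;
        private MediaPlayer mMediaPlayer;

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);

            mMediaPlayer = new MediaPlayer();
            try {
                // Placeholder source; point this at your own file, asset, or URI.
                mMediaPlayer.setDataSource("/sdcard/video.mp4");
            } catch (IOException e) {
                Log.e("VideoActivity", "setDataSource failed", e);
            }

            // The renderer prepares and starts the player in onSurfaceCreated().
            mVideoView = new VideoSurfaceView(this, mMediaPlayer);
            setContentView(mVideoView);
        }

        @Override
        protected void onResume() {
            super.onResume();
            mVideoView.onResume();
        }

        @Override
        protected void onPause() {
            mVideoView.onPause();
            super.onPause();
        }
    }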
    