Question
I have a custom GLSurfaceView created by DJI. The video rendering is done by their own compiled class, and it allows only one GLSurfaceView to receive their encoded video.
I would like to duplicate the video output on the screen so I can view it with a VR viewer, but I'm not sure if that is possible. I know that by copying their custom GLSurfaceView I can do some manipulation, such as controlling the size of the video output: if I change the line "SplitDjiSurfaceView.w = width;" to use "width / 2", I get the left side working well, as sketched below.
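Concretely, the experiment looks like this in my copied class (SplitDjiSurfaceView is just my name for the copy; only the width assignment changes):

// Inside the copied class's renderer; drone-type branch omitted for brevity.
public void onSurfaceChanged(GL10 gl, int width, int height) {
    SplitDjiSurfaceView.w = width / 2;  // was: width -- video now fills only the left half
    SplitDjiSurfaceView.h = height;
    FPVController.native_GLInit(SplitDjiSurfaceView.w, SplitDjiSurfaceView.h);
}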
Q: How can I duplicate the video and play the two copies side by side on one GLSurfaceView that I can then give to the VideoCallBack?
Custom GLSurfaceView (decompiled):
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.opengl.GLSurfaceView.EGLConfigChooser;
import android.opengl.GLSurfaceView.EGLContextFactory;
import android.opengl.GLSurfaceView.Renderer;
import android.util.AttributeSet;
import android.util.Log;
import dji.midware.natives.FPVController;
import dji.sdk.api.DJIDrone;
import dji.sdk.api.Camera.DJICameraSettingsTypeDef.CameraPreviewResolustionType;
import dji.sdk.api.DJIDroneTypeDef.DJIDroneType;
import dji.sdk.natives.CamShow;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.opengles.GL10;
public class DjiGLSurfaceView extends GLSurfaceView {
private static String TAG = "MyGLSurfaceView.java";
private static final boolean DEBUG = false;
private static int w = 0;
private static int h = 0;
private boolean isPause = false;
public DjiGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
this.init(false, 0, 0);
}
public DjiGLSurfaceView(Context context) {
super(context);
this.init(false, 0, 0);
}
public DjiGLSurfaceView(Context context, boolean translucent, int depth, int stencil) {
super(context);
this.init(translucent, depth, stencil);
}
private void init(boolean translucent, int depth, int stencil) {
    if (translucent) {
        this.getHolder().setFormat(-3); // -3 == PixelFormat.TRANSLUCENT
    }
    this.setEGLContextFactory(new ContextFactory());
    this.setEGLConfigChooser(translucent ? new ConfigChooser(8, 8, 8, 8, depth, stencil) : new ConfigChooser(5, 6, 5, 0, depth, stencil));
    this.setRenderer(new MyRenderer());
    this.setRenderMode(0); // 0 == RENDERMODE_WHEN_DIRTY: render only on requestRender()
}
private static void checkEglError(String prompt, EGL10 egl) {
    int error;
    // 12288 == EGL10.EGL_SUCCESS; loop until the EGL error queue is drained
    while ((error = egl.eglGetError()) != 12288) {
        Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
    }
}
// Registered by name with the native layer in start(); invoked from native
// code whenever a new frame is ready, scheduling a redraw on the GL thread.
public int fcb() {
    this.requestRender();
    return 0;
}
private int getType(int type) {
    // Maps a CameraPreviewResolustionType value to the native stream type.
    int result = 1;
    if (type == 0) {
        result = 1;
    } else if (type == 1) {
        result = 2;
    } else if (type == 2) {
        result = 4;
    } else if (type == 3) {
        result = 8;
    }
    return result;
}
public boolean start() {
    (new Thread() {
        public void run() {
            // Register fcb() (by method name) as the native stream callback.
            if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
                CamShow.native_setOnStreamCB(DjiGLSurfaceView.this, "fcb");
            } else {
                FPVController.native_setOnStreamCB(DjiGLSurfaceView.this, "fcb");
            }
        }
    }).start();
    return true;
}
public boolean setStreamType(final CameraPreviewResolustionType type) {
boolean result = false;
if(DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
(new Thread() {
public void run() {
CamShow.native_pauseStream(true);
CamShow.native_setType(DjiGLSurfaceView.this.getType(type.value()));
CamShow.native_pauseStream(false);
try {
Thread.sleep(1000L);
} catch (InterruptedException var2) {
var2.printStackTrace();
}
}
}).start();
result = true;
} else {
result = false;
}
return result;
}
public boolean setDataToDecoder(byte[] videoBuffer, int size) {
    // Feeds an encoded video buffer to the native decoder; 0 means success.
    int ret;
    if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
        ret = CamShow.native_setDataToDecoder(videoBuffer, size);
    } else {
        ret = FPVController.native_setDataToDecoder(videoBuffer, size);
    }
    return ret == 0;
}
public boolean pause() {
this.isPause = true;
if(DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
CamShow.native_pauseStream(true);
}
return true;
}
public boolean resume() {
if(DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
CamShow.native_pauseStream(false);
}
this.isPause = false;
return true;
}
public boolean destroy() {
this.isPause = false;
return true;
}
public boolean getIsPause() {
return this.isPause;
}
private void setIsPause(boolean isPause) {
this.isPause = isPause;
}
private static class ConfigChooser implements EGLConfigChooser {
private static int EGL_OPENGL_ES2_BIT = 4;
private static int[] s_configAttribs2;
protected int mRedSize;
protected int mGreenSize;
protected int mBlueSize;
protected int mAlphaSize;
protected int mDepthSize;
protected int mStencilSize;
private int[] mValue = new int[1];
static {
    // {EGL_RED_SIZE, 4, EGL_GREEN_SIZE, 4, EGL_BLUE_SIZE, 4,
    //  EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_NONE} -- the decompiler
    //  substituted EGL_OPENGL_ES2_BIT for every literal 4.
    s_configAttribs2 = new int[]{12324, 4, 12323, 4, 12322, 4, 12352, EGL_OPENGL_ES2_BIT, 12344};
}
public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
this.mRedSize = r;
this.mGreenSize = g;
this.mBlueSize = b;
this.mAlphaSize = a;
this.mDepthSize = depth;
this.mStencilSize = stencil;
}
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
int[] num_config = new int[1];
egl.eglChooseConfig(display, s_configAttribs2, (EGLConfig[])null, 0, num_config);
int numConfigs = num_config[0];
if(numConfigs <= 0) {
throw new IllegalArgumentException("No configs match configSpec");
} else {
EGLConfig[] configs = new EGLConfig[numConfigs];
egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
return this.chooseConfig(egl, display, configs);
}
}
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs) {
    for (EGLConfig config : configs) {
        int d = this.findConfigAttrib(egl, display, config, 12325, 0); // EGL_DEPTH_SIZE
        int s = this.findConfigAttrib(egl, display, config, 12326, 0); // EGL_STENCIL_SIZE
        if (d >= this.mDepthSize && s >= this.mStencilSize) {
            int r = this.findConfigAttrib(egl, display, config, 12324, 0); // EGL_RED_SIZE
            int g = this.findConfigAttrib(egl, display, config, 12323, 0); // EGL_GREEN_SIZE
            int b = this.findConfigAttrib(egl, display, config, 12322, 0); // EGL_BLUE_SIZE
            int a = this.findConfigAttrib(egl, display, config, 12321, 0); // EGL_ALPHA_SIZE
            if (r == this.mRedSize && g == this.mGreenSize && b == this.mBlueSize && a == this.mAlphaSize) {
                return config;
            }
        }
    }
    return null;
}
private int findConfigAttrib(EGL10 egl, EGLDisplay display, EGLConfig config, int attribute, int defaultValue) {
return egl.eglGetConfigAttrib(display, config, attribute, this.mValue)?this.mValue[0]:defaultValue;
}
private void printConfigs(EGL10 egl, EGLDisplay display, EGLConfig[] configs) {
int numConfigs = configs.length;
Log.w(DjiGLSurfaceView.TAG, String.format("%d configurations", new Object[]{Integer.valueOf(numConfigs)}));
for(int i = 0; i < numConfigs; ++i) {
Log.w(DjiGLSurfaceView.TAG, String.format("Configuration %d:\n", new Object[]{Integer.valueOf(i)}));
this.printConfig(egl, display, configs[i]);
}
}
private void printConfig(EGL10 egl, EGLDisplay display, EGLConfig config) {
int[] attributes = new int[]{12320, 12321, 12322, 12323, 12324, 12325, 12326, 12327, 12328, 12329, 12330, 12331, 12332, 12333, 12334, 12335, 12336, 12337, 12338, 12339, 12340, 12343, 12342, 12341, 12345, 12346, 12347, 12348, 12349, 12350, 12351, 12352, 12354};
String[] names = new String[]{"EGL_BUFFER_SIZE", "EGL_ALPHA_SIZE", "EGL_BLUE_SIZE", "EGL_GREEN_SIZE", "EGL_RED_SIZE", "EGL_DEPTH_SIZE", "EGL_STENCIL_SIZE", "EGL_CONFIG_CAVEAT", "EGL_CONFIG_ID", "EGL_LEVEL", "EGL_MAX_PBUFFER_HEIGHT", "EGL_MAX_PBUFFER_PIXELS", "EGL_MAX_PBUFFER_WIDTH", "EGL_NATIVE_RENDERABLE", "EGL_NATIVE_VISUAL_ID", "EGL_NATIVE_VISUAL_TYPE", "EGL_PRESERVED_RESOURCES", "EGL_SAMPLES", "EGL_SAMPLE_BUFFERS", "EGL_SURFACE_TYPE", "EGL_TRANSPARENT_TYPE", "EGL_TRANSPARENT_RED_VALUE", "EGL_TRANSPARENT_GREEN_VALUE", "EGL_TRANSPARENT_BLUE_VALUE", "EGL_BIND_TO_TEXTURE_RGB", "EGL_BIND_TO_TEXTURE_RGBA", "EGL_MIN_SWAP_INTERVAL", "EGL_MAX_SWAP_INTERVAL", "EGL_LUMINANCE_SIZE", "EGL_ALPHA_MASK_SIZE", "EGL_COLOR_BUFFER_TYPE", "EGL_RENDERABLE_TYPE", "EGL_CONFORMANT"};
int[] value = new int[1];
for(int i = 0; i < attributes.length; ++i) {
int attribute = attributes[i];
String name = names[i];
if(egl.eglGetConfigAttrib(display, config, attribute, value)) {
Log.w(DjiGLSurfaceView.TAG, String.format(" %s: %d\n", new Object[]{name, Integer.valueOf(value[0])}));
}
}
}
}
private static class ContextFactory implements EGLContextFactory {
private static int EGL_CONTEXT_CLIENT_VERSION = 12440; // 0x3098
private ContextFactory() {
}
public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
Log.w(DjiGLSurfaceView.TAG, "creating OpenGL ES 2.0 context");
DjiGLSurfaceView.checkEglError("Before eglCreateContext", egl);
int[] attrib_list = new int[]{EGL_CONTEXT_CLIENT_VERSION, 2, 12344};
EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
DjiGLSurfaceView.checkEglError("After eglCreateContext", egl);
return context;
}
public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
egl.eglDestroyContext(display, context);
}
}
private class MyRenderer implements Renderer {
private MyRenderer() {
}
public void onDrawFrame(GL10 gl) {
    // The native library draws the current video frame here.
    if (!DjiGLSurfaceView.this.isPause) {
        if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
            CamShow.native_GLDrawFrame();
        } else {
            FPVController.native_GLDrawFrame();
        }
    }
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
    // The size passed to native_GLInit() controls the size of the video
    // output -- this is the assignment referenced in the question.
    DjiGLSurfaceView.w = width;
    DjiGLSurfaceView.h = height;
    if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
        CamShow.native_GLInit(DjiGLSurfaceView.w, DjiGLSurfaceView.h);
    } else {
        FPVController.native_GLInit(DjiGLSurfaceView.w, DjiGLSurfaceView.h);
    }
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}
}
}
Implementation of the GLSurfaceView with the VideoCallBack:
mReceivedVideoDataCallBack = new DJIReceivedVideoDataCallBack() {
    @Override
    public void onResult(byte[] videoBuffer, int size) {
        // Encoded frames from the drone go straight to the view's decoder.
        djiGLSurfaceView.setDataToDecoder(videoBuffer, size);
    }
};
DJIDrone.getDjiCamera().setReceivedVideoDataCallBack(mReceivedVideoDataCallBack);
Putting two SurfaceViews side by side does not work: one fails while the other works fine. I'm guessing the renderer can only drive one view, so everything must go through a single GLSurfaceView.
I must be leaving something out, so please feel free to ask for anything.
Answer 1:
I'm not familiar with the library, but my guess is that it works by using the current EGL context and surface, probably decoding the video in software and uploading it as a GLES texture.
If that's the case, you can configure EGL with a different surface, and when you call into native_GLDrawFrame()
it will draw onto that instead. If the current surface is an FBO, it will use that instead of the SurfaceView. You can then use GLES to render the attached texture as often as you want. (I assume you're trying for some sort of stereo effect.)
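For illustration, a minimal texture-backed FBO setup looks something like this (plain GLES 2.0; the helper name and parameters are mine, nothing here is DJI API):

import android.opengl.GLES20;

// Hypothetical helper: creates a texture-backed FBO. While the returned
// framebuffer is bound, anything drawn -- including native_GLDrawFrame(),
// if my guess above is right -- lands in the texture, not the window surface.
public static int[] createOffscreenTarget(int width, int height) {
    int[] tex = new int[1];
    int[] fbo = new int[1];
    GLES20.glGenTextures(1, tex, 0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex[0]);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height,
            0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    // CLAMP_TO_EDGE is required for non-power-of-two textures on GLES 2.0.
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glGenFramebuffers(1, fbo, 0);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo[0]);
    GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
            GLES20.GL_TEXTURE_2D, tex[0], 0);
    if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) {
        throw new RuntimeException("framebuffer not complete");
    }
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); // back to the window surface
    return new int[]{fbo[0], tex[0]};                   // {framebuffer id, texture id}
}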
One example of this sort of thing can be found in Grafika's "record GL app" activity. The doFrame() method, when in RECMETHOD_FBO mode, renders to an FBO, then blits from it twice (once to the screen, once to a video encoder). Something like this should work for you.
You will need to lift the EGL/GLES code from Grafika if you don't have an equivalent.
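Adapted to your renderer, the per-frame flow could look roughly like this. All the names (fboId, fboTex, videoWidth/Height, surfaceWidth/Height) are placeholders from the setup above, and drawTexturedQuad() is a stand-in for a helper that draws a texture into the current viewport (Grafika's FullFrameRect class does that job):

public void onDrawFrame(GL10 gl) {
    // 1. Point rendering at the offscreen FBO and let the library draw into it.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fboId);
    GLES20.glViewport(0, 0, videoWidth, videoHeight);
    FPVController.native_GLDrawFrame(); // frame now lives in fboTex

    // 2. Switch back to the window surface and draw the captured texture
    //    twice, once per eye, for the side-by-side layout.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    GLES20.glViewport(0, 0, surfaceWidth / 2, surfaceHeight);                // left half
    drawTexturedQuad(fboTex);
    GLES20.glViewport(surfaceWidth / 2, 0, surfaceWidth / 2, surfaceHeight); // right half
    drawTexturedQuad(fboTex);
    // GLSurfaceView calls eglSwapBuffers() after this method returns.
}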
(You can't read pixels back from a SurfaceView, so you need to render them twice. I assume that, if the library did that for you, you wouldn't be asking this, so capturing the rendering and blitting it is necessary.)
It's also entirely possible that my guess at the library's workings is wrong, so some experimentation may be necessary.
Edit: I should note that, while you can't get pixels back from a SurfaceView, you can read them before the frame is submitted. So if the library works the way I think it does, it's rendering with GLES without calling eglSwapBuffers() (which is invoked by GLSurfaceView when onDrawFrame() returns). So in onDrawFrame() you could read the pixels back with glReadPixels(), upload them to a texture in a second GLSurfaceView's context with glTexImage2D(), and draw them on a different surface. This is slower than the FBO approach because the pixels have to be copied out to the CPU and back in, but it might work.
I should also point out that, whatever solution you arrive at, you are much better off having both sides of the video on a single Surface, rather than two separate SurfaceViews. If you use two Surfaces, you can't guarantee that both of them will be updated on the same display refresh, so you might have frames where one side is slightly behind the other. It would be much better to capture the output and then render it twice to a single SurfaceView. (GLSurfaceView is just a SurfaceView with some additional code to handle the EGL setup and thread management.)
Source: https://stackoverflow.com/questions/31340382/manipulate-a-custom-glsurfaceview-in-android