我有由D.J.I創建的自定義GLSurfaceView。視頻渲染是由他們自己編譯的類完成的,它只允許一個GLSurfaceView接收他們編碼的視頻。在Android中操作自定義GLSurfaceView
我想把視頻輸出複製成兩份並排顯示在屏幕上,以便用 V.R 查看器觀看,但不確定是否可行。我知道通過修改他們自定義的 GLSurfaceView 可以做一些操作,比如控制視頻輸出的大小。(如果我把「DjiGLSurfaceView.w = width;」這一行改成「width / 2」,左半邊就能正常顯示。)
問:如何把視頻複製成兩份,在同一個 GLSurfaceView 上並排播放,同時繼續由 VideoCallBack 提供數據?
定製GLSurfaceView:
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.opengl.GLSurfaceView.EGLConfigChooser;
import android.opengl.GLSurfaceView.EGLContextFactory;
import android.opengl.GLSurfaceView.Renderer;
import android.util.AttributeSet;
import android.util.Log;
import dji.midware.natives.FPVController;
import dji.sdk.api.DJIDrone;
import dji.sdk.api.Camera.DJICameraSettingsTypeDef.CameraPreviewResolustionType;
import dji.sdk.api.DJIDroneTypeDef.DJIDroneType;
import dji.sdk.natives.CamShow;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.opengles.GL10;
/**
 * GLSurfaceView that displays the DJI FPV video stream.
 *
 * <p>The native decoder ({@link CamShow} for Vision-series drones,
 * {@link FPVController} otherwise) draws directly into this view's GL
 * context. The native layer keeps a single global surface size, so only
 * one instance of this view can render the stream at a time.
 *
 * <p>NOTE(review): this source was recovered from a decompiler; the
 * original version contained artifacts such as
 * {@code new ContextFactory((ContextFactory)null)} that do not compile.
 * Those calls have been corrected to plain no-arg constructor calls.
 */
public class DjiGLSurfaceView extends GLSurfaceView {

    private static String TAG = "MyGLSurfaceView.java";
    private static final boolean DEBUG = false;

    // Last surface size reported by onSurfaceChanged; forwarded to the
    // native GL initializer. Static because the native decoder supports
    // only one active surface at a time.
    private static int w = 0;
    private static int h = 0;

    // When true, onDrawFrame skips the native draw call.
    private boolean isPause = false;

    public DjiGLSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        this.init(false, 0, 0);
    }

    public DjiGLSurfaceView(Context context) {
        super(context);
        this.init(false, 0, 0);
    }

    public DjiGLSurfaceView(Context context, boolean translucent, int depth, int stencil) {
        super(context);
        this.init(translucent, depth, stencil);
    }

    /**
     * Configures the EGL context factory, config chooser and renderer.
     *
     * @param translucent whether the surface should use a translucent
     *                    (RGBA8888) pixel format instead of RGB565
     * @param depth       minimum depth-buffer size in bits
     * @param stencil     minimum stencil-buffer size in bits
     */
    private void init(boolean translucent, int depth, int stencil) {
        if (translucent) {
            // -3 in the decompiled original; named constant for clarity.
            this.getHolder().setFormat(android.graphics.PixelFormat.TRANSLUCENT);
        }
        this.setEGLContextFactory(new ContextFactory());
        // Translucent surfaces need an alpha channel (8888); otherwise 565.
        this.setEGLConfigChooser(translucent
                ? new ConfigChooser(8, 8, 8, 8, depth, stencil)
                : new ConfigChooser(5, 6, 5, 0, depth, stencil));
        this.setRenderer(new MyRenderer());
        // Frames are pushed by the native stream callback (fcb), so only
        // render when explicitly requested.
        this.setRenderMode(RENDERMODE_WHEN_DIRTY);
    }

    /** Drains and logs any pending EGL errors (eglGetError clears one per call). */
    private static void checkEglError(String prompt, EGL10 egl) {
        int error;
        while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
            Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
        }
    }

    /**
     * Stream callback invoked reflectively from native code (registered by
     * name in {@link #start()}). Do NOT rename this method.
     *
     * @return always 0 (status code expected by the native layer)
     */
    public int fcb() {
        this.requestRender();
        return 0;
    }

    /**
     * Maps a {@link CameraPreviewResolustionType} ordinal to the bit flag
     * the native decoder expects. Unknown values fall back to 1.
     */
    private int getType(int type) {
        switch (type) {
            case 1:
                return 2;
            case 2:
                return 4;
            case 3:
                return 8;
            case 0:
            default:
                return 1;
        }
    }

    /**
     * Registers {@link #fcb()} with the native decoder so new frames
     * trigger a render. Runs on a worker thread because the native call
     * may block.
     *
     * @return always {@code true}
     */
    public boolean start() {
        (new Thread() {
            public void run() {
                if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
                    CamShow.native_setOnStreamCB(DjiGLSurfaceView.this, "fcb");
                } else {
                    FPVController.native_setOnStreamCB(DjiGLSurfaceView.this, "fcb");
                }
            }
        }).start();
        return true;
    }

    /**
     * Changes the preview resolution of the stream. Only supported on
     * Vision-series drones.
     *
     * @return {@code true} if the change was scheduled, {@code false} for
     *         non-Vision drones
     */
    public boolean setStreamType(final CameraPreviewResolustionType type) {
        if (DJIDrone.getDroneType() != DJIDroneType.DJIDrone_Vision) {
            return false;
        }
        (new Thread() {
            public void run() {
                // Pause the stream while the decoder is reconfigured.
                CamShow.native_pauseStream(true);
                CamShow.native_setType(DjiGLSurfaceView.this.getType(type.value()));
                CamShow.native_pauseStream(false);
                try {
                    // Give the decoder time to settle before the thread exits.
                    Thread.sleep(1000L);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag instead of swallowing it.
                    Thread.currentThread().interrupt();
                }
            }
        }).start();
        return true;
    }

    /**
     * Feeds raw encoded video into the native decoder.
     *
     * @param videoBuffer encoded video bytes (as delivered by the SDK callback)
     * @param size        number of valid bytes in {@code videoBuffer}
     * @return {@code true} if the native layer accepted the data (returned 0)
     */
    public boolean setDataToDecoder(byte[] videoBuffer, int size) {
        int status;
        if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
            status = CamShow.native_setDataToDecoder(videoBuffer, size);
        } else {
            status = FPVController.native_setDataToDecoder(videoBuffer, size);
        }
        return status == 0;
    }

    /** Pauses rendering (and the native stream on Vision drones). */
    public boolean pause() {
        this.isPause = true;
        if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
            CamShow.native_pauseStream(true);
        }
        return true;
    }

    /** Resumes rendering (and the native stream on Vision drones). */
    public boolean resume() {
        if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
            CamShow.native_pauseStream(false);
        }
        this.isPause = false;
        return true;
    }

    /** Clears the pause flag. (Does not tear down native resources.) */
    public boolean destroy() {
        this.isPause = false;
        return true;
    }

    public boolean getIsPause() {
        return this.isPause;
    }

    private void setIsPause(boolean isPause) {
        this.isPause = isPause;
    }

    /**
     * Chooses an EGL config with at least the requested depth/stencil size
     * and exactly the requested RGBA channel sizes, restricted to
     * OpenGL ES 2.0-renderable configs.
     */
    private static class ConfigChooser implements EGLConfigChooser {

        private static int EGL_OPENGL_ES2_BIT = 4;

        // Minimal filter passed to eglChooseConfig:
        // EGL_RED_SIZE(12324)=4, EGL_GREEN_SIZE(12323)=4, EGL_BLUE_SIZE(12322)=4,
        // EGL_RENDERABLE_TYPE(12352)=EGL_OPENGL_ES2_BIT, EGL_NONE(12344).
        private static int[] s_configAttribs2 = new int[]{
                12324, EGL_OPENGL_ES2_BIT,
                12323, EGL_OPENGL_ES2_BIT,
                12322, EGL_OPENGL_ES2_BIT,
                12352, EGL_OPENGL_ES2_BIT,
                12344
        };

        protected int mRedSize;
        protected int mGreenSize;
        protected int mBlueSize;
        protected int mAlphaSize;
        protected int mDepthSize;
        protected int mStencilSize;

        // Scratch buffer reused by findConfigAttrib.
        private int[] mValue = new int[1];

        public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
            this.mRedSize = r;
            this.mGreenSize = g;
            this.mBlueSize = b;
            this.mAlphaSize = a;
            this.mDepthSize = depth;
            this.mStencilSize = stencil;
        }

        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
            // First query how many configs match the minimal filter, then
            // fetch them and pick the best match ourselves.
            int[] num_config = new int[1];
            egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
            int numConfigs = num_config[0];
            if (numConfigs <= 0) {
                throw new IllegalArgumentException("No configs match configSpec");
            }
            EGLConfig[] configs = new EGLConfig[numConfigs];
            egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
            return this.chooseConfig(egl, display, configs);
        }

        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs) {
            for (EGLConfig config : configs) {
                // 12325 = EGL_DEPTH_SIZE, 12326 = EGL_STENCIL_SIZE.
                int d = this.findConfigAttrib(egl, display, config, 12325, 0);
                int s = this.findConfigAttrib(egl, display, config, 12326, 0);
                if (d < this.mDepthSize || s < this.mStencilSize) {
                    continue;
                }
                // 12324/12323/12322/12321 = EGL_RED/GREEN/BLUE/ALPHA_SIZE.
                int r = this.findConfigAttrib(egl, display, config, 12324, 0);
                int g = this.findConfigAttrib(egl, display, config, 12323, 0);
                int b = this.findConfigAttrib(egl, display, config, 12322, 0);
                int a = this.findConfigAttrib(egl, display, config, 12321, 0);
                if (r == this.mRedSize && g == this.mGreenSize
                        && b == this.mBlueSize && a == this.mAlphaSize) {
                    return config;
                }
            }
            return null;
        }

        /** Reads one attribute of a config, falling back to defaultValue on failure. */
        private int findConfigAttrib(EGL10 egl, EGLDisplay display, EGLConfig config,
                                     int attribute, int defaultValue) {
            if (egl.eglGetConfigAttrib(display, config, attribute, this.mValue)) {
                return this.mValue[0];
            }
            return defaultValue;
        }

        /** Debug helper: dumps every supplied config to the log. */
        private void printConfigs(EGL10 egl, EGLDisplay display, EGLConfig[] configs) {
            int numConfigs = configs.length;
            Log.w(DjiGLSurfaceView.TAG, String.format("%d configurations", numConfigs));
            for (int i = 0; i < numConfigs; ++i) {
                Log.w(DjiGLSurfaceView.TAG, String.format("Configuration %d:\n", i));
                this.printConfig(egl, display, configs[i]);
            }
        }

        /** Debug helper: dumps all known EGL attributes of one config. */
        private void printConfig(EGL10 egl, EGLDisplay display, EGLConfig config) {
            int[] attributes = new int[]{12320, 12321, 12322, 12323, 12324, 12325, 12326, 12327, 12328, 12329, 12330, 12331, 12332, 12333, 12334, 12335, 12336, 12337, 12338, 12339, 12340, 12343, 12342, 12341, 12345, 12346, 12347, 12348, 12349, 12350, 12351, 12352, 12354};
            String[] names = new String[]{"EGL_BUFFER_SIZE", "EGL_ALPHA_SIZE", "EGL_BLUE_SIZE", "EGL_GREEN_SIZE", "EGL_RED_SIZE", "EGL_DEPTH_SIZE", "EGL_STENCIL_SIZE", "EGL_CONFIG_CAVEAT", "EGL_CONFIG_ID", "EGL_LEVEL", "EGL_MAX_PBUFFER_HEIGHT", "EGL_MAX_PBUFFER_PIXELS", "EGL_MAX_PBUFFER_WIDTH", "EGL_NATIVE_RENDERABLE", "EGL_NATIVE_VISUAL_ID", "EGL_NATIVE_VISUAL_TYPE", "EGL_PRESERVED_RESOURCES", "EGL_SAMPLES", "EGL_SAMPLE_BUFFERS", "EGL_SURFACE_TYPE", "EGL_TRANSPARENT_TYPE", "EGL_TRANSPARENT_RED_VALUE", "EGL_TRANSPARENT_GREEN_VALUE", "EGL_TRANSPARENT_BLUE_VALUE", "EGL_BIND_TO_TEXTURE_RGB", "EGL_BIND_TO_TEXTURE_RGBA", "EGL_MIN_SWAP_INTERVAL", "EGL_MAX_SWAP_INTERVAL", "EGL_LUMINANCE_SIZE", "EGL_ALPHA_MASK_SIZE", "EGL_COLOR_BUFFER_TYPE", "EGL_RENDERABLE_TYPE", "EGL_CONFORMANT"};
            int[] value = new int[1];
            for (int i = 0; i < attributes.length; ++i) {
                if (egl.eglGetConfigAttrib(display, config, attributes[i], value)) {
                    Log.w(DjiGLSurfaceView.TAG, String.format(" %s: %d\n", names[i], value[0]));
                }
            }
        }
    }

    /** Creates an OpenGL ES 2.0 EGL context. */
    private static class ContextFactory implements EGLContextFactory {

        // EGL_CONTEXT_CLIENT_VERSION attribute id.
        private static int EGL_CONTEXT_CLIENT_VERSION = 12440;

        private ContextFactory() {
        }

        public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
            Log.w(DjiGLSurfaceView.TAG, "creating OpenGL ES 2.0 context");
            DjiGLSurfaceView.checkEglError("Before eglCreateContext", egl);
            int[] attrib_list = new int[]{EGL_CONTEXT_CLIENT_VERSION, 2, 12344};
            EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
            DjiGLSurfaceView.checkEglError("After eglCreateContext", egl);
            return context;
        }

        public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
            egl.eglDestroyContext(display, context);
        }
    }

    /**
     * Renderer that delegates all drawing to the native decoder. Non-static
     * on purpose: it reads the enclosing view's pause flag.
     */
    private class MyRenderer implements Renderer {

        private MyRenderer() {
        }

        public void onDrawFrame(GL10 gl) {
            if (!DjiGLSurfaceView.this.isPause) {
                if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
                    CamShow.native_GLDrawFrame();
                } else {
                    FPVController.native_GLDrawFrame();
                }
            }
        }

        public void onSurfaceChanged(GL10 gl, int width, int height) {
            // Remember the size and (re)initialize the native GL pipeline.
            DjiGLSurfaceView.w = width;
            DjiGLSurfaceView.h = height;
            if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
                CamShow.native_GLInit(DjiGLSurfaceView.w, DjiGLSurfaceView.h);
            } else {
                FPVController.native_GLInit(DjiGLSurfaceView.w, DjiGLSurfaceView.h);
            }
        }

        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            // Nothing to do: native init happens in onSurfaceChanged.
        }
    }
}
GLSurfaceView 配合 VideoCallBack 的使用方式:
// Forward every raw video frame from the SDK into the view's native decoder.
mReceivedVideoDataCallBack = new DJIReceivedVideoDataCallBack(){
@Override
public void onResult(byte[] videoBuffer, int size)
{
// videoBuffer holds `size` bytes of encoded video from the drone.
djiGLSurfaceView.setDataToDecoder(videoBuffer,size);
}
};
// Register the callback so the camera starts delivering frames.
DJIDrone.getDjiCamera().setReceivedVideoDataCallBack(mReceivedVideoDataCallBack);
經過嘗試,兩個並排的 SurfaceView 無法同時工作——其中一個失敗,另一個正常。我猜 Renderer 只能供一個視圖使用,所以必須在單個 GLSurfaceView 內實現並排顯示。
我可能遺漏了一些細節,有問題請隨時提出……