2015-07-10 122 views
1

我有由D.J.I創建的自定義GLSurfaceView。視頻渲染是由他們自己編譯的類完成的,它只允許一個GLSurfaceView接收他們編碼的視頻。在Android中操作自定義GLSurfaceView

我想把視頻輸出複製一份並排顯示在屏幕上,以便用 V.R. 查看器觀看,但不確定是否可行。我知道通過複製他們自定義的 GLSurfaceView,我可以做一些操作,比如控制視頻輸出的大小(如果我把「SplitDjiSurfaceView.w = width;」這一行改成「width / 2」,左半邊就能正常顯示)。

問:如何把視頻複製成兩份,在同一個 GLSurfaceView 上並排播放,然後再把數據傳給 VideoCallBack?

定製GLSurfaceView:

import android.content.Context; 
import android.opengl.GLSurfaceView; 
import android.opengl.GLSurfaceView.EGLConfigChooser; 
import android.opengl.GLSurfaceView.EGLContextFactory; 
import android.opengl.GLSurfaceView.Renderer; 
import android.util.AttributeSet; 
import android.util.Log; 
import dji.midware.natives.FPVController; 
import dji.sdk.api.DJIDrone; 
import dji.sdk.api.Camera.DJICameraSettingsTypeDef.CameraPreviewResolustionType; 
import dji.sdk.api.DJIDroneTypeDef.DJIDroneType; 
import dji.sdk.natives.CamShow; 
import javax.microedition.khronos.egl.EGL10; 
import javax.microedition.khronos.egl.EGLConfig; 
import javax.microedition.khronos.egl.EGLContext; 
import javax.microedition.khronos.egl.EGLDisplay; 
import javax.microedition.khronos.opengles.GL10; 

/**
 * GLSurfaceView that displays the DJI FPV video stream.
 *
 * The heavy lifting is done in native code: {@code CamShow} (Vision drones) or
 * {@code FPVController} (everything else) decodes the H.264 buffers pushed in
 * via {@link #setDataToDecoder(byte[], int)} and draws them with GLES 2.0 when
 * the renderer calls the native draw entry point. This view only sets up an
 * ES 2.0 EGL context/config and forwards lifecycle events to the native layer.
 */
public class DjiGLSurfaceView extends GLSurfaceView {

    private static final String TAG = "MyGLSurfaceView.java";
    private static final boolean DEBUG = false;

    /** PixelFormat.TRANSLUCENT; inlined because android.graphics.PixelFormat is not imported here. */
    private static final int PIXEL_FORMAT_TRANSLUCENT = -3;

    /** Return code used by the native decoder entry points to signal success. */
    private static final int NATIVE_SUCCESS = 0;

    // Last surface size reported by onSurfaceChanged(); handed to the native GL initialiser.
    private static int w = 0;
    private static int h = 0;

    // When true, onDrawFrame() skips the native draw call so the last frame stays on screen.
    private boolean isPause = false;

    public DjiGLSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        this.init(false, 0, 0);
    }

    public DjiGLSurfaceView(Context context) {
        super(context);
        this.init(false, 0, 0);
    }

    public DjiGLSurfaceView(Context context, boolean translucent, int depth, int stencil) {
        super(context);
        this.init(translucent, depth, stencil);
    }

    /**
     * Shared constructor body: installs the ES 2.0 context factory, an EGL
     * config chooser matching the requested pixel layout (RGBA8888 when
     * translucent, RGB565 otherwise), and the renderer driving the native
     * decoder.
     */
    private void init(boolean translucent, int depth, int stencil) {
        if (translucent) {
            this.getHolder().setFormat(PIXEL_FORMAT_TRANSLUCENT);
        }
        // Original decompiled source used the synthetic accessor form
        // "new ContextFactory((ContextFactory) null)", which is not valid Java
        // source; instantiate the private nested classes directly instead.
        this.setEGLContextFactory(new ContextFactory());
        this.setEGLConfigChooser(translucent
                ? new ConfigChooser(8, 8, 8, 8, depth, stencil)
                : new ConfigChooser(5, 6, 5, 0, depth, stencil));
        this.setRenderer(new MyRenderer());
        // Draw only when requestRender() is invoked (see fcb()), not continuously.
        this.setRenderMode(RENDERMODE_WHEN_DIRTY);
    }

    /** Logs and drains every pending EGL error until eglGetError() reports EGL_SUCCESS. */
    private static void checkEglError(String prompt, EGL10 egl) {
        int error;
        while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
            Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
        }
    }

    /**
     * Frame callback invoked by the native layer (registered by name in
     * {@link #start()}); schedules a redraw of the surface. Always returns 0.
     */
    public int fcb() {
        this.requestRender();
        return 0;
    }

    /** Maps a CameraPreviewResolustionType raw value (0..3) onto the native bit flag (1, 2, 4, 8). */
    private int getType(int type) {
        switch (type) {
            case 1:
                return 2;
            case 2:
                return 4;
            case 3:
                return 8;
            default:
                // type == 0 and any unexpected value fall back to the lowest flag.
                return 1;
        }
    }

    /**
     * Registers {@link #fcb()} with the native stream so each decoded frame
     * triggers a render. Runs on a worker thread; always returns true
     * immediately, before registration has necessarily completed.
     */
    public boolean start() {
        (new Thread() {
            @Override
            public void run() {
                if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
                    CamShow.native_setOnStreamCB(DjiGLSurfaceView.this, "fcb");
                } else {
                    FPVController.native_setOnStreamCB(DjiGLSurfaceView.this, "fcb");
                }
            }
        }).start();
        return true;
    }

    /**
     * Switches the preview resolution. Only supported on Vision drones; returns
     * false otherwise. The change is applied asynchronously: the stream is
     * paused, retyped, resumed, and the worker then sleeps one second before
     * exiting.
     */
    public boolean setStreamType(final CameraPreviewResolustionType type) {
        if (DJIDrone.getDroneType() != DJIDroneType.DJIDrone_Vision) {
            return false;
        }
        (new Thread() {
            @Override
            public void run() {
                // Pause the stream while the native decoder is reconfigured.
                CamShow.native_pauseStream(true);
                CamShow.native_setType(DjiGLSurfaceView.this.getType(type.value()));
                CamShow.native_pauseStream(false);
                try {
                    // Give the decoder time to settle before the worker exits.
                    Thread.sleep(1000L);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                    // Preserve the interrupt status for anyone inspecting this thread.
                    Thread.currentThread().interrupt();
                }
            }
        }).start();
        return true;
    }

    /**
     * Feeds one encoded video buffer to the native decoder.
     *
     * @param videoBuffer encoded video data (as delivered by the SDK's
     *                    received-video-data callback)
     * @param size        number of valid bytes in {@code videoBuffer}
     * @return true if the native decoder accepted the data
     */
    public boolean setDataToDecoder(byte[] videoBuffer, int size) {
        int status;
        if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
            status = CamShow.native_setDataToDecoder(videoBuffer, size);
        } else {
            status = FPVController.native_setDataToDecoder(videoBuffer, size);
        }
        return status == NATIVE_SUCCESS;
    }

    /** Stops drawing; on Vision drones also pauses the native stream. Always returns true. */
    public boolean pause() {
        this.isPause = true;
        if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
            CamShow.native_pauseStream(true);
        }
        return true;
    }

    /** Resumes drawing; on Vision drones also resumes the native stream. Always returns true. */
    public boolean resume() {
        if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
            CamShow.native_pauseStream(false);
        }
        this.isPause = false;
        return true;
    }

    /**
     * Clears the pause flag. NOTE(review): despite the name, this releases no
     * native resources in the code shown here.
     */
    public boolean destroy() {
        this.isPause = false;
        return true;
    }

    /** @return true while drawing is paused (between pause() and resume()). */
    public boolean getIsPause() {
        return this.isPause;
    }

    /** Unused internally; kept for API symmetry with getIsPause(). */
    private void setIsPause(boolean isPause) {
        this.isPause = isPause;
    }

    /**
     * EGLConfigChooser that first asks EGL for every GLES2-capable config and
     * then returns the first one meeting the depth/stencil minimums whose RGBA
     * channel sizes match the requested layout exactly.
     */
    private static class ConfigChooser implements EGLConfigChooser {

        /** Bit for EGL_RENDERABLE_TYPE selecting OpenGL ES 2.0 configs. */
        private static final int EGL_OPENGL_ES2_BIT = 4;

        /** Broad first-pass spec: at least 4 bits per colour channel, ES 2.0 renderable. */
        private static final int[] s_configAttribs2 = new int[]{
                EGL10.EGL_RED_SIZE, 4,
                EGL10.EGL_GREEN_SIZE, 4,
                EGL10.EGL_BLUE_SIZE, 4,
                EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
                EGL10.EGL_NONE};

        protected int mRedSize;
        protected int mGreenSize;
        protected int mBlueSize;
        protected int mAlphaSize;
        protected int mDepthSize;
        protected int mStencilSize;

        /** Scratch buffer reused by findConfigAttrib() to avoid per-query allocation. */
        private int[] mValue = new int[1];

        public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
            this.mRedSize = r;
            this.mGreenSize = g;
            this.mBlueSize = b;
            this.mAlphaSize = a;
            this.mDepthSize = depth;
            this.mStencilSize = stencil;
        }

        @Override
        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
            int[] num_config = new int[1];
            // First call only counts the configs matching the broad spec.
            egl.eglChooseConfig(display, s_configAttribs2, (EGLConfig[]) null, 0, num_config);
            int numConfigs = num_config[0];
            if (numConfigs <= 0) {
                throw new IllegalArgumentException("No configs match configSpec");
            }
            EGLConfig[] configs = new EGLConfig[numConfigs];
            egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
            return this.chooseConfig(egl, display, configs);
        }

        /**
         * @return the first config with depth/stencil at least the requested
         *         minimum and exactly-matching RGBA sizes, or null if none fits
         */
        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, EGLConfig[] configs) {
            for (EGLConfig config : configs) {
                int d = this.findConfigAttrib(egl, display, config, EGL10.EGL_DEPTH_SIZE, 0);
                int s = this.findConfigAttrib(egl, display, config, EGL10.EGL_STENCIL_SIZE, 0);
                if (d < this.mDepthSize || s < this.mStencilSize) {
                    continue;
                }
                int r = this.findConfigAttrib(egl, display, config, EGL10.EGL_RED_SIZE, 0);
                int g = this.findConfigAttrib(egl, display, config, EGL10.EGL_GREEN_SIZE, 0);
                int b = this.findConfigAttrib(egl, display, config, EGL10.EGL_BLUE_SIZE, 0);
                int a = this.findConfigAttrib(egl, display, config, EGL10.EGL_ALPHA_SIZE, 0);
                if (r == this.mRedSize && g == this.mGreenSize
                        && b == this.mBlueSize && a == this.mAlphaSize) {
                    return config;
                }
            }
            return null;
        }

        /** Reads one attribute of a config, falling back to defaultValue if the query fails. */
        private int findConfigAttrib(EGL10 egl, EGLDisplay display, EGLConfig config,
                int attribute, int defaultValue) {
            if (egl.eglGetConfigAttrib(display, config, attribute, this.mValue)) {
                return this.mValue[0];
            }
            return defaultValue;
        }

        /** Debug helper: dumps every supplied config to the log. Unused while DEBUG is false. */
        private void printConfigs(EGL10 egl, EGLDisplay display, EGLConfig[] configs) {
            int numConfigs = configs.length;
            Log.w(DjiGLSurfaceView.TAG, String.format("%d configurations", numConfigs));
            for (int i = 0; i < numConfigs; ++i) {
                Log.w(DjiGLSurfaceView.TAG, String.format("Configuration %d:\n", i));
                this.printConfig(egl, display, configs[i]);
            }
        }

        /** Debug helper: logs every standard EGL attribute of a single config. */
        private void printConfig(EGL10 egl, EGLDisplay display, EGLConfig config) {
            // Raw EGL attribute tokens, paired index-for-index with the names below.
            int[] attributes = new int[]{12320, 12321, 12322, 12323, 12324, 12325, 12326, 12327, 12328, 12329, 12330, 12331, 12332, 12333, 12334, 12335, 12336, 12337, 12338, 12339, 12340, 12343, 12342, 12341, 12345, 12346, 12347, 12348, 12349, 12350, 12351, 12352, 12354};
            String[] names = new String[]{"EGL_BUFFER_SIZE", "EGL_ALPHA_SIZE", "EGL_BLUE_SIZE", "EGL_GREEN_SIZE", "EGL_RED_SIZE", "EGL_DEPTH_SIZE", "EGL_STENCIL_SIZE", "EGL_CONFIG_CAVEAT", "EGL_CONFIG_ID", "EGL_LEVEL", "EGL_MAX_PBUFFER_HEIGHT", "EGL_MAX_PBUFFER_PIXELS", "EGL_MAX_PBUFFER_WIDTH", "EGL_NATIVE_RENDERABLE", "EGL_NATIVE_VISUAL_ID", "EGL_NATIVE_VISUAL_TYPE", "EGL_PRESERVED_RESOURCES", "EGL_SAMPLES", "EGL_SAMPLE_BUFFERS", "EGL_SURFACE_TYPE", "EGL_TRANSPARENT_TYPE", "EGL_TRANSPARENT_RED_VALUE", "EGL_TRANSPARENT_GREEN_VALUE", "EGL_TRANSPARENT_BLUE_VALUE", "EGL_BIND_TO_TEXTURE_RGB", "EGL_BIND_TO_TEXTURE_RGBA", "EGL_MIN_SWAP_INTERVAL", "EGL_MAX_SWAP_INTERVAL", "EGL_LUMINANCE_SIZE", "EGL_ALPHA_MASK_SIZE", "EGL_COLOR_BUFFER_TYPE", "EGL_RENDERABLE_TYPE", "EGL_CONFORMANT"};
            int[] value = new int[1];
            for (int i = 0; i < attributes.length; ++i) {
                if (egl.eglGetConfigAttrib(display, config, attributes[i], value)) {
                    Log.w(DjiGLSurfaceView.TAG, String.format(" %s: %d\n", names[i], value[0]));
                }
            }
        }
    }

    /** Creates and destroys an OpenGL ES 2.0 context for the surface. */
    private static class ContextFactory implements EGLContextFactory {

        /** EGL_CONTEXT_CLIENT_VERSION (0x3098) — not exposed by the EGL10 interface. */
        private static final int EGL_CONTEXT_CLIENT_VERSION = 12440;

        private ContextFactory() {
        }

        @Override
        public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
            Log.w(DjiGLSurfaceView.TAG, "creating OpenGL ES 2.0 context");
            DjiGLSurfaceView.checkEglError("Before eglCreateContext", egl);
            int[] attrib_list = new int[]{EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
            EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
            DjiGLSurfaceView.checkEglError("After eglCreateContext", egl);
            return context;
        }

        @Override
        public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
            egl.eglDestroyContext(display, context);
        }
    }

    /** Renderer that delegates each frame to the SDK's native GLES drawing code. */
    private class MyRenderer implements Renderer {

        private MyRenderer() {
        }

        @Override
        public void onDrawFrame(GL10 gl) {
            // Skip the native draw while paused so the last frame stays visible.
            if (!DjiGLSurfaceView.this.isPause) {
                if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
                    CamShow.native_GLDrawFrame();
                } else {
                    FPVController.native_GLDrawFrame();
                }
            }
        }

        @Override
        public void onSurfaceChanged(GL10 gl, int width, int height) {
            // Remember the surface size and (re)initialise the native GL pipeline with it.
            DjiGLSurfaceView.w = width;
            DjiGLSurfaceView.h = height;
            if (DJIDrone.getDroneType() == DJIDroneType.DJIDrone_Vision) {
                CamShow.native_GLInit(DjiGLSurfaceView.w, DjiGLSurfaceView.h);
            } else {
                FPVController.native_GLInit(DjiGLSurfaceView.w, DjiGLSurfaceView.h);
            }
        }

        @Override
        public void onSurfaceCreated(GL10 gl, EGLConfig config) {
            // Nothing to do here: the native side initialises in onSurfaceChanged().
        }
    }
}

帶 VideoCallBack 的 GLSurfaceView 的實現:

 // Forward every encoded video buffer from the drone straight into the
 // GLSurfaceView's native decoder; decoding and drawing happen inside the SDK.
 mReceivedVideoDataCallBack = new DJIReceivedVideoDataCallBack(){ 

     @Override 
     public void onResult(byte[] videoBuffer, int size) 
     { 
      // Invoked by the SDK with each received chunk of encoded video data.
      djiGLSurfaceView.setDataToDecoder(videoBuffer,size); 
     } 
    }; 

    // Register the callback so the camera starts delivering video data to it.
    DJIDrone.getDjiCamera().setReceivedVideoDataCallBack(mReceivedVideoDataCallBack); 

事實證明,兩個 SurfaceView 並排放置時無法同時工作:其中一個會失效,另一個則正常。我猜 Renderer 只能供一個視圖使用,所以必須在單個 GLSurfaceView 上實現才行。

我可能遺漏了一些信息,請隨時提問……

回答

3

我不熟悉的圖書館,但我的猜測是,它的工作原理是利用當前EGL上下文和表面,可能在軟件中解碼視頻並將其上傳爲GLES紋理。

如果是這種情況,您可以用不同的表面(surface)來配置 EGL,這樣當您調用 native_GLDrawFrame() 時,它就會渲染到那個表面上。如果當前綁定的是 FBO,它就會使用 FBO 而不是 SurfaceView。然後,您可以用 GLES 隨意渲染所附加的紋理。(我猜您是想實現某種立體效果。)

這種做法的一個例子可以在 Grafika 的「記錄 GL 應用程序」活動中找到。在 RECMETHOD_FBO 模式下,doFrame() 方法會先渲染到 FBO,然後從該 FBO 拷貝(blit)兩次——一次到屏幕,一次到視頻編碼器。類似的做法應該也適用於你的情況。

如果您手頭沒有等價的代碼,就需要從 Grafika 中提取 EGL/GLES 相關代碼。(你無法從 SurfaceView 讀回像素,所以必須把內容渲染兩次;我猜如果庫已經替你做了這件事,你就不會來提問了,因此先捕獲渲染結果再進行拷貝是必要的。)

這也是完全可能的,我對圖書館工作的猜測是錯誤的,所以一些實驗可能是必要的。

編輯:我應該注意到,儘管無法從SurfaceView中獲取像素,但您可以在提交框架之前閱讀它們。因此,如果庫按我認爲的方式工作,則它將呈現給GLES而不調用eglSwapBuffers()(當onDrawFrame()返回時由GLSurfaceView調用)。因此,在onDrawFrame()中,您可以使用glReadPixels()讀取像素,將它們上載到第二個GLSurfaceView上下文中的紋理上,並使用glTexImage2D(),然後在不同的表面上繪製它們。這比FBO方法慢,因爲像素必須被複制到CPU並返回,但它可能工作。

我還應該指出,無論您最終採用哪種方案,把視頻的左右兩半渲染到單個 Surface 上都比使用兩個獨立的 SurfaceView 更好。如果使用兩個 Surface,無法保證它們在同一次顯示刷新中同時更新,因此某一側可能比另一側落後一幀。先捕獲輸出,再把它渲染兩次到同一個 SurfaceView 會好得多。(GLSurfaceView 本質上就是一個 SurfaceView,只是附加了處理 EGL 初始化和線程管理的代碼。)